Coverage for suhteita/suhteita.py: 26.64%
215 statements
coverage.py v7.10.5, created at 2025-08-25 21:36:35 +00:00
1"""Load the JIRA instance."""
3import argparse
4import datetime as dti
5import json
6import logging
7import secrets
8from typing import Dict, Union, no_type_check
10import suhteita.ticket_system_actions as actions
11from suhteita import (
12 APP_ALIAS,
13 APP_ENV,
14 BASE_URL,
15 COMMA,
16 IDENTITY,
17 IS_CLOUD,
18 NODE_INDICATOR,
19 PROJECT,
20 STORE,
21 TOKEN,
22 TS_FORMAT_PAYLOADS,
23 USER,
24 VERSION,
25 extract_fields,
26 log,
27 two_sentences,
28)
29from suhteita.store import Store
31Context = Dict[str, Union[str, dti.datetime]]
34@no_type_check
35def setup_twenty_seven(options: argparse.Namespace) -> object:
36 """Set up the scenario adn return the parameters as members of an object."""
38     class Setup:
39         pass
41     setup = Setup()
43     setup.user = options.user if options.user else USER
44     setup.token = options.token if options.token else TOKEN
45     setup.target_url = options.target_url if options.target_url else BASE_URL
46     setup.is_cloud = options.is_cloud if options.is_cloud else IS_CLOUD
47     setup.target_project = options.target_project if options.target_project else PROJECT
48     setup.scenario = options.scenario if options.scenario else 'unknown'
49     setup.identity = options.identity if options.identity else IDENTITY
50     setup.storage_path = options.out_path if options.out_path else STORE
52     log.info('=' * 84)
53     log.info(f'Generator {APP_ALIAS} version {VERSION}')
54     log.info('# Prelude of a 27-steps scenario test execution')
56     setup.c_rand, setup.d_rand = two_sentences()
57     log.info(f'- Setup <01> Random sentence of original ({setup.c_rand})')
58     log.info(f'- Setup <02> Random sentence of duplicate ({setup.d_rand})')
60     setup.random_component = secrets.token_urlsafe()
61     log.info(f'- Setup <03> Random component name ({setup.random_component})')
63     setup.todo, setup.in_progress, setup.done = options.workflow_csv.split(COMMA)  # Default: 'to do,in progress,done'
64     log.info(
65         f'- Setup <04> The test workflow assumes the (case insensitive) states'
66         f' ({setup.todo}, {setup.in_progress}, {setup.done})'
67     )
69     setup.ts = dti.datetime.now(tz=dti.timezone.utc).strftime(TS_FORMAT_PAYLOADS)
70     log.info(f'- Setup <05> Timestamp marker in summaries will be ({setup.ts})')
72     setup.desc_core = '... and short description we dictate.'
73     log.info(f'- Setup <06> Common description part - of twin issues / pair - will be ({setup.desc_core})')
75     setup.amendment = 'No, no, no. They duplicated me, help!'
76     log.info(f'- Setup <07> Amendment for original description will be ({setup.amendment})')
78     setup.fake_comment = 'I am the original, surely!'
79     log.info(f'- Setup <08> Fake comment for duplicate will be ({setup.fake_comment})')
81     setup.duplicate_labels = ['du', 'pli', 'ca', 'te']
82     log.info(f'- Setup <09> Labels for duplicate will be ({setup.duplicate_labels})')
84     setup.original_labels = ['for', 'real', 'highlander']
85     log.info(f'- Setup <10> Labels for original will be ({setup.original_labels})')
87     setup.hours_value = 42
88     log.info(f'- Setup <11> Hours value for original estimate will be ({setup.hours_value})')
90     setup.purge_me = 'SUHTEITA_PURGE_ME_ORIGINAL'
91     log.info(f'- Setup <12> Purge indicator comment will be ({setup.purge_me})')
93     setup.node_indicator = NODE_INDICATOR
94     log.info(f'- Setup <13> Node indicator ({setup.node_indicator})')
96     log.info(
97         f'- Setup <14> Connect will be to upstream ({"cloud" if setup.is_cloud else "on-site"})'
98         f' service ({setup.target_url}) per login ({setup.user})'
99     )
100     log.info('-' * 84)
102     return setup
105 def main(options: argparse.Namespace) -> int:
106     """Drive the transactions."""
108     if options.version:    [coverage: 108 ↛ 109, line 108 didn't jump to line 109 because the condition on line 108 was never true]
109         print(VERSION)
110         return 0
112     if not options.token and not TOKEN:    [coverage: 112 ↛ 116, line 112 didn't jump to line 116 because the condition on line 112 was always true]
113         log.error(f'No secret token or pass phrase given, please set {APP_ENV}_TOKEN accordingly')
114         return 2
116     if options.trace:
117         logging.getLogger().setLevel(logging.DEBUG)
118     elif options.debug:
119         log.setLevel(logging.DEBUG)
120     cfg = setup_twenty_seven(options=options)
122     # Belt and braces:
123     has_failures = False
125     # Here we start the timer for the session:
126     start_time = dti.datetime.now(tz=dti.timezone.utc)
127     start_ts = start_time.strftime(TS_FORMAT_PAYLOADS)
128     context: Context = {
129         'target': cfg.target_url,
130         'mode': f'{"cloud" if cfg.is_cloud else "on-site"}',
131         'project': cfg.target_project,
132         'scenario': cfg.scenario,
133         'identity': cfg.identity,
134         'start_time': start_time,
135     }
136     store = Store(context=context, setup=cfg, folder_path=cfg.storage_path)
137     log.info(f'# Starting 27-steps scenario test execution at ({start_ts})')
138     log.info('- Step <01> LOGIN')
139     clk, service = actions.login(cfg.target_url, cfg.user, password=cfg.token, is_cloud=cfg.is_cloud)
140     log.info(f'^ Connected to upstream service; CLK={clk}')
141     store.add('LOGIN', True, clk)
143     log.info('- Step <02> SERVER_INFO')
144     clk, server_info = actions.get_server_info(service)
145     log.info(f'^ Retrieved upstream server info cf. [SRV]; CLK={clk}')
146     store.add('SERVER_INFO', True, clk, str(server_info))
148     log.info('- Step <03> PROJECTS')
149     clk, projects = actions.get_all_projects(service)
150     log.info(f'^ Retrieved {len(projects)} unarchived projects; CLK={clk}')
151     store.add('PROJECTS', True, clk, f'count({len(projects)})')
153     proj_env_ok = False
154     if cfg.target_project:
155         proj_env_ok = any((cfg.target_project == project['key'] for project in projects))
157     if not proj_env_ok:
158         log.error('Belt and braces - verify project selection:')
159         log.info(json.dumps(sorted([project['key'] for project in projects]), indent=2))
160         return 1
162     first_proj_key = cfg.target_project if proj_env_ok else projects[0]['key']
163     log.info(
164         f'Verified target project from request ({cfg.target_project}) to be'
165         f' {"" if proj_env_ok else "not "}present and set target project to ({first_proj_key})'
166     )
168     log.info('- Step <04> CREATE_ISSUE')
169     clk, c_key = actions.create_issue(
170         service, first_proj_key, cfg.ts, description=f'{cfg.c_rand}\n{cfg.desc_core}\nCAUSALITY={cfg.node_indicator}'
171     )
172     log.info(f'^ Created original ({c_key}); CLK={clk}')
173     store.add('CREATE_ISSUE', True, clk, 'original')
175     log.info('- Step <05> ISSUE_EXISTS')
176     clk, c_e = actions.issue_exists(service, c_key)
177     log.info(f'^ Existence of original ({c_key}) verified with result ({c_e}); CLK={clk}')
178     store.add('ISSUE_EXISTS', bool(c_e), clk, 'original')
180     log.info('- Step <06> CREATE_ISSUE')
181     clk, d_key = actions.create_issue(
182         service, first_proj_key, cfg.ts, description=f'{cfg.d_rand}\n{cfg.desc_core}\nCAUSALITY={cfg.node_indicator}'
183     )
184     log.info(f'^ Created duplicate ({d_key}); CLK={clk}')
185     store.add('CREATE_ISSUE', True, clk, 'duplicate')
187     log.info('- Step <07> ISSUE_EXISTS')
188     clk, d_e = actions.issue_exists(service, d_key)
189     log.info(f'^ Existence of duplicate ({d_key}) verified with result ({d_e}); CLK={clk}')
190     store.add('ISSUE_EXISTS', bool(d_e), clk, 'duplicate')
192     query = f'issue = {c_key}'
193     log.info('- Step <08> EXECUTE_JQL')
194     clk, c_q = actions.execute_jql(service=service, query=query)
195     log.info(f'^ Executed JQL({query}); CLK={clk}')
196     store.add('EXECUTE_JQL', True, clk, f'query({query.replace(c_key, "original-key")})')
198     log.info('- Step <09> AMEND_ISSUE_DESCRIPTION')
199     clk = actions.amend_issue_description(service, c_key, amendment=cfg.amendment, issue_context=c_q)
200     log.info(f'^ Amended description of original {c_key} with ({cfg.amendment}); CLK={clk}')
201     store.add('AMEND_ISSUE_DESCRIPTION', True, clk, 'original')
203     log.info('- Step <10> ADD_COMMENT')
204     clk, _ = actions.add_comment(service=service, issue_key=d_key, comment=cfg.fake_comment)
205     log.info(f'^ Added comment ({cfg.fake_comment}) to duplicate {d_key}; CLK={clk}')
206     store.add('ADD_COMMENT', True, clk, 'duplicate')
208     log.info('- Step <11> UPDATE_ISSUE_FIELD')
209     clk = actions.update_issue_field(service, d_key, labels=cfg.duplicate_labels)
210     log.info(f'^ Updated duplicate {d_key} issue field of labels to ({cfg.duplicate_labels}); CLK={clk}')
211     store.add('UPDATE_ISSUE_FIELD', True, clk, 'duplicate')
213     log.info('- Step <12> UPDATE_ISSUE_FIELD')
214     clk = actions.update_issue_field(service, c_key, labels=cfg.original_labels)
215     log.info(f'^ Updated original {c_key} issue field of labels to ({cfg.original_labels}); CLK={clk}')
216     store.add('UPDATE_ISSUE_FIELD', True, clk, 'original')
218     log.info('- Step <13> CREATE_DUPLICATES_ISSUE_LINK')
219     clk, _ = actions.create_duplicates_issue_link(service, c_key, d_key)
220     log.info(f'^ Created link on duplicate stating it duplicates the original; CLK={clk}')
221     store.add('CREATE_DUPLICATES_ISSUE_LINK', True, clk, 'duplicate duplicates original')
223     log.info('- Step <14> GET_ISSUE_STATUS')
224     clk, d_iss_state = actions.get_issue_status(service, d_key)
225     d_is_todo = d_iss_state.lower() == cfg.todo
226     log.info(
227         f'^ Retrieved status of the duplicate {d_key} as ({d_iss_state})'
228         f' with result (is_todo == {d_is_todo}); CLK={clk}'
229     )
230     store.add('GET_ISSUE_STATUS', d_is_todo, clk, f'duplicate({d_iss_state})')
232     log.info('- Step <15> SET_ISSUE_STATUS')
233     clk, _ = actions.set_issue_status(service, d_key, cfg.in_progress)
234     log.info(f'^ Transitioned the duplicate {d_key} to ({cfg.in_progress}); CLK={clk}')
235     store.add('SET_ISSUE_STATUS', True, clk, f'duplicate ({cfg.todo})->({cfg.in_progress})')
237     log.info('- Step <16> SET_ISSUE_STATUS')
238     clk, _ = actions.set_issue_status(service, d_key, cfg.done)
239     log.info(f'^ Transitioned the duplicate {d_key} to ({cfg.done}); CLK={clk}')
240     store.add('SET_ISSUE_STATUS', True, clk, f'duplicate ({cfg.in_progress})->({cfg.done})')
242     log.info('- Step <17> GET_ISSUE_STATUS')
243     clk, d_iss_state_done = actions.get_issue_status(service, d_key)
244     d_is_done = d_iss_state_done.lower() == cfg.done
245     log.info(
246         f'^ Retrieved status of the duplicate {d_key} as ({d_iss_state_done})'
247         f' with result (d_is_done == {d_is_done}); CLK={clk}'
248     )
249     store.add('GET_ISSUE_STATUS', d_is_done, clk, f'duplicate({d_iss_state_done})')
251     log.info('- Step <18> ADD_COMMENT')
252     clk, response_step_18_add_comment = actions.add_comment(service, d_key, 'Closed as duplicate.')
253     log.info(f'^ Added comment on {d_key} with response extract cf. [RESP-STEP-18]; CLK={clk}')
254     store.add('ADD_COMMENT', True, clk, f'duplicate({response_step_18_add_comment["body"]})')
256     log.info('- Step <19> SET_ORIGINAL_ESTIMATE')
257     clk, ok = actions.set_original_estimate(service, c_key, hours=cfg.hours_value)
258     log.info(
259         f'^ Added ({cfg.hours_value}) hours as original estimate to original {c_key} with result ({ok}); CLK={clk}'
260     )
261     store.add('SET_ORIGINAL_ESTIMATE', ok, clk, 'original')
263     log.info('- Step <20> GET_ISSUE_STATUS')
264     clk, c_iss_state = actions.get_issue_status(service, c_key)
265     c_is_todo = c_iss_state.lower() == cfg.todo
266     log.info(
267         f'^ Retrieved status of the original {c_key} as ({c_iss_state})'
268         f' with result (c_is_todo == {c_is_todo}); CLK={clk}'
269     )
270     store.add('GET_ISSUE_STATUS', c_is_todo, clk, f'original({c_iss_state})')
272     log.info('- Step <21> SET_ISSUE_STATUS')
273     clk, _ = actions.set_issue_status(service, c_key, cfg.in_progress)
274     log.info(f'^ Transitioned the original {c_key} to ({cfg.in_progress}); CLK={clk}')
275     store.add('SET_ISSUE_STATUS', True, clk, f'original ({cfg.todo})->({cfg.in_progress})')
277     log.info('- Step <22> GET_ISSUE_STATUS')
278     clk, c_iss_state_in_progress = actions.get_issue_status(service, c_key)
279     c_is_in_progress = c_iss_state_in_progress.lower() == cfg.in_progress
280     log.info(
281         f'^ Retrieved status of the original {c_key} as ({c_iss_state_in_progress})'
282         f' with result (c_is_in_progress == {c_is_in_progress}); CLK={clk}'
283     )
284     store.add('GET_ISSUE_STATUS', c_is_in_progress, clk, f'original({c_iss_state_in_progress})')
286     log.info('- Step <23> CREATE_COMPONENT')
287     clk, comp_id, a_component, comp_resp = actions.create_component(
288         service=service, project=first_proj_key, name=cfg.random_component, description=cfg.c_rand
289     )
290     log.info(f'^ Created component ({a_component}) with response extract cf. [RESP-STEP-23]; CLK={clk}')
291     store.add('CREATE_COMPONENT', True, clk, f'component({comp_resp["description"]})')  # type: ignore
293     log.info('- Step <24> RELATE_ISSUE_TO_COMPONENT')
294     clk, ok = actions.relate_issue_to_component(service, c_key, comp_id, a_component)
295     log.info(
296         f'^ Attempted relation of original {c_key} issue to component ({a_component}) with result ({ok}); CLK={clk}'
297     )
298     store.add('RELATE_ISSUE_TO_COMPONENT', ok, clk, 'original')
299     if not ok:
300         has_failures = True
302     log.info('- Step <25> LOAD_ISSUE')
303     clk, x_iss = actions.load_issue(service, c_key)
304     log.info(f'^ Loaded issue {c_key}; CLK={clk}')
305     log.debug(json.dumps(x_iss, indent=2))
306     store.add('LOAD_ISSUE', True, clk, 'original')
308     log.info('- Step <26> ADD_COMMENT')
309     clk, response_step_26_add_comment = actions.add_comment(service=service, issue_key=c_key, comment=cfg.purge_me)
310     log.info(f'^ Added purge tag comment on original {c_key} with response extract cf. [RESP-STEP-26]; CLK={clk}')
311     store.add('ADD_COMMENT', True, clk, f'original({response_step_26_add_comment["body"]})')
313     log.info('- Step <27> ADD_COMMENT')
314     clk, response_step_27_add_comment = actions.add_comment(service=service, issue_key=d_key, comment=cfg.purge_me)
315     log.info(
316         f'^ Added purge tag comment on duplicate issue {d_key} with response extract cf. [RESP-STEP-27]; CLK={clk}'
317     )
318     store.add('ADD_COMMENT', True, clk, f'duplicate({response_step_27_add_comment["body"]})')
320     # Here we stop the timer for the session:
321     end_time = dti.datetime.now(tz=dti.timezone.utc)
322     end_ts = end_time.strftime(TS_FORMAT_PAYLOADS)
323     log.info(f'# Ended execution of 27-steps scenario test at ({end_ts})')
324     log.info(f'Execution of 27-steps scenario test took {(end_time - start_time)} h:mm:ss.uuuuuu')
325     log.info('-' * 84)
327     log.info('# References:')
328     log.info(f'[SRV] Server info is ({server_info})')
329     log.info(
330         f'[RESP-STEP-18] Add comment response is'
331         f' ({extract_fields(response_step_18_add_comment, fields=("self", "body"))})'
332     )
333     log.info(
334         f'[RESP-STEP-23] Create component response is ({extract_fields(comp_resp, fields=("self", "description"))})'
335     )
336     log.info(
337         f'[RESP-STEP-26] Add comment response is'
338         f' ({extract_fields(response_step_26_add_comment, fields=("self", "body"))})'
339     )
340     log.info(
341         f'[RESP-STEP-27] Add comment response is'
342         f' ({extract_fields(response_step_27_add_comment, fields=("self", "body"))})'
343     )
344     log.info('-' * 84)
346     log.info('Dumping records to store...')
347     store.dump(end_time=end_time, has_failures=has_failures)
348     log.info('-' * 84)
350     log.info('OK')
351     log.info('=' * 84)
353     return 0
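
For local experimentation, main() can be driven with a hand-built argument namespace. The sketch below is not part of suhteita/suhteita.py and not the package's actual CLI wiring (which lives elsewhere and is not shown in this report); every flag name is merely inferred from the options.* attributes read in the listing above.

# Editor's sketch only: a hypothetical driver for main(); flag names are inferred
# from the options.* attributes used above, not taken from the real CLI module.
import argparse

from suhteita.suhteita import main  # module path as reported above


def build_sketch_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description='drive the 27-steps scenario (sketch)')
    parser.add_argument('--user', default='')
    parser.add_argument('--token', default='')
    parser.add_argument('--target-url', dest='target_url', default='')
    parser.add_argument('--is-cloud', dest='is_cloud', action='store_true')
    parser.add_argument('--target-project', dest='target_project', default='')
    parser.add_argument('--scenario', default='unknown')
    parser.add_argument('--identity', default='')
    parser.add_argument('--out-path', dest='out_path', default='')
    parser.add_argument('--workflow-csv', dest='workflow_csv', default='to do,in progress,done')
    parser.add_argument('--version', action='store_true')
    parser.add_argument('--trace', action='store_true')
    parser.add_argument('--debug', action='store_true')
    return parser


if __name__ == '__main__':
    raise SystemExit(main(build_sketch_parser().parse_args()))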