Coverage for suhteita/suhteita.py: 26.73% (205 statements)
coverage.py v7.4.1, created at 2024-02-04 22:42:46 +00:00


1"""Load the JIRA instance.""" 

2 

3import argparse 

4import datetime as dti 

5import json 

6import secrets 

7from typing import no_type_check 

8 

9import suhteita.ticket_system_actions as actions 

10from suhteita import ( 

11 APP_ALIAS, 

12 APP_ENV, 

13 BASE_URL, 

14 IDENTITY, 

15 IS_CLOUD, 

16 NODE_INDICATOR, 

17 PROJECT, 

18 STORE, 

19 TOKEN, 

20 TS_FORMAT_PAYLOADS, 

21 USER, 

22 __version__ as version, 

23 extract_fields, 

24 log, 

25 two_sentences, 

26) 

27from suhteita.store import Store 

28 

29 
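

# setup_twenty_seven collects every parameter the 27-steps scenario needs (credentials,
# target project, random sentences and component name, timestamp marker, labels, ...)
# on a plain object, falling back to the package-level defaults when an option is unset.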

@no_type_check
def setup_twenty_seven(options: argparse.Namespace) -> object:
    """Set up the scenario and return the parameters as members of an object."""

    class Setup:
        pass

    setup = Setup()

    setup.user = options.user if options.user else USER
    setup.target_url = options.target_url if options.target_url else BASE_URL
    setup.is_cloud = options.is_cloud if options.is_cloud else IS_CLOUD
    setup.target_project = options.target_project if options.target_project else PROJECT
    setup.scenario = options.scenario if options.scenario else 'unknown'
    setup.identity = options.identity if options.identity else IDENTITY
    setup.storage_path = options.out_path if options.out_path else STORE

    log.info('=' * 84)
    log.info(f'Generator {APP_ALIAS} version {version}')
    log.info('# Prelude of a 27-steps scenario test execution')

    setup.c_rand, setup.d_rand = two_sentences()
    log.info(f'- Setup <01> Random sentence of original ({setup.c_rand})')
    log.info(f'- Setup <02> Random sentence of duplicate ({setup.d_rand})')

    setup.random_component = secrets.token_urlsafe()
    log.info(f'- Setup <03> Random component name ({setup.random_component})')

    setup.todo, setup.in_progress, setup.done = ('to do', 'in progress', 'done')
    log.info(
        f'- Setup <04> The test workflow assumes the (case insensitive) states'
        f' ({setup.todo}, {setup.in_progress}, {setup.done})'
    )

    setup.ts = dti.datetime.now(tz=dti.timezone.utc).strftime(TS_FORMAT_PAYLOADS)
    log.info(f'- Setup <05> Timestamp marker in summaries will be ({setup.ts})')

    setup.desc_core = '... and short description we dictate.'
    log.info(f'- Setup <06> Common description part - of twin issues / pair - will be ({setup.desc_core})')

    setup.amendment = 'No, no, no. They duplicated me, help!'
    log.info(f'- Setup <07> Amendment for original description will be ({setup.amendment})')

    setup.fake_comment = 'I am the original, surely!'
    log.info(f'- Setup <08> Fake comment for duplicate will be ({setup.fake_comment})')

    setup.duplicate_labels = ['du', 'pli', 'ca', 'te']
    log.info(f'- Setup <09> Labels for duplicate will be ({setup.duplicate_labels})')

    setup.original_labels = ['for', 'real', 'highlander']
    log.info(f'- Setup <10> Labels for original will be ({setup.original_labels})')

    setup.hours_value = 42
    log.info(f'- Setup <11> Hours value for original estimate will be ({setup.hours_value})')

    setup.purge_me = 'SUHTEITA_PURGE_ME_ORIGINAL'
    log.info(f'- Setup <12> Purge indicator comment will be ({setup.purge_me})')

    setup.node_indicator = NODE_INDICATOR
    log.info(f'- Setup <13> Node indicator ({setup.node_indicator})')

    log.info(
        f'- Setup <14> Connect will be to upstream ({"cloud" if setup.is_cloud else "on-site"})'
        f' service ({setup.target_url}) per login ({setup.user})'
    )
    log.info('-' * 84)

    return setup
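

# main() executes the 27 steps in order: login and discovery (01-03), issue creation
# and linking (04-13), status transitions plus estimate (14-22), component handling and
# purge-tag comments (23-27); the timing and result of every step is collected in the
# Store and dumped at the end.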

def main(options: argparse.Namespace) -> int:
    """Drive the transactions."""

    if not TOKEN:
        log.error(f'No secret token or pass phrase given, please set {APP_ENV}_TOKEN accordingly')
        return 2

    cfg = setup_twenty_seven(options=options)

    # Belt and braces:
    has_failures = False

    # Here we start the timer for the session:
    start_time = dti.datetime.now(tz=dti.timezone.utc)
    start_ts = start_time.strftime(TS_FORMAT_PAYLOADS)
    context = {
        'target': cfg.target_url,
        'mode': f'{"cloud" if cfg.is_cloud else "on-site"}',
        'project': cfg.target_project,
        'scenario': cfg.scenario,
        'identity': cfg.identity,
        'start_time': start_time,
    }
    store = Store(context=context, setup=cfg, folder_path=cfg.storage_path)
    log.info(f'# Starting 27-steps scenario test execution at ({start_ts})')
    log.info('- Step <01> LOGIN')
    clk, service = actions.login(cfg.target_url, cfg.user, password=TOKEN, is_cloud=cfg.is_cloud)
    log.info(f'^ Connected to upstream service; CLK={clk}')
    store.add('LOGIN', True, clk)

    log.info('- Step <02> SERVER_INFO')
    clk, server_info = actions.get_server_info(service)
    log.info(f'^ Retrieved upstream server info cf. [SRV]; CLK={clk}')
    store.add('SERVER_INFO', True, clk, str(server_info))

    log.info('- Step <03> PROJECTS')
    clk, projects = actions.get_all_projects(service)
    log.info(f'^ Retrieved {len(projects)} unarchived projects; CLK={clk}')
    store.add('PROJECTS', True, clk, f'count({len(projects)})')
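
    # Confirm the requested project key is among the projects returned upstream; bail out otherwise.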

    proj_env_ok = False
    if cfg.target_project:
        proj_env_ok = any((cfg.target_project == project['key'] for project in projects))

    if not proj_env_ok:
        log.error('Belt and braces - verify project selection:')
        log.info(json.dumps(sorted([project['key'] for project in projects]), indent=2))
        return 1

    first_proj_key = cfg.target_project if proj_env_ok else projects[0]['key']
    log.info(
        f'Verified target project from request ({cfg.target_project}) to be'
        f' {"" if proj_env_ok else "not "}present and set target project to ({first_proj_key})'
    )
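
    # Steps <04>..<07>: create the original and the duplicate issue and verify that both exist.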

    log.info('- Step <04> CREATE_ISSUE')
    clk, c_key = actions.create_issue(
        service, first_proj_key, cfg.ts, description=f'{cfg.c_rand}\n{cfg.desc_core}\nCAUSALITY={cfg.node_indicator}'
    )
    log.info(f'^ Created original ({c_key}); CLK={clk}')
    store.add('CREATE_ISSUE', True, clk, 'original')

    log.info('- Step <05> ISSUE_EXISTS')
    clk, c_e = actions.issue_exists(service, c_key)
    log.info(f'^ Existence of original ({c_key}) verified with result ({c_e}); CLK={clk}')
    store.add('ISSUE_EXISTS', bool(c_e), clk, 'original')

    log.info('- Step <06> CREATE_ISSUE')
    clk, d_key = actions.create_issue(
        service, first_proj_key, cfg.ts, description=f'{cfg.d_rand}\n{cfg.desc_core}\nCAUSALITY={cfg.node_indicator}'
    )
    log.info(f'^ Created duplicate ({d_key}); CLK={clk}')
    store.add('CREATE_ISSUE', True, clk, 'duplicate')

    log.info('- Step <07> ISSUE_EXISTS')
    clk, d_e = actions.issue_exists(service, d_key)
    log.info(f'^ Existence of duplicate ({d_key}) verified with result ({d_e}); CLK={clk}')
    store.add('ISSUE_EXISTS', bool(d_e), clk, 'duplicate')
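
    # Steps <08>..<13>: query the original, amend its description, comment and label the
    # duplicate, label the original, and link the pair with a "duplicates" issue link.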

    query = f'issue = {c_key}'
    log.info('- Step <08> EXECUTE_JQL')
    clk, c_q = actions.execute_jql(service=service, query=query)
    log.info(f'^ Executed JQL({query}); CLK={clk}')
    store.add('EXECUTE_JQL', True, clk, f'query({query.replace(c_key, "original-key")})')

    log.info('- Step <09> AMEND_ISSUE_DESCRIPTION')
    clk = actions.amend_issue_description(service, c_key, amendment=cfg.amendment, issue_context=c_q)
    log.info(f'^ Amended description of original {c_key} with ({cfg.amendment}); CLK={clk}')
    store.add('AMEND_ISSUE_DESCRIPTION', True, clk, 'original')

    log.info('- Step <10> ADD_COMMENT')
    clk, _ = actions.add_comment(service=service, issue_key=d_key, comment=cfg.fake_comment)
    log.info(f'^ Added comment ({cfg.fake_comment}) to duplicate {d_key}; CLK={clk}')
    store.add('ADD_COMMENT', True, clk, 'duplicate')

    log.info('- Step <11> UPDATE_ISSUE_FIELD')
    clk = actions.update_issue_field(service, d_key, labels=cfg.duplicate_labels)
    log.info(f'^ Updated duplicate {d_key} issue field of labels to ({cfg.duplicate_labels}); CLK={clk}')
    store.add('UPDATE_ISSUE_FIELD', True, clk, 'duplicate')

    log.info('- Step <12> UPDATE_ISSUE_FIELD')
    clk = actions.update_issue_field(service, c_key, labels=cfg.original_labels)
    log.info(f'^ Updated original {c_key} issue field of labels to ({cfg.original_labels}); CLK={clk}')
    store.add('UPDATE_ISSUE_FIELD', True, clk, 'original')

    log.info('- Step <13> CREATE_DUPLICATES_ISSUE_LINK')
    clk, _ = actions.create_duplicates_issue_link(service, c_key, d_key)
    log.info(f'^ Created link on duplicate stating it duplicates the original; CLK={clk}')
    store.add('CREATE_DUPLICATES_ISSUE_LINK', True, clk, 'duplicate duplicates original')
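
    # Steps <14>..<18>: walk the duplicate through the workflow (to do -> in progress -> done)
    # and comment its closure.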

    log.info('- Step <14> GET_ISSUE_STATUS')
    clk, d_iss_state = actions.get_issue_status(service, d_key)
    d_is_todo = d_iss_state.lower() == cfg.todo
    log.info(
        f'^ Retrieved status of the duplicate {d_key} as ({d_iss_state})'
        f' with result (is_todo == {d_is_todo}); CLK={clk}'
    )
    store.add('GET_ISSUE_STATUS', d_is_todo, clk, f'duplicate({d_iss_state})')

    log.info('- Step <15> SET_ISSUE_STATUS')
    clk, _ = actions.set_issue_status(service, d_key, cfg.in_progress)
    log.info(f'^ Transitioned the duplicate {d_key} to ({cfg.in_progress}); CLK={clk}')
    store.add('SET_ISSUE_STATUS', True, clk, f'duplicate ({cfg.todo})->({cfg.in_progress})')

    log.info('- Step <16> SET_ISSUE_STATUS')
    clk, _ = actions.set_issue_status(service, d_key, cfg.done)
    log.info(f'^ Transitioned the duplicate {d_key} to ({cfg.done}); CLK={clk}')
    store.add('SET_ISSUE_STATUS', True, clk, f'duplicate ({cfg.in_progress})->({cfg.done})')

    log.info('- Step <17> GET_ISSUE_STATUS')
    clk, d_iss_state_done = actions.get_issue_status(service, d_key)
    d_is_done = d_iss_state_done.lower() == cfg.done
    log.info(
        f'^ Retrieved status of the duplicate {d_key} as ({d_iss_state_done})'
        f' with result (d_is_done == {d_is_done}); CLK={clk}'
    )
    store.add('GET_ISSUE_STATUS', d_is_done, clk, f'duplicate({d_iss_state_done})')

    log.info('- Step <18> ADD_COMMENT')
    clk, response_step_18_add_comment = actions.add_comment(service, d_key, 'Closed as duplicate.')
    log.info(f'^ Added comment on {d_key} with response extract cf. [RESP-STEP-18]; CLK={clk}')
    store.add('ADD_COMMENT', True, clk, f'duplicate({response_step_18_add_comment["body"]})')
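
    # Steps <19>..<22>: set the original estimate on the original issue and move it to in progress.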

    log.info('- Step <19> SET_ORIGINAL_ESTIMATE')
    clk, ok = actions.set_original_estimate(service, c_key, hours=cfg.hours_value)
    log.info(
        f'^ Added ({cfg.hours_value}) hours as original estimate to original {c_key} with result ({ok}); CLK={clk}'
    )
    store.add('SET_ORIGINAL_ESTIMATE', ok, clk, 'original')

    log.info('- Step <20> GET_ISSUE_STATUS')
    clk, c_iss_state = actions.get_issue_status(service, c_key)
    c_is_todo = c_iss_state.lower() == cfg.todo
    log.info(
        f'^ Retrieved status of the original {c_key} as ({c_iss_state})'
        f' with result (c_is_todo == {c_is_todo}); CLK={clk}'
    )
    store.add('GET_ISSUE_STATUS', c_is_todo, clk, f'original({c_iss_state})')

    log.info('- Step <21> SET_ISSUE_STATUS')
    clk, _ = actions.set_issue_status(service, c_key, cfg.in_progress)
    log.info(f'^ Transitioned the original {c_key} to ({cfg.in_progress}); CLK={clk}')
    store.add('SET_ISSUE_STATUS', True, clk, f'original ({cfg.todo})->({cfg.in_progress})')

    log.info('- Step <22> GET_ISSUE_STATUS')
    clk, c_iss_state_in_progress = actions.get_issue_status(service, c_key)
    c_is_in_progress = c_iss_state_in_progress.lower() == cfg.in_progress
    log.info(
        f'^ Retrieved status of the original {c_key} as ({c_iss_state_in_progress})'
        f' with result (c_is_in_progress == {c_is_in_progress}); CLK={clk}'
    )
    store.add('GET_ISSUE_STATUS', c_is_in_progress, clk, f'original({c_iss_state_in_progress})')
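
    # Steps <23>..<25>: create a component, relate the original issue to it
    # (the only step allowed to flag failure), and reload the issue for the record.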

    log.info('- Step <23> CREATE_COMPONENT')
    clk, comp_id, a_component, comp_resp = actions.create_component(
        service=service, project=first_proj_key, name=cfg.random_component, description=cfg.c_rand
    )
    log.info(f'^ Created component ({a_component}) with response extract cf. [RESP-STEP-23]; CLK={clk}')
    store.add('CREATE_COMPONENT', True, clk, f'component({comp_resp["description"]})')  # type: ignore

    log.info('- Step <24> RELATE_ISSUE_TO_COMPONENT')
    clk, ok = actions.relate_issue_to_component(service, c_key, comp_id, a_component)
    log.info(
        f'^ Attempted relation of original {c_key} issue to component ({a_component}) with result ({ok}); CLK={clk}'
    )
    store.add('RELATE_ISSUE_TO_COMPONENT', ok, clk, 'original')
    if not ok:
        has_failures = True

    log.info('- Step <25> LOAD_ISSUE')
    clk, x_iss = actions.load_issue(service, c_key)
    log.info(f'^ Loaded issue {c_key}; CLK={clk}')
    log.debug(json.dumps(x_iss, indent=2))
    store.add('LOAD_ISSUE', True, clk, 'original')
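
    # Steps <26>..<27>: add the purge indicator comment to both issues.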

    log.info('- Step <26> ADD_COMMENT')
    clk, response_step_26_add_comment = actions.add_comment(service=service, issue_key=c_key, comment=cfg.purge_me)
    log.info(f'^ Added purge tag comment on original {c_key} with response extract cf. [RESP-STEP-26]; CLK={clk}')
    store.add('ADD_COMMENT', True, clk, f'original({response_step_26_add_comment["body"]})')

    log.info('- Step <27> ADD_COMMENT')
    clk, response_step_27_add_comment = actions.add_comment(service=service, issue_key=d_key, comment=cfg.purge_me)
    log.info(
        f'^ Added purge tag comment on duplicate issue {d_key} with response extract cf. [RESP-STEP-27]; CLK={clk}'
    )
    store.add('ADD_COMMENT', True, clk, f'duplicate({response_step_27_add_comment["body"]})')

    # Here we stop the timer for the session:
    end_time = dti.datetime.now(tz=dti.timezone.utc)
    end_ts = end_time.strftime(TS_FORMAT_PAYLOADS)
    log.info(f'# Ended execution of 27-steps scenario test at ({end_ts})')
    log.info(f'Execution of 27-steps scenario test took {(end_time - start_time)} h:mm:ss.uuuuuu')
    log.info('-' * 84)

    log.info('# References:')
    log.info(f'[SRV] Server info is ({server_info})')
    log.info(
        f'[RESP-STEP-18] Add comment response is'
        f' ({extract_fields(response_step_18_add_comment, fields=("self", "body"))})'
    )
    log.info(
        f'[RESP-STEP-23] Create component response is ({extract_fields(comp_resp, fields=("self", "description"))})'
    )
    log.info(
        f'[RESP-STEP-26] Add comment response is'
        f' ({extract_fields(response_step_26_add_comment, fields=("self", "body"))})'
    )
    log.info(
        f'[RESP-STEP-27] Add comment response is'
        f' ({extract_fields(response_step_27_add_comment, fields=("self", "body"))})'
    )
    log.info('-' * 84)

    log.info('Dumping records to store...')
    store.dump(end_time=end_time, has_failures=has_failures)
    log.info('-' * 84)

    log.info('OK')
    log.info('=' * 84)

    return 0
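

# The block below is not part of the module as covered above; it is a minimal driver
# sketch showing which attributes main()/setup_twenty_seven() expect on the options
# namespace (the real CLI parsing presumably lives elsewhere in the suhteita package).
# Empty/False values fall back to the package-level defaults (USER, BASE_URL, IS_CLOUD,
# PROJECT, IDENTITY, STORE).
if __name__ == '__main__':
    import sys

    hypothetical_options = argparse.Namespace(
        user='',  # falls back to USER
        target_url='',  # falls back to BASE_URL
        is_cloud=False,  # falls back to IS_CLOUD
        target_project='',  # falls back to PROJECT
        scenario='adhoc',  # free-form scenario label, 'unknown' if left empty
        identity='',  # falls back to IDENTITY
        out_path='',  # falls back to STORE
    )
    sys.exit(main(hypothetical_options))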