Coverage for putki/traverse.py: 92.95%
114 statements
coverage.py v7.4.1, created at 2024-02-04 22:07:46 +00:00
1"""Ensure all components have render folders with pipe harness."""
3import datetime as dti
4import pathlib
5from typing import Any, Union
7import yaml
8from putki import APP_ALIAS, ENCODING, TS_FORMAT_GENERATOR, VERSION, VERSION_DOTTED_TRIPLE, log
10Path = Union[str, pathlib.Path]
12ROI = 'component'
13STRUCTURES = 'structures.yml'
14UNDERSCORE = '_'
15EXECUTION_TS = dti.datetime.now(dti.timezone.utc).strftime(TS_FORMAT_GENERATOR)
16GENERATOR_DATA = {
17 'executed': EXECUTION_TS,
18 'name': APP_ALIAS,
19 'package_url': f'https://pypi.org/project/putki/{VERSION_DOTTED_TRIPLE}/',
20 'purl': f'pkg:pypi/putki@{VERSION_DOTTED_TRIPLE}',
21 'version': VERSION,
22}
23FACET_DOCS: dict[str, dict[str, Any]] = {}
24FACET_DOC_TEMPLATE: dict[str, Any] = {
25 'binder': [],
26 'code': None,
27 'component_source_folder_url': None,
28 'consistent': False,
29 'date': '',
30 'declaration_paths_resolved': {},
31 'distribution': '',
32 'document_approval': None,
33 'document_author': None,
34 'document_authorization': None,
35 'document_changes': [],
36 'document_review_list': [],
37 'effective_meta': {},
38 'facet': '',
39 'generator': GENERATOR_DATA,
40 'html': False,
41 'issue': None,
42 'lang': 'en',
43 'list_of_figures': False,
44 'list_of_tables': False,
45 'pdf': True,
46 'pdf_compiles': False,
47 'pdf_document_assets_url': None,
48 'pdf_document_url': None,
49 'pdf_log_url': None,
50 'perspective': None,
51 'publication_number': None,
52 'render': True,
53 'revision': None,
54 'subtitle': None,
55 'target': None,
56 'title': None,
57 'table_of_content_level': None,
58 'type': None,
59}
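FACET_DOCS and FACET_DOC_TEMPLATE are only declared in this module; how the template gets filled is not shown here. A minimal sketch of the presumable pattern, with an invented facet key and override values (the import path putki.traverse is an assumption):

from putki.traverse import FACET_DOCS, FACET_DOC_TEMPLATE

# hypothetical per-facet record: shallow copy of the template, then a few overrides
overrides = {'facet': 'pdf', 'title': 'Example Component', 'target': 'example_pdf'}
FACET_DOCS['example_pdf'] = {**FACET_DOC_TEMPLATE, **overrides}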
 62  def is_path(value: Union[bool, str]) -> bool:
 63      """Convention rules."""
 64      return bool(value) != value

 67  def is_yaml(path: Path) -> bool:
 68      """Convention rules again."""
 69      return pathlib.Path(path).suffix.lower() in ('.yaml', '.yml')
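Both convention helpers lean on Python quirks, so a few spot checks may help; the values are invented and the import path putki.traverse is assumed:

from putki.traverse import is_path, is_yaml

# is_path() is True exactly for strings and False for booleans:
# bool('x') != 'x' is True, bool(True) != True is False, and even bool('') != '' is True.
assert is_path('render/pdf.yml') is True
assert is_path(True) is False and is_path(False) is False

# is_yaml() only inspects the suffix; the file does not have to exist.
assert is_yaml('part/meta.YAML') and not is_yaml('bind.txt')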
 72  def walk_binder(binder_path: Path) -> dict[Path, bool]:
 73      """Visit all entries of the binder assuming the paths lead to files."""
 74      binder_path = pathlib.Path(binder_path)
 75      try:
 76          with binder_path.open('rt', encoding=ENCODING) as handle:
 77              binder = [binder_path.parent / x.strip() for x in handle.readlines()]
 78      except FileNotFoundError: # noqa
 79          log.error(f'missing {binder_path} binder file')
 80          return {}
 81      v_map: dict[Path, bool] = {source: source.is_file() for source in binder}
 82      for k, v in v_map.items():
 83          if not v:
 84              log.error(f'declared missing {k} file')
 85      return v_map
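walk_binder() resolves every line of the binder file relative to the binder's own folder and maps each resolved path to whether that file exists. A usage sketch with made-up paths (import path assumed to be putki.traverse):

from putki.traverse import walk_binder

presence = walk_binder('component/example/bind.txt')  # hypothetical binder file
for source, exists in presence.items():
    print(source, 'ok' if exists else 'MISSING')

# a binder that cannot be found is only logged and yields an empty mapping
assert walk_binder('component/example/no-such-bind.txt') == {}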
 88  def load_yaml(path: Path) -> Union[Any, dict[str, Any]]:
 89      """Eventually load a YAML resource."""
 90      path = pathlib.Path(path)
 91      if not path.is_file():
 92          log.error(f'declared missing {path} file')
 93          return {}
 94      try:
 95          return yaml.safe_load(path.open('rt', encoding=ENCODING))
 96      except: # noqa
 97          log.error(f'invalid YAML {path} file')
 98          return {}
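load_yaml() never raises: a missing or unparsable file is logged and collapses to an empty dict, so callers can simply test the result for truthiness. Sketch (the file name is invented):

from putki.traverse import load_yaml

data = load_yaml('structures.yml')
if not data:
    print('nothing to traverse')  # missing file or invalid YAML
else:
    print(sorted(data))  # top-level keys, assuming the YAML holds a mapping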
101  def meta_follow_include(path: Path, data: dict[str, dict[str, Any]]) -> bool:
102      """Visit any import assuming the path leads to a file."""
103      include = data['document'].get('import', '')
104      if include:
105          incl_path = pathlib.Path(path).parent / include
106          return bool(load_yaml(incl_path))
107      return True
110  def validate_facet(facet: dict[str, Any], ssp: Path) -> None:
111      """Validate the surface of a facet."""
112      for facet_code, data in facet.items():
113          log.info(f' * {facet_code}:')
114          path_likes = {}
115          for k in sorted(data.keys()):
116              v = data[k]
117              log.info(f'    {k :9s} -> {v}')
118              if is_path(v):
119                  pl_try = pathlib.Path(ssp).parent / v
120                  path_likes[k] = pl_try
121                  d = load_yaml(pl_try)
122                  if k == 'bind':
123                      _ = walk_binder(pl_try)
124                  elif k == 'meta' and is_yaml(pl_try):
125                      _ = meta_follow_include(pl_try, d)
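validate_facet() consumes a mapping from facet codes to key/value declarations; every string value is treated as a path relative to the structure file, with 'bind' values walked as binders and YAML 'meta' values followed for includes. A sketch of that shape (keys, file names, and the structure path are assumptions, and the referenced files would need to exist on disk):

from putki.traverse import validate_facet

facet = {
    'pdf': {
        'bind': 'bind.txt',  # handed to walk_binder()
        'meta': 'meta.yml',  # loaded, then its document.import is followed
        'render': True,      # booleans fail is_path() and are only logged
    },
}
validate_facet(facet, 'component/example/structure.yml')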
128  def follow(structures_path: Path) -> tuple[int, str, pathlib.Path, dict[str, Any]]:
129      """Execute the traversal."""
130      sp = pathlib.Path(structures_path)
131      root_path = sp.parent

133      log.info(f'Structures from {STRUCTURES}(root):')
134      s_info = load_yaml(sp)
135      if not s_info:
136          return 1, 'no structures found', root_path, {}

138      claims = s_info['structures']
139      for part, sub_path in claims.items():
140          log.info(f'- {part}:')
141          sub_p = pathlib.Path(sub_path)
142          sub_info = load_yaml(sub_p)
143          for target, structure_path_str in sub_info.get('structures', {}).items():
144              comp_cand, perspective = target.rsplit(UNDERSCORE, 1)
145              log.info(f' + {perspective}:')
146              ssp = sub_p.parent / structure_path_str
147              structure_info = load_yaml(ssp)
148              if structure_info:
                     [partial branch 148 ↛ 143: the condition on line 148 was never false]
149                  facets = structure_info.get(target)
150                  if facets is None:
                         [partial branch 150 ↛ 151: the condition on line 150 was never true]
151                      log.warning(f'target ({target}) not found in data - skipping facet validation')
152                  else:
153                      for facet in facets:
154                          validate_facet(facet, ssp)

156      return 0, '', root_path, claims
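follow() returns an exit code, a message, the root folder, and the claims mapping; walk_fs() below then cross-checks those claims against the component folders on disk. Typical wiring (the structures path is an assumption):

from putki.traverse import follow, walk_fs

code, message, root_path, claims = follow('example/structures.yml')
if code:
    print(f'traversal failed: {message}')
else:
    code = walk_fs(claims, root_path)
print('exit code:', code)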
159  def walk_fs(claims: dict[str, Any], root_path: Path) -> int:
160      """Yes."""
161      root_path = pathlib.Path(root_path)
162      roi_path = root_path / ROI
163      cp_declared = [claim for claim in claims]
164      try:
165          roi_items_ordered = sorted(roi_path.iterdir())
166      except FileNotFoundError as err:
167          log.error(err)
168          roi_items_ordered = []
169      component_paths = [p for p in roi_items_ordered if p.is_dir()]
170      components = [f'{p.name}' for p in component_paths]

172      log.info('Components (from folders):')
173      for c in components:
174          log.info(f'- {c}')

176      log.info(f'Verifying any component structures not declared in root level {STRUCTURES}')
177      missing = 0
178      for cp in component_paths:
179          if cp.name not in cp_declared:
                 [partial branch 179 ↛ 178: the condition on line 179 was never false]
180              missing += 1
181              log.error(f'missing component ({cp.name}):')
182              sp = cp / STRUCTURES
183              sx_info = load_yaml(sp)
184              if sx_info:
                     [partial branch 184 ↛ 178: the condition on line 184 was never false]
185                  log.info(sx_info)

187      if missing:
             [partial branch 187 ↛ 191: the condition on line 187 was never false]
188          log.error(f'missing declarations (badness = {missing})')
189          return 1
190      else:
191          log.info('OK - no missing declarations')
192          return 0
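walk_fs() only compares the folder names under <root>/component with the keys of the claims mapping and flags every undeclared folder. A minimal self-contained check of both outcomes (layout built in a temporary folder, all names invented):

import pathlib
import tempfile

from putki.traverse import walk_fs

with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / 'component' / 'extra').mkdir(parents=True)  # folder nobody declared
    assert walk_fs({'declared': 'declared/structures.yml'}, root) == 1  # undeclared -> badness
    assert walk_fs({'extra': 'extra/structures.yml'}, root) == 0        # declared -> OK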