antsibull-nox 0.0.1__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,332 @@
1
+ # Author: Felix Fontein <felix@fontein.de>
2
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
3
+ # https://www.gnu.org/licenses/gpl-3.0.txt)
4
+ # SPDX-License-Identifier: GPL-3.0-or-later
5
+ # SPDX-FileCopyrightText: 2025, Ansible Project
6
+
7
+ """
8
+ Config file schema.
9
+ """
10
+
11
+ from __future__ import annotations
12
+
13
+ import os
14
+ import typing as t
15
+
16
+ import pydantic as p
17
+
18
+ from .ansible import AnsibleCoreVersion
19
+ from .utils import Version
20
+
21
+ try:
22
+ from tomllib import load as _load_toml
23
+ except ImportError:
24
+ from tomli import load as _load_toml # type: ignore
25
+
26
+
27
def _parse_version(value: t.Any) -> Version:
    """
    Coerce ``value`` into a ``Version``.

    Accepts an existing ``Version`` instance or a dotted version string.
    Raises ValueError for anything else.
    """
    if isinstance(value, Version):
        return value
    if not isinstance(value, str) or "." not in value:
        raise ValueError("Must be version string")
    return Version.parse(value)
33
+
34
+
35
def _parse_ansible_core_version(value: t.Any) -> AnsibleCoreVersion:
    """
    Coerce ``value`` into an ``AnsibleCoreVersion``.

    Accepts a ``Version``, the special strings ``"devel"`` / ``"milestone"``,
    or a dotted version string. Raises ValueError for anything else.
    """
    if isinstance(value, Version):
        return value
    if isinstance(value, str):
        # The two special branch names are passed through unchanged.
        if value == "devel":
            return "devel"
        if value == "milestone":
            return "milestone"
        if "." in value:
            return Version.parse(value)
    raise ValueError("Must be ansible-core version string")
46
+
47
+
48
# Version field type: pydantic runs _parse_version on the raw input first,
# so config files may supply either a Version or a dotted version string.
PVersion = t.Annotated[Version, p.BeforeValidator(_parse_version)]
# Like PVersion, but additionally accepts the special strings
# "devel" and "milestone".
PAnsibleCoreVersion = t.Annotated[
    AnsibleCoreVersion, p.BeforeValidator(_parse_ansible_core_version)
]
52
+
53
+
54
class _BaseModel(p.BaseModel):
    """
    Common base class for all config models: instances are frozen
    (immutable), unknown keys are kept (extra="allow"), and default
    values are validated too.
    """

    model_config = p.ConfigDict(frozen=True, extra="allow", validate_default=True)
56
+
57
+
58
class SessionLint(_BaseModel):
    """
    Lint session config.
    """

    # Whether the session is enabled by default.
    default: bool = True
    # Additional code files to lint besides the auto-detected ones.
    extra_code_files: list[str] = []

    # isort:
    run_isort: bool = True
    isort_config: t.Optional[p.FilePath] = None
    isort_package: str = "isort"

    # black:
    run_black: bool = True
    # Whether to also run black on modules; presumably None means
    # "same as run_black" — confirm in the session implementation.
    run_black_modules: t.Optional[bool] = None
    black_config: t.Optional[p.FilePath] = None
    black_package: str = "black"

    # flake8:
    run_flake8: bool = True
    flake8_config: t.Optional[p.FilePath] = None
    flake8_package: str = "flake8"

    # pylint:
    run_pylint: bool = True
    pylint_rcfile: t.Optional[p.FilePath] = None
    pylint_modules_rcfile: t.Optional[p.FilePath] = None
    pylint_package: str = "pylint"
    pylint_ansible_core_package: t.Optional[str] = "ansible-core"
    pylint_extra_deps: list[str] = []

    # yamllint:
    run_yamllint: bool = True
    yamllint_config: t.Optional[p.FilePath] = None
    yamllint_config_plugins: t.Optional[p.FilePath] = None
    yamllint_config_plugins_examples: t.Optional[p.FilePath] = None
    yamllint_package: str = "yamllint"

    # mypy:
    run_mypy: bool = True
    mypy_config: t.Optional[p.FilePath] = None
    mypy_package: str = "mypy"
    mypy_ansible_core_package: t.Optional[str] = "ansible-core"
    mypy_extra_deps: list[str] = []
103
+
104
+
105
class SessionDocsCheck(_BaseModel):
    """
    Docs check session config.
    """

    # Whether the session is enabled by default.
    default: bool = True

    antsibull_docs_package: str = "antsibull-docs"
    ansible_core_package: str = "ansible-core"
    # Which collection references to validate; None leaves it to
    # antsibull-docs' own default.
    validate_collection_refs: t.Optional[t.Literal["self", "dependent", "all"]] = None
    # Extra collections to make available for the check.
    extra_collections: list[str] = []
116
+
117
+
118
class SessionLicenseCheck(_BaseModel):
    """
    License check session config.
    """

    # Whether the session is enabled by default.
    default: bool = True

    # reuse checks:
    run_reuse: bool = True
    reuse_package: str = "reuse"
    # antsibull-nox's own license check:
    run_license_check: bool = True
    # Extra paths the license check should ignore.
    license_check_extra_ignore_paths: list[str] = []
129
+
130
+
131
class ActionGroup(_BaseModel):
    """
    Information about an action group.
    """

    # Name of the action group.
    name: str
    # Regex pattern to match modules that could belong to this action group.
    pattern: str
    # Doc fragment that members of the action group must have, but no other
    # module must have.
    doc_fragment: str
    # Exclusion list of modules that match the regex, but should not be part of the
    # action group. All other modules matching the regex are assumed to be part of
    # the action group.
    exclusions: list[str] = []
147
+
148
+
149
class SessionExtraChecks(_BaseModel):
    """
    Extra checks session config.
    """

    # Whether the session is enabled by default.
    default: bool = True

    # no-unwanted-files:
    run_no_unwanted_files: bool = True
    # Extensions accepted for module files.
    no_unwanted_files_module_extensions: list[str] = [".cs", ".ps1", ".psm1", ".py"]
    # Extensions accepted for other code files.
    no_unwanted_files_other_extensions: list[str] = [".py", ".pyi"]
    # Extensions considered YAML files.
    no_unwanted_files_yaml_extensions: list[str] = [".yml", ".yaml"]
    # Paths to skip during the check.
    no_unwanted_files_skip_paths: list[str] = []
    # Directories to skip during the check.
    no_unwanted_files_skip_directories: t.Optional[list[str]] = []
    # Directories where YAML files are expected; exact semantics are in the
    # check implementation — confirm there before relying on this.
    no_unwanted_files_yaml_directories: t.Optional[list[str]] = [
        "plugins/test/",
        "plugins/filter/",
    ]
    # Whether symlinks are allowed.
    no_unwanted_files_allow_symlinks: bool = False

    # action-groups:
    run_action_groups: bool = False
    action_groups_config: list[ActionGroup] = []
172
+
173
+
174
class SessionBuildImportCheck(_BaseModel):
    """
    Collection build and Galaxy import session config.
    """

    # Whether the session is enabled by default.
    default: bool = True

    ansible_core_package: str = "ansible-core"
    # Whether to run the Galaxy importer on the built collection artifact.
    run_galaxy_importer: bool = True
    galaxy_importer_package: str = "galaxy-importer"
    # Path to a galaxy-importer configuration file; see
    # https://github.com/ansible/galaxy-importer#configuration
    galaxy_importer_config_path: t.Optional[p.FilePath] = None
186
+
187
+
188
class DevelLikeBranch(_BaseModel):
    """
    A Git repository + branch for a devel-like branch of ansible-core.
    """

    # Git repository; None presumably means the default ansible-core
    # repository — confirm against the consuming session code.
    repository: t.Optional[str] = None
    branch: str

    @p.model_validator(mode="before")
    @classmethod
    def _pre_validate(cls, values: t.Any) -> t.Any:
        """
        Accept shorthand notations: a plain string is a branch name, and a
        two-element list of strings is ``[repository, branch]``.
        """
        if isinstance(values, str):
            return {"branch": values}
        if (
            isinstance(values, list)
            and len(values) == 2
            and all(isinstance(v, str) for v in values)
        ):
            return {"repository": values[0], "branch": values[1]}
        return values
208
+
209
+
210
class SessionAnsibleTestSanity(_BaseModel):
    """
    Ansible-test sanity tests session config.
    """

    # Whether the session is enabled by default.
    default: bool = False

    # Whether to also test against the devel / milestone branches.
    include_devel: bool = False
    include_milestone: bool = False
    # Additional devel-like branches to test against.
    add_devel_like_branches: list[DevelLikeBranch] = []
    # Restrict the range of ansible-core versions used.
    min_version: t.Optional[PVersion] = None
    max_version: t.Optional[PVersion] = None
    # ansible-core versions to skip.
    except_versions: list[PAnsibleCoreVersion] = []
223
+
224
+
225
class SessionAnsibleTestUnits(_BaseModel):
    """
    Ansible-test unit tests session config.
    """

    # Whether the session is enabled by default.
    default: bool = False

    # Whether to also test against the devel / milestone branches.
    include_devel: bool = False
    include_milestone: bool = False
    # Additional devel-like branches to test against.
    add_devel_like_branches: list[DevelLikeBranch] = []
    # Restrict the range of ansible-core versions used.
    min_version: t.Optional[PVersion] = None
    max_version: t.Optional[PVersion] = None
    # ansible-core versions to skip.
    except_versions: list[PAnsibleCoreVersion] = []
238
+
239
+
240
class SessionAnsibleTestIntegrationWDefaultContainer(_BaseModel):
    """
    Ansible-test integration tests with default container session config.
    """

    # Whether the session is enabled by default.
    default: bool = False

    # Whether to also test against the devel / milestone branches.
    include_devel: bool = False
    include_milestone: bool = False
    # Additional devel-like branches to test against.
    add_devel_like_branches: list[DevelLikeBranch] = []
    # Restrict the range of ansible-core versions used.
    min_version: t.Optional[PVersion] = None
    max_version: t.Optional[PVersion] = None
    # ansible-core versions to skip.
    except_versions: list[PAnsibleCoreVersion] = []
    # Maps ansible-core versions (or "devel"/"milestone"/devel-like branch
    # names) to the Python versions to test with.
    core_python_versions: dict[t.Union[PAnsibleCoreVersion, str], list[PVersion]] = {}
    controller_python_versions_only: bool = False

    @p.model_validator(mode="after")
    def _validate_core_keys(self) -> t.Self:
        """
        Ensure every key of core_python_versions is a Version, one of the
        special strings "devel"/"milestone", or the branch name of a
        configured devel-like branch.
        """
        branch_names = [dlb.branch for dlb in self.add_devel_like_branches]
        for key in self.core_python_versions:
            if isinstance(key, Version) or key in {"devel", "milestone"}:
                continue
            if key in branch_names:
                continue
            raise ValueError(
                f"Unknown ansible-core version or branch name {key!r} in core_python_versions"
            )
        return self
268
+
269
+
270
class SessionAnsibleLint(_BaseModel):
    """
    Ansible-lint session config.
    """

    # Whether the session is enabled by default.
    default: bool = True

    ansible_lint_package: str = "ansible-lint"
    # Whether to run ansible-lint in strict mode; presumably maps to
    # ansible-lint's --strict option — confirm in the session code.
    strict: bool = False
279
+
280
+
281
class Sessions(_BaseModel):
    """
    Configuration of nox sessions to add.

    Each field configures one session type; all fields are optional.
    """

    lint: t.Optional[SessionLint] = None
    docs_check: t.Optional[SessionDocsCheck] = None
    license_check: t.Optional[SessionLicenseCheck] = None
    extra_checks: t.Optional[SessionExtraChecks] = None
    build_import_check: t.Optional[SessionBuildImportCheck] = None
    ansible_test_sanity: t.Optional[SessionAnsibleTestSanity] = None
    ansible_test_units: t.Optional[SessionAnsibleTestUnits] = None
    ansible_test_integration_w_default_container: t.Optional[
        SessionAnsibleTestIntegrationWDefaultContainer
    ] = None
    ansible_lint: t.Optional[SessionAnsibleLint] = None
297
+
298
+
299
class CollectionSource(_BaseModel):
    """
    Source from which to install a collection.
    """

    source: str

    @p.model_validator(mode="before")
    @classmethod
    def _pre_validate(cls, values: t.Any) -> t.Any:
        """
        Accept a plain string as shorthand for ``{"source": value}``.
        """
        if isinstance(values, str):
            return {"source": values}
        return values
312
+
313
+
314
class Config(_BaseModel):
    """
    The contents of a antsibull-nox config file.
    """

    # Maps collection names to the source to install them from.
    collection_sources: dict[str, CollectionSource] = {}
    # The nox sessions to set up.
    sessions: Sessions = Sessions()
321
+
322
+
323
def load_config_from_toml(path: str | os.PathLike) -> Config:
    """
    Read the config TOML file at ``path`` and validate it into a ``Config``.

    Raises ValueError if the file cannot be parsed as TOML; validation
    errors from pydantic propagate unchanged.
    """
    with open(path, "rb") as toml_file:
        try:
            raw = _load_toml(toml_file)
        except ValueError as exc:
            # tomllib/tomli parse errors derive from ValueError.
            raise ValueError(f"Error while reading {path}: {exc}") from exc
    return Config.model_validate(raw)
@@ -0,0 +1,199 @@
1
+ #!/usr/bin/env python
2
+
3
+ # Copyright (c) 2024, Felix Fontein <felix@fontein.de>
4
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
5
+ # or https://www.gnu.org/licenses/gpl-3.0.txt)
6
+ # SPDX-License-Identifier: GPL-3.0-or-later
7
+
8
+ """Make sure all modules that should be part of an action group are actually in it."""
9
+
10
+ from __future__ import annotations
11
+
12
+ import os
13
+ import re
14
+ import sys
15
+ import typing as t
16
+
17
+ import yaml
18
+
19
+ from antsibull_nox.data.antsibull_nox_data_util import setup
20
+ from antsibull_nox.sessions import ActionGroup
21
+
22
+
23
def compile_patterns(
    config: list[ActionGroup], errors: list[str]
) -> dict[str, re.Pattern] | None:
    """
    Compile the regex pattern of every configured action group.

    Returns a map from action group name to compiled pattern, or ``None``
    (with a message appended to ``errors``) if a group name is duplicated.
    """
    patterns: dict[str, re.Pattern] = {}
    for action_group in config:
        # BUG FIX: the original tested ``action_group.name in config``; a
        # string never equals an ActionGroup object, so duplicate names were
        # never detected. Check against the names collected so far instead.
        if action_group.name in patterns:
            errors.append(
                f"noxfile.py: Action group {action_group.name!r} defined multiple times"
            )
            return None
        patterns[action_group.name] = re.compile(action_group.pattern)
    return patterns
35
+
36
+
37
def load_redirects(
    config: list[ActionGroup], errors: list[str], meta_runtime: str
) -> dict[str, list[str]]:
    """
    Load the ``action_groups`` mapping from meta/runtime.yml and cross-check
    its group names against the groups configured in the noxfile.

    Returns the mapping (empty on error); problems are appended to ``errors``.
    """
    try:
        with open(meta_runtime, "rb") as f:
            runtime_data = yaml.safe_load(f)
        action_groups = runtime_data["action_groups"]
    except Exception as exc:  # pylint: disable=broad-except
        errors.append(f"{meta_runtime}: cannot load action groups: {exc}")
        return {}

    if not isinstance(action_groups, dict):
        errors.append(f"{meta_runtime}: action_groups is not a dictionary")
        return {}
    if not all(
        isinstance(k, str) and isinstance(v, list) for k, v in action_groups.items()
    ):
        errors.append(
            f"{meta_runtime}: action_groups is not a dictionary mapping strings to list of strings"
        )
        return {}

    # Compare meta/runtime.yml content with config:
    # every group in meta/runtime.yml must be configured in the noxfile ...
    configured_names = {cfg.name for cfg in config}
    errors.extend(
        f"{meta_runtime}: found unknown action group"
        f" {group_name!r}; likely noxfile needs updating"
        for group_name in action_groups
        if group_name not in configured_names
    )
    # ... and every configured group must appear in meta/runtime.yml.
    errors.extend(
        f"{meta_runtime}: cannot find action group"
        f" {group.name!r}; likely noxfile needs updating"
        for group in config
        if group.name not in action_groups
    )

    return action_groups
76
+
77
+
78
def load_docs(path: str, errors: list[str]) -> dict[str, t.Any] | None:
    """
    Extract and parse the DOCUMENTATION block of the module file at ``path``.

    Returns the parsed documentation as a dictionary, or ``None`` on error
    (with a message appended to ``errors``).
    """
    doc_lines: list[str] = []
    inside_docs = False
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            if line.startswith("DOCUMENTATION ="):
                inside_docs = True
            elif inside_docs and line.startswith(("'''", '"""')):
                # A triple quote at column 0 terminates the block.
                inside_docs = False
            elif inside_docs:
                doc_lines.append(line)
    if inside_docs:
        errors.append(f"{path}: cannot find DOCUMENTATION end")
    if not doc_lines:
        errors.append(f"{path}: cannot find DOCUMENTATION")
        return None

    try:
        parsed = yaml.safe_load("\n".join(doc_lines))
        if not isinstance(parsed, dict):
            raise Exception("is not a top-level dictionary")
        return parsed
    except Exception as exc:  # pylint: disable=broad-except
        errors.append(f"{path}: cannot load DOCUMENTATION as YAML: {exc}")
        return None
103
+
104
+
105
def scan(config: list[ActionGroup], errors: list[str]) -> None:
    """
    Check all modules in plugins/modules/ against the action group config
    and meta/runtime.yml.

    Problems found are appended to ``errors``.
    """
    patterns = compile_patterns(config, errors)
    if patterns is None:
        return

    meta_runtime = "meta/runtime.yml"
    action_groups = load_redirects(config, errors, meta_runtime)

    modules_directory = "plugins/modules/"
    modules_suffix = ".py"

    # BUG FIX: the original rebound ``errors = []`` here, so every problem
    # found below was appended to a throwaway local list and silently
    # discarded instead of being reported to the caller.

    for file in os.listdir(modules_directory):
        if not file.endswith(modules_suffix):
            continue
        module_name = file[: -len(modules_suffix)]

        for action_group in config:
            action_group_content = action_groups.get(action_group.name) or []
            path = os.path.join(modules_directory, file)

            if not patterns[action_group.name].match(module_name):
                # Module does not match the group's pattern, so it must not
                # be listed in the group.
                if module_name in action_group_content:
                    errors.append(
                        f"{path}: module is in action group {action_group.name!r}"
                        " despite not matching its pattern as defined in noxfile"
                    )
                continue

            should_be_in_action_group = (
                module_name not in action_group.exclusions
                if action_group.exclusions
                else True
            )

            if should_be_in_action_group:
                if module_name not in action_group_content:
                    errors.append(
                        f"{meta_runtime}: module {module_name!r} is not part"
                        f" of {action_group.name!r} action group"
                    )
                else:
                    # Remove handled modules in-place; whatever remains at
                    # the end is listed in meta/runtime.yml but unknown.
                    action_group_content.remove(module_name)

            docs = load_docs(path, errors)
            if docs is None:
                continue

            docs_fragments = docs.get("extends_documentation_fragment") or []
            is_in_action_group = action_group.doc_fragment in docs_fragments

            if should_be_in_action_group != is_in_action_group:
                if should_be_in_action_group:
                    errors.append(
                        f"{path}: module does not document itself as part of"
                        f" action group {action_group.name!r}, but it should;"
                        f" you need to add {action_group.doc_fragment} to"
                        f' "extends_documentation_fragment" in DOCUMENTATION'
                    )
                else:
                    errors.append(
                        f"{path}: module documents itself as part of"
                        f" action group {action_group.name!r}, but it should not be"
                    )

    # Anything still listed in an action group was not removed above, so it
    # either does not exist or does not match the group's pattern.
    for action_group in config:
        action_group_content = action_groups.get(action_group.name) or []
        for module_name in action_group_content:
            errors.append(
                f"{meta_runtime}: module {module_name} mentioned"
                f" in {action_group.name!r} action group does not exist"
                " or does not match pattern defined in noxfile"
            )
178
+
179
+
180
def main() -> int:
    """Main entry point: validate the config, run the scan, report errors."""
    paths, extra_data = setup()

    raw_config = extra_data.get("config")
    if not isinstance(raw_config, list):
        raise ValueError("config is not a list")
    if not all(isinstance(cfg, dict) for cfg in raw_config):
        raise ValueError("config is not a list of dictionaries")
    config = [ActionGroup(**cfg) for cfg in raw_config]

    errors: list[str] = []
    scan(config, errors)

    # Report errors sorted for deterministic output; exit 1 if any were found.
    for error in sorted(errors):
        print(error)
    return len(errors) > 0
196
+
197
+
198
+ if __name__ == "__main__":
199
+ sys.exit(main())
@@ -0,0 +1,91 @@
1
+ # Author: Felix Fontein <felix@fontein.de>
2
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
3
+ # https://www.gnu.org/licenses/gpl-3.0.txt)
4
+ # SPDX-License-Identifier: GPL-3.0-or-later
5
+ # SPDX-FileCopyrightText: 2025, Ansible Project
6
+
7
+ """
8
+ Utility code for scripts in data.
9
+ """
10
+
11
+ from __future__ import annotations
12
+
13
+ import json
14
+ import sys
15
+ import typing as t
16
+
17
+
18
def setup() -> tuple[list[str], dict[str, t.Any]]:
    """
    Fetch list of paths and potential extra configuration.

    First thing to call in an extra sanity check script in data/.
    """
    argv = sys.argv
    if len(argv) == 3 and argv[1] == "--data":
        # Preferred way: load information from a JSON file.
        data_path = argv[2]
        try:
            with open(data_path, "rb") as f:
                data = json.load(f)
        except Exception as exc:
            raise ValueError(f"Error while reading JSON from {data_path}") from exc
        try:
            paths = get_list_of_strings(data, "paths")
        except ValueError as exc:
            raise ValueError(f"Invalid JSON content in {data_path}: {exc}") from exc
        data.pop("paths")
        return paths, data
    if len(argv) >= 2:
        # A list of paths can also be passed directly on the command line,
        # which simplifies testing these scripts.
        return argv[1:], {}
    # Finally, paths can be piped in on stdin (for example the output of
    # 'git ls-files'); this is also meant for testing these scripts.
    return sys.stdin.read().splitlines(), {}
46
+
47
+
48
def get_list_of_strings(
    data: dict[str, t.Any],
    key: str,
    *,
    default: list[str] | None = None,
) -> list[str]:
    """
    Retrieves a list of strings from key ``key`` of the JSON object ``data``.

    If ``default`` is set to a list, a missing key results in this value being returned.

    Raises ValueError if the key is missing without a default, or if the
    value is not a list of strings.
    """
    sentinel = object()
    value = data.get(key, sentinel)
    if value is sentinel:
        if default is not None:
            return default
        # BUG FIX: message used to read "is not a present".
        raise ValueError(f"{key!r} is not present")
    if not isinstance(value, list):
        # BUG FIX: the original reported type(key), which is always str;
        # report the offending value's type instead.
        raise ValueError(f"{key!r} is not a list, but {type(value)}")
    if not all(isinstance(entry, str) for entry in value):
        raise ValueError(f"{key!r} is not a list of strings")
    return t.cast(list[str], value)
70
+
71
+
72
def get_bool(
    data: dict[str, t.Any],
    key: str,
    *,
    default: bool | None = None,
) -> bool:
    """
    Retrieves a boolean from key ``key`` of the JSON object ``data``.

    If ``default`` is set to a boolean, a missing key results in this value being returned.

    Raises ValueError if the key is missing without a default, or if the
    value is not a boolean.
    """
    sentinel = object()
    value = data.get(key, sentinel)
    if value is sentinel:
        if default is not None:
            return default
        # BUG FIX: message used to read "is not a present".
        raise ValueError(f"{key!r} is not present")
    if not isinstance(value, bool):
        # BUG FIX: the original reported type(key), which is always str;
        # report the offending value's type instead.
        raise ValueError(f"{key!r} is not a bool, but {type(value)}")
    return value