antsibull-nox 0.0.1__py3-none-any.whl → 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,199 @@
1
+ #!/usr/bin/env python
2
+
3
+ # Copyright (c) 2024, Felix Fontein <felix@fontein.de>
4
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
5
+ # or https://www.gnu.org/licenses/gpl-3.0.txt)
6
+ # SPDX-License-Identifier: GPL-3.0-or-later
7
+
8
+ """Make sure all modules that should show up in an action group actually do."""
9
+
10
+ from __future__ import annotations
11
+
12
+ import os
13
+ import re
14
+ import sys
15
+ import typing as t
16
+
17
+ import yaml
18
+
19
+ from antsibull_nox.data_util import setup
20
+ from antsibull_nox.sessions import ActionGroup
21
+
22
+
23
def compile_patterns(
    config: list[ActionGroup], errors: list[str]
) -> dict[str, re.Pattern] | None:
    """Compile the regex pattern of every configured action group.

    Returns a mapping from group name to compiled pattern, or ``None``
    (after recording an error) if a group name occurs more than once.
    """
    patterns: dict[str, re.Pattern] = {}
    for action_group in config:
        # BUG FIX: the duplicate check used to be ``action_group.name in
        # config`` — comparing a string against a list of ActionGroup
        # objects, which never matches. Check the names seen so far instead.
        if action_group.name in patterns:
            errors.append(
                f"noxfile.py: Action group {action_group.name!r} defined multiple times"
            )
            return None
        patterns[action_group.name] = re.compile(action_group.pattern)
    return patterns
35
+
36
+
37
def load_redirects(
    config: list[ActionGroup], errors: list[str], meta_runtime: str
) -> dict[str, list[str]]:
    """Load action groups from meta/runtime.yml and cross-check them
    against the groups configured in the noxfile.

    Problems are appended to ``errors``; an empty dict is returned when
    the file cannot be used at all.
    """
    try:
        with open(meta_runtime, "rb") as f:
            action_groups = yaml.safe_load(f)["action_groups"]
    except Exception as exc:
        errors.append(f"{meta_runtime}: cannot load action groups: {exc}")
        return {}

    # Validate basic shape: a dict mapping group names to lists.
    if not isinstance(action_groups, dict):
        errors.append(f"{meta_runtime}: action_groups is not a dictionary")
        return {}
    if any(
        not isinstance(key, str) or not isinstance(value, list)
        for key, value in action_groups.items()
    ):
        errors.append(
            f"{meta_runtime}: action_groups is not a dictionary mapping strings to list of strings"
        )
        return {}

    # Both sides must agree on the set of group names.
    configured_names = {cfg.name for cfg in config}
    for name in action_groups:
        if name not in configured_names:
            errors.append(
                f"{meta_runtime}: found unknown action group"
                f" {name!r}; likely noxfile needs updating"
            )
    for cfg in config:
        if cfg.name not in action_groups:
            errors.append(
                f"{meta_runtime}: cannot find action group"
                f" {cfg.name!r}; likely noxfile needs updating"
            )

    return action_groups
76
+
77
+
78
def load_docs(path: str, errors: list[str]) -> dict[str, t.Any] | None:
    """Extract and parse the DOCUMENTATION block of the module at ``path``.

    Returns the parsed documentation as a dictionary, or ``None`` after
    recording an error.
    """
    doc_lines: list[str] = []
    inside = False
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            if line.startswith("DOCUMENTATION ="):
                inside = True
                continue
            if inside and line.startswith(("'''", '"""')):
                # Closing triple quote terminates the block.
                inside = False
            elif inside:
                doc_lines.append(line)
    if inside:
        errors.append(f"{path}: cannot find DOCUMENTATION end")
    if not doc_lines:
        errors.append(f"{path}: cannot find DOCUMENTATION")
        return None

    try:
        docs = yaml.safe_load("\n".join(doc_lines))
        if not isinstance(docs, dict):
            raise Exception("is not a top-level dictionary")
        return docs
    except Exception as exc:
        errors.append(f"{path}: cannot load DOCUMENTATION as YAML: {exc}")
        return None
103
+
104
+
105
def scan(config: list[ActionGroup], errors: list[str]) -> None:
    """Check all modules against the action group config and meta/runtime.yml.

    Every problem found is appended to ``errors``.
    """
    patterns = compile_patterns(config, errors)
    if patterns is None:
        return

    meta_runtime = "meta/runtime.yml"
    action_groups = load_redirects(config, errors, meta_runtime)

    modules_directory = "plugins/modules/"
    modules_suffix = ".py"

    # BUG FIX: a previous version rebound ``errors`` to a fresh list here,
    # which discarded the errors collected above and hid everything appended
    # below from the caller (scan() returns None, so the rebound list was
    # lost entirely).
    for file in os.listdir(modules_directory):
        if not file.endswith(modules_suffix):
            continue
        module_name = file[: -len(modules_suffix)]

        for action_group in config:
            action_group_content = action_groups.get(action_group.name) or []
            path = os.path.join(modules_directory, file)

            if not patterns[action_group.name].match(module_name):
                # Module does not match the group's pattern, so it must not
                # be listed in the group.
                if module_name in action_group_content:
                    errors.append(
                        f"{path}: module is in action group {action_group.name!r}"
                        " despite not matching its pattern as defined in noxfile"
                    )
                continue

            should_be_in_action_group = (
                module_name not in action_group.exclusions
                if action_group.exclusions
                else True
            )

            if should_be_in_action_group:
                if module_name not in action_group_content:
                    errors.append(
                        f"{meta_runtime}: module {module_name!r} is not part"
                        f" of {action_group.name!r} action group"
                    )
                else:
                    # Remove handled modules so leftovers can be reported
                    # after the loop.
                    action_group_content.remove(module_name)

            docs = load_docs(path, errors)
            if docs is None:
                continue

            docs_fragments = docs.get("extends_documentation_fragment") or []
            is_in_action_group = action_group.doc_fragment in docs_fragments

            if should_be_in_action_group != is_in_action_group:
                if should_be_in_action_group:
                    errors.append(
                        f"{path}: module does not document itself as part of"
                        f" action group {action_group.name!r}, but it should;"
                        f" you need to add {action_group.doc_fragment} to"
                        f' "extends_documentation_fragment" in DOCUMENTATION'
                    )
                else:
                    errors.append(
                        f"{path}: module documents itself as part of"
                        f" action group {action_group.name!r}, but it should not be"
                    )

    # Anything still listed in a group was not found on disk, or did not
    # match the group's pattern.
    for action_group in config:
        action_group_content = action_groups.get(action_group.name) or []
        for module_name in action_group_content:
            errors.append(
                f"{meta_runtime}: module {module_name} mentioned"
                f" in {action_group.name!r} action group does not exist"
                " or does not match pattern defined in noxfile"
            )
178
+
179
+
180
def main() -> int:
    """Main entry point: validate the config and run the scan."""
    paths, extra_data = setup()

    # The noxfile passes the action group definitions as a list of dicts.
    raw_config = extra_data.get("config")
    if not isinstance(raw_config, list):
        raise ValueError("config is not a list")
    if not all(isinstance(entry, dict) for entry in raw_config):
        raise ValueError("config is not a list of dictionaries")
    config = [ActionGroup(**entry) for entry in raw_config]

    errors: list[str] = []
    scan(config, errors)

    for error in sorted(errors):
        print(error)
    return len(errors) > 0
196
+
197
+
198
+ if __name__ == "__main__":
199
+ sys.exit(main())
@@ -0,0 +1,144 @@
1
+ #!/usr/bin/env python
2
+
3
+ # Copyright (c) 2022, Felix Fontein <felix@fontein.de>
4
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
5
+ # or https://www.gnu.org/licenses/gpl-3.0.txt)
6
+ # SPDX-License-Identifier: GPL-3.0-or-later
7
+
8
+ """Prevent files without a correct license identifier from being added to the source tree."""
9
+
10
+ from __future__ import annotations
11
+
12
+ import glob
13
+ import os
14
+ import sys
15
+
16
+ from antsibull_nox.data_util import get_list_of_strings, setup
17
+
18
+
19
def format_license_list(licenses: list[str]) -> str:
    """Return a human-readable, comma-separated rendering of ``licenses``."""
    if not licenses:
        return "(empty)"
    return ", ".join(f'"{name}"' for name in licenses)
23
+
24
+
25
def find_licenses(errors: list[str], filename: str, relax: bool = False) -> list[str]:
    """Scan ``filename`` for copyright notices and license identifiers.

    Returns the sorted list of SPDX license identifiers found. Problems are
    appended to ``errors``; with ``relax=True`` a missing copyright notice
    is not reported.
    """
    spdx_license_identifiers: list[str] = []
    other_license_identifiers: list[str] = []
    has_copyright = False
    try:
        with open(filename, "r", encoding="utf-8") as f:
            for line in f:
                line = line.rstrip()
                if "Copyright " in line:
                    has_copyright = True
                if "Copyright: " in line:
                    # BUG FIX: error messages used a "(unknown)" placeholder;
                    # report the offending file instead.
                    errors.append(
                        f'{filename}: found copyright line with "Copyright:".'
                        " Please remove the colon."
                    )
                if "SPDX-FileCopyrightText: " in line:
                    has_copyright = True
                idx = line.find("SPDX-License-Identifier: ")
                if idx >= 0:
                    lic_id = line[idx + len("SPDX-License-Identifier: ") :]
                    spdx_license_identifiers.extend(lic_id.split(" OR "))
                # Heuristically detect licenses mentioned in prose so they
                # can be cross-checked against the SPDX identifiers.
                if "GNU General Public License" in line:
                    if "v3.0+" in line:
                        other_license_identifiers.append("GPL-3.0-or-later")
                    if "version 3 or later" in line:
                        other_license_identifiers.append("GPL-3.0-or-later")
                if "Simplified BSD License" in line:
                    other_license_identifiers.append("BSD-2-Clause")
                if "Apache License 2.0" in line:
                    other_license_identifiers.append("Apache-2.0")
                if "PSF License" in line or "Python-2.0" in line:
                    other_license_identifiers.append("PSF-2.0")
                if "MIT License" in line:
                    other_license_identifiers.append("MIT")
    except Exception as exc:
        errors.append(f"{filename}: error while processing file: {exc}")
    if len(set(spdx_license_identifiers)) < len(spdx_license_identifiers):
        errors.append(f"{filename}: found identical SPDX-License-Identifier values")
    if other_license_identifiers and set(other_license_identifiers) != set(
        spdx_license_identifiers
    ):
        errors.append(
            f"{filename}: SPDX-License-Identifier yielded the license list"
            f" {format_license_list(spdx_license_identifiers)}, while manual guessing"
            f" yielded the license list {format_license_list(other_license_identifiers)}"
        )
    if not has_copyright and not relax:
        errors.append(f"{filename}: found no copyright notice")
    return sorted(spdx_license_identifiers)
74
+
75
+
76
def main() -> int:
    """Main entry point: check license headers of all given paths."""
    paths, extra_data = setup()

    # Files matching these patterns may omit license comments entirely.
    no_comment_patterns = [
        "changelogs/fragments/*.yml",
        "changelogs/fragments/*.yaml",
    ]

    # Files matching these patterns are not checked at all.
    ignore_patterns = [
        ".ansible-test-timeout.json",
        ".reuse/dep5",
        "LICENSES/*.txt",
        "COPYING",
    ] + get_list_of_strings(extra_data, "extra_ignore_paths", default=[])

    no_comments_allowed = [
        match for pattern in no_comment_patterns for match in glob.glob(pattern)
    ]
    ignore_paths = [
        match for pattern in ignore_patterns for match in glob.glob(pattern)
    ]

    # Every file in LICENSES/ defines one acceptable license identifier.
    valid_licenses = [
        fn[len("LICENSES/") : -len(".txt")] for fn in glob.glob("LICENSES/*.txt")
    ]

    errors: list[str] = []

    for path in paths:
        if path.startswith("./"):
            path = path[2:]
        if path in ignore_paths or path.startswith("tests/output/"):
            continue
        if os.stat(path).st_size == 0:
            # Empty files need no license header.
            continue
        # Prefer the sidecar .license file when one exists.
        if not path.endswith(".license") and os.path.exists(path + ".license"):
            path = path + ".license"
        allowed = valid_licenses
        outside_special_dirs = path.startswith("plugins/") and not path.startswith(
            ("plugins/modules/", "plugins/module_utils/", "plugins/doc_fragments/")
        )
        if outside_special_dirs and path.endswith((".py", ".py.license")):
            # Plugin code outside modules/module_utils/doc_fragments must
            # be GPL-3.0-or-later.
            allowed = [lic for lic in valid_licenses if lic == "GPL-3.0-or-later"]
        found = find_licenses(errors, path, relax=path in no_comments_allowed)
        if not found:
            if path not in no_comments_allowed:
                errors.append(f"{path}: must have at least one license")
        else:
            for lic in found:
                if lic not in allowed:
                    errors.append(
                        f"{path}: found not allowed license {lic!r}, must be one of"
                        f" {format_license_list(allowed)}"
                    )

    for error in sorted(errors):
        print(error)
    return len(errors) > 0
141
+
142
+
143
+ if __name__ == "__main__":
144
+ sys.exit(main())
@@ -0,0 +1,3 @@
1
+ GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
2
+ SPDX-License-Identifier: GPL-3.0-or-later
3
+ SPDX-FileCopyrightText: 2022, Felix Fontein <felix@fontein.de>
@@ -0,0 +1,119 @@
1
+ #!/usr/bin/env python
2
+
3
+ # Copyright (c) Ansible Project
4
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
5
+ # or https://www.gnu.org/licenses/gpl-3.0.txt)
6
+ # SPDX-License-Identifier: GPL-3.0-or-later
7
+
8
+ """Prevent unwanted files from being added to the source tree."""
9
+
10
+ from __future__ import annotations
11
+
12
+ import os
13
+ import sys
14
+
15
+ from antsibull_nox.data_util import get_bool, get_list_of_strings, setup
16
+
17
+
18
def main() -> int:
    """Main entry point: reject plugin files with unexpected extensions."""
    paths, extra_data = setup()

    # Extensions allowed for modules (plugins/modules/).
    module_extensions = tuple(
        sorted(
            get_list_of_strings(
                extra_data,
                "module_extensions",
                default=[".cs", ".ps1", ".psm1", ".py"],
            )
        )
    )

    # Extensions allowed for all other plugin types.
    other_extensions = tuple(
        sorted(
            get_list_of_strings(
                extra_data,
                "other_extensions",
                default=[".py", ".pyi"],
            )
        )
    )

    # YAML extensions tolerated inside yaml_directories.
    yaml_extensions = set(
        get_list_of_strings(
            extra_data,
            "yaml_extensions",
            default=[".yml", ".yaml"],
        )
    )

    skip_paths = set(get_list_of_strings(extra_data, "skip_paths", default=[]))

    skip_directories = tuple(
        get_list_of_strings(extra_data, "skip_prefixes", default=[])
    )

    yaml_directories = tuple(
        get_list_of_strings(
            extra_data,
            "yaml_directories",
            default=["plugins/test/", "plugins/filter/"],
        )
    )

    allow_symlinks = get_bool(extra_data, "allow_symlinks")

    errors: list[str] = []
    for path in paths:
        # Only files under plugins/ are subject to these rules.
        if not path.startswith("plugins/"):
            continue
        if path in skip_paths:
            continue
        # str.startswith accepts a tuple; an empty tuple never matches,
        # which matches the original any(...) over an empty list.
        if path.startswith(skip_directories):
            continue

        if os.path.islink(path):
            if not allow_symlinks:
                errors.append("%s: is a symbolic link" % (path,))
        elif not os.path.isfile(path):
            errors.append("%s: is not a regular file" % (path,))

        ext = os.path.splitext(path)[1]

        if ext in yaml_extensions and path.startswith(yaml_directories):
            continue

        expected = (
            module_extensions
            if path.startswith("plugins/modules/")
            else other_extensions
        )

        if ext not in expected:
            errors.append(f"{path}: extension must be one of: {', '.join(expected)}")

    for error in sorted(errors):
        print(error)
    return len(errors) > 0
116
+
117
+
118
+ if __name__ == "__main__":
119
+ sys.exit(main())
@@ -0,0 +1,115 @@
1
+ # Author: Felix Fontein <felix@fontein.de>
2
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
3
+ # https://www.gnu.org/licenses/gpl-3.0.txt)
4
+ # SPDX-License-Identifier: GPL-3.0-or-later
5
+ # SPDX-FileCopyrightText: 2025, Ansible Project
6
+
7
+ """
8
+ Utility code for scripts in data.
9
+ """
10
+
11
+ from __future__ import annotations
12
+
13
+ import json
14
+ import sys
15
+ import typing as t
16
+ from pathlib import Path
17
+
18
+ import nox
19
+
20
+
21
def setup() -> tuple[list[str], dict[str, t.Any]]:
    """
    Fetch the list of paths and potential extra configuration.

    First thing to call in an extra sanity check script in data/.
    """
    argv = sys.argv
    if len(argv) == 3 and argv[1] == "--data":
        # Preferred way: load information from a JSON file.
        data_file = argv[2]
        try:
            with open(data_file, "rb") as f:
                payload = json.load(f)
        except Exception as exc:
            raise ValueError(f"Error while reading JSON from {data_file}") from exc
        try:
            path_list = get_list_of_strings(payload, "paths")
        except ValueError as exc:
            raise ValueError(f"Invalid JSON content in {data_file}: {exc}") from exc
        payload.pop("paths")
        return path_list, payload
    if len(argv) >= 2:
        # Paths may also be passed directly on the command line, which
        # simplifies testing these scripts.
        return argv[1:], {}
    # Fall back to reading paths from stdin, e.g. piped from 'git ls-files';
    # this is also meant for testing these scripts.
    return sys.stdin.read().splitlines(), {}
49
+
50
+
51
def get_list_of_strings(
    data: dict[str, t.Any],
    key: str,
    *,
    default: list[str] | None = None,
) -> list[str]:
    """
    Retrieve a list of strings from key ``key`` of the JSON object ``data``.

    If ``default`` is set to a list, a missing key results in this value being returned.

    Raises ``ValueError`` if the key is missing without a default, or if the
    value is not a list of strings.
    """
    sentinel = object()
    value = data.get(key, sentinel)
    if value is sentinel:
        if default is not None:
            return default
        # Fixed typo: was "is not a present".
        raise ValueError(f"{key!r} is not present")
    if not isinstance(value, list):
        # BUG FIX: report the offending value's type; the previous message
        # used type(key), which is always str.
        raise ValueError(f"{key!r} is not a list, but {type(value)}")
    if not all(isinstance(entry, str) for entry in value):
        raise ValueError(f"{key!r} is not a list of strings")
    return t.cast(list[str], value)
73
+
74
+
75
def get_bool(
    data: dict[str, t.Any],
    key: str,
    *,
    default: bool | None = None,
) -> bool:
    """
    Retrieve a boolean from key ``key`` of the JSON object ``data``.

    If ``default`` is set to a boolean, a missing key results in this value being returned.

    Raises ``ValueError`` if the key is missing without a default, or if the
    value is not a boolean.
    """
    sentinel = object()
    value = data.get(key, sentinel)
    if value is sentinel:
        if default is not None:
            return default
        # Fixed typo: was "is not a present".
        raise ValueError(f"{key!r} is not present")
    if not isinstance(value, bool):
        # BUG FIX: report the offending value's type; the previous message
        # used type(key), which is always str.
        raise ValueError(f"{key!r} is not a bool, but {type(value)}")
    return value
95
+
96
+
97
def prepare_data_script(
    session: nox.Session,
    *,
    base_name: str,
    paths: list[Path],
    extra_data: dict[str, t.Any] | None = None,
) -> Path:
    """
    Prepare a data JSON file for the extra sanity check scripts.

    Writes the relative paths plus any ``extra_data`` to a JSON file in the
    session's temporary directory and returns its location.
    """
    cwd = Path.cwd()
    payload: dict[str, t.Any] = {
        "paths": [str(p.relative_to(cwd)) for p in paths]
    }
    if extra_data:
        payload.update(extra_data)
    target = Path(session.create_tmp()) / f"{base_name}-data.json"
    with open(target, "w", encoding="utf-8") as f:
        json.dump(payload, f)
    return target
+ return file