antsibull-nox 0.0.1__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- antsibull_nox/__init__.py +66 -3
- antsibull_nox/ansible.py +260 -0
- antsibull_nox/collection/__init__.py +56 -0
- antsibull_nox/collection/data.py +106 -0
- antsibull_nox/collection/extract.py +23 -0
- antsibull_nox/collection/install.py +523 -0
- antsibull_nox/collection/search.py +456 -0
- antsibull_nox/config.py +332 -0
- antsibull_nox/data/action-groups.py +199 -0
- antsibull_nox/data/antsibull_nox_data_util.py +91 -0
- antsibull_nox/data/license-check.py +144 -0
- antsibull_nox/data/license-check.py.license +3 -0
- antsibull_nox/data/no-unwanted-files.py +123 -0
- antsibull_nox/data/plugin-yamllint.py +244 -0
- antsibull_nox/data_util.py +38 -0
- antsibull_nox/interpret_config.py +235 -0
- antsibull_nox/paths.py +220 -0
- antsibull_nox/python.py +81 -0
- antsibull_nox/sessions.py +1389 -168
- antsibull_nox/utils.py +85 -0
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/METADATA +14 -4
- antsibull_nox-0.2.0.dist-info/RECORD +25 -0
- antsibull_nox-0.0.1.dist-info/RECORD +0 -7
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/WHEEL +0 -0
- {antsibull_nox-0.0.1.dist-info → antsibull_nox-0.2.0.dist-info}/licenses/LICENSES/GPL-3.0-or-later.txt +0 -0
@@ -0,0 +1,144 @@
|
|
1
|
+
#!/usr/bin/env python
|
2
|
+
|
3
|
+
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
|
4
|
+
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
|
5
|
+
# or https://www.gnu.org/licenses/gpl-3.0.txt)
|
6
|
+
# SPDX-License-Identifier: GPL-3.0-or-later
|
7
|
+
|
8
|
+
"""Prevent files without a correct license identifier from being added to the source tree."""
|
9
|
+
|
10
|
+
from __future__ import annotations
|
11
|
+
|
12
|
+
import glob
|
13
|
+
import os
|
14
|
+
import sys
|
15
|
+
|
16
|
+
from antsibull_nox.data.antsibull_nox_data_util import get_list_of_strings, setup
|
17
|
+
|
18
|
+
|
19
|
+
def format_license_list(licenses: list[str]) -> str:
    """Render a list of license identifiers as a quoted, comma-separated string.

    An empty list is rendered as ``(empty)`` so it stays visible in messages.
    """
    if not licenses:
        return "(empty)"
    quoted = (f'"{license}"' for license in licenses)
    return ", ".join(quoted)
|
23
|
+
|
24
|
+
|
25
|
+
def find_licenses(errors: list[str], filename: str, relax: bool = False) -> list[str]:
    """
    Scan *filename* for copyright notices and license identifiers.

    Problems are appended to *errors*; the sorted list of SPDX license
    identifiers found in the file is returned.  With ``relax=True``, a missing
    copyright notice is not reported (used for files that are allowed to have
    no license comment at all).
    """
    spdx_license_identifiers: list[str] = []
    other_license_identifiers: list[str] = []
    has_copyright = False
    try:
        with open(filename, "r", encoding="utf-8") as f:
            for line in f:
                line = line.rstrip()
                if "Copyright " in line:
                    has_copyright = True
                if "Copyright: " in line:
                    # BUGFIX: report the offending file instead of "(unknown)" --
                    # filename is in scope and makes the message actionable.
                    errors.append(
                        f'{filename}: found copyright line with "Copyright:".'
                        " Please remove the colon."
                    )
                if "SPDX-FileCopyrightText: " in line:
                    has_copyright = True
                idx = line.find("SPDX-License-Identifier: ")
                if idx >= 0:
                    lic_id = line[idx + len("SPDX-License-Identifier: ") :]
                    spdx_license_identifiers.extend(lic_id.split(" OR "))
                # Heuristics: guess licenses from prose references so they can
                # be cross-checked against the declared SPDX identifiers below.
                if "GNU General Public License" in line:
                    if "v3.0+" in line:
                        other_license_identifiers.append("GPL-3.0-or-later")
                    if "version 3 or later" in line:
                        other_license_identifiers.append("GPL-3.0-or-later")
                if "Simplified BSD License" in line:
                    other_license_identifiers.append("BSD-2-Clause")
                if "Apache License 2.0" in line:
                    other_license_identifiers.append("Apache-2.0")
                if "PSF License" in line or "Python-2.0" in line:
                    other_license_identifiers.append("PSF-2.0")
                if "MIT License" in line:
                    other_license_identifiers.append("MIT")
    except Exception as exc:
        # Deliberate best-effort: an unreadable file becomes an error entry,
        # not a crash of the whole check.
        errors.append(f"{filename}: error while processing file: {exc}")
    if len(set(spdx_license_identifiers)) < len(spdx_license_identifiers):
        errors.append(f"{filename}: found identical SPDX-License-Identifier values")
    if other_license_identifiers and set(other_license_identifiers) != set(
        spdx_license_identifiers
    ):
        errors.append(
            f"{filename}: SPDX-License-Identifier yielded the license list"
            f" {format_license_list(spdx_license_identifiers)}, while manual guessing"
            f" yielded the license list {format_license_list(other_license_identifiers)}"
        )
    if not has_copyright and not relax:
        errors.append(f"{filename}: found no copyright notice")
    return sorted(spdx_license_identifiers)
|
74
|
+
|
75
|
+
|
76
|
+
def main() -> int:
    """Main entry point."""
    # setup() provides the list of files to check and extra configuration read
    # from the JSON data file prepared by the nox session.
    paths, extra_data = setup()

    # The following paths are allowed to have no license identifier
    no_comments_allowed = [
        "changelogs/fragments/*.yml",
        "changelogs/fragments/*.yaml",
    ]

    # These files are completely ignored
    ignore_paths = [
        ".ansible-test-timeout.json",
        ".reuse/dep5",
        "LICENSES/*.txt",
        "COPYING",
    ] + get_list_of_strings(extra_data, "extra_ignore_paths", default=[])

    # Expand the glob patterns into concrete file names so the membership
    # tests below compare against actual paths.
    no_comments_allowed = [
        fn for pattern in no_comments_allowed for fn in glob.glob(pattern)
    ]
    ignore_paths = [fn for pattern in ignore_paths for fn in glob.glob(pattern)]

    # Every LICENSES/<id>.txt file defines one acceptable license identifier.
    valid_licenses = [
        license_file[len("LICENSES/") : -len(".txt")]
        for license_file in glob.glob("LICENSES/*.txt")
    ]

    errors: list[str] = []

    for path in paths:
        # Normalize "./foo" to "foo" so it matches the expanded glob results.
        if path.startswith("./"):
            path = path[2:]
        if path in ignore_paths or path.startswith("tests/output/"):
            continue
        # Empty files need no license header.
        if os.stat(path).st_size == 0:
            continue
        # If a side-by-side .license file exists, the license information lives
        # there -- check that file instead of the original.
        if not path.endswith(".license") and os.path.exists(path + ".license"):
            path = path + ".license"
        valid_licenses_for_path = valid_licenses
        # Python plugins outside modules/module_utils/doc_fragments are
        # restricted to GPL-3.0-or-later.
        # NOTE(review): presumably because such plugins run inside
        # ansible-core's GPL-licensed process -- confirm.
        if (
            path.startswith("plugins/")
            and not path.startswith(
                ("plugins/modules/", "plugins/module_utils/", "plugins/doc_fragments/")
            )
            and path.endswith((".py", ".py.license"))
        ):
            valid_licenses_for_path = [
                license for license in valid_licenses if license == "GPL-3.0-or-later"
            ]
        licenses = find_licenses(errors, path, relax=path in no_comments_allowed)
        if not licenses:
            if path not in no_comments_allowed:
                errors.append(f"{path}: must have at least one license")
        else:
            for license in licenses:
                if license not in valid_licenses_for_path:
                    errors.append(
                        f"{path}: found not allowed license {license!r}, must be one of"
                        f" {format_license_list(valid_licenses_for_path)}"
                    )

    for error in sorted(errors):
        print(error)
    # Non-zero exit status if and only if at least one problem was reported.
    return len(errors) > 0


if __name__ == "__main__":
    sys.exit(main())
|
@@ -0,0 +1,123 @@
|
|
1
|
+
#!/usr/bin/env python
|
2
|
+
|
3
|
+
# Copyright (c) Ansible Project
|
4
|
+
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
|
5
|
+
# or https://www.gnu.org/licenses/gpl-3.0.txt)
|
6
|
+
# SPDX-License-Identifier: GPL-3.0-or-later
|
7
|
+
|
8
|
+
"""Prevent unwanted files from being added to the source tree."""
|
9
|
+
|
10
|
+
from __future__ import annotations
|
11
|
+
|
12
|
+
import os
|
13
|
+
import sys
|
14
|
+
|
15
|
+
from antsibull_nox.data.antsibull_nox_data_util import (
|
16
|
+
get_bool,
|
17
|
+
get_list_of_strings,
|
18
|
+
setup,
|
19
|
+
)
|
20
|
+
|
21
|
+
|
22
|
+
def main() -> int:
    """Main entry point."""
    # setup() provides the list of files to check and extra configuration read
    # from the JSON data file prepared by the nox session.
    paths, extra_data = setup()

    # File extensions allowed below plugins/modules/.
    module_extensions = tuple(
        sorted(
            get_list_of_strings(
                extra_data,
                "module_extensions",
                default=[
                    ".cs",
                    ".ps1",
                    ".psm1",
                    ".py",
                ],
            )
        )
    )

    # File extensions allowed for all other plugin directories.
    other_extensions = tuple(
        sorted(
            get_list_of_strings(
                extra_data,
                "other_extensions",
                default=[
                    ".py",
                    ".pyi",
                ],
            )
        )
    )

    # Extensions that are acceptable inside the yaml_directories below.
    yaml_extensions = set(
        get_list_of_strings(
            extra_data,
            "yaml_extensions",
            default=[
                ".yml",
                ".yaml",
            ],
        )
    )

    # Exact paths to exclude from the check.
    skip_paths = set(get_list_of_strings(extra_data, "skip_paths", default=[]))

    # Path prefixes to exclude from the check.
    skip_directories = tuple(
        get_list_of_strings(extra_data, "skip_prefixes", default=[])
    )

    # Directories in which YAML files are tolerated.
    yaml_directories = tuple(
        get_list_of_strings(
            extra_data,
            "yaml_directories",
            default=[
                "plugins/test/",
                "plugins/filter/",
            ],
        )
    )

    # Whether symbolic links below plugins/ are acceptable.
    allow_symlinks = get_bool(extra_data, "allow_symlinks")

    errors: list[str] = []
    for path in paths:
        # Only files below plugins/ are subject to this check.
        if not path.startswith("plugins/"):
            continue

        if path in skip_paths:
            continue

        if any(path.startswith(skip_directory) for skip_directory in skip_directories):
            continue

        if os.path.islink(path):
            if not allow_symlinks:
                errors.append("%s: is a symbolic link" % (path,))
        elif not os.path.isfile(path):
            errors.append("%s: is not a regular file" % (path,))

        ext = os.path.splitext(path)[1]

        # YAML files are fine inside the configured YAML directories.
        if ext in yaml_extensions and any(
            path.startswith(yaml_directory) for yaml_directory in yaml_directories
        ):
            continue

        # Modules may use a different extension set than other plugins.
        extensions = (
            module_extensions
            if path.startswith("plugins/modules/")
            else other_extensions
        )

        if ext not in extensions:
            errors.append(f"{path}: extension must be one of: {', '.join(extensions)}")

    for error in sorted(errors):
        print(error)
    # Non-zero exit status if and only if at least one problem was reported.
    return len(errors) > 0


if __name__ == "__main__":
    sys.exit(main())
|
@@ -0,0 +1,244 @@
|
|
1
|
+
#!/usr/bin/env python
|
2
|
+
|
3
|
+
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
4
|
+
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
|
5
|
+
# or https://www.gnu.org/licenses/gpl-3.0.txt)
|
6
|
+
# SPDX-License-Identifier: GPL-3.0-or-later
|
7
|
+
|
8
|
+
"""Make sure all plugin and module documentation adheres to yamllint."""
|
9
|
+
|
10
|
+
from __future__ import annotations
|
11
|
+
|
12
|
+
import ast
|
13
|
+
import io
|
14
|
+
import re
|
15
|
+
import sys
|
16
|
+
import traceback
|
17
|
+
import typing as t
|
18
|
+
|
19
|
+
import yaml
|
20
|
+
from antsibull_nox_data_util import setup # type: ignore
|
21
|
+
from yamllint import linter
|
22
|
+
from yamllint.config import YamlLintConfig
|
23
|
+
from yamllint.linter import PROBLEM_LEVELS
|
24
|
+
|
25
|
+
REPORT_LEVELS: set[PROBLEM_LEVELS] = {
|
26
|
+
"warning",
|
27
|
+
"error",
|
28
|
+
}
|
29
|
+
|
30
|
+
EXAMPLES_FMT_RE = re.compile(r"^# fmt:\s+(\S+)")
|
31
|
+
|
32
|
+
EXAMPLES_SECTION = "EXAMPLES"
|
33
|
+
|
34
|
+
|
35
|
+
def lint(
    *,
    errors: list[dict[str, t.Any]],
    path: str,
    data: str,
    row_offset: int,
    col_offset: int,
    section: str,
    config: YamlLintConfig,
    extra_for_errors: dict[str, t.Any] | None = None,
) -> None:
    """Run yamllint on *data* and append every reported problem to *errors*.

    Line/column numbers are shifted by *row_offset*/*col_offset* so they refer
    to the embedding file rather than the extracted string.  Keys from
    *extra_for_errors* (if any) are merged into every appended entry.
    """

    def record(entry: dict[str, t.Any]) -> None:
        # Merge caller-supplied extra keys into each reported entry.
        if extra_for_errors:
            entry.update(extra_for_errors)
        errors.append(entry)

    try:
        for finding in linter.run(io.StringIO(data), config, path):
            if finding.level not in REPORT_LEVELS:
                continue
            text = f"{section}: {finding.level}: {finding.desc}"
            if finding.rule:
                text += f" ({finding.rule})"
            record(
                {
                    "path": path,
                    "line": row_offset + finding.line,
                    # The col_offset is only valid for line 1; otherwise the offset is 0
                    "col": (col_offset if finding.line == 1 else 0) + finding.column,
                    "message": text,
                }
            )
    except Exception as exc:
        flattened = str(exc).replace("\n", " / ")
        record(
            {
                "path": path,
                "line": row_offset + 1,
                "col": col_offset + 1,
                "message": (
                    f"{section}: Internal error while linting YAML: exception {type(exc)}:"
                    f" {flattened}; traceback: {traceback.format_exc()!r}"
                ),
            }
        )
|
84
|
+
|
85
|
+
|
86
|
+
def process_python_file(
    errors: list[dict[str, t.Any]],
    path: str,
    config: YamlLintConfig,
    config_examples: YamlLintConfig,
) -> None:
    """Lint YAML in DOCUMENTATION/EXAMPLES/RETURN strings of a Python plugin.

    Problems are appended to *errors* as dicts with path/line/col/message keys.
    EXAMPLES uses *config_examples*; the other sections use *config*.
    """
    try:
        with open(path, "rt", encoding="utf-8") as f:
            root = ast.parse(f.read(), filename=path)
    except Exception as exc:
        # An unparseable file becomes a single error entry instead of a crash.
        errors.append(
            {
                "path": path,
                "line": 1,
                "col": 1,
                "message": (
                    f"Error while parsing Python code: exception {type(exc)}:"
                    f" {exc}; traceback: {traceback.format_exc()!r}"
                ),
            }
        )
        return

    # We look for top-level assignments
    for child in root.body:
        if not isinstance(child, ast.Assign):
            continue
        if not isinstance(child.value, ast.Constant):
            continue
        if not isinstance(child.value.value, str):
            continue
        for target in child.targets:
            try:
                section = target.id  # type: ignore
            except AttributeError:
                # Not a plain name target (e.g. tuple unpacking) -- skip it.
                continue
            if section not in ("DOCUMENTATION", "EXAMPLES", "RETURN"):
                continue

            # Extract value and offsets
            data = child.value.value
            # lineno/col_offset are 1-based/0-based respectively; normalize to
            # 0-based offsets that lint() adds to yamllint's 1-based positions.
            row_offset = child.value.lineno - 1
            col_offset = child.value.col_offset - 1

            # If the string start with optional whitespace + linebreak, skip that line
            idx = data.find("\n")
            if idx >= 0 and (idx == 0 or data[:idx].isspace()):
                data = data[idx + 1 :]
                row_offset += 1
                col_offset = 0

            # Check for non-YAML examples
            if section == EXAMPLES_SECTION:
                fmt_match = EXAMPLES_FMT_RE.match(data.lstrip())
                if fmt_match and fmt_match.group(1) != "yaml":
                    # EXAMPLES explicitly declares another format -- do not lint.
                    continue

            # Parse the (remaining) string content
            lint(
                errors=errors,
                path=path,
                data=data,
                row_offset=row_offset,
                col_offset=col_offset,
                section=section,
                config=config_examples if section == EXAMPLES_SECTION else config,
            )
|
153
|
+
|
154
|
+
|
155
|
+
def process_sidecar_docs_file(
    errors: list[dict[str, t.Any]],
    path: str,
    config_examples: YamlLintConfig,
) -> None:
    """Lint the EXAMPLES string of a sidecar documentation YAML file.

    Problems are appended to *errors* as dicts with path/line/col/message keys.
    """
    try:
        # TODO: get hold of YAML structure so we also get correct line/col numbers
        # inside EXAMPLES!
        with open(path, "rb") as stream:
            root = yaml.load(stream, Loader=yaml.SafeLoader)
    except Exception as exc:
        errors.append(
            {
                "path": path,
                "line": 1,
                "col": 1,
                "message": (
                    # BUGFIX: the message used to say "Python code", but this
                    # function parses a YAML sidecar document.
                    f"Error while parsing YAML document: exception {type(exc)}:"
                    f" {exc}; traceback: {traceback.format_exc()!r}"
                ),
            }
        )
        return

    # Only a mapping with a string EXAMPLES entry is of interest here.
    if not isinstance(root, dict):
        return
    examples = root.get(EXAMPLES_SECTION)
    if not isinstance(examples, str):
        return

    # Check for non-YAML examples
    fmt_match = EXAMPLES_FMT_RE.match(examples.lstrip())
    if fmt_match and fmt_match.group(1) != "yaml":
        return

    lint(
        errors=errors,
        path=path,
        data=examples,
        row_offset=0,  # TODO
        col_offset=0,  # TODO
        section=EXAMPLES_SECTION,
        config=config_examples,
        extra_for_errors={
            "note": "Line/column are relative to EXAMPLES string contents"
        },
    )
|
202
|
+
|
203
|
+
|
204
|
+
def main() -> int:
    """Main entry point."""
    # setup() provides the list of files to check and extra configuration read
    # from the JSON data file prepared by the nox session.
    paths, extra_data = setup()
    # Optional paths to yamllint configuration files.
    config: str | None = extra_data.get("config")
    config_examples: str | None = extra_data.get("config_examples")

    if config:
        yamllint_config = YamlLintConfig(file=config)
    else:
        # Fall back to yamllint's built-in default ruleset.
        yamllint_config = YamlLintConfig(content="extends: default")

    if config_examples:
        yamllint_config_examples = YamlLintConfig(file=config_examples)
    else:
        # Without a dedicated EXAMPLES config, reuse the general config.
        yamllint_config_examples = yamllint_config

    errors: list[dict[str, t.Any]] = []
    for path in paths:
        if path.endswith(".py"):
            process_python_file(errors, path, yamllint_config, yamllint_config_examples)
        if path.endswith((".yml", ".yaml")):
            process_sidecar_docs_file(errors, path, yamllint_config_examples)

    # Deterministic output order: by location, then message.
    errors.sort(
        key=lambda error: (error["path"], error["line"], error["col"], error["message"])
    )
    for error in errors:
        prefix = f"{error['path']}:{error['line']}:{error['col']}: "
        msg = error["message"]
        if "note" in error:
            msg = f"{msg}\nNote: {error['note']}"
        # Align continuation lines under the first line by blanking the prefix.
        for i, line in enumerate(msg.splitlines()):
            print(f"{prefix}{line}")
            if i == 0:
                prefix = " " * len(prefix)

    # Non-zero exit status if and only if at least one problem was reported.
    return len(errors) > 0


if __name__ == "__main__":
    sys.exit(main())
|
@@ -0,0 +1,38 @@
|
|
1
|
+
# Author: Felix Fontein <felix@fontein.de>
|
2
|
+
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
|
3
|
+
# https://www.gnu.org/licenses/gpl-3.0.txt)
|
4
|
+
# SPDX-License-Identifier: GPL-3.0-or-later
|
5
|
+
# SPDX-FileCopyrightText: 2025, Ansible Project
|
6
|
+
|
7
|
+
"""
|
8
|
+
Utility code for scripts in data.
|
9
|
+
"""
|
10
|
+
|
11
|
+
from __future__ import annotations
|
12
|
+
|
13
|
+
import json
|
14
|
+
import typing as t
|
15
|
+
from pathlib import Path
|
16
|
+
|
17
|
+
import nox
|
18
|
+
|
19
|
+
|
20
|
+
def prepare_data_script(
    session: nox.Session,
    *,
    base_name: str,
    paths: list[Path],
    extra_data: dict[str, t.Any] | None = None,
) -> Path:
    """
    Prepare a data JSON file for the extra sanity check scripts.

    Writes ``<base_name>-data.json`` into the session's tmp directory; the
    document contains the given paths (relative to the current working
    directory) under ``"paths"``, merged with any *extra_data* entries.
    Returns the path of the written file.
    """
    root = Path.cwd()
    payload: dict[str, t.Any] = {
        "paths": [str(entry.relative_to(root)) for entry in paths]
    }
    if extra_data:
        payload.update(extra_data)
    target = Path(session.create_tmp()) / f"{base_name}-data.json"
    with open(target, "w", encoding="utf-8") as dump_file:
        json.dump(payload, dump_file)
    return target