antsibull-nox 0.1.0__py3-none-any.whl → 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- antsibull_nox/__init__.py +48 -1
- antsibull_nox/ansible.py +260 -0
- antsibull_nox/collection/__init__.py +56 -0
- antsibull_nox/collection/data.py +106 -0
- antsibull_nox/collection/extract.py +23 -0
- antsibull_nox/collection/install.py +523 -0
- antsibull_nox/{collection.py → collection/search.py} +164 -253
- antsibull_nox/config.py +332 -0
- antsibull_nox/data/action-groups.py +1 -1
- antsibull_nox/data/antsibull_nox_data_util.py +91 -0
- antsibull_nox/data/license-check.py +1 -1
- antsibull_nox/data/no-unwanted-files.py +5 -1
- antsibull_nox/data/plugin-yamllint.py +244 -0
- antsibull_nox/data_util.py +0 -77
- antsibull_nox/interpret_config.py +235 -0
- antsibull_nox/paths.py +19 -0
- antsibull_nox/python.py +81 -0
- antsibull_nox/sessions.py +898 -26
- antsibull_nox/utils.py +85 -0
- {antsibull_nox-0.1.0.dist-info → antsibull_nox-0.2.0.dist-info}/METADATA +3 -1
- antsibull_nox-0.2.0.dist-info/RECORD +25 -0
- antsibull_nox-0.1.0.dist-info/RECORD +0 -14
- {antsibull_nox-0.1.0.dist-info → antsibull_nox-0.2.0.dist-info}/WHEEL +0 -0
- {antsibull_nox-0.1.0.dist-info → antsibull_nox-0.2.0.dist-info}/licenses/LICENSES/GPL-3.0-or-later.txt +0 -0
antsibull_nox/config.py
ADDED
@@ -0,0 +1,332 @@
|
|
1
|
+
# Author: Felix Fontein <felix@fontein.de>
|
2
|
+
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
|
3
|
+
# https://www.gnu.org/licenses/gpl-3.0.txt)
|
4
|
+
# SPDX-License-Identifier: GPL-3.0-or-later
|
5
|
+
# SPDX-FileCopyrightText: 2025, Ansible Project
|
6
|
+
|
7
|
+
"""
|
8
|
+
Config file schema.
|
9
|
+
"""
|
10
|
+
|
11
|
+
from __future__ import annotations
|
12
|
+
|
13
|
+
import os
|
14
|
+
import typing as t
|
15
|
+
|
16
|
+
import pydantic as p
|
17
|
+
|
18
|
+
from .ansible import AnsibleCoreVersion
|
19
|
+
from .utils import Version
|
20
|
+
|
21
|
+
try:
|
22
|
+
from tomllib import load as _load_toml
|
23
|
+
except ImportError:
|
24
|
+
from tomli import load as _load_toml # type: ignore
|
25
|
+
|
26
|
+
|
27
|
+
def _parse_version(value: t.Any) -> Version:
    """
    Coerce *value* into a ``Version``.

    Accepts an existing ``Version`` instance or a dotted version string;
    anything else raises ``ValueError``.
    """
    if isinstance(value, Version):
        return value
    looks_like_version_string = isinstance(value, str) and "." in value
    if not looks_like_version_string:
        raise ValueError("Must be version string")
    return Version.parse(value)
|
33
|
+
|
34
|
+
|
35
|
+
def _parse_ansible_core_version(value: t.Any) -> AnsibleCoreVersion:
    """
    Coerce *value* into an ``AnsibleCoreVersion``.

    Accepts an existing ``Version``, the special strings ``"devel"`` and
    ``"milestone"``, or a dotted version string; anything else raises
    ``ValueError``.
    """
    if isinstance(value, Version):
        return value
    if isinstance(value, str):
        if value in ("devel", "milestone"):
            # The special branch names are passed through as-is.
            return value
        if "." in value:
            return Version.parse(value)
    raise ValueError("Must be ansible-core version string")
|
46
|
+
|
47
|
+
|
48
|
+
# Pydantic-annotated aliases: these accept strings in config files and coerce
# them through the parser functions above before model validation runs.
PVersion = t.Annotated[Version, p.BeforeValidator(_parse_version)]
PAnsibleCoreVersion = t.Annotated[
    AnsibleCoreVersion, p.BeforeValidator(_parse_ansible_core_version)
]
|
52
|
+
|
53
|
+
|
54
|
+
class _BaseModel(p.BaseModel):
    """
    Common base class for all config models.
    """

    # All models are immutable, tolerate unknown keys (extra="allow"), and
    # run validation on default values as well.
    model_config = p.ConfigDict(frozen=True, extra="allow", validate_default=True)
|
56
|
+
|
57
|
+
|
58
|
+
class SessionLint(_BaseModel):
    """
    Lint session config.

    Controls which lint tools run, which PyPI package provides each tool,
    and which config file each tool uses (None = the tool's own default).
    """

    # Whether the lint sessions run by default.
    default: bool = True
    # Additional Python code files to pass to the linters.
    extra_code_files: list[str] = []

    # isort:
    run_isort: bool = True
    isort_config: t.Optional[p.FilePath] = None
    isort_package: str = "isort"

    # black:
    run_black: bool = True
    # NOTE(review): presumably controls running black on module code separately;
    # confirm exact fallback semantics in the session implementation.
    run_black_modules: t.Optional[bool] = None
    black_config: t.Optional[p.FilePath] = None
    black_package: str = "black"

    # flake8:
    run_flake8: bool = True
    flake8_config: t.Optional[p.FilePath] = None
    flake8_package: str = "flake8"

    # pylint:
    run_pylint: bool = True
    pylint_rcfile: t.Optional[p.FilePath] = None
    # Separate rcfile for modules/module_utils; falls back per session logic.
    pylint_modules_rcfile: t.Optional[p.FilePath] = None
    pylint_package: str = "pylint"
    pylint_ansible_core_package: t.Optional[str] = "ansible-core"
    pylint_extra_deps: list[str] = []

    # yamllint:
    run_yamllint: bool = True
    yamllint_config: t.Optional[p.FilePath] = None
    yamllint_config_plugins: t.Optional[p.FilePath] = None
    yamllint_config_plugins_examples: t.Optional[p.FilePath] = None
    yamllint_package: str = "yamllint"

    # mypy:
    run_mypy: bool = True
    mypy_config: t.Optional[p.FilePath] = None
    mypy_package: str = "mypy"
    mypy_ansible_core_package: t.Optional[str] = "ansible-core"
    mypy_extra_deps: list[str] = []
|
103
|
+
|
104
|
+
|
105
|
+
class SessionDocsCheck(_BaseModel):
    """
    Docs check session config.
    """

    # Whether the docs check session runs by default.
    default: bool = True

    # PyPI packages providing the documentation checker and ansible-core.
    antsibull_docs_package: str = "antsibull-docs"
    ansible_core_package: str = "ansible-core"
    # Which collection references to validate: only this collection ("self"),
    # dependent collections, or all.
    validate_collection_refs: t.Optional[t.Literal["self", "dependent", "all"]] = None
    # Additional collections to install for the check.
    extra_collections: list[str] = []
|
116
|
+
|
117
|
+
|
118
|
+
class SessionLicenseCheck(_BaseModel):
    """
    License check session config.
    """

    # Whether the license check session runs by default.
    default: bool = True

    # Whether to run the REUSE tool, and which package provides it.
    run_reuse: bool = True
    reuse_package: str = "reuse"
    # Whether to run the bundled license-check script.
    run_license_check: bool = True
    # Extra paths the license-check script should ignore.
    license_check_extra_ignore_paths: list[str] = []
|
129
|
+
|
130
|
+
|
131
|
+
class ActionGroup(_BaseModel):
    """
    Information about an action group.

    Used by the action-groups extra check
    (see ``SessionExtraChecks.action_groups_config``).
    """

    # Name of the action group.
    name: str
    # Regex pattern to match modules that could belong to this action group.
    pattern: str
    # Doc fragment that members of the action group must have, but no other module
    # must have
    doc_fragment: str
    # Exclusion list of modules that match the regex, but should not be part of the
    # action group. All other modules matching the regex are assumed to be part of
    # the action group.
    exclusions: list[str] = []
|
147
|
+
|
148
|
+
|
149
|
+
class SessionExtraChecks(_BaseModel):
    """
    Extra checks session config.
    """

    # Whether the extra checks session runs by default.
    default: bool = True

    # no-unwanted-files:
    run_no_unwanted_files: bool = True
    # File extensions allowed for modules.
    no_unwanted_files_module_extensions: list[str] = [".cs", ".ps1", ".psm1", ".py"]
    # File extensions allowed for other plugin code.
    no_unwanted_files_other_extensions: list[str] = [".py", ".pyi"]
    # Extensions recognized as YAML files.
    no_unwanted_files_yaml_extensions: list[str] = [".yml", ".yaml"]
    # Individual paths the check should skip.
    no_unwanted_files_skip_paths: list[str] = []
    # Directories the check should skip entirely.
    no_unwanted_files_skip_directories: t.Optional[list[str]] = []
    # Directories in which YAML files are allowed.
    no_unwanted_files_yaml_directories: t.Optional[list[str]] = [
        "plugins/test/",
        "plugins/filter/",
    ]
    no_unwanted_files_allow_symlinks: bool = False

    # action-groups:
    run_action_groups: bool = False
    action_groups_config: list[ActionGroup] = []
|
172
|
+
|
173
|
+
|
174
|
+
class SessionBuildImportCheck(_BaseModel):
    """
    Collection build and Galaxy import session config.
    """

    # Whether the build/import check session runs by default.
    default: bool = True

    ansible_core_package: str = "ansible-core"
    # Whether to run galaxy-importer on the built collection artifact.
    run_galaxy_importer: bool = True
    galaxy_importer_package: str = "galaxy-importer"
    # https://github.com/ansible/galaxy-importer#configuration
    galaxy_importer_config_path: t.Optional[p.FilePath] = None
|
186
|
+
|
187
|
+
|
188
|
+
class DevelLikeBranch(_BaseModel):
    """
    A Git repository + branch for a devel-like branch of ansible-core.
    """

    # Git repository; None means the default ansible-core repository.
    repository: t.Optional[str] = None
    # Branch name in the repository.
    branch: str

    @p.model_validator(mode="before")
    @classmethod
    def _pre_validate(cls, values: t.Any) -> t.Any:
        """
        Accept shorthand forms: a plain branch name string, or a two-element
        ``[repository, branch]`` list of strings.
        """
        if isinstance(values, str):
            return {"branch": values}
        if (
            isinstance(values, list)
            and len(values) == 2
            and all(isinstance(v, str) for v in values)
        ):
            return {"repository": values[0], "branch": values[1]}
        return values
|
208
|
+
|
209
|
+
|
210
|
+
class SessionAnsibleTestSanity(_BaseModel):
    """
    Ansible-test sanity tests session config.
    """

    # Whether these sessions run by default.
    default: bool = False

    # Selection of ansible-core versions/branches to generate sessions for:
    include_devel: bool = False
    include_milestone: bool = False
    add_devel_like_branches: list[DevelLikeBranch] = []
    min_version: t.Optional[PVersion] = None
    max_version: t.Optional[PVersion] = None
    # Versions in the selected range to exclude.
    except_versions: list[PAnsibleCoreVersion] = []
|
223
|
+
|
224
|
+
|
225
|
+
class SessionAnsibleTestUnits(_BaseModel):
    """
    Ansible-test unit tests session config.
    """

    # Whether these sessions run by default.
    default: bool = False

    # Selection of ansible-core versions/branches to generate sessions for:
    include_devel: bool = False
    include_milestone: bool = False
    add_devel_like_branches: list[DevelLikeBranch] = []
    min_version: t.Optional[PVersion] = None
    max_version: t.Optional[PVersion] = None
    # Versions in the selected range to exclude.
    except_versions: list[PAnsibleCoreVersion] = []
|
238
|
+
|
239
|
+
|
240
|
+
class SessionAnsibleTestIntegrationWDefaultContainer(_BaseModel):
    """
    Ansible-test integration tests with default container session config.
    """

    # Whether these sessions run by default.
    default: bool = False

    # Selection of ansible-core versions/branches to generate sessions for:
    include_devel: bool = False
    include_milestone: bool = False
    add_devel_like_branches: list[DevelLikeBranch] = []
    min_version: t.Optional[PVersion] = None
    max_version: t.Optional[PVersion] = None
    except_versions: list[PAnsibleCoreVersion] = []
    # Maps an ansible-core version, "devel"/"milestone", or a branch name from
    # add_devel_like_branches to the Python versions to test with.
    core_python_versions: dict[t.Union[PAnsibleCoreVersion, str], list[PVersion]] = {}
    controller_python_versions_only: bool = False

    @p.model_validator(mode="after")
    def _validate_core_keys(self) -> t.Self:
        """
        Ensure every key of ``core_python_versions`` is a known version,
        a special branch name, or a branch from ``add_devel_like_branches``.
        """
        branch_names = [dlb.branch for dlb in self.add_devel_like_branches]
        for key in self.core_python_versions:
            if isinstance(key, Version) or key in {"devel", "milestone"}:
                continue
            if key in branch_names:
                continue
            raise ValueError(
                f"Unknown ansible-core version or branch name {key!r} in core_python_versions"
            )
        return self
|
268
|
+
|
269
|
+
|
270
|
+
class SessionAnsibleLint(_BaseModel):
    """
    Ansible-lint session config.
    """

    # Whether the ansible-lint session runs by default.
    default: bool = True

    # PyPI package providing ansible-lint.
    ansible_lint_package: str = "ansible-lint"
    # Whether to pass strict mode to ansible-lint.
    strict: bool = False
|
279
|
+
|
280
|
+
|
281
|
+
class Sessions(_BaseModel):
    """
    Configuration of nox sessions to add.

    Each field is None when the corresponding session is not configured.
    """

    lint: t.Optional[SessionLint] = None
    docs_check: t.Optional[SessionDocsCheck] = None
    license_check: t.Optional[SessionLicenseCheck] = None
    extra_checks: t.Optional[SessionExtraChecks] = None
    build_import_check: t.Optional[SessionBuildImportCheck] = None
    ansible_test_sanity: t.Optional[SessionAnsibleTestSanity] = None
    ansible_test_units: t.Optional[SessionAnsibleTestUnits] = None
    ansible_test_integration_w_default_container: t.Optional[
        SessionAnsibleTestIntegrationWDefaultContainer
    ] = None
    ansible_lint: t.Optional[SessionAnsibleLint] = None
|
297
|
+
|
298
|
+
|
299
|
+
class CollectionSource(_BaseModel):
    """
    Source from which to install a collection.
    """

    # Source specification for installing the collection.
    source: str

    @p.model_validator(mode="before")
    @classmethod
    def _pre_validate(cls, values: t.Any) -> t.Any:
        """Accept a plain string as shorthand for ``{"source": <string>}``."""
        if isinstance(values, str):
            return {"source": values}
        return values
|
312
|
+
|
313
|
+
|
314
|
+
class Config(_BaseModel):
    """
    The contents of a antsibull-nox config file.
    """

    # Maps collection names to the source they should be installed from.
    collection_sources: dict[str, CollectionSource] = {}
    # Which nox sessions to generate.
    sessions: Sessions = Sessions()
|
321
|
+
|
322
|
+
|
323
|
+
def load_config_from_toml(path: str | os.PathLike) -> Config:
    """
    Load a config TOML file.

    :param path: Path of the TOML file to read.
    :returns: The validated ``Config``.
    :raises ValueError: If the file cannot be parsed as TOML
        (``tomllib.TOMLDecodeError`` is a ``ValueError`` subclass,
        so the wrapped re-raise below catches it).
    """
    with open(path, "rb") as f:
        try:
            data = _load_toml(f)
        except ValueError as exc:
            # Attach the file path to the parse error for better diagnostics.
            raise ValueError(f"Error while reading {path}: {exc}") from exc
    return Config.model_validate(data)
|
@@ -0,0 +1,91 @@
|
|
1
|
+
# Author: Felix Fontein <felix@fontein.de>
|
2
|
+
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
|
3
|
+
# https://www.gnu.org/licenses/gpl-3.0.txt)
|
4
|
+
# SPDX-License-Identifier: GPL-3.0-or-later
|
5
|
+
# SPDX-FileCopyrightText: 2025, Ansible Project
|
6
|
+
|
7
|
+
"""
|
8
|
+
Utility code for scripts in data.
|
9
|
+
"""
|
10
|
+
|
11
|
+
from __future__ import annotations
|
12
|
+
|
13
|
+
import json
|
14
|
+
import sys
|
15
|
+
import typing as t
|
16
|
+
|
17
|
+
|
18
|
+
def setup() -> tuple[list[str], dict[str, t.Any]]:
    """
    Fetch list of paths and potential extra configuration.

    First thing to call in an extra sanity check script in data/.
    """
    argv = sys.argv
    if len(argv) == 3 and argv[1] == "--data":
        # Preferred way: load information from JSON file
        json_path = argv[2]
        try:
            with open(json_path, "rb") as stream:
                extra_data = json.load(stream)
        except Exception as exc:
            raise ValueError(f"Error while reading JSON from {json_path}") from exc
        try:
            paths = get_list_of_strings(extra_data, "paths")
        except ValueError as exc:
            raise ValueError(f"Invalid JSON content in {json_path}: {exc}") from exc
        extra_data.pop("paths")
        return paths, extra_data
    if len(argv) >= 2:
        # It's also possible to pass a list of paths on the command line, to simplify
        # testing these scripts.
        return argv[1:], {}
    # Alternatively one can pass a list of files from stdin, for example by piping
    # the output of 'git ls-files' into this script. This is also for testing these
    # scripts.
    return sys.stdin.read().splitlines(), {}
|
46
|
+
|
47
|
+
|
48
|
+
def get_list_of_strings(
|
49
|
+
data: dict[str, t.Any],
|
50
|
+
key: str,
|
51
|
+
*,
|
52
|
+
default: list[str] | None = None,
|
53
|
+
) -> list[str]:
|
54
|
+
"""
|
55
|
+
Retrieves a list of strings from key ``key`` of the JSON object ``data``.
|
56
|
+
|
57
|
+
If ``default`` is set to a list, a missing key results in this value being returned.
|
58
|
+
"""
|
59
|
+
sentinel = object()
|
60
|
+
value = data.get(key, sentinel)
|
61
|
+
if value is sentinel:
|
62
|
+
if default is not None:
|
63
|
+
return default
|
64
|
+
raise ValueError(f"{key!r} is not a present")
|
65
|
+
if not isinstance(value, list):
|
66
|
+
raise ValueError(f"{key!r} is not a list, but {type(key)}")
|
67
|
+
if not all(isinstance(entry, str) for entry in value):
|
68
|
+
raise ValueError(f"{key!r} is not a list of strings")
|
69
|
+
return t.cast(list[str], value)
|
70
|
+
|
71
|
+
|
72
|
+
def get_bool(
|
73
|
+
data: dict[str, t.Any],
|
74
|
+
key: str,
|
75
|
+
*,
|
76
|
+
default: bool | None = None,
|
77
|
+
) -> bool:
|
78
|
+
"""
|
79
|
+
Retrieves a boolean from key ``key`` of the JSON object ``data``.
|
80
|
+
|
81
|
+
If ``default`` is set to a boolean, a missing key results in this value being returned.
|
82
|
+
"""
|
83
|
+
sentinel = object()
|
84
|
+
value = data.get(key, sentinel)
|
85
|
+
if value is sentinel:
|
86
|
+
if default is not None:
|
87
|
+
return default
|
88
|
+
raise ValueError(f"{key!r} is not a present")
|
89
|
+
if not isinstance(value, bool):
|
90
|
+
raise ValueError(f"{key!r} is not a bool, but {type(key)}")
|
91
|
+
return value
|
@@ -12,7 +12,11 @@ from __future__ import annotations
|
|
12
12
|
import os
|
13
13
|
import sys
|
14
14
|
|
15
|
-
from antsibull_nox.
|
15
|
+
from antsibull_nox.data.antsibull_nox_data_util import (
|
16
|
+
get_bool,
|
17
|
+
get_list_of_strings,
|
18
|
+
setup,
|
19
|
+
)
|
16
20
|
|
17
21
|
|
18
22
|
def main() -> int:
|
@@ -0,0 +1,244 @@
|
|
1
|
+
#!/usr/bin/env python
|
2
|
+
|
3
|
+
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
|
4
|
+
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
|
5
|
+
# or https://www.gnu.org/licenses/gpl-3.0.txt)
|
6
|
+
# SPDX-License-Identifier: GPL-3.0-or-later
|
7
|
+
|
8
|
+
"""Make sure all plugin and module documentation adheres to yamllint."""
|
9
|
+
|
10
|
+
from __future__ import annotations
|
11
|
+
|
12
|
+
import ast
|
13
|
+
import io
|
14
|
+
import re
|
15
|
+
import sys
|
16
|
+
import traceback
|
17
|
+
import typing as t
|
18
|
+
|
19
|
+
import yaml
|
20
|
+
from antsibull_nox_data_util import setup # type: ignore
|
21
|
+
from yamllint import linter
|
22
|
+
from yamllint.config import YamlLintConfig
|
23
|
+
from yamllint.linter import PROBLEM_LEVELS
|
24
|
+
|
25
|
+
# Only yamllint problems at these levels are reported.
REPORT_LEVELS: set[PROBLEM_LEVELS] = {
    "warning",
    "error",
}

# Matches an explicit format marker (e.g. "# fmt: jinja") at the start of an
# EXAMPLES block; non-"yaml" formats are skipped by the linting code below.
EXAMPLES_FMT_RE = re.compile(r"^# fmt:\s+(\S+)")

# Name of the documentation section that contains usage examples.
EXAMPLES_SECTION = "EXAMPLES"
|
33
|
+
|
34
|
+
|
35
|
+
def lint(
    *,
    errors: list[dict[str, t.Any]],
    path: str,
    data: str,
    row_offset: int,
    col_offset: int,
    section: str,
    config: YamlLintConfig,
    extra_for_errors: dict[str, t.Any] | None = None,
) -> None:
    """
    Run yamllint on an embedded YAML document and collect problems.

    :param errors: List that problems are appended to; each entry is a dict
        with ``path``, ``line``, ``col``, and ``message`` keys.
    :param path: File the YAML document was extracted from (for reporting).
    :param data: The YAML document text to lint.
    :param row_offset: Number of rows the document is shifted inside the file.
    :param col_offset: Column offset of the document's first line in the file.
    :param section: Section name (e.g. DOCUMENTATION) used in messages.
    :param config: The yamllint configuration to use.
    :param extra_for_errors: Optional extra keys merged into every error dict.
    """
    try:
        problems = linter.run(
            io.StringIO(data),
            config,
            path,
        )
        for problem in problems:
            # Skip problems below the reporting threshold (see REPORT_LEVELS).
            if problem.level not in REPORT_LEVELS:
                continue
            msg = f"{section}: {problem.level}: {problem.desc}"
            if problem.rule:
                msg += f" ({problem.rule})"
            errors.append(
                {
                    "path": path,
                    "line": row_offset + problem.line,
                    # The col_offset is only valid for line 1; otherwise the offset is 0
                    "col": (col_offset if problem.line == 1 else 0) + problem.column,
                    "message": msg,
                }
            )
            if extra_for_errors:
                errors[-1].update(extra_for_errors)
    except Exception as exc:
        # Deliberate broad catch: a linter crash must not abort the whole
        # check; report it as an error at the start of the document instead.
        error = str(exc).replace("\n", " / ")
        errors.append(
            {
                "path": path,
                "line": row_offset + 1,
                "col": col_offset + 1,
                "message": (
                    f"{section}: Internal error while linting YAML: exception {type(exc)}:"
                    f" {error}; traceback: {traceback.format_exc()!r}"
                ),
            }
        )
        if extra_for_errors:
            errors[-1].update(extra_for_errors)
|
84
|
+
|
85
|
+
|
86
|
+
def process_python_file(
    errors: list[dict[str, t.Any]],
    path: str,
    config: YamlLintConfig,
    config_examples: YamlLintConfig,
) -> None:
    """
    Lint YAML documentation embedded in a Python plugin/module file.

    Parses the file with ``ast``, finds top-level string assignments to
    DOCUMENTATION, EXAMPLES, and RETURN, and runs yamllint on their
    contents. Problems are appended to ``errors``.

    :param errors: List that problems are appended to (see ``lint``).
    :param path: Path of the Python file to process.
    :param config: yamllint config for DOCUMENTATION/RETURN.
    :param config_examples: yamllint config for EXAMPLES.
    """
    try:
        with open(path, "rt", encoding="utf-8") as f:
            root = ast.parse(f.read(), filename=path)
    except Exception as exc:
        # Report unparseable files as a single error instead of crashing.
        errors.append(
            {
                "path": path,
                "line": 1,
                "col": 1,
                "message": (
                    f"Error while parsing Python code: exception {type(exc)}:"
                    f" {exc}; traceback: {traceback.format_exc()!r}"
                ),
            }
        )
        return

    # We look for top-level assignments
    for child in root.body:
        if not isinstance(child, ast.Assign):
            continue
        if not isinstance(child.value, ast.Constant):
            continue
        if not isinstance(child.value.value, str):
            continue
        for target in child.targets:
            try:
                # Only plain-name targets have an ``id``; tuple/attribute
                # targets raise AttributeError and are skipped.
                section = target.id  # type: ignore
            except AttributeError:
                continue
            if section not in ("DOCUMENTATION", "EXAMPLES", "RETURN"):
                continue

            # Extract value and offsets
            data = child.value.value
            row_offset = child.value.lineno - 1
            col_offset = child.value.col_offset - 1

            # If the string start with optional whitespace + linebreak, skip that line
            idx = data.find("\n")
            if idx >= 0 and (idx == 0 or data[:idx].isspace()):
                data = data[idx + 1 :]
                row_offset += 1
                col_offset = 0

            # Check for non-YAML examples
            if section == EXAMPLES_SECTION:
                fmt_match = EXAMPLES_FMT_RE.match(data.lstrip())
                if fmt_match and fmt_match.group(1) != "yaml":
                    continue

            # Parse the (remaining) string content
            lint(
                errors=errors,
                path=path,
                data=data,
                row_offset=row_offset,
                col_offset=col_offset,
                section=section,
                config=config_examples if section == EXAMPLES_SECTION else config,
            )
|
153
|
+
|
154
|
+
|
155
|
+
def process_sidecar_docs_file(
    errors: list[dict[str, t.Any]],
    path: str,
    config_examples: YamlLintConfig,
) -> None:
    """
    Lint the EXAMPLES section of a sidecar documentation (YAML) file.

    Loads the YAML file, extracts the EXAMPLES string (if any), and runs
    yamllint on it. Problems are appended to ``errors``.

    :param errors: List that problems are appended to (see ``lint``).
    :param path: Path of the sidecar YAML file to process.
    :param config_examples: yamllint config for EXAMPLES.
    """
    try:
        # TODO: get hold of YAML structure so we also get correct line/col numbers
        # inside EXAMPLES!
        with open(path, "rb") as stream:
            root = yaml.load(stream, Loader=yaml.SafeLoader)
    except Exception as exc:
        errors.append(
            {
                "path": path,
                "line": 1,
                "col": 1,
                "message": (
                    # Fixed copy'n'paste bug: this branch handles YAML parse
                    # failures, not Python parse failures.
                    f"Error while parsing YAML document: exception {type(exc)}:"
                    f" {exc}; traceback: {traceback.format_exc()!r}"
                ),
            }
        )
        return

    # Sidecar docs are a mapping; anything else has no EXAMPLES to check.
    if not isinstance(root, dict):
        return
    examples = root.get(EXAMPLES_SECTION)
    if not isinstance(examples, str):
        return

    # Check for non-YAML examples
    fmt_match = EXAMPLES_FMT_RE.match(examples.lstrip())
    if fmt_match and fmt_match.group(1) != "yaml":
        return

    lint(
        errors=errors,
        path=path,
        data=examples,
        row_offset=0,  # TODO
        col_offset=0,  # TODO
        section=EXAMPLES_SECTION,
        config=config_examples,
        extra_for_errors={
            "note": "Line/column are relative to EXAMPLES string contents"
        },
    )
|
202
|
+
|
203
|
+
|
204
|
+
def main() -> int:
    """Main entry point."""
    paths, extra_data = setup()
    config: str | None = extra_data.get("config")
    config_examples: str | None = extra_data.get("config_examples")

    # Fall back to yamllint's default ruleset when no config file is given;
    # the examples config falls back to the general config.
    yamllint_config = (
        YamlLintConfig(file=config)
        if config
        else YamlLintConfig(content="extends: default")
    )
    yamllint_config_examples = (
        YamlLintConfig(file=config_examples) if config_examples else yamllint_config
    )

    errors: list[dict[str, t.Any]] = []
    for path in paths:
        if path.endswith(".py"):
            process_python_file(errors, path, yamllint_config, yamllint_config_examples)
        if path.endswith((".yml", ".yaml")):
            process_sidecar_docs_file(errors, path, yamllint_config_examples)

    errors.sort(
        key=lambda entry: (entry["path"], entry["line"], entry["col"], entry["message"])
    )
    for error in errors:
        location = f"{error['path']}:{error['line']}:{error['col']}: "
        message = error["message"]
        if "note" in error:
            message = f"{message}\nNote: {error['note']}"
        for line_index, line in enumerate(message.splitlines()):
            print(f"{location}{line}")
            # Indent continuation lines so they align under the first one.
            if line_index == 0:
                location = " " * len(location)

    return len(errors) > 0
|
241
|
+
|
242
|
+
|
243
|
+
if __name__ == "__main__":
|
244
|
+
sys.exit(main())
|