antsibull_nox-0.4.0-py3-none-any.whl → antsibull_nox-0.6.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- antsibull_nox/__init__.py +1 -1
- antsibull_nox/collection/__init__.py +5 -25
- antsibull_nox/collection/build.py +69 -0
- antsibull_nox/collection/utils.py +41 -0
- antsibull_nox/config.py +34 -2
- antsibull_nox/data/action-groups.py +9 -2
- antsibull_nox/data/antsibull_nox_data_util.py +25 -2
- antsibull_nox/data/no-trailing-whitespace.py +59 -0
- antsibull_nox/data/no-unwanted-files.py +1 -1
- antsibull_nox/data/rst-extra.py +167 -0
- antsibull_nox/data/rst-yamllint.py +192 -0
- antsibull_nox/interpret_config.py +33 -0
- antsibull_nox/paths.py +2 -3
- antsibull_nox/sessions/ansible_test.py +19 -4
- antsibull_nox/sessions/build_import_check.py +5 -35
- antsibull_nox/sessions/docs_check.py +56 -1
- antsibull_nox/sessions/extra_checks.py +19 -0
- antsibull_nox/sessions/lint.py +257 -56
- {antsibull_nox-0.4.0.dist-info → antsibull_nox-0.6.0.dist-info}/METADATA +9 -7
- {antsibull_nox-0.4.0.dist-info → antsibull_nox-0.6.0.dist-info}/RECORD +23 -18
- {antsibull_nox-0.4.0.dist-info → antsibull_nox-0.6.0.dist-info}/WHEEL +0 -0
- {antsibull_nox-0.4.0.dist-info → antsibull_nox-0.6.0.dist-info}/entry_points.txt +0 -0
- {antsibull_nox-0.4.0.dist-info → antsibull_nox-0.6.0.dist-info}/licenses/LICENSES/GPL-3.0-or-later.txt +0 -0
antsibull_nox/data/rst-yamllint.py ADDED
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2025, Felix Fontein <felix@fontein.de>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
+# or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+"""Make sure all plugin and module documentation adheres to yamllint."""
+
+from __future__ import annotations
+
+import io
+import os
+import sys
+import traceback
+import typing as t
+
+from antsibull_docutils.rst_code_finder import find_code_blocks
+from antsibull_nox_data_util import setup  # type: ignore
+from yamllint import linter
+from yamllint.config import YamlLintConfig
+from yamllint.linter import PROBLEM_LEVELS
+
+REPORT_LEVELS: set[PROBLEM_LEVELS] = {
+    "warning",
+    "error",
+}
+
+YAML_LANGUAGES = {"yaml", "yaml+jinja"}
+
+
+def lint(
+    *,
+    errors: list[dict[str, t.Any]],
+    path: str,
+    data: str,
+    row_offset: int,
+    col_offset: int,
+    config: YamlLintConfig,
+    extra_for_errors: dict[str, t.Any] | None = None,
+) -> None:
+    # If the string start with optional whitespace + linebreak, skip that line
+    idx = data.find("\n")
+    if idx >= 0 and (idx == 0 or data[:idx].isspace()):
+        data = data[idx + 1 :]
+        row_offset += 1
+        col_offset = 0
+
+    try:
+        problems = linter.run(
+            io.StringIO(data),
+            config,
+            path,
+        )
+        for problem in problems:
+            if problem.level not in REPORT_LEVELS:
+                continue
+            msg = f"{problem.level}: {problem.desc}"
+            if problem.rule:
+                msg += f" ({problem.rule})"
+            errors.append(
+                {
+                    "path": path,
+                    "line": row_offset + problem.line,
+                    # The col_offset is only valid for line 1; otherwise the offset is 0
+                    "col": (col_offset if problem.line == 1 else 0) + problem.column,
+                    "message": msg,
+                }
+            )
+            if extra_for_errors:
+                errors[-1].update(extra_for_errors)
+    except Exception as exc:
+        error = str(exc).replace("\n", " / ")
+        errors.append(
+            {
+                "path": path,
+                "line": row_offset + 1,
+                "col": col_offset + 1,
+                "message": (
+                    f"Internal error while linting YAML: exception {type(exc)}:"
+                    f" {error}; traceback: {traceback.format_exc()!r}"
+                ),
+            }
+        )
+        if extra_for_errors:
+            errors[-1].update(extra_for_errors)
+
+
+def process_rst_file(
+    errors: list[dict[str, t.Any]],
+    path: str,
+    config: YamlLintConfig,
+) -> None:
+    try:
+        with open(path, "rt", encoding="utf-8") as f:
+            content = f.read()
+    except Exception as exc:
+        errors.append(
+            {
+                "path": path,
+                "line": 1,
+                "col": 1,
+                "message": (
+                    f"Error while reading content: {type(exc)}:"
+                    f" {exc}; traceback: {traceback.format_exc()!r}"
+                ),
+            }
+        )
+        return
+
+    def warn_unknown_block(line: int | str, col: int, content: str) -> None:
+        errors.append(
+            {
+                "path": path,
+                "line": line,
+                "col": col,
+                "message": (
+                    "Warning: found unknown literal block! Check for double colons '::'."
+                    " If that is not the cause, please report this warning."
+                    " It might indicate a bug in the checker or an unsupported Sphinx directive."
+                    f" Content: {content!r}"
+                ),
+            }
+        )
+
+    for code_block in find_code_blocks(
+        content,
+        path=path,
+        root_prefix="docs/docsite/rst",
+        warn_unknown_block=warn_unknown_block,
+    ):
+        if (code_block.language or "").lower() not in YAML_LANGUAGES:
+            continue
+
+        extra_for_errors = {}
+        if not code_block.position_exact:
+            extra_for_errors["note"] = (
+                "The code block could not be exactly located in the source file."
+                " The line/column numbers might be off."
+            )
+
+        # Parse the (remaining) string content
+        lint(
+            errors=errors,
+            path=path,
+            data=code_block.content,
+            row_offset=code_block.row_offset,
+            col_offset=code_block.col_offset,
+            config=config,
+            extra_for_errors=extra_for_errors,
+        )
+
+
+def main() -> int:
+    """Main entry point."""
+    paths, extra_data = setup()
+    config: str | None = extra_data.get("config")
+
+    if config:
+        yamllint_config = YamlLintConfig(file=config)
+    else:
+        yamllint_config = YamlLintConfig(content="extends: default")
+
+    errors: list[dict[str, t.Any]] = []
+    for path in paths:
+        if not os.path.isfile(path):
+            continue
+        process_rst_file(errors, path, yamllint_config)
+
+    errors.sort(
+        key=lambda error: (
+            error["path"],
+            error["line"] if isinstance(error["line"], int) else 0,
+            error["col"],
+            error["message"],
+        )
+    )
+    for error in errors:
+        prefix = f"{error['path']}:{error['line']}:{error['col']}: "
+        msg = error["message"]
+        if "note" in error:
+            msg = f"{msg}\nNote: {error['note']}"
+        for i, line in enumerate(msg.splitlines()):
+            print(f"{prefix}{line}")
+            if i == 0:
+                prefix = " " * len(prefix)
+
+    return len(errors) > 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
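For orientation, here is a small, self-contained sketch (not part of the diff or the wheel) of the pattern the new checker builds on: run yamllint over an in-memory YAML snippet and print findings in the same path:line:col: message shape that main() above produces. The file name and snippet are invented for illustration; the yamllint calls mirror the ones used by rst-yamllint.py.

# Illustrative sketch only -- not shipped in the package.
import io

from yamllint import linter
from yamllint.config import YamlLintConfig

config = YamlLintConfig(content="extends: default")
snippet = "key: value\nkey: duplicate\n"  # hypothetical YAML block lifted from an RST file
rst_path = "docs/docsite/rst/guide.rst"  # hypothetical path

for problem in linter.run(io.StringIO(snippet), config, rst_path):
    if problem.level not in ("warning", "error"):
        continue
    msg = f"{problem.level}: {problem.desc}"
    if problem.rule:
        msg += f" ({problem.rule})"
    # Prints findings such as "docs/docsite/rst/guide.rst:2:1: error: ... (key-duplicates)"
    print(f"{rst_path}:{problem.line}:{problem.column}: {msg}")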

antsibull_nox/interpret_config.py CHANGED
@@ -111,6 +111,22 @@ def _add_sessions(sessions: Sessions) -> None:
             run_black_modules=sessions.lint.run_black_modules,
             black_config=sessions.lint.black_config,
             black_package=sessions.lint.black_package,
+            run_ruff_format=sessions.lint.run_ruff_format,
+            ruff_format_config=sessions.lint.ruff_format_config
+            or sessions.lint.ruff_config,
+            ruff_format_package=sessions.lint.ruff_format_package
+            or sessions.lint.ruff_package,
+            run_ruff_autofix=sessions.lint.run_ruff_autofix,
+            ruff_autofix_config=sessions.lint.ruff_autofix_config
+            or sessions.lint.ruff_config,
+            ruff_autofix_package=sessions.lint.ruff_autofix_package
+            or sessions.lint.ruff_package,
+            ruff_autofix_select=sessions.lint.ruff_autofix_select,
+            run_ruff_check=sessions.lint.run_ruff_check,
+            ruff_check_config=sessions.lint.ruff_check_config
+            or sessions.lint.ruff_config,
+            ruff_check_package=sessions.lint.ruff_check_package
+            or sessions.lint.ruff_package,
             run_flake8=sessions.lint.run_flake8,
             flake8_config=sessions.lint.flake8_config,
             flake8_package=sessions.lint.flake8_package,
@@ -124,7 +140,9 @@ def _add_sessions(sessions: Sessions) -> None:
             yamllint_config=sessions.lint.yamllint_config,
             yamllint_config_plugins=sessions.lint.yamllint_config_plugins,
             yamllint_config_plugins_examples=sessions.lint.yamllint_config_plugins_examples,
+            yamllint_config_extra_docs=sessions.lint.yamllint_config_extra_docs,
             yamllint_package=sessions.lint.yamllint_package,
+            yamllint_antsibull_docutils_package=sessions.lint.yamllint_antsibull_docutils_package,
             run_mypy=sessions.lint.run_mypy,
             mypy_config=sessions.lint.mypy_config,
             mypy_package=sessions.lint.mypy_package,
@@ -139,6 +157,13 @@ def _add_sessions(sessions: Sessions) -> None:
             ansible_core_package=sessions.docs_check.ansible_core_package,
             validate_collection_refs=sessions.docs_check.validate_collection_refs,
             extra_collections=sessions.docs_check.extra_collections,
+            codeblocks_restrict_types=sessions.docs_check.codeblocks_restrict_types,
+            codeblocks_restrict_type_exact_case=(
+                sessions.docs_check.codeblocks_restrict_type_exact_case
+            ),
+            codeblocks_allow_without_type=sessions.docs_check.codeblocks_allow_without_type,
+            codeblocks_allow_literal_blocks=sessions.docs_check.codeblocks_allow_literal_blocks,
+            antsibull_docutils_package=sessions.docs_check.antsibull_docutils_package,
         )
     if sessions.license_check:
         add_license_check(
@@ -179,6 +204,13 @@ def _add_sessions(sessions: Sessions) -> None:
             action_groups_config=_convert_action_groups(
                 sessions.extra_checks.action_groups_config
             ),
+            run_no_trailing_whitespace=sessions.extra_checks.run_no_trailing_whitespace,
+            no_trailing_whitespace_skip_paths=(
+                sessions.extra_checks.no_trailing_whitespace_skip_paths
+            ),
+            no_trailing_whitespace_skip_directories=(
+                sessions.extra_checks.no_trailing_whitespace_skip_directories
+            ),
         )
     if sessions.build_import_check:
         add_build_import_check(
@@ -238,6 +270,7 @@ def _add_sessions(sessions: Sessions) -> None:
                 cfg.core_python_versions
             ),
             controller_python_versions_only=cfg.controller_python_versions_only,
+            ansible_vars_from_env_vars=cfg.ansible_vars_from_env_vars,
        )
    if sessions.ansible_lint:
        add_ansible_lint(

antsibull_nox/paths.py CHANGED
@@ -14,10 +14,10 @@ import atexit
 import functools
 import os
 import shutil
-import tempfile
 from pathlib import Path
 
 from antsibull_fileutils.copier import Copier, GitCopier
+from antsibull_fileutils.tempfile import ansible_mkdtemp
 from antsibull_fileutils.vcs import detect_vcs, list_git_files
 
 
@@ -181,8 +181,7 @@ def create_temp_directory(basename: str) -> Path:
     """
     Create a temporary directory outside the nox tree.
     """
-
-    path = Path(directory)
+    path = ansible_mkdtemp(prefix=basename)
 
     def cleanup() -> None:
         remove_path(path)

antsibull_nox/sessions/ansible_test.py CHANGED
@@ -16,6 +16,7 @@ from collections.abc import Callable
 from pathlib import Path
 
 import nox
+from antsibull_fileutils.yaml import store_yaml_file
 
 from ..ansible import (
     AnsibleCoreVersion,
@@ -168,7 +169,7 @@ def add_ansible_test_sanity_test_session(
     """
     Add generic ansible-test sanity test session.
     """
-    command = ["sanity", "--
+    command = ["sanity", "--color", "-v", "--docker"]
     if skip_tests:
         for test in skip_tests:
             command.extend(["--skip", test])
@@ -302,7 +303,7 @@ def add_ansible_test_unit_test_session(
     add_ansible_test_session(
         name=name,
         description=description,
-        ansible_test_params=["units", "--
+        ansible_test_params=["units", "--color", "-v", "--docker"],
         extra_deps_files=["tests/unit/requirements.yml"],
         default=default,
         ansible_core_version=ansible_core_version,
@@ -397,6 +398,7 @@ def add_ansible_test_integration_sessions_default_container(
         dict[str | AnsibleCoreVersion, list[str | Version]] | None
     ) = None,
     controller_python_versions_only: bool = False,
+    ansible_vars_from_env_vars: dict[str, str] | None = None,
     default: bool = False,
 ) -> None:
     """
@@ -409,6 +411,18 @@ def add_ansible_test_integration_sessions_default_container(
     controller Python versions.
     """
 
+    def callback_before() -> None:
+        if not ansible_vars_from_env_vars:
+            return
+
+        path = Path("tests", "integration", "integration_config.yml")
+        content: dict[str, t.Any] = {}
+        for ans_var, env_var in ansible_vars_from_env_vars.items():
+            value = os.environ.get(env_var)
+            if value is not None:
+                content[ans_var] = env_var
+        store_yaml_file(path, content, nice=True, sort_keys=True)
+
     def add_integration_tests(
         ansible_core_version: AnsibleCoreVersion,
         repo_name: str | None = None,
@@ -459,10 +473,10 @@ def add_ansible_test_integration_sessions_default_container(
             description=description,
             ansible_test_params=[
                 "integration",
+                "--color",
+                "-v",
                 "--docker",
                 "default",
-                "-v",
-                "--color",
                 "--python",
                 str(py_version),
             ],
@@ -470,6 +484,7 @@ def add_ansible_test_integration_sessions_default_container(
             ansible_core_version=ansible_core_version,
             ansible_core_repo_name=repo_name,
             ansible_core_branch_name=branch_name,
+            callback_before=callback_before,
             default=False,
             register_name="integration",
             register_extra_data={

antsibull_nox/sessions/build_import_check.py CHANGED
@@ -17,12 +17,7 @@ from pathlib import Path
 import nox
 
 from ..collection import (
-
-    load_collection_data_from_disk,
-)
-from ..paths import (
-    copy_collection,
-    remove_path,
+    build_collection,
 )
 from .utils import (
     ci_group,
@@ -57,41 +52,16 @@ def add_build_import_check(
     def build_import_check(session: nox.Session) -> None:
         install(session, *compose_dependencies())
 
-
-        collection_dir = tmp / "collection"
-        remove_path(collection_dir)
-        copy_collection(Path.cwd(), collection_dir)
-
-        collection = load_collection_data_from_disk(
-            collection_dir, accept_manifest=False
-        )
-        version = collection.version
-        if not version:
-            version = "0.0.1"
-        force_collection_version(collection_dir, version=version)
-
-        with session.chdir(collection_dir):
-            build_ran = session.run("ansible-galaxy", "collection", "build") is not None
-
-        tarball = (
-            collection_dir
-            / f"{collection.namespace}-{collection.name}-{version}.tar.gz"
-        )
-        if build_ran and not tarball.is_file():
-            files = "\n".join(
-                f"* {path.name}"
-                for path in collection_dir.iterdir()
-                if not path.is_dir()
-            )
-            session.error(f"Cannot find file {tarball}! List of all files:\n{files}")
+        tarball, _, __ = build_collection(session)
 
-        if run_galaxy_importer and tarball
+        if run_galaxy_importer and tarball:
             env = {}
             if galaxy_importer_config_path:
                 env["GALAXY_IMPORTER_CONFIG"] = str(
                     Path(galaxy_importer_config_path).absolute()
                 )
-
+            assert tarball.parent is not None
+            with session.chdir(tarball.parent), silence_run_verbosity():
                 import_log = session.run(
                     "python",
                     "-m",
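The inline build logic deleted above is replaced by build_collection() from the new antsibull_nox/collection/build.py (+69 lines, not shown in this diff). As a rough sketch only, based purely on the deleted lines, a helper of that shape could look like the following; the real function's signature and the two extra return values (discarded by the caller as _, __) are not visible here and are assumptions.

# Rough sketch only -- NOT the actual antsibull_nox.collection.build_collection().
from pathlib import Path

import nox


def build_collection_sketch(
    session: nox.Session, collection_dir: Path
) -> tuple[Path | None, object, object]:
    # Build the collection artifact with ansible-galaxy, as the deleted code did.
    with session.chdir(collection_dir):
        build_ran = session.run("ansible-galaxy", "collection", "build") is not None
    # The deleted code computed the exact "<namespace>-<name>-<version>.tar.gz" name
    # from the collection metadata; this sketch simply picks up whatever tarball was built.
    tarballs = sorted(collection_dir.glob("*.tar.gz"))
    if build_ran and not tarballs:
        session.error(f"Cannot find a built collection tarball in {collection_dir}!")
    # The two trailing values are placeholders for whatever the real helper returns.
    return (tarballs[0] if tarballs else None), None, None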

antsibull_nox/sessions/docs_check.py CHANGED
@@ -11,14 +11,35 @@ Create nox docs check session.
 from __future__ import annotations
 
 import typing as t
+from pathlib import Path
 
 import nox
 
+from ..paths import (
+    list_all_files,
+)
 from .collections import (
     CollectionSetup,
     prepare_collections,
 )
-from .utils import
+from .utils import (
+    install,
+    run_bare_script,
+)
+
+
+def find_extra_docs_rst_files() -> list[Path]:
+    """
+    Find all RST extra document files.
+    """
+    all_files = list_all_files()
+    cwd = Path.cwd()
+    extra_docs_dir = cwd / "docs" / "docsite" / "rst"
+    return [
+        file
+        for file in all_files
+        if file.is_relative_to(extra_docs_dir) and file.name.lower().endswith((".rst"))
+    ]
 
 
 def add_docs_check(
@@ -28,15 +49,47 @@ def add_docs_check(
     ansible_core_package: str = "ansible-core",
     validate_collection_refs: t.Literal["self", "dependent", "all"] | None = None,
     extra_collections: list[str] | None = None,
+    codeblocks_restrict_types: list[str] | None = None,
+    codeblocks_restrict_type_exact_case: bool = True,
+    codeblocks_allow_without_type: bool = True,
+    codeblocks_allow_literal_blocks: bool = True,
+    antsibull_docutils_package: str = "antsibull-docutils",
 ) -> None:
     """
     Add docs-check session for linting.
     """
+    run_extra_checks = (
+        codeblocks_restrict_types is not None
+        or not codeblocks_allow_without_type
+        or not codeblocks_allow_literal_blocks
+    )
 
     def compose_dependencies() -> list[str]:
         deps = [antsibull_docs_package, ansible_core_package]
+        if run_extra_checks:
+            deps.append(antsibull_docutils_package)
         return deps
 
+    def execute_extra_checks(session: nox.Session) -> None:
+        all_extra_docs = find_extra_docs_rst_files()
+        if not all_extra_docs:
+            session.warn(
+                "Skipping codeblock checks since no appropriate RST file was found..."
+            )
+            return
+        run_bare_script(
+            session,
+            "rst-extra",
+            use_session_python=True,
+            files=all_extra_docs,
+            extra_data={
+                "codeblocks_restrict_types": codeblocks_restrict_types,
+                "codeblocks_restrict_type_exact_case": codeblocks_restrict_type_exact_case,
+                "codeblocks_allow_without_type": codeblocks_allow_without_type,
+                "codeblocks_allow_literal_blocks": codeblocks_allow_literal_blocks,
+            },
+        )
+
     def execute_antsibull_docs(
         session: nox.Session, prepared_collections: CollectionSetup
     ) -> None:
@@ -61,6 +114,8 @@ def add_docs_check(
             extra_collections=extra_collections,
             install_out_of_tree=True,
         )
+        if run_extra_checks:
+            execute_extra_checks(session)
         if not prepared_collections:
             session.warn("Skipping antsibull-docs...")
         if prepared_collections:

antsibull_nox/sessions/extra_checks.py CHANGED
@@ -58,6 +58,10 @@ def add_extra_checks(
     # action-groups:
     run_action_groups: bool = False,
     action_groups_config: list[ActionGroup] | None = None,
+    # no-trailing-whitespace:
+    run_no_trailing_whitespace: bool = False,
+    no_trailing_whitespace_skip_paths: list[str] | None = None,
+    no_trailing_whitespace_skip_directories: list[str] | None = None,
 ) -> None:
     """
     Add extra-checks session for extra checks.
@@ -94,11 +98,23 @@ def add_extra_checks(
             },
         )
 
+    def execute_no_trailing_whitespace(session: nox.Session) -> None:
+        run_bare_script(
+            session,
+            "no-trailing-whitespace",
+            extra_data={
+                "skip_paths": no_trailing_whitespace_skip_paths or [],
+                "skip_directories": no_trailing_whitespace_skip_directories or [],
+            },
+        )
+
     def extra_checks(session: nox.Session) -> None:
         if run_no_unwanted_files:
             execute_no_unwanted_files(session)
         if run_action_groups:
             execute_action_groups(session)
+        if run_no_trailing_whitespace:
+            execute_no_trailing_whitespace(session)
 
     extra_checks.__doc__ = compose_description(
         prefix={
@@ -112,6 +128,9 @@ def add_extra_checks(
                 else False
             ),
             "action-groups": "validate action groups" if run_action_groups else False,
+            "no-trailing-whitespace": (
+                "avoid trailing whitespace" if run_no_trailing_whitespace else False
+            ),
         },
     )
    nox.session(
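The no-trailing-whitespace check added above delegates to the new antsibull_nox/data/no-trailing-whitespace.py (+59 lines, not shown in this diff) via run_bare_script(). As an illustration only, a checker that honors the skip_paths / skip_directories keys passed in extra_data could look roughly like this; it is not the packaged script.

# Illustrative sketch only -- not the shipped no-trailing-whitespace.py.
import sys


def check_file(path: str) -> list[str]:
    errors: list[str] = []
    with open(path, "rt", encoding="utf-8") as f:
        for line_no, line in enumerate(f, 1):
            stripped = line.rstrip("\r\n")
            if stripped != stripped.rstrip():
                # Report the column of the first trailing whitespace character.
                errors.append(
                    f"{path}:{line_no}:{len(stripped.rstrip()) + 1}: found trailing whitespace"
                )
    return errors


def main(paths: list[str], skip_paths: list[str], skip_directories: list[str]) -> int:
    errors: list[str] = []
    for path in paths:
        if path in skip_paths:
            continue
        if any(path.startswith(directory.rstrip("/") + "/") for directory in skip_directories):
            continue
        errors.extend(check_file(path))
    for error in errors:
        print(error)
    return 1 if errors else 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:], [], []))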
|