antsibull-nox 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
antsibull_nox/__init__.py CHANGED
@@ -17,7 +17,7 @@ from .config import (
  from .interpret_config import interpret_config
  from .sessions.ansible_test import add_ansible_test_session

- __version__ = "0.5.0"
+ __version__ = "0.7.0"


  def load_antsibull_nox_toml() -> None:
antsibull_nox/ansible.py CHANGED
@@ -87,13 +87,13 @@ _SUPPORTED_CORE_VERSIONS: dict[Version, AnsibleCoreInfo] = {
  ["3.11", "3.12", "3.13"],
  ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"],
  ],
+ "2.20": [
+ ["3.11", "3.12", "3.13"],
+ ["3.9", "3.10", "3.11", "3.12", "3.13"],
+ ],
  # The following might need updates. Look for the "``ansible-core`` support matrix" table in:
  # https://github.com/ansible/ansible-documentation/blob/devel/docs/docsite/rst/reference_appendices/release_and_maintenance.rst?plain=1
  # It contains commented-out entries for future ansible-core versions.
- "2.20": [
- ["3.12", "3.13", "3.14"],
- ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"],
- ],
  "2.21": [
  ["3.12", "3.13", "3.14"],
  ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"],
@@ -118,8 +118,8 @@ _SUPPORTED_CORE_VERSIONS: dict[Version, AnsibleCoreInfo] = {
  }

  _MIN_SUPPORTED_VERSION = Version.parse("2.9")
- _CURRENT_DEVEL_VERSION = Version.parse("2.19")
- _CURRENT_MILESTONE_VERSION = Version.parse("2.19")
+ _CURRENT_DEVEL_VERSION = Version.parse("2.20")
+ _CURRENT_MILESTONE_VERSION = Version.parse("2.20")


  def get_ansible_core_info(
@@ -10,10 +10,7 @@ Handle Ansible collections.

  from __future__ import annotations

- from pathlib import Path
-
- from antsibull_fileutils.yaml import load_yaml_file, store_yaml_file
-
+ from .build import build_collection
  from .data import CollectionData, CollectionSource, SetupResult
  from .install import (
  Runner,
@@ -21,27 +18,8 @@ from .install import (
  setup_collections,
  setup_current_tree,
  )
- from .search import GALAXY_YML, CollectionList, load_collection_data_from_disk
-
-
- def force_collection_version(path: Path, *, version: str) -> bool:
- """
- Make sure galaxy.yml contains this version.
-
- Returns ``True`` if the version was changed, and ``False`` if the version
- was already set to this value.
- """
- galaxy_yml = path / GALAXY_YML
- try:
- data = load_yaml_file(galaxy_yml)
- except Exception as exc:
- raise ValueError(f"Cannot parse {galaxy_yml}: {exc}") from exc
- if data.get("version") == version:
- return False
- data["version"] = version
- store_yaml_file(galaxy_yml, data)
- return True
-
+ from .search import CollectionList, load_collection_data_from_disk
+ from .utils import force_collection_version

  __all__ = [
  "CollectionData",
@@ -49,6 +27,8 @@ __all__ = [
  "CollectionSource",
  "SetupResult",
  "Runner",
+ "build_collection",
+ "force_collection_version",
  "load_collection_data_from_disk",
  "setup_collections",
  "setup_current_tree",
@@ -0,0 +1,69 @@
+ # Author: Felix Fontein <felix@fontein.de>
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
+ # https://www.gnu.org/licenses/gpl-3.0.txt)
+ # SPDX-License-Identifier: GPL-3.0-or-later
+ # SPDX-FileCopyrightText: 2025, Ansible Project
+
+ """
+ Build Ansible collections.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ import nox
+
+ from ..paths import (
+     copy_collection,
+     remove_path,
+ )
+ from .data import (
+     CollectionData,
+ )
+ from .search import (
+     load_collection_data_from_disk,
+ )
+ from .utils import (
+     force_collection_version,
+ )
+
+
+ def build_collection(
+     session: nox.Session,
+ ) -> tuple[Path | None, CollectionData, str]:
+     """
+     Build the current collection.
+
+     Return a tuple (path, collection_data, version), where path might be None in case
+     commands are not actually run.
+     """
+     tmp = Path(session.create_tmp())
+     collection_dir = tmp / "collection"
+     remove_path(collection_dir)
+     copy_collection(Path.cwd(), collection_dir)
+
+     collection = load_collection_data_from_disk(collection_dir, accept_manifest=False)
+     version = collection.version
+     if not version:
+         version = "0.0.1"
+         force_collection_version(collection_dir, version=version)
+
+     with session.chdir(collection_dir):
+         build_ran = session.run("ansible-galaxy", "collection", "build") is not None
+
+     tarball = (
+         collection_dir / f"{collection.namespace}-{collection.name}-{version}.tar.gz"
+     )
+     if build_ran and not tarball.is_file():
+         files = "\n".join(
+             f"* {path.name}" for path in collection_dir.iterdir() if not path.is_dir()
+         )
+         session.error(f"Cannot find file {tarball}! List of all files:\n{files}")
+
+     return tarball if build_ran else None, collection, version
+
+
+ __all__ = [
+     "build_collection",
+ ]
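
A hedged sketch of how a noxfile session might call the new helper; the session name and the logging are illustrative, not taken from this diff:

import nox

from antsibull_nox.collection import build_collection  # import path assumed from __all__ above


@nox.session(name="build-example")  # hypothetical session name
def build_example(session: nox.Session) -> None:
    # Returns (tarball_path, collection_data, version); tarball_path is None
    # when nox only lists the commands instead of running them.
    tarball, collection, version = build_collection(session)
    if tarball is not None:
        session.log(f"Built {collection.namespace}.{collection.name} {version}: {tarball}")
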
@@ -0,0 +1,41 @@
+ # Author: Felix Fontein <felix@fontein.de>
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or
+ # https://www.gnu.org/licenses/gpl-3.0.txt)
+ # SPDX-License-Identifier: GPL-3.0-or-later
+ # SPDX-FileCopyrightText: 2025, Ansible Project
+
+ """
+ Build Ansible collections.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ from antsibull_fileutils.yaml import load_yaml_file, store_yaml_file
+
+ from .search import GALAXY_YML
+
+
+ def force_collection_version(path: Path, *, version: str) -> bool:
+     """
+     Make sure galaxy.yml contains this version.
+
+     Returns ``True`` if the version was changed, and ``False`` if the version
+     was already set to this value.
+     """
+     galaxy_yml = path / GALAXY_YML
+     try:
+         data = load_yaml_file(galaxy_yml)
+     except Exception as exc:
+         raise ValueError(f"Cannot parse {galaxy_yml}: {exc}") from exc
+     if data.get("version") == version:
+         return False
+     data["version"] = version
+     store_yaml_file(galaxy_yml, data)
+     return True
+
+
+ __all__ = [
+     "force_collection_version",
+ ]
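
The helper keeps its previous behavior after the move; a minimal call looks like this (the path and version are illustrative):

from pathlib import Path

from antsibull_nox.collection import force_collection_version  # re-exported above

# Writes version 1.2.3 into <checkout>/galaxy.yml unless it already contains that value;
# returns True only if the file was modified.
changed = force_collection_version(Path("."), version="1.2.3")
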
antsibull_nox/config.py CHANGED
@@ -78,6 +78,8 @@ class SessionLint(_BaseModel):

  default: bool = True
  extra_code_files: list[str] = []
+ ruff_config: t.Optional[p.FilePath] = None
+ ruff_package: str = "ruff"

  # isort:
  run_isort: bool = True
@@ -90,6 +92,22 @@ class SessionLint(_BaseModel):
  black_config: t.Optional[p.FilePath] = None
  black_package: str = "black"

+ # ruff format:
+ run_ruff_format: bool = False
+ ruff_format_config: t.Optional[p.FilePath] = None
+ ruff_format_package: t.Optional[str] = None
+
+ # ruff autofix:
+ run_ruff_autofix: bool = False
+ ruff_autofix_config: t.Optional[p.FilePath] = None
+ ruff_autofix_package: t.Optional[str] = None
+ ruff_autofix_select: list[str] = []
+
+ # ruff check:
+ run_ruff_check: bool = False
+ ruff_check_config: t.Optional[p.FilePath] = None
+ ruff_check_package: t.Optional[str] = None
+
  # flake8:
  run_flake8: bool = True
  flake8_config: t.Optional[p.FilePath] = None
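
The new ruff-based checks are opt-in (all three run_ruff_* toggles default to False, and each *_package presumably falls back to the shared ruff_package when left at None). A hedged sketch of enabling one of them programmatically, assuming the remaining SessionLint fields also have defaults as the visible ones do; in practice these values would normally come from the antsibull-nox configuration file:

from antsibull_nox.config import SessionLint  # module path per the config.py header above

lint = SessionLint(
    run_ruff_autofix=True,
    ruff_autofix_select=["I"],  # illustrative rule selection
)
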
@@ -108,7 +126,9 @@
  yamllint_config: t.Optional[p.FilePath] = None
  yamllint_config_plugins: t.Optional[p.FilePath] = None
  yamllint_config_plugins_examples: t.Optional[p.FilePath] = None
+ yamllint_config_extra_docs: t.Optional[p.FilePath] = None
  yamllint_package: str = "yamllint"
+ yamllint_antsibull_docutils_package: str = "antsibull-docutils"

  # mypy:
  run_mypy: bool = True
@@ -133,6 +153,12 @@ class SessionDocsCheck(_BaseModel):
  validate_collection_refs: t.Optional[t.Literal["self", "dependent", "all"]] = None
  extra_collections: list[CollectionName] = []

+ codeblocks_restrict_types: t.Optional[list[str]] = None
+ codeblocks_restrict_type_exact_case: bool = True
+ codeblocks_allow_without_type: bool = True
+ codeblocks_allow_literal_blocks: bool = True
+ antsibull_docutils_package: str = "antsibull-docutils"
+

  class SessionLicenseCheck(_BaseModel):
  """
@@ -178,8 +204,8 @@ class SessionExtraChecks(_BaseModel):
  no_unwanted_files_other_extensions: list[str] = [".py", ".pyi"]
  no_unwanted_files_yaml_extensions: list[str] = [".yml", ".yaml"]
  no_unwanted_files_skip_paths: list[str] = []
- no_unwanted_files_skip_directories: t.Optional[list[str]] = []
- no_unwanted_files_yaml_directories: t.Optional[list[str]] = [
+ no_unwanted_files_skip_directories: list[str] = []
+ no_unwanted_files_yaml_directories: list[str] = [
  "plugins/test/",
  "plugins/filter/",
  ]
@@ -189,6 +215,11 @@ class SessionExtraChecks(_BaseModel):
  run_action_groups: bool = False
  action_groups_config: list[ActionGroup] = []

+ # no-trailing-whitespace:
+ run_no_trailing_whitespace: bool = False
+ no_trailing_whitespace_skip_paths: list[str] = []
+ no_trailing_whitespace_skip_directories: list[str] = []
+

  class SessionBuildImportCheck(_BaseModel):
  """
@@ -59,12 +59,20 @@ def load_redirects(

  # Compare meta/runtime.yml content with config
  config_groups = {cfg.name for cfg in config}
- for action_group in action_groups:
+ for action_group, elements in action_groups.items():
  if action_group not in config_groups:
+ if len(elements) == 1 and isinstance(elements[0], dict):
+ # Special case: if an action group is there with a single metadata entry,
+ # we don't complain that it shouldn't be there.
+ continue
  errors.append(
  f"{meta_runtime}: found unknown action group"
  f" {action_group!r}; likely noxfile needs updating"
  )
+ else:
+ action_groups[action_group] = [
+ element for element in elements if isinstance(element, str)
+ ]
  for action_group in config:
  if action_group.name not in action_groups:
  errors.append(
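
To illustrate the special case, here is a hedged example of the data shapes involved; the group names are made up and the metadata key layout follows the usual meta/runtime.yml action_groups format:

# Hypothetical parsed meta/runtime.yml action_groups content:
action_groups = {
    "docker": [{"metadata": {"extend_group": ["other.collection.group"]}}, "container", "image"],
    "aws": [{"metadata": {"extend_group": ["amazon.aws.aws"]}}],
}
# If "aws" is not listed in the noxfile config, it is no longer reported as an unknown
# group because it consists of a single metadata dict. For configured groups such as
# "docker", dict entries are filtered out before the member lists are compared.
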
@@ -33,6 +33,8 @@ def setup() -> tuple[list[str], dict[str, t.Any]]:
  paths = get_list_of_strings(data, "paths")
  except ValueError as exc:
  raise ValueError(f"Invalid JSON content in {path}: {exc}") from exc
+ if paths is None:
+ raise ValueError(f"Broken JSON content in {path}: path is missing")
  data.pop("paths")
  return paths, data
  if len(sys.argv) >= 2:
@@ -45,12 +47,33 @@ def setup() -> tuple[list[str], dict[str, t.Any]]:
  return sys.stdin.read().splitlines(), {}


+ _T = t.TypeVar("_T", default=None)
+
+
+ @t.overload
+ def get_list_of_strings(
+ data: dict[str, t.Any],
+ key: str,
+ *,
+ default: None = None,
+ ) -> list[str] | None: ...
+
+
+ @t.overload
+ def get_list_of_strings(
+ data: dict[str, t.Any],
+ key: str,
+ *,
+ default: _T,
+ ) -> list[str] | _T: ...
+
+
  def get_list_of_strings(
  data: dict[str, t.Any],
  key: str,
  *,
- default: list[str] | None = None,
- ) -> list[str]:
+ default: _T | None = None,
+ ) -> list[str] | _T | None:
  """
  Retrieves a list of strings from key ``key`` of the JSON object ``data``.

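
With the defaulted TypeVar and the two overloads, callers get more precise types depending on whether they pass a default. A short sketch; the inferred types follow from the annotations above:

from antsibull_nox_data_util import get_list_of_strings  # as the data scripts import it

data = {"packages": ["ruff"]}

a = get_list_of_strings(data, "packages")               # overload 1: list[str] | None
b = get_list_of_strings(data, "packages", default=[])   # overload 2: list[str]
# Presumably a missing key yields the default (None here), which is why setup()
# above now checks the result of get_list_of_strings(data, "paths") for None.
c = get_list_of_strings(data, "missing")
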
@@ -0,0 +1,36 @@
+ #!/usr/bin/env python
+
+ # Copyright (c) 2025, Felix Fontein <felix@fontein.de>
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
+ # or https://www.gnu.org/licenses/gpl-3.0.txt)
+ # SPDX-License-Identifier: GPL-3.0-or-later
+
+ """Retrieve the version of one or more packages."""
+
+ from __future__ import annotations
+
+ import json
+ import sys
+ from importlib.metadata import PackageNotFoundError, version
+
+ from antsibull_nox_data_util import get_list_of_strings, setup  # type: ignore
+
+
+ def main() -> int:
+     """Main entry point."""
+     paths, extra_data = setup()
+
+     packages = get_list_of_strings(extra_data, "packages", default=[])
+
+     result: dict[str, str | None] = {}
+     for package in packages:
+         try:
+             result[package] = version(package)
+         except PackageNotFoundError:
+             result[package] = None
+     print(json.dumps(result))
+     return 0
+
+
+ if __name__ == "__main__":
+     sys.exit(main())
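
The script's input/output contract, as implied by setup() and main() above; the concrete values below are illustrative:

# Extra data passed alongside "paths" in the JSON input that setup() reads:
#     {"paths": [], "packages": ["ruff", "antsibull-docutils"]}
# Printed result: a JSON object mapping each requested package to its installed
# version string, or null when importlib.metadata raises PackageNotFoundError:
#     {"ruff": "0.11.0", "antsibull-docutils": null}
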
@@ -0,0 +1,59 @@
+ #!/usr/bin/env python
+
+ # Copyright (c) Ansible Project
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
+ # or https://www.gnu.org/licenses/gpl-3.0.txt)
+ # SPDX-License-Identifier: GPL-3.0-or-later
+
+ """Prevent unwanted files from being added to the source tree."""
+
+ from __future__ import annotations
+
+ import os
+ import sys
+
+ from antsibull_nox.data.antsibull_nox_data_util import (
+     get_list_of_strings,
+     setup,
+ )
+
+
+ def main() -> int:
+     """Main entry point."""
+     paths, extra_data = setup()
+
+     skip_paths = set(get_list_of_strings(extra_data, "skip_paths", default=[]))
+
+     skip_directories = tuple(
+         get_list_of_strings(extra_data, "skip_directories", default=[])
+     )
+
+     errors: list[str] = []
+     for path in paths:
+         if path in skip_paths:
+             continue
+
+         if any(path.startswith(skip_directory) for skip_directory in skip_directories):
+             continue
+
+         if not os.path.isfile(path):
+             continue
+
+         try:
+             with open(path, "r", encoding="utf-8") as f:
+                 for i, line in enumerate(f):
+                     line = line.rstrip("\n\r")
+                     if line.rstrip() != line:
+                         errors.append(f"{path}:{i + 1}: found trailing whitespace")
+         except UnicodeDecodeError:
+             errors.append(f"{path}: cannot parse file as UTF-8")
+         except Exception as e:
+             errors.append(f"{path}: unexpected error: {e}")
+
+     for error in sorted(errors):
+         print(error)
+     return len(errors) > 0
+
+
+ if __name__ == "__main__":
+     sys.exit(main())
@@ -65,7 +65,7 @@ def main() -> int:
  skip_paths = set(get_list_of_strings(extra_data, "skip_paths", default=[]))

  skip_directories = tuple(
- get_list_of_strings(extra_data, "skip_prefixes", default=[])
+ get_list_of_strings(extra_data, "skip_directories", default=[])
  )

  yaml_directories = tuple(
@@ -0,0 +1,167 @@
+ #!/usr/bin/env python
+
+ # Copyright (c) 2025, Felix Fontein <felix@fontein.de>
+ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt
+ # or https://www.gnu.org/licenses/gpl-3.0.txt)
+ # SPDX-License-Identifier: GPL-3.0-or-later
+
+ """Make sure all plugin and module documentation adheres to yamllint."""
+
+ from __future__ import annotations
+
+ import os
+ import sys
+ import traceback
+ import typing as t
+
+ from antsibull_docutils.rst_code_finder import find_code_blocks
+ from antsibull_nox_data_util import get_bool, get_list_of_strings, setup  # type: ignore
+
+
+ def process_rst_file(
+     errors: list[dict[str, t.Any]],
+     path: str,
+     *,
+     codeblocks_restrict_types: list[str] | None,
+     codeblocks_restrict_type_exact_case: bool,
+     codeblocks_allow_without_type: bool,
+     codeblocks_allow_literal_blocks: bool,
+ ) -> None:
+     try:
+         with open(path, "rt", encoding="utf-8") as f:
+             content = f.read()
+     except Exception as exc:
+         errors.append(
+             {
+                 "path": path,
+                 "line": 1,
+                 "col": 1,
+                 "message": (
+                     f"Error while reading content: {type(exc)}:"
+                     f" {exc}; traceback: {traceback.format_exc()!r}"
+                 ),
+             }
+         )
+         return
+
+     def warn_unknown_block(line: int | str, col: int, content: str) -> None:
+         if not codeblocks_allow_literal_blocks:
+             errors.append(
+                 {
+                     "path": path,
+                     "line": line,
+                     "col": col,
+                     "message": (
+                         "Warning: found unknown literal block! Check for double colons '::'."
+                         " If that is not the cause, please report this warning."
+                         " It might indicate a bug in the checker"
+                         " or an unsupported Sphinx directive."
+                         f" Content: {content!r}"
+                     ),
+                 }
+             )
+
+     for code_block in find_code_blocks(
+         content,
+         path=path,
+         root_prefix="docs/docsite/rst",
+         warn_unknown_block=warn_unknown_block,
+     ):
+         error_data = {
+             "path": path,
+             "line": code_block.row_offset + 1,
+             "col": code_block.col_offset + 1,
+         }
+         if not code_block.position_exact:
+             error_data["note"] = (
+                 "The code block could not be exactly located in the source file."
+                 " The line/column numbers might be off."
+             )
+
+         if code_block.language is None:
+             if not codeblocks_allow_without_type:
+                 msg = "Every code block must have a language."
+                 if codeblocks_restrict_types is not None:
+                     langs = ", ".join(sorted(codeblocks_restrict_types))
+                     msg = f"{msg} Allowed languages are: {langs}"
+                 error_data["message"] = msg
+                 errors.append(error_data)
+             continue
+
+         if codeblocks_restrict_types is None:
+             continue
+
+         language = code_block.language
+         if not codeblocks_restrict_type_exact_case:
+             language = language.lower()
+
+         if language not in codeblocks_restrict_types:
+             langs = ", ".join(sorted(codeblocks_restrict_types))
+             msg = (
+                 f"Code block with disallowed language {code_block.language!r} found."
+                 f" Allowed languages are: {langs}"
+             )
+             error_data["message"] = msg
+             errors.append(error_data)
+
+
+ def main() -> int:
+     """Main entry point."""
+     paths, extra_data = setup()
+     codeblocks_restrict_types = get_list_of_strings(
+         extra_data, "codeblocks_restrict_types"
+     )
+     codeblocks_restrict_type_exact_case = get_bool(
+         extra_data, "codeblocks_restrict_type_exact_case", default=True
+     )
+     codeblocks_allow_without_type = get_bool(
+         extra_data, "codeblocks_allow_without_type", default=True
+     )
+     codeblocks_allow_literal_blocks = get_bool(
+         extra_data, "codeblocks_allow_literal_blocks", default=True
+     )
+
+     if (
+         codeblocks_restrict_types is not None
+         and not codeblocks_restrict_type_exact_case
+     ):
+         codeblocks_restrict_types = [
+             language.lower() for language in codeblocks_restrict_types
+         ]
+
+     errors: list[dict[str, t.Any]] = []
+     for path in paths:
+         if not os.path.isfile(path):
+             continue
+         process_rst_file(
+             errors,
+             path,
+             codeblocks_restrict_types=codeblocks_restrict_types,
+             codeblocks_restrict_type_exact_case=codeblocks_restrict_type_exact_case,
+             codeblocks_allow_without_type=codeblocks_allow_without_type,
+             codeblocks_allow_literal_blocks=codeblocks_allow_literal_blocks,
+         )
+
+     errors.sort(
+         key=lambda error: (
+             error["path"],
+             error["line"] if isinstance(error["line"], int) else 0,
+             error["col"],
+             error["message"],
+         )
+     )
+     for error in errors:
+         prefix = f"{error['path']}:{error['line']}:{error['col']}: "
+         msg = error["message"]
+         if "note" in error:
+             msg = f"{msg}\nNote: {error['note']}"
+         for i, line in enumerate(msg.splitlines()):
+             print(f"{prefix}{line}")
+             if i == 0:
+                 prefix = " " * len(prefix)
+
+     return len(errors) > 0
+
+
+ if __name__ == "__main__":
+     sys.exit(main())
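
A hedged sketch of what this checker operates on, reusing the antsibull_docutils call exactly as it appears above; the RST content and the restriction to YAML languages are illustrative:

from antsibull_docutils.rst_code_finder import find_code_blocks

RST = """\
Example
=======

.. code-block:: yaml+jinja

    key: "{{ value }}"
"""

for code_block in find_code_blocks(
    RST,
    path="docs/docsite/rst/example.rst",
    root_prefix="docs/docsite/rst",
    warn_unknown_block=lambda line, col, content: None,  # no-op; the real checker reports these
):
    # With codeblocks_restrict_types = ["yaml", "yaml+jinja"] this block passes; a block
    # typed "YAML" would only pass with codeblocks_restrict_type_exact_case disabled.
    print(code_block.language, code_block.row_offset + 1, code_block.col_offset + 1)
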