antsibull-nox 0.0.1__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
antsibull_nox/sessions.py CHANGED
@@ -10,13 +10,49 @@ Create nox sessions.
 
  from __future__ import annotations
 
- import contextlib
+ import json
  import os
  import shlex
+ import subprocess
+ import sys
+ import typing as t
+ from contextlib import contextmanager
+ from dataclasses import asdict, dataclass
+ from pathlib import Path
 
  import nox
 
- IN_CI = "GITHUB_ACTIONS" in os.environ
+ from .ansible import (
+     AnsibleCoreVersion,
+     get_ansible_core_info,
+     get_ansible_core_package_name,
+     get_supported_core_versions,
+ )
+ from .collection import (
+     CollectionData,
+     force_collection_version,
+     load_collection_data_from_disk,
+     setup_collections,
+     setup_current_tree,
+ )
+ from .data_util import prepare_data_script
+ from .paths import (
+     copy_collection,
+     copy_directory_tree_into,
+     create_temp_directory,
+     filter_paths,
+     find_data_directory,
+     list_all_files,
+     remove_path,
+ )
+ from .python import get_installed_python_versions
+ from .utils import Version
+
+ # https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables
+ # https://docs.gitlab.com/ci/variables/predefined_variables/#predefined-variables
+ # https://docs.travis-ci.com/user/environment-variables/#default-environment-variables
+ IN_CI = os.environ.get("CI") == "true"
+ IN_GITHUB_ACTIONS = bool(os.environ.get("GITHUB_ACTION"))
  ALLOW_EDITABLE = os.environ.get("ALLOW_EDITABLE", str(not IN_CI)).lower() in (
      "1",
      "true",
@@ -36,6 +72,28 @@ MODULE_PATHS = [
      "tests/unit/plugins/module_utils/",
  ]
 
+ _SESSIONS: dict[str, list[dict[str, t.Any]]] = {}
+
+
+ @contextmanager
+ def _ci_group(name: str) -> t.Iterator[None]:
+     """
+     Try to ensure that the output inside the context is printed in a collapsable group.
+
+     This is highly CI system dependent, and currently only works for GitHub Actions.
+     """
+     if IN_GITHUB_ACTIONS:
+         print(f"::group::{name}")
+     yield
+     if IN_GITHUB_ACTIONS:
+         print("::endgroup::")
+
+
+ def _register(name: str, data: dict[str, t.Any]) -> None:
+     if name not in _SESSIONS:
+         _SESSIONS[name] = []
+     _SESSIONS[name].append(data)
+
 
  def install(session: nox.Session, *args: str, editable: bool = False, **kwargs):
      """
@@ -52,132 +110,189 @@ def install(session: nox.Session, *args: str, editable: bool = False, **kwargs):
      session.install(*args, "-U", **kwargs)
 
 
- @contextlib.contextmanager
- def ansible_collection_root():
-     """
-     Context manager that changes to the root directory and yields the path of
-     the root directory and the prefix to the current working directory from the root.
-     """
-     cwd = os.getcwd()
-     root = os.path.normpath(os.path.join(cwd, "..", "..", ".."))
-     try:
-         os.chdir(root)
-         yield root, os.path.relpath(cwd, root)
-     finally:
-         os.chdir(cwd)
-
-
- def prefix_paths(paths: list[str], /, prefix: str) -> list[str]:
-     """
-     Prefix paths with the given prefix.
-     """
-     return [os.path.join(prefix, path) for path in paths]
-
-
- def match_path(path: str, is_file: bool, paths: list[str]) -> bool:
-     """
-     Check whether a path (that is a file or not) matches a given list of paths.
-     """
-     for check in paths:
-         if check == path:
-             return True
-         if not is_file:
-             if not check.endswith("/"):
-                 check += "/"
-             if path.startswith(check):
-                 return True
-     return False
-
-
- def restrict_paths(paths: list[str], restrict: list[str]) -> list[str]:
-     """
-     Restrict a list of paths with a given set of restrictions.
-     """
-     result = []
-     for path in paths:
-         is_file = os.path.isfile(path)
-         if not is_file and not path.endswith("/"):
-             path += "/"
-         if not match_path(path, is_file, restrict):
-             if not is_file:
-                 for check in restrict:
-                     if check.startswith(path) and os.path.exists(check):
-                         result.append(check)
-             continue
-         result.append(path)
-     return result
-
-
- def _scan_remove_paths(
-     path: str, remove: list[str], extensions: list[str] | None
- ) -> list[str]:
-     result = []
-     for root, dirs, files in os.walk(path, topdown=True):
-         if not root.endswith("/"):
-             root += "/"
-         if match_path(root, False, remove):
-             continue
-         if all(not check.startswith(root) for check in remove):
-             dirs[:] = []
-             result.append(root)
-             continue
-         for file in files:
-             if extensions and os.path.splitext(file)[1] not in extensions:
-                 continue
-             filepath = os.path.normpath(os.path.join(root, file))
-             if not match_path(filepath, True, remove):
-                 result.append(filepath)
-         for directory in list(dirs):
-             if directory == "__pycache__":
-                 continue
-             dirpath = os.path.normpath(os.path.join(root, directory))
-             if match_path(dirpath, False, remove):
-                 dirs.remove(directory)
-                 continue
-     return result
-
-
- def remove_paths(
-     paths: list[str], remove: list[str], extensions: list[str] | None
- ) -> list[str]:
-     """
-     Restrict a list of paths by removing paths.
-
-     If ``extensions`` is specified, only files matching this extension
-     will be considered when files need to be explicitly enumerated.
-     """
-     result = []
-     for path in paths:
-         is_file = os.path.isfile(path)
-         if not is_file and not path.endswith("/"):
-             path += "/"
-         if match_path(path, is_file, remove):
-             continue
-         if not is_file and any(check.startswith(path) for check in remove):
-             result.extend(_scan_remove_paths(path, remove, extensions))
-             continue
-         result.append(path)
-     return result
-
-
- def filter_paths(
-     paths: list[str],
-     /,
-     remove: list[str] | None = None,
-     restrict: list[str] | None = None,
-     extensions: list[str] | None = None,
- ) -> list[str]:
+ @dataclass
+ class CollectionSetup:
+     """
+     Information on the setup collections.
+     """
+
+     # The path of the ansible_collections directory where all dependent collections
+     # are installed. Is currently identical to current_root, but that might change
+     # or depend on options in the future.
+     collections_root: Path
+
+     # The directory in which ansible_collections can be found, as well as
+     # ansible_collections/<namespace>/<name> points to a copy of the current collection.
+     current_place: Path
+
+     # The path of the ansible_collections directory that contains the current collection.
+     # The following is always true:
+     #   current_root == current_place / "ansible_collections"
+     current_root: Path
+
+     # Data on the current collection (as in the repository).
+     current_collection: CollectionData
+
+     # The path of the current collection inside the collection tree below current_root.
+     # The following is always true:
+     #   current_path == current_root / current_collection.namespace / current_collection.name
+     current_path: Path
+
+     def prefix_current_paths(self, paths: list[str]) -> list[str]:
+         """
+         Prefix the list of given paths with ``current_path``.
+         """
+         result = []
+         for path in paths:
+             prefixed_path = (self.current_path / path).relative_to(self.current_place)
+             if prefixed_path.exists():
+                 result.append(str(prefixed_path))
+         return result
+
+
+ def _run_subprocess(args: list[str]) -> tuple[bytes, bytes]:
+     p = subprocess.run(args, check=True, capture_output=True)
+     return p.stdout, p.stderr
+
+
+ def prepare_collections(
+     session: nox.Session,
+     *,
+     install_in_site_packages: bool,
+     extra_deps_files: list[str | os.PathLike] | None = None,
+     extra_collections: list[str] | None = None,
+     install_out_of_tree: bool = False,  # can not be used with install_in_site_packages=True
+ ) -> CollectionSetup | None:
      """
-     Modifies a list of paths by restricting to and/or removing paths.
+     Install collections in site-packages.
      """
-     if restrict:
-         paths = restrict_paths(paths, restrict)
-     if remove:
-         paths = remove_paths(paths, remove, extensions)
-     return [path for path in paths if os.path.exists(path)]
+     if install_out_of_tree and install_in_site_packages:
+         raise ValueError(
+             "install_out_of_tree=True cannot be combined with install_in_site_packages=True"
+         )
+     if isinstance(session.virtualenv, nox.virtualenv.PassthroughEnv):
+         session.warn("No venv. Skip preparing collections...")
+         return None
+     if install_in_site_packages:
+         purelib = (
+             session.run(
+                 "python",
+                 "-c",
+                 "import sysconfig; print(sysconfig.get_path('purelib'))",
+                 silent=True,
+             )
+             or ""
+         ).strip()
+         if not purelib:
+             session.warn(
+                 "Cannot find site-packages (probably due to install-only run)."
+                 " Skip preparing collections..."
+             )
+             return None
+         place = Path(purelib)
+     elif install_out_of_tree:
+         place = create_temp_directory(f"antsibull-nox-{session.name}-collection-root-")
+     else:
+         place = Path(session.virtualenv.location) / "collection-root"
+     place.mkdir(exist_ok=True)
+     setup = setup_collections(
+         place,
+         _run_subprocess,
+         extra_deps_files=extra_deps_files,
+         extra_collections=extra_collections,
+         with_current=False,
+         global_cache_dir=session.cache_dir,
+     )
+     current_setup = setup_current_tree(place, setup.current_collection)
+     return CollectionSetup(
+         collections_root=setup.root,
+         current_place=place,
+         current_root=current_setup.root,
+         current_collection=setup.current_collection,
+         current_path=t.cast(Path, current_setup.current_path),
+     )
+
+
+ def _run_bare_script(
+     session: nox.Session,
+     /,
+     name: str,
+     *,
+     use_session_python: bool = False,
+     files: list[Path] | None = None,
+     extra_data: dict[str, t.Any] | None = None,
+ ) -> None:
+     if files is None:
+         files = list_all_files()
+     data = prepare_data_script(
+         session,
+         base_name=name,
+         paths=files,
+         extra_data=extra_data,
+     )
+     python = sys.executable
+     env = {}
+     if use_session_python:
+         python = "python"
+         env["PYTHONPATH"] = str(find_data_directory())
+     session.run(
+         python,
+         find_data_directory() / f"{name}.py",
+         "--data",
+         data,
+         external=True,
+         env=env,
+     )
+
+
+ def _compose_description(
+     *,
+     prefix: str | dict[t.Literal["one", "other"], str] | None = None,
+     programs: dict[str, str | bool | None],
+ ) -> str:
+     parts: list[str] = []
+
+     def add(text: str, *, comma: bool = False) -> None:
+         if parts:
+             if comma:
+                 parts.append(", ")
+             else:
+                 parts.append(" ")
+         parts.append(text)
+
+     active_programs = [
+         (program, value if isinstance(value, str) else None)
+         for program, value in programs.items()
+         if value not in (False, None)
+     ]
+
+     if prefix:
+         if isinstance(prefix, dict):
+             if len(active_programs) == 1 and "one" in prefix:
+                 add(prefix["one"])
+             else:
+                 add(prefix["other"])
+         else:
+             add(prefix)
+
+     for index, (program, value) in enumerate(active_programs):
+         if index + 1 == len(active_programs) and index > 0:
+             add("and", comma=index > 1)
+         add(program, comma=index > 0 and index + 1 < len(active_programs))
+         if value is not None:
+             add(f"({value})")
+
+     return "".join(parts)
 
 
- def add_lint(has_formatters: bool, has_codeqa: bool, has_typing: bool) -> None:
+ def add_lint(
+     *,
+     make_lint_default: bool,
+     has_formatters: bool,
+     has_codeqa: bool,
+     has_yamllint: bool,
+     has_typing: bool,
+ ) -> None:
      """
      Add nox meta session for linting.
      """
@@ -190,13 +305,33 @@ def add_lint(has_formatters: bool, has_codeqa: bool, has_typing: bool) -> None:
          dependent_sessions.append("formatters")
      if has_codeqa:
          dependent_sessions.append("codeqa")
+     if has_yamllint:
+         dependent_sessions.append("yamllint")
      if has_typing:
          dependent_sessions.append("typing")
-     nox.session(lint, name="lint", default=True, requires=dependent_sessions)  # type: ignore
+
+     lint.__doc__ = _compose_description(
+         prefix={
+             "one": "Meta session for triggering the following session:",
+             "other": "Meta session for triggering the following sessions:",
+         },
+         programs={
+             "formatters": has_formatters,
+             "codeqa": has_codeqa,
+             "yamllint": has_yamllint,
+             "typing": has_typing,
+         },
+     )
+     nox.session(
+         name="lint",
+         default=make_lint_default,
+         requires=dependent_sessions,
+     )(lint)
 
 
  def add_formatters(
      *,
+     extra_code_files: list[str],
      # isort:
      run_isort: bool,
      isort_config: str | os.PathLike | None,
@@ -231,13 +366,15 @@ def add_formatters(
          if isort_config is not None:
              command.extend(["--settings-file", str(isort_config)])
          command.extend(session.posargs)
-         command.extend(filter_paths(CODE_FILES + ["noxfile.py"]))
+         command.extend(filter_paths(CODE_FILES + ["noxfile.py"] + extra_code_files))
          session.run(*command)
 
      def execute_black_for(session: nox.Session, paths: list[str]) -> None:
          if not paths:
              return
          command = ["black"]
+         if run_check:
+             command.append("--check")
          if black_config is not None:
              command.extend(["--config", str(black_config)])
          command.extend(session.posargs)
@@ -246,7 +383,9 @@ def add_formatters(
 
      def execute_black(session: nox.Session) -> None:
          if run_black and run_black_modules:
-             execute_black_for(session, filter_paths(CODE_FILES + ["noxfile.py"]))
+             execute_black_for(
+                 session, filter_paths(CODE_FILES + ["noxfile.py"] + extra_code_files)
+             )
              return
          if run_black:
              paths = filter_paths(
@@ -270,11 +409,22 @@ def add_formatters(
          if run_black or run_black_modules:
              execute_black(session)
 
-     nox.session(formatters, name="formatters", default=False)  # type: ignore
+     formatters.__doc__ = _compose_description(
+         prefix={
+             "one": "Run code formatter:",
+             "other": "Run code formatters:",
+         },
+         programs={
+             "isort": run_isort,
+             "black": run_black,
+         },
+     )
+     nox.session(name="formatters", default=False)(formatters)
 
 
- def add_codeqa(
+ def add_codeqa(  # noqa: C901
      *,
+     extra_code_files: list[str],
      # flake8:
      run_flake8: bool,
      flake8_config: str | os.PathLike | None,
@@ -314,10 +464,31 @@ def add_codeqa(
          if flake8_config is not None:
              command.extend(["--config", str(flake8_config)])
          command.extend(session.posargs)
-         command.extend(filter_paths(CODE_FILES + ["noxfile.py"]))
+         command.extend(filter_paths(CODE_FILES + ["noxfile.py"] + extra_code_files))
+         session.run(*command)
+
+     def execute_pylint_impl(
+         session: nox.Session,
+         prepared_collections: CollectionSetup,
+         config: os.PathLike | str | None,
+         paths: list[str],
+     ) -> None:
+         command = ["pylint"]
+         if config is not None:
+             command.extend(
+                 [
+                     "--rcfile",
+                     os.path.join(prepared_collections.current_collection.path, config),
+                 ]
+             )
+         command.extend(["--source-roots", "."])
+         command.extend(session.posargs)
+         command.extend(prepared_collections.prefix_current_paths(paths))
          session.run(*command)
 
-     def execute_pylint(session: nox.Session) -> None:
+     def execute_pylint(
+         session: nox.Session, prepared_collections: CollectionSetup
+     ) -> None:
          if pylint_modules_rcfile is not None and pylint_modules_rcfile != pylint_rcfile:
              # Only run pylint twice when using different configurations
              module_paths = filter_paths(
@@ -330,46 +501,156 @@ def add_codeqa(
              # Otherwise run it only once using the general configuration
              module_paths = []
              other_paths = filter_paths(CODE_FILES)
-         command: list[str]
-         with ansible_collection_root() as (root, prefix):
+
+         with session.chdir(prepared_collections.current_place):
              if module_paths:
-                 command = ["pylint"]
-                 config = pylint_modules_rcfile or pylint_rcfile
-                 if config is not None:
-                     command.extend(
-                         [
-                             "--rcfile",
-                             os.path.join(root, prefix, config),
-                         ]
-                     )
-                 command.extend(["--source-roots", root])
-                 command.extend(session.posargs)
-                 command.extend(prefix_paths(module_paths, prefix=prefix))
-                 session.run(*command)
+                 execute_pylint_impl(
+                     session,
+                     prepared_collections,
+                     pylint_modules_rcfile or pylint_rcfile,
+                     module_paths,
+                 )
 
              if other_paths:
-                 command = ["pylint"]
-                 if pylint_rcfile is not None:
-                     command.extend(
-                         ["--rcfile", os.path.join(root, prefix, pylint_rcfile)]
-                     )
-                 command.extend(["--source-roots", root])
-                 command.extend(session.posargs)
-                 command.extend(prefix_paths(other_paths, prefix=prefix))
-                 session.run(*command)
+                 execute_pylint_impl(
+                     session, prepared_collections, pylint_rcfile, other_paths
+                 )
 
      def codeqa(session: nox.Session) -> None:
          install(session, *compose_dependencies())
+         prepared_collections: CollectionSetup | None = None
+         if run_pylint:
+             prepared_collections = prepare_collections(
+                 session,
+                 install_in_site_packages=False,
+                 extra_deps_files=["tests/unit/requirements.yml"],
+             )
+             if not prepared_collections:
+                 session.warn("Skipping pylint...")
          if run_flake8:
              execute_flake8(session)
-         if run_pylint:
-             execute_pylint(session)
+         if run_pylint and prepared_collections:
+             execute_pylint(session, prepared_collections)
+
+     codeqa.__doc__ = _compose_description(
+         prefix={
+             "other": "Run code QA:",
+         },
+         programs={
+             "flake8": run_flake8,
+             "pylint": run_pylint,
+         },
+     )
+     nox.session(name="codeqa", default=False)(codeqa)
+
+
+ def add_yamllint(
+     *,
+     run_yamllint: bool,
+     yamllint_config: str | os.PathLike | None,
+     yamllint_config_plugins: str | os.PathLike | None,
+     yamllint_config_plugins_examples: str | os.PathLike | None,
+     yamllint_package: str,
+ ) -> None:
+     """
+     Add yamllint session for linting YAML files and plugin/module docs.
+     """
+
+     def compose_dependencies() -> list[str]:
+         deps = []
+         if run_yamllint:
+             deps.append(yamllint_package)
+         return deps
+
+     def to_str(config: str | os.PathLike | None) -> str | None:
+         return str(config) if config else None
+
+     def execute_yamllint(session: nox.Session) -> None:
+         # Run yamllint
+         all_files = list_all_files()
+         cwd = Path.cwd()
+         all_yaml_filenames = [
+             str(file.relative_to(cwd))
+             for file in all_files
+             if file.name.lower().endswith((".yml", ".yaml"))
+         ]
+         if not all_yaml_filenames:
+             session.warn("Skipping yamllint since no YAML file was found...")
+             return
+
+         command = ["yamllint"]
+         if yamllint_config is not None:
+             command.extend(
+                 [
+                     "-c",
+                     str(yamllint_config),
+                 ]
+             )
+         command.append("--strict")
+         command.append("--")
+         command.extend(all_yaml_filenames)
+         command.extend(session.posargs)
+         session.run(*command)
+
+     def execute_plugin_yamllint(session: nox.Session) -> None:
+         # Run yamllint
+         all_files = list_all_files()
+         cwd = Path.cwd()
+         plugins_dir = cwd / "plugins"
+         ignore_dirs = [
+             plugins_dir / "action",
+             plugins_dir / "module_utils",
+             plugins_dir / "plugin_utils",
+         ]
+         all_plugin_files = [
+             file
+             for file in all_files
+             if file.is_relative_to(plugins_dir)
+             and file.name.lower().endswith((".py", ".yml", ".yaml"))
+             and not any(file.is_relative_to(dir) for dir in ignore_dirs)
+         ]
+         if not all_plugin_files:
+             session.warn(
+                 "Skipping yamllint for modules/plugins since"
+                 " no appropriate Python file was found..."
+             )
+             return
+         _run_bare_script(
+             session,
+             "plugin-yamllint",
+             use_session_python=True,
+             files=all_plugin_files,
+             extra_data={
+                 "config": to_str(yamllint_config_plugins or yamllint_config),
+                 "config_examples": to_str(
+                     yamllint_config_plugins_examples
+                     or yamllint_config_plugins
+                     or yamllint_config
+                 ),
+             },
+         )
 
-     nox.session(codeqa, name="codeqa", default=False)  # type: ignore
+     def yamllint(session: nox.Session) -> None:
+         install(session, *compose_dependencies())
+         if run_yamllint:
+             execute_yamllint(session)
+             execute_plugin_yamllint(session)
+
+     yamllint.__doc__ = _compose_description(
+         prefix={
+             "one": "Run YAML checker:",
+             "other": "Run YAML checkers:",
+         },
+         programs={
+             "yamllint": run_yamllint,
+         },
+     )
+     nox.session(name="yamllint", default=False)(yamllint)
 
 
  def add_typing(
      *,
+     extra_code_files: list[str],
      run_mypy: bool,
      mypy_config: str | os.PathLike | None,
      mypy_package: str,
@@ -394,28 +675,59 @@ def add_typing(
              deps.extend(shlex.split(extra_dep))
          return deps
 
-     def execute_mypy(session: nox.Session) -> None:
-         with ansible_collection_root() as (root, prefix):
+     def execute_mypy(
+         session: nox.Session, prepared_collections: CollectionSetup
+     ) -> None:
+         # Run mypy
+         with session.chdir(prepared_collections.current_place):
              command = ["mypy"]
              if mypy_config is not None:
                  command.extend(
-                     ["--config-file", os.path.join(root, prefix, mypy_config)]
+                     [
+                         "--config-file",
+                         os.path.join(
+                             prepared_collections.current_collection.path, mypy_config
+                         ),
+                     ]
                  )
+             command.append("--namespace-packages")
              command.append("--explicit-package-bases")
              command.extend(session.posargs)
-             command.extend(prefix_paths(CODE_FILES, prefix=prefix))
-             session.run(*command, env={"MYPYPATH": root})
+             command.extend(
+                 prepared_collections.prefix_current_paths(CODE_FILES + extra_code_files)
+             )
+             session.run(
+                 *command, env={"MYPYPATH": str(prepared_collections.current_place)}
+             )
 
      def typing(session: nox.Session) -> None:
          install(session, *compose_dependencies())
-         if run_mypy:
-             execute_mypy(session)
+         prepared_collections = prepare_collections(
+             session,
+             install_in_site_packages=False,
+             extra_deps_files=["tests/unit/requirements.yml"],
+         )
+         if not prepared_collections:
+             session.warn("Skipping mypy...")
+         if run_mypy and prepared_collections:
+             execute_mypy(session, prepared_collections)
 
-     nox.session(typing, name="typing", default=False)  # type: ignore
+     typing.__doc__ = _compose_description(
+         prefix={
+             "one": "Run type checker:",
+             "other": "Run type checkers:",
+         },
+         programs={
+             "mypy": run_mypy,
+         },
+     )
+     nox.session(name="typing", default=False)(typing)
 
 
  def add_lint_sessions(
      *,
+     make_lint_default: bool = True,
+     extra_code_files: list[str] | None = None,
      # isort:
      run_isort: bool = True,
      isort_config: str | os.PathLike | None = None,
@@ -436,6 +748,12 @@ def add_lint_sessions(
      pylint_package: str = "pylint",
      pylint_ansible_core_package: str | None = "ansible-core",
      pylint_extra_deps: list[str] | None = None,
+     # yamllint:
+     run_yamllint: bool = False,
+     yamllint_config: str | os.PathLike | None = None,
+     yamllint_config_plugins: str | os.PathLike | None = None,
+     yamllint_config_plugins_examples: str | os.PathLike | None = None,
+     yamllint_package: str = "yamllint",
      # mypy:
      run_mypy: bool = True,
      mypy_config: str | os.PathLike | None = None,
@@ -448,14 +766,20 @@ def add_lint_sessions(
      """
      has_formatters = run_isort or run_black or run_black_modules or False
      has_codeqa = run_flake8 or run_pylint
+     has_yamllint = run_yamllint
      has_typing = run_mypy
 
      add_lint(
-         has_formatters=has_formatters, has_codeqa=has_codeqa, has_typing=has_typing
+         has_formatters=has_formatters,
+         has_codeqa=has_codeqa,
+         has_yamllint=has_yamllint,
+         has_typing=has_typing,
+         make_lint_default=make_lint_default,
      )
 
      if has_formatters:
          add_formatters(
+             extra_code_files=extra_code_files or [],
              run_isort=run_isort,
              isort_config=isort_config,
              isort_package=isort_package,
@@ -467,6 +791,7 @@ def add_lint_sessions(
 
      if has_codeqa:
          add_codeqa(
+             extra_code_files=extra_code_files or [],
              run_flake8=run_flake8,
              flake8_config=flake8_config,
              flake8_package=flake8_package,
@@ -478,8 +803,18 @@ def add_lint_sessions(
              pylint_extra_deps=pylint_extra_deps or [],
          )
 
+     if has_yamllint:
+         add_yamllint(
+             run_yamllint=run_yamllint,
+             yamllint_config=yamllint_config,
+             yamllint_config_plugins=yamllint_config_plugins,
+             yamllint_config_plugins_examples=yamllint_config_plugins_examples,
+             yamllint_package=yamllint_package,
+         )
+
      if has_typing:
          add_typing(
+             extra_code_files=extra_code_files or [],
              run_mypy=run_mypy,
              mypy_config=mypy_config,
              mypy_package=mypy_package,
@@ -488,4 +823,890 @@ def add_lint_sessions(
          )
 
 
- __all__ = ["add_lint_sessions"]
+ def add_docs_check(
+     *,
+     make_docs_check_default: bool = True,
+     antsibull_docs_package: str = "antsibull-docs",
+     ansible_core_package: str = "ansible-core",
+     validate_collection_refs: t.Literal["self", "dependent", "all"] | None = None,
+     extra_collections: list[str] | None = None,
+ ) -> None:
+     """
+     Add docs-check session for linting.
+     """
+
+     def compose_dependencies() -> list[str]:
+         deps = [antsibull_docs_package, ansible_core_package]
+         return deps
+
+     def execute_antsibull_docs(
+         session: nox.Session, prepared_collections: CollectionSetup
+     ) -> None:
+         with session.chdir(prepared_collections.current_path):
+             collections_path = f"{prepared_collections.current_place}"
+             command = [
+                 "antsibull-docs",
+                 "lint-collection-docs",
+                 "--plugin-docs",
+                 "--skip-rstcheck",
+                 ".",
+             ]
+             if validate_collection_refs:
+                 command.extend(["--validate-collection-refs", validate_collection_refs])
+             session.run(*command, env={"ANSIBLE_COLLECTIONS_PATH": collections_path})
+
+     def docs_check(session: nox.Session) -> None:
+         install(session, *compose_dependencies())
+         prepared_collections = prepare_collections(
+             session,
+             install_in_site_packages=False,
+             extra_collections=extra_collections,
+             install_out_of_tree=True,
+         )
+         if not prepared_collections:
+             session.warn("Skipping antsibull-docs...")
+         if prepared_collections:
+             execute_antsibull_docs(session, prepared_collections)
+
+     docs_check.__doc__ = "Run 'antsibull-docs lint-collection-docs'"
+     nox.session(
+         name="docs-check",
+         default=make_docs_check_default,
+     )(docs_check)
+
+
+ def add_license_check(
+     *,
+     make_license_check_default: bool = True,
+     run_reuse: bool = True,
+     reuse_package: str = "reuse",
+     run_license_check: bool = True,
+     license_check_extra_ignore_paths: list[str] | None = None,
+ ) -> None:
+     """
+     Add license-check session for license checks.
+     """
+
+     def compose_dependencies() -> list[str]:
+         deps = []
+         if run_reuse:
+             deps.append(reuse_package)
+         return deps
+
+     def license_check(session: nox.Session) -> None:
+         install(session, *compose_dependencies())
+         if run_reuse:
+             session.run("reuse", "lint")
+         if run_license_check:
+             _run_bare_script(
+                 session,
+                 "license-check",
+                 extra_data={
+                     "extra_ignore_paths": license_check_extra_ignore_paths or [],
+                 },
+             )
+
+     license_check.__doc__ = _compose_description(
+         prefix={
+             "one": "Run license checker:",
+             "other": "Run license checkers:",
+         },
+         programs={
+             "reuse": run_reuse,
+             "license-check": (
+                 "ensure GPLv3+ for plugins" if run_license_check else False
+             ),
+         },
+     )
+     nox.session(
+         name="license-check",
+         default=make_license_check_default,
+     )(license_check)
+
+
+ @dataclass
+ class ActionGroup:
+     """
+     Defines an action group.
+     """
+
+     # Name of the action group.
+     name: str
+     # Regex pattern to match modules that could belong to this action group.
+     pattern: str
+     # Doc fragment that members of the action group must have, but no other module
+     # must have
+     doc_fragment: str
+     # Exclusion list of modules that match the regex, but should not be part of the
+     # action group. All other modules matching the regex are assumed to be part of
+     # the action group.
+     exclusions: list[str] | None = None
+
+
+ def add_extra_checks(
+     *,
+     make_extra_checks_default: bool = True,
+     # no-unwanted-files:
+     run_no_unwanted_files: bool = True,
+     no_unwanted_files_module_extensions: (
+         list[str] | None
+     ) = None,  # default: .cs, .ps1, .psm1, .py
+     no_unwanted_files_other_extensions: list[str] | None = None,  # default: .py, .pyi
+     no_unwanted_files_yaml_extensions: list[str] | None = None,  # default: .yml, .yaml
+     no_unwanted_files_skip_paths: list[str] | None = None,  # default: []
+     no_unwanted_files_skip_directories: list[str] | None = None,  # default: []
+     no_unwanted_files_yaml_directories: (
+         list[str] | None
+     ) = None,  # default: plugins/test/, plugins/filter/
+     no_unwanted_files_allow_symlinks: bool = False,
+     # action-groups:
+     run_action_groups: bool = False,
+     action_groups_config: list[ActionGroup] | None = None,
+ ) -> None:
+     """
+     Add extra-checks session for extra checks.
+     """
+
+     def execute_no_unwanted_files(session: nox.Session) -> None:
+         _run_bare_script(
+             session,
+             "no-unwanted-files",
+             extra_data={
+                 "module_extensions": no_unwanted_files_module_extensions
+                 or [".cs", ".ps1", ".psm1", ".py"],
+                 "other_extensions": no_unwanted_files_other_extensions
+                 or [".py", ".pyi"],
+                 "yaml_extensions": no_unwanted_files_yaml_extensions
+                 or [".yml", ".yaml"],
+                 "skip_paths": no_unwanted_files_skip_paths or [],
+                 "skip_directories": no_unwanted_files_skip_directories or [],
+                 "yaml_directories": no_unwanted_files_yaml_directories
+                 or ["plugins/test/", "plugins/filter/"],
+                 "allow_symlinks": no_unwanted_files_allow_symlinks,
+             },
+         )
+
+     def execute_action_groups(session: nox.Session) -> None:
+         if action_groups_config is None:
+             session.warn("Skipping action-groups since config is not provided...")
+             return
+         _run_bare_script(
+             session,
+             "action-groups",
+             extra_data={
+                 "config": [asdict(cfg) for cfg in action_groups_config],
+             },
+         )
+
+     def extra_checks(session: nox.Session) -> None:
+         if run_no_unwanted_files:
+             execute_no_unwanted_files(session)
+         if run_action_groups:
+             execute_action_groups(session)
+
+     extra_checks.__doc__ = _compose_description(
+         prefix={
+             "one": "Run extra checker:",
+             "other": "Run extra checkers:",
+         },
+         programs={
+             "no-unwanted-files": (
+                 "checks for unwanted files in plugins/"
+                 if run_no_unwanted_files
+                 else False
+             ),
+             "action-groups": "validate action groups" if run_action_groups else False,
+         },
+     )
+     nox.session(
+         name="extra-checks",
+         python=False,
+         default=make_extra_checks_default,
+     )(extra_checks)
+
+
+ def add_build_import_check(
+     *,
+     make_build_import_check_default: bool = True,
+     ansible_core_package: str = "ansible-core",
+     run_galaxy_importer: bool = True,
+     galaxy_importer_package: str = "galaxy-importer",
+     galaxy_importer_config_path: (
+         str | os.PathLike | None
+     ) = None,  # https://github.com/ansible/galaxy-importer#configuration
+ ) -> None:
+     """
+     Add license-check session for license checks.
+     """
+
+     def compose_dependencies() -> list[str]:
+         deps = [ansible_core_package]
+         if run_galaxy_importer:
+             deps.append(galaxy_importer_package)
+         return deps
+
+     def build_import_check(session: nox.Session) -> None:
+         install(session, *compose_dependencies())
+
+         tmp = Path(session.create_tmp())
+         collection_dir = tmp / "collection"
+         remove_path(collection_dir)
+         copy_collection(Path.cwd(), collection_dir)
+
+         collection = load_collection_data_from_disk(
+             collection_dir, accept_manifest=False
+         )
+         version = collection.version
+         if not version:
+             version = "0.0.1"
+             force_collection_version(collection_dir, version=version)
+
+         with session.chdir(collection_dir):
+             build_ran = session.run("ansible-galaxy", "collection", "build") is not None
+
+         tarball = (
+             collection_dir
+             / f"{collection.namespace}-{collection.name}-{version}.tar.gz"
+         )
+         if build_ran and not tarball.is_file():
+             files = "\n".join(
+                 f"* {path.name}"
+                 for path in collection_dir.iterdir()
+                 if not path.is_dir()
+             )
+             session.error(f"Cannot find file {tarball}! List of all files:\n{files}")
+
+         if run_galaxy_importer and tarball.is_file():
+             env = {}
+             if galaxy_importer_config_path:
+                 env["GALAXY_IMPORTER_CONFIG"] = str(
+                     Path(galaxy_importer_config_path).absolute()
+                 )
+             with session.chdir(collection_dir):
+                 import_log = (
+                     session.run(
+                         "python",
+                         "-m",
+                         "galaxy_importer.main",
+                         tarball.name,
+                         env=env,
+                         silent=True,
+                     )
+                     or ""
+                 )
+             if import_log:
+                 with _ci_group("Run Galaxy importer"):
+                     print(import_log)
+                 error_prefix = "ERROR:"
+                 errors = []
+                 for line in import_log.splitlines():
+                     if line.startswith(error_prefix):
+                         errors.append(line[len(error_prefix) :].strip())
+                 if errors:
+                     messages = "\n".join(f"* {error}" for error in errors)
+                     session.warn(
+                         "Galaxy importer emitted the following non-fatal"
+                         f" error{'' if len(errors) == 1 else 's'}:\n{messages}"
+                     )
+
+     build_import_check.__doc__ = _compose_description(
+         prefix={
+             "one": "Run build and import checker:",
+             "other": "Run build and import checkers:",
+         },
+         programs={
+             "build-collection": True,
+             "galaxy-importer": (
+                 "test whether Galaxy will import built collection"
+                 if run_galaxy_importer
+                 else False
+             ),
+         },
+     )
+     nox.session(
+         name="build-import-check",
+         default=make_build_import_check_default,
+     )(build_import_check)
+
+
+ def _parse_ansible_core_version(
+     version: str | AnsibleCoreVersion,
+ ) -> AnsibleCoreVersion:
+     if version in ("devel", "milestone"):
+         # For some reason mypy doesn't notice that
+         return t.cast(AnsibleCoreVersion, version)
+     if isinstance(version, Version):
+         return version
+     return Version.parse(version)
+
+
+ def add_ansible_test_session(
+     *,
+     name: str,
+     description: str | None,
+     extra_deps_files: list[str | os.PathLike] | None = None,
+     ansible_test_params: list[str],
+     add_posargs: bool = True,
+     default: bool,
+     ansible_core_version: str | AnsibleCoreVersion,
+     ansible_core_source: t.Literal["git", "pypi"] = "git",
+     ansible_core_repo_name: str | None = None,
+     ansible_core_branch_name: str | None = None,
+     handle_coverage: t.Literal["never", "always", "auto"] = "auto",
+     register_name: str | None = None,
+     register_extra_data: dict[str, t.Any] | None = None,
+ ) -> None:
+     """
+     Add generic ansible-test session.
+
+     Returns a list of Python versions set for this session.
+     """
+     parsed_ansible_core_version = _parse_ansible_core_version(ansible_core_version)
+
+     def compose_dependencies() -> list[str]:
+         deps = [
+             get_ansible_core_package_name(
+                 parsed_ansible_core_version,
+                 source=ansible_core_source,
+                 ansible_repo=ansible_core_repo_name,
+                 branch_name=ansible_core_branch_name,
+             )
+         ]
+         return deps
+
+     def run_ansible_test(session: nox.Session) -> None:
+         install(session, *compose_dependencies())
+         prepared_collections = prepare_collections(
+             session,
+             install_in_site_packages=False,
+             extra_deps_files=extra_deps_files,
+             install_out_of_tree=True,
+         )
+         if not prepared_collections:
+             session.warn("Skipping ansible-test...")
+             return
+         cwd = Path.cwd()
+         with session.chdir(prepared_collections.current_path):
+             command = ["ansible-test"] + ansible_test_params
+             if add_posargs and session.posargs:
+                 command.extend(session.posargs)
+             session.run(*command)
+
+             coverage = (handle_coverage == "auto" and "--coverage" in command) or (
+                 handle_coverage == "always"
+             )
+             if coverage:
+                 session.run(
+                     "ansible-test",
+                     "coverage",
+                     "xml",
+                     "--color",
+                     "-v",
+                     "--requirements",
+                     "--group-by",
+                     "command",
+                     "--group-by",
+                     "version",
+                 )
+
+             copy_directory_tree_into(
+                 prepared_collections.current_path / "tests" / "output",
+                 cwd / "tests" / "output",
+             )
+
+     # Determine Python version(s)
+     core_info = get_ansible_core_info(parsed_ansible_core_version)
+     all_versions = get_installed_python_versions()
+
+     installed_versions = [
+         version
+         for version in core_info.controller_python_versions
+         if version in all_versions
+     ]
+     python = max(installed_versions or core_info.controller_python_versions)
+     python_versions = [python]
+
+     run_ansible_test.__doc__ = description
+     nox.session(
+         name=name,
+         default=default,
+         python=[str(python_version) for python_version in python_versions],
+     )(run_ansible_test)
+
+     if register_name:
+         data = {
+             "name": name,
+             "ansible-core": (
+                 str(ansible_core_branch_name)
+                 if ansible_core_branch_name is not None
+                 else str(parsed_ansible_core_version)
+             ),
+             "python": " ".join(str(python) for python in python_versions),
+         }
+         if register_extra_data:
+             data.update(register_extra_data)
+         _register(register_name, data)
+
+
+ def add_ansible_test_sanity_test_session(
+     *,
+     name: str,
+     description: str | None,
+     default: bool,
+     ansible_core_version: str | AnsibleCoreVersion,
+     ansible_core_source: t.Literal["git", "pypi"] = "git",
+     ansible_core_repo_name: str | None = None,
+     ansible_core_branch_name: str | None = None,
+ ) -> None:
+     """
+     Add generic ansible-test sanity test session.
+     """
+     add_ansible_test_session(
+         name=name,
+         description=description,
+         ansible_test_params=["sanity", "--docker", "-v", "--color"],
+         default=default,
+         ansible_core_version=ansible_core_version,
+         ansible_core_source=ansible_core_source,
+         ansible_core_repo_name=ansible_core_repo_name,
+         ansible_core_branch_name=ansible_core_branch_name,
+         register_name="sanity",
+     )
+
+
+ def _parse_min_max_except(
+     min_version: Version | str | None,
+     max_version: Version | str | None,
+     except_versions: list[AnsibleCoreVersion | str] | None,
+ ) -> tuple[Version | None, Version | None, tuple[AnsibleCoreVersion, ...] | None]:
+     if isinstance(min_version, str):
+         min_version = Version.parse(min_version)
+     if isinstance(max_version, str):
+         max_version = Version.parse(max_version)
+     if except_versions is None:
+         return min_version, max_version, None
+     evs = tuple(_parse_ansible_core_version(version) for version in except_versions)
+     return min_version, max_version, evs
+
+
+ def add_all_ansible_test_sanity_test_sessions(
+     *,
+     default: bool = False,
+     include_devel: bool = False,
+     include_milestone: bool = False,
+     add_devel_like_branches: list[tuple[str | None, str]] | None = None,
+     min_version: Version | str | None = None,
+     max_version: Version | str | None = None,
+     except_versions: list[AnsibleCoreVersion | str] | None = None,
+ ) -> None:
+     """
+     Add ansible-test sanity test meta session that runs ansible-test sanity
+     for all supported ansible-core versions.
+     """
+     parsed_min_version, parsed_max_version, parsed_except_versions = (
+         _parse_min_max_except(min_version, max_version, except_versions)
+     )
+
+     sanity_sessions = []
+     for ansible_core_version in get_supported_core_versions(
+         include_devel=include_devel,
+         include_milestone=include_milestone,
+         min_version=parsed_min_version,
+         max_version=parsed_max_version,
+         except_versions=parsed_except_versions,
+     ):
+         name = f"ansible-test-sanity-{ansible_core_version}"
+         add_ansible_test_sanity_test_session(
+             name=name,
+             description=f"Run sanity tests from ansible-core {ansible_core_version}'s ansible-test",
+             ansible_core_version=ansible_core_version,
+             default=False,
+         )
+         sanity_sessions.append(name)
+     if add_devel_like_branches:
+         for repo_name, branch_name in add_devel_like_branches:
+             repo_prefix = (
+                 f"{repo_name.replace('/', '-')}-" if repo_name is not None else ""
+             )
+             repo_postfix = f", {repo_name} repository" if repo_name is not None else ""
+             name = f"ansible-test-sanity-{repo_prefix}{branch_name.replace('/', '-')}"
+             add_ansible_test_sanity_test_session(
+                 name=name,
+                 description=(
+                     "Run sanity tests from ansible-test in ansible-core's"
+                     f" {branch_name} branch{repo_postfix}"
+                 ),
+                 ansible_core_version="devel",
+                 ansible_core_repo_name=repo_name,
+                 ansible_core_branch_name=branch_name,
+                 default=False,
+             )
+             sanity_sessions.append(name)
+
+     def run_all_sanity_tests(
+         session: nox.Session,  # pylint: disable=unused-argument
+     ) -> None:
+         pass
+
+     run_all_sanity_tests.__doc__ = (
+         "Meta session for running all ansible-test-sanity-* sessions."
+     )
+     nox.session(
+         name="ansible-test-sanity",
+         default=default,
+         requires=sanity_sessions,
+     )(run_all_sanity_tests)
+
+
+ def add_ansible_test_unit_test_session(
+     *,
+     name: str,
+     description: str | None,
+     default: bool,
+     ansible_core_version: str | AnsibleCoreVersion,
+     ansible_core_source: t.Literal["git", "pypi"] = "git",
+     ansible_core_repo_name: str | None = None,
+     ansible_core_branch_name: str | None = None,
+ ) -> None:
+     """
+     Add generic ansible-test unit test session.
+     """
+     add_ansible_test_session(
+         name=name,
+         description=description,
+         ansible_test_params=["units", "--docker", "-v", "--color"],
+         extra_deps_files=["tests/unit/requirements.yml"],
+         default=default,
+         ansible_core_version=ansible_core_version,
+         ansible_core_source=ansible_core_source,
+         ansible_core_repo_name=ansible_core_repo_name,
+         ansible_core_branch_name=ansible_core_branch_name,
+         register_name="units",
+     )
+
+
+ def add_all_ansible_test_unit_test_sessions(
+     *,
+     default: bool = False,
+     include_devel: bool = False,
+     include_milestone: bool = False,
+     add_devel_like_branches: list[tuple[str | None, str]] | None = None,
+     min_version: Version | str | None = None,
+     max_version: Version | str | None = None,
+     except_versions: list[AnsibleCoreVersion | str] | None = None,
+ ) -> None:
+     """
+     Add ansible-test unit test meta session that runs ansible-test units
+     for all supported ansible-core versions.
+     """
+     parsed_min_version, parsed_max_version, parsed_except_versions = (
+         _parse_min_max_except(min_version, max_version, except_versions)
+     )
+
+     units_sessions = []
+     for ansible_core_version in get_supported_core_versions(
+         include_devel=include_devel,
+         include_milestone=include_milestone,
+         min_version=parsed_min_version,
+         max_version=parsed_max_version,
+         except_versions=parsed_except_versions,
+     ):
+         name = f"ansible-test-units-{ansible_core_version}"
+         add_ansible_test_unit_test_session(
+             name=name,
+             description=f"Run unit tests with ansible-core {ansible_core_version}'s ansible-test",
+             ansible_core_version=ansible_core_version,
+             default=False,
+         )
+         units_sessions.append(name)
+     if add_devel_like_branches:
+         for repo_name, branch_name in add_devel_like_branches:
+             repo_prefix = (
+                 f"{repo_name.replace('/', '-')}-" if repo_name is not None else ""
+             )
+             repo_postfix = f", {repo_name} repository" if repo_name is not None else ""
+             name = f"ansible-test-units-{repo_prefix}{branch_name.replace('/', '-')}"
+             add_ansible_test_unit_test_session(
+                 name=name,
+                 description=(
+                     "Run unit tests from ansible-test in ansible-core's"
+                     f" {branch_name} branch{repo_postfix}"
+                 ),
+                 ansible_core_version="devel",
+                 ansible_core_repo_name=repo_name,
+                 ansible_core_branch_name=branch_name,
+                 default=False,
+             )
+             units_sessions.append(name)
+
+     def run_all_unit_tests(
+         session: nox.Session,  # pylint: disable=unused-argument
+     ) -> None:
+         pass
+
+     run_all_unit_tests.__doc__ = (
+         "Meta session for running all ansible-test-units-* sessions."
+     )
+     nox.session(
+         name="ansible-test-units",
+         default=default,
+         requires=units_sessions,
+     )(run_all_unit_tests)
+
+
+ def add_ansible_test_integration_sessions_default_container(
+     *,
+     include_devel: bool = False,
+     include_milestone: bool = False,
+     add_devel_like_branches: list[tuple[str | None, str]] | None = None,
+     min_version: Version | str | None = None,
+     max_version: Version | str | None = None,
+     except_versions: list[AnsibleCoreVersion | str] | None = None,
+     core_python_versions: (
+         dict[str | AnsibleCoreVersion, list[str | Version]] | None
+     ) = None,
+     controller_python_versions_only: bool = False,
+     default: bool = False,
+ ) -> None:
+     """
+     Add ansible-test integration tests using the default Docker container.
+
+     ``core_python_versions`` can be used to restrict the Python versions
+     to be used for a specific ansible-core version.
+
+     ``controller_python_versions_only`` can be used to only run against
+     controller Python versions.
+     """
+
+     def add_integration_tests(
+         ansible_core_version: AnsibleCoreVersion,
+         repo_name: str | None = None,
+         branch_name: str | None = None,
+     ) -> list[str]:
+         # Determine Python versions to run tests for
+         py_versions = (
+             (core_python_versions.get(branch_name) if branch_name is not None else None)
+             or core_python_versions.get(ansible_core_version)
+             or core_python_versions.get(str(ansible_core_version))
+             if core_python_versions
+             else None
+         )
+         if py_versions is None:
+             core_info = get_ansible_core_info(ansible_core_version)
+             py_versions = list(
+                 core_info.controller_python_versions
+                 if controller_python_versions_only
+                 else core_info.remote_python_versions
+             )
+
+         # Add sessions
+         integration_sessions_core: list[str] = []
+         if branch_name is None:
+             base_name = f"ansible-test-integration-{ansible_core_version}-"
+         else:
+             repo_prefix = (
+                 f"{repo_name.replace('/', '-')}-" if repo_name is not None else ""
+             )
+             base_name = f"ansible-test-integration-{repo_prefix}{branch_name.replace('/', '-')}-"
+         for py_version in py_versions:
+             name = f"{base_name}{py_version}"
+             if branch_name is None:
+                 description = (
+                     f"Run integration tests from ansible-core {ansible_core_version}'s"
+                     f" ansible-test with Python {py_version}"
+                 )
+             else:
+                 repo_postfix = (
+                     f", {repo_name} repository" if repo_name is not None else ""
+                 )
+                 description = (
+                     f"Run integration tests from ansible-test in ansible-core's {branch_name}"
+                     f" branch{repo_postfix} with Python {py_version}"
+                 )
+             add_ansible_test_session(
+                 name=name,
+                 description=description,
+                 ansible_test_params=[
+                     "integration",
+                     "--docker",
+                     "default",
+                     "-v",
+                     "--color",
+                     "--python",
+                     str(py_version),
+                 ],
+                 extra_deps_files=["tests/integration/requirements.yml"],
+                 ansible_core_version=ansible_core_version,
+                 ansible_core_repo_name=repo_name,
+                 ansible_core_branch_name=branch_name,
+                 default=False,
+                 register_name="integration",
+                 register_extra_data={
+                     "test-container": "default",
+                     "test-python": str(py_version),
+                 },
+             )
+             integration_sessions_core.append(name)
+         return integration_sessions_core
+
+     parsed_min_version, parsed_max_version, parsed_except_versions = (
+         _parse_min_max_except(min_version, max_version, except_versions)
+     )
+     integration_sessions: list[str] = []
+     for ansible_core_version in get_supported_core_versions(
+         include_devel=include_devel,
+         include_milestone=include_milestone,
+         min_version=parsed_min_version,
+         max_version=parsed_max_version,
+         except_versions=parsed_except_versions,
+     ):
+         integration_sessions_core = add_integration_tests(ansible_core_version)
+         if integration_sessions_core:
+             name = f"ansible-test-integration-{ansible_core_version}"
+             integration_sessions.append(name)
+
+             def run_integration_tests(
+                 session: nox.Session,  # pylint: disable=unused-argument
+             ) -> None:
+                 pass
+
+             run_integration_tests.__doc__ = (
+                 f"Meta session for running all {name}-* sessions."
+             )
+             nox.session(
+                 name=name,
+                 requires=integration_sessions_core,
+                 default=False,
+             )(run_integration_tests)
+     if add_devel_like_branches:
+         for repo_name, branch_name in add_devel_like_branches:
+             integration_sessions_core = add_integration_tests(
+                 "devel", repo_name=repo_name, branch_name=branch_name
+             )
+             if integration_sessions_core:
+                 repo_prefix = (
+                     f"{repo_name.replace('/', '-')}-" if repo_name is not None else ""
+                 )
+                 name = f"ansible-test-integration-{repo_prefix}{branch_name.replace('/', '-')}"
+                 integration_sessions.append(name)
+
+                 def run_integration_tests_for_branch(
+                     session: nox.Session,  # pylint: disable=unused-argument
+                 ) -> None:
+                     pass
+
+                 run_integration_tests_for_branch.__doc__ = (
+                     f"Meta session for running all {name}-* sessions."
+                 )
+                 nox.session(
+                     name=name,
+                     requires=integration_sessions_core,
+                     default=False,
+                 )(run_integration_tests_for_branch)
+
+     def ansible_test_integration(
+         session: nox.Session,  # pylint: disable=unused-argument
+     ) -> None:
+         pass
+
+     ansible_test_integration.__doc__ = (
+         "Meta session for running all ansible-test-integration-* sessions."
+     )
+     nox.session(
+         name="ansible-test-integration",
+         requires=integration_sessions,
+         default=default,
+     )(ansible_test_integration)
+
+
+ def add_ansible_lint(
+     *,
+     make_ansible_lint_default: bool = True,
+     ansible_lint_package: str = "ansible-lint",
+     strict: bool = False,
+ ) -> None:
+     """
+     Add a session that runs ansible-lint.
+     """
+
+     def compose_dependencies() -> list[str]:
+         return [ansible_lint_package]
+
+     def ansible_lint(session: nox.Session) -> None:
+         install(session, *compose_dependencies())
+         prepared_collections = prepare_collections(
+             session,
+             install_in_site_packages=False,
+             install_out_of_tree=True,
+             extra_deps_files=["tests/integration/requirements.yml"],
+         )
+         if not prepared_collections:
+             session.warn("Skipping ansible-lint...")
+             return
+         env = {"ANSIBLE_COLLECTIONS_PATH": f"{prepared_collections.current_place}"}
+         command = ["ansible-lint", "--offline"]
+         if strict:
+             command.append("--strict")
+         session.run(*command, env=env)
+
+     ansible_lint.__doc__ = "Run ansible-lint."
+     nox.session(
+         name="ansible-lint",
+         default=make_ansible_lint_default,
+     )(ansible_lint)
+
+
+ def add_matrix_generator() -> None:
+     """
+     Add a session that generates matrixes for CI systems.
+     """
+
+     def matrix_generator(
+         session: nox.Session,  # pylint: disable=unused-argument
+     ) -> None:
+         json_output = os.environ.get("ANTSIBULL_NOX_MATRIX_JSON")
+         if json_output:
+             print(f"Writing JSON output to {json_output}...")
+             with open(json_output, "wt", encoding="utf-8") as f:
+                 f.write(json.dumps(_SESSIONS))
+
+         github_output = os.environ.get("GITHUB_OUTPUT")
+         if github_output:
+             print(f"Writing GitHub output to {github_output}...")
+             with open(github_output, "at", encoding="utf-8") as f:
+                 for name, sessions in _SESSIONS.items():
+                     f.write(f"{name}={json.dumps(sessions)}\n")
+
+         for name, sessions in sorted(_SESSIONS.items()):
+             print(f"{name} ({len(sessions)}):")
+             for session_data in sessions:
+                 data = session_data.copy()
+                 session_name = data.pop("name")
+                 print(f"  {session_name}: {data}")
+
+     matrix_generator.__doc__ = "Generate matrix for CI systems."
+     nox.session(
+         name="matrix-generator",
+         python=False,
+         default=False,
+     )(matrix_generator)
+
+
+ __all__ = [
+     "ActionGroup",
+     "add_build_import_check",
+     "add_docs_check",
+     "add_extra_checks",
+     "add_license_check",
+     "add_lint_sessions",
+     "add_ansible_test_session",
+     "add_ansible_test_sanity_test_session",
+     "add_all_ansible_test_sanity_test_sessions",
+     "add_ansible_test_unit_test_session",
+     "add_all_ansible_test_unit_test_sessions",
+     "add_ansible_test_integration_sessions_default_container",
+     "add_ansible_lint",
+     "add_matrix_generator",
+     "install",
+     "prepare_collections",
+ ]
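
For orientation, the following is a minimal noxfile.py sketch showing how the session factories added in 0.2.0 are meant to be wired together. It only uses names listed in this file's __all__ above; the concrete values (config paths, the example action group, the extra script name) are hypothetical placeholders, and whether these helpers are also re-exported from the antsibull_nox package root is not shown in this diff.

# noxfile.py -- illustrative sketch only; the values below are made-up examples.
from antsibull_nox.sessions import (
    ActionGroup,
    add_all_ansible_test_sanity_test_sessions,
    add_build_import_check,
    add_docs_check,
    add_extra_checks,
    add_license_check,
    add_lint_sessions,
    add_matrix_generator,
)

# Registers the "lint" meta session plus "formatters", "codeqa", "yamllint",
# and "typing" (yamllint support is new in 0.2.0).
add_lint_sessions(
    run_yamllint=True,
    yamllint_config=".yamllint",          # hypothetical config path
    extra_code_files=["update-docs.py"],  # hypothetical extra script to check
)

# Registers "docs-check", "license-check", "extra-checks", and "build-import-check".
add_docs_check(validate_collection_refs="all")
add_license_check()
add_extra_checks(
    run_action_groups=True,
    action_groups_config=[
        ActionGroup(  # hypothetical action group definition
            name="example",
            pattern="^example_.*$",
            doc_fragment="my.collection.example_group",
        ),
    ],
)
add_build_import_check(run_galaxy_importer=True)

# Registers one "ansible-test-sanity-<version>" session per supported ansible-core
# release, an "ansible-test-sanity" meta session, and the "matrix-generator" session
# that writes the registered sessions to GITHUB_OUTPUT / ANTSIBULL_NOX_MATRIX_JSON.
add_all_ansible_test_sanity_test_sessions(include_devel=True)
add_matrix_generator()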