ansible-core 2.13.8rc1__py3-none-any.whl → 2.13.9rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ansible-core might be problematic. See the advisory in the registry listing for more details.

Files changed (43)
  1. ansible/cli/doc.py +10 -4
  2. ansible/galaxy/api.py +29 -10
  3. ansible/galaxy/collection/__init__.py +3 -0
  4. ansible/galaxy/collection/concrete_artifact_manager.py +34 -17
  5. ansible/galaxy/dependency_resolution/dataclasses.py +11 -1
  6. ansible/galaxy/dependency_resolution/providers.py +0 -1
  7. ansible/module_utils/ansible_release.py +1 -1
  8. ansible/module_utils/api.py +14 -1
  9. ansible/module_utils/csharp/Ansible.Basic.cs +265 -7
  10. ansible/plugins/lookup/password.py +79 -39
  11. ansible/release.py +1 -1
  12. ansible/utils/encrypt.py +9 -6
  13. {ansible_core-2.13.8rc1.dist-info → ansible_core-2.13.9rc1.dist-info}/METADATA +1 -1
  14. {ansible_core-2.13.8rc1.dist-info → ansible_core-2.13.9rc1.dist-info}/RECORD +43 -43
  15. {ansible_core-2.13.8rc1.dist-info → ansible_core-2.13.9rc1.dist-info}/WHEEL +1 -1
  16. ansible_test/_internal/ci/__init__.py +2 -2
  17. ansible_test/_internal/ci/azp.py +12 -8
  18. ansible_test/_internal/ci/local.py +2 -2
  19. ansible_test/_internal/classification/__init__.py +51 -43
  20. ansible_test/_internal/cli/argparsing/argcompletion.py +20 -5
  21. ansible_test/_internal/cli/commands/sanity.py +0 -15
  22. ansible_test/_internal/commands/coverage/combine.py +3 -1
  23. ansible_test/_internal/commands/integration/__init__.py +6 -2
  24. ansible_test/_internal/commands/integration/cloud/__init__.py +3 -1
  25. ansible_test/_internal/commands/sanity/__init__.py +7 -0
  26. ansible_test/_internal/commands/sanity/pylint.py +11 -0
  27. ansible_test/_internal/commands/sanity/validate_modules.py +66 -5
  28. ansible_test/_internal/config.py +6 -12
  29. ansible_test/_internal/core_ci.py +8 -1
  30. ansible_test/_internal/data.py +17 -8
  31. ansible_test/_internal/delegation.py +1 -2
  32. ansible_test/_internal/metadata.py +4 -0
  33. ansible_test/_internal/payload.py +75 -6
  34. ansible_test/_internal/python_requirements.py +15 -0
  35. ansible_test/_internal/target.py +3 -7
  36. ansible_test/_internal/test.py +1 -1
  37. ansible_test/_internal/util.py +17 -0
  38. ansible_test/_util/controller/sanity/mypy/ansible-test.ini +3 -0
  39. ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py +92 -126
  40. {ansible_core-2.13.8rc1.data → ansible_core-2.13.9rc1.data}/scripts/ansible-test +0 -0
  41. {ansible_core-2.13.8rc1.dist-info → ansible_core-2.13.9rc1.dist-info}/COPYING +0 -0
  42. {ansible_core-2.13.8rc1.dist-info → ansible_core-2.13.9rc1.dist-info}/entry_points.txt +0 -0
  43. {ansible_core-2.13.8rc1.dist-info → ansible_core-2.13.9rc1.dist-info}/top_level.txt +0 -0
@@ -4,6 +4,7 @@ import typing as t
4
4
 
5
5
  from .util import (
6
6
  display,
7
+ generate_name,
7
8
  )
8
9
 
9
10
  from .io import (
@@ -25,6 +26,7 @@ class Metadata:
25
26
  self.cloud_config = None # type: t.Optional[t.Dict[str, t.Dict[str, t.Union[int, str, bool]]]]
26
27
  self.change_description = None # type: t.Optional[ChangeDescription]
27
28
  self.ci_provider = None # type: t.Optional[str]
29
+ self.session_id = generate_name()
28
30
 
29
31
  def populate_changes(self, diff): # type: (t.Optional[t.List[str]]) -> None
30
32
  """Populate the changeset using the given diff."""
@@ -52,6 +54,7 @@ class Metadata:
52
54
  cloud_config=self.cloud_config,
53
55
  ci_provider=self.ci_provider,
54
56
  change_description=self.change_description.to_dict(),
57
+ session_id=self.session_id,
55
58
  )
56
59
 
57
60
  def to_file(self, path): # type: (str) -> None
@@ -76,6 +79,7 @@ class Metadata:
76
79
  metadata.cloud_config = data['cloud_config']
77
80
  metadata.ci_provider = data['ci_provider']
78
81
  metadata.change_description = ChangeDescription.from_dict(data['change_description'])
82
+ metadata.session_id = data['session_id']
79
83
 
80
84
  return metadata
81
85
 
@@ -27,6 +27,7 @@ from .util import (
27
27
 
28
28
  from .data import (
29
29
  data_context,
30
+ PayloadConfig,
30
31
  )
31
32
 
32
33
  from .util_common import (
@@ -44,11 +45,74 @@ def create_payload(args, dst_path): # type: (CommonConfig, str) -> None
44
45
  return
45
46
 
46
47
  files = list(data_context().ansible_source)
47
- filters = {}
48
+ permissions: dict[str, int] = {}
49
+ filters: dict[str, t.Callable[[tarfile.TarInfo], t.Optional[tarfile.TarInfo]]] = {}
50
+
51
+ # Exclude vendored files from the payload.
52
+ # They may not be compatible with the delegated environment.
53
+ files = [
54
+ (abs_path, rel_path) for abs_path, rel_path in files
55
+ if not rel_path.startswith('lib/ansible/_vendor/')
56
+ or rel_path == 'lib/ansible/_vendor/__init__.py'
57
+ ]
58
+
59
+ def apply_permissions(tar_info: tarfile.TarInfo, mode: int) -> t.Optional[tarfile.TarInfo]:
60
+ """
61
+ Apply the specified permissions to the given file.
62
+ Existing file type bits are preserved.
63
+ """
64
+ tar_info.mode &= ~(stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
65
+ tar_info.mode |= mode
66
+
67
+ return tar_info
68
+
69
+ def make_executable(tar_info: tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]:
70
+ """
71
+ Make the given file executable and readable by all, and writeable by the owner.
72
+ Existing file type bits are preserved.
73
+ This ensures consistency of test results when using unprivileged users.
74
+ """
75
+ return apply_permissions(
76
+ tar_info,
77
+ stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
78
+ stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH |
79
+ stat.S_IWUSR
80
+ )
81
+
82
+ def make_non_executable(tar_info: tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]:
83
+ """
84
+ Make the given file readable by all, and writeable by the owner.
85
+ Existing file type bits are preserved.
86
+ This ensures consistency of test results when using unprivileged users.
87
+ """
88
+ return apply_permissions(
89
+ tar_info,
90
+ stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH |
91
+ stat.S_IWUSR
92
+ )
93
+
94
+ def detect_permissions(tar_info: tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]:
95
+ """
96
+ Detect and apply the appropriate permissions for a file.
97
+ Existing file type bits are preserved.
98
+ This ensures consistency of test results when using unprivileged users.
99
+ """
100
+ if tar_info.path.startswith('ansible/'):
101
+ mode = permissions.get(os.path.relpath(tar_info.path, 'ansible'))
102
+ elif data_context().content.collection and is_subdir(tar_info.path, data_context().content.collection.directory):
103
+ mode = permissions.get(os.path.relpath(tar_info.path, data_context().content.collection.directory))
104
+ else:
105
+ mode = None
106
+
107
+ if mode:
108
+ tar_info = apply_permissions(tar_info, mode)
109
+ elif tar_info.mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
110
+ # If any execute bit is set, treat the file as executable.
111
+ # This ensures that sanity tests which check execute bits behave correctly.
112
+ tar_info = make_executable(tar_info)
113
+ else:
114
+ tar_info = make_non_executable(tar_info)
48
115
 
49
- def make_executable(tar_info): # type: (tarfile.TarInfo) -> t.Optional[tarfile.TarInfo]
50
- """Make the given file executable."""
51
- tar_info.mode |= stat.S_IXUSR | stat.S_IXOTH | stat.S_IXGRP
52
116
  return tar_info
53
117
 
54
118
  if not ANSIBLE_SOURCE_ROOT:
@@ -85,10 +149,15 @@ def create_payload(args, dst_path): # type: (CommonConfig, str) -> None
85
149
  # there are no extra files when testing ansible itself
86
150
  extra_files = []
87
151
 
152
+ payload_config = PayloadConfig(
153
+ files=content_files,
154
+ permissions=permissions,
155
+ )
156
+
88
157
  for callback in data_context().payload_callbacks:
89
158
  # execute callbacks only on the content paths
90
159
  # this is done before placing them in the appropriate subdirectory (see below)
91
- callback(content_files)
160
+ callback(payload_config)
92
161
 
93
162
  # place ansible source files under the 'ansible' directory on the delegated host
94
163
  files = [(src, os.path.join('ansible', dst)) for src, dst in files]
@@ -109,7 +178,7 @@ def create_payload(args, dst_path): # type: (CommonConfig, str) -> None
109
178
  with tarfile.open(dst_path, mode='w:gz', compresslevel=4, format=tarfile.GNU_FORMAT) as tar:
110
179
  for src, dst in files:
111
180
  display.info('%s -> %s' % (src, dst), verbosity=4)
112
- tar.add(src, dst, filter=filters.get(dst))
181
+ tar.add(src, dst, filter=filters.get(dst, detect_permissions))
113
182
 
114
183
  duration = time.time() - start
115
184
  payload_size_bytes = os.path.getsize(dst_path)
@@ -52,6 +52,7 @@ from .data import (
52
52
  from .host_configs import (
53
53
  PosixConfig,
54
54
  PythonConfig,
55
+ VirtualPythonConfig,
55
56
  )
56
57
 
57
58
  from .connections import (
@@ -259,6 +260,20 @@ def run_pip(
259
260
  connection = connection or LocalConnection(args)
260
261
  script = prepare_pip_script(commands)
261
262
 
263
+ if isinstance(args, IntegrationConfig):
264
+ # Integration tests can involve two hosts (controller and target).
265
+ # The connection type can be used to disambiguate between the two.
266
+ context = " (controller)" if isinstance(connection, LocalConnection) else " (target)"
267
+ else:
268
+ context = ""
269
+
270
+ if isinstance(python, VirtualPythonConfig):
271
+ context += " [venv]"
272
+
273
+ # The interpreter path is not included below.
274
+ # It can be seen by running ansible-test with increased verbosity (showing all commands executed).
275
+ display.info(f'Installing requirements for Python {python.version}{context}')
276
+
262
277
  if not args.explain:
263
278
  try:
264
279
  connection.run([python.path], data=script, capture=False)
@@ -623,13 +623,9 @@ class IntegrationTarget(CompletionTarget):
623
623
  if 'needs/httptester' in groups:
624
624
  groups.append('cloud/httptester') # backwards compatibility for when it was not a cloud plugin
625
625
 
626
- if '_' in self.name:
627
- prefix = self.name[:self.name.find('_')]
628
- else:
629
- prefix = None
630
-
631
- if prefix in prefixes:
632
- group = prefixes[prefix]
626
+ for prefix, group in prefixes.items():
627
+ if not self.name.startswith(f'{prefix}_'):
628
+ continue
633
629
 
634
630
  if group != prefix:
635
631
  group = '%s/%s' % (group, prefix)
@@ -46,7 +46,7 @@ def calculate_confidence(path, line, metadata): # type: (str, int, Metadata) ->
46
46
  return 0
47
47
 
48
48
  # changes were made to the same file and line
49
- if any(r[0] <= line <= r[1] in r for r in ranges):
49
+ if any(r[0] <= line <= r[1] for r in ranges):
50
50
  return 100
51
51
 
52
52
  # changes were made to the same file and the line number is unknown
@@ -23,10 +23,14 @@ import time
23
23
  import functools
24
24
  import shlex
25
25
  import typing as t
26
+ import warnings
26
27
 
27
28
  from struct import unpack, pack
28
29
  from termios import TIOCGWINSZ
29
30
 
31
+ # CAUTION: Avoid third-party imports in this module whenever possible.
32
+ # Any third-party imports occurring here will result in an error if they are vendored by ansible-core.
33
+
30
34
  try:
31
35
  from typing_extensions import TypeGuard # TypeGuard was added in Python 3.9
32
36
  except ImportError:
@@ -333,6 +337,17 @@ def get_ansible_version(): # type: () -> str
333
337
  return ansible_version
334
338
 
335
339
 
340
+ def _enable_vendoring() -> None:
341
+ """Enable support for loading Python packages vendored by ansible-core."""
342
+ # Load the vendoring code by file path, since ansible may not be in our sys.path.
343
+ # Convert warnings into errors, to avoid problems from surfacing later.
344
+
345
+ with warnings.catch_warnings():
346
+ warnings.filterwarnings('error')
347
+
348
+ load_module(os.path.join(ANSIBLE_LIB_ROOT, '_vendor', '__init__.py'), 'ansible_vendor')
349
+
350
+
336
351
  @cache
337
352
  def get_available_python_versions(): # type: () -> t.Dict[str, str]
338
353
  """Return a dictionary indicating which supported Python versions are available."""
@@ -1133,3 +1148,5 @@ def type_guard(sequence: t.Sequence[t.Any], guard_type: t.Type[C]) -> TypeGuard[
1133
1148
 
1134
1149
 
1135
1150
  display = Display() # pylint: disable=locally-disabled, invalid-name
1151
+
1152
+ _enable_vendoring()
@@ -14,6 +14,9 @@ disable_error_code = misc
14
14
  [mypy-argcomplete]
15
15
  ignore_missing_imports = True
16
16
 
17
+ [mypy-argcomplete.finders]
18
+ ignore_missing_imports = True
19
+
17
20
  [mypy-coverage]
18
21
  ignore_missing_imports = True
19
22
 
@@ -25,9 +25,7 @@ import json
25
25
  import errno
26
26
  import os
27
27
  import re
28
- import subprocess
29
28
  import sys
30
- import tempfile
31
29
  import traceback
32
30
  import warnings
33
31
 
@@ -307,8 +305,8 @@ class ModuleValidator(Validator):
307
305
 
308
306
  ACCEPTLIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
309
307
 
310
- def __init__(self, path, analyze_arg_spec=False, collection=None, collection_version=None,
311
- base_branch=None, git_cache=None, reporter=None, routing=None, plugin_type='module'):
308
+ def __init__(self, path, git_cache: GitCache, analyze_arg_spec=False, collection=None, collection_version=None,
309
+ reporter=None, routing=None, plugin_type='module'):
312
310
  super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
313
311
 
314
312
  self.path = path
@@ -334,8 +332,8 @@ class ModuleValidator(Validator):
334
332
  self.collection_version_str = collection_version
335
333
  self.collection_version = SemanticVersion(collection_version)
336
334
 
337
- self.base_branch = base_branch
338
- self.git_cache = git_cache or GitCache()
335
+ self.git_cache = git_cache
336
+ self.base_module = self.git_cache.get_original_path(self.path)
339
337
 
340
338
  self._python_module_override = False
341
339
 
@@ -347,11 +345,6 @@ class ModuleValidator(Validator):
347
345
  except Exception:
348
346
  self.ast = None
349
347
 
350
- if base_branch:
351
- self.base_module = self._get_base_file()
352
- else:
353
- self.base_module = None
354
-
355
348
  def _create_version(self, v, collection_name=None):
356
349
  if not v:
357
350
  raise ValueError('Empty string is not a valid version')
@@ -374,13 +367,7 @@ class ModuleValidator(Validator):
374
367
  return self
375
368
 
376
369
  def __exit__(self, exc_type, exc_value, traceback):
377
- if not self.base_module:
378
- return
379
-
380
- try:
381
- os.remove(self.base_module)
382
- except Exception:
383
- pass
370
+ pass
384
371
 
385
372
  @property
386
373
  def object_name(self):
@@ -423,35 +410,9 @@ class ModuleValidator(Validator):
423
410
  except AttributeError:
424
411
  return False
425
412
 
426
- def _get_base_branch_module_path(self):
427
- """List all paths within lib/ansible/modules to try and match a moved module"""
428
- return self.git_cache.base_module_paths.get(self.object_name)
429
-
430
- def _has_alias(self):
431
- """Return true if the module has any aliases."""
432
- return self.object_name in self.git_cache.head_aliased_modules
433
-
434
- def _get_base_file(self):
435
- # In case of module moves, look for the original location
436
- base_path = self._get_base_branch_module_path()
437
-
438
- command = ['git', 'show', '%s:%s' % (self.base_branch, base_path or self.path)]
439
- p = subprocess.run(command, stdin=subprocess.DEVNULL, capture_output=True, check=False)
440
-
441
- if int(p.returncode) != 0:
442
- return None
443
-
444
- t = tempfile.NamedTemporaryFile(delete=False)
445
- t.write(p.stdout)
446
- t.close()
447
-
448
- return t.name
449
-
450
- def _is_new_module(self):
451
- if self._has_alias():
452
- return False
453
-
454
- return not self.object_name.startswith('_') and bool(self.base_branch) and not bool(self.base_module)
413
+ def _is_new_module(self) -> bool | None:
414
+ """Return True if the content is new, False if it is not and None if the information is not available."""
415
+ return self.git_cache.is_new(self.path)
455
416
 
456
417
  def _check_interpreter(self, powershell=False):
457
418
  if powershell:
@@ -2006,7 +1967,7 @@ class ModuleValidator(Validator):
2006
1967
  )
2007
1968
 
2008
1969
  def _check_for_new_args(self, doc):
2009
- if not self.base_branch or self._is_new_module():
1970
+ if not self.base_module:
2010
1971
  return
2011
1972
 
2012
1973
  with CaptureStd():
@@ -2238,7 +2199,7 @@ class ModuleValidator(Validator):
2238
2199
  # We can only validate PowerShell arg spec if it is using the new Ansible.Basic.AnsibleModule util
2239
2200
  pattern = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*Ansible\.Basic'
2240
2201
  if re.search(pattern, self.text) and self.object_name not in self.PS_ARG_VALIDATE_REJECTLIST:
2241
- with ModuleValidator(docs_path, base_branch=self.base_branch, git_cache=self.git_cache) as docs_mv:
2202
+ with ModuleValidator(docs_path, git_cache=self.git_cache) as docs_mv:
2242
2203
  docs = docs_mv._validate_docs()[1]
2243
2204
  self._validate_ansible_module_call(docs)
2244
2205
 
@@ -2283,6 +2244,84 @@ class PythonPackageValidator(Validator):
2283
2244
  )
2284
2245
 
2285
2246
 
2247
+ class GitCache(metaclass=abc.ABCMeta):
2248
+ """Base class for access to original files."""
2249
+ @abc.abstractmethod
2250
+ def get_original_path(self, path: str) -> str | None:
2251
+ """Return the path to the original version of the specified file, or None if there isn't one."""
2252
+
2253
+ @abc.abstractmethod
2254
+ def is_new(self, path: str) -> bool | None:
2255
+ """Return True if the content is new, False if it is not and None if the information is not available."""
2256
+
2257
+ @staticmethod
2258
+ def create(original_plugins: str | None, plugin_type: str) -> GitCache:
2259
+ return CoreGitCache(original_plugins, plugin_type) if original_plugins else NoOpGitCache()
2260
+
2261
+
2262
+ class CoreGitCache(GitCache):
2263
+ """Provides access to original files when testing core."""
2264
+ def __init__(self, original_plugins: str | None, plugin_type: str) -> None:
2265
+ super().__init__()
2266
+
2267
+ self.original_plugins = original_plugins
2268
+
2269
+ rel_path = 'lib/ansible/modules/' if plugin_type == 'module' else f'lib/ansible/plugins/{plugin_type}/'
2270
+ head_tree = self._find_files(rel_path)
2271
+
2272
+ head_aliased_modules = set()
2273
+
2274
+ for path in head_tree:
2275
+ filename = os.path.basename(path)
2276
+
2277
+ if filename.startswith('_') and filename != '__init__.py':
2278
+ if os.path.islink(path):
2279
+ head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
2280
+
2281
+ self._head_aliased_modules = head_aliased_modules
2282
+
2283
+ def get_original_path(self, path: str) -> str | None:
2284
+ """Return the path to the original version of the specified file, or None if there isn't one."""
2285
+ path = os.path.join(self.original_plugins, path)
2286
+
2287
+ if not os.path.exists(path):
2288
+ path = None
2289
+
2290
+ return path
2291
+
2292
+ def is_new(self, path: str) -> bool | None:
2293
+ """Return True if the content is new, False if it is not and None if the information is not available."""
2294
+ if os.path.basename(path).startswith('_'):
2295
+ return False
2296
+
2297
+ if os.path.basename(path) in self._head_aliased_modules:
2298
+ return False
2299
+
2300
+ return not self.get_original_path(path)
2301
+
2302
+ @staticmethod
2303
+ def _find_files(path: str) -> list[str]:
2304
+ """Return a list of files found in the specified directory."""
2305
+ paths = []
2306
+
2307
+ for (dir_path, dir_names, file_names) in os.walk(path):
2308
+ for file_name in file_names:
2309
+ paths.append(os.path.join(dir_path, file_name))
2310
+
2311
+ return sorted(paths)
2312
+
2313
+
2314
+ class NoOpGitCache(GitCache):
2315
+ """Provides a no-op interface for access to original files."""
2316
+ def get_original_path(self, path: str) -> str | None:
2317
+ """Return the path to the original version of the specified file, or None if there isn't one."""
2318
+ return None
2319
+
2320
+ def is_new(self, path: str) -> bool | None:
2321
+ """Return True if the content is new, False if it is not and None if the information is not available."""
2322
+ return None
2323
+
2324
+
2286
2325
  def re_compile(value):
2287
2326
  """
2288
2327
  Argparse expects things to raise TypeError, re.compile raises an re.error
@@ -2308,8 +2347,6 @@ def run():
2308
2347
  type=re_compile)
2309
2348
  parser.add_argument('--arg-spec', help='Analyze module argument spec',
2310
2349
  action='store_true', default=False)
2311
- parser.add_argument('--base-branch', default=None,
2312
- help='Used in determining if new options were added')
2313
2350
  parser.add_argument('--format', choices=['json', 'plain'], default='plain',
2314
2351
  help='Output format. Default: "%(default)s"')
2315
2352
  parser.add_argument('--output', default='-',
@@ -2326,13 +2363,14 @@ def run():
2326
2363
  parser.add_argument('--plugin-type',
2327
2364
  default='module',
2328
2365
  help='The plugin type to validate. Defaults to %(default)s')
2366
+ parser.add_argument('--original-plugins')
2329
2367
 
2330
2368
  args = parser.parse_args()
2331
2369
 
2332
2370
  args.plugins = [m.rstrip('/') for m in args.plugins]
2333
2371
 
2334
2372
  reporter = Reporter()
2335
- git_cache = GitCache(args.base_branch, args.plugin_type)
2373
+ git_cache = GitCache.create(args.original_plugins, args.plugin_type)
2336
2374
 
2337
2375
  check_dirs = set()
2338
2376
 
@@ -2357,7 +2395,7 @@ def run():
2357
2395
  if ModuleValidator.is_on_rejectlist(path):
2358
2396
  continue
2359
2397
  with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
2360
- analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
2398
+ analyze_arg_spec=args.arg_spec,
2361
2399
  git_cache=git_cache, reporter=reporter, routing=routing,
2362
2400
  plugin_type=args.plugin_type) as mv1:
2363
2401
  mv1.validate()
@@ -2382,7 +2420,7 @@ def run():
2382
2420
  if ModuleValidator.is_on_rejectlist(path):
2383
2421
  continue
2384
2422
  with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version,
2385
- analyze_arg_spec=args.arg_spec, base_branch=args.base_branch,
2423
+ analyze_arg_spec=args.arg_spec,
2386
2424
  git_cache=git_cache, reporter=reporter, routing=routing,
2387
2425
  plugin_type=args.plugin_type) as mv2:
2388
2426
  mv2.validate()
@@ -2398,78 +2436,6 @@ def run():
2398
2436
  sys.exit(reporter.json(warnings=args.warnings, output=args.output))
2399
2437
 
2400
2438
 
2401
- class GitCache:
2402
- def __init__(self, base_branch, plugin_type):
2403
- self.base_branch = base_branch
2404
- self.plugin_type = plugin_type
2405
-
2406
- self.rel_path = 'lib/ansible/modules/'
2407
- if plugin_type != 'module':
2408
- self.rel_path = 'lib/ansible/plugins/%s/' % plugin_type
2409
-
2410
- if self.base_branch:
2411
- self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, self.rel_path])
2412
- else:
2413
- self.base_tree = []
2414
-
2415
- try:
2416
- self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', self.rel_path])
2417
- except GitError as ex:
2418
- if ex.status == 128:
2419
- # fallback when there is no .git directory
2420
- self.head_tree = self._get_module_files()
2421
- else:
2422
- raise
2423
- except OSError as ex:
2424
- if ex.errno == errno.ENOENT:
2425
- # fallback when git is not installed
2426
- self.head_tree = self._get_module_files()
2427
- else:
2428
- raise
2429
-
2430
- allowed_exts = ('.py', '.ps1')
2431
- if plugin_type != 'module':
2432
- allowed_exts = ('.py', )
2433
- self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in allowed_exts)
2434
-
2435
- self.base_module_paths.pop('__init__.py', None)
2436
-
2437
- self.head_aliased_modules = set()
2438
-
2439
- for path in self.head_tree:
2440
- filename = os.path.basename(path)
2441
-
2442
- if filename.startswith('_') and filename != '__init__.py':
2443
- if os.path.islink(path):
2444
- self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
2445
-
2446
- def _get_module_files(self):
2447
- module_files = []
2448
-
2449
- for (dir_path, dir_names, file_names) in os.walk(self.rel_path):
2450
- for file_name in file_names:
2451
- module_files.append(os.path.join(dir_path, file_name))
2452
-
2453
- return module_files
2454
-
2455
- @staticmethod
2456
- def _git(args):
2457
- cmd = ['git'] + args
2458
- p = subprocess.run(cmd, stdin=subprocess.DEVNULL, capture_output=True, text=True, check=False)
2459
-
2460
- if p.returncode != 0:
2461
- raise GitError(p.stderr, p.returncode)
2462
-
2463
- return p.stdout.splitlines()
2464
-
2465
-
2466
- class GitError(Exception):
2467
- def __init__(self, message, status):
2468
- super(GitError, self).__init__(message)
2469
-
2470
- self.status = status
2471
-
2472
-
2473
2439
  def main():
2474
2440
  try:
2475
2441
  run()