ansible-core 2.19.0b5__py3-none-any.whl → 2.19.0b6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ansible/_internal/_ansiballz/__init__.py +0 -0
- ansible/_internal/_ansiballz/_builder.py +101 -0
- ansible/_internal/{_ansiballz.py → _ansiballz/_wrapper.py} +11 -11
- ansible/_internal/_templating/_jinja_bits.py +7 -4
- ansible/_internal/_templating/_jinja_plugins.py +5 -2
- ansible/_internal/_templating/_template_vars.py +72 -0
- ansible/_internal/_templating/_transform.py +6 -0
- ansible/_internal/_yaml/_constructor.py +4 -4
- ansible/_internal/_yaml/_dumper.py +26 -18
- ansible/cli/__init__.py +7 -12
- ansible/cli/arguments/option_helpers.py +1 -1
- ansible/cli/console.py +1 -1
- ansible/cli/doc.py +2 -2
- ansible/cli/inventory.py +5 -7
- ansible/config/base.yml +24 -0
- ansible/errors/__init__.py +2 -1
- ansible/executor/module_common.py +67 -39
- ansible/executor/process/worker.py +2 -2
- ansible/galaxy/api.py +1 -4
- ansible/galaxy/collection/__init__.py +1 -6
- ansible/galaxy/collection/concrete_artifact_manager.py +2 -8
- ansible/galaxy/role.py +2 -2
- ansible/module_utils/_internal/__init__.py +7 -4
- ansible/module_utils/_internal/_ansiballz/__init__.py +0 -0
- ansible/module_utils/_internal/_ansiballz/_extensions/__init__.py +0 -0
- ansible/module_utils/_internal/_ansiballz/_extensions/_coverage.py +45 -0
- ansible/module_utils/_internal/_ansiballz/_extensions/_pydevd.py +62 -0
- ansible/module_utils/_internal/{_ansiballz.py → _ansiballz/_loader.py} +10 -38
- ansible/module_utils/_internal/_ansiballz/_respawn.py +32 -0
- ansible/module_utils/_internal/_ansiballz/_respawn_wrapper.py +23 -0
- ansible/module_utils/_internal/_datatag/__init__.py +23 -1
- ansible/module_utils/_internal/_deprecator.py +27 -33
- ansible/module_utils/_internal/_json/_profiles/__init__.py +1 -0
- ansible/module_utils/_internal/_messages.py +26 -2
- ansible/module_utils/_internal/_plugin_info.py +14 -1
- ansible/module_utils/ansible_release.py +1 -1
- ansible/module_utils/basic.py +46 -56
- ansible/module_utils/common/respawn.py +4 -41
- ansible/module_utils/connection.py +8 -11
- ansible/module_utils/facts/hardware/linux.py +1 -1
- ansible/module_utils/facts/sysctl.py +4 -6
- ansible/module_utils/facts/system/caps.py +2 -2
- ansible/module_utils/facts/system/local.py +1 -1
- ansible/module_utils/facts/virtual/linux.py +1 -1
- ansible/module_utils/service.py +1 -1
- ansible/module_utils/urls.py +4 -4
- ansible/modules/apt_repository.py +10 -10
- ansible/modules/assemble.py +2 -2
- ansible/modules/async_wrapper.py +7 -17
- ansible/modules/command.py +3 -3
- ansible/modules/copy.py +4 -4
- ansible/modules/cron.py +1 -1
- ansible/modules/file.py +16 -17
- ansible/modules/find.py +3 -3
- ansible/modules/get_url.py +17 -0
- ansible/modules/git.py +9 -7
- ansible/modules/known_hosts.py +12 -14
- ansible/modules/package.py +6 -0
- ansible/modules/replace.py +2 -2
- ansible/modules/slurp.py +10 -13
- ansible/modules/stat.py +5 -7
- ansible/modules/unarchive.py +6 -6
- ansible/modules/user.py +1 -1
- ansible/modules/wait_for.py +28 -30
- ansible/modules/yum_repository.py +4 -3
- ansible/parsing/dataloader.py +2 -2
- ansible/parsing/vault/__init__.py +6 -10
- ansible/playbook/base.py +7 -2
- ansible/playbook/included_file.py +3 -1
- ansible/playbook/play_context.py +2 -0
- ansible/playbook/taggable.py +19 -5
- ansible/playbook/task.py +2 -0
- ansible/plugins/action/fetch.py +3 -3
- ansible/plugins/action/template.py +8 -2
- ansible/plugins/cache/__init__.py +17 -19
- ansible/plugins/callback/tree.py +5 -5
- ansible/plugins/connection/local.py +4 -4
- ansible/plugins/connection/paramiko_ssh.py +5 -5
- ansible/plugins/connection/ssh.py +8 -6
- ansible/plugins/connection/winrm.py +1 -1
- ansible/plugins/filter/core.py +19 -21
- ansible/plugins/filter/encryption.py +10 -2
- ansible/plugins/list.py +5 -4
- ansible/plugins/lookup/template.py +9 -4
- ansible/plugins/shell/powershell.py +3 -2
- ansible/plugins/shell/sh.py +3 -2
- ansible/plugins/strategy/__init__.py +3 -3
- ansible/plugins/test/core.py +2 -2
- ansible/release.py +1 -1
- ansible/template/__init__.py +9 -53
- ansible/utils/collection_loader/_collection_finder.py +3 -3
- ansible/utils/display.py +23 -12
- ansible/utils/galaxy.py +2 -2
- ansible/utils/hashing.py +6 -7
- ansible/utils/path.py +5 -7
- ansible/utils/py3compat.py +2 -1
- ansible/utils/ssh_functions.py +3 -2
- ansible/vars/plugins.py +3 -3
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/METADATA +1 -1
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/RECORD +117 -108
- ansible_test/_internal/commands/integration/coverage.py +7 -2
- ansible_test/_internal/host_profiles.py +62 -10
- ansible_test/_internal/provisioning.py +10 -4
- ansible_test/_internal/ssh.py +1 -5
- ansible_test/_internal/thread.py +2 -1
- ansible_test/_internal/timeout.py +1 -1
- ansible_test/_internal/util.py +20 -12
- ansible_test/_util/target/setup/requirements.py +3 -9
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/WHEEL +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/entry_points.txt +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/licenses/COPYING +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/licenses/licenses/Apache-License.txt +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/licenses/licenses/BSD-3-Clause.txt +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/licenses/licenses/MIT-license.txt +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/licenses/licenses/PSF-license.txt +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/licenses/licenses/simplified_bsd.txt +0 -0
- {ansible_core-2.19.0b5.dist-info → ansible_core-2.19.0b6.dist-info}/top_level.txt +0 -0
ansible/executor/module_common.py CHANGED
@@ -37,6 +37,8 @@ from ast import AST, Import, ImportFrom
 from io import BytesIO
 
 from ansible._internal import _locking
+from ansible._internal._ansiballz import _builder
+from ansible._internal import _ansiballz
 from ansible._internal._datatag import _utils
 from ansible.module_utils._internal import _dataclass_validation
 from ansible.module_utils.common.yaml import yaml_load
@@ -54,7 +56,8 @@ from ansible.plugins.loader import module_utils_loader
 from ansible._internal._templating._engine import TemplateOptions, TemplateEngine
 from ansible.template import Templar
 from ansible.utils.collection_loader._collection_finder import _get_collection_metadata, _nested_dict_get
-from ansible.module_utils._internal import _json
+from ansible.module_utils._internal import _json
+from ansible.module_utils._internal._ansiballz import _loader
 from ansible.module_utils import basic as _basic
 
 if t.TYPE_CHECKING:
@@ -117,7 +120,7 @@ def _strip_comments(source: str) -> str:
 
 
 def _read_ansiballz_code() -> str:
-    code = (pathlib.Path(__file__).parent
+    code = (pathlib.Path(_ansiballz.__file__).parent / '_wrapper.py').read_text()
 
     if not C.DEFAULT_KEEP_REMOTE_FILES:
         # Keep comments when KEEP_REMOTE_FILES is set. That way users will see
@@ -709,7 +712,14 @@ def _get_module_metadata(module: ast.Module) -> ModuleMetadata:
     return metadata
 
 
-def recursive_finder(
+def recursive_finder(
+    name: str,
+    module_fqn: str,
+    module_data: str | bytes,
+    zf: zipfile.ZipFile,
+    date_time: datetime.datetime,
+    extension_manager: _builder.ExtensionManager,
+) -> ModuleMetadata:
     """
     Using ModuleDepFinder, make sure we have all of the module_utils files that
     the module and its module_utils files needs. (no longer actually recursive)
@@ -755,12 +765,14 @@ def recursive_finder(name: str, module_fqn: str, module_data: str | bytes, zf: z
 
     # include module_utils that are always required
     modules_to_process.extend((
-        _ModuleUtilsProcessEntry.from_module(
+        _ModuleUtilsProcessEntry.from_module(_loader),
         _ModuleUtilsProcessEntry.from_module(_basic),
         _ModuleUtilsProcessEntry.from_module_name(_json.get_module_serialization_profile_module_name(profile, True)),
         _ModuleUtilsProcessEntry.from_module_name(_json.get_module_serialization_profile_module_name(profile, False)),
     ))
 
+    modules_to_process.extend(_ModuleUtilsProcessEntry.from_module_name(name) for name in extension_manager.module_names)
+
     module_info: ModuleUtilLocatorBase
 
     # we'll be adding new modules inline as we discover them, so just keep going til we've processed them all
@@ -815,12 +827,13 @@ def recursive_finder(name: str, module_fqn: str, module_data: str | bytes, zf: z
         modules_to_process.append(_ModuleUtilsProcessEntry(normalized_name, False, module_info.redirected, is_optional=entry.is_optional))
 
     for py_module_name in py_module_cache:
-        py_module_file_name = py_module_cache[py_module_name]
+        source_code, py_module_file_name = py_module_cache[py_module_name]
+
+        zf.writestr(_make_zinfo(py_module_file_name, date_time, zf=zf), source_code)
+
+        if extension_manager.debugger_enabled and (origin := Origin.get_tag(source_code)) and origin.path:
+            extension_manager.source_mapping[origin.path] = py_module_file_name
 
-        zf.writestr(
-            _make_zinfo(py_module_file_name, date_time, zf=zf),
-            py_module_cache[py_module_name][0]
-        )
         mu_file = to_text(py_module_file_name, errors='surrogate_or_strict')
         display.vvvvv("Including module_utils file %s" % mu_file)
 
@@ -879,17 +892,27 @@ def _get_ansible_module_fqn(module_path):
     return remote_module_fqn
 
 
-def _add_module_to_zip(
+def _add_module_to_zip(
+    zf: zipfile.ZipFile,
+    date_time: datetime.datetime,
+    remote_module_fqn: str,
+    b_module_data: bytes,
+    module_path: str,
+    extension_manager: _builder.ExtensionManager,
+) -> None:
     """Add a module from ansible or from an ansible collection into the module zip"""
     module_path_parts = remote_module_fqn.split('.')
 
     # Write the module
-
+    zip_module_path = '/'.join(module_path_parts) + '.py'
     zf.writestr(
-        _make_zinfo(
+        _make_zinfo(zip_module_path, date_time, zf=zf),
        b_module_data
     )
 
+    if extension_manager.debugger_enabled:
+        extension_manager.source_mapping[module_path] = zip_module_path
+
     existing_paths: frozenset[str]
 
     # Write the __init__.py's necessary to get there
@@ -932,6 +955,8 @@ class _CachedModule:
 
     zip_data: bytes
     metadata: ModuleMetadata
+    source_mapping: dict[str, str]
+    """A mapping of controller absolute source locations to target relative source locations within the AnsiballZ payload."""
 
     def dump(self, path: str) -> None:
         temp_path = pathlib.Path(path + '-part')
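The new `source_mapping` field is what later lets the debugger extension translate controller paths into paths inside the remote payload. A rough sketch of the shape such a mapping might take; the paths below are invented for illustration and are not taken from the diff:

```python
# Hypothetical _CachedModule.source_mapping value: controller-side absolute paths
# mapped to their relative locations inside the AnsiballZ zip payload.
source_mapping = {
    '/usr/lib/python3/site-packages/ansible/modules/ping.py': 'ansible/modules/ping.py',
    '/usr/lib/python3/site-packages/ansible/module_utils/basic.py': 'ansible/module_utils/basic.py',
}
```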
@@ -1029,6 +1054,7 @@ def _find_module_utils(
 
     if module_substyle == 'python':
         date_time = datetime.datetime.now(datetime.timezone.utc)
+
        if date_time.year < 1980:
            raise AnsibleError(f'Cannot create zipfile due to pre-1980 configured date: {date_time}')
 
@@ -1038,19 +1064,19 @@ def _find_module_utils(
            display.warning(u'Bad module compression string specified: %s. Using ZIP_STORED (no compression)' % module_compression)
            compression_method = zipfile.ZIP_STORED
 
+        extension_manager = _builder.ExtensionManager.create(task_vars=task_vars)
+        extension_key = '~'.join(extension_manager.extension_names) if extension_manager.extension_names else 'none'
        lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ansiballz_cache') # type: ignore[attr-defined]
-        cached_module_filename = os.path.join(lookup_path,
+        cached_module_filename = os.path.join(lookup_path, '-'.join((remote_module_fqn, module_compression, extension_key)))
 
        os.makedirs(os.path.dirname(cached_module_filename), exist_ok=True)
 
-
-        module_metadata: ModuleMetadata | None = None
+        cached_module: _CachedModule | None = None
 
        # Optimization -- don't lock if the module has already been cached
        if os.path.exists(cached_module_filename):
            display.debug('ANSIBALLZ: using cached module: %s' % cached_module_filename)
            cached_module = _CachedModule.load(cached_module_filename)
-            zipdata, module_metadata = cached_module.zip_data, cached_module.metadata
        else:
            display.debug('ANSIBALLZ: Acquiring lock')
            lock_path = f'{cached_module_filename}.lock'
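Because the cache file name now folds in the names of the active extensions, a payload built with debugging or coverage enabled can no longer be confused with a plain cached payload. A quick illustration of the key construction shown above; the concrete values are made up, only the two join expressions come from the diff:

```python
# Illustration of the AnsiballZ cache key built in _find_module_utils above.
remote_module_fqn = 'ansible.builtin.ping'        # hypothetical module
module_compression = 'ZIP_DEFLATED'               # hypothetical compression setting
extension_names = ['_coverage', '_pydevd']        # ExtensionManager.extension_names

extension_key = '~'.join(extension_names) if extension_names else 'none'
cache_file_name = '-'.join((remote_module_fqn, module_compression, extension_key))

print(cache_file_name)  # ansible.builtin.ping-ZIP_DEFLATED-_coverage~_pydevd
```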
@@ -1065,35 +1091,40 @@ def _find_module_utils(
                zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
 
                # walk the module imports, looking for module_utils to send- they'll be added to the zipfile
-                module_metadata = recursive_finder(
+                module_metadata = recursive_finder(
+                    module_name,
+                    remote_module_fqn,
+                    Origin(path=module_path).tag(b_module_data),
+                    zf,
+                    date_time,
+                    extension_manager,
+                )
 
                display.debug('ANSIBALLZ: Writing module into payload')
-                _add_module_to_zip(zf, date_time, remote_module_fqn, b_module_data)
+                _add_module_to_zip(zf, date_time, remote_module_fqn, b_module_data, module_path, extension_manager)
 
                zf.close()
-
+                zip_data = base64.b64encode(zipoutput.getvalue())
 
                # Write the assembled module to a temp file (write to temp
                # so that no one looking for the file reads a partially
                # written file)
                os.makedirs(lookup_path, exist_ok=True)
                display.debug('ANSIBALLZ: Writing module')
-                cached_module = _CachedModule(zip_data=
+                cached_module = _CachedModule(zip_data=zip_data, metadata=module_metadata, source_mapping=extension_manager.source_mapping)
                cached_module.dump(cached_module_filename)
                display.debug('ANSIBALLZ: Done creating module')
 
-        if not
+        if not cached_module:
            display.debug('ANSIBALLZ: Reading module after lock')
            # Another process wrote the file while we were waiting for
            # the write lock. Go ahead and read the data from disk
            # instead of re-creating it.
            try:
                cached_module = _CachedModule.load(cached_module_filename)
-            except
+            except OSError as ex:
                raise AnsibleError('A different worker process failed to create module file. '
-                                   'Look at traceback for that process for debugging information.')
-
-        zipdata, module_metadata = cached_module.zip_data, cached_module.metadata
+                                   'Look at traceback for that process for debugging information.') from ex
 
        o_interpreter, o_args = _extract_interpreter(b_module_data)
        if o_interpreter is None:
@@ -1107,40 +1138,36 @@ def _find_module_utils(
        if not isinstance(rlimit_nofile, int):
            rlimit_nofile = int(templar._engine.template(rlimit_nofile, options=TemplateOptions(value_for_omit=0)))
 
-
-
-        if coverage_config:
-            coverage_output = os.environ['_ANSIBLE_COVERAGE_OUTPUT']
-        else:
-            coverage_output = None
-
-        if not isinstance(module_metadata, ModuleMetadataV1):
+        if not isinstance(cached_module.metadata, ModuleMetadataV1):
            raise NotImplementedError()
 
        params = dict(ANSIBLE_MODULE_ARGS=module_args,)
-        encoder = get_module_encoder(
+        encoder = get_module_encoder(cached_module.metadata.serialization_profile, Direction.CONTROLLER_TO_MODULE)
+
        try:
            encoded_params = json.dumps(params, cls=encoder)
        except TypeError as ex:
            raise AnsibleError(f'Failed to serialize arguments for the {module_name!r} module.') from ex
 
+        extension_manager.source_mapping = cached_module.source_mapping
+
        code = _get_ansiballz_code(shebang)
        args = dict(
-            zipdata=to_text(zipdata),
            ansible_module=module_name,
            module_fqn=remote_module_fqn,
-
-            profile=module_metadata.serialization_profile,
+            profile=cached_module.metadata.serialization_profile,
            date_time=date_time,
-            coverage_config=coverage_config,
-            coverage_output=coverage_output,
            rlimit_nofile=rlimit_nofile,
+            params=encoded_params,
+            extensions=extension_manager.get_extensions(),
+            zip_data=to_text(cached_module.zip_data),
        )
 
        args_string = '\n'.join(f'{key}={value!r},' for key, value in args.items())
 
        wrapper = f"""{code}
 
+
 if __name__ == "__main__":
    _ansiballz_main(
 {args_string}
@@ -1149,6 +1176,7 @@ if __name__ == "__main__":
 
        output.write(to_bytes(wrapper))
 
+        module_metadata = cached_module.metadata
        b_module_data = output.getvalue()
 
    elif module_substyle == 'powershell':
ansible/executor/process/worker.py CHANGED
@@ -138,8 +138,8 @@ class WorkerProcess(multiprocessing_context.Process): # type: ignore[name-defined]
            try:
                display.debug(u"WORKER HARD EXIT: %s" % to_text(e))
            except BaseException:
-                # If the cause of the fault is
-                # attempting to log a debug message may trigger another
+                # If the cause of the fault is OSError being generated by stdio,
+                # attempting to log a debug message may trigger another OSError.
                # Try printing once then give up.
                pass
 
ansible/galaxy/api.py CHANGED
@@ -337,10 +337,7 @@ class GalaxyAPI:
        if not isinstance(other_galaxy_api, self.__class__):
            return NotImplemented
 
-        return (
-            self._priority > other_galaxy_api._priority or
-            self.name < self.name
-        )
+        return self._priority > other_galaxy_api._priority
 
    @property # type: ignore[misc] # https://github.com/python/mypy/issues/1362
    @g_connect(['v1', 'v2', 'v3'])
ansible/galaxy/collection/__init__.py CHANGED
@@ -5,7 +5,6 @@
 
 from __future__ import annotations
 
-import errno
 import fnmatch
 import functools
 import glob
@@ -1689,11 +1688,7 @@ def _extract_tar_dir(tar, dirname, b_dest):
    b_dir_path = os.path.join(b_dest, to_bytes(dirname, errors='surrogate_or_strict'))
 
    b_parent_path = os.path.dirname(b_dir_path)
-    try:
-        os.makedirs(b_parent_path, mode=S_IRWXU_RXG_RXO)
-    except OSError as e:
-        if e.errno != errno.EEXIST:
-            raise
+    os.makedirs(b_parent_path, mode=S_IRWXU_RXG_RXO, exist_ok=True)
 
    if tar_member.type == tarfile.SYMTYPE:
        b_link_path = to_bytes(tar_member.linkname, errors='surrogate_or_strict')
ansible/galaxy/collection/concrete_artifact_manager.py CHANGED
@@ -656,14 +656,8 @@ def _get_json_from_installed_dir(
    try:
        with open(b_json_filepath, 'rb') as manifest_fd:
            b_json_text = manifest_fd.read()
-    except
-        raise LookupError(
-            "The collection {manifest!s} path '{path!s}' does not exist.".
-            format(
-                manifest=filename,
-                path=to_native(b_json_filepath),
-            )
-        )
+    except OSError as ex:
+        raise LookupError(f"The collection {filename!r} path {to_text(b_json_filepath)!r} does not exist.") from ex
 
    manifest_txt = to_text(b_json_text, errors='surrogate_or_strict')
 
ansible/galaxy/role.py CHANGED
@@ -438,8 +438,8 @@ class GalaxyRole(object):
        if not (self.src and os.path.isfile(self.src)):
            try:
                os.unlink(tmp_file)
-            except
-                display.
+            except OSError as ex:
+                display.error_as_warning(f"Unable to remove tmp file {tmp_file!r}.", exception=ex)
            return True
 
        return False
ansible/module_utils/_internal/__init__.py CHANGED
@@ -4,6 +4,9 @@ import collections.abc as c
 
 import typing as t
 
+if t.TYPE_CHECKING:
+    from ansible.module_utils.compat.typing import TypeGuard
+
 
 INTERMEDIATE_MAPPING_TYPES = (c.Mapping,)
 """
@@ -18,18 +21,18 @@ These will be converted to a simple Python `list` before serialization or storage.
 CAUTION: Scalar types which are sequences should be excluded when using this.
 """
 
-
+ITERABLE_SCALARS_NOT_TO_ITERATE = (str, bytes)
 """Scalars which are also iterable, and should thus be excluded from iterable checks."""
 
 
-def is_intermediate_mapping(value: object) ->
+def is_intermediate_mapping(value: object) -> TypeGuard[c.Mapping]:
    """Returns `True` if `value` is a type supported for projection to a Python `dict`, otherwise returns `False`."""
    return isinstance(value, INTERMEDIATE_MAPPING_TYPES)
 
 
-def is_intermediate_iterable(value: object) ->
+def is_intermediate_iterable(value: object) -> TypeGuard[c.Iterable]:
    """Returns `True` if `value` is a type supported for projection to a Python `list`, otherwise returns `False`."""
-    return isinstance(value, INTERMEDIATE_ITERABLE_TYPES) and not isinstance(value,
+    return isinstance(value, INTERMEDIATE_ITERABLE_TYPES) and not isinstance(value, ITERABLE_SCALARS_NOT_TO_ITERATE)
 
 
 is_controller: bool = False
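Switching the return annotations from a plain `bool` to `TypeGuard` lets static type checkers narrow the checked value for callers. A minimal standalone sketch of that pattern, not the ansible-core helpers themselves, assuming Python 3.10+ for `typing.TypeGuard`:

```python
from __future__ import annotations

import collections.abc as c
import typing as t


def is_mapping(value: object) -> t.TypeGuard[c.Mapping]:
    """Return True when value is a Mapping; type checkers narrow value accordingly."""
    return isinstance(value, c.Mapping)


def describe(value: object) -> str:
    if is_mapping(value):
        # Inside this branch a type checker treats value as c.Mapping,
        # so mapping-only attributes are accepted without a cast.
        return f'mapping with {len(value)} entries'
    return 'not a mapping'


print(describe({'a': 1}))   # mapping with 1 entries
print(describe([1, 2, 3]))  # not a mapping
```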
ansible/module_utils/_internal/_ansiballz/__init__.py ADDED (file without changes)
ansible/module_utils/_internal/_ansiballz/_extensions/__init__.py ADDED (file without changes)
ansible/module_utils/_internal/_ansiballz/_extensions/_coverage.py ADDED
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import atexit
+import dataclasses
+import importlib.util
+import os
+import sys
+
+import typing as t
+
+
+@dataclasses.dataclass(frozen=True)
+class Options:
+    """Code coverage options."""
+
+    config: str
+    output: str | None
+
+
+def run(args: dict[str, t.Any]) -> None: # pragma: nocover
+    """Bootstrap `coverage` for the current Ansible module invocation."""
+    options = Options(**args)
+
+    if options.output:
+        # Enable code coverage analysis of the module.
+        # This feature is for internal testing and may change without notice.
+        python_version_string = '.'.join(str(v) for v in sys.version_info[:2])
+        os.environ['COVERAGE_FILE'] = f'{options.output}=python-{python_version_string}=coverage'
+
+        import coverage
+
+        cov = coverage.Coverage(config_file=options.config)
+
+        def atexit_coverage() -> None:
+            cov.stop()
+            cov.save()
+
+        atexit.register(atexit_coverage)
+
+        cov.start()
+    else:
+        # Verify coverage is available without importing it.
+        # This will detect when a module would fail with coverage enabled with minimal overhead.
+        if importlib.util.find_spec('coverage') is None:
+            raise RuntimeError('Could not find the `coverage` Python module.')
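Each extension module exposes a `run(args)` entry point that the AnsiballZ loader calls with a plain dict matching the extension's `Options` dataclass. A hedged sketch of invoking this extension directly; in practice the generated wrapper supplies these values, and the paths below are invented:

```python
# Hypothetical direct invocation of the coverage extension (requires ansible-core 2.19.0b6
# and, for a non-None 'output', the `coverage` package).
from ansible.module_utils._internal._ansiballz._extensions import _coverage

_coverage.run({
    'config': '/tmp/ansible-test/coveragerc',       # maps to Options.config
    'output': '/tmp/ansible-test/output/coverage',  # maps to Options.output; None only verifies coverage is importable
})
```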
ansible/module_utils/_internal/_ansiballz/_extensions/_pydevd.py ADDED
@@ -0,0 +1,62 @@
+"""
+Remote debugging support for AnsiballZ modules.
+
+To use with PyCharm:
+
+1) Choose an available port for PyCharm to listen on (e.g. 5678).
+2) Create a Python Debug Server using that port.
+3) Start the Python Debug Server.
+4) Ensure the correct version of `pydevd-pycharm` is installed for the interpreter(s) which will run the code being debugged.
+5) Configure Ansible with the `_ANSIBALLZ_DEBUGGER_CONFIG` option.
+   See `Options` below for the structure of the debugger configuration.
+   Example configuration using an environment variable:
+     export _ANSIBLE_ANSIBALLZ_DEBUGGER_CONFIG='{"module": "pydevd_pycharm", "settrace": {"host": "localhost", "port": 5678, "suspend": false}}'
+6) Set any desired breakpoints.
+7) Run Ansible commands.
+
+A similar process should work for other pydevd based debuggers, such as Visual Studio Code, but they have not been tested.
+"""
+
+from __future__ import annotations
+
+import dataclasses
+import importlib
+import json
+import os
+import pathlib
+
+import typing as t
+
+
+@dataclasses.dataclass(frozen=True)
+class Options:
+    """Debugger options for pydevd and its derivatives."""
+
+    module: str = 'pydevd'
+    """The Python module which will be imported and which provides the `settrace` method."""
+    settrace: dict[str, object] = dataclasses.field(default_factory=dict)
+    """The options to pass to the `{module}.settrace` method."""
+    source_mapping: dict[str, str] = dataclasses.field(default_factory=dict)
+    """
+    A mapping of source paths to provide to pydevd.
+    This setting is used internally by AnsiballZ and is not required unless Ansible CLI commands are run from a different system than your IDE.
+    In that scenario, use this setting instead of configuring source mapping in your IDE.
+    The key is a path known to the IDE.
+    The value is the same path as known to the Ansible CLI.
+    Both file paths and directories are supported.
+    """
+
+
+def run(args: dict[str, t.Any]) -> None: # pragma: nocover
+    """Enable remote debugging."""
+
+    options = Options(**args)
+    temp_dir = pathlib.Path(__file__).parent.parent.parent.parent.parent.parent
+    path_mapping = [[key, str(temp_dir / value)] for key, value in options.source_mapping.items()]
+
+    os.environ['PATHS_FROM_ECLIPSE_TO_PYTHON'] = json.dumps(path_mapping)
+
+    debugging_module = importlib.import_module(options.module)
+    debugging_module.settrace(**options.settrace)
+
+    pass  # when suspend is True, execution pauses here -- it's also a convenient place to put a breakpoint
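The debugger configuration is just JSON that gets splatted into `Options`. A hedged sketch of what the example `_ANSIBLE_ANSIBALLZ_DEBUGGER_CONFIG` value from the docstring above turns into; the `Options` class here is a stand-in mirroring the one in the new file, not an import of it:

```python
import dataclasses
import json

# Parse the example configuration from the _pydevd.py docstring above.
config = json.loads(
    '{"module": "pydevd_pycharm", "settrace": {"host": "localhost", "port": 5678, "suspend": false}}'
)


@dataclasses.dataclass(frozen=True)
class Options:  # stand-in for the Options dataclass defined in _pydevd.py
    module: str = 'pydevd'
    settrace: dict = dataclasses.field(default_factory=dict)
    source_mapping: dict = dataclasses.field(default_factory=dict)


options = Options(**config)
print(options.module)    # pydevd_pycharm
print(options.settrace)  # {'host': 'localhost', 'port': 5678, 'suspend': False}
```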
ansible/module_utils/_internal/{_ansiballz.py → _ansiballz/_loader.py} RENAMED
@@ -5,17 +5,15 @@
 
 from __future__ import annotations
 
-import
-import importlib.util
+import importlib
 import json
-import os
 import runpy
 import sys
 import typing as t
 
-from . import
-from
-from
+from ansible.module_utils import basic
+from ansible.module_utils._internal import _errors, _traceback, _messages, _ansiballz
+from ansible.module_utils.common.json import get_module_encoder, Direction
 
 
 def run_module(
@@ -24,13 +22,16 @@ def run_module(
    profile: str,
    module_fqn: str,
    modlib_path: str,
+    extensions: dict[str, dict[str, object]],
    init_globals: dict[str, t.Any] | None = None,
-    coverage_config: str | None = None,
-    coverage_output: str | None = None,
 ) -> None: # pragma: nocover
    """Used internally by the AnsiballZ wrapper to run an Ansible module."""
    try:
-
+        for extension, args in extensions.items():
+            # importing _ansiballz instead of _extensions avoids an unnecessary import when extensions are not in use
+            extension_module = importlib.import_module(f'{_ansiballz.__name__}._extensions.{extension}')
+            extension_module.run(args)
+
        _run_module(
            json_params=json_params,
            profile=profile,
@@ -42,35 +43,6 @@ def run_module(
        _handle_exception(ex, profile)
 
 
-def _enable_coverage(coverage_config: str | None, coverage_output: str | None) -> None: # pragma: nocover
-    """Bootstrap `coverage` for the current Ansible module invocation."""
-    if not coverage_config:
-        return
-
-    if coverage_output:
-        # Enable code coverage analysis of the module.
-        # This feature is for internal testing and may change without notice.
-        python_version_string = '.'.join(str(v) for v in sys.version_info[:2])
-        os.environ['COVERAGE_FILE'] = f'{coverage_output}=python-{python_version_string}=coverage'
-
-        import coverage
-
-        cov = coverage.Coverage(config_file=coverage_config)
-
-        def atexit_coverage():
-            cov.stop()
-            cov.save()
-
-        atexit.register(atexit_coverage)
-
-        cov.start()
-    else:
-        # Verify coverage is available without importing it.
-        # This will detect when a module would fail with coverage enabled with minimal overhead.
-        if importlib.util.find_spec('coverage') is None:
-            raise RuntimeError('Could not find the `coverage` Python module.')
-
-
 def _run_module(
    *,
    json_params: bytes,
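The `extensions` argument replaces the old coverage-specific parameters: the generated wrapper passes one entry per enabled extension, keyed by the extension module name under `_ansiballz._extensions`, and the value is handed straight to that extension's `run()`. A hedged sketch of what such a dict might look like; the two extension names come from this diff, but every option value below is invented:

```python
# Hypothetical `extensions` value passed to run_module(); values are examples only.
extensions = {
    '_coverage': {
        'config': '/tmp/ansible-test/coveragerc',
        'output': '/tmp/ansible-test/output/coverage',
    },
    '_pydevd': {
        'module': 'pydevd_pycharm',
        'settrace': {'host': 'localhost', 'port': 5678, 'suspend': False},
        'source_mapping': {},
    },
}

# run_module() imports ansible.module_utils._internal._ansiballz._extensions.<name>
# for each key and calls its run() with the corresponding dict.
```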
ansible/module_utils/_internal/_ansiballz/_respawn.py ADDED
@@ -0,0 +1,32 @@
+from __future__ import annotations
+
+import inspect
+import sys
+
+from ... import basic
+from . import _respawn_wrapper
+
+
+def create_payload() -> str:
+    """Create and return an AnsiballZ payload for respawning a module."""
+    main = sys.modules['__main__']
+    code = inspect.getsource(_respawn_wrapper)
+
+    args = dict(
+        module_fqn=main._module_fqn,
+        modlib_path=main._modlib_path,
+        profile=basic._ANSIBLE_PROFILE,
+        json_params=basic._ANSIBLE_ARGS,
+    )
+
+    args_string = '\n'.join(f'{key}={value!r},' for key, value in args.items())
+
+    wrapper = f"""{code}
+
+if __name__ == "__main__":
+    _respawn_main(
+{args_string}
+    )
+"""
+
+    return wrapper
ansible/module_utils/_internal/_ansiballz/_respawn_wrapper.py ADDED
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+
+def _respawn_main(
+    json_params: bytes,
+    profile: str,
+    module_fqn: str,
+    modlib_path: str,
+) -> None:
+    import sys
+
+    sys.path.insert(0, modlib_path)
+
+    from ansible.module_utils._internal._ansiballz import _loader
+
+    _loader.run_module(
+        json_params=json_params,
+        profile=profile,
+        module_fqn=module_fqn,
+        modlib_path=modlib_path,
+        extensions={},
+        init_globals=dict(_respawned=True),
+    )
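Taken together, `create_payload()` embeds the source of `_respawn_wrapper` shown above and appends a `__main__` block carrying the original module's parameters, so a respawned interpreter re-enters the module through `_loader.run_module()`. A standalone sketch of that "embed source, append a `__main__` call with repr-rendered arguments" technique, using a stand-in function rather than the real wrapper module:

```python
# Standalone illustration of the payload-building technique used by create_payload().
import inspect


def _demo_wrapper(greeting: str) -> None:  # stands in for _respawn_wrapper._respawn_main
    print(greeting)


def build_payload(**args: object) -> str:
    code = inspect.getsource(_demo_wrapper)
    args_string = '\n'.join(f'{key}={value!r},' for key, value in args.items())
    return f"""{code}

if __name__ == "__main__":
    _demo_wrapper(
{args_string}
    )
"""


payload = build_payload(greeting='hello from the respawned interpreter')
exec(payload, {'__name__': '__main__'})  # prints the greeting
```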
ansible/module_utils/_internal/_datatag/__init__.py CHANGED
@@ -5,6 +5,7 @@ import collections.abc as c
 import copy
 import dataclasses
 import datetime
+import enum
 import inspect
 import sys
 
@@ -216,7 +217,7 @@ class AnsibleTagHelper:
        return value
 
 
-class AnsibleSerializable(metaclass=abc.ABCMeta):
+class AnsibleSerializable:
    __slots__ = _NO_INSTANCE_STORAGE
 
    _known_type_map: t.ClassVar[t.Dict[str, t.Type['AnsibleSerializable']]] = {}
@@ -274,6 +275,27 @@ class AnsibleSerializable(metaclass=abc.ABCMeta):
        return f'{name}({arg_string})'
 
 
+class AnsibleSerializableEnum(AnsibleSerializable, enum.Enum):
+    """Base class for serializable enumerations."""
+
+    def _as_dict(self) -> t.Dict[str, t.Any]:
+        return dict(value=self.value)
+
+    @classmethod
+    def _from_dict(cls, d: t.Dict[str, t.Any]) -> t.Self:
+        return cls(d['value'].lower())
+
+    def __str__(self) -> str:
+        return self.value
+
+    def __repr__(self) -> str:
+        return f'<{self.__class__.__name__}.{self.name}>'
+
+    @staticmethod
+    def _generate_next_value_(name, start, count, last_values):
+        return name.lower()
+
+
 class AnsibleSerializableWrapper(AnsibleSerializable, t.Generic[_T], metaclass=abc.ABCMeta):
    __slots__ = ('_value',)
 
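The new `AnsibleSerializableEnum` base serializes members by value and lowercases auto-generated values. A standalone sketch of the same round-trip pattern, using a stand-in base class rather than the internal ansible-core one (the `Severity` members are invented for illustration):

```python
from __future__ import annotations

import enum
import typing as t


class SerializableEnum(enum.Enum):
    """Stand-in mirroring the AnsibleSerializableEnum pattern from the diff above."""

    def _as_dict(self) -> dict[str, t.Any]:
        return dict(value=self.value)

    @classmethod
    def _from_dict(cls, d: dict[str, t.Any]) -> SerializableEnum:
        return cls(d['value'].lower())

    @staticmethod
    def _generate_next_value_(name, start, count, last_values):
        return name.lower()  # auto() members get their lowercased name as the value


class Severity(SerializableEnum):
    WARNING = enum.auto()  # value == 'warning'
    ERROR = enum.auto()    # value == 'error'


assert Severity.ERROR._as_dict() == {'value': 'error'}
assert Severity._from_dict({'value': 'error'}) is Severity.ERROR
```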