ansible-core 2.19.0rc2__py3-none-any.whl → 2.19.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ansible-core might be problematic.

Files changed (46)
  1. ansible/_internal/_ansiballz/_builder.py +25 -14
  2. ansible/_internal/_templating/_engine.py +6 -4
  3. ansible/_internal/_templating/_jinja_bits.py +3 -1
  4. ansible/_internal/_templating/_jinja_plugins.py +7 -2
  5. ansible/_internal/_templating/_lazy_containers.py +5 -5
  6. ansible/config/base.yml +16 -6
  7. ansible/config/manager.py +7 -3
  8. ansible/executor/task_executor.py +4 -1
  9. ansible/executor/task_queue_manager.py +2 -2
  10. ansible/module_utils/_internal/_ansiballz/_extensions/_debugpy.py +97 -0
  11. ansible/module_utils/_internal/_ansiballz/_extensions/_pydevd.py +2 -4
  12. ansible/module_utils/_internal/_traceback.py +1 -1
  13. ansible/module_utils/ansible_release.py +1 -1
  14. ansible/module_utils/basic.py +10 -2
  15. ansible/module_utils/common/validation.py +4 -1
  16. ansible/modules/dnf.py +36 -50
  17. ansible/modules/dnf5.py +36 -29
  18. ansible/modules/meta.py +2 -1
  19. ansible/modules/service_facts.py +5 -1
  20. ansible/playbook/helpers.py +1 -0
  21. ansible/playbook/taggable.py +1 -2
  22. ansible/plugins/__init__.py +18 -10
  23. ansible/plugins/callback/__init__.py +6 -1
  24. ansible/plugins/lookup/template.py +6 -1
  25. ansible/release.py +1 -1
  26. ansible/utils/encrypt.py +2 -0
  27. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/METADATA +1 -1
  28. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/RECORD +46 -45
  29. ansible_test/_internal/commands/integration/coverage.py +2 -2
  30. ansible_test/_internal/commands/shell/__init__.py +67 -28
  31. ansible_test/_internal/coverage_util.py +28 -25
  32. ansible_test/_internal/debugging.py +337 -49
  33. ansible_test/_internal/host_profiles.py +43 -43
  34. ansible_test/_internal/metadata.py +7 -42
  35. ansible_test/_internal/python_requirements.py +2 -2
  36. ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg +1 -0
  37. ansible_test/_util/target/setup/bootstrap.sh +37 -16
  38. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/WHEEL +0 -0
  39. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/entry_points.txt +0 -0
  40. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/licenses/COPYING +0 -0
  41. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/licenses/licenses/Apache-License.txt +0 -0
  42. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/licenses/licenses/BSD-3-Clause.txt +0 -0
  43. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/licenses/licenses/MIT-license.txt +0 -0
  44. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/licenses/licenses/PSF-license.txt +0 -0
  45. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/licenses/licenses/simplified_bsd.txt +0 -0
  46. {ansible_core-2.19.0rc2.dist-info → ansible_core-2.19.1rc1.dist-info}/top_level.txt +0 -0
ansible/_internal/_ansiballz/_builder.py CHANGED
@@ -6,7 +6,7 @@ import json
  import typing as t

  from ansible.module_utils._internal._ansiballz import _extensions
- from ansible.module_utils._internal._ansiballz._extensions import _pydevd, _coverage
+ from ansible.module_utils._internal._ansiballz._extensions import _debugpy, _pydevd, _coverage
  from ansible.constants import config

  _T = t.TypeVar('_T')
@@ -17,15 +17,18 @@ class ExtensionManager:

      def __init__(
          self,
-         debugger: _pydevd.Options | None = None,
+         pydevd: _pydevd.Options | None = None,
+         debugpy: _debugpy.Options | None = None,
          coverage: _coverage.Options | None = None,
      ) -> None:
          options = dict(
-             _pydevd=debugger,
+             _pydevd=pydevd,
+             _debugpy=debugpy,
              _coverage=coverage,
          )

-         self._debugger = debugger
+         self._pydevd = pydevd
+         self._debugpy = debugpy
          self._coverage = coverage
          self._extension_names = tuple(name for name, option in options.items() if option)
          self._module_names = tuple(f'{_extensions.__name__}.{name}' for name in self._extension_names)
@@ -35,7 +38,7 @@ class ExtensionManager:
      @property
      def debugger_enabled(self) -> bool:
          """Returns True if the debugger extension is enabled, otherwise False."""
-         return bool(self._debugger)
+         return bool(self._pydevd or self._debugpy)

      @property
      def extension_names(self) -> tuple[str, ...]:
@@ -51,10 +54,16 @@ class ExtensionManager:
          """Return the configured extensions and their options."""
          extension_options: dict[str, t.Any] = {}

-         if self._debugger:
+         if self._debugpy:
+             extension_options['_debugpy'] = dataclasses.replace(
+                 self._debugpy,
+                 source_mapping=self._get_source_mapping(self._debugpy.source_mapping),
+             )
+
+         if self._pydevd:
              extension_options['_pydevd'] = dataclasses.replace(
-                 self._debugger,
-                 source_mapping=self._get_source_mapping(),
+                 self._pydevd,
+                 source_mapping=self._get_source_mapping(self._pydevd.source_mapping),
              )

          if self._coverage:
@@ -64,18 +73,19 @@ class ExtensionManager:

          return extensions

-     def _get_source_mapping(self) -> dict[str, str]:
+     def _get_source_mapping(self, debugger_mapping: dict[str, str]) -> dict[str, str]:
          """Get the source mapping, adjusting the source root as needed."""
-         if self._debugger.source_mapping:
-             source_mapping = {self._translate_path(key): value for key, value in self.source_mapping.items()}
+         if debugger_mapping:
+             source_mapping = {self._translate_path(key, debugger_mapping): value for key, value in self.source_mapping.items()}
          else:
              source_mapping = self.source_mapping

          return source_mapping

-     def _translate_path(self, path: str) -> str:
+     @staticmethod
+     def _translate_path(path: str, debugger_mapping: dict[str, str]) -> str:
          """Translate a local path to a foreign path."""
-         for replace, match in self._debugger.source_mapping.items():
+         for replace, match in debugger_mapping.items():
              if path.startswith(match):
                  return replace + path[len(match) :]

@@ -85,7 +95,8 @@ class ExtensionManager:
      def create(cls, task_vars: dict[str, object]) -> t.Self:
          """Create an instance using the provided task vars."""
          return cls(
-             debugger=cls._get_options('_ANSIBALLZ_DEBUGGER_CONFIG', _pydevd.Options, task_vars),
+             pydevd=cls._get_options('_ANSIBALLZ_PYDEVD_CONFIG', _pydevd.Options, task_vars),
+             debugpy=cls._get_options('_ANSIBALLZ_DEBUGPY_CONFIG', _debugpy.Options, task_vars),
              coverage=cls._get_options('_ANSIBALLZ_COVERAGE_CONFIG', _coverage.Options, task_vars),
          )

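The _builder.py changes above replace the single debugger option with separate pydevd and debugpy back-ends. A minimal sketch of the new constructor signature under that assumption, constructing the internal class directly only to illustrate it (the host/port values are illustrative):

    from ansible._internal._ansiballz._builder import ExtensionManager
    from ansible.module_utils._internal._ansiballz._extensions import _debugpy

    # Enable only the debugpy back-end; pydevd and coverage default to None (disabled).
    manager = ExtensionManager(debugpy=_debugpy.Options(host='localhost', port=5678))

    assert manager.debugger_enabled       # True when either pydevd or debugpy options are set
    print(manager.extension_names)        # ('_debugpy',)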
ansible/_internal/_templating/_engine.py CHANGED
@@ -6,7 +6,6 @@ from __future__ import annotations
  import copy
  import dataclasses
  import enum
- import textwrap
  import typing as t
  import collections.abc as c
  import re
@@ -44,7 +43,7 @@ from ._jinja_bits import (
      _finalize_template_result,
      FinalizeMode,
  )
- from ._jinja_common import _TemplateConfig, MarkerError, ExceptionMarker
+ from ._jinja_common import _TemplateConfig, MarkerError, ExceptionMarker, JinjaCallContext
  from ._lazy_containers import _AnsibleLazyTemplateMixin
  from ._marker_behaviors import MarkerBehavior, FAIL_ON_UNDEFINED
  from ._transform import _type_transform_mapping
@@ -260,6 +259,7 @@ class TemplateEngine:
          with (
              TemplateContext(template_value=variable, templar=self, options=options, stop_on_template=stop_on_template) as ctx,
              DeprecatedAccessAuditContext.when(ctx.is_top_level),
+             JinjaCallContext(accept_lazy_markers=True),  # let default Jinja marker behavior apply, since we're descending into a new template
          ):
              try:
                  if not value_is_str:
@@ -559,9 +559,11 @@

          bool_result = bool(result)

+         result_origin = Origin.get_tag(result) or Origin.UNKNOWN
+
          msg = (
-             f'Conditional result was {textwrap.shorten(str(result), width=40)!r} of type {native_type_name(result)!r}, '
-             f'which evaluates to {bool_result}. Conditionals must have a boolean result.'
+             f'Conditional result ({bool_result}) was derived from value of type {native_type_name(result)!r} at {str(result_origin)!r}. '
+             'Conditionals must have a boolean result.'
          )

          if _TemplateConfig.allow_broken_conditionals:
ansible/_internal/_templating/_jinja_bits.py CHANGED
@@ -811,7 +811,7 @@ class AnsibleEnvironment(SandboxedEnvironment):
          try:
              value = obj[attribute]
          except (TypeError, LookupError):
-             return self.undefined(obj=obj, name=attribute) if is_safe else self.unsafe_undefined(obj, attribute)
+             value = self.undefined(obj=obj, name=attribute) if is_safe else self.unsafe_undefined(obj, attribute)

          AnsibleAccessContext.current().access(value)

@@ -891,6 +891,8 @@ def _flatten_nodes(nodes: t.Iterable[t.Any]) -> t.Iterable[t.Any]:
          else:
              if type(node) is TemplateModule:  # pylint: disable=unidiomatic-typecheck
                  yield from _flatten_nodes(node._body_stream)
+             elif node is None:
+                 continue  # avoid yielding `None`-valued nodes to avoid literal "None" in stringified template results
              else:
                  yield node

ansible/_internal/_templating/_jinja_plugins.py CHANGED
@@ -115,7 +115,13 @@ class JinjaPluginIntercept(c.MutableMapping):

          try:
              with JinjaCallContext(accept_lazy_markers=instance.accept_lazy_markers):
-                 return instance.j2_function(*lazify_container_args(args), **lazify_container_kwargs(kwargs))
+                 result = instance.j2_function(*lazify_container_args(args), **lazify_container_kwargs(kwargs))
+
+                 if instance.plugin_type == 'filter':
+                     # ensure list conversion occurs under the call context
+                     result = _wrap_plugin_output(result)
+
+                 return result
          except MarkerError as ex:
              return ex.source
          except Exception as ex:
@@ -156,7 +162,6 @@ class JinjaPluginIntercept(c.MutableMapping):
          @functools.wraps(instance.j2_function)
          def wrapper(*args, **kwargs) -> t.Any:
              result = self._invoke_plugin(instance, *args, **kwargs)
-             result = _wrap_plugin_output(result)

              return result

ansible/_internal/_templating/_lazy_containers.py CHANGED
@@ -229,8 +229,6 @@ class _AnsibleLazyTemplateDict(_AnsibleTaggedDict, _AnsibleLazyTemplateMixin):
      __slots__ = _AnsibleLazyTemplateMixin._SLOTS

      def __init__(self, contents: t.Iterable | _LazyValueSource, /, **kwargs) -> None:
-         _AnsibleLazyTemplateMixin.__init__(self, contents)
-
          if isinstance(contents, _AnsibleLazyTemplateDict):
              super().__init__(dict.items(contents), **kwargs)
          elif isinstance(contents, _LazyValueSource):
@@ -238,6 +236,8 @@ class _AnsibleLazyTemplateDict(_AnsibleTaggedDict, _AnsibleLazyTemplateMixin):
          else:
              raise UnsupportedConstructionMethodError()

+         _AnsibleLazyTemplateMixin.__init__(self, contents)
+
      def get(self, key: t.Any, default: t.Any = None) -> t.Any:
          if (value := super().get(key, _NoKeySentinel)) is _NoKeySentinel:
              return default
@@ -372,8 +372,6 @@ class _AnsibleLazyTemplateList(_AnsibleTaggedList, _AnsibleLazyTemplateMixin):
      __slots__ = _AnsibleLazyTemplateMixin._SLOTS

      def __init__(self, contents: t.Iterable | _LazyValueSource, /) -> None:
-         _AnsibleLazyTemplateMixin.__init__(self, contents)
-
          if isinstance(contents, _AnsibleLazyTemplateList):
              super().__init__(list.__iter__(contents))
          elif isinstance(contents, _LazyValueSource):
@@ -381,6 +379,8 @@ class _AnsibleLazyTemplateList(_AnsibleTaggedList, _AnsibleLazyTemplateMixin):
          else:
              raise UnsupportedConstructionMethodError()

+         _AnsibleLazyTemplateMixin.__init__(self, contents)
+
      def __getitem__(self, key: t.SupportsIndex | slice, /) -> t.Any:
          if type(key) is slice:  # pylint: disable=unidiomatic-typecheck
              return _AnsibleLazyTemplateList(_LazyValueSource(source=super().__getitem__(key), templar=self._templar, lazy_options=self._lazy_options))
@@ -567,7 +567,7 @@ class _AnsibleLazyAccessTuple(_AnsibleTaggedTuple, _AnsibleLazyTemplateMixin):

      def __getitem__(self, key: t.SupportsIndex | slice, /) -> t.Any:
          if type(key) is slice:  # pylint: disable=unidiomatic-typecheck
-             return _AnsibleLazyAccessTuple(super().__getitem__(key))
+             return _AnsibleLazyAccessTuple(_LazyValueSource(source=super().__getitem__(key), templar=self._templar, lazy_options=self._lazy_options))

          value = super().__getitem__(key)

ansible/config/base.yml CHANGED
@@ -11,16 +11,26 @@ _ANSIBALLZ_COVERAGE_CONFIG:
    vars:
      - {name: _ansible_ansiballz_coverage_config}
    version_added: '2.19'
- _ANSIBALLZ_DEBUGGER_CONFIG:
-   name: Configure the AnsiballZ remote debugging extension
+ _ANSIBALLZ_DEBUGPY_CONFIG:
+   name: Configure the AnsiballZ remote debugging extension for debugpy
    description:
-     - Enables and configures the AnsiballZ remote debugging extension.
+     - Enables and configures the AnsiballZ remote debugging extension for debugpy.
      - This is for internal use only.
    env:
-     - {name: _ANSIBLE_ANSIBALLZ_DEBUGGER_CONFIG}
+     - {name: _ANSIBLE_ANSIBALLZ_DEBUGPY_CONFIG}
    vars:
-     - {name: _ansible_ansiballz_debugger_config}
-   version_added: '2.19'
+     - {name: _ansible_ansiballz_debugpy_config}
+   version_added: '2.20'
+ _ANSIBALLZ_PYDEVD_CONFIG:
+   name: Configure the AnsiballZ remote debugging extension for pydevd
+   description:
+     - Enables and configures the AnsiballZ remote debugging extension for pydevd.
+     - This is for internal use only.
+   env:
+     - {name: _ANSIBLE_ANSIBALLZ_PYDEVD_CONFIG}
+   vars:
+     - {name: _ansible_ansiballz_pydevd_config}
+   version_added: '2.20'
  _ANSIBLE_CONNECTION_PATH:
    env:
      - name: _ANSIBLE_CONNECTION_PATH
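Both new definitions are internal and are consumed as JSON from the environment or task vars. A hedged sketch of producing the debugpy value, using only the Options fields added in _debugpy.py further below (the values are illustrative):

    import json
    import os

    # Fields mirror _debugpy.Options: host, port, connect, source_mapping.
    debugpy_config = {'host': 'localhost', 'port': 5678}

    # Read by the AnsiballZ builder via the _ANSIBALLZ_DEBUGPY_CONFIG definition above.
    os.environ['_ANSIBLE_ANSIBALLZ_DEBUGPY_CONFIG'] = json.dumps(debugpy_config)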
ansible/config/manager.py CHANGED
@@ -450,13 +450,17 @@ class ConfigManager:
          pass

      def get_plugin_options(self, plugin_type, name, keys=None, variables=None, direct=None):
+         options, dummy = self.get_plugin_options_and_origins(plugin_type, name, keys=keys, variables=variables, direct=direct)
+         return options

+     def get_plugin_options_and_origins(self, plugin_type, name, keys=None, variables=None, direct=None):
          options = {}
+         origins = {}
          defs = self.get_configuration_definitions(plugin_type=plugin_type, name=name)
          for option in defs:
-             options[option] = self.get_config_value(option, plugin_type=plugin_type, plugin_name=name, keys=keys, variables=variables, direct=direct)
-
-         return options
+             options[option], origins[option] = self.get_config_value_and_origin(option, plugin_type=plugin_type, plugin_name=name, keys=keys,
+                                                                                 variables=variables, direct=direct)
+         return options, origins

      def get_plugin_vars(self, plugin_type, name):

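The new get_plugin_options_and_origins method returns the familiar options dict plus a parallel dict of origins obtained from get_config_value_and_origin. A hedged usage sketch; config_manager stands in for an existing ConfigManager instance, and the plugin type/name are illustrative:

    # Assumes config_manager is an ansible.config.manager.ConfigManager instance.
    options, origins = config_manager.get_plugin_options_and_origins('callback', 'default', variables={})

    for key in options:
        print(f'{key}={options[key]!r} (origin: {origins[key]})')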
ansible/executor/task_executor.py CHANGED
@@ -712,7 +712,10 @@ class TaskExecutor:
                      condname = 'failed'

                      if self._task.failed_when:
-                         result['failed_when_result'] = result['failed'] = self._task._resolve_conditional(self._task.failed_when, vars_copy)
+                         is_failed = result['failed_when_result'] = result['failed'] = self._task._resolve_conditional(self._task.failed_when, vars_copy)
+
+                         if not is_failed and (suppressed_exception := result.pop('exception', None)):
+                             result['failed_when_suppressed_exception'] = suppressed_exception

                  except AnsibleError as e:
                      result['failed'] = True
ansible/executor/task_queue_manager.py CHANGED
@@ -179,7 +179,7 @@ class TaskQueueManager:
              for fd in (STDIN_FILENO, STDOUT_FILENO, STDERR_FILENO):
                  os.set_inheritable(fd, False)
          except Exception as ex:
-             self.warning(f"failed to set stdio as non inheritable: {ex}")
+             display.error_as_warning("failed to set stdio as non inheritable", exception=ex)

          self._callback_lock = threading.Lock()

@@ -269,7 +269,7 @@ class TaskQueueManager:
                      display.warning("Skipping callback '%s', as it does not create a valid plugin instance." % callback_name)
                      continue
              except Exception as ex:
-                 display.warning_as_error(f"Failed to load callback plugin {callback_name!r}.", exception=ex)
+                 display.error_as_warning(f"Failed to load callback plugin {callback_name!r}.", exception=ex)
                  continue

      def run(self, play):
ansible/module_utils/_internal/_ansiballz/_extensions/_debugpy.py ADDED
@@ -0,0 +1,97 @@
+ """
+ Remote debugging support for AnsiballZ modules with debugpy.
+
+ To use with VS Code:
+
+ 1) Choose an available port for VS Code to listen on (e.g. 5678).
+ 2) Ensure `debugpy` is installed for the interpreter(s) which will run the code being debugged.
+ 3) Create the following launch.json configuration
+
+    {
+      "version": "0.2.0",
+      "configurations": [
+        {
+          "name": "Python Debug Server",
+          "type": "debugpy",
+          "request": "attach",
+          "listen": {
+            "host": "localhost",
+            "port": 5678,
+          },
+        },
+        {
+          "name": "ansible-playbook main.yml",
+          "type": "debugpy",
+          "request": "launch",
+          "module": "ansible",
+          "args": [
+            "playbook",
+            "main.yml"
+          ],
+          "env": {
+            "_ANSIBLE_ANSIBALLZ_DEBUGPY_CONFIG": "{\"host\": \"localhost\", \"port\": 5678}"
+          },
+          "console": "integratedTerminal",
+        }
+      ],
+      "compounds": [
+        {
+          "name": "Test Module Debugging",
+          "configurations": [
+            "Python Debug Server",
+            "ansible-playbook main.yml"
+          ],
+          "stopAll": true
+        }
+      ]
+    }
+
+ 4) Set any desired breakpoints.
+ 5) Configure the Run and Debug view to use the "Test Module Debugging" compound configuration.
+ 6) Press F5 to start debugging.
+ """
+
+ from __future__ import annotations
+
+ import dataclasses
+ import json
+ import os
+ import pathlib
+
+ import typing as t
+
+
+ @dataclasses.dataclass(frozen=True)
+ class Options:
+     """Debugger options for debugpy."""
+
+     host: str = 'localhost'
+     """The host to connect to for remote debugging."""
+     port: int = 5678
+     """The port to connect to for remote debugging."""
+     connect: dict[str, object] = dataclasses.field(default_factory=dict)
+     """The options to pass to the `debugpy.connect` method."""
+     source_mapping: dict[str, str] = dataclasses.field(default_factory=dict)
+     """
+     A mapping of source paths to provide to debugpy.
+     This setting is used internally by AnsiballZ and is not required unless Ansible CLI commands are run from a different system than your IDE.
+     In that scenario, use this setting instead of configuring source mapping in your IDE.
+     The key is a path known to the IDE.
+     The value is the same path as known to the Ansible CLI.
+     Both file paths and directories are supported.
+     """
+
+
+ def run(args: dict[str, t.Any]) -> None:  # pragma: nocover
+     """Enable remote debugging."""
+     import debugpy
+
+     options = Options(**args)
+     temp_dir = pathlib.Path(__file__).parent.parent.parent.parent.parent.parent
+     path_mapping = [[key, str(temp_dir / value)] for key, value in options.source_mapping.items()]
+
+     os.environ['PATHS_FROM_ECLIPSE_TO_PYTHON'] = json.dumps(path_mapping)
+
+     debugpy.connect((options.host, options.port), **options.connect)
+
+     pass  # A convenient place to put a breakpoint
ansible/module_utils/_internal/_ansiballz/_extensions/_pydevd.py CHANGED
@@ -7,14 +7,12 @@ To use with PyCharm:
  2) Create a Python Debug Server using that port.
  3) Start the Python Debug Server.
  4) Ensure the correct version of `pydevd-pycharm` is installed for the interpreter(s) which will run the code being debugged.
- 5) Configure Ansible with the `_ANSIBALLZ_DEBUGGER_CONFIG` option.
+ 5) Configure Ansible with the `_ANSIBALLZ_PYDEVD_CONFIG` option.
     See `Options` below for the structure of the debugger configuration.
     Example configuration using an environment variable:
-      export _ANSIBLE_ANSIBALLZ_DEBUGGER_CONFIG='{"module": "pydevd_pycharm", "settrace": {"host": "localhost", "port": 5678, "suspend": false}}'
+      export _ANSIBLE_ANSIBALLZ_PYDEVD_CONFIG='{"module": "pydevd_pycharm", "settrace": {"host": "localhost", "port": 5678, "suspend": false}}'
  6) Set any desired breakpoints.
  7) Run Ansible commands.
-
- A similar process should work for other pydevd based debuggers, such as Visual Studio Code, but they have not been tested.
  """

  from __future__ import annotations
ansible/module_utils/_internal/_traceback.py CHANGED
@@ -80,7 +80,7 @@ def _is_module_traceback_enabled(event: TracebackEvent) -> bool:
          from ..basic import _PARSED_MODULE_ARGS

          _module_tracebacks_enabled_events = frozenset(
-             TracebackEvent[value.upper()] for value in _PARSED_MODULE_ARGS.get('_ansible_tracebacks_for')
+             TracebackEvent[value.upper()] for value in _PARSED_MODULE_ARGS.get('_ansible_tracebacks_for', [])
          )  # type: ignore[union-attr]
      except BaseException:
          return True  # if things failed early enough that we can't figure this out, assume we want a traceback for troubleshooting
ansible/module_utils/ansible_release.py CHANGED
@@ -17,6 +17,6 @@

  from __future__ import annotations

- __version__ = '2.19.0rc2'
+ __version__ = '2.19.1rc1'
  __author__ = 'Ansible, Inc.'
  __codename__ = "What Is and What Should Never Be"
ansible/module_utils/basic.py CHANGED
@@ -1512,11 +1512,19 @@ class AnsibleModule(object):
          # strip no_log collisions
          kwargs = remove_values(kwargs, self.no_log_values)

-         # return preserved
+         # graft preserved values back on
          kwargs.update(preserved)

+         self._record_module_result(kwargs)
+
+     def _record_module_result(self, o: dict[str, t.Any]) -> None:
+         """
+         Temporary internal hook to enable modification/bypass of module result serialization.
+
+         Monkeypatched by ansible.netcommon for direct in-worker module execution.
+         """
          encoder = _json.get_module_encoder(_ANSIBLE_PROFILE, _json.Direction.MODULE_TO_CONTROLLER)
-         print('\n%s' % json.dumps(kwargs, cls=encoder))
+         print('\n%s' % json.dumps(o, cls=encoder))

      def exit_json(self, **kwargs) -> t.NoReturn:
          """ return from the module, without error """
ansible/module_utils/common/validation.py CHANGED
@@ -376,7 +376,10 @@ def check_type_str(value, allow_conversion=True, param=None, prefix=''):
      if isinstance(value, string_types):
          return value

-     if allow_conversion and value is not None:
+     if value is None:
+         return ''  # approximate pre-2.19 templating None->empty str equivalency here for backward compatibility
+
+     if allow_conversion:
          return to_native(value, errors='surrogate_or_strict')

      msg = "'{0!r}' is not a string and conversion is not allowed".format(value)
ansible/modules/dnf.py CHANGED
@@ -208,6 +208,8 @@ options:
        packages to install (because dependencies between the downgraded
        package and others can cause changes to the packages which were
        in the earlier transaction).
+     - Since this feature is not provided by C(dnf) itself but by M(ansible.builtin.dnf) module,
+       using this in combination with wildcard characters in O(name) may result in an unexpected results.
    type: bool
    default: "no"
    version_added: "2.7"
@@ -708,72 +710,56 @@ class DnfModule(YumDnf):
          self.module.exit_json(msg="", results=results)

      def _is_installed(self, pkg):
-         installed_query = dnf.subject.Subject(pkg).get_best_query(sack=self.base.sack).installed()
-         if dnf.util.is_glob_pattern(pkg):
-             available_query = dnf.subject.Subject(pkg).get_best_query(sack=self.base.sack).available()
-             return not (
-                 {p.name for p in available_query} - {p.name for p in installed_query}
-             )
-         else:
-             return bool(installed_query)
+         return bool(dnf.subject.Subject(pkg).get_best_query(sack=self.base.sack).installed())

      def _is_newer_version_installed(self, pkg_spec):
+         # expects a versioned package spec
          try:
              if isinstance(pkg_spec, dnf.package.Package):
                  installed = sorted(self.base.sack.query().installed().filter(name=pkg_spec.name, arch=pkg_spec.arch))[-1]
                  return installed.evr_gt(pkg_spec)
              else:
-                 available = dnf.subject.Subject(pkg_spec).get_best_query(sack=self.base.sack).available()
-                 installed = self.base.sack.query().installed().filter(name=available[0].name)
-                 for arch in sorted(set(p.arch for p in installed)):  # select only from already-installed arches for this case
-                     installed_pkg = sorted(installed.filter(arch=arch))[-1]
-                     try:
-                         available_pkg = sorted(available.filter(arch=arch))[-1]
-                     except IndexError:
-                         continue  # nothing currently available for this arch; keep going
-                     if installed_pkg.evr_gt(available_pkg):
-                         return True
-                 return False
+                 solution = dnf.subject.Subject(pkg_spec).get_best_solution(self.base.sack)
+                 q = solution["query"]
+                 if not q or not solution['nevra'] or solution['nevra'].has_just_name():
+                     return False
+                 installed = self.base.sack.query().installed().filter(name=solution['nevra'].name)
+                 if not installed:
+                     return False
+                 return installed[0].evr_gt(q[0])
          except IndexError:
              return False

      def _mark_package_install(self, pkg_spec, upgrade=False):
          """Mark the package for install."""
-         is_newer_version_installed = self._is_newer_version_installed(pkg_spec)
-         is_installed = self._is_installed(pkg_spec)
          msg = ''
          try:
-             if is_newer_version_installed:
+             if dnf.util.is_glob_pattern(pkg_spec):
+                 # Special case for package specs that contain glob characters.
+                 # For these we skip `is_installed` and `is_newer_version_installed` tests that allow for the
+                 # allow_downgrade feature and pass the package specs to dnf.
+                 # Since allow_downgrade is not available in dnf and while it is relatively easy to implement it for
+                 # package specs that evaluate to a single package, trying to mimic what would the dnf machinery do
+                 # for glob package specs and then filtering those for allow_downgrade appears to always
+                 # result in naive/inferior solution.
+                 # NOTE this has historically never worked even before https://github.com/ansible/ansible/pull/82725
+                 # where our (buggy) custom code ignored wildcards for the installed checks.
+                 # TODO reasearch how feasible it is to implement the above
+                 if upgrade:
+                     # for upgrade we pass the spec to both upgrade and install, to satisfy both available and installed
+                     # packages evaluated from the glob spec
+                     try:
+                         self.base.upgrade(pkg_spec)
+                     except dnf.exceptions.PackagesNotInstalledError:
+                         pass
+                 self.base.install(pkg_spec, strict=self.base.conf.strict)
+             elif self._is_newer_version_installed(pkg_spec):
                  if self.allow_downgrade:
-                     # dnf only does allow_downgrade, we have to handle this ourselves
-                     # because it allows a possibility for non-idempotent transactions
-                     # on a system's package set (pending the yum repo has many old
-                     # NVRs indexed)
-                     if upgrade:
-                         if is_installed:  # Case 1
-                             # TODO: Is this case reachable?
-                             #
-                             # _is_installed() demands a name (*not* NVR) or else is always False
-                             # (wildcards are treated literally).
-                             #
-                             # Meanwhile, _is_newer_version_installed() demands something versioned
-                             # or else is always false.
-                             #
-                             # I fail to see how they can both be true at the same time for any
-                             # given pkg_spec. -re
-                             self.base.upgrade(pkg_spec)
-                         else:  # Case 2
-                             self.base.install(pkg_spec, strict=self.base.conf.strict)
-                     else:  # Case 3
-                         self.base.install(pkg_spec, strict=self.base.conf.strict)
-                 else:  # Case 4, Nothing to do, report back
-                     pass
-             elif is_installed:  # A potentially older (or same) version is installed
-                 if upgrade:  # Case 5
+                     self.base.install(pkg_spec, strict=self.base.conf.strict)
+             elif self._is_installed(pkg_spec):
+                 if upgrade:
                      self.base.upgrade(pkg_spec)
-                 else:  # Case 6, Nothing to do, report back
-                     pass
-             else:  # Case 7, The package is not installed, simply install it
+             else:
                  self.base.install(pkg_spec, strict=self.base.conf.strict)
          except dnf.exceptions.MarkingError as e:
              msg = "No package {0} available.".format(pkg_spec)