pyinfra 3.0b1-py2.py3-none-any.whl → 3.0b2-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. pyinfra/api/arguments.py +9 -3
  2. pyinfra/api/arguments_typed.py +8 -5
  3. pyinfra/api/command.py +5 -3
  4. pyinfra/api/config.py +115 -13
  5. pyinfra/api/connectors.py +5 -2
  6. pyinfra/api/facts.py +33 -32
  7. pyinfra/api/host.py +5 -5
  8. pyinfra/api/inventory.py +4 -0
  9. pyinfra/api/operation.py +22 -14
  10. pyinfra/api/util.py +24 -16
  11. pyinfra/connectors/base.py +3 -6
  12. pyinfra/connectors/docker.py +2 -9
  13. pyinfra/connectors/local.py +2 -2
  14. pyinfra/connectors/ssh.py +2 -2
  15. pyinfra/connectors/util.py +6 -7
  16. pyinfra/connectors/vagrant.py +5 -5
  17. pyinfra/context.py +1 -0
  18. pyinfra/facts/apk.py +2 -0
  19. pyinfra/facts/apt.py +2 -0
  20. pyinfra/facts/brew.py +2 -0
  21. pyinfra/facts/bsdinit.py +2 -0
  22. pyinfra/facts/cargo.py +2 -0
  23. pyinfra/facts/choco.py +2 -0
  24. pyinfra/facts/deb.py +7 -2
  25. pyinfra/facts/dnf.py +2 -0
  26. pyinfra/facts/docker.py +2 -0
  27. pyinfra/facts/files.py +2 -0
  28. pyinfra/facts/gem.py +2 -0
  29. pyinfra/facts/gpg.py +2 -0
  30. pyinfra/facts/hardware.py +30 -22
  31. pyinfra/facts/launchd.py +2 -0
  32. pyinfra/facts/lxd.py +2 -0
  33. pyinfra/facts/mysql.py +12 -6
  34. pyinfra/facts/npm.py +1 -0
  35. pyinfra/facts/openrc.py +2 -0
  36. pyinfra/facts/pacman.py +6 -2
  37. pyinfra/facts/pip.py +2 -0
  38. pyinfra/facts/pkg.py +2 -0
  39. pyinfra/facts/pkgin.py +2 -0
  40. pyinfra/facts/postgres.py +6 -6
  41. pyinfra/facts/postgresql.py +2 -0
  42. pyinfra/facts/rpm.py +12 -9
  43. pyinfra/facts/server.py +10 -13
  44. pyinfra/facts/snap.py +2 -0
  45. pyinfra/facts/systemd.py +2 -0
  46. pyinfra/facts/upstart.py +2 -0
  47. pyinfra/facts/util/packaging.py +3 -2
  48. pyinfra/facts/vzctl.py +2 -0
  49. pyinfra/facts/xbps.py +2 -0
  50. pyinfra/facts/yum.py +2 -0
  51. pyinfra/facts/zypper.py +2 -0
  52. pyinfra/operations/apk.py +3 -1
  53. pyinfra/operations/apt.py +16 -18
  54. pyinfra/operations/brew.py +10 -8
  55. pyinfra/operations/bsdinit.py +5 -3
  56. pyinfra/operations/cargo.py +3 -1
  57. pyinfra/operations/choco.py +3 -1
  58. pyinfra/operations/dnf.py +15 -19
  59. pyinfra/operations/files.py +81 -66
  60. pyinfra/operations/gem.py +3 -1
  61. pyinfra/operations/git.py +18 -16
  62. pyinfra/operations/iptables.py +27 -25
  63. pyinfra/operations/launchd.py +5 -6
  64. pyinfra/operations/lxd.py +7 -4
  65. pyinfra/operations/mysql.py +57 -53
  66. pyinfra/operations/npm.py +8 -1
  67. pyinfra/operations/openrc.py +5 -3
  68. pyinfra/operations/pacman.py +4 -5
  69. pyinfra/operations/pip.py +11 -9
  70. pyinfra/operations/pkg.py +3 -1
  71. pyinfra/operations/pkgin.py +3 -1
  72. pyinfra/operations/postgres.py +39 -37
  73. pyinfra/operations/postgresql.py +2 -0
  74. pyinfra/operations/puppet.py +3 -1
  75. pyinfra/operations/python.py +7 -3
  76. pyinfra/operations/selinux.py +42 -16
  77. pyinfra/operations/server.py +48 -43
  78. pyinfra/operations/snap.py +3 -1
  79. pyinfra/operations/ssh.py +12 -10
  80. pyinfra/operations/systemd.py +8 -6
  81. pyinfra/operations/sysvinit.py +6 -4
  82. pyinfra/operations/upstart.py +5 -3
  83. pyinfra/operations/util/files.py +24 -16
  84. pyinfra/operations/util/packaging.py +53 -37
  85. pyinfra/operations/util/service.py +18 -13
  86. pyinfra/operations/vzctl.py +12 -10
  87. pyinfra/operations/xbps.py +3 -1
  88. pyinfra/operations/yum.py +14 -18
  89. pyinfra/operations/zypper.py +8 -9
  90. pyinfra/version.py +5 -2
  91. {pyinfra-3.0b1.dist-info → pyinfra-3.0b2.dist-info}/METADATA +28 -26
  92. pyinfra-3.0b2.dist-info/RECORD +163 -0
  93. {pyinfra-3.0b1.dist-info → pyinfra-3.0b2.dist-info}/WHEEL +1 -1
  94. pyinfra_cli/exceptions.py +0 -5
  95. pyinfra_cli/inventory.py +38 -19
  96. pyinfra_cli/prints.py +15 -11
  97. pyinfra_cli/util.py +3 -1
  98. tests/test_api/test_api_operations.py +1 -1
  99. tests/test_connectors/test_ssh.py +66 -13
  100. tests/test_connectors/test_vagrant.py +3 -3
  101. pyinfra-3.0b1.dist-info/RECORD +0 -163
  102. {pyinfra-3.0b1.dist-info → pyinfra-3.0b2.dist-info}/LICENSE.md +0 -0
  103. {pyinfra-3.0b1.dist-info → pyinfra-3.0b2.dist-info}/entry_points.txt +0 -0
  104. {pyinfra-3.0b1.dist-info → pyinfra-3.0b2.dist-info}/top_level.txt +0 -0
pyinfra/api/arguments.py CHANGED
@@ -9,6 +9,7 @@ from typing import (
  List,
  Mapping,
  Optional,
+ Type,
  TypeVar,
  Union,
  cast,
@@ -170,7 +171,7 @@ class MetaArguments(TypedDict):
  name: str
  _ignore_errors: bool
  _continue_on_error: bool
- _if: List[Callable[[], bool]]
+ _if: Union[List[Callable[[], bool]], Callable[[], bool], None]


  meta_argument_meta: dict[str, ArgumentMeta] = {
@@ -191,7 +192,7 @@ meta_argument_meta: dict[str, ArgumentMeta] = {
  default=lambda _: False,
  ),
  "_if": ArgumentMeta(
- "Only run this operation if these functions returns True",
+ "Only run this operation if these functions return True",
  default=lambda _: [],
  ),
  }
@@ -228,6 +229,11 @@ class AllArguments(ConnectorArguments, MetaArguments, ExecutionArguments):
  pass


+ def all_global_arguments() -> List[tuple[str, Type]]:
+ """Return all global arguments and their types."""
+ return list(get_type_hints(AllArguments).items())
+
+
  all_argument_meta: dict[str, ArgumentMeta] = {
  **auth_argument_meta,
  **shell_argument_meta,
@@ -305,7 +311,7 @@ def pop_global_arguments(
  arguments: dict[str, Any] = {}
  found_keys: list[str] = []

- for key, type_ in get_type_hints(AllArguments).items():
+ for key, type_ in all_global_arguments():
  if keys_to_check and key not in keys_to_check:
  continue

pyinfra/api/arguments_typed.py CHANGED
@@ -32,6 +32,11 @@ class PyinfraOperation(Generic[P], Protocol):
  def __call__(
  self,
  #
+ # op args
+ # needs to be first
+ #
+ *args: P.args,
+ #
  # ConnectorArguments
  #
  # Auth
@@ -61,7 +66,7 @@ class PyinfraOperation(Generic[P], Protocol):
  name: Optional[str] = None,
  _ignore_errors: bool = False,
  _continue_on_error: bool = False,
- _if: Optional[List[Callable[[], bool]]] = None,
+ _if: Union[List[Callable[[], bool]], Callable[[], bool], None] = None,
  #
  # ExecutionArguments
  #
@@ -69,9 +74,7 @@ class PyinfraOperation(Generic[P], Protocol):
  _run_once: bool = False,
  _serial: bool = False,
  #
- # The op itself
+ # op kwargs
  #
- *args: P.args,
  **kwargs: P.kwargs,
- ) -> "OperationMeta":
- ...
+ ) -> "OperationMeta": ...
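Note: the new module-level helper all_global_arguments() centralises the previous get_type_hints(AllArguments) lookup, and _if now accepts either a single callable or a list of callables. A minimal sketch of using the helper, assuming only what the diff above shows:

    from pyinfra.api.arguments import all_global_arguments

    # Iterate over every global argument name and its annotated type,
    # e.g. to validate user-supplied kwargs before passing them on.
    for key, type_ in all_global_arguments():
        print(key, type_)
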
pyinfra/api/command.py CHANGED
@@ -1,7 +1,9 @@
+ from __future__ import annotations
+
  import shlex
  from inspect import getfullargspec
  from string import Formatter
- from typing import TYPE_CHECKING, Callable, Union
+ from typing import IO, TYPE_CHECKING, Callable, Union

  import gevent
  from typing_extensions import Unpack
@@ -143,7 +145,7 @@ class StringCommand(PyinfraCommand):
  class FileUploadCommand(PyinfraCommand):
  def __init__(
  self,
- src: str,
+ src: str | IO,
  dest: str,
  remote_temp_filename=None,
  **kwargs: Unpack[ConnectorArguments],
@@ -173,7 +175,7 @@ class FileDownloadCommand(PyinfraCommand):
  def __init__(
  self,
  src: str,
- dest: str,
+ dest: str | IO,
  remote_temp_filename=None,
  **kwargs: Unpack[ConnectorArguments],
  ):
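Note: the widened annotations (src: str | IO on uploads, dest: str | IO on downloads) make explicit that these commands accept in-memory file-like objects as well as paths. A hedged, purely illustrative sketch (FileUploadCommand is internal API and would normally be created for you by operations such as files.put; the destination path here is made up):

    import io

    from pyinfra.api.command import FileUploadCommand

    # An open file-like object is now a valid source, not just a path string
    config = io.StringIO("key=value\n")
    command = FileUploadCommand(src=config, dest="/etc/app/app.conf")
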
pyinfra/api/config.py CHANGED
@@ -1,8 +1,14 @@
+ try:
+ import importlib_metadata
+ except ImportError:
+ import importlib.metadata as importlib_metadata # type: ignore[no-redef]
  from os import path
- from typing import Optional
+ from typing import Iterable, Optional, Set

- # TODO: move to importlib.resources
- from pkg_resources import Requirement, ResolutionError, parse_version, require
+ from packaging.markers import Marker
+ from packaging.requirements import Requirement
+ from packaging.specifiers import SpecifierSet
+ from packaging.version import Version

  from pyinfra import __version__, state

@@ -53,15 +59,113 @@ config_defaults = {key: value for key, value in ConfigDefaults.__dict__.items()
  def check_pyinfra_version(version: str):
  if not version:
  return
- running_version = parse_version(__version__)
- required_versions = Requirement.parse("pyinfra{0}".format(version))
+ running_version = Version(__version__)
+ required_versions = SpecifierSet(version)

- if running_version not in required_versions: # type: ignore[operator]
+ if running_version not in required_versions:
  raise PyinfraError(
  f"pyinfra version requirement not met (requires {version}, running {__version__})"
  )


+ def _check_requirements(requirements: Iterable[str]) -> Set[Requirement]:
+ """
+ Check whether each of the given requirements and all their dependencies are
+ installed.
+
+ Or more precisely, this checks that each of the given *requirements* is
+ satisfied by some installed *distribution package*, and so on recursively
+ for each of the dependencies of those distribution packages. The terminology
+ here is as follows:
+
+ * A *distribution package* is essentially a thing that can be installed with
+ ``pip``, from an sdist or wheel or Git repo or so on.
+ * A *requirement* is the expectation that a distribution package satisfying
+ some constraint is installed.
+ * A *dependency* is a requirement specified by a distribution package (as
+ opposed to the requirements passed in to this function).
+
+ So what this function does is start from the given requirements, for each
+ one check that it is satisfied by some installed distribution package, and
+ if so recursively perform the same check on all the dependencies of that
+ distribution package. In short, it's traversing the graph of package
+ requirements. It stops whenever it finds a requirement that is not satisfied
+ (i.e. a required package that is not installed), or when it runs out of
+ requirements to check.
+
+ .. note::
+ This is basically equivalent to ``pkg_resources.require()`` except that
+ when ``require()`` succeeds, it will return the list of distribution
+ packages that satisfy the given requirements and their dependencies, and
+ when it fails, it will raise an exception. This function just returns
+ the requirements which were not satisfied instead.
+
+ :param requirements: The requirements to check for in the set of installed
+ packages (along with their dependencies).
+ :return: The set of requirements that were not satisfied, which will be
+ an empty set if all requirements (recursively) were satisfied.
+ """
+
+ # Based on pkg_resources.require() from setuptools. The implementation of
+ # hbutils.system.check_reqs() from the hbutils package was also helpful in
+ # clarifying what this is supposed to do.
+
+ reqs_to_check: Set[Requirement] = set(Requirement(r) for r in requirements)
+ reqs_satisfied: Set[Requirement] = set()
+ reqs_not_satisfied: Set[Requirement] = set()
+
+ while reqs_to_check:
+ req = reqs_to_check.pop()
+ assert req not in reqs_satisfied and req not in reqs_not_satisfied
+
+ # Check for an installed distribution package with the right name and version
+ try:
+ dist = importlib_metadata.distribution(req.name)
+ except importlib_metadata.PackageNotFoundError:
+ # No installed package with the right name
+ # This would raise a DistributionNotFound error from pkg_resources.require()
+ reqs_not_satisfied.add(req)
+ continue
+
+ if dist.version not in req.specifier:
+ # There is a distribution with the right name but wrong version
+ # This would raise a VersionConflict error from pkg_resources.require()
+ reqs_not_satisfied.add(req)
+ continue
+
+ reqs_satisfied.add(req)
+
+ # If the distribution package has dependencies of its own, go through
+ # those dependencies and for each one add it to the set to be checked if
+ # - it's unconditional (no marker)
+ # - or it's conditional and the condition is satisfied (the marker
+ # evaluates to true) in the current environment
+ # Markers can check things like the Python version and system version
+ # etc., and/or they can check which extras of the distribution package
+ # were required. To facilitate checking extras we have to pass the extra
+ # in the environment when calling Marker.evaluate().
+ if dist.requires:
+ if req.extras:
+ extras_envs = [{"extra": extra} for extra in req.extras]
+
+ def evaluate_marker(marker: Marker) -> bool:
+ return any(map(marker.evaluate, extras_envs))
+
+ else:
+
+ def evaluate_marker(marker: Marker) -> bool:
+ return marker.evaluate()
+
+ for dist_req_str in dist.requires:
+ dist_req = Requirement(dist_req_str)
+ if dist_req in reqs_satisfied or dist_req in reqs_not_satisfied:
+ continue
+ if (not dist_req.marker) or evaluate_marker(dist_req.marker):
+ reqs_to_check.add(dist_req)
+
+ return reqs_not_satisfied
+
+
  def check_require_packages(requirements_config):
  if not requirements_config:
  return
@@ -72,14 +176,12 @@ def check_require_packages(requirements_config):
  with open(path.join(state.cwd or "", requirements_config), encoding="utf-8") as f:
  requirements = [line.split("#egg=")[-1] for line in f.read().splitlines()]

- try:
- require(requirements)
- except ResolutionError as e:
+ requirements_not_met = _check_requirements(requirements)
+ if requirements_not_met:
  raise PyinfraError(
- "Deploy requirements ({0}) not met: {1}".format(
- requirements_config,
- e,
- ),
+ "Deploy requirements ({0}) not met: missing {1}".format(
+ requirements_config, ", ".join(str(r) for r in requirements_not_met)
+ )
  )
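Note: config checks no longer go through the deprecated pkg_resources; version requirements now use the packaging library directly and deploy requirements are resolved via importlib metadata in the new _check_requirements helper. A small sketch of the version comparison now used by check_pyinfra_version, with illustrative version numbers:

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    running_version = Version("3.0.2")
    required_versions = SpecifierSet(">=3,<4")

    # Version objects support `in` against a SpecifierSet, replacing the old
    # pkg_resources parse_version/Requirement.parse combination.
    assert running_version in required_versions
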
pyinfra/api/connectors.py CHANGED
@@ -1,4 +1,7 @@
- import pkg_resources
+ try:
+ from importlib_metadata import entry_points
+ except ImportError:
+ from importlib.metadata import entry_points # type: ignore[assignment]


  def _load_connector(entrypoint):
@@ -8,7 +11,7 @@ def _load_connector(entrypoint):
  def get_all_connectors():
  return {
  entrypoint.name: _load_connector(entrypoint)
- for entrypoint in pkg_resources.iter_entry_points("pyinfra.connectors")
+ for entrypoint in entry_points(group="pyinfra.connectors")
  }
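Note: connector discovery moves from pkg_resources.iter_entry_points to entry_points. The group= keyword used above requires Python 3.10+, hence the fallback import of the importlib_metadata backport. A minimal sketch of the same lookup:

    try:
        from importlib_metadata import entry_points  # backport for older Pythons
    except ImportError:
        from importlib.metadata import entry_points  # stdlib, group= needs 3.10+

    # List the names of all registered pyinfra connectors
    for entrypoint in entry_points(group="pyinfra.connectors"):
        print(entrypoint.name)
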
pyinfra/api/facts.py CHANGED
@@ -10,6 +10,7 @@ other host B while I operate on this host A).

  from __future__ import annotations

+ import inspect
  import re
  from inspect import getcallargs
  from socket import error as socket_error, timeout as timeout_error
@@ -32,7 +33,7 @@ from paramiko import SSHException

  from pyinfra import logger
  from pyinfra.api import StringCommand
- from pyinfra.api.arguments import pop_global_arguments
+ from pyinfra.api.arguments import all_global_arguments, pop_global_arguments
  from pyinfra.api.util import (
  get_kwargs_str,
  log_error_or_warning,
@@ -76,6 +77,17 @@ class FactBase(Generic[T]):
  module_name = cls.__module__.replace("pyinfra.facts.", "")
  cls.name = f"{module_name}.{cls.__name__}"

+ # Check that fact's `command` method does not inadvertently take a global
+ # argument, most commonly `name`.
+ if hasattr(cls, "command") and callable(cls.command):
+ command_args = set(inspect.signature(cls.command).parameters.keys())
+ global_args = set([name for name, _ in all_global_arguments()])
+ command_global_args = command_args & global_args
+
+ if len(command_global_args) > 0:
+ names = ", ".join(command_global_args)
+ raise TypeError(f"{cls.name}'s arguments {names} are reserved for global arguments")
+
  @staticmethod
  def default() -> T:
  """
@@ -146,34 +158,25 @@ def _handle_fact_kwargs(state, host, cls, args, kwargs):
  args = args or []
  kwargs = kwargs or {}

- # TODO: this is here to avoid popping stuff accidentally, this is horrible! Change the
- # pop function to return the clean kwargs to avoid the indirect mutation.
- kwargs = kwargs.copy()
+ # Start with a (shallow) copy of current operation kwargs if any
+ ctx_kwargs = (host.current_op_global_arguments or {}).copy()
+ # Update with the input kwargs (overrides)
+ ctx_kwargs.update(kwargs)

- # Get the defaults *and* overrides by popping from kwargs, executor kwargs passed
- # into get_fact override everything else (applied below).
- override_kwargs, override_kwarg_keys = pop_global_arguments(
- kwargs,
+ # Pop executor kwargs, pass remaining
+ global_kwargs, _ = pop_global_arguments(
+ ctx_kwargs,
  state=state,
  host=host,
- keys_to_check=CONNECTOR_ARGUMENT_KEYS,
- )
-
- executor_kwargs = _get_executor_kwargs(
- state,
- host,
- override_kwargs=override_kwargs, # type: ignore[arg-type]
- override_kwarg_keys=override_kwarg_keys,
  )

- fact_kwargs = {}
+ fact_kwargs = {key: value for key, value in kwargs.items() if key not in global_kwargs}

- if args or kwargs:
- assert not isinstance(cls.command, str)
+ if args or fact_kwargs:
  # Merges args & kwargs into a single kwargs dictionary
- fact_kwargs = getcallargs(cls().command, *args, **kwargs)
+ fact_kwargs = getcallargs(cls().command, *args, **fact_kwargs)

- return fact_kwargs, executor_kwargs
+ return fact_kwargs, global_kwargs


  def get_facts(state: "State", *args, **kwargs):
@@ -241,7 +244,7 @@ def _get_fact(
  fact = cls()
  name = fact.name

- fact_kwargs, executor_kwargs = _handle_fact_kwargs(state, host, cls, args, kwargs)
+ fact_kwargs, global_kwargs = _handle_fact_kwargs(state, host, cls, args, kwargs)

  kwargs_str = get_kwargs_str(fact_kwargs)
  logger.debug(
@@ -257,15 +260,9 @@ def _get_fact(
  raise_exceptions=True,
  )

- ignore_errors = (
- host.current_op_global_arguments["_ignore_errors"]
- if host.in_op and host.current_op_global_arguments
- else state.config.IGNORE_ERRORS
- )
-
  # Facts can override the shell (winrm powershell vs cmd support)
  if fact.shell_executable:
- executor_kwargs["_shell_executable"] = fact.shell_executable
+ global_kwargs["_shell_executable"] = fact.shell_executable

  command = _make_command(fact.command, fact_kwargs)
  requires_command = _make_command(fact.requires_command, fact_kwargs)
@@ -284,6 +281,10 @@ def _get_fact(
  status = False
  output = CommandOutput([])

+ executor_kwargs = {
+ key: value for key, value in global_kwargs.items() if key in CONNECTOR_ARGUMENT_KEYS
+ }
+
  try:
  status, output = host.run_shell_command(
  command,
@@ -295,7 +296,7 @@ def _get_fact(
  log_host_command_error(
  host,
  e,
- timeout=executor_kwargs["_timeout"],
+ timeout=global_kwargs["_timeout"],
  )

  stdout_lines, stderr_lines = output.stdout_lines, output.stderr_lines
@@ -334,12 +335,12 @@ def _get_fact(

  log_error_or_warning(
  host,
- ignore_errors,
+ global_kwargs["_ignore_errors"],
  description=("could not load fact: {0} {1}").format(name, get_kwargs_str(fact_kwargs)),
  )

  # Check we've not failed
- if not status and not ignore_errors and apply_failed_hosts:
+ if apply_failed_hosts and not status and not global_kwargs["_ignore_errors"]:
  state.fail_hosts({host})

  return data
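Note: fact classes are now validated when they are defined; if a fact's command() method takes a parameter whose name collides with a global argument (most commonly name), a TypeError is raised. A hedged sketch of a custom fact that keeps its parameters clear of the reserved names (the fact itself is hypothetical):

    from pyinfra.api import FactBase

    class HomeDirectory(FactBase):
        # command() parameters must not clash with global argument names
        # such as `name` or `_sudo`; `user` is fine.
        def command(self, user):
            return f"echo ~{user}"

        def process(self, output):
            # output is the list of stdout lines from the command
            return output[0]
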
pyinfra/api/host.py CHANGED
@@ -33,7 +33,9 @@ if TYPE_CHECKING:
  from pyinfra.api.state import State


- def extract_callable_datas(datas: list[Union[Callable[..., Any], Any]]) -> Generator[Any, Any, Any]:
+ def extract_callable_datas(
+ datas: list[Union[Callable[..., Any], Any]],
+ ) -> Generator[Any, Any, Any]:
  for data in datas:
  # Support for dynamic data, ie @deploy wrapped data defaults where
  # the data is stored on the state temporarily.
@@ -336,12 +338,10 @@ class Host:
  T = TypeVar("T")

  @overload
- def get_fact(self, name_or_cls: Type[FactBase[T]], *args, **kwargs) -> T:
- ...
+ def get_fact(self, name_or_cls: Type[FactBase[T]], *args, **kwargs) -> T: ...

  @overload
- def get_fact(self, name_or_cls: Type[ShortFactBase[T]], *args, **kwargs) -> T:
- ...
+ def get_fact(self, name_or_cls: Type[ShortFactBase[T]], *args, **kwargs) -> T: ...

  def get_fact(self, name_or_cls, *args, **kwargs):
  """
pyinfra/api/inventory.py CHANGED
@@ -37,6 +37,10 @@ class Inventory:

  state: "State"

+ @staticmethod
+ def empty():
+ return Inventory(([], {}))
+
  def __init__(self, names_data, override_data=None, **groups):
  # Setup basics
  self.groups = defaultdict(list) # lists of Host objects
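Note: Inventory.empty() is a small convenience constructor; per the diff it is equivalent to building an inventory from an empty names/data tuple:

    from pyinfra.api import Inventory

    # Both create an inventory with no hosts and no data
    inventory = Inventory.empty()
    equivalent = Inventory(([], {}))
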
pyinfra/api/operation.py CHANGED
@@ -45,7 +45,7 @@ class OperationMeta:

  _combined_output: Optional["CommandOutput"] = None
  _commands: Optional[list[Any]] = None
- _maybe_is_change: Optional[bool] = False
+ _maybe_is_change: Optional[bool] = None
  _success: Optional[bool] = None

  def __init__(self, hash, is_change: Optional[bool]):
@@ -87,6 +87,19 @@ class OperationMeta:
  if not self.is_complete():
  raise RuntimeError("Cannot evaluate operation result before execution")

+ @property
+ def will_change(self) -> bool:
+ if self._maybe_is_change is not None:
+ return self._maybe_is_change
+
+ op_data = context.state.get_op_data_for_host(context.host, self._hash)
+ cmd_gen = op_data.command_generator
+ for _ in cmd_gen():
+ self._maybe_is_change = True
+ return True
+ self._maybe_is_change = False
+ return False
+
  def _did_change(self) -> bool:
  return bool(self._success and len(self._commands or []) > 0)

@@ -98,8 +111,9 @@ class OperationMeta:

  def did_not_change(self):
  return context.host.when(lambda: not self._did_change())

- def did_succeed(self) -> bool:
- self._raise_if_not_complete()
+ def did_succeed(self, _raise_if_not_complete=True) -> bool:
+ if _raise_if_not_complete:
+ self._raise_if_not_complete()
  return self._success is True

  def did_error(self) -> bool:
@@ -111,15 +125,7 @@
  def changed(self) -> bool:
  if self.is_complete():
  return self._did_change()
-
- if self._maybe_is_change is not None:
- return self._maybe_is_change
-
- op_data = context.state.get_op_data_for_host(context.host, self._hash)
- cmd_gen = op_data.command_generator
- for _ in cmd_gen():
- return True
- return False
+ return self.will_change

  @property
  def stdout_lines(self) -> list[str]:
@@ -257,10 +263,12 @@ def _wrap_operation(func: Callable[P, Generator], _set_in_op: bool = True) -> Py
  # *would* be made based on the *current* remote state.

  def command_generator() -> Iterator[PyinfraCommand]:
- # Check global _if_ argument function and do nothing if returns False
+ # Check global _if argument function and do nothing if returns False
  if state.is_executing:
  _ifs = global_arguments.get("_if")
- if _ifs and not all(_if() for _if in _ifs):
+ if isinstance(_ifs, list) and not all(_if() for _if in _ifs):
+ return
+ elif callable(_ifs) and not _ifs():
  return

  host.in_op = _set_in_op
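Note: together with the typing change in arguments.py, _if now accepts a single callable as well as a list of callables, and will_change caches the dry-run result it computes. A hedged deploy-style sketch (package and service names are illustrative):

    from pyinfra.operations import apt, systemd

    install = apt.packages(name="Install nginx", packages=["nginx"])

    # _if may now be a single callable rather than a list of callables
    systemd.service(
        name="Restart nginx if the package changed",
        service="nginx",
        restarted=True,
        _if=install.did_change,
    )
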
pyinfra/api/util.py CHANGED
@@ -109,16 +109,16 @@ def get_caller_frameinfo(frame_offset: int = 0):


  def get_operation_order_from_stack(state: "State"):
+
  stack_items = list(reversed(stack()))

+ i = 0
  # Find the *first* occurrence of our deploy file in the reversed stack
  if state.current_deploy_filename:
  for i, stack_item in enumerate(stack_items):
  frame = getframeinfo(stack_item[0])
  if frame.filename == state.current_deploy_filename:
  break
- else:
- i = 0

  # Now generate a list of line numbers *following that file*
  line_numbers = []
@@ -139,7 +139,7 @@
  return line_numbers


- def get_template(filename_or_io: str):
+ def get_template(filename_or_io: str | IO):
  """
  Gets a jinja2 ``Template`` object for the input filename or string, with caching
  based on the filename of the template, or the SHA1 of the input string.
@@ -301,19 +301,27 @@ def make_hash(obj):
  if isinstance(obj, int)
  # Constants - the values can change between hosts but we should still
  # group them under the same operation hash.
- else "_PYINFRA_CONSTANT"
- if obj in (True, False, None)
- # Plain strings
- else obj
- if isinstance(obj, str)
- # Objects with __name__s
- else obj.__name__
- if hasattr(obj, "__name__")
- # Objects with names
- else obj.name
- if hasattr(obj, "name")
- # Repr anything else
- else repr(obj)
+ else (
+ "_PYINFRA_CONSTANT"
+ if obj in (True, False, None)
+ # Plain strings
+ else (
+ obj
+ if isinstance(obj, str)
+ # Objects with __name__s
+ else (
+ obj.__name__
+ if hasattr(obj, "__name__")
+ # Objects with names
+ else (
+ obj.name
+ if hasattr(obj, "name")
+ # Repr anything else
+ else repr(obj)
+ )
+ )
+ )
+ )
  )

  return sha1_hash(hash_string)
pyinfra/connectors/base.py CHANGED
@@ -108,8 +108,7 @@ class BaseConnector(abc.ABC):
  print_output: bool,
  print_input: bool,
  **arguments: Unpack["ConnectorArguments"],
- ) -> tuple[bool, "CommandOutput"]:
- ...
+ ) -> tuple[bool, "CommandOutput"]: ...

  @abc.abstractmethod
  def put_file(
@@ -120,8 +119,7 @@
  print_output: bool = False,
  print_input: bool = False,
  **arguments: Unpack["ConnectorArguments"],
- ) -> bool:
- ...
+ ) -> bool: ...

  @abc.abstractmethod
  def get_file(
@@ -132,8 +130,7 @@
  print_output: bool = False,
  print_input: bool = False,
  **arguments: Unpack["ConnectorArguments"],
- ) -> bool:
- ...
+ ) -> bool: ...

  def check_can_rsync(self):
  raise NotImplementedError("This connector does not support rsync")
pyinfra/connectors/docker.py CHANGED
@@ -265,17 +265,10 @@ class DockerConnector(BaseConnector):
  )

  # Load the temporary file and write it to our file or IO object
- with open(temp_filename, encoding="utf-8") as temp_f:
+ with open(temp_filename, "rb") as temp_f:
  with get_file_io(filename_or_io, "wb") as file_io:
  data = temp_f.read()
- data_bytes: bytes
-
- if isinstance(data, str):
- data_bytes = data.encode()
- else:
- data_bytes = data
-
- file_io.write(data_bytes)
+ file_io.write(data)
  finally:
  os.close(fd)
  os.remove(temp_filename)
pyinfra/connectors/local.py CHANGED
@@ -1,5 +1,5 @@
  import os
- from distutils.spawn import find_executable
+ from shutil import which
  from tempfile import mkstemp
  from typing import TYPE_CHECKING, Tuple

@@ -207,7 +207,7 @@ class LocalConnector(BaseConnector):
  return True

  def check_can_rsync(self):
- if not find_executable("rsync"):
+ if not which("rsync"):
  raise NotImplementedError("The `rsync` binary is not available on this system.")

  def rsync(
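Note: distutils.spawn.find_executable is swapped for shutil.which; distutils was removed from the standard library in Python 3.12, and shutil.which behaves equivalently for this check:

    from shutil import which

    # Returns the full path to the binary, or None if it is not on PATH
    if not which("rsync"):
        raise NotImplementedError("The `rsync` binary is not available on this system.")
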