prefect-client 3.3.5.dev3__py3-none-any.whl → 3.3.6.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prefect/_build_info.py CHANGED
@@ -1,5 +1,5 @@
  # Generated by versioningit
- __version__ = "3.3.5.dev3"
- __build_date__ = "2025-04-16 08:08:24.630955+00:00"
- __git_commit__ = "d01b30e8ec3cffd839c23ab42edc62983c3cb1a4"
+ __version__ = "3.3.6.dev1"
+ __build_date__ = "2025-04-19 08:07:12.903915+00:00"
+ __git_commit__ = "c9c018d468694ea61ce3a482948bf4d03bb67227"
  __dirty__ = False
prefect/_experimental/bundles.py CHANGED
@@ -8,6 +8,7 @@ import multiprocessing.context
  import os
  import subprocess
  import sys
+ from pathlib import Path
  from typing import Any, TypedDict
 
  import cloudpickle
@@ -99,7 +100,9 @@ def extract_flow_from_bundle(bundle: SerializedBundle) -> Flow[Any, Any]:
 
 
  def _extract_and_run_flow(
- bundle: SerializedBundle, env: dict[str, Any] | None = None
+ bundle: SerializedBundle,
+ cwd: Path | str | None = None,
+ env: dict[str, Any] | None = None,
  ) -> None:
  """
  Extracts a flow from a bundle and runs it.
@@ -108,6 +111,7 @@ def _extract_and_run_flow(
 
  Args:
  bundle: The bundle to extract and run.
+ cwd: The working directory to use when running the flow.
  env: The environment to use when running the flow.
  """
 
@@ -120,6 +124,9 @@ def _extract_and_run_flow(
  context = _deserialize_bundle_object(bundle["context"])
  flow_run = FlowRun.model_validate(bundle["flow_run"])
 
+ if cwd:
+ os.chdir(cwd)
+
  with SettingsContext(
  profile=settings_context.profile,
  settings=Settings(),
@@ -138,6 +145,8 @@ def _extract_and_run_flow(
 
  def execute_bundle_in_subprocess(
  bundle: SerializedBundle,
+ env: dict[str, Any] | None = None,
+ cwd: Path | str | None = None,
  ) -> multiprocessing.context.SpawnProcess:
  """
  Executes a bundle in a subprocess.
@@ -150,6 +159,7 @@ def execute_bundle_in_subprocess(
  """
 
  ctx = multiprocessing.get_context("spawn")
+ env = env or {}
 
  # Install dependencies if necessary
  if dependencies := bundle.get("dependencies"):
@@ -164,7 +174,9 @@ def execute_bundle_in_subprocess(
  kwargs={
  "bundle": bundle,
  "env": get_current_settings().to_environment_variables(exclude_unset=True)
- | os.environ,
+ | os.environ
+ | env,
+ "cwd": cwd,
  },
  )
 
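The net effect of the bundles.py changes: execute_bundle_in_subprocess now accepts optional env and cwd arguments, merging env with highest precedence over the settings-derived environment and os.environ, and the spawned child changes into cwd before running the flow. A minimal sketch of the new call shape, assuming a bundle built elsewhere (the bundle value and path below are hypothetical, and the module is experimental, so the interface may change):

    from prefect._experimental.bundles import execute_bundle_in_subprocess

    # `bundle` is a SerializedBundle, e.g. produced elsewhere by
    # create_bundle_for_flow_run(flow=my_flow, flow_run=my_flow_run).
    process = execute_bundle_in_subprocess(
        bundle,
        env={"MY_FLAG": "1"},       # highest precedence in the merged environment
        cwd="/tmp/flow-workspace",  # the child process chdirs here before running
    )
    process.join()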
prefect/cache_policies.py CHANGED
@@ -33,9 +33,9 @@ def _register_stable_transforms() -> None:
  so that cache keys that utilize them are deterministic across invocations.
  """
  try:
- import pandas as pd
+ import pandas as pd # pyright: ignore
 
- STABLE_TRANSFORMS[pd.DataFrame] = lambda df: [
+ STABLE_TRANSFORMS[pd.DataFrame] = lambda df: [ # pyright: ignore
  df[col] for col in sorted(df.columns)
  ]
  except (ImportError, ModuleNotFoundError):
@@ -183,7 +183,7 @@ class CompoundCachePolicy(CachePolicy):
  Any keys that return `None` will be ignored.
  """
 
- policies: list[CachePolicy] = field(default_factory=list)
+ policies: list[CachePolicy] = field(default_factory=lambda: [])
 
  def __post_init__(self) -> None:
  # flatten any CompoundCachePolicies
@@ -349,7 +349,7 @@ class Inputs(CachePolicy):
  Policy that computes a cache key based on a hash of the runtime inputs provided to the task..
  """
 
- exclude: list[str] = field(default_factory=list)
+ exclude: list[str] = field(default_factory=lambda: [])
 
  def compute_key(
  self,
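Both default_factory changes in this file, and the matching one in prefect/concurrency/context.py below, are behavior-preserving: lambda: [] returns a fresh empty list exactly as the bare list callable does. A plausible motivation, consistent with the # pyright: ignore comments added above, is that some type checkers unify the lambda's return type with the annotated field more readily than the bare builtin; a hypothetical illustration:

    from dataclasses import dataclass, field

    @dataclass
    class Example:
        # Equivalent at runtime; the lambda gives the checker an expression
        # whose inferred type it can pin to the annotated field.
        a: list[str] = field(default_factory=list)
        b: list[str] = field(default_factory=lambda: [])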
prefect/concurrency/context.py CHANGED
@@ -13,7 +13,9 @@ class ConcurrencyContext(ContextModel):
  # Track the slots that have been acquired but were not able to be released
  # due to cancellation or some other error. These slots are released when
  # the context manager exits.
- cleanup_slots: list[tuple[list[str], int, float]] = Field(default_factory=list)
+ cleanup_slots: list[tuple[list[str], int, float]] = Field(
+ default_factory=lambda: []
+ )
 
  def __exit__(self, *exc_info: Any) -> None:
  if self.cleanup_slots:
prefect/context.py CHANGED
@@ -18,8 +18,6 @@ from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, Un
  from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
  from typing_extensions import Self
 
- import prefect.logging
- import prefect.logging.configuration
  import prefect.settings
  import prefect.types._datetime
  from prefect._internal.compatibility.migration import getattr_migration
@@ -128,7 +126,7 @@ class ContextModel(BaseModel):
  def __init__(self, **kwargs: Any) -> None: ...
 
  # The context variable for storing data must be defined by the child class
- __var__: ClassVar[ContextVar[Self]]
+ __var__: ClassVar[ContextVar[Any]]
  _token: Optional[Token[Self]] = PrivateAttr(None)
  model_config: ClassVar[ConfigDict] = ConfigDict(
  arbitrary_types_allowed=True,
prefect/flows.py CHANGED
@@ -40,6 +40,7 @@ from typing import (
  from uuid import UUID
 
  import pydantic
+ from exceptiongroup import BaseExceptionGroup, ExceptionGroup
  from pydantic.v1 import BaseModel as V1BaseModel
  from pydantic.v1.decorator import ValidatedFunction as V1ValidatedFunction
  from pydantic.v1.errors import ConfigError # TODO
@@ -105,6 +106,9 @@ from ._internal.pydantic.v2_validated_func import (
  V2ValidatedFunction as ValidatedFunction,
  )
 
+ if TYPE_CHECKING:
+ from prefect.workers.base import BaseWorker
+
  T = TypeVar("T") # Generic type var for capturing the inner return type of async funcs
  R = TypeVar("R") # The return type of the user's function
  P = ParamSpec("P") # The parameters of the flow
@@ -1984,6 +1988,141 @@ class FlowDecorator:
  flow: FlowDecorator = FlowDecorator()
 
 
+ class InfrastructureBoundFlow(Flow[P, R]):
+ """
+ EXPERIMENTAL: This class is experimental and may be removed or changed in future
+ releases.
+
+ A flow that is bound to running on a specific infrastructure.
+
+ Attributes:
+ work_pool: The name of the work pool to run the flow on. The base job
+ configuration of the work pool will determine the configuration of the
+ infrastructure the flow will run on.
+ job_variables: Infrastructure configuration that will override the base job
+ configuration of the work pool.
+ worker_cls: The class of the worker to use to spin up infrastructure and submit
+ the flow to it.
+ """
+
+ def __init__(
+ self,
+ *args: Any,
+ work_pool: str,
+ job_variables: dict[str, Any],
+ worker_cls: type["BaseWorker[Any, Any, Any]"],
+ **kwargs: Any,
+ ):
+ super().__init__(*args, **kwargs)
+ self.work_pool = work_pool
+ self.job_variables = job_variables
+ self.worker_cls = worker_cls
+
+ @overload
+ def __call__(self: "Flow[P, NoReturn]", *args: P.args, **kwargs: P.kwargs) -> None:
+ # `NoReturn` matches if a type can't be inferred for the function which stops a
+ # sync function from matching the `Coroutine` overload
+ ...
+
+ @overload
+ def __call__(
+ self: "Flow[P, Coroutine[Any, Any, T]]",
+ *args: P.args,
+ **kwargs: P.kwargs,
+ ) -> Coroutine[Any, Any, T]: ...
+
+ @overload
+ def __call__(
+ self: "Flow[P, T]",
+ *args: P.args,
+ **kwargs: P.kwargs,
+ ) -> T: ...
+
+ @overload
+ def __call__(
+ self: "Flow[P, Coroutine[Any, Any, T]]",
+ *args: P.args,
+ return_state: Literal[True],
+ **kwargs: P.kwargs,
+ ) -> Awaitable[State[T]]: ...
+
+ @overload
+ def __call__(
+ self: "Flow[P, T]",
+ *args: P.args,
+ return_state: Literal[True],
+ **kwargs: P.kwargs,
+ ) -> State[T]: ...
+
+ def __call__(
+ self,
+ *args: "P.args",
+ return_state: bool = False,
+ wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
+ **kwargs: "P.kwargs",
+ ):
+ async def modified_call(
+ *args: P.args,
+ return_state: bool = False,
+ # TODO: Handle wait_for once we have an asynchronous way to wait for futures
+ # We should wait locally for futures to resolve before spinning up
+ # infrastructure.
+ wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
+ **kwargs: P.kwargs,
+ ) -> R | State[R]:
+ try:
+ async with self.worker_cls(work_pool_name=self.work_pool) as worker:
+ parameters = get_call_parameters(self, args, kwargs)
+ future = await worker.submit(
+ flow=self,
+ parameters=parameters,
+ job_variables=self.job_variables,
+ )
+ if return_state:
+ await future.wait_async()
+ return future.state
+ return await future.aresult()
+ except (ExceptionGroup, BaseExceptionGroup) as exc:
+ # For less verbose tracebacks
+ exceptions = exc.exceptions
+ if len(exceptions) == 1:
+ raise exceptions[0] from None
+ else:
+ raise
+
+ if inspect.iscoroutinefunction(self.fn):
+ return modified_call(
+ *args, return_state=return_state, wait_for=wait_for, **kwargs
+ )
+ else:
+ return run_coro_as_sync(
+ modified_call(
+ *args,
+ return_state=return_state,
+ wait_for=wait_for,
+ **kwargs,
+ )
+ )
+
+
+ def bind_flow_to_infrastructure(
+ flow: Flow[P, R],
+ work_pool: str,
+ worker_cls: type["BaseWorker[Any, Any, Any]"],
+ job_variables: dict[str, Any] | None = None,
+ ) -> InfrastructureBoundFlow[P, R]:
+ new = InfrastructureBoundFlow[P, R](
+ flow.fn,
+ work_pool=work_pool,
+ job_variables=job_variables or {},
+ worker_cls=worker_cls,
+ )
+ # Copy all attributes from the original flow
+ for attr, value in flow.__dict__.items():
+ setattr(new, attr, value)
+ return new
+
+
  def _raise_on_name_with_banned_characters(name: Optional[str]) -> Optional[str]:
  """
  Raise an InvalidNameError if the given name contains any invalid
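bind_flow_to_infrastructure is the public entry point to this experimental machinery: it clones a flow into an InfrastructureBoundFlow whose __call__ submits the run through a worker instead of executing locally. A hedged usage sketch (the work pool name is hypothetical, and ProcessWorker stands in for any BaseWorker subclass):

    from prefect import flow
    from prefect.flows import bind_flow_to_infrastructure
    from prefect.workers.process import ProcessWorker

    @flow
    def double(n: int) -> int:
        return n * 2

    remote_double = bind_flow_to_infrastructure(
        double,
        work_pool="my-process-pool",  # hypothetical pool; its base job
        worker_cls=ProcessWorker,     # template drives the infrastructure
    )

    result = remote_double(21)  # submits via the worker and waits for the result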
prefect/runner/runner.py CHANGED
@@ -636,7 +636,12 @@ class Runner:
 
  return process
 
- async def execute_bundle(self, bundle: SerializedBundle) -> None:
+ async def execute_bundle(
+ self,
+ bundle: SerializedBundle,
+ cwd: Path | str | None = None,
+ env: dict[str, str | None] | None = None,
+ ) -> None:
  """
  Executes a bundle in a subprocess.
  """
@@ -651,7 +656,7 @@ class Runner:
  if not self._acquire_limit_slot(flow_run.id):
  return
 
- process = execute_bundle_in_subprocess(bundle)
+ process = execute_bundle_in_subprocess(bundle, cwd=cwd, env=env)
 
  if process.pid is None:
  # This shouldn't happen because `execute_bundle_in_subprocess` starts the process
@@ -776,7 +781,7 @@ class Runner:
  if command is None:
  runner_command = [get_sys_executable(), "-m", "prefect.engine"]
  else:
- runner_command = shlex.split(command)
+ runner_command = shlex.split(command, posix=(os.name != "nt"))
 
  flow_run_logger = self._get_flow_run_logger(flow_run)
 
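The posix=(os.name != "nt") change matters because POSIX-mode lexing treats backslashes as escape characters, which mangles Windows paths in a custom runner command. A quick illustration of the difference:

    import shlex

    cmd = 'python -m prefect.engine --path C:\\flows\\run.py'
    shlex.split(cmd)               # [..., 'C:flowsrun.py']  (backslashes consumed)
    shlex.split(cmd, posix=False)  # [..., 'C:\\flows\\run.py']  (path preserved)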
prefect/utilities/_ast.py ADDED
@@ -0,0 +1,117 @@
+ import ast
+ import math
+ from typing import TYPE_CHECKING, Literal
+
+ import anyio
+ from typing_extensions import TypeAlias
+
+ from prefect.logging.loggers import get_logger
+ from prefect.settings import get_current_settings
+ from prefect.utilities.asyncutils import LazySemaphore
+ from prefect.utilities.filesystem import get_open_file_limit
+
+ # Only allow half of the open file limit to be open at once to allow for other
+ # actors to open files.
+ OPEN_FILE_SEMAPHORE = LazySemaphore(lambda: math.floor(get_open_file_limit() * 0.5))
+
+ # this potentially could be a TypedDict, but you
+ # need some way to convince the type checker that
+ # Literal["flow_name", "task_name"] are being provided
+ DecoratedFnMetadata: TypeAlias = dict[str, str]
+
+
+ async def find_prefect_decorated_functions_in_file(
+ path: anyio.Path, decorator_module: str, decorator_name: Literal["flow", "task"]
+ ) -> list[DecoratedFnMetadata]:
+ logger = get_logger()
+ decorator_name_key = f"{decorator_name}_name"
+ decorated_functions: list[DecoratedFnMetadata] = []
+
+ async with OPEN_FILE_SEMAPHORE:
+ try:
+ async with await anyio.open_file(path) as f:
+ try:
+ tree = ast.parse(await f.read())
+ except SyntaxError:
+ if get_current_settings().debug_mode:
+ logger.debug(
+ f"Could not parse {path} as a Python file. Skipping."
+ )
+ return decorated_functions
+ except Exception as exc:
+ if get_current_settings().debug_mode:
+ logger.debug(f"Could not open {path}: {exc}. Skipping.")
+ return decorated_functions
+
+ for node in ast.walk(tree):
+ if isinstance(
+ node,
+ (
+ ast.FunctionDef,
+ ast.AsyncFunctionDef,
+ ),
+ ):
+ for decorator in node.decorator_list:
+ # handles @decorator_name
+ is_name_match = (
+ isinstance(decorator, ast.Name) and decorator.id == decorator_name
+ )
+ # handles @decorator_name()
+ is_func_name_match = (
+ isinstance(decorator, ast.Call)
+ and isinstance(decorator.func, ast.Name)
+ and decorator.func.id == decorator_name
+ )
+ # handles @decorator_module.decorator_name
+ is_module_attribute_match = (
+ isinstance(decorator, ast.Attribute)
+ and isinstance(decorator.value, ast.Name)
+ and decorator.value.id == decorator_module
+ and decorator.attr == decorator_name
+ )
+ # handles @decorator_module.decorator_name()
+ is_module_attribute_func_match = (
+ isinstance(decorator, ast.Call)
+ and isinstance(decorator.func, ast.Attribute)
+ and decorator.func.attr == decorator_name
+ and isinstance(decorator.func.value, ast.Name)
+ and decorator.func.value.id == decorator_module
+ )
+ if is_name_match or is_module_attribute_match:
+ decorated_functions.append(
+ {
+ decorator_name_key: node.name,
+ "function_name": node.name,
+ "filepath": str(path),
+ }
+ )
+ if is_func_name_match or is_module_attribute_func_match:
+ name_kwarg_node = None
+ if TYPE_CHECKING:
+ assert isinstance(decorator, ast.Call)
+ for kw in decorator.keywords:
+ if kw.arg == "name":
+ name_kwarg_node = kw
+ break
+ if name_kwarg_node is not None and isinstance(
+ name_kwarg_node.value, ast.Constant
+ ):
+ decorated_fn_name = name_kwarg_node.value.value
+ else:
+ decorated_fn_name = node.name
+ decorated_functions.append(
+ {
+ decorator_name_key: decorated_fn_name,
+ "function_name": node.name,
+ "filepath": str(path),
+ }
+ )
+ return decorated_functions
+
+
+ async def find_flow_functions_in_file(path: anyio.Path) -> list[DecoratedFnMetadata]:
+ return await find_prefect_decorated_functions_in_file(path, "prefect", "flow")
+
+
+ async def find_task_functions_in_file(path: anyio.Path) -> list[DecoratedFnMetadata]:
+ return await find_prefect_decorated_functions_in_file(path, "prefect", "task")
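The new module scans source files with ast rather than importing them, so flows and tasks can be discovered even when a file's dependencies are not installed. A small usage sketch against a hypothetical flows.py:

    import anyio

    from prefect.utilities._ast import find_flow_functions_in_file

    async def main() -> None:
        for entry in await find_flow_functions_in_file(anyio.Path("flows.py")):
            # Each entry carries "flow_name", "function_name", and "filepath"
            print(entry["flow_name"], "defined as", entry["function_name"])

    anyio.run(main)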
prefect/workers/base.py CHANGED
@@ -3,7 +3,11 @@ from __future__ import annotations
  import abc
  import asyncio
  import datetime
+ import json
+ import subprocess
+ import tempfile
  import threading
+ import uuid
  import warnings
  from contextlib import AsyncExitStack
  from functools import partial
@@ -21,6 +25,7 @@ from zoneinfo import ZoneInfo
  import anyio
  import anyio.abc
  import httpx
+ from exceptiongroup import BaseExceptionGroup, ExceptionGroup
  from importlib_metadata import (
  distributions, # type: ignore[reportUnknownVariableType] incomplete typing
  )
@@ -35,6 +40,7 @@ from prefect._internal.schemas.validators import return_v_or_none
  from prefect.client.base import ServerType
  from prefect.client.orchestration import PrefectClient, get_client
  from prefect.client.schemas.actions import WorkPoolCreate, WorkPoolUpdate
+ from prefect.client.schemas.objects import Flow as APIFlow
  from prefect.client.schemas.objects import (
  Integration,
  StateType,
@@ -48,6 +54,7 @@ from prefect.exceptions import (
  Abort,
  ObjectNotFound,
  )
+ from prefect.futures import PrefectFlowRunFuture
  from prefect.logging.loggers import (
  PrefectLogAdapter,
  flow_run_logger,
@@ -81,11 +88,12 @@ from prefect.utilities.templating import (
  from prefect.utilities.urls import url_for
 
  if TYPE_CHECKING:
- from prefect.client.schemas.objects import Flow, FlowRun
+ from prefect.client.schemas.objects import FlowRun
  from prefect.client.schemas.responses import (
  DeploymentResponse,
  WorkerFlowRunResponse,
  )
+ from prefect.flows import Flow
 
 
  class BaseJobConfiguration(BaseModel):
@@ -217,7 +225,7 @@ class BaseJobConfiguration(BaseModel):
  self,
  flow_run: "FlowRun",
  deployment: "DeploymentResponse | None" = None,
- flow: "Flow | None" = None,
+ flow: "APIFlow | None" = None,
  work_pool: "WorkPool | None" = None,
  worker_name: str | None = None,
  ) -> None:
@@ -314,7 +322,7 @@ class BaseJobConfiguration(BaseModel):
  return labels
 
  @staticmethod
- def _base_flow_labels(flow: "Flow | None") -> dict[str, str]:
+ def _base_flow_labels(flow: "APIFlow | None") -> dict[str, str]:
  if flow is None:
  return {}
 
@@ -418,6 +426,7 @@ class BaseWorkerResult(BaseModel, abc.ABC):
  C = TypeVar("C", bound=BaseJobConfiguration)
  V = TypeVar("V", bound=BaseVariables)
  R = TypeVar("R", bound=BaseWorkerResult)
+ FR = TypeVar("FR") # used to capture the return type of a flow
 
 
  @register_base_type
@@ -685,6 +694,146 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  "Workers must implement a method for running submitted flow runs"
  )
 
+ async def submit(
+ self,
+ flow: "Flow[..., FR]",
+ parameters: dict[str, Any] | None = None,
+ job_variables: dict[str, Any] | None = None,
+ ) -> "PrefectFlowRunFuture[FR]":
+ """
+ EXPERIMENTAL: The interface for this method is subject to change.
+
+ Submits a flow to run via the worker.
+
+ Args:
+ flow: The flow to submit
+ parameters: The parameters to pass to the flow
+
+ Returns:
+ A flow run object
+ """
+ warnings.warn(
+ "Ad-hoc flow submission via workers is experimental. The interface "
+ "and behavior of this feature are subject to change.",
+ category=FutureWarning,
+ )
+ if self._runs_task_group is None:
+ raise RuntimeError("Worker not properly initialized")
+
+ flow_run = await self._runs_task_group.start(
+ partial(
+ self._submit_adhoc_run,
+ flow=flow,
+ parameters=parameters,
+ job_variables=job_variables,
+ ),
+ )
+ return PrefectFlowRunFuture(flow_run_id=flow_run.id)
+
+ async def _submit_adhoc_run(
+ self,
+ flow: "Flow[..., FR]",
+ parameters: dict[str, Any] | None = None,
+ job_variables: dict[str, Any] | None = None,
+ task_status: anyio.abc.TaskStatus["FlowRun"] | None = None,
+ ):
+ """
+ Submits a flow run to the Kubernetes worker.
+ """
+ from prefect._experimental.bundles import (
+ convert_step_to_command,
+ create_bundle_for_flow_run,
+ )
+
+ if (
+ self.work_pool.storage_configuration.bundle_upload_step is None
+ or self.work_pool.storage_configuration.bundle_execution_step is None
+ ):
+ raise RuntimeError(
+ f"Storage is not configured for work pool {self.work_pool.name!r}. "
+ "Please configure storage for the work pool by running `prefect "
+ "work-pool storage configure`."
+ )
+
+ bundle_key = str(uuid.uuid4())
+ upload_command = convert_step_to_command(
+ self.work_pool.storage_configuration.bundle_upload_step,
+ bundle_key,
+ quiet=True,
+ )
+ execute_command = convert_step_to_command(
+ self.work_pool.storage_configuration.bundle_execution_step, bundle_key
+ )
+
+ job_variables = (job_variables or {}) | {"command": " ".join(execute_command)}
+ flow_run = await self.client.create_flow_run(
+ flow,
+ parameters=parameters,
+ state=Pending(),
+ job_variables=job_variables,
+ work_pool_name=self.work_pool.name,
+ )
+ if task_status is not None:
+ # Emit the flow run object to .submit to allow it to return a future as soon as possible
+ task_status.started(flow_run)
+ # Avoid an API call to get the flow
+ api_flow = APIFlow(id=flow_run.flow_id, name=flow.name, labels={})
+ logger = self.get_flow_run_logger(flow_run)
+
+ configuration = await self.job_configuration.from_template_and_values(
+ base_job_template=self.work_pool.base_job_template,
+ values=job_variables,
+ client=self._client,
+ )
+ configuration.prepare_for_flow_run(
+ flow_run=flow_run,
+ flow=api_flow,
+ work_pool=self.work_pool,
+ worker_name=self.name,
+ )
+
+ bundle = create_bundle_for_flow_run(flow=flow, flow_run=flow_run)
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ await (
+ anyio.Path(temp_dir)
+ .joinpath(bundle_key)
+ .write_bytes(json.dumps(bundle).encode("utf-8"))
+ )
+
+ try:
+ full_command = upload_command + [bundle_key]
+ logger.debug(
+ "Uploading execution bundle with command: %s", full_command
+ )
+ await anyio.run_process(
+ full_command,
+ cwd=temp_dir,
+ )
+ except subprocess.CalledProcessError as e:
+ raise RuntimeError(e.stderr.decode("utf-8")) from e
+
+ logger.debug("Successfully uploaded execution bundle")
+
+ try:
+ result = await self.run(flow_run, configuration)
+
+ if result.status_code != 0:
+ await self._propose_crashed_state(
+ flow_run,
+ (
+ "Flow run infrastructure exited with non-zero status code"
+ f" {result.status_code}."
+ ),
+ )
+ except Exception as exc:
+ # This flow run was being submitted and did not start successfully
+ logger.exception(
+ f"Failed to submit flow run '{flow_run.id}' to infrastructure."
+ )
+ message = f"Flow run could not be submitted to infrastructure:\n{exc!r}"
+ await self._propose_crashed_state(flow_run, message, client=self.client)
+
  @classmethod
  def __dispatch_key__(cls) -> str | None:
  if cls.__name__ == "BaseWorker":
@@ -1230,11 +1379,13 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  exc_info=True,
  )
 
- async def _propose_crashed_state(self, flow_run: "FlowRun", message: str) -> None:
+ async def _propose_crashed_state(
+ self, flow_run: "FlowRun", message: str, client: PrefectClient | None = None
+ ) -> None:
  run_logger = self.get_flow_run_logger(flow_run)
  try:
  state = await propose_state(
- self.client,
+ client or self.client,
  Crashed(message=message),
  flow_run_id=flow_run.id,
  )
@@ -1345,8 +1496,16 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
  return self
 
  async def __aexit__(self, *exc_info: Any) -> None:
- self._logger.debug("Exiting worker context...")
- await self.teardown(*exc_info)
+ try:
+ self._logger.debug("Exiting worker context...")
+ await self.teardown(*exc_info)
+ except (ExceptionGroup, BaseExceptionGroup) as exc:
+ # For less verbose tracebacks
+ exceptions = exc.exceptions
+ if len(exceptions) == 1:
+ raise exceptions[0] from None
+ else:
+ raise
 
  def __repr__(self) -> str:
  return f"Worker(pool={self._work_pool_name!r}, name={self.name!r})"
prefect/workers/process.py CHANGED
@@ -22,15 +22,17 @@ import tempfile
  import threading
  from functools import partial
  from pathlib import Path
- from typing import TYPE_CHECKING, Any, Callable, Optional
+ from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar
 
  import anyio
  import anyio.abc
  from pydantic import Field, field_validator
 
  from prefect._internal.schemas.validators import validate_working_dir
+ from prefect.client.schemas.objects import Flow as APIFlow
  from prefect.runner.runner import Runner
  from prefect.settings import PREFECT_WORKER_QUERY_SECONDS
+ from prefect.states import Pending
  from prefect.utilities.processutils import get_sys_executable
  from prefect.utilities.services import critical_service_loop
  from prefect.workers.base import (
@@ -41,8 +43,11 @@ from prefect.workers.base import (
  )
 
  if TYPE_CHECKING:
- from prefect.client.schemas.objects import Flow, FlowRun, WorkPool
+ from prefect.client.schemas.objects import FlowRun, WorkPool
  from prefect.client.schemas.responses import DeploymentResponse
+ from prefect.flows import Flow
+
+ FR = TypeVar("FR") # used to capture the return type of a flow
 
 
  class ProcessJobConfiguration(BaseJobConfiguration):
@@ -60,7 +65,7 @@ class ProcessJobConfiguration(BaseJobConfiguration):
  self,
  flow_run: "FlowRun",
  deployment: "DeploymentResponse | None" = None,
- flow: "Flow | None" = None,
+ flow: "APIFlow | None" = None,
  work_pool: "WorkPool | None" = None,
  worker_name: str | None = None,
  ) -> None:
@@ -231,6 +236,58 @@ class ProcessWorker(
  status_code=process.returncode, identifier=str(process.pid)
  )
 
+ async def _submit_adhoc_run(
+ self,
+ flow: "Flow[..., FR]",
+ parameters: dict[str, Any] | None = None,
+ job_variables: dict[str, Any] | None = None,
+ task_status: anyio.abc.TaskStatus["FlowRun"] | None = None,
+ ):
+ from prefect._experimental.bundles import (
+ create_bundle_for_flow_run,
+ )
+
+ flow_run = await self.client.create_flow_run(
+ flow,
+ parameters=parameters,
+ state=Pending(),
+ job_variables=job_variables,
+ work_pool_name=self.work_pool.name,
+ )
+ if task_status is not None:
+ # Emit the flow run object to .submit to allow it to return a future as soon as possible
+ task_status.started(flow_run)
+
+ api_flow = APIFlow(id=flow_run.flow_id, name=flow.name, labels={})
+ logger = self.get_flow_run_logger(flow_run)
+
+ configuration = await self.job_configuration.from_template_and_values(
+ base_job_template=self.work_pool.base_job_template,
+ values=job_variables or {},
+ client=self._client,
+ )
+ configuration.prepare_for_flow_run(
+ flow_run=flow_run,
+ flow=api_flow,
+ work_pool=self.work_pool,
+ worker_name=self.name,
+ )
+
+ bundle = create_bundle_for_flow_run(flow=flow, flow_run=flow_run)
+
+ logger.debug("Executing flow run bundle in subprocess...")
+ try:
+ await self._runner.execute_bundle(
+ bundle=bundle,
+ cwd=configuration.working_dir,
+ env=configuration.env,
+ )
+ except Exception:
+ logger.exception("Error executing flow run bundle in subprocess")
+ await self._propose_crashed_state(flow_run, "Flow run execution failed")
+ finally:
+ logger.debug("Flow run bundle execution complete")
+
  async def __aenter__(self) -> ProcessWorker:
  await super().__aenter__()
  self._runner = await self._exit_stack.enter_async_context(
{prefect_client-3.3.5.dev3.dist-info → prefect_client-3.3.6.dev1.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: prefect-client
- Version: 3.3.5.dev3
+ Version: 3.3.6.dev1
  Summary: Workflow orchestration and management.
  Project-URL: Changelog, https://github.com/PrefectHQ/prefect/releases
  Project-URL: Documentation, https://docs.prefect.io
@@ -47,7 +47,7 @@ Requires-Dist: prometheus-client>=0.20.0
  Requires-Dist: prometheus-client>=0.20.0
  Requires-Dist: pydantic!=2.10.0,<3.0.0,>=2.9
  Requires-Dist: pydantic-core<3.0.0,>=2.12.0
  Requires-Dist: pydantic-extra-types<3.0.0,>=2.8.2
- Requires-Dist: pydantic-settings>2.2.1
+ Requires-Dist: pydantic-settings!=2.9.0,<3.0.0,>2.2.1
  Requires-Dist: python-dateutil<3.0.0,>=2.8.2
  Requires-Dist: python-slugify<9.0,>=5.0
  Requires-Dist: python-socks[asyncio]<3.0,>=2.5.3
{prefect_client-3.3.5.dev3.dist-info → prefect_client-3.3.6.dev1.dist-info}/RECORD RENAMED
@@ -1,20 +1,20 @@
  prefect/.prefectignore,sha256=awSprvKT0vI8a64mEOLrMxhxqcO-b0ERQeYpA2rNKVQ,390
  prefect/__init__.py,sha256=iCdcC5ZmeewikCdnPEP6YBAjPNV5dvfxpYCTpw30Hkw,3685
  prefect/__main__.py,sha256=WFjw3kaYJY6pOTA7WDOgqjsz8zUEUZHCcj3P5wyVa-g,66
- prefect/_build_info.py,sha256=8rZCXd-Nz2-o4Nm_4yyvy6osOkdGCLiuhickzD7KvFM,185
+ prefect/_build_info.py,sha256=jisNjC8uPjptK7U97v_vxNXQXL36_gwcQrQgC-M9VF0,185
  prefect/_result_records.py,sha256=S6QmsODkehGVSzbMm6ig022PYbI6gNKz671p_8kBYx4,7789
  prefect/_waiters.py,sha256=Ia2ITaXdHzevtyWIgJoOg95lrEXQqNEOquHvw3T33UQ,9026
  prefect/agent.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/artifacts.py,sha256=dMBUOAWnUamzjb5HSqwB5-GR2Qb-Gxee26XG5NDCUuw,22720
  prefect/automations.py,sha256=ZzPxn2tINdlXTQo805V4rIlbXuNWxd7cdb3gTJxZIeY,12567
- prefect/cache_policies.py,sha256=Kwdei4JjitNfx42OepKpDNxwPtEwRgUUAn_soxsnNzI,12699
- prefect/context.py,sha256=LYEOlz7ZkuSDj7TmrE4mByy3N3TquFkIE2hEy0WHW1Y,23798
+ prefect/cache_policies.py,sha256=jH1aDW6vItTcsEytuTCrNYyjbq87IQPwdOgF0yxiUts,12749
+ prefect/context.py,sha256=IXS_ddSkQVFKFCQhjWk7Fwgfstfu6ITCmNeZ1beablY,23737
  prefect/engine.py,sha256=uB5JN4l045i5JTlRQNT1x7MwlSiGQ5Bop2Q6jHHOgxY,3699
  prefect/exceptions.py,sha256=wZLQQMRB_DyiYkeEdIC5OKwbba5A94Dlnics-lrWI7A,11581
  prefect/filesystems.py,sha256=v5YqGB4uXf9Ew2VuB9VCSkawvYMMVvEtZf7w1VmAmr8,18036
  prefect/flow_engine.py,sha256=hZpTYEtwTPMtwVoTCrfD93igN7rlKeG_0kyCvdU4aYE,58876
  prefect/flow_runs.py,sha256=dbHcXsOq1UsNM7vyJV9gboCTylmdUwQ_-W4NQt4R4ds,17267
- prefect/flows.py,sha256=kBLT6M903ZFD4TUvmvRN-zOdNcjSlIO9d2kHhPYq5Oo,109547
+ prefect/flows.py,sha256=8gWWoZB8S8j8Iwz0TTc5F-f_8sTFucGm53aaue5vUi4,114116
  prefect/futures.py,sha256=ZD5rdgUHA4sfxwHaPToumOUKlyn4d989JHR7eI97-Hs,23271
  prefect/main.py,sha256=8V-qLB4GjEVCkGRgGXeaIk-JIXY8Z9FozcNluj4Sm9E,2589
  prefect/plugins.py,sha256=FPRLR2mWVBMuOnlzeiTD9krlHONZH2rtYLD753JQDNQ,2516
@@ -31,7 +31,7 @@ prefect/tasks.py,sha256=EpMw5O1B9pAFVraC0KzytMOKi8iy7ZYnKWRs7WtvogU,74742
  prefect/transactions.py,sha256=uIoPNudzJzH6NrMJhrgr5lyh6JxOJQqT1GvrXt69yNw,26068
  prefect/variables.py,sha256=dCK3vX7TbkqXZhnNT_v7rcGh3ISRqoR6pJVLpoll3Js,8342
  prefect/_experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/_experimental/bundles.py,sha256=EIZc6hsjQS8V2CH1RbsxOiFeR11l1IgvhiXTSO3uDyM,6386
+ prefect/_experimental/bundles.py,sha256=E5nRaLVTbYCrACXZcRJCd4ssOcQU-Z26ewCb_7tPeTM,6687
  prefect/_experimental/lineage.py,sha256=8LssReoq7eLtQScUCu-7FCtrWoRZstXKRdpO0PxgbKg,9958
  prefect/_experimental/sla/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/_experimental/sla/client.py,sha256=XTkYHFZiBy_O7RgUyGEdl9MxaHP-6fEAKBk3ksNQobU,3611
@@ -122,7 +122,7 @@ prefect/concurrency/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSu
  prefect/concurrency/_asyncio.py,sha256=uHjC3vQAiznRz_ueZE1RQ4x28zTcPJPoO2MMi0J41vU,2575
  prefect/concurrency/_events.py,sha256=KWHDldCWE3b5AH9eZ7kfmajvp36lRFCjCXIEx77jtKk,1825
  prefect/concurrency/asyncio.py,sha256=SUnRfqwBdBGwQll7SvywugVQnVbEzePqPFcUfIcTNMs,4505
- prefect/concurrency/context.py,sha256=8ZXs3G7NOF5Q2NqydK-K3zfjmYNnmfer-25hH6r6MgA,1009
+ prefect/concurrency/context.py,sha256=kJWE2zGuoel9qiGOqHW5qnSyzV1INlsicTmeEEexoFo,1029
  prefect/concurrency/services.py,sha256=U_1Y8Mm-Fd4Nvn0gxiWc_UdacdqT-vKjzex-oJpUt50,2288
  prefect/concurrency/sync.py,sha256=MMRJvxK-Yzyt0WEEu95C2RaMwfLdYgYH6vejCqfSUmw,4687
  prefect/concurrency/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -182,7 +182,7 @@ prefect/logging/highlighters.py,sha256=BCf_LNhFInIfGPqwuu8YVrGa4wVxNc4YXo2pYgftp
  prefect/logging/loggers.py,sha256=rwFJv0i3dhdKr25XX-xUkQy4Vv4dy18bTy366jrC0OQ,12741
  prefect/logging/logging.yml,sha256=tT7gTyC4NmngFSqFkCdHaw7R0GPNPDDsTCGZQByiJAQ,3169
  prefect/runner/__init__.py,sha256=pQBd9wVrUVUDUFJlgiweKSnbahoBZwqnd2O2jkhrULY,158
- prefect/runner/runner.py,sha256=D2sTbcUvFW5eiQLsQgzUO8hSTyWMvwQ7-nTg35twuIY,64962
+ prefect/runner/runner.py,sha256=jv87XyaJ89uK0VzKpMzL3HfXgKZky8JlRs-gW04no5Y,65117
  prefect/runner/server.py,sha256=YRYFNoYddA9XfiTIYtudxrnD1vCX-PaOLhvyGUOb9AQ,11966
  prefect/runner/storage.py,sha256=L7aSjie5L6qbXYCDqYDX3ouQ_NsNMlmfjPeaWOC-ncs,28043
  prefect/runner/submit.py,sha256=qOEj-NChQ6RYFV35hHEVMTklrNmKwaGs2mR78ku9H0o,9474
@@ -278,6 +278,7 @@ prefect/types/__init__.py,sha256=yBjKxiQmSC7jXoo0UNmM3KZil1NBFS-BWGPfwSEaoJo,462
  prefect/types/_datetime.py,sha256=Cy6z7MxPDV_-jH2vxqC3PNA2G74IdUDIB07Jaakdj5w,7294
  prefect/types/entrypoint.py,sha256=2FF03-wLPgtnqR_bKJDB2BsXXINPdu8ptY9ZYEZnXg8,328
  prefect/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ prefect/utilities/_ast.py,sha256=sgEPUWElih-3cp4PoAy1IOyPtu8E27lL0Dldf3ijnYY,4905
  prefect/utilities/_deprecated.py,sha256=b3pqRSoFANdVJAc8TJkygBcP-VjZtLJUxVIWC7kwspI,1303
  prefect/utilities/_engine.py,sha256=9GW4X1lyAbmPwCuXXIubVJ7Z0DMT3dykkEUtp9tm5hI,3356
  prefect/utilities/_git.py,sha256=bPYWQdr9xvH0BqxR1ll1RkaSb3x0vhwylhYD5EilkKU,863
@@ -310,13 +311,13 @@ prefect/utilities/schema_tools/__init__.py,sha256=At3rMHd2g_Em2P3_dFQlFgqR_EpBwr
  prefect/utilities/schema_tools/hydration.py,sha256=NkRhWkNfxxFmVGhNDfmxdK_xeKaEhs3a42q83Sg9cT4,9436
  prefect/utilities/schema_tools/validation.py,sha256=Wix26IVR-ZJ32-6MX2pHhrwm3reB-Q4iB6_phn85OKE,10743
  prefect/workers/__init__.py,sha256=EaM1F0RZ-XIJaGeTKLsXDnfOPHzVWk5bk0_c4BVS44M,64
- prefect/workers/base.py,sha256=2GtGIJ4eAEVUaZwi6q1OMEbU_JhkEQNUmEznzj8HKbE,53364
+ prefect/workers/base.py,sha256=l7ghNeSJh6DXiDdsnLmQor2-b82YaZJIz5TpFSn2dFY,59171
  prefect/workers/block.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/workers/cloud.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
- prefect/workers/process.py,sha256=uxOwcqA2Ps-V-W6WeSdKCQMINrCxBEVx1K1Un8pb7vs,8973
+ prefect/workers/process.py,sha256=Yi5D0U5AQ51wHT86GdwtImXSefe0gJf3LGq4r4z9zwM,11090
  prefect/workers/server.py,sha256=2pmVeJZiVbEK02SO6BEZaBIvHMsn6G8LzjW8BXyiTtk,1952
  prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
- prefect_client-3.3.5.dev3.dist-info/METADATA,sha256=LcejbNc0zC2HoeBq6bkhgj5Pyne1GPZwmi9SLwEwCBc,7456
- prefect_client-3.3.5.dev3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- prefect_client-3.3.5.dev3.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
- prefect_client-3.3.5.dev3.dist-info/RECORD,,
+ prefect_client-3.3.6.dev1.dist-info/METADATA,sha256=PMgTb0Zkd5VixWs2eGC-cb6L7wdl9TRKu5ISxI5KoIo,7471
+ prefect_client-3.3.6.dev1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ prefect_client-3.3.6.dev1.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+ prefect_client-3.3.6.dev1.dist-info/RECORD,,