prefect-client 3.0.0rc8__py3-none-any.whl → 3.0.0rc10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. prefect/_internal/compatibility/deprecated.py +53 -0
  2. prefect/_internal/compatibility/migration.py +53 -11
  3. prefect/_internal/integrations.py +7 -0
  4. prefect/agent.py +6 -0
  5. prefect/blocks/core.py +1 -1
  6. prefect/client/__init__.py +4 -0
  7. prefect/client/schemas/objects.py +6 -3
  8. prefect/client/utilities.py +4 -4
  9. prefect/context.py +6 -0
  10. prefect/deployments/schedules.py +5 -2
  11. prefect/deployments/steps/core.py +6 -0
  12. prefect/engine.py +4 -4
  13. prefect/events/schemas/automations.py +3 -3
  14. prefect/exceptions.py +4 -1
  15. prefect/filesystems.py +4 -3
  16. prefect/flow_engine.py +102 -15
  17. prefect/flow_runs.py +1 -1
  18. prefect/flows.py +65 -15
  19. prefect/futures.py +5 -0
  20. prefect/infrastructure/__init__.py +6 -0
  21. prefect/infrastructure/base.py +6 -0
  22. prefect/logging/loggers.py +1 -1
  23. prefect/results.py +85 -68
  24. prefect/serializers.py +3 -3
  25. prefect/settings.py +7 -33
  26. prefect/task_engine.py +78 -21
  27. prefect/task_runners.py +28 -16
  28. prefect/task_worker.py +19 -6
  29. prefect/tasks.py +39 -7
  30. prefect/transactions.py +41 -3
  31. prefect/utilities/asyncutils.py +37 -8
  32. prefect/utilities/collections.py +1 -1
  33. prefect/utilities/importtools.py +1 -1
  34. prefect/utilities/timeout.py +20 -5
  35. prefect/workers/block.py +6 -0
  36. prefect/workers/cloud.py +6 -0
  37. {prefect_client-3.0.0rc8.dist-info → prefect_client-3.0.0rc10.dist-info}/METADATA +3 -2
  38. {prefect_client-3.0.0rc8.dist-info → prefect_client-3.0.0rc10.dist-info}/RECORD +41 -36
  39. {prefect_client-3.0.0rc8.dist-info → prefect_client-3.0.0rc10.dist-info}/LICENSE +0 -0
  40. {prefect_client-3.0.0rc8.dist-info → prefect_client-3.0.0rc10.dist-info}/WHEEL +0 -0
  41. {prefect_client-3.0.0rc8.dist-info → prefect_client-3.0.0rc10.dist-info}/top_level.txt +0 -0
prefect/_internal/compatibility/deprecated.py CHANGED
@@ -16,6 +16,7 @@ import warnings
  from typing import Any, Callable, List, Optional, Type, TypeVar

  import pendulum
+ import wrapt
  from pydantic import BaseModel

  from prefect.utilities.callables import get_call_parameters
@@ -272,3 +273,55 @@ def register_renamed_module(old_name: str, new_name: str, start_date: str):
      DEPRECATED_MODULE_ALIASES.append(
          AliasedModuleDefinition(old_name, new_name, callback)
      )
+
+
+ class AsyncCompatProxy(wrapt.ObjectProxy):
+     """
+     A proxy object that allows for awaiting a method that is no longer async.
+
+     See https://wrapt.readthedocs.io/en/master/wrappers.html#object-proxy for more
+     """
+
+     def __init__(self, wrapped, class_name: str, method_name: str):
+         super().__init__(wrapped)
+         self._self_class_name = class_name
+         self._self_method_name = method_name
+         self._self_already_awaited = False
+
+     def __await__(self):
+         if not self._self_already_awaited:
+             warnings.warn(
+                 (
+                     f"The {self._self_method_name!r} method on {self._self_class_name!r}"
+                     " is no longer async and awaiting it will raise an error after Dec 2024"
+                     " - please remove the `await` keyword."
+                 ),
+                 DeprecationWarning,
+                 stacklevel=2,
+             )
+             self._self_already_awaited = True
+         yield
+         return self.__wrapped__
+
+     def __repr__(self):
+         return repr(self.__wrapped__)
+
+     def __reduce_ex__(self, protocol):
+         return (
+             type(self),
+             (self.__wrapped__,),
+             {"_self_already_awaited": self._self_already_awaited},
+         )
+
+
+ def deprecated_async_method(wrapped):
+     """Decorator that wraps a sync method to allow awaiting it even though it is no longer async."""
+
+     @wrapt.decorator
+     def wrapper(wrapped, instance, args, kwargs):
+         result = wrapped(*args, **kwargs)
+         return AsyncCompatProxy(
+             result, class_name=instance.__class__.__name__, method_name=wrapped.__name__
+         )
+
+     return wrapper(wrapped)
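For orientation, here is a rough sketch of how a decorator like this is meant to be consumed. The `FlowRunView.fetch_state` method is hypothetical and only illustrates the compatibility shim:

```python
import asyncio

from prefect._internal.compatibility.deprecated import deprecated_async_method


class FlowRunView:
    @deprecated_async_method
    def fetch_state(self) -> str:
        # This used to be `async def`; it is now plain sync.
        return "COMPLETED"


async def main() -> None:
    view = FlowRunView()
    print(view.fetch_state())        # new style: just call it
    print(await view.fetch_state())  # old style still works, but emits a DeprecationWarning


asyncio.run(main())
```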
prefect/_internal/compatibility/migration.py CHANGED
@@ -5,11 +5,41 @@ The `getattr_migration` function is used to handle imports for moved or removed
  It is used in the `__getattr__` attribute of modules that have moved or removed objects.

  Usage:
- ```python
- from prefect._internal.compatibility.migration import getattr_migration

- __getattr__ = getattr_migration(__name__)
- ```
+ Moved objects:
+ 1. Add the old and new path to the `MOVED_IN_V3` dictionary, e.g. `MOVED_IN_V3 = {"old_path": "new_path"}`
+ 2. In the module where the object was moved from, add the following lines:
+     ```python
+     # at top
+     from prefect._internal.compatibility.migration import getattr_migration
+
+     # at bottom
+     __getattr__ = getattr_migration(__name__)
+     ```
+
+ Example at src/prefect/engine.py
+
+ Removed objects:
+ 1. Add the old path and error message to the `REMOVED_IN_V3` dictionary, e.g. `REMOVED_IN_V3 = {"old_path": "error_message"}`
+ 2. In the module where the object was removed, add the following lines:
+     ```python
+     # at top
+     from prefect._internal.compatibility.migration import getattr_migration
+
+     # at bottom
+     __getattr__ = getattr_migration(__name__)
+
+     ```
+ If the entire old module was removed, add a stub for the module with the following lines:
+     ```python
+     # at top
+     from prefect._internal.compatibility.migration import getattr_migration
+
+     # at bottom
+     __getattr__ = getattr_migration(__name__)
+     ```
+
+ Example at src/prefect/infrastructure/base.py
  """

  import sys
@@ -27,15 +57,27 @@ MOVED_IN_V3 = {
      "prefect.engine:resume_flow_run": "prefect.flow_runs:resume_flow_run",
      "prefect.engine:suspend_flow_run": "prefect.flow_runs:suspend_flow_run",
      "prefect.engine:_in_process_pause": "prefect.flow_runs:_in_process_pause",
+     "prefect.client:get_client": "prefect.client.orchestration:get_client",
  }

+ upgrade_guide_msg = "Refer to the upgrade guide for more information: https://docs.prefect.io/latest/guides/upgrade-guide-agents-to-workers/."
+
  REMOVED_IN_V3 = {
-     "prefect.deployments.deployments:Deployment": "Use 'flow.serve()', `flow.deploy()`, or `prefect deploy` instead.",
-     "prefect.deployments:Deployment": "Use 'flow.serve()', `flow.deploy()`, or `prefect deploy` instead.",
-     "prefect.filesystems:GCS": "Use 'prefect_gcp' instead.",
-     "prefect.filesystems:Azure": "Use 'prefect_azure' instead.",
-     "prefect.filesystems:S3": "Use 'prefect_aws' instead.",
-     "prefect.engine:_out_of_process_pause": "Use 'prefect.flow_runs.pause_flow_run' instead.",
+     "prefect.client.schemas.objects:MinimalDeploymentSchedule": "Use `prefect.client.schemas.actions.DeploymentScheduleCreate` instead.",
+     "prefect.context:PrefectObjectRegistry": upgrade_guide_msg,
+     "prefect.deployments.deployments:Deployment": "Use `flow.serve()`, `flow.deploy()`, or `prefect deploy` instead.",
+     "prefect.deployments:Deployment": "Use `flow.serve()`, `flow.deploy()`, or `prefect deploy` instead.",
+     "prefect.filesystems:GCS": "Use `prefect_gcp.GcsBucket` instead.",
+     "prefect.filesystems:Azure": "Use `prefect_azure.AzureBlobStorageContainer` instead.",
+     "prefect.filesystems:S3": "Use `prefect_aws.S3Bucket` instead.",
+     "prefect.filesystems:GitHub": "Use `prefect_github.GitHubRepository` instead.",
+     "prefect.engine:_out_of_process_pause": "Use `prefect.flow_runs.pause_flow_run` instead.",
+     "prefect.agent:PrefectAgent": "Use workers instead. " + upgrade_guide_msg,
+     "prefect.infrastructure:KubernetesJob": "Use workers instead. " + upgrade_guide_msg,
+     "prefect.infrastructure.base:Infrastructure": "Use the `BaseWorker` class to create custom infrastructure integrations instead. "
+     + upgrade_guide_msg,
+     "prefect.workers.block:BlockWorkerJobConfiguration": upgrade_guide_msg,
+     "prefect.workers.cloud:BlockWorker": upgrade_guide_msg,
  }

  # IMPORTANT FOR USAGE: When adding new modules to MOVED_IN_V3 or REMOVED_IN_V3, include the following lines at the bottom of that module:
@@ -112,7 +154,7 @@ def getattr_migration(module_name: str) -> Callable[[str], Any]:
          if import_path in REMOVED_IN_V3.keys():
              error_message = REMOVED_IN_V3[import_path]
              raise PrefectImportError(
-                 f"{import_path!r} has been removed. {error_message}"
+                 f"`{import_path}` has been removed. {error_message}"
              )

      globals: Dict[str, Any] = sys.modules[module_name].__dict__
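As a rough illustration of the behavior these tables drive (the removed name and replacement text come from `REMOVED_IN_V3` above; this assumes `PrefectImportError` is importable from `prefect.exceptions`):

```python
from prefect.exceptions import PrefectImportError

try:
    # `S3` was removed from prefect.filesystems in 3.0, so this import is
    # intercepted by the module-level __getattr__ installed by getattr_migration.
    from prefect.filesystems import S3
except PrefectImportError as exc:
    print(exc)
    # `prefect.filesystems:S3` has been removed. Use `prefect_aws.S3Bucket` instead.
```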
prefect/_internal/integrations.py ADDED
@@ -0,0 +1,7 @@
+ KNOWN_EXTRAS_FOR_PACKAGES = {
+     "prefect-kubernetes": "prefect[kubernetes]",
+     "prefect-aws": "prefect[aws]",
+     "prefect-gcp": "prefect[gcp]",
+     "prefect-azure": "prefect[azure]",
+     "prefect-docker": "prefect[docker]",
+ }
prefect/agent.py ADDED
@@ -0,0 +1,6 @@
+ """
+ 2024-06-27: This surfaces an actionable error message for moved or removed objects in Prefect 3.0 upgrade.
+ """
+ from prefect._internal.compatibility.migration import getattr_migration
+
+ __getattr__ = getattr_migration(__name__)
prefect/blocks/core.py CHANGED
@@ -798,7 +798,7 @@ class Block(BaseModel, ABC):
          name: str,
          validate: bool = True,
          client: Optional["PrefectClient"] = None,
-     ):
+     ) -> "Self":
          """
          Retrieves data from the block document with the given name for the block type
          that corresponds with the current class and returns an instantiated version of
prefect/client/__init__.py CHANGED
@@ -15,3 +15,7 @@ $ python -m asyncio
  ```
  </div>
  """
+
+ from prefect._internal.compatibility.migration import getattr_migration
+
+ __getattr__ = getattr_migration(__name__)
prefect/client/schemas/objects.py CHANGED
@@ -8,7 +8,6 @@ from typing import (
      Generic,
      List,
      Optional,
-     TypeVar,
      Union,
      overload,
  )
@@ -26,8 +25,9 @@ from pydantic import (
      model_validator,
  )
  from pydantic_extra_types.pendulum_dt import DateTime
- from typing_extensions import Literal, Self
+ from typing_extensions import Literal, Self, TypeVar

+ from prefect._internal.compatibility.migration import getattr_migration
  from prefect._internal.schemas.bases import ObjectBaseModel, PrefectBaseModel
  from prefect._internal.schemas.fields import CreatedBy, UpdatedBy
  from prefect._internal.schemas.validators import (
@@ -60,7 +60,7 @@ if TYPE_CHECKING:
      from prefect.results import BaseResult


- R = TypeVar("R")
+ R = TypeVar("R", default=Any)


  DEFAULT_BLOCK_SCHEMA_VERSION = "non-versioned"
@@ -1604,3 +1604,6 @@ class CsrfToken(ObjectBaseModel):
      expiration: datetime.datetime = Field(
          default=..., description="The expiration time of the CSRF token"
      )
+
+
+ __getattr__ = getattr_migration(__name__)
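For background, `typing_extensions.TypeVar` backports PEP 696 type-parameter defaults, which plain `typing.TypeVar` lacks on current Python versions. A minimal sketch of what `default=Any` buys (the `Box` class is a toy, not a Prefect type):

```python
from typing import Any, Generic, Optional
from typing_extensions import TypeVar

R = TypeVar("R", default=Any)  # PEP 696: unparametrized generics fall back to Any


class Box(Generic[R]):
    """Toy container used only to illustrate the default."""

    def __init__(self, data: Optional[R] = None) -> None:
        self.data = data


explicit: Box[int] = Box(3)    # parametrize as before
implicit: Box = Box("done")    # now equivalent to Box[Any] for type checkers
```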
prefect/client/utilities.py CHANGED
@@ -78,10 +78,10 @@ def client_injector(


  def inject_client(
-     fn: Callable[P, Coroutine[Any, Any, Any]],
- ) -> Callable[P, Coroutine[Any, Any, Any]]:
+     fn: Callable[P, Coroutine[Any, Any, R]],
+ ) -> Callable[P, Coroutine[Any, Any, R]]:
      """
-     Simple helper to provide a context managed client to a asynchronous function.
+     Simple helper to provide a context managed client to an asynchronous function.

      The decorated function _must_ take a `client` kwarg and if a client is passed when
      called it will be used instead of creating a new one, but it will not be context
@@ -89,7 +89,7 @@ def inject_client(
      """

      @wraps(fn)
-     async def with_injected_client(*args: P.args, **kwargs: P.kwargs) -> Any:
+     async def with_injected_client(*args: P.args, **kwargs: P.kwargs) -> R:
          client = cast(Optional["PrefectClient"], kwargs.pop("client", None))
          client, inferred = get_or_create_client(client)
          if not inferred:
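A sketch of the decorator in use; with the signature now parameterized on `R`, type checkers see the wrapped coroutine's real return type instead of `Any`. The `read_parameters` helper below is hypothetical:

```python
from typing import Any, Dict, Optional
from uuid import UUID

from prefect.client.orchestration import PrefectClient
from prefect.client.utilities import inject_client


@inject_client
async def read_parameters(
    flow_run_id: UUID, client: Optional[PrefectClient] = None
) -> Dict[str, Any]:
    # `client` is injected (and context-managed) when the caller omits it.
    flow_run = await client.read_flow_run(flow_run_id)
    return flow_run.parameters  # inferred as Dict[str, Any], not Any
```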
prefect/context.py CHANGED
@@ -32,6 +32,7 @@ from typing_extensions import Self
  import prefect.logging
  import prefect.logging.configuration
  import prefect.settings
+ from prefect._internal.compatibility.migration import getattr_migration
  from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
  from prefect.client.schemas import FlowRun, TaskRun
  from prefect.events.worker import EventsWorker
@@ -608,3 +609,8 @@ def root_settings_context():


  GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context()
+
+
+ # 2024-07-02: This surfaces an actionable error message for removed objects
+ # in Prefect 3.0 upgrade.
+ __getattr__ = getattr_migration(__name__)
prefect/deployments/schedules.py CHANGED
@@ -1,11 +1,14 @@
- from typing import TYPE_CHECKING, Any, List, Optional
+ from typing import TYPE_CHECKING, Any, List, Optional, Sequence, Union

  from prefect.client.schemas.actions import DeploymentScheduleCreate
  from prefect.client.schemas.schedules import is_schedule_type

  if TYPE_CHECKING:
      from prefect.client.schemas.schedules import SCHEDULE_TYPES
-     from prefect.client.types.flexible_schedule_list import FlexibleScheduleList
+
+     FlexibleScheduleList = Sequence[
+         Union[DeploymentScheduleCreate, dict[str, Any], "SCHEDULE_TYPES"]
+     ]


  def create_deployment_schedule_create(
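In practice the alias means a schedule list can mix explicit create models with raw schedule types (plain dicts are also admitted by the alias, with keys interpreted by the consuming API). A hedged sketch; the one-hour and daily intervals are arbitrary:

```python
from datetime import timedelta

from prefect.client.schemas.actions import DeploymentScheduleCreate
from prefect.client.schemas.schedules import IntervalSchedule

schedules = [
    IntervalSchedule(interval=timedelta(hours=1)),       # a SCHEDULE_TYPES member
    DeploymentScheduleCreate(                            # explicit create model
        schedule=IntervalSchedule(interval=timedelta(days=1))
    ),
]
```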
prefect/deployments/steps/core.py CHANGED
@@ -22,6 +22,7 @@ from typing import Any, Dict, List, Optional, Tuple, Union

  from prefect._internal.compatibility.deprecated import PrefectDeprecationWarning
  from prefect._internal.concurrency.api import Call, from_async
+ from prefect._internal.integrations import KNOWN_EXTRAS_FOR_PACKAGES
  from prefect.logging.loggers import get_logger
  from prefect.settings import PREFECT_DEBUG_MODE
  from prefect.utilities.importtools import import_object
@@ -84,6 +85,11 @@ def _get_function_for_step(
          raise

      try:
+         packages = [
+             KNOWN_EXTRAS_FOR_PACKAGES.get(package, package)
+             for package in packages
+             if package
+         ]
          subprocess.check_call([sys.executable, "-m", "pip", "install", *packages])
      except subprocess.CalledProcessError:
          get_logger("deployments.steps.core").warning(
prefect/engine.py CHANGED
@@ -31,16 +31,16 @@ if __name__ == "__main__":
      try:
          from prefect.flow_engine import (
              load_flow_and_flow_run,
-             run_flow_async,
-             run_flow_sync,
+             run_flow,
          )

          flow_run, flow = load_flow_and_flow_run(flow_run_id=flow_run_id)
          # run the flow
          if flow.isasync:
-             run_coro_as_sync(run_flow_async(flow, flow_run=flow_run))
+             run_coro_as_sync(run_flow(flow, flow_run=flow_run))
          else:
-             run_flow_sync(flow, flow_run=flow_run)
+             run_flow(flow, flow_run=flow_run)
+
      except Abort as exc:
          engine_logger.info(
              f"Engine execution of flow run '{flow_run_id}' aborted by orchestrator:"
prefect/events/schemas/automations.py CHANGED
@@ -187,18 +187,18 @@ class EventTrigger(ResourceTrigger):
          within: Optional[timedelta] = data.get("within")

          if isinstance(within, (int, float)):
-             data["within"] = within = timedelta(seconds=within)
+             within = timedelta(seconds=within)

          if posture == Posture.Proactive:
              if not within or within == timedelta(0):
-                 data["within"] = timedelta(seconds=10.0)
+                 within = timedelta(seconds=10.0)
              elif within < timedelta(seconds=10.0):
                  raise ValueError(
                      "`within` for Proactive triggers must be greater than or equal to "
                      "10 seconds"
                  )

-         return data
+         return data | {"within": within} if within else data

      def describe_for_cli(self, indent: int = 0) -> str:
          """Return a human-readable description of this trigger for the CLI"""
prefect/exceptions.py CHANGED
@@ -178,7 +178,10 @@ class ParameterTypeError(PrefectException):

      @classmethod
      def from_validation_error(cls, exc: ValidationError) -> Self:
-         bad_params = [f'{".".join(err["loc"])}: {err["msg"]}' for err in exc.errors()]
+         bad_params = [
+             f'{".".join(str(item) for item in err["loc"])}: {err["msg"]}'
+             for err in exc.errors()
+         ]
          msg = "Flow run received invalid parameters:\n - " + "\n - ".join(bad_params)
          return cls(msg)

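This matters because pydantic v2 error `loc` tuples can contain integers (e.g. list indices), which `str.join` rejects. A quick illustration:

```python
loc = ("numbers", 2)  # a pydantic loc such as parameters["numbers"][2]

try:
    ".".join(loc)  # old form
except TypeError as exc:
    print(exc)  # sequence item 1: expected str instance, int found

print(".".join(str(item) for item in loc))  # new form -> "numbers.2"
```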
prefect/filesystems.py CHANGED
@@ -95,7 +95,7 @@ class LocalFileSystem(WritableFileSystem, WritableDeploymentStorage):
      def cast_pathlib(cls, value):
          return stringify_path(value)

-     def _resolve_path(self, path: str) -> Path:
+     def _resolve_path(self, path: str, validate: bool = False) -> Path:
          # Only resolve the base path at runtime, default to the current directory
          basepath = (
              Path(self.basepath).expanduser().resolve()
@@ -114,11 +114,12 @@ class LocalFileSystem(WritableFileSystem, WritableDeploymentStorage):
              resolved_path = basepath / resolved_path
          else:
              resolved_path = resolved_path.resolve()
+
+         if validate:
              if basepath not in resolved_path.parents and (basepath != resolved_path):
                  raise ValueError(
                      f"Provided path {resolved_path} is outside of the base path {basepath}."
                  )
-
          return resolved_path

      @sync_compatible
@@ -184,7 +185,7 @@ class LocalFileSystem(WritableFileSystem, WritableDeploymentStorage):
          Defaults to copying the entire contents of the current working directory to the block's basepath.
          An `ignore_file` path may be provided that can include gitignore style expressions for filepaths to ignore.
          """
-         destination_path = self._resolve_path(to_path)
+         destination_path = self._resolve_path(to_path, validate=True)

          if not local_path:
              local_path = Path(".").absolute()
prefect/flow_engine.py CHANGED
@@ -16,6 +16,7 @@ from typing import (
      Literal,
      Optional,
      Tuple,
+     Type,
      TypeVar,
      Union,
      cast,
@@ -30,7 +31,13 @@ from prefect.client.schemas import FlowRun, TaskRun
  from prefect.client.schemas.filters import FlowRunFilter
  from prefect.client.schemas.sorting import FlowRunSort
  from prefect.context import ClientContext, FlowRunContext, TagsContext
- from prefect.exceptions import Abort, Pause, PrefectException, UpstreamTaskError
+ from prefect.exceptions import (
+     Abort,
+     Pause,
+     PrefectException,
+     TerminationSignal,
+     UpstreamTaskError,
+ )
  from prefect.flows import Flow, load_flow_from_entrypoint, load_flow_from_flow_run
  from prefect.futures import PrefectFuture, resolve_futures_to_states
  from prefect.logging.loggers import (
@@ -39,7 +46,7 @@ from prefect.logging.loggers import (
      get_run_logger,
      patch_print,
  )
- from prefect.results import ResultFactory
+ from prefect.results import BaseResult, ResultFactory
  from prefect.settings import PREFECT_DEBUG_MODE
  from prefect.states import (
      Failed,
@@ -50,8 +57,13 @@ from prefect.states import (
      exception_to_failed_state,
      return_value_to_state,
  )
+ from prefect.utilities.annotations import NotSet
  from prefect.utilities.asyncutils import run_coro_as_sync
- from prefect.utilities.callables import call_with_parameters, parameters_to_args_kwargs
+ from prefect.utilities.callables import (
+     call_with_parameters,
+     get_call_parameters,
+     parameters_to_args_kwargs,
+ )
  from prefect.utilities.collections import visit_collection
  from prefect.utilities.engine import (
      _get_hook_name,
@@ -68,6 +80,10 @@ P = ParamSpec("P")
  R = TypeVar("R")


+ class FlowRunTimeoutError(TimeoutError):
+     """Raised when a flow run exceeds its defined timeout."""
+
+
  def load_flow_and_flow_run(flow_run_id: UUID) -> Tuple[FlowRun, Flow]:
      ## TODO: add error handling to update state and log tracebacks
      entrypoint = os.environ.get("PREFECT__FLOW_ENTRYPOINT")
@@ -91,6 +107,10 @@ class FlowRunEngine(Generic[P, R]):
      flow_run_id: Optional[UUID] = None
      logger: logging.Logger = field(default_factory=lambda: get_logger("engine"))
      wait_for: Optional[Iterable[PrefectFuture]] = None
+     # holds the return value from the user code
+     _return_value: Union[R, Type[NotSet]] = NotSet
+     # holds the exception raised by the user code, if any
+     _raised: Union[Exception, Type[NotSet]] = NotSet
      _is_started: bool = False
      _client: Optional[SyncPrefectClient] = None
      short_circuit: bool = False
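For readers unfamiliar with the pattern, `NotSet` is a sentinel that lets the engine distinguish "nothing recorded yet" from a flow that legitimately returned `None`. A minimal standalone sketch, not the Prefect implementation:

```python
class NotSet:
    """Stand-in for prefect.utilities.annotations.NotSet."""


_return_value = NotSet  # nothing recorded yet

# ... the flow body runs and returns None ...
_return_value = None

# `is not NotSet` is the check used in FlowRunEngine.result(), so a None
# return value is still reported instead of falling through to state lookup.
assert _return_value is not NotSet
```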
@@ -204,6 +224,30 @@ class FlowRunEngine(Generic[P, R]):
          return state

      def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
+         if self._return_value is not NotSet and not isinstance(
+             self._return_value, State
+         ):
+             if isinstance(self._return_value, BaseResult):
+                 _result = self._return_value.get()
+             else:
+                 _result = self._return_value
+
+             if inspect.isawaitable(_result):
+                 # getting the value for a BaseResult may return an awaitable
+                 # depending on whether the parent frame is sync or not
+                 _result = run_coro_as_sync(_result)
+             return _result
+
+         if self._raised is not NotSet:
+             if raise_on_failure:
+                 raise self._raised
+             return self._raised
+
+         # This is a fall through case which leans on the existing state result mechanics to get the
+         # return value. This is necessary because we currently will return a State object if the
+         # the State was Prefect-created.
+         # TODO: Remove the need to get the result from a State except in cases where the return value
+         # is a State object.
          _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True)  # type: ignore
          # state.result is a `sync_compatible` function that may or may not return an awaitable
          # depending on whether the parent frame is sync or not
@@ -215,13 +259,15 @@ class FlowRunEngine(Generic[P, R]):
          result_factory = getattr(FlowRunContext.get(), "result_factory", None)
          if result_factory is None:
              raise ValueError("Result factory is not set")
+         resolved_result = resolve_futures_to_states(result)
          terminal_state = run_coro_as_sync(
              return_value_to_state(
-                 resolve_futures_to_states(result),
+                 resolved_result,
                  result_factory=result_factory,
              )
          )
          self.set_state(terminal_state)
+         self._return_value = resolved_result
          return result

      def handle_exception(
@@ -248,10 +294,16 @@ class FlowRunEngine(Generic[P, R]):
              ),
          )
          state = self.set_state(Running())
+         self._raised = exc
          return state

      def handle_timeout(self, exc: TimeoutError) -> None:
-         message = f"Flow run exceeded timeout of {self.flow.timeout_seconds} seconds"
+         if isinstance(exc, FlowRunTimeoutError):
+             message = (
+                 f"Flow run exceeded timeout of {self.flow.timeout_seconds} second(s)"
+             )
+         else:
+             message = f"Flow run failed due to timeout: {exc!r}"
          self.logger.error(message)
          state = Failed(
              data=exc,
@@ -259,12 +311,14 @@ class FlowRunEngine(Generic[P, R]):
              name="TimedOut",
          )
          self.set_state(state)
+         self._raised = exc

      def handle_crash(self, exc: BaseException) -> None:
          state = run_coro_as_sync(exception_to_crashed_state(exc))
          self.logger.error(f"Crash detected! {state.message}")
          self.logger.debug("Crash details:", exc_info=exc)
          self.set_state(state, force=True)
+         self._raised = exc

      def load_subflow_run(
          self,
@@ -311,7 +365,9 @@ class FlowRunEngine(Generic[P, R]):
              limit=1,
          )
          if flow_runs:
-             return flow_runs[-1]
+             loaded_flow_run = flow_runs[-1]
+             self._return_value = loaded_flow_run.state
+             return loaded_flow_run

      def create_flow_run(self, client: SyncPrefectClient) -> FlowRun:
          flow_run_ctx = FlowRunContext.get()
@@ -359,7 +415,7 @@ class FlowRunEngine(Generic[P, R]):

          return flow_run

-     def call_hooks(self, state: State = None) -> Iterable[Callable]:
+     def call_hooks(self, state: Optional[State] = None) -> Iterable[Callable]:
          if state is None:
              state = self.state
          flow = self.flow
@@ -500,6 +556,11 @@ class FlowRunEngine(Generic[P, R]):
          )
          try:
              yield self
+
+         except TerminationSignal as exc:
+             self.cancel_all_tasks()
+             self.handle_crash(exc)
+             raise
          except Exception:
              # regular exceptions are caught and re-raised to the user
              raise
@@ -535,6 +596,10 @@ class FlowRunEngine(Generic[P, R]):
              return False  # TODO: handle this differently?
          return getattr(self, "flow_run").state.is_pending()

+     def cancel_all_tasks(self):
+         if hasattr(self.flow.task_runner, "cancel_all"):
+             self.flow.task_runner.cancel_all()  # type: ignore
+
      # --------------------------
      #
      # The following methods compose the main task run loop
@@ -560,7 +625,10 @@ class FlowRunEngine(Generic[P, R]):
          # reenter the run context to ensure it is up to date for every run
          with self.setup_run_context():
              try:
-                 with timeout_context(seconds=self.flow.timeout_seconds):
+                 with timeout_context(
+                     seconds=self.flow.timeout_seconds,
+                     timeout_exc_type=FlowRunTimeoutError,
+                 ):
                      self.logger.debug(
                          f"Executing flow {self.flow.name!r} for flow run {self.flow_run.name!r}..."
                      )
@@ -568,7 +636,7 @@ class FlowRunEngine(Generic[P, R]):
              except TimeoutError as exc:
                  self.handle_timeout(exc)
              except Exception as exc:
-                 self.logger.exception(f"Encountered exception during execution: {exc}")
+                 self.logger.exception("Encountered exception during execution: %r", exc)
                  self.handle_exception(exc)

      def call_flow_fn(self) -> Union[R, Coroutine[Any, Any, R]]:
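Because `timeout_context` (from `prefect.utilities.timeout`, also updated in this release) now raises the supplied `timeout_exc_type`, `handle_timeout` can tell an engine-enforced timeout apart from a `TimeoutError` raised inside user code. A standalone sketch of that dispatch, not the engine code itself:

```python
class FlowRunTimeoutError(TimeoutError):
    """Raised when a flow run exceeds its defined timeout."""


def describe_timeout(exc: TimeoutError, timeout_seconds: float) -> str:
    # Mirrors the branching in FlowRunEngine.handle_timeout above.
    if isinstance(exc, FlowRunTimeoutError):
        return f"Flow run exceeded timeout of {timeout_seconds} second(s)"
    return f"Flow run failed due to timeout: {exc!r}"


print(describe_timeout(FlowRunTimeoutError(), 30))               # engine-enforced timeout
print(describe_timeout(TimeoutError("db query timed out"), 30))  # timeout from user code
```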
@@ -595,10 +663,11 @@ def run_flow_sync(
      wait_for: Optional[Iterable[PrefectFuture]] = None,
      return_type: Literal["state", "result"] = "result",
  ) -> Union[R, State, None]:
-     parameters = flow_run.parameters if flow_run else parameters
-
      engine = FlowRunEngine[P, R](
-         flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+         flow=flow,
+         parameters=parameters,
+         flow_run=flow_run,
+         wait_for=wait_for,
      )

      with engine.start():
@@ -616,8 +685,6 @@ async def run_flow_async(
      wait_for: Optional[Iterable[PrefectFuture]] = None,
      return_type: Literal["state", "result"] = "result",
  ) -> Union[R, State, None]:
-     parameters = flow_run.parameters if flow_run else parameters
-
      engine = FlowRunEngine[P, R](
          flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
      )
@@ -714,10 +781,13 @@ def run_flow(
      kwargs = dict(
          flow=flow,
          flow_run=flow_run,
-         parameters=parameters,
+         parameters=_flow_parameters(
+             flow=flow, flow_run=flow_run, parameters=parameters
+         ),
          wait_for=wait_for,
          return_type=return_type,
      )
+
      if flow.isasync and flow.isgenerator:
          return run_generator_flow_async(**kwargs)
      elif flow.isgenerator:
@@ -726,3 +796,20 @@ def run_flow(
          return run_flow_async(**kwargs)
      else:
          return run_flow_sync(**kwargs)
+
+
+ def _flow_parameters(
+     flow: Flow[P, R], flow_run: Optional[FlowRun], parameters: Optional[Dict[str, Any]]
+ ) -> Dict[str, Any]:
+     if parameters:
+         # This path is taken when a flow is being called directly with
+         # parameters, in that case just return the parameters as-is.
+         return parameters
+
+     # Otherwise the flow is being executed indirectly and we may need to grab
+     # the parameters from the flow run. We also need to resolve any default
+     # parameters that are defined on the flow function itself.
+
+     parameters = flow_run.parameters if flow_run else {}
+     call_args, call_kwargs = parameters_to_args_kwargs(flow.fn, parameters)
+     return get_call_parameters(flow.fn, call_args, call_kwargs)
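A small sketch of what the extra resolution step buys: flow-run parameters may omit arguments that have defaults, and round-tripping them through the flow's signature fills the defaults back in. The `greet` flow is hypothetical, and this assumes `get_call_parameters` applies signature defaults, which is what the helper above relies on:

```python
from prefect import flow
from prefect.utilities.callables import get_call_parameters, parameters_to_args_kwargs


@flow
def greet(name: str, punctuation: str = "!") -> str:
    return f"Hello, {name}{punctuation}"


stored = {"name": "Marvin"}  # as a flow run might store them, defaults omitted
args, kwargs = parameters_to_args_kwargs(greet.fn, stored)
print(get_call_parameters(greet.fn, args, kwargs))
# expected: {'name': 'Marvin', 'punctuation': '!'}
```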
prefect/flow_runs.py CHANGED
@@ -340,7 +340,7 @@ async def suspend_flow_run(
      already started will run until completion. When resumed, the flow run will
      be rescheduled to finish execution. In order suspend a flow run in this
      way, the flow needs to have an associated deployment and results need to be
-     configured with the `persist_results` option.
+     configured with the `persist_result` option.

      Args:
          flow_run_id: a flow run id. If supplied, this function will attempt to