prefect-client 3.0.0rc2__py3-none-any.whl → 3.0.0rc4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. prefect/__init__.py +0 -1
  2. prefect/_internal/compatibility/migration.py +124 -0
  3. prefect/_internal/concurrency/__init__.py +2 -2
  4. prefect/_internal/concurrency/primitives.py +1 -0
  5. prefect/_internal/pydantic/annotations/pendulum.py +2 -2
  6. prefect/_internal/pytz.py +1 -1
  7. prefect/blocks/core.py +1 -1
  8. prefect/client/orchestration.py +96 -22
  9. prefect/client/schemas/actions.py +1 -1
  10. prefect/client/schemas/filters.py +6 -0
  11. prefect/client/schemas/objects.py +10 -3
  12. prefect/client/subscriptions.py +6 -5
  13. prefect/context.py +1 -27
  14. prefect/deployments/__init__.py +3 -0
  15. prefect/deployments/base.py +4 -2
  16. prefect/deployments/deployments.py +3 -0
  17. prefect/deployments/steps/pull.py +1 -0
  18. prefect/deployments/steps/utility.py +2 -1
  19. prefect/engine.py +3 -0
  20. prefect/events/cli/automations.py +1 -1
  21. prefect/events/clients.py +7 -1
  22. prefect/exceptions.py +9 -0
  23. prefect/filesystems.py +22 -11
  24. prefect/flow_engine.py +195 -153
  25. prefect/flows.py +95 -36
  26. prefect/futures.py +9 -1
  27. prefect/infrastructure/provisioners/container_instance.py +1 -0
  28. prefect/infrastructure/provisioners/ecs.py +2 -2
  29. prefect/input/__init__.py +4 -0
  30. prefect/logging/formatters.py +2 -2
  31. prefect/logging/handlers.py +2 -2
  32. prefect/logging/loggers.py +1 -1
  33. prefect/plugins.py +1 -0
  34. prefect/records/cache_policies.py +3 -3
  35. prefect/records/result_store.py +10 -3
  36. prefect/results.py +47 -73
  37. prefect/runner/runner.py +1 -1
  38. prefect/runner/server.py +1 -1
  39. prefect/runtime/__init__.py +1 -0
  40. prefect/runtime/deployment.py +1 -0
  41. prefect/runtime/flow_run.py +1 -0
  42. prefect/runtime/task_run.py +1 -0
  43. prefect/settings.py +16 -3
  44. prefect/states.py +15 -4
  45. prefect/task_engine.py +195 -39
  46. prefect/task_runners.py +9 -3
  47. prefect/task_runs.py +26 -12
  48. prefect/task_worker.py +149 -20
  49. prefect/tasks.py +153 -71
  50. prefect/transactions.py +85 -15
  51. prefect/types/__init__.py +10 -3
  52. prefect/utilities/asyncutils.py +3 -3
  53. prefect/utilities/callables.py +16 -4
  54. prefect/utilities/collections.py +120 -57
  55. prefect/utilities/dockerutils.py +5 -3
  56. prefect/utilities/engine.py +11 -0
  57. prefect/utilities/filesystem.py +4 -5
  58. prefect/utilities/importtools.py +29 -0
  59. prefect/utilities/services.py +2 -2
  60. prefect/utilities/urls.py +195 -0
  61. prefect/utilities/visualization.py +1 -0
  62. prefect/variables.py +4 -0
  63. prefect/workers/base.py +35 -0
  64. {prefect_client-3.0.0rc2.dist-info → prefect_client-3.0.0rc4.dist-info}/METADATA +2 -2
  65. {prefect_client-3.0.0rc2.dist-info → prefect_client-3.0.0rc4.dist-info}/RECORD +68 -66
  66. prefect/blocks/kubernetes.py +0 -115
  67. {prefect_client-3.0.0rc2.dist-info → prefect_client-3.0.0rc4.dist-info}/LICENSE +0 -0
  68. {prefect_client-3.0.0rc2.dist-info → prefect_client-3.0.0rc4.dist-info}/WHEEL +0 -0
  69. {prefect_client-3.0.0rc2.dist-info → prefect_client-3.0.0rc4.dist-info}/top_level.txt +0 -0

prefect/events/clients.py CHANGED

@@ -63,6 +63,12 @@ def get_events_subscriber(
     reconnection_attempts: int = 10,
 ) -> "PrefectEventSubscriber":
     api_url = PREFECT_API_URL.value()
+    if not api_url:
+        raise ValueError(
+            "A Prefect server or Prefect Cloud is required to start an event "
+            "subscriber. Please check the PREFECT_API_URL setting in your profile."
+        )
+
     if isinstance(api_url, str) and api_url.startswith(PREFECT_CLOUD_API_URL.value()):
         return PrefectCloudEventSubscriber(
             filter=filter, reconnection_attempts=reconnection_attempts
@@ -168,7 +174,7 @@ class PrefectEphemeralEventsClient(EventsClient):
            )
        from prefect.server.api.server import create_app
 
-        app = create_app()
+        app = create_app(ephemeral=True)
 
        self._http_client = PrefectHttpxAsyncClient(
            transport=httpx.ASGITransport(app=app, raise_app_exceptions=False),
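
The first hunk makes the failure mode explicit: `get_events_subscriber` now refuses to build a subscriber when no API URL is configured, instead of failing later with a less obvious error. A minimal sketch of what calling code sees, assuming a profile where `PREFECT_API_URL` is unset and that the `filter` argument remains optional:

```python
from prefect.events.clients import get_events_subscriber

# With PREFECT_API_URL unset, rc4 raises immediately rather than attempting
# to connect to a server that was never configured.
try:
    subscriber = get_events_subscriber()
except ValueError as exc:
    print(exc)  # points at the PREFECT_API_URL setting in the active profile
```
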

prefect/exceptions.py CHANGED

@@ -400,3 +400,12 @@ class FlowPauseTimeout(PrefectException):
 
 class FlowRunWaitTimeout(PrefectException):
     """Raised when a flow run takes longer than a given timeout"""
+
+
+class PrefectImportError(ImportError):
+    """
+    An error raised when a Prefect object cannot be imported due to a move or removal.
+    """
+
+    def __init__(self, message: str) -> None:
+        super().__init__(message)
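
Because the new `PrefectImportError` subclasses `ImportError`, existing code that guards imports with `except ImportError` keeps working; the new class only adds a more specific type to catch. A small sketch (the message below is illustrative, not one Prefect actually emits):

```python
from prefect.exceptions import PrefectImportError

try:
    # Hypothetical message: raised by Prefect's migration shims when an object has moved.
    raise PrefectImportError("`prefect.some_module.SomeBlock` has been moved")
except ImportError as exc:  # PrefectImportError is an ImportError, so this still catches it
    print(type(exc).__name__, exc)
```
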

prefect/filesystems.py CHANGED

@@ -16,6 +16,8 @@ from prefect.utilities.asyncutils import run_sync_in_worker_thread, sync_compati
 from prefect.utilities.compat import copytree
 from prefect.utilities.filesystem import filter_files
 
+from ._internal.compatibility.migration import getattr_migration
+
 
 class ReadableFileSystem(Block, abc.ABC):
     _block_schema_capabilities = ["read-path"]
@@ -42,7 +44,7 @@ class ReadableDeploymentStorage(Block, abc.ABC):
 
     @abc.abstractmethod
     async def get_directory(
-        self, from_path: str = None, local_path: str = None
+        self, from_path: Optional[str] = None, local_path: Optional[str] = None
     ) -> None:
         pass
 
@@ -52,13 +54,16 @@ class WritableDeploymentStorage(Block, abc.ABC):
 
     @abc.abstractmethod
     async def get_directory(
-        self, from_path: str = None, local_path: str = None
+        self, from_path: Optional[str] = None, local_path: Optional[str] = None
     ) -> None:
         pass
 
     @abc.abstractmethod
     async def put_directory(
-        self, local_path: str = None, to_path: str = None, ignore_file: str = None
+        self,
+        local_path: Optional[str] = None,
+        to_path: Optional[str] = None,
+        ignore_file: Optional[str] = None,
     ) -> None:
         pass
 
@@ -103,18 +108,18 @@ class LocalFileSystem(WritableFileSystem, WritableDeploymentStorage):
         if path is None:
             return basepath
 
-        path: Path = Path(path).expanduser()
+        resolved_path: Path = Path(path).expanduser()
 
-        if not path.is_absolute():
-            path = basepath / path
+        if not resolved_path.is_absolute():
+            resolved_path = basepath / resolved_path
         else:
-            path = path.resolve()
-            if basepath not in path.parents and (basepath != path):
+            resolved_path = resolved_path.resolve()
+            if basepath not in resolved_path.parents and (basepath != resolved_path):
                 raise ValueError(
-                    f"Provided path {path} is outside of the base path {basepath}."
+                    f"Provided path {resolved_path} is outside of the base path {basepath}."
                 )
 
-        return path
+        return resolved_path
 
     @sync_compatible
     async def get_directory(
@@ -168,7 +173,10 @@ class LocalFileSystem(WritableFileSystem, WritableDeploymentStorage):
 
     @sync_compatible
     async def put_directory(
-        self, local_path: str = None, to_path: str = None, ignore_file: str = None
+        self,
+        local_path: Optional[str] = None,
+        to_path: Optional[str] = None,
+        ignore_file: Optional[str] = None,
     ) -> None:
         """
         Copies a directory from one place to another on the local filesystem.
@@ -506,3 +514,6 @@ class SMB(WritableFileSystem, WritableDeploymentStorage):
     @sync_compatible
     async def write_path(self, path: str, content: bytes) -> str:
         return await self.filesystem.write_path(path=path, content=content)
+
+
+__getattr__ = getattr_migration(__name__)
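
The module-level `__getattr__` added at the end of `prefect/filesystems.py` relies on PEP 562: attribute lookups that fail on the module are routed through the shim returned by `getattr_migration` (introduced in `prefect/_internal/compatibility/migration.py` in this release), which can raise the new `PrefectImportError` with a pointer to an object's new home. From the caller's side the effect looks roughly like this; the attribute name is hypothetical and the exact behavior depends on the migration table:

```python
import prefect.filesystems

try:
    # Hypothetical attribute that was moved or removed in 3.0; real names come
    # from the mapping in prefect/_internal/compatibility/migration.py.
    prefect.filesystems.SomeRelocatedBlock
except ImportError as exc:  # PrefectImportError subclasses ImportError
    print(exc)
```
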

prefect/flow_engine.py CHANGED

@@ -6,9 +6,11 @@ from contextlib import ExitStack, contextmanager
 from dataclasses import dataclass, field
 from typing import (
     Any,
+    AsyncGenerator,
     Callable,
     Coroutine,
     Dict,
+    Generator,
     Generic,
     Iterable,
     Literal,
@@ -20,22 +22,17 @@ from typing import (
 )
 from uuid import UUID
 
-import anyio
-import anyio._backends._asyncio
-from sniffio import AsyncLibraryNotFoundError
 from typing_extensions import ParamSpec
 
 from prefect import Task
-from prefect._internal.concurrency.api import create_call, from_sync
 from prefect.client.orchestration import SyncPrefectClient, get_client
 from prefect.client.schemas import FlowRun, TaskRun
 from prefect.client.schemas.filters import FlowRunFilter
 from prefect.client.schemas.sorting import FlowRunSort
-from prefect.context import ClientContext, FlowRunContext, TagsContext, TaskRunContext
+from prefect.context import ClientContext, FlowRunContext, TagsContext
 from prefect.exceptions import Abort, Pause, PrefectException, UpstreamTaskError
 from prefect.flows import Flow, load_flow_from_entrypoint, load_flow_from_flow_run
 from prefect.futures import PrefectFuture, resolve_futures_to_states
-from prefect.logging.handlers import APILogHandler
 from prefect.logging.loggers import (
     flow_run_logger,
     get_logger,
@@ -43,7 +40,7 @@ from prefect.logging.loggers import (
     patch_print,
 )
 from prefect.results import ResultFactory
-from prefect.settings import PREFECT_DEBUG_MODE, PREFECT_UI_URL
+from prefect.settings import PREFECT_DEBUG_MODE
 from prefect.states import (
     Failed,
     Pending,
@@ -54,16 +51,18 @@ from prefect.states import (
     return_value_to_state,
 )
 from prefect.utilities.asyncutils import run_coro_as_sync
-from prefect.utilities.callables import parameters_to_args_kwargs
+from prefect.utilities.callables import call_with_parameters, parameters_to_args_kwargs
 from prefect.utilities.collections import visit_collection
 from prefect.utilities.engine import (
     _get_hook_name,
     _resolve_custom_flow_run_name,
     capture_sigterm,
+    link_state_to_result,
     propose_state_sync,
     resolve_to_final_result,
 )
 from prefect.utilities.timeout import timeout, timeout_async
+from prefect.utilities.urls import url_for
 
 P = ParamSpec("P")
 R = TypeVar("R")
@@ -174,9 +173,6 @@ class FlowRunEngine(Generic[P, R]):
         while state.is_pending():
             time.sleep(0.2)
             state = self.set_state(new_state)
-        if state.is_running():
-            for hook in self.get_hooks(state):
-                hook()
         return state
 
     def set_state(self, state: State, force: bool = False) -> State:
@@ -349,12 +345,14 @@ class FlowRunEngine(Generic[P, R]):
 
         return flow_run
 
-    def get_hooks(self, state: State, as_async: bool = False) -> Iterable[Callable]:
+    def call_hooks(self, state: State = None) -> Iterable[Callable]:
+        if state is None:
+            state = self.state
         flow = self.flow
         flow_run = self.flow_run
 
         if not flow_run:
-            raise ValueError("Task run is not set")
+            raise ValueError("Flow run is not set")
 
        enable_cancellation_and_crashed_hooks = (
            os.environ.get(
@@ -363,7 +361,6 @@ class FlowRunEngine(Generic[P, R]):
            == "true"
        )
 
-        hooks = None
        if state.is_failed() and flow.on_failure_hooks:
            hooks = flow.on_failure_hooks
        elif state.is_completed() and flow.on_completion_hooks:
@@ -382,48 +379,30 @@ class FlowRunEngine(Generic[P, R]):
            hooks = flow.on_crashed_hooks
        elif state.is_running() and flow.on_running_hooks:
            hooks = flow.on_running_hooks
+        else:
+            hooks = None
 
        for hook in hooks or []:
            hook_name = _get_hook_name(hook)
 
-            @contextmanager
-            def hook_context():
-                try:
-                    self.logger.info(
-                        f"Running hook {hook_name!r} in response to entering state"
-                        f" {state.name!r}"
-                    )
-                    yield
-                except Exception:
-                    self.logger.error(
-                        f"An error was encountered while running hook {hook_name!r}",
-                        exc_info=True,
-                    )
-                else:
-                    self.logger.info(
-                        f"Hook {hook_name!r} finished running successfully"
-                    )
-
-            if as_async:
-
-                async def _hook_fn():
-                    with hook_context():
-                        result = hook(flow, flow_run, state)
-                        if inspect.isawaitable(result):
-                            await result
-
+            try:
+                self.logger.info(
+                    f"Running hook {hook_name!r} in response to entering state"
+                    f" {state.name!r}"
+                )
+                result = hook(flow, flow_run, state)
+                if inspect.isawaitable(result):
+                    run_coro_as_sync(result)
+            except Exception:
+                self.logger.error(
+                    f"An error was encountered while running hook {hook_name!r}",
+                    exc_info=True,
+                )
            else:
-
-                def _hook_fn():
-                    with hook_context():
-                        result = hook(flow, flow_run, state)
-                        if inspect.isawaitable(result):
-                            run_coro_as_sync(result)
-
-            yield _hook_fn
+                self.logger.info(f"Hook {hook_name!r} finished running successfully")
 
     @contextmanager
-    def enter_run_context(self, client: Optional[SyncPrefectClient] = None):
+    def setup_run_context(self, client: Optional[SyncPrefectClient] = None):
        from prefect.utilities.engine import (
            should_log_prints,
        )
@@ -436,13 +415,6 @@ class FlowRunEngine(Generic[P, R]):
        self.flow_run = client.read_flow_run(self.flow_run.id)
        log_prints = should_log_prints(self.flow)
 
-        # if running in a completely synchronous frame, anyio will not detect the
-        # backend to use for the task group
-        try:
-            task_group = anyio.create_task_group()
-        except AsyncLibraryNotFoundError:
-            task_group = anyio._backends._asyncio.TaskGroup()
-
        with ExitStack() as stack:
            # TODO: Explore closing task runner before completing the flow to
            # wait for futures to complete
@@ -457,7 +429,6 @@ class FlowRunEngine(Generic[P, R]):
                flow_run=self.flow_run,
                parameters=self.parameters,
                client=client,
-                background_tasks=task_group,
                result_factory=run_coro_as_sync(ResultFactory.from_flow(self.flow)),
                task_runner=task_runner,
            )
@@ -482,7 +453,7 @@ class FlowRunEngine(Generic[P, R]):
        yield
 
     @contextmanager
-    def start(self):
+    def initialize_run(self):
        """
        Enters a client context and creates a flow run if needed.
        """
@@ -490,28 +461,30 @@ class FlowRunEngine(Generic[P, R]):
            self._client = client_ctx.sync_client
            self._is_started = True
 
-            # this conditional is engaged whenever a run is triggered via deployment
-            if self.flow_run_id and not self.flow:
-                self.flow_run = self.client.read_flow_run(self.flow_run_id)
-                try:
-                    self.flow = self.load_flow(self.client)
-                except Exception as exc:
-                    self.handle_exception(
-                        exc,
-                        msg="Failed to load flow from entrypoint.",
-                    )
-                    self.short_circuit = True
-
            if not self.flow_run:
                self.flow_run = self.create_flow_run(self.client)
+                flow_run_url = url_for(self.flow_run)
 
-            ui_url = PREFECT_UI_URL.value()
-            if ui_url:
+                if flow_run_url:
                    self.logger.info(
-                    f"View at {ui_url}/flow-runs/flow-run/{self.flow_run.id}",
-                    extra={"send_to_api": False},
+                        f"View at {flow_run_url}", extra={"send_to_api": False}
+                    )
+            else:
+                # Update the empirical policy to match the flow if it is not set
+                if self.flow_run.empirical_policy.retry_delay is None:
+                    self.flow_run.empirical_policy.retry_delay = (
+                        self.flow.retry_delay_seconds
                    )
 
+                if self.flow_run.empirical_policy.retries is None:
+                    self.flow_run.empirical_policy.retries = self.flow.retries
+
+                self.client.update_flow_run(
+                    flow_run_id=self.flow_run.id,
+                    flow_version=self.flow.version,
+                    empirical_policy=self.flow_run.empirical_policy,
+                )
+
            # validate prior to context so that context receives validated params
            if self.flow.should_validate_parameters:
                try:
@@ -536,6 +509,9 @@ class FlowRunEngine(Generic[P, R]):
                raise
            except (Abort, Pause):
                raise
+            except GeneratorExit:
+                # Do not capture generator exits as crashes
+                raise
            except BaseException as exc:
                # BaseExceptions are caught and handled as crashes
                self.handle_crash(exc)
@@ -550,12 +526,6 @@ class FlowRunEngine(Generic[P, R]):
                    msg=f"Finished in state {display_state}",
                )
 
-            # flush any logs in the background if this is a "top" level run
-            if not (FlowRunContext.get() or TaskRunContext.get()):
-                from_sync.call_soon_in_loop_thread(
-                    create_call(APILogHandler.aflush)
-                )
-
            self._is_started = False
            self._client = None
 
@@ -569,61 +539,81 @@ class FlowRunEngine(Generic[P, R]):
            return False  # TODO: handle this differently?
        return getattr(self, "flow_run").state.is_pending()
 
+    # --------------------------
+    #
+    # The following methods compose the main task run loop
+    #
+    # --------------------------
 
-async def run_flow_async(
-    flow: Flow[P, Coroutine[Any, Any, R]],
+    @contextmanager
+    def start(self) -> Generator[None, None, None]:
+        with self.initialize_run():
+            self.begin_run()
+
+            if self.state.is_running():
+                self.call_hooks()
+            try:
+                yield
+            finally:
+                if self.state.is_final() or self.state.is_cancelling():
+                    self.call_hooks()
+
+    @contextmanager
+    def run_context(self):
+        timeout_context = timeout_async if self.flow.isasync else timeout
+        # reenter the run context to ensure it is up to date for every run
+        with self.setup_run_context():
+            try:
+                with timeout_context(seconds=self.flow.timeout_seconds):
+                    self.logger.debug(
+                        f"Executing flow {self.flow.name!r} for flow run {self.flow_run.name!r}..."
+                    )
+                    yield self
+            except TimeoutError as exc:
+                self.handle_timeout(exc)
+            except Exception as exc:
+                self.logger.exception(f"Encountered exception during execution: {exc}")
+                self.handle_exception(exc)
+
+    def call_flow_fn(self) -> Union[R, Coroutine[Any, Any, R]]:
+        """
+        Convenience method to call the flow function. Returns a coroutine if the
+        flow is async.
+        """
+        if self.flow.isasync:
+
+            async def _call_flow_fn():
+                result = await call_with_parameters(self.flow.fn, self.parameters)
+                self.handle_success(result)
+
+            return _call_flow_fn()
+        else:
+            result = call_with_parameters(self.flow.fn, self.parameters)
+            self.handle_success(result)
+
+
+def run_flow_sync(
+    flow: Flow[P, R],
     flow_run: Optional[FlowRun] = None,
     parameters: Optional[Dict[str, Any]] = None,
     wait_for: Optional[Iterable[PrefectFuture]] = None,
     return_type: Literal["state", "result"] = "result",
-) -> Union[R, None]:
-    """
-    Runs a flow against the API.
+) -> Union[R, State, None]:
+    parameters = flow_run.parameters if flow_run else parameters
 
-    We will most likely want to use this logic as a wrapper and return a coroutine for type inference.
-    """
     engine = FlowRunEngine[P, R](
-        flow=flow,
-        parameters=flow_run.parameters if flow_run else parameters,
-        flow_run=flow_run,
-        wait_for=wait_for,
+        flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
    )
 
-    # This is a context manager that keeps track of the state of the flow run.
-    with engine.start() as run:
-        run.begin_run()
-
-        while run.is_running():
-            with run.enter_run_context():
-                try:
-                    # This is where the flow is actually run.
-                    with timeout_async(seconds=run.flow.timeout_seconds):
-                        call_args, call_kwargs = parameters_to_args_kwargs(
-                            flow.fn, run.parameters or {}
-                        )
-                        run.logger.debug(
-                            f"Executing flow {flow.name!r} for flow run {run.flow_run.name!r}..."
-                        )
-                        result = cast(R, await flow.fn(*call_args, **call_kwargs))  # type: ignore
-                        # If the flow run is successful, finalize it.
-                        run.handle_success(result)
-
-                except TimeoutError as exc:
-                    run.handle_timeout(exc)
-                except Exception as exc:
-                    # If the flow fails, and we have retries left, set the flow to retrying.
-                    run.logger.exception("Encountered exception during execution:")
-                    run.handle_exception(exc)
+    with engine.start():
+        while engine.is_running():
+            with engine.run_context():
+                engine.call_flow_fn()
 
-    if run.state.is_final() or run.state.is_cancelling():
-        for hook in run.get_hooks(run.state, as_async=True):
-            await hook()
-    if return_type == "state":
-        return run.state
-    return run.result()
+    return engine.state if return_type == "state" else engine.result()
 
 
-def run_flow_sync(
+async def run_flow_async(
     flow: Flow[P, R],
     flow_run: Optional[FlowRun] = None,
     parameters: Optional[Dict[str, Any]] = None,
@@ -636,38 +626,86 @@ def run_flow_sync(
        flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
    )
 
-    # This is a context manager that keeps track of the state of the flow run.
-    with engine.start() as run:
-        run.begin_run()
+    with engine.start():
+        while engine.is_running():
+            with engine.run_context():
+                await engine.call_flow_fn()
+
+    return engine.state if return_type == "state" else engine.result()
+
+
+def run_generator_flow_sync(
+    flow: Flow[P, R],
+    flow_run: Optional[FlowRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+) -> Generator[R, None, None]:
+    if return_type != "result":
+        raise ValueError("The return_type for a generator flow must be 'result'")
+
+    engine = FlowRunEngine[P, R](
+        flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+    )
 
-        while run.is_running():
-            with run.enter_run_context():
+    with engine.start():
+        while engine.is_running():
+            with engine.run_context():
+                call_args, call_kwargs = parameters_to_args_kwargs(
+                    flow.fn, engine.parameters or {}
+                )
+                gen = flow.fn(*call_args, **call_kwargs)
                try:
-                    # This is where the flow is actually run.
-                    with timeout(seconds=run.flow.timeout_seconds):
-                        call_args, call_kwargs = parameters_to_args_kwargs(
-                            flow.fn, run.parameters or {}
-                        )
-                        run.logger.debug(
-                            f"Executing flow {flow.name!r} for flow run {run.flow_run.name!r}..."
-                        )
-                        result = cast(R, flow.fn(*call_args, **call_kwargs))  # type: ignore
-                        # If the flow run is successful, finalize it.
-                        run.handle_success(result)
-
-                except TimeoutError as exc:
-                    run.handle_timeout(exc)
-                except Exception as exc:
-                    # If the flow fails, and we have retries left, set the flow to retrying.
-                    run.logger.exception("Encountered exception during execution:")
-                    run.handle_exception(exc)
+                    while True:
+                        gen_result = next(gen)
+                        # link the current state to the result for dependency tracking
+                        link_state_to_result(engine.state, gen_result)
+                        yield gen_result
+                except StopIteration as exc:
+                    engine.handle_success(exc.value)
+                except GeneratorExit as exc:
+                    engine.handle_success(None)
+                    gen.throw(exc)
 
-    if run.state.is_final() or run.state.is_cancelling():
-        for hook in run.get_hooks(run.state):
-            hook()
-    if return_type == "state":
-        return run.state
-    return run.result()
+    return engine.result()
+
+
+async def run_generator_flow_async(
+    flow: Flow[P, R],
+    flow_run: Optional[FlowRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+) -> AsyncGenerator[R, None]:
+    if return_type != "result":
+        raise ValueError("The return_type for a generator flow must be 'result'")
+
+    engine = FlowRunEngine[P, R](
+        flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+    )
+
+    with engine.start():
+        while engine.is_running():
+            with engine.run_context():
+                call_args, call_kwargs = parameters_to_args_kwargs(
+                    flow.fn, engine.parameters or {}
+                )
+                gen = flow.fn(*call_args, **call_kwargs)
+                try:
+                    while True:
+                        # can't use anext in Python < 3.10
+                        gen_result = await gen.__anext__()
+                        # link the current state to the result for dependency tracking
+                        link_state_to_result(engine.state, gen_result)
+                        yield gen_result
+                except (StopAsyncIteration, GeneratorExit) as exc:
+                    engine.handle_success(None)
+                    if isinstance(exc, GeneratorExit):
+                        gen.throw(exc)
+
+    # async generators can't return, but we can raise failures here
+    if engine.state.is_failed():
+        engine.result()
 
 
 def run_flow(
@@ -684,7 +722,11 @@ def run_flow(
        wait_for=wait_for,
        return_type=return_type,
    )
-    if flow.isasync:
+    if flow.isasync and flow.isgenerator:
+        return run_generator_flow_async(**kwargs)
+    elif flow.isgenerator:
+        return run_generator_flow_sync(**kwargs)
+    elif flow.isasync:
        return run_flow_async(**kwargs)
    else:
        return run_flow_sync(**kwargs)
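
Taken together with the new `run_generator_flow_sync` / `run_generator_flow_async` paths, the final hunk means `run_flow` now dispatches generator flows instead of treating them as plain callables: each yielded value is handed back to the caller while the engine tracks state and links results. A minimal sketch of the user-facing shape this enables, assuming the usual `@flow` decorator routes calls through `run_flow` as shown above and that an API or ephemeral server is available:

```python
from prefect import flow


@flow
def emit_numbers(n: int = 3):
    # Each yielded value reaches the caller while the flow run stays active;
    # the generator's return value becomes the flow run's result via StopIteration.
    for i in range(n):
        yield i
    return n


for value in emit_numbers():
    print(value)  # 0, 1, 2
```
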