prefect-client 3.1.12-py3-none-any.whl → 3.1.14-py3-none-any.whl
This diff compares the contents of two publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- prefect/_experimental/lineage.py +63 -0
- prefect/_experimental/sla/client.py +53 -27
- prefect/_experimental/sla/objects.py +10 -2
- prefect/_internal/concurrency/services.py +2 -2
- prefect/_internal/concurrency/threads.py +6 -0
- prefect/_internal/retries.py +6 -3
- prefect/_internal/schemas/validators.py +6 -4
- prefect/_version.py +3 -3
- prefect/artifacts.py +4 -1
- prefect/automations.py +1 -1
- prefect/blocks/abstract.py +5 -2
- prefect/blocks/notifications.py +1 -0
- prefect/cache_policies.py +70 -22
- prefect/client/orchestration/_automations/client.py +4 -0
- prefect/client/orchestration/_deployments/client.py +3 -3
- prefect/client/utilities.py +3 -3
- prefect/context.py +16 -6
- prefect/deployments/base.py +7 -4
- prefect/deployments/flow_runs.py +5 -1
- prefect/deployments/runner.py +6 -11
- prefect/deployments/steps/core.py +1 -1
- prefect/deployments/steps/pull.py +8 -3
- prefect/deployments/steps/utility.py +2 -2
- prefect/docker/docker_image.py +13 -9
- prefect/engine.py +19 -10
- prefect/events/cli/automations.py +4 -4
- prefect/events/clients.py +17 -14
- prefect/events/filters.py +34 -34
- prefect/events/schemas/automations.py +12 -8
- prefect/events/schemas/events.py +5 -1
- prefect/events/worker.py +1 -1
- prefect/filesystems.py +1 -1
- prefect/flow_engine.py +172 -123
- prefect/flows.py +119 -74
- prefect/futures.py +14 -7
- prefect/infrastructure/provisioners/__init__.py +2 -0
- prefect/infrastructure/provisioners/cloud_run.py +4 -4
- prefect/infrastructure/provisioners/coiled.py +249 -0
- prefect/infrastructure/provisioners/container_instance.py +4 -3
- prefect/infrastructure/provisioners/ecs.py +55 -43
- prefect/infrastructure/provisioners/modal.py +5 -4
- prefect/input/actions.py +5 -1
- prefect/input/run_input.py +157 -43
- prefect/logging/configuration.py +5 -8
- prefect/logging/filters.py +2 -2
- prefect/logging/formatters.py +15 -11
- prefect/logging/handlers.py +24 -14
- prefect/logging/highlighters.py +5 -5
- prefect/logging/loggers.py +29 -20
- prefect/main.py +3 -1
- prefect/results.py +166 -86
- prefect/runner/runner.py +112 -84
- prefect/runner/server.py +3 -1
- prefect/runner/storage.py +18 -18
- prefect/runner/submit.py +19 -12
- prefect/runtime/deployment.py +15 -8
- prefect/runtime/flow_run.py +19 -6
- prefect/runtime/task_run.py +7 -3
- prefect/settings/base.py +17 -7
- prefect/settings/legacy.py +4 -4
- prefect/settings/models/api.py +4 -3
- prefect/settings/models/cli.py +4 -3
- prefect/settings/models/client.py +7 -4
- prefect/settings/models/cloud.py +4 -3
- prefect/settings/models/deployments.py +4 -3
- prefect/settings/models/experiments.py +4 -3
- prefect/settings/models/flows.py +4 -3
- prefect/settings/models/internal.py +4 -3
- prefect/settings/models/logging.py +8 -6
- prefect/settings/models/results.py +4 -3
- prefect/settings/models/root.py +11 -16
- prefect/settings/models/runner.py +8 -5
- prefect/settings/models/server/api.py +6 -3
- prefect/settings/models/server/database.py +120 -25
- prefect/settings/models/server/deployments.py +4 -3
- prefect/settings/models/server/ephemeral.py +7 -4
- prefect/settings/models/server/events.py +6 -3
- prefect/settings/models/server/flow_run_graph.py +4 -3
- prefect/settings/models/server/root.py +4 -3
- prefect/settings/models/server/services.py +15 -12
- prefect/settings/models/server/tasks.py +7 -4
- prefect/settings/models/server/ui.py +4 -3
- prefect/settings/models/tasks.py +10 -5
- prefect/settings/models/testing.py +4 -3
- prefect/settings/models/worker.py +7 -4
- prefect/settings/profiles.py +13 -12
- prefect/settings/sources.py +20 -19
- prefect/states.py +17 -13
- prefect/task_engine.py +43 -33
- prefect/task_runners.py +35 -23
- prefect/task_runs.py +20 -11
- prefect/task_worker.py +12 -7
- prefect/tasks.py +67 -25
- prefect/telemetry/bootstrap.py +4 -1
- prefect/telemetry/run_telemetry.py +15 -13
- prefect/transactions.py +3 -3
- prefect/types/__init__.py +9 -6
- prefect/types/_datetime.py +19 -0
- prefect/utilities/_deprecated.py +38 -0
- prefect/utilities/engine.py +11 -4
- prefect/utilities/filesystem.py +2 -2
- prefect/utilities/generics.py +1 -1
- prefect/utilities/pydantic.py +21 -36
- prefect/workers/base.py +52 -30
- prefect/workers/process.py +20 -15
- prefect/workers/server.py +4 -5
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/METADATA +2 -2
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/RECORD +111 -108
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/LICENSE +0 -0
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/WHEEL +0 -0
- {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/top_level.txt +0 -0
prefect/flow_engine.py
CHANGED
@@ -39,6 +39,7 @@ from prefect.context import (
     FlowRunContext,
     SyncClientContext,
     TagsContext,
+    hydrated_context,
 )
 from prefect.exceptions import (
     Abort,
@@ -137,6 +138,7 @@ class BaseFlowRunEngine(Generic[P, R]):
     flow_run_id: Optional[UUID] = None
     logger: logging.Logger = field(default_factory=lambda: get_logger("engine"))
     wait_for: Optional[Iterable[PrefectFuture[Any]]] = None
+    context: Optional[dict[str, Any]] = None
     # holds the return value from the user code
     _return_value: Union[R, Type[NotSet]] = NotSet
     # holds the exception raised by the user code, if any
@@ -146,7 +148,7 @@ class BaseFlowRunEngine(Generic[P, R]):
     _flow_run_name_set: bool = False
     _telemetry: RunTelemetry = field(default_factory=RunTelemetry)

-    def __post_init__(self):
+    def __post_init__(self) -> None:
         if self.flow is None and self.flow_run_id is None:
             raise ValueError("Either a flow or a flow_run_id must be provided.")

@@ -167,7 +169,7 @@ class BaseFlowRunEngine(Generic[P, R]):
             return False  # TODO: handle this differently?
         return getattr(self, "flow_run").state.is_pending()

-    def cancel_all_tasks(self):
+    def cancel_all_tasks(self) -> None:
         if hasattr(self.flow.task_runner, "cancel_all"):
             self.flow.task_runner.cancel_all()  # type: ignore

@@ -208,6 +210,8 @@ class BaseFlowRunEngine(Generic[P, R]):
 @dataclass
 class FlowRunEngine(BaseFlowRunEngine[P, R]):
     _client: Optional[SyncPrefectClient] = None
+    flow_run: FlowRun | None = None
+    parameters: dict[str, Any] | None = None

     @property
     def client(self) -> SyncPrefectClient:
@@ -502,7 +506,7 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):
             tags=TagsContext.get().current_tags,
         )

-    def call_hooks(self, state: Optional[State] = None):
+    def call_hooks(self, state: Optional[State] = None) -> None:
         if state is None:
             state = self.state
         flow = self.flow
@@ -600,7 +604,9 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):

             # set the logger to the flow run logger
-            self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)  # type: ignore
+            self.logger: "logging.Logger" = flow_run_logger(
+                flow_run=self.flow_run, flow=self.flow
+            )  # type: ignore

             # update the flow run name if necessary
             if not self._flow_run_name_set and self.flow.flow_run_name:
@@ -643,65 +649,68 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):
         """
         Enters a client context and creates a flow run if needed.
         """
+        with hydrated_context(self.context):
+            with SyncClientContext.get_or_create() as client_ctx:
+                self._client = client_ctx.client
+                self._is_started = True

+                if not self.flow_run:
+                    self.flow_run = self.create_flow_run(self.client)
+                else:
+                    # Update the empirical policy to match the flow if it is not set
+                    if self.flow_run.empirical_policy.retry_delay is None:
+                        self.flow_run.empirical_policy.retry_delay = (
+                            self.flow.retry_delay_seconds
+                        )

+                    if self.flow_run.empirical_policy.retries is None:
+                        self.flow_run.empirical_policy.retries = self.flow.retries

+                    self.client.update_flow_run(
+                        flow_run_id=self.flow_run.id,
+                        flow_version=self.flow.version,
+                        empirical_policy=self.flow_run.empirical_policy,
+                    )

+                self._telemetry.start_span(
+                    run=self.flow_run,
+                    client=self.client,
+                    parameters=self.parameters,
+                )

+                try:
+                    yield self

+                except TerminationSignal as exc:
+                    self.cancel_all_tasks()
+                    self.handle_crash(exc)
+                    raise
+                except Exception:
+                    # regular exceptions are caught and re-raised to the user
+                    raise
+                except (Abort, Pause):
+                    raise
+                except GeneratorExit:
+                    # Do not capture generator exits as crashes
+                    raise
+                except BaseException as exc:
+                    # BaseExceptions are caught and handled as crashes
+                    self.handle_crash(exc)
+                    raise
+                finally:
+                    # If debugging, use the more complete `repr` than the usual `str` description
+                    display_state = (
+                        repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
+                    )
+                    self.logger.log(
+                        level=logging.INFO
+                        if self.state.is_completed()
+                        else logging.ERROR,
+                        msg=f"Finished in state {display_state}",
+                    )

+                    self._is_started = False
+                    self._client = None

     # --------------------------
     #
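The changes above thread a new `context` field through the flow run engine: `BaseFlowRunEngine` gains `context: Optional[dict[str, Any]]`, and the sync engine now enters `hydrated_context(self.context)` before opening a client context, so a serialized copy of the caller's context (tags, settings, any parent run context) can be rebuilt inside whatever process executes the flow. A minimal sketch of how a caller might use this, assuming `serialize_context` from `prefect.context` as the serialization counterpart; the example flow is illustrative only:

```python
# Sketch only: pass a serialized snapshot of the current context to the engine,
# which rehydrates it via `hydrated_context` (per the diff above).
# `serialize_context` is assumed to be the companion helper in prefect.context;
# `my_flow` is a made-up example flow.
from prefect import flow
from prefect.context import serialize_context
from prefect.flow_engine import run_flow_sync


@flow
def my_flow(x: int) -> int:
    return x + 1


# Capture tags, settings, and any parent run context as a plain dict...
ctx = serialize_context()

# ...and hand it to the engine; FlowRunEngine enters hydrated_context(ctx)
# before creating the client and the flow run, so the run executes with the
# caller's context even if this call happens in another process.
result = run_flow_sync(my_flow, parameters={"x": 1}, context=ctx)
print(result)
```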
@@ -768,6 +777,8 @@ class AsyncFlowRunEngine(BaseFlowRunEngine[P, R]):
     """

     _client: Optional[PrefectClient] = None
+    parameters: dict[str, Any] | None = None
+    flow_run: FlowRun | None = None

     @property
     def client(self) -> PrefectClient:
@@ -1061,7 +1072,7 @@ class AsyncFlowRunEngine(BaseFlowRunEngine[P, R]):
             tags=TagsContext.get().current_tags,
         )

-    async def call_hooks(self, state: Optional[State] = None):
+    async def call_hooks(self, state: Optional[State] = None) -> None:
         if state is None:
             state = self.state
         flow = self.flow
@@ -1158,7 +1169,9 @@ class AsyncFlowRunEngine(BaseFlowRunEngine[P, R]):
             stack.enter_context(ConcurrencyContext())

             # set the logger to the flow run logger
-            self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
+            self.logger: "logging.Logger" = flow_run_logger(
+                flow_run=self.flow_run, flow=self.flow
+            )

             # update the flow run name if necessary
@@ -1200,71 +1213,74 @@ class AsyncFlowRunEngine(BaseFlowRunEngine[P, R]):
         """
         Enters a client context and creates a flow run if needed.
         """
+        with hydrated_context(self.context):
+            async with AsyncClientContext.get_or_create() as client_ctx:
+                self._client = client_ctx.client
+                self._is_started = True
+
+                if not self.flow_run:
+                    self.flow_run = await self.create_flow_run(self.client)
+                    flow_run_url = url_for(self.flow_run)
+
+                    if flow_run_url:
+                        self.logger.info(
+                            f"View at {flow_run_url}", extra={"send_to_api": False}
+                        )
+                else:
+                    # Update the empirical policy to match the flow if it is not set
+                    if self.flow_run.empirical_policy.retry_delay is None:
+                        self.flow_run.empirical_policy.retry_delay = (
+                            self.flow.retry_delay_seconds
+                        )

+                    if self.flow_run.empirical_policy.retries is None:
+                        self.flow_run.empirical_policy.retries = self.flow.retries

+                    await self.client.update_flow_run(
+                        flow_run_id=self.flow_run.id,
+                        flow_version=self.flow.version,
+                        empirical_policy=self.flow_run.empirical_policy,
+                    )

+                await self._telemetry.async_start_span(
+                    run=self.flow_run,
+                    client=self.client,
+                    parameters=self.parameters,
+                )

+                try:
+                    yield self

+                except TerminationSignal as exc:
+                    self.cancel_all_tasks()
+                    await self.handle_crash(exc)
+                    raise
+                except Exception:
+                    # regular exceptions are caught and re-raised to the user
+                    raise
+                except (Abort, Pause):
+                    raise
+                except GeneratorExit:
+                    # Do not capture generator exits as crashes
+                    raise
+                except BaseException as exc:
+                    # BaseExceptions are caught and handled as crashes
+                    await self.handle_crash(exc)
+                    raise
+                finally:
+                    # If debugging, use the more complete `repr` than the usual `str` description
+                    display_state = (
+                        repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
+                    )
+                    self.logger.log(
+                        level=logging.INFO
+                        if self.state.is_completed()
+                        else logging.ERROR,
+                        msg=f"Finished in state {display_state}",
+                    )

+                    self._is_started = False
+                    self._client = None

     # --------------------------
     #
@@ -1320,14 +1336,16 @@ def run_flow_sync(
     flow: Flow[P, R],
     flow_run: Optional[FlowRun] = None,
     parameters: Optional[Dict[str, Any]] = None,
-    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
     return_type: Literal["state", "result"] = "result",
+    context: Optional[dict[str, Any]] = None,
 ) -> Union[R, State, None]:
     engine = FlowRunEngine[P, R](
         flow=flow,
         parameters=parameters,
         flow_run=flow_run,
         wait_for=wait_for,
+        context=context,
     )

     with engine.start():
@@ -1342,11 +1360,16 @@ async def run_flow_async(
     flow: Flow[P, R],
     flow_run: Optional[FlowRun] = None,
     parameters: Optional[Dict[str, Any]] = None,
-    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
     return_type: Literal["state", "result"] = "result",
+    context: Optional[dict[str, Any]] = None,
 ) -> Union[R, State, None]:
     engine = AsyncFlowRunEngine[P, R](
-        flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+        flow=flow,
+        parameters=parameters,
+        flow_run=flow_run,
+        wait_for=wait_for,
+        context=context,
     )

     async with engine.start():
@@ -1361,14 +1384,19 @@ def run_generator_flow_sync(
     flow: Flow[P, R],
     flow_run: Optional[FlowRun] = None,
     parameters: Optional[Dict[str, Any]] = None,
-    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    wait_for: Optional[Iterable[PrefectFuture[Any]]] = None,
     return_type: Literal["state", "result"] = "result",
+    context: Optional[dict[str, Any]] = None,
 ) -> Generator[R, None, None]:
     if return_type != "result":
         raise ValueError("The return_type for a generator flow must be 'result'")

     engine = FlowRunEngine[P, R](
-        flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+        flow=flow,
+        parameters=parameters,
+        flow_run=flow_run,
+        wait_for=wait_for,
+        context=context,
     )

     with engine.start():
@@ -1399,12 +1427,17 @@ async def run_generator_flow_async(
     parameters: Optional[Dict[str, Any]] = None,
     wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
     return_type: Literal["state", "result"] = "result",
+    context: Optional[dict[str, Any]] = None,
 ) -> AsyncGenerator[R, None]:
     if return_type != "result":
         raise ValueError("The return_type for a generator flow must be 'result'")

     engine = AsyncFlowRunEngine[P, R](
-        flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+        flow=flow,
+        parameters=parameters,
+        flow_run=flow_run,
+        wait_for=wait_for,
+        context=context,
     )

     async with engine.start():
@@ -1438,8 +1471,23 @@ def run_flow(
     wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
     return_type: Literal["state", "result"] = "result",
     error_logger: Optional[logging.Logger] = None,
+    context: Optional[dict[str, Any]] = None,
+) -> (
+    R
+    | State
+    | None
+    | Coroutine[Any, Any, R | State | None]
+    | Generator[R, None, None]
+    | AsyncGenerator[R, None]
+):
+    ret_val: Union[
+        R,
+        State,
+        None,
+        Coroutine[Any, Any, R | State | None],
+        Generator[R, None, None],
+        AsyncGenerator[R, None],
+    ] = None

     try:
         kwargs: dict[str, Any] = dict(
@@ -1450,6 +1498,7 @@ def run_flow(
             ),
             wait_for=wait_for,
             return_type=return_type,
+            context=context,
         )

         if flow.isasync and flow.isgenerator: