prefect-client 2.18.1__py3-none-any.whl → 2.18.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/automations.py +162 -0
- prefect/client/orchestration.py +29 -11
- prefect/client/schemas/objects.py +11 -8
- prefect/engine.py +17 -1
- prefect/events/cli/automations.py +157 -34
- prefect/events/clients.py +3 -2
- prefect/events/filters.py +1 -1
- prefect/events/schemas/automations.py +2 -2
- prefect/events/schemas/deployment_triggers.py +1 -1
- prefect/events/schemas/events.py +11 -4
- prefect/events/schemas/labelling.py +1 -1
- prefect/flows.py +14 -11
- prefect/input/run_input.py +3 -1
- prefect/new_flow_engine.py +244 -57
- prefect/new_task_engine.py +159 -45
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +1 -1
- prefect/settings.py +21 -0
- prefect/tasks.py +134 -24
- prefect/utilities/asyncutils.py +16 -12
- prefect/workers/process.py +2 -1
- {prefect_client-2.18.1.dist-info → prefect_client-2.18.3.dist-info}/METADATA +1 -1
- {prefect_client-2.18.1.dist-info → prefect_client-2.18.3.dist-info}/RECORD +25 -24
- {prefect_client-2.18.1.dist-info → prefect_client-2.18.3.dist-info}/LICENSE +0 -0
- {prefect_client-2.18.1.dist-info → prefect_client-2.18.3.dist-info}/WHEEL +0 -0
- {prefect_client-2.18.1.dist-info → prefect_client-2.18.3.dist-info}/top_level.txt +0 -0
prefect/flows.py
CHANGED
@@ -733,7 +733,7 @@ class Flow(Generic[P, R]):
     @sync_compatible
     async def serve(
         self,
-        name: str,
+        name: Optional[str] = None,
         interval: Optional[
             Union[
                 Iterable[Union[int, float, datetime.timedelta]],
@@ -764,7 +764,7 @@ class Flow(Generic[P, R]):
         Creates a deployment for this flow and starts a runner to monitor for scheduled work.

         Args:
-            name: The name to give the created deployment.
+            name: The name to give the created deployment. Defaults to the name of the flow.
             interval: An interval on which to execute the deployment. Accepts a number or a
                 timedelta object to create a single schedule. If a number is given, it will be
                 interpreted as seconds. Also accepts an iterable of numbers or timedelta to create
@@ -827,10 +827,13 @@ class Flow(Generic[P, R]):
         """
         from prefect.runner import Runner

-        # Handling for my_flow.serve(__file__)
-        # Will set name to name of file where my_flow.serve() without the extension
-        # Non filepath strings will pass through unchanged
-        name = Path(name).stem
+        if not name:
+            name = self.name
+        else:
+            # Handling for my_flow.serve(__file__)
+            # Will set name to name of file where my_flow.serve() without the extension
+            # Non filepath strings will pass through unchanged
+            name = Path(name).stem

         runner = Runner(name=name, pause_on_shutdown=pause_on_shutdown, limit=limit)
         deployment_id = await runner.add_flow(
@@ -1226,19 +1229,19 @@ class Flow(Generic[P, R]):
             return track_viz_task(self.isasync, self.name, parameters)

         if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE.value():
-            from prefect.new_flow_engine import run_flow
-            from prefect.utilities.asyncutils import run_sync
+            from prefect.new_flow_engine import run_flow, run_flow_sync

-            awaitable = run_flow(
+            run_kwargs = dict(
                 flow=self,
                 parameters=parameters,
                 wait_for=wait_for,
                 return_type=return_type,
             )
             if self.isasync:
-                return awaitable
+                # this returns an awaitable coroutine
+                return run_flow(**run_kwargs)
             else:
-                return run_sync(awaitable)
+                return run_flow_sync(**run_kwargs)

         return enter_flow_run_engine_from_flow_call(
             self,
prefect/input/run_input.py
CHANGED
@@ -582,7 +582,9 @@ def receive_input(
     # the signature is the same as here:
     # Union[Type[R], Type[T], pydantic.BaseModel],
     # Seems like a possible mypy bug, so we'll ignore the type check here.
-    input_cls = run_input_subclass_from_type(input_type)  # type: ignore[arg-type]
+    input_cls: Union[
+        Type[AutomaticRunInput[T]], Type[R]
+    ] = run_input_subclass_from_type(input_type)  # type: ignore[arg-type]

     if issubclass(input_cls, AutomaticRunInput):
         return input_cls.receive(
prefect/new_flow_engine.py
CHANGED
@@ -1,5 +1,7 @@
 import asyncio
-from contextlib import asynccontextmanager
+import inspect
+import os
+from contextlib import AsyncExitStack, asynccontextmanager, contextmanager
 from dataclasses import dataclass
 from typing import (
     Any,
@@ -9,20 +11,26 @@ from typing import (
     Iterable,
     Literal,
     Optional,
+    Tuple,
     TypeVar,
     Union,
     cast,
 )
+from uuid import UUID

 import anyio
+import anyio._backends._asyncio
+from sniffio import AsyncLibraryNotFoundError
 from typing_extensions import ParamSpec

-from prefect import Flow, Task, get_client
+from prefect import Task, get_client
 from prefect.client.orchestration import PrefectClient
 from prefect.client.schemas import FlowRun, TaskRun
 from prefect.client.schemas.filters import FlowRunFilter
 from prefect.client.schemas.sorting import FlowRunSort
 from prefect.context import FlowRunContext
+from prefect.deployments import load_flow_from_flow_run
+from prefect.flows import Flow, load_flow_from_entrypoint
 from prefect.futures import PrefectFuture, resolve_futures_to_states
 from prefect.logging.loggers import flow_run_logger
 from prefect.results import ResultFactory
@@ -33,7 +41,8 @@ from prefect.states import (
     exception_to_failed_state,
     return_value_to_state,
 )
-from prefect.utilities.asyncutils import A, Async
+from prefect.utilities.asyncutils import A, Async, run_sync
+from prefect.utilities.callables import parameters_to_args_kwargs
 from prefect.utilities.engine import (
     _dynamic_key_for_task_run,
     _resolve_custom_flow_run_name,
@@ -45,16 +54,34 @@ P = ParamSpec("P")
 R = TypeVar("R")


+async def load_flow_and_flow_run(flow_run_id: UUID) -> Tuple[FlowRun, Flow]:
+    ## TODO: add error handling to update state and log tracebacks
+    entrypoint = os.environ.get("PREFECT__FLOW_ENTRYPOINT")
+
+    async with get_client() as client:
+        flow_run = await client.read_flow_run(flow_run_id)
+        flow = (
+            load_flow_from_entrypoint(entrypoint)
+            if entrypoint
+            else await load_flow_from_flow_run(flow_run, client=client)
+        )
+    return flow_run, flow
+
+
 @dataclass
 class FlowRunEngine(Generic[P, R]):
-    flow: Flow[P, Coroutine[Any, Any, R]]
+    flow: Optional[Union[Flow[P, R], Flow[P, Coroutine[Any, Any, R]]]] = None
     parameters: Optional[Dict[str, Any]] = None
     flow_run: Optional[FlowRun] = None
+    flow_run_id: Optional[UUID] = None
     _is_started: bool = False
     _client: Optional[PrefectClient] = None
     short_circuit: bool = False

     def __post_init__(self):
+        if self.flow is None and self.flow_run_id is None:
+            raise ValueError("Either a flow or a flow_run_id must be provided.")
+
         if self.parameters is None:
             self.parameters = {}

@@ -89,10 +116,17 @@ class FlowRunEngine(Generic[P, R]):
         return state

     async def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
-        return await self.state.result(raise_on_failure=raise_on_failure, fetch=True)  # type: ignore
+        _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True)  # type: ignore
+        # state.result is a `sync_compatible` function that may or may not return an awaitable
+        # depending on whether the parent frame is sync or not
+        if inspect.isawaitable(_result):
+            _result = await _result
+        return _result

     async def handle_success(self, result: R) -> R:
         result_factory = getattr(FlowRunContext.get(), "result_factory", None)
+        if result_factory is None:
+            raise ValueError("Result factory is not set")
         terminal_state = await return_value_to_state(
             await resolve_futures_to_states(result),
             result_factory=result_factory,
@@ -117,71 +151,102 @@ class FlowRunEngine(Generic[P, R]):
         state = await self.set_state(Running())
         return state

+    async def load_subflow_run(
+        self, parent_task_run: TaskRun, client: PrefectClient, context: FlowRunContext
+    ) -> Union[FlowRun, None]:
+        """
+        This method attempts to load an existing flow run for a subflow task
+        run, if appropriate.
+
+        If the parent task run is in a final but not COMPLETED state, and not
+        being rerun, then we attempt to load an existing flow run instead of
+        creating a new one. This will prevent the engine from running the
+        subflow again.
+
+        If no existing flow run is found, or if the subflow should be rerun,
+        then no flow run is returned.
+        """
+
+        # check if the parent flow run is rerunning
+        rerunning = (
+            context.flow_run.run_count > 1
+            if getattr(context, "flow_run", None)
+            and isinstance(context.flow_run, FlowRun)
+            else False
+        )
+
+        # if the parent task run is in a final but not completed state, and
+        # not rerunning, then retrieve the most recent flow run instead of
+        # creating a new one. This effectively loads a cached flow run for
+        # situations where we are confident the flow should not be run
+        # again.
+        assert isinstance(parent_task_run.state, State)
+        if parent_task_run.state.is_final() and not (
+            rerunning and not parent_task_run.state.is_completed()
+        ):
+            # return the most recent flow run, if it exists
+            flow_runs = await client.read_flow_runs(
+                flow_run_filter=FlowRunFilter(
+                    parent_task_run_id={"any_": [parent_task_run.id]}
+                ),
+                sort=FlowRunSort.EXPECTED_START_TIME_ASC,
+                limit=1,
+            )
+            if flow_runs:
+                return flow_runs[-1]
+
     async def create_subflow_task_run(
         self, client: PrefectClient, context: FlowRunContext
     ) -> TaskRun:
+        """
+        Adds a task to a parent flow run that represents the execution of a subflow run.
+
+        The task run is referred to as the "parent task run" of the subflow and will be kept
+        in sync with the subflow run's state by the orchestration engine.
+        """
         dummy_task = Task(
             name=self.flow.name, fn=self.flow.fn, version=self.flow.version
         )
         task_inputs = {
-            k: await collect_task_run_inputs(v) for k, v in self.parameters.items()
+            k: await collect_task_run_inputs(v)
+            for k, v in (self.parameters or {}).items()
         }
         parent_task_run = await client.create_task_run(
             task=dummy_task,
             flow_run_id=(
-                context.flow_run.id
+                context.flow_run.id
+                if getattr(context, "flow_run", None)
+                and isinstance(context.flow_run, FlowRun)
+                else None
             ),
-            dynamic_key=_dynamic_key_for_task_run(context, dummy_task),
-            task_inputs=task_inputs,
+            dynamic_key=_dynamic_key_for_task_run(context, dummy_task),  # type: ignore
+            task_inputs=task_inputs,  # type: ignore
             state=Pending(),
         )
         return parent_task_run

-    async def get_most_recent_flow_run_for_parent_task_run(
-        self, client: PrefectClient, parent_task_run: TaskRun
-    ) -> "Union[FlowRun, None]":
-        """
-        Get the most recent flow run associated with the provided parent task run.
-
-        Args:
-            - An orchestration client
-            - The parent task run to get the most recent flow run for
-
-        Returns:
-            The most recent flow run associated with the parent task run or `None` if
-            no flow runs are found
-        """
-        flow_runs = await client.read_flow_runs(
-            flow_run_filter=FlowRunFilter(
-                parent_task_run_id={"any_": [parent_task_run.id]}
-            ),
-            sort=FlowRunSort.EXPECTED_START_TIME_ASC,
-        )
-        return flow_runs[-1] if flow_runs else None
-
     async def create_flow_run(self, client: PrefectClient) -> FlowRun:
         flow_run_ctx = FlowRunContext.get()
+        parameters = self.parameters or {}

         parent_task_run = None
+
         # this is a subflow run
         if flow_run_ctx:
+            # get the parent task run
             parent_task_run = await self.create_subflow_task_run(
                 client=client, context=flow_run_ctx
            )
-
-            # check if there is already a flow run for this subflow and
-            # return it if so
-            most_recent_flow_run = (
-                await self.get_most_recent_flow_run_for_parent_task_run(
-                    client=client, parent_task_run=parent_task_run
-                )
-            )
-            if most_recent_flow_run:
-                return most_recent_flow_run
+
+            # check if there is already a flow run for this subflow
+            if subflow_run := await self.load_subflow_run(
+                parent_task_run=parent_task_run, client=client, context=flow_run_ctx
+            ):
+                return subflow_run

         try:
             flow_run_name = _resolve_custom_flow_run_name(
-                flow=self.flow, parameters=self.parameters
+                flow=self.flow, parameters=parameters
             )
         except TypeError:
             flow_run_name = None
@@ -189,7 +254,7 @@ class FlowRunEngine(Generic[P, R]):
         flow_run = await client.create_flow_run(
             flow=self.flow,
             name=flow_run_name,
-            parameters=self.flow.serialize_parameters(self.parameters),
+            parameters=self.flow.serialize_parameters(parameters),
             state=Pending(),
             parent_task_run_id=getattr(parent_task_run, "id", None),
         )
@@ -199,8 +264,46 @@ class FlowRunEngine(Generic[P, R]):
     async def enter_run_context(self, client: Optional[PrefectClient] = None):
         if client is None:
             client = self.client
+        if not self.flow_run:
+            raise ValueError("Flow run not set")

         self.flow_run = await client.read_flow_run(self.flow_run.id)
+        task_runner = self.flow.task_runner.duplicate()
+
+        async with AsyncExitStack() as stack:
+            task_runner = await stack.enter_async_context(
+                self.flow.task_runner.duplicate().start()
+            )
+            stack.enter_context(
+                FlowRunContext(
+                    flow=self.flow,
+                    log_prints=self.flow.log_prints or False,
+                    flow_run=self.flow_run,
+                    parameters=self.parameters,
+                    client=client,
+                    background_tasks=anyio.create_task_group(),
+                    result_factory=await ResultFactory.from_flow(self.flow),
+                    task_runner=task_runner,
+                )
+            )
+            self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
+            yield
+
+    @contextmanager
+    def enter_run_context_sync(self, client: Optional[PrefectClient] = None):
+        if client is None:
+            client = self.client
+        if not self.flow_run:
+            raise ValueError("Flow run not set")
+
+        self.flow_run = run_sync(client.read_flow_run(self.flow_run.id))
+
+        # if running in a completely synchronous frame, anyio will not detect the
+        # backend to use for the task group
+        try:
+            task_group = anyio.create_task_group()
+        except AsyncLibraryNotFoundError:
+            task_group = anyio._backends._asyncio.TaskGroup()

         with FlowRunContext(
             flow=self.flow,
@@ -208,8 +311,8 @@ class FlowRunEngine(Generic[P, R]):
             flow_run=self.flow_run,
             parameters=self.parameters,
             client=client,
-            background_tasks=anyio.create_task_group(),
-            result_factory=await ResultFactory.from_flow(self.flow),
+            background_tasks=task_group,
+            result_factory=run_sync(ResultFactory.from_flow(self.flow)),
             task_runner=self.flow.task_runner,
         ):
             self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
@@ -224,13 +327,27 @@ class FlowRunEngine(Generic[P, R]):
             self._client = client
             self._is_started = True

+            # this conditional is engaged whenever a run is triggered via deployment
+            if self.flow_run_id and not self.flow:
+                self.flow_run = await client.read_flow_run(self.flow_run_id)
+                try:
+                    self.flow = await self.load_flow(client)
+                except Exception as exc:
+                    await self.handle_exception(
+                        exc,
+                        msg="Failed to load flow from entrypoint.",
+                    )
+                    self.short_circuit = True
+
             if not self.flow_run:
                 self.flow_run = await self.create_flow_run(client)

             # validate prior to context so that context receives validated params
             if self.flow.should_validate_parameters:
                 try:
-                    self.parameters = self.flow.validate_parameters(self.parameters)
+                    self.parameters = self.flow.validate_parameters(
+                        self.parameters or {}
+                    )
                 except Exception as exc:
                     await self.handle_exception(
                         exc,
@@ -238,11 +355,46 @@ class FlowRunEngine(Generic[P, R]):
                         result_factory=await ResultFactory.from_flow(self.flow),
                     )
                     self.short_circuit = True
+            try:
+                yield self
+            finally:
+                self._is_started = False
+                self._client = None
+
+    @contextmanager
+    def start_sync(self):
+        """
+        Enters a client context and creates a flow run if needed.
+        """

+        client = get_client()
+        run_sync(client.__aenter__())
+        self._client = client
+        self._is_started = True
+
+        if not self.flow_run:
+            self.flow_run = run_sync(self.create_flow_run(client))
+
+        # validate prior to context so that context receives validated params
+        if self.flow.should_validate_parameters:
+            try:
+                self.parameters = self.flow.validate_parameters(self.parameters or {})
+            except Exception as exc:
+                run_sync(
+                    self.handle_exception(
+                        exc,
+                        msg="Validation of flow parameters failed with error",
+                        result_factory=run_sync(ResultFactory.from_flow(self.flow)),
+                    )
+                )
+                self.short_circuit = True
+        try:
             yield self
-
-            self._is_started = False
-            self._client = None
+        finally:
+            # quickly close client
+            run_sync(client.__aexit__(None, None, None))
+            self._is_started = False
+            self._client = None

     def is_running(self) -> bool:
         if getattr(self, "flow_run", None) is None:
@@ -256,31 +408,33 @@ class FlowRunEngine(Generic[P, R]):


 async def run_flow(
-    flow: Flow[P, Coroutine[Any, Any, R]],
+    flow: Optional[Flow[P, Coroutine[Any, Any, R]]] = None,
     flow_run: Optional[FlowRun] = None,
+    flow_run_id: Optional[UUID] = None,
     parameters: Optional[Dict[str, Any]] = None,
     wait_for: Optional[Iterable[PrefectFuture[A, Async]]] = None,
     return_type: Literal["state", "result"] = "result",
-) -> Union[R, State, None]:
+) -> Union[R, None]:
     """
     Runs a flow against the API.

     We will most likely want to use this logic as a wrapper and return a coroutine for type inference.
     """

-    engine = FlowRunEngine[P, R](flow, parameters, flow_run)
+    engine = FlowRunEngine[P, R](flow, parameters, flow_run, flow_run_id)
+
+    # This is a context manager that keeps track of the state of the flow run.
     async with engine.start() as run:
-        # This is a context manager that keeps track of the state of the flow run.
         await run.begin_run()

         while run.is_running():
             async with run.enter_run_context():
                 try:
                     # This is where the flow is actually run.
-                    result = cast(
-                        R,
-                        await flow.fn(**(run.parameters or {})),  # type: ignore
-                    )
+                    call_args, call_kwargs = parameters_to_args_kwargs(
+                        flow.fn, run.parameters or {}
+                    )
+                    result = cast(R, await flow.fn(*call_args, **call_kwargs))  # type: ignore
                     # If the flow run is successful, finalize it.
                     await run.handle_success(result)

@@ -291,3 +445,36 @@ async def run_flow(
     if return_type == "state":
         return run.state
     return await run.result()
+
+
+def run_flow_sync(
+    flow: Flow[P, R],
+    flow_run: Optional[FlowRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture[A, Async]]] = None,
+    return_type: Literal["state", "result"] = "result",
+) -> Union[R, State, None]:
+    engine = FlowRunEngine[P, R](flow, parameters, flow_run)
+
+    # This is a context manager that keeps track of the state of the flow run.
+    with engine.start_sync() as run:
+        run_sync(run.begin_run())
+
+        while run.is_running():
+            with run.enter_run_context_sync():
+                try:
+                    # This is where the flow is actually run.
+                    call_args, call_kwargs = parameters_to_args_kwargs(
+                        flow.fn, run.parameters or {}
+                    )
+                    result = cast(R, flow.fn(*call_args, **call_kwargs))  # type: ignore
+                    # If the flow run is successful, finalize it.
+                    run_sync(run.handle_success(result))
+
+                except Exception as exc:
+                    # If the flow fails, and we have retries left, set the flow to retrying.
+                    run_sync(run.handle_exception(exc))
+
+    if return_type == "state":
+        return run.state
+    return run_sync(run.result())