prefect-client 2.18.0__py3-none-any.whl → 2.18.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_internal/schemas/fields.py +31 -12
- prefect/automations.py +162 -0
- prefect/blocks/core.py +1 -1
- prefect/blocks/notifications.py +2 -2
- prefect/blocks/system.py +2 -3
- prefect/client/orchestration.py +309 -30
- prefect/client/schemas/objects.py +11 -8
- prefect/client/schemas/sorting.py +9 -0
- prefect/client/utilities.py +25 -3
- prefect/concurrency/asyncio.py +11 -5
- prefect/concurrency/events.py +3 -3
- prefect/concurrency/services.py +1 -1
- prefect/concurrency/sync.py +9 -5
- prefect/deployments/deployments.py +27 -18
- prefect/deployments/runner.py +34 -26
- prefect/engine.py +3 -1
- prefect/events/actions.py +2 -1
- prefect/events/cli/automations.py +207 -46
- prefect/events/clients.py +53 -20
- prefect/events/filters.py +31 -4
- prefect/events/instrument.py +40 -40
- prefect/events/related.py +2 -1
- prefect/events/schemas/automations.py +52 -7
- prefect/events/schemas/deployment_triggers.py +16 -228
- prefect/events/schemas/events.py +18 -11
- prefect/events/schemas/labelling.py +1 -1
- prefect/events/utilities.py +1 -1
- prefect/events/worker.py +10 -7
- prefect/flows.py +42 -24
- prefect/input/actions.py +9 -9
- prefect/input/run_input.py +51 -37
- prefect/new_flow_engine.py +444 -0
- prefect/new_task_engine.py +488 -0
- prefect/results.py +3 -2
- prefect/runner/runner.py +3 -2
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +45 -4
- prefect/settings.py +47 -0
- prefect/states.py +25 -19
- prefect/tasks.py +146 -19
- prefect/utilities/asyncutils.py +41 -0
- prefect/utilities/engine.py +6 -4
- prefect/utilities/schema_tools/validation.py +1 -1
- prefect/workers/process.py +2 -1
- {prefect_client-2.18.0.dist-info → prefect_client-2.18.2.dist-info}/METADATA +1 -1
- {prefect_client-2.18.0.dist-info → prefect_client-2.18.2.dist-info}/RECORD +48 -46
- prefect/concurrency/common.py +0 -0
- {prefect_client-2.18.0.dist-info → prefect_client-2.18.2.dist-info}/LICENSE +0 -0
- {prefect_client-2.18.0.dist-info → prefect_client-2.18.2.dist-info}/WHEEL +0 -0
- {prefect_client-2.18.0.dist-info → prefect_client-2.18.2.dist-info}/top_level.txt +0 -0
prefect/input/run_input.py
CHANGED
@@ -58,7 +58,7 @@ async def receiver_flow():
     ```
 """
 
-
+from inspect import isclass
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -96,7 +96,7 @@ if HAS_PYDANTIC_V2:
     from prefect._internal.pydantic.v2_schema import create_v2_schema
 
 R = TypeVar("R", bound="RunInput")
-T = TypeVar("T")
+T = TypeVar("T", bound="object")
 
 Keyset = Dict[
     Union[Literal["description"], Literal["response"], Literal["schema"]], str
@@ -114,7 +114,8 @@ def keyset_from_paused_state(state: "State") -> Keyset:
     if not state.is_paused():
         raise RuntimeError(f"{state.type.value!r} is unsupported.")
 
-    base_key = f"{state.name.lower()}-{str(state.state_details.pause_key)}"
+    state_name = state.name or ""
+    base_key = f"{state_name.lower()}-{str(state.state_details.pause_key)}"
     return keyset_from_base_key(base_key)
 
 
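The hunk above makes the key derivation tolerant of a missing state name before calling `.lower()`. A minimal sketch of the guard in plain Python (the `base_key_for` helper is hypothetical, not Prefect's API):

from typing import Optional

def base_key_for(name: Optional[str], pause_key: str) -> str:
    # With the old f-string, name.lower() raised AttributeError when name was None.
    state_name = name or ""
    return f"{state_name.lower()}-{pause_key}"

assert base_key_for("Paused", "abc123") == "paused-abc123"
assert base_key_for(None, "abc123") == "-abc123"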
@@ -234,7 +235,7 @@ class RunInput(pydantic.BaseModel):
                 a flow run that requires input
             - kwargs (Any): the initial data to populate the subclass
         """
-        fields = {}
+        fields: Dict[str, Any] = {}
         for key, value in kwargs.items():
             fields[key] = (type(value), value)
         model = pydantic.create_model(cls.__name__, **fields, __base__=cls)
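For context, the annotated `fields` dict above is fed to `pydantic.create_model`, which accepts `(type, default)` tuples per field. A minimal, self-contained sketch of that pattern (the model name and field values are illustrative):

import pydantic

fields = {"name": (str, "Marvin"), "retries": (int, 3)}
ExampleInput = pydantic.create_model("ExampleInput", **fields)

instance = ExampleInput()
assert instance.name == "Marvin" and instance.retries == 3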
@@ -340,31 +341,34 @@ class AutomaticRunInput(RunInput, Generic[T]):
     def subclass_from_type(cls, _type: Type[T]) -> Type["AutomaticRunInput[T]"]:
         """
         Create a new `AutomaticRunInput` subclass from the given type.
+
+        This method uses the type's name as a key prefix to identify related
+        flow run inputs. This helps in ensuring that values saved under a type
+        (like List[int]) are retrievable under the generic type name (like "list").
         """
-        fields = {"value": (_type, ...)}
-
-        #
-        #
-        #
-        #
-        #
-        #
-        #
-
-
-
-
-
-
-
-
-
-
-        # automatic inputs sent to the flow run, rather than a specific
-        # type.
-        type_prefix = ""
+        fields: Dict[str, Any] = {"value": (_type, ...)}
+
+        # Explanation for using getattr for type name extraction:
+        # - "__name__": This is the usual attribute for getting the name of
+        # most types.
+        # - "_name": Used as a fallback, some type annotations in Python 3.9
+        # and earlier might only have this attribute instead of __name__.
+        # - If neither is available, defaults to an empty string to prevent
+        # errors, but typically we should find at least one valid name
+        # attribute. This will match all automatic inputs sent to the flow
+        # run, rather than a specific type.
+        #
+        # This approach ensures compatibility across Python versions and
+        # handles various edge cases in type annotations.
+
+        type_prefix: str = getattr(
+            _type, "__name__", getattr(_type, "_name", "")
+        ).lower()
+
         class_name = f"{type_prefix}AutomaticRunInput"
 
+        # Creating a new Pydantic model class dynamically with the name based
+        # on the type prefix.
         new_cls: Type["AutomaticRunInput"] = pydantic.create_model(
             class_name, **fields, __base__=AutomaticRunInput
         )
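The new `getattr` chain implements the docstring note above: try `__name__`, fall back to `_name`, and finally to an empty string. A minimal sketch of that fallback order using stand-in objects (the `SimpleNamespace` stand-ins are illustrative, not real typing internals):

from types import SimpleNamespace

def type_prefix_for(tp: object) -> str:
    # Same lookup order as subclass_from_type: __name__, then _name, then "".
    return getattr(tp, "__name__", getattr(tp, "_name", "")).lower()

class MyInput:
    pass

assert type_prefix_for(int) == "int"                              # ordinary classes expose __name__
assert type_prefix_for(MyInput) == "myinput"
assert type_prefix_for(SimpleNamespace(_name="List")) == "list"   # _name-only fallback
assert type_prefix_for(SimpleNamespace()) == ""                   # neither attribute present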
@@ -384,18 +388,19 @@ def run_input_subclass_from_type(
     """
    Create a new `RunInput` subclass from the given type.
     """
-    try:
+    if isclass(_type):
         if issubclass(_type, RunInput):
             return cast(Type[R], _type)
         elif issubclass(_type, pydantic.BaseModel):
             return cast(Type[R], RunInput.subclass_from_base_model_type(_type))
-    except TypeError:
-        pass
 
     # Could be something like a typing._GenericAlias or any other type that
     # isn't a `RunInput` subclass or `pydantic.BaseModel` subclass. Try passing
     # it to AutomaticRunInput to see if we can create a model from it.
-    return cast(
+    return cast(
+        Type[AutomaticRunInput[T]],
+        AutomaticRunInput.subclass_from_type(cast(Type[T], _type)),
+    )
 
 
 class GetInputHandler(Generic[R]):
@@ -425,7 +430,7 @@ class GetInputHandler(Generic[R]):
 
     def __next__(self) -> R:
         try:
-            return self.next()
+            return cast(R, self.next())
         except TimeoutError:
             if self.raise_timeout_error:
                 raise
@@ -502,9 +507,11 @@ async def _send_input(
     key_prefix: Optional[str] = None,
 ):
     if isinstance(run_input, RunInput):
-        _run_input = run_input
+        _run_input: RunInput = run_input
     else:
-        input_cls = run_input_subclass_from_type(
+        input_cls: Type[AutomaticRunInput] = run_input_subclass_from_type(
+            type(run_input)
+        )
         _run_input = input_cls(value=run_input)
 
     if key_prefix is None:
@@ -533,8 +540,8 @@ async def send_input(
 
 
 @overload
-def receive_input(
-    input_type: Type[R],
+def receive_input(  # type: ignore[overload-overlap]
+    input_type: Union[Type[R], pydantic.BaseModel],
     timeout: Optional[float] = 3600,
     poll_interval: float = 10,
     raise_timeout_error: bool = False,
@@ -561,7 +568,7 @@ def receive_input(
 
 
 def receive_input(
-    input_type: Union[Type[R], Type[T]],
+    input_type: Union[Type[R], Type[T], pydantic.BaseModel],
     timeout: Optional[float] = 3600,
     poll_interval: float = 10,
     raise_timeout_error: bool = False,
@@ -570,7 +577,14 @@ def receive_input(
     flow_run_id: Optional[UUID] = None,
     with_metadata: bool = False,
 ) -> Union[GetAutomaticInputHandler[T], GetInputHandler[R]]:
-    input_cls = run_input_subclass_from_type(input_type)
+    # The typing in this module is a bit complex, and at this point `mypy`
+    # thinks that `run_input_subclass_from_type` accepts a `Type[Never]` but
+    # the signature is the same as here:
+    # Union[Type[R], Type[T], pydantic.BaseModel],
+    # Seems like a possible mypy bug, so we'll ignore the type check here.
+    input_cls: Union[
+        Type[AutomaticRunInput[T]], Type[R]
+    ] = run_input_subclass_from_type(input_type)  # type: ignore[arg-type]
 
     if issubclass(input_cls, AutomaticRunInput):
         return input_cls.receive(
prefect/new_flow_engine.py
ADDED
@@ -0,0 +1,444 @@
+import asyncio
+import inspect
+from contextlib import AsyncExitStack, asynccontextmanager, contextmanager
+from dataclasses import dataclass
+from typing import (
+    Any,
+    Coroutine,
+    Dict,
+    Generic,
+    Iterable,
+    Literal,
+    Optional,
+    TypeVar,
+    Union,
+    cast,
+)
+
+import anyio
+import anyio._backends._asyncio
+from sniffio import AsyncLibraryNotFoundError
+from typing_extensions import ParamSpec
+
+from prefect import Flow, Task, get_client
+from prefect.client.orchestration import PrefectClient
+from prefect.client.schemas import FlowRun, TaskRun
+from prefect.client.schemas.filters import FlowRunFilter
+from prefect.client.schemas.sorting import FlowRunSort
+from prefect.context import FlowRunContext
+from prefect.futures import PrefectFuture, resolve_futures_to_states
+from prefect.logging.loggers import flow_run_logger
+from prefect.results import ResultFactory
+from prefect.states import (
+    Pending,
+    Running,
+    State,
+    exception_to_failed_state,
+    return_value_to_state,
+)
+from prefect.utilities.asyncutils import A, Async, run_sync
+from prefect.utilities.callables import parameters_to_args_kwargs
+from prefect.utilities.engine import (
+    _dynamic_key_for_task_run,
+    _resolve_custom_flow_run_name,
+    collect_task_run_inputs,
+    propose_state,
+)
+
+P = ParamSpec("P")
+R = TypeVar("R")
+
+
+@dataclass
+class FlowRunEngine(Generic[P, R]):
+    flow: Union[Flow[P, R], Flow[P, Coroutine[Any, Any, R]]]
+    parameters: Optional[Dict[str, Any]] = None
+    flow_run: Optional[FlowRun] = None
+    _is_started: bool = False
+    _client: Optional[PrefectClient] = None
+    short_circuit: bool = False
+
+    def __post_init__(self):
+        if self.parameters is None:
+            self.parameters = {}
+
+    @property
+    def client(self) -> PrefectClient:
+        if not self._is_started or self._client is None:
+            raise RuntimeError("Engine has not started.")
+        return self._client
+
+    @property
+    def state(self) -> State:
+        return self.flow_run.state  # type: ignore
+
+    async def begin_run(self) -> State:
+        new_state = Running()
+        state = await self.set_state(new_state)
+        while state.is_pending():
+            await asyncio.sleep(1)
+            state = await self.set_state(new_state)
+        return state
+
+    async def set_state(self, state: State) -> State:
+        """ """
+        # prevents any state-setting activity
+        if self.short_circuit:
+            return self.state
+
+        state = await propose_state(self.client, state, flow_run_id=self.flow_run.id)  # type: ignore
+        self.flow_run.state = state  # type: ignore
+        self.flow_run.state_name = state.name  # type: ignore
+        self.flow_run.state_type = state.type  # type: ignore
+        return state
+
+    async def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
+        _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True)  # type: ignore
+        # state.result is a `sync_compatible` function that may or may not return an awaitable
+        # depending on whether the parent frame is sync or not
+        if inspect.isawaitable(_result):
+            _result = await _result
+        return _result
+
+    async def handle_success(self, result: R) -> R:
+        result_factory = getattr(FlowRunContext.get(), "result_factory", None)
+        if result_factory is None:
+            raise ValueError("Result factory is not set")
+        terminal_state = await return_value_to_state(
+            await resolve_futures_to_states(result),
+            result_factory=result_factory,
+        )
+        await self.set_state(terminal_state)
+        return result
+
+    async def handle_exception(
+        self,
+        exc: Exception,
+        msg: Optional[str] = None,
+        result_factory: Optional[ResultFactory] = None,
+    ) -> State:
+        context = FlowRunContext.get()
+        state = await exception_to_failed_state(
+            exc,
+            message=msg or "Flow run encountered an exception:",
+            result_factory=result_factory or getattr(context, "result_factory", None),
+        )
+        state = await self.set_state(state)
+        if self.state.is_scheduled():
+            state = await self.set_state(Running())
+        return state
+
+    async def load_subflow_run(
+        self, parent_task_run: TaskRun, client: PrefectClient, context: FlowRunContext
+    ) -> Union[FlowRun, None]:
+        """
+        This method attempts to load an existing flow run for a subflow task
+        run, if appropriate.
+
+        If the parent task run is in a final but not COMPLETED state, and not
+        being rerun, then we attempt to load an existing flow run instead of
+        creating a new one. This will prevent the engine from running the
+        subflow again.
+
+        If no existing flow run is found, or if the subflow should be rerun,
+        then no flow run is returned.
+        """
+
+        # check if the parent flow run is rerunning
+        rerunning = (
+            context.flow_run.run_count > 1
+            if getattr(context, "flow_run", None)
+            and isinstance(context.flow_run, FlowRun)
+            else False
+        )
+
+        # if the parent task run is in a final but not completed state, and
+        # not rerunning, then retrieve the most recent flow run instead of
+        # creating a new one. This effectively loads a cached flow run for
+        # situations where we are confident the flow should not be run
+        # again.
+        assert isinstance(parent_task_run.state, State)
+        if parent_task_run.state.is_final() and not (
+            rerunning and not parent_task_run.state.is_completed()
+        ):
+            # return the most recent flow run, if it exists
+            flow_runs = await client.read_flow_runs(
+                flow_run_filter=FlowRunFilter(
+                    parent_task_run_id={"any_": [parent_task_run.id]}
+                ),
+                sort=FlowRunSort.EXPECTED_START_TIME_ASC,
+                limit=1,
+            )
+            if flow_runs:
+                return flow_runs[-1]
+
+    async def create_subflow_task_run(
+        self, client: PrefectClient, context: FlowRunContext
+    ) -> TaskRun:
+        """
+        Adds a task to a parent flow run that represents the execution of a subflow run.
+
+        The task run is referred to as the "parent task run" of the subflow and will be kept
+        in sync with the subflow run's state by the orchestration engine.
+        """
+        dummy_task = Task(
+            name=self.flow.name, fn=self.flow.fn, version=self.flow.version
+        )
+        task_inputs = {
+            k: await collect_task_run_inputs(v)
+            for k, v in (self.parameters or {}).items()
+        }
+        parent_task_run = await client.create_task_run(
+            task=dummy_task,
+            flow_run_id=(
+                context.flow_run.id
+                if getattr(context, "flow_run", None)
+                and isinstance(context.flow_run, FlowRun)
+                else None
+            ),
+            dynamic_key=_dynamic_key_for_task_run(context, dummy_task),  # type: ignore
+            task_inputs=task_inputs,  # type: ignore
+            state=Pending(),
+        )
+        return parent_task_run
+
+    async def create_flow_run(self, client: PrefectClient) -> FlowRun:
+        flow_run_ctx = FlowRunContext.get()
+        parameters = self.parameters or {}
+
+        parent_task_run = None
+
+        # this is a subflow run
+        if flow_run_ctx:
+            # get the parent task run
+            parent_task_run = await self.create_subflow_task_run(
+                client=client, context=flow_run_ctx
+            )
+
+            # check if there is already a flow run for this subflow
+            if subflow_run := await self.load_subflow_run(
+                parent_task_run=parent_task_run, client=client, context=flow_run_ctx
+            ):
+                return subflow_run
+
+        try:
+            flow_run_name = _resolve_custom_flow_run_name(
+                flow=self.flow, parameters=parameters
+            )
+        except TypeError:
+            flow_run_name = None
+
+        flow_run = await client.create_flow_run(
+            flow=self.flow,
+            name=flow_run_name,
+            parameters=self.flow.serialize_parameters(parameters),
+            state=Pending(),
+            parent_task_run_id=getattr(parent_task_run, "id", None),
+        )
+        return flow_run
+
+    @asynccontextmanager
+    async def enter_run_context(self, client: Optional[PrefectClient] = None):
+        if client is None:
+            client = self.client
+        if not self.flow_run:
+            raise ValueError("Flow run not set")
+
+        self.flow_run = await client.read_flow_run(self.flow_run.id)
+        task_runner = self.flow.task_runner.duplicate()
+
+        async with AsyncExitStack() as stack:
+            task_runner = await stack.enter_async_context(
+                self.flow.task_runner.duplicate().start()
+            )
+            stack.enter_context(
+                FlowRunContext(
+                    flow=self.flow,
+                    log_prints=self.flow.log_prints or False,
+                    flow_run=self.flow_run,
+                    parameters=self.parameters,
+                    client=client,
+                    background_tasks=anyio.create_task_group(),
+                    result_factory=await ResultFactory.from_flow(self.flow),
+                    task_runner=task_runner,
+                )
+            )
+            self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
+            yield
+
+    @contextmanager
+    def enter_run_context_sync(self, client: Optional[PrefectClient] = None):
+        if client is None:
+            client = self.client
+        if not self.flow_run:
+            raise ValueError("Flow run not set")
+
+        self.flow_run = run_sync(client.read_flow_run(self.flow_run.id))
+
+        # if running in a completely synchronous frame, anyio will not detect the
+        # backend to use for the task group
+        try:
+            task_group = anyio.create_task_group()
+        except AsyncLibraryNotFoundError:
+            task_group = anyio._backends._asyncio.TaskGroup()
+
+        with FlowRunContext(
+            flow=self.flow,
+            log_prints=self.flow.log_prints or False,
+            flow_run=self.flow_run,
+            parameters=self.parameters,
+            client=client,
+            background_tasks=task_group,
+            result_factory=run_sync(ResultFactory.from_flow(self.flow)),
+            task_runner=self.flow.task_runner,
+        ):
+            self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
+            yield
+
+    @asynccontextmanager
+    async def start(self):
+        """
+        Enters a client context and creates a flow run if needed.
+        """
+        async with get_client() as client:
+            self._client = client
+            self._is_started = True
+
+            if not self.flow_run:
+                self.flow_run = await self.create_flow_run(client)
+
+            # validate prior to context so that context receives validated params
+            if self.flow.should_validate_parameters:
+                try:
+                    self.parameters = self.flow.validate_parameters(
+                        self.parameters or {}
+                    )
+                except Exception as exc:
+                    await self.handle_exception(
+                        exc,
+                        msg="Validation of flow parameters failed with error",
+                        result_factory=await ResultFactory.from_flow(self.flow),
+                    )
+                    self.short_circuit = True
+            try:
+                yield self
+            finally:
+                self._is_started = False
+                self._client = None
+
+    @contextmanager
+    def start_sync(self):
+        """
+        Enters a client context and creates a flow run if needed.
+        """
+
+        client = get_client()
+        run_sync(client.__aenter__())
+        self._client = client
+        self._is_started = True
+
+        if not self.flow_run:
+            self.flow_run = run_sync(self.create_flow_run(client))
+
+        # validate prior to context so that context receives validated params
+        if self.flow.should_validate_parameters:
+            try:
+                self.parameters = self.flow.validate_parameters(self.parameters or {})
+            except Exception as exc:
+                run_sync(
+                    self.handle_exception(
+                        exc,
+                        msg="Validation of flow parameters failed with error",
+                        result_factory=run_sync(ResultFactory.from_flow(self.flow)),
+                    )
+                )
+                self.short_circuit = True
+        try:
+            yield self
+        finally:
+            # quickly close client
+            run_sync(client.__aexit__(None, None, None))
+            self._is_started = False
+            self._client = None
+
+    def is_running(self) -> bool:
+        if getattr(self, "flow_run", None) is None:
+            return False
+        return getattr(self, "flow_run").state.is_running()
+
+    def is_pending(self) -> bool:
+        if getattr(self, "flow_run", None) is None:
+            return False  # TODO: handle this differently?
+        return getattr(self, "flow_run").state.is_pending()
+
+
+async def run_flow(
+    flow: Flow[P, Coroutine[Any, Any, R]],
+    flow_run: Optional[FlowRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture[A, Async]]] = None,
+    return_type: Literal["state", "result"] = "result",
+) -> Union[R, None]:
+    """
+    Runs a flow against the API.
+
+    We will most likely want to use this logic as a wrapper and return a coroutine for type inference.
+    """
+
+    engine = FlowRunEngine[P, R](flow, parameters, flow_run)
+
+    # This is a context manager that keeps track of the state of the flow run.
+    async with engine.start() as run:
+        await run.begin_run()
+
+        while run.is_running():
+            async with run.enter_run_context():
+                try:
+                    # This is where the flow is actually run.
+                    call_args, call_kwargs = parameters_to_args_kwargs(
+                        flow.fn, run.parameters or {}
+                    )
+                    result = cast(R, await flow.fn(*call_args, **call_kwargs))  # type: ignore
+                    # If the flow run is successful, finalize it.
+                    await run.handle_success(result)
+
+                except Exception as exc:
+                    # If the flow fails, and we have retries left, set the flow to retrying.
+                    await run.handle_exception(exc)
+
+        if return_type == "state":
+            return run.state
+        return await run.result()
+
+
+def run_flow_sync(
+    flow: Flow[P, R],
+    flow_run: Optional[FlowRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture[A, Async]]] = None,
+    return_type: Literal["state", "result"] = "result",
+) -> Union[R, State, None]:
+    engine = FlowRunEngine[P, R](flow, parameters, flow_run)
+
+    # This is a context manager that keeps track of the state of the flow run.
+    with engine.start_sync() as run:
+        run_sync(run.begin_run())
+
+        while run.is_running():
+            with run.enter_run_context_sync():
+                try:
+                    # This is where the flow is actually run.
+                    call_args, call_kwargs = parameters_to_args_kwargs(
+                        flow.fn, run.parameters or {}
+                    )
+                    result = cast(R, flow.fn(*call_args, **call_kwargs))  # type: ignore
+                    # If the flow run is successful, finalize it.
+                    run_sync(run.handle_success(result))
+
+                except Exception as exc:
+                    # If the flow fails, and we have retries left, set the flow to retrying.
+                    run_sync(run.handle_exception(exc))
+
+        if return_type == "state":
+            return run.state
+        return run_sync(run.result())