prefect-client 2.18.3__py3-none-any.whl → 2.19.1__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. prefect/__init__.py +1 -15
  2. prefect/_internal/compatibility/experimental.py +11 -2
  3. prefect/_internal/concurrency/cancellation.py +2 -0
  4. prefect/_internal/schemas/validators.py +10 -0
  5. prefect/_vendor/starlette/testclient.py +1 -1
  6. prefect/blocks/notifications.py +6 -6
  7. prefect/client/base.py +244 -1
  8. prefect/client/cloud.py +4 -2
  9. prefect/client/orchestration.py +515 -106
  10. prefect/client/schemas/actions.py +58 -8
  11. prefect/client/schemas/objects.py +15 -1
  12. prefect/client/schemas/responses.py +19 -0
  13. prefect/client/schemas/schedules.py +1 -1
  14. prefect/client/utilities.py +2 -2
  15. prefect/concurrency/asyncio.py +34 -4
  16. prefect/concurrency/sync.py +40 -6
  17. prefect/context.py +2 -2
  18. prefect/engine.py +2 -2
  19. prefect/events/clients.py +2 -2
  20. prefect/flows.py +91 -17
  21. prefect/infrastructure/process.py +0 -17
  22. prefect/logging/formatters.py +1 -4
  23. prefect/new_flow_engine.py +137 -168
  24. prefect/new_task_engine.py +137 -202
  25. prefect/runner/__init__.py +1 -1
  26. prefect/runner/runner.py +2 -107
  27. prefect/settings.py +21 -0
  28. prefect/tasks.py +76 -57
  29. prefect/types/__init__.py +27 -5
  30. prefect/utilities/annotations.py +1 -8
  31. prefect/utilities/asyncutils.py +4 -0
  32. prefect/utilities/engine.py +106 -1
  33. prefect/utilities/schema_tools/__init__.py +6 -1
  34. prefect/utilities/schema_tools/validation.py +25 -8
  35. prefect/utilities/timeout.py +34 -0
  36. prefect/workers/base.py +7 -3
  37. prefect/workers/process.py +0 -17
  38. {prefect_client-2.18.3.dist-info → prefect_client-2.19.1.dist-info}/METADATA +1 -1
  39. {prefect_client-2.18.3.dist-info → prefect_client-2.19.1.dist-info}/RECORD +42 -41
  40. {prefect_client-2.18.3.dist-info → prefect_client-2.19.1.dist-info}/LICENSE +0 -0
  41. {prefect_client-2.18.3.dist-info → prefect_client-2.19.1.dist-info}/WHEEL +0 -0
  42. {prefect_client-2.18.3.dist-info → prefect_client-2.19.1.dist-info}/top_level.txt +0 -0
prefect/new_flow_engine.py

@@ -1,8 +1,9 @@
- import asyncio
  import inspect
+ import logging
  import os
- from contextlib import AsyncExitStack, asynccontextmanager, contextmanager
- from dataclasses import dataclass
+ import time
+ from contextlib import contextmanager
+ from dataclasses import dataclass, field
  from typing import (
      Any,
      Coroutine,
@@ -24,47 +25,48 @@ from sniffio import AsyncLibraryNotFoundError
  from typing_extensions import ParamSpec

  from prefect import Task, get_client
- from prefect.client.orchestration import PrefectClient
+ from prefect.client.orchestration import SyncPrefectClient
  from prefect.client.schemas import FlowRun, TaskRun
  from prefect.client.schemas.filters import FlowRunFilter
  from prefect.client.schemas.sorting import FlowRunSort
  from prefect.context import FlowRunContext
  from prefect.deployments import load_flow_from_flow_run
+ from prefect.exceptions import Abort, Pause
  from prefect.flows import Flow, load_flow_from_entrypoint
  from prefect.futures import PrefectFuture, resolve_futures_to_states
- from prefect.logging.loggers import flow_run_logger
+ from prefect.logging.loggers import flow_run_logger, get_logger
  from prefect.results import ResultFactory
  from prefect.states import (
      Pending,
      Running,
      State,
+     exception_to_crashed_state,
      exception_to_failed_state,
      return_value_to_state,
  )
  from prefect.utilities.asyncutils import A, Async, run_sync
  from prefect.utilities.callables import parameters_to_args_kwargs
  from prefect.utilities.engine import (
-     _dynamic_key_for_task_run,
      _resolve_custom_flow_run_name,
-     collect_task_run_inputs,
-     propose_state,
+     propose_state_sync,
  )

  P = ParamSpec("P")
  R = TypeVar("R")


- async def load_flow_and_flow_run(flow_run_id: UUID) -> Tuple[FlowRun, Flow]:
+ def load_flow_and_flow_run(flow_run_id: UUID) -> Tuple[FlowRun, Flow]:
      ## TODO: add error handling to update state and log tracebacks
      entrypoint = os.environ.get("PREFECT__FLOW_ENTRYPOINT")

-     async with get_client() as client:
-         flow_run = await client.read_flow_run(flow_run_id)
-         flow = (
-             load_flow_from_entrypoint(entrypoint)
-             if entrypoint
-             else await load_flow_from_flow_run(flow_run, client=client)
-         )
+     client = get_client(sync_client=True)
+     flow_run = client.read_flow_run(flow_run_id)
+     flow = (
+         load_flow_from_entrypoint(entrypoint)
+         if entrypoint
+         else run_sync(load_flow_from_flow_run(flow_run, client=client))
+     )
+
      return flow_run, flow


@@ -74,8 +76,9 @@ class FlowRunEngine(Generic[P, R]):
      parameters: Optional[Dict[str, Any]] = None
      flow_run: Optional[FlowRun] = None
      flow_run_id: Optional[UUID] = None
+     logger: logging.Logger = field(default_factory=lambda: get_logger("engine"))
      _is_started: bool = False
-     _client: Optional[PrefectClient] = None
+     _client: Optional[SyncPrefectClient] = None
      short_circuit: bool = False

      def __post_init__(self):
@@ -86,7 +89,7 @@ class FlowRunEngine(Generic[P, R]):
              self.parameters = {}

      @property
-     def client(self) -> PrefectClient:
+     def client(self) -> SyncPrefectClient:
          if not self._is_started or self._client is None:
              raise RuntimeError("Engine has not started.")
          return self._client
@@ -95,64 +98,80 @@ class FlowRunEngine(Generic[P, R]):
      def state(self) -> State:
          return self.flow_run.state # type: ignore

-     async def begin_run(self) -> State:
+     def begin_run(self) -> State:
          new_state = Running()
-         state = await self.set_state(new_state)
+         state = self.set_state(new_state)
          while state.is_pending():
-             await asyncio.sleep(1)
-             state = await self.set_state(new_state)
+             time.sleep(0.2)
+             state = self.set_state(new_state)
          return state

-     async def set_state(self, state: State) -> State:
+     def set_state(self, state: State, force: bool = False) -> State:
          """ """
          # prevents any state-setting activity
          if self.short_circuit:
              return self.state

-         state = await propose_state(self.client, state, flow_run_id=self.flow_run.id) # type: ignore
+         state = propose_state_sync(
+             self.client, state, flow_run_id=self.flow_run.id, force=force
+         ) # type: ignore
          self.flow_run.state = state # type: ignore
          self.flow_run.state_name = state.name # type: ignore
          self.flow_run.state_type = state.type # type: ignore
          return state

-     async def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
+     def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
          _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True) # type: ignore
          # state.result is a `sync_compatible` function that may or may not return an awaitable
          # depending on whether the parent frame is sync or not
          if inspect.isawaitable(_result):
-             _result = await _result
+             _result = run_sync(_result)
          return _result

-     async def handle_success(self, result: R) -> R:
+     def handle_success(self, result: R) -> R:
          result_factory = getattr(FlowRunContext.get(), "result_factory", None)
          if result_factory is None:
              raise ValueError("Result factory is not set")
-         terminal_state = await return_value_to_state(
-             await resolve_futures_to_states(result),
-             result_factory=result_factory,
+         terminal_state = run_sync(
+             return_value_to_state(
+                 run_sync(resolve_futures_to_states(result)),
+                 result_factory=result_factory,
+             )
          )
-         await self.set_state(terminal_state)
+         self.set_state(terminal_state)
          return result

-     async def handle_exception(
+     def handle_exception(
          self,
          exc: Exception,
          msg: Optional[str] = None,
          result_factory: Optional[ResultFactory] = None,
      ) -> State:
          context = FlowRunContext.get()
-         state = await exception_to_failed_state(
-             exc,
-             message=msg or "Flow run encountered an exception:",
-             result_factory=result_factory or getattr(context, "result_factory", None),
+         state = run_sync(
+             exception_to_failed_state(
+                 exc,
+                 message=msg or "Flow run encountered an exception:",
+                 result_factory=result_factory
+                 or getattr(context, "result_factory", None),
+             )
          )
-         state = await self.set_state(state)
+         state = self.set_state(state)
          if self.state.is_scheduled():
-             state = await self.set_state(Running())
+             state = self.set_state(Running())
          return state

-     async def load_subflow_run(
-         self, parent_task_run: TaskRun, client: PrefectClient, context: FlowRunContext
+     def handle_crash(self, exc: BaseException) -> None:
+         state = run_sync(exception_to_crashed_state(exc))
+         self.logger.error(f"Crash detected! {state.message}")
+         self.logger.debug("Crash details:", exc_info=exc)
+         self.set_state(state, force=True)
+
+     def load_subflow_run(
+         self,
+         parent_task_run: TaskRun,
+         client: SyncPrefectClient,
+         context: FlowRunContext,
      ) -> Union[FlowRun, None]:
          """
          This method attempts to load an existing flow run for a subflow task
@@ -185,7 +204,7 @@ class FlowRunEngine(Generic[P, R]):
              rerunning and not parent_task_run.state.is_completed()
          ):
              # return the most recent flow run, if it exists
-             flow_runs = await client.read_flow_runs(
+             flow_runs = client.read_flow_runs(
                  flow_run_filter=FlowRunFilter(
                      parent_task_run_id={"any_": [parent_task_run.id]}
                  ),
@@ -195,37 +214,7 @@ class FlowRunEngine(Generic[P, R]):
              if flow_runs:
                  return flow_runs[-1]

-     async def create_subflow_task_run(
-         self, client: PrefectClient, context: FlowRunContext
-     ) -> TaskRun:
-         """
-         Adds a task to a parent flow run that represents the execution of a subflow run.
-
-         The task run is referred to as the "parent task run" of the subflow and will be kept
-         in sync with the subflow run's state by the orchestration engine.
-         """
-         dummy_task = Task(
-             name=self.flow.name, fn=self.flow.fn, version=self.flow.version
-         )
-         task_inputs = {
-             k: await collect_task_run_inputs(v)
-             for k, v in (self.parameters or {}).items()
-         }
-         parent_task_run = await client.create_task_run(
-             task=dummy_task,
-             flow_run_id=(
-                 context.flow_run.id
-                 if getattr(context, "flow_run", None)
-                 and isinstance(context.flow_run, FlowRun)
-                 else None
-             ),
-             dynamic_key=_dynamic_key_for_task_run(context, dummy_task), # type: ignore
-             task_inputs=task_inputs, # type: ignore
-             state=Pending(),
-         )
-         return parent_task_run
-
-     async def create_flow_run(self, client: PrefectClient) -> FlowRun:
+     def create_flow_run(self, client: SyncPrefectClient) -> FlowRun:
          flow_run_ctx = FlowRunContext.get()
          parameters = self.parameters or {}

@@ -233,13 +222,21 @@ class FlowRunEngine(Generic[P, R]):

          # this is a subflow run
          if flow_run_ctx:
-             # get the parent task run
-             parent_task_run = await self.create_subflow_task_run(
-                 client=client, context=flow_run_ctx
+             # add a task to a parent flow run that represents the execution of a subflow run
+             # reuse the logic from the TaskRunEngine to ensure parents are created correctly
+             parent_task = Task(
+                 name=self.flow.name, fn=self.flow.fn, version=self.flow.version
+             )
+             parent_task_run = run_sync(
+                 parent_task.create_run(
+                     client=self.client,
+                     flow_run_context=flow_run_ctx,
+                     parameters=self.parameters,
+                 )
              )

              # check if there is already a flow run for this subflow
-             if subflow_run := await self.load_subflow_run(
+             if subflow_run := self.load_subflow_run(
                  parent_task_run=parent_task_run, client=client, context=flow_run_ctx
              ):
                  return subflow_run
@@ -251,7 +248,7 @@ class FlowRunEngine(Generic[P, R]):
          except TypeError:
              flow_run_name = None

-         flow_run = await client.create_flow_run(
+         flow_run = client.create_flow_run(
              flow=self.flow,
              name=flow_run_name,
              parameters=self.flow.serialize_parameters(parameters),
@@ -260,43 +257,14 @@ class FlowRunEngine(Generic[P, R]):
          )
          return flow_run

-     @asynccontextmanager
-     async def enter_run_context(self, client: Optional[PrefectClient] = None):
-         if client is None:
-             client = self.client
-         if not self.flow_run:
-             raise ValueError("Flow run not set")
-
-         self.flow_run = await client.read_flow_run(self.flow_run.id)
-         task_runner = self.flow.task_runner.duplicate()
-
-         async with AsyncExitStack() as stack:
-             task_runner = await stack.enter_async_context(
-                 self.flow.task_runner.duplicate().start()
-             )
-             stack.enter_context(
-                 FlowRunContext(
-                     flow=self.flow,
-                     log_prints=self.flow.log_prints or False,
-                     flow_run=self.flow_run,
-                     parameters=self.parameters,
-                     client=client,
-                     background_tasks=anyio.create_task_group(),
-                     result_factory=await ResultFactory.from_flow(self.flow),
-                     task_runner=task_runner,
-                 )
-             )
-             self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
-             yield
-
      @contextmanager
-     def enter_run_context_sync(self, client: Optional[PrefectClient] = None):
+     def enter_run_context(self, client: Optional[SyncPrefectClient] = None):
          if client is None:
              client = self.client
          if not self.flow_run:
              raise ValueError("Flow run not set")

-         self.flow_run = run_sync(client.read_flow_run(self.flow_run.id))
+         self.flow_run = client.read_flow_run(self.flow_run.id)

          # if running in a completely synchronous frame, anyio will not detect the
          # backend to use for the task group
@@ -313,34 +281,41 @@ class FlowRunEngine(Generic[P, R]):
              client=client,
              background_tasks=task_group,
              result_factory=run_sync(ResultFactory.from_flow(self.flow)),
-             task_runner=self.flow.task_runner,
+             task_runner=self.flow.task_runner.duplicate(),
          ):
-             self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
-             yield
+             # set the logger to the flow run logger
+             current_logger = self.logger
+             try:
+                 self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
+                 yield
+             finally:
+                 self.logger = current_logger

-     @asynccontextmanager
-     async def start(self):
+     @contextmanager
+     def start(self):
          """
          Enters a client context and creates a flow run if needed.
          """
-         async with get_client() as client:
+
+         with get_client(sync_client=True) as client:
              self._client = client
              self._is_started = True

              # this conditional is engaged whenever a run is triggered via deployment
              if self.flow_run_id and not self.flow:
-                 self.flow_run = await client.read_flow_run(self.flow_run_id)
+                 self.flow_run = client.read_flow_run(self.flow_run_id)
                  try:
-                     self.flow = await self.load_flow(client)
+                     self.flow = self.load_flow(client)
                  except Exception as exc:
-                     await self.handle_exception(
+                     self.handle_exception(
                          exc,
                          msg="Failed to load flow from entrypoint.",
                      )
                      self.short_circuit = True

              if not self.flow_run:
-                 self.flow_run = await self.create_flow_run(client)
+                 self.flow_run = self.create_flow_run(client)
+                 self.logger.debug(f'Created flow run "{self.flow_run.id}"')

              # validate prior to context so that context receives validated params
              if self.flow.should_validate_parameters:
@@ -349,53 +324,27 @@ class FlowRunEngine(Generic[P, R]):
                          self.parameters or {}
                      )
                  except Exception as exc:
-                     await self.handle_exception(
+                     self.handle_exception(
                          exc,
                          msg="Validation of flow parameters failed with error",
-                         result_factory=await ResultFactory.from_flow(self.flow),
+                         result_factory=run_sync(ResultFactory.from_flow(self.flow)),
                      )
                      self.short_circuit = True
              try:
                  yield self
+             except Exception:
+                 # regular exceptions are caught and re-raised to the user
+                 raise
+             except (Abort, Pause):
+                 raise
+             except BaseException as exc:
+                 # BaseExceptions are caught and handled as crashes
+                 self.handle_crash(exc)
+                 raise
              finally:
                  self._is_started = False
                  self._client = None

-     @contextmanager
-     def start_sync(self):
-         """
-         Enters a client context and creates a flow run if needed.
-         """
-
-         client = get_client()
-         run_sync(client.__aenter__())
-         self._client = client
-         self._is_started = True
-
-         if not self.flow_run:
-             self.flow_run = run_sync(self.create_flow_run(client))
-
-         # validate prior to context so that context receives validated params
-         if self.flow.should_validate_parameters:
-             try:
-                 self.parameters = self.flow.validate_parameters(self.parameters or {})
-             except Exception as exc:
-                 run_sync(
-                     self.handle_exception(
-                         exc,
-                         msg="Validation of flow parameters failed with error",
-                         result_factory=run_sync(ResultFactory.from_flow(self.flow)),
-                     )
-                 )
-                 self.short_circuit = True
-         try:
-             yield self
-         finally:
-             # quickly close client
-             run_sync(client.__aexit__(None, None, None))
-             self._is_started = False
-             self._client = None
-
      def is_running(self) -> bool:
          if getattr(self, "flow_run", None) is None:
              return False
@@ -407,7 +356,7 @@ class FlowRunEngine(Generic[P, R]):
          return getattr(self, "flow_run").state.is_pending()


- async def run_flow(
+ async def run_flow_async(
      flow: Optional[Flow[P, Coroutine[Any, Any, R]]] = None,
      flow_run: Optional[FlowRun] = None,
      flow_run_id: Optional[UUID] = None,
@@ -424,11 +373,11 @@ async def run_flow(
      engine = FlowRunEngine[P, R](flow, parameters, flow_run, flow_run_id)

      # This is a context manager that keeps track of the state of the flow run.
-     async with engine.start() as run:
-         await run.begin_run()
+     with engine.start() as run:
+         run.begin_run()

          while run.is_running():
-             async with run.enter_run_context():
+             with run.enter_run_context():
                  try:
                      # This is where the flow is actually run.
                      call_args, call_kwargs = parameters_to_args_kwargs(
@@ -436,15 +385,15 @@ async def run_flow(
                      )
                      result = cast(R, await flow.fn(*call_args, **call_kwargs)) # type: ignore
                      # If the flow run is successful, finalize it.
-                     await run.handle_success(result)
+                     run.handle_success(result)

                  except Exception as exc:
                      # If the flow fails, and we have retries left, set the flow to retrying.
-                     await run.handle_exception(exc)
+                     run.handle_exception(exc)

      if return_type == "state":
          return run.state
-     return await run.result()
+     return run.result()


  def run_flow_sync(
@@ -457,11 +406,11 @@ def run_flow_sync(
      engine = FlowRunEngine[P, R](flow, parameters, flow_run)

      # This is a context manager that keeps track of the state of the flow run.
-     with engine.start_sync() as run:
-         run_sync(run.begin_run())
+     with engine.start() as run:
+         run.begin_run()

          while run.is_running():
-             with run.enter_run_context_sync():
+             with run.enter_run_context():
                  try:
                      # This is where the flow is actually run.
                      call_args, call_kwargs = parameters_to_args_kwargs(
@@ -469,12 +418,32 @@ def run_flow_sync(
                      )
                      result = cast(R, flow.fn(*call_args, **call_kwargs)) # type: ignore
                      # If the flow run is successful, finalize it.
-                     run_sync(run.handle_success(result))
+                     run.handle_success(result)

                  except Exception as exc:
                      # If the flow fails, and we have retries left, set the flow to retrying.
-                     run_sync(run.handle_exception(exc))
+                     run.handle_exception(exc)

      if return_type == "state":
          return run.state
-     return run_sync(run.result())
+     return run.result()
+
+
+ def run_flow(
+     flow: Flow[P, R],
+     flow_run: Optional[FlowRun] = None,
+     parameters: Optional[Dict[str, Any]] = None,
+     wait_for: Optional[Iterable[PrefectFuture[A, Async]]] = None,
+     return_type: Literal["state", "result"] = "result",
+ ) -> Union[R, State, None]:
+     kwargs = dict(
+         flow=flow,
+         flow_run=flow_run,
+         parameters=parameters,
+         wait_for=wait_for,
+         return_type=return_type,
+     )
+     if flow.isasync:
+         return run_flow_async(**kwargs)
+     else:
+         return run_flow_sync(**kwargs)
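
The new run_flow entry point added at the end of this file dispatches to run_flow_sync or run_flow_async depending on whether the flow function is a coroutine (Flow.isasync). The sketch below is illustrative only and is not part of the package; ToyFlow is a hypothetical stand-in for Prefect's Flow object, kept just detailed enough to show the same dispatch pattern.

    import asyncio
    import inspect
    from typing import Any, Callable


    class ToyFlow:
        """Hypothetical stand-in for a flow object; only what the dispatch needs."""

        def __init__(self, fn: Callable[..., Any]):
            self.fn = fn
            # mirrors Flow.isasync: True when the wrapped function is a coroutine function
            self.isasync = inspect.iscoroutinefunction(fn)


    def run_flow_sync(flow: ToyFlow, **parameters: Any) -> Any:
        # the synchronous engine calls the function directly
        return flow.fn(**parameters)


    async def run_flow_async(flow: ToyFlow, **parameters: Any) -> Any:
        # the asynchronous engine awaits the coroutine
        return await flow.fn(**parameters)


    def run_flow(flow: ToyFlow, **parameters: Any) -> Any:
        # same branching as the run_flow wrapper in this diff: async flows are
        # handed to the async engine (which returns a coroutine for the caller
        # to await), everything else runs synchronously
        if flow.isasync:
            return run_flow_async(flow, **parameters)
        return run_flow_sync(flow, **parameters)


    if __name__ == "__main__":
        print(run_flow(ToyFlow(lambda x: x + 1), x=1))                  # 2
        print(asyncio.run(run_flow(ToyFlow(asyncio.sleep), delay=0)))   # None

The real engine does considerably more in each branch (state proposal, parameter validation, crash handling), but the branching on isasync is the same.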