prefect-client 3.1.5__py3-none-any.whl → 3.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. prefect/__init__.py +3 -0
  2. prefect/_experimental/__init__.py +0 -0
  3. prefect/_experimental/lineage.py +181 -0
  4. prefect/_internal/compatibility/async_dispatch.py +38 -9
  5. prefect/_internal/compatibility/migration.py +1 -1
  6. prefect/_internal/concurrency/api.py +52 -52
  7. prefect/_internal/concurrency/calls.py +59 -35
  8. prefect/_internal/concurrency/cancellation.py +34 -18
  9. prefect/_internal/concurrency/event_loop.py +7 -6
  10. prefect/_internal/concurrency/threads.py +41 -33
  11. prefect/_internal/concurrency/waiters.py +28 -21
  12. prefect/_internal/pydantic/v1_schema.py +2 -2
  13. prefect/_internal/pydantic/v2_schema.py +10 -9
  14. prefect/_internal/pydantic/v2_validated_func.py +15 -10
  15. prefect/_internal/retries.py +15 -6
  16. prefect/_internal/schemas/bases.py +11 -8
  17. prefect/_internal/schemas/validators.py +7 -5
  18. prefect/_version.py +3 -3
  19. prefect/automations.py +53 -47
  20. prefect/blocks/abstract.py +12 -10
  21. prefect/blocks/core.py +148 -19
  22. prefect/blocks/system.py +2 -1
  23. prefect/cache_policies.py +11 -11
  24. prefect/client/__init__.py +3 -1
  25. prefect/client/base.py +36 -37
  26. prefect/client/cloud.py +26 -19
  27. prefect/client/collections.py +2 -2
  28. prefect/client/orchestration.py +430 -273
  29. prefect/client/schemas/__init__.py +24 -0
  30. prefect/client/schemas/actions.py +128 -121
  31. prefect/client/schemas/filters.py +1 -1
  32. prefect/client/schemas/objects.py +114 -85
  33. prefect/client/schemas/responses.py +19 -20
  34. prefect/client/schemas/schedules.py +136 -93
  35. prefect/client/subscriptions.py +30 -15
  36. prefect/client/utilities.py +46 -36
  37. prefect/concurrency/asyncio.py +6 -9
  38. prefect/concurrency/sync.py +35 -5
  39. prefect/context.py +40 -32
  40. prefect/deployments/flow_runs.py +6 -8
  41. prefect/deployments/runner.py +14 -14
  42. prefect/deployments/steps/core.py +3 -1
  43. prefect/deployments/steps/pull.py +60 -12
  44. prefect/docker/__init__.py +1 -1
  45. prefect/events/clients.py +55 -4
  46. prefect/events/filters.py +1 -1
  47. prefect/events/related.py +2 -1
  48. prefect/events/schemas/events.py +26 -21
  49. prefect/events/utilities.py +3 -2
  50. prefect/events/worker.py +8 -0
  51. prefect/filesystems.py +3 -3
  52. prefect/flow_engine.py +87 -87
  53. prefect/flow_runs.py +7 -5
  54. prefect/flows.py +218 -176
  55. prefect/logging/configuration.py +1 -1
  56. prefect/logging/highlighters.py +1 -2
  57. prefect/logging/loggers.py +30 -20
  58. prefect/main.py +17 -24
  59. prefect/results.py +43 -22
  60. prefect/runner/runner.py +43 -21
  61. prefect/runner/server.py +30 -32
  62. prefect/runner/storage.py +3 -3
  63. prefect/runner/submit.py +3 -6
  64. prefect/runner/utils.py +6 -6
  65. prefect/runtime/flow_run.py +7 -0
  66. prefect/serializers.py +28 -24
  67. prefect/settings/constants.py +2 -2
  68. prefect/settings/legacy.py +1 -1
  69. prefect/settings/models/experiments.py +5 -0
  70. prefect/settings/models/server/events.py +10 -0
  71. prefect/task_engine.py +87 -26
  72. prefect/task_runners.py +2 -2
  73. prefect/task_worker.py +43 -25
  74. prefect/tasks.py +148 -142
  75. prefect/telemetry/bootstrap.py +15 -2
  76. prefect/telemetry/instrumentation.py +1 -1
  77. prefect/telemetry/processors.py +10 -7
  78. prefect/telemetry/run_telemetry.py +231 -0
  79. prefect/transactions.py +14 -14
  80. prefect/types/__init__.py +5 -5
  81. prefect/utilities/_engine.py +96 -0
  82. prefect/utilities/annotations.py +25 -18
  83. prefect/utilities/asyncutils.py +126 -140
  84. prefect/utilities/callables.py +87 -78
  85. prefect/utilities/collections.py +278 -117
  86. prefect/utilities/compat.py +13 -21
  87. prefect/utilities/context.py +6 -5
  88. prefect/utilities/dispatch.py +23 -12
  89. prefect/utilities/dockerutils.py +33 -32
  90. prefect/utilities/engine.py +126 -239
  91. prefect/utilities/filesystem.py +18 -15
  92. prefect/utilities/hashing.py +10 -11
  93. prefect/utilities/importtools.py +40 -27
  94. prefect/utilities/math.py +9 -5
  95. prefect/utilities/names.py +3 -3
  96. prefect/utilities/processutils.py +121 -57
  97. prefect/utilities/pydantic.py +41 -36
  98. prefect/utilities/render_swagger.py +22 -12
  99. prefect/utilities/schema_tools/__init__.py +2 -1
  100. prefect/utilities/schema_tools/hydration.py +50 -43
  101. prefect/utilities/schema_tools/validation.py +52 -42
  102. prefect/utilities/services.py +13 -12
  103. prefect/utilities/templating.py +45 -45
  104. prefect/utilities/text.py +2 -1
  105. prefect/utilities/timeout.py +4 -4
  106. prefect/utilities/urls.py +9 -4
  107. prefect/utilities/visualization.py +46 -24
  108. prefect/variables.py +136 -27
  109. prefect/workers/base.py +15 -8
  110. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/METADATA +5 -2
  111. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/RECORD +114 -110
  112. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/LICENSE +0 -0
  113. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/WHEEL +0 -0
  114. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/top_level.txt +0 -0

prefect/_internal/concurrency/threads.py

@@ -8,7 +8,9 @@ import concurrent.futures
  import itertools
  import queue
  import threading
- from typing import List, Optional
+ from typing import Any, Optional
+
+ from typing_extensions import TypeVar

  from prefect._internal.concurrency import logger
  from prefect._internal.concurrency.calls import Call, Portal
@@ -16,6 +18,8 @@ from prefect._internal.concurrency.cancellation import CancelledError
  from prefect._internal.concurrency.event_loop import get_running_loop
  from prefect._internal.concurrency.primitives import Event

+ T = TypeVar("T", infer_variance=True)
+

  class WorkerThread(Portal):
  """
@@ -33,7 +37,7 @@ class WorkerThread(Portal):
  self.thread = threading.Thread(
  name=name, daemon=daemon, target=self._entrypoint
  )
- self._queue = queue.Queue()
+ self._queue: queue.Queue[Optional[Call[Any]]] = queue.Queue()
  self._run_once: bool = run_once
  self._started: bool = False
  self._submitted_count: int = 0
@@ -42,7 +46,7 @@ class WorkerThread(Portal):
  if not daemon:
  atexit.register(self.shutdown)

- def start(self):
+ def start(self) -> None:
  """
  Start the worker thread.
  """
@@ -51,7 +55,7 @@ class WorkerThread(Portal):
  self._started = True
  self.thread.start()

- def submit(self, call: Call) -> Call:
+ def submit(self, call: Call[T]) -> Call[T]:
  if self._submitted_count > 0 and self._run_once:
  raise RuntimeError(
  "Worker configured to only run once. A call has already been submitted."
@@ -83,7 +87,7 @@ class WorkerThread(Portal):
  def name(self) -> str:
  return self.thread.name

- def _entrypoint(self):
+ def _entrypoint(self) -> None:
  """
  Entrypoint for the thread.
  """
@@ -129,12 +133,14 @@ class EventLoopThread(Portal):
  self.thread = threading.Thread(
  name=name, daemon=daemon, target=self._entrypoint
  )
- self._ready_future = concurrent.futures.Future()
+ self._ready_future: concurrent.futures.Future[
+ bool
+ ] = concurrent.futures.Future()
  self._loop: Optional[asyncio.AbstractEventLoop] = None
  self._shutdown_event: Event = Event()
  self._run_once: bool = run_once
  self._submitted_count: int = 0
- self._on_shutdown: List[Call] = []
+ self._on_shutdown: list[Call[Any]] = []
  self._lock = threading.Lock()

  if not daemon:
@@ -149,7 +155,7 @@ class EventLoopThread(Portal):
  self.thread.start()
  self._ready_future.result()

- def submit(self, call: Call) -> Call:
+ def submit(self, call: Call[T]) -> Call[T]:
  if self._loop is None:
  self.start()

@@ -167,6 +173,7 @@
  call.set_runner(self)

  # Submit the call to the event loop
+ assert self._loop is not None
  asyncio.run_coroutine_threadsafe(self._run_call(call), self._loop)

  self._submitted_count += 1
@@ -180,15 +187,16 @@
  Shutdown the worker thread. Does not wait for the thread to stop.
  """
  with self._lock:
- if self._shutdown_event is None:
- return
-
  self._shutdown_event.set()

  @property
  def name(self) -> str:
  return self.thread.name

+ @property
+ def running(self) -> bool:
+ return not self._shutdown_event.is_set()
+
  def _entrypoint(self):
  """
  Entrypoint for the thread.
@@ -218,12 +226,12 @@
  # Empty the list to allow calls to be garbage collected. Issue #10338.
  self._on_shutdown = []

- async def _run_call(self, call: Call) -> None:
+ async def _run_call(self, call: Call[Any]) -> None:
  task = call.run()
  if task is not None:
  await task

- def add_shutdown_call(self, call: Call) -> None:
+ def add_shutdown_call(self, call: Call[Any]) -> None:
  self._on_shutdown.append(call)

  def __enter__(self):
@@ -235,9 +243,9 @@


  # the GLOBAL LOOP is used for background services, like logs
- GLOBAL_LOOP: Optional[EventLoopThread] = None
+ _global_loop: Optional[EventLoopThread] = None
  # the RUN SYNC LOOP is used exclusively for running async functions in a sync context via asyncutils.run_sync
- RUN_SYNC_LOOP: Optional[EventLoopThread] = None
+ _run_sync_loop: Optional[EventLoopThread] = None


  def get_global_loop() -> EventLoopThread:
@@ -246,29 +254,29 @@ def get_global_loop() -> EventLoopThread:

  Creates a new one if there is not one available.
  """
- global GLOBAL_LOOP
+ global _global_loop

  # Create a new worker on first call or if the existing worker is dead
  if (
- GLOBAL_LOOP is None
- or not GLOBAL_LOOP.thread.is_alive()
- or GLOBAL_LOOP._shutdown_event.is_set()
+ _global_loop is None
+ or not _global_loop.thread.is_alive()
+ or not _global_loop.running
  ):
- GLOBAL_LOOP = EventLoopThread(daemon=True, name="GlobalEventLoopThread")
- GLOBAL_LOOP.start()
+ _global_loop = EventLoopThread(daemon=True, name="GlobalEventLoopThread")
+ _global_loop.start()

- return GLOBAL_LOOP
+ return _global_loop


  def in_global_loop() -> bool:
  """
  Check if called from the global loop.
  """
- if GLOBAL_LOOP is None:
+ if _global_loop is None:
  # Avoid creating a global loop if there isn't one
  return False

- return get_global_loop()._loop == get_running_loop()
+ return getattr(get_global_loop(), "_loop") == get_running_loop()


  def get_run_sync_loop() -> EventLoopThread:
@@ -277,29 +285,29 @@ def get_run_sync_loop() -> EventLoopThread:

  Creates a new one if there is not one available.
  """
- global RUN_SYNC_LOOP
+ global _run_sync_loop

  # Create a new worker on first call or if the existing worker is dead
  if (
- RUN_SYNC_LOOP is None
- or not RUN_SYNC_LOOP.thread.is_alive()
- or RUN_SYNC_LOOP._shutdown_event.is_set()
+ _run_sync_loop is None
+ or not _run_sync_loop.thread.is_alive()
+ or not _run_sync_loop.running
  ):
- RUN_SYNC_LOOP = EventLoopThread(daemon=True, name="RunSyncEventLoopThread")
- RUN_SYNC_LOOP.start()
+ _run_sync_loop = EventLoopThread(daemon=True, name="RunSyncEventLoopThread")
+ _run_sync_loop.start()

- return RUN_SYNC_LOOP
+ return _run_sync_loop


  def in_run_sync_loop() -> bool:
  """
  Check if called from the global loop.
  """
- if RUN_SYNC_LOOP is None:
+ if _run_sync_loop is None:
  # Avoid creating a global loop if there isn't one
  return False

- return get_run_sync_loop()._loop == get_running_loop()
+ return getattr(get_run_sync_loop(), "_loop") == get_running_loop()


  def wait_for_global_loop_exit(timeout: Optional[float] = None) -> None:
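
Note (illustrative, not part of the diff): the typing changes in threads.py follow one pattern — a TypeVar declared with infer_variance=True from typing_extensions lets a pass-through method such as submit(call: Call[T]) -> Call[T] preserve the result type of the submitted call instead of collapsing it to Any. A minimal sketch of that pattern with hypothetical names (Job, submit); it is not Prefect's implementation:

    from typing import Callable, Generic, Optional
    from typing_extensions import TypeVar

    # Variance is inferred rather than declared explicitly.
    T = TypeVar("T", infer_variance=True)

    class Job(Generic[T]):
        def __init__(self, fn: Callable[[], T]) -> None:
            self._fn = fn
            self._result: Optional[T] = None

        def run(self) -> None:
            self._result = self._fn()

        def result(self) -> Optional[T]:
            return self._result

    def submit(job: Job[T]) -> Job[T]:
        # A real portal would hand the job to a worker thread; run inline here.
        job.run()
        return job

    job = submit(Job(lambda: 42))  # type checkers see Job[int], not Job[Any]
    print(job.result())            # 42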

prefect/_internal/concurrency/waiters.py

@@ -10,7 +10,8 @@ import inspect
  import queue
  import threading
  from collections import deque
- from typing import Awaitable, Generic, List, Optional, TypeVar, Union
+ from collections.abc import Awaitable
+ from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar
  from weakref import WeakKeyDictionary

  import anyio
@@ -24,12 +25,12 @@ T = TypeVar("T")


  # Waiters are stored in a stack for each thread
- _WAITERS_BY_THREAD: "WeakKeyDictionary[threading.Thread, deque[Waiter]]" = (
+ _WAITERS_BY_THREAD: "WeakKeyDictionary[threading.Thread, deque[Waiter[Any]]]" = (
  WeakKeyDictionary()
  )


- def add_waiter_for_thread(waiter: "Waiter", thread: threading.Thread):
+ def add_waiter_for_thread(waiter: "Waiter[Any]", thread: threading.Thread) -> None:
  """
  Add a waiter for a thread.
  """
@@ -62,7 +63,7 @@ class Waiter(Portal, abc.ABC, Generic[T]):
  return self._call.future.done()

  @abc.abstractmethod
- def wait(self) -> Union[Awaitable[None], None]:
+ def wait(self) -> T:
  """
  Wait for the call to finish.

@@ -71,7 +72,7 @@
  raise NotImplementedError()

  @abc.abstractmethod
- def add_done_callback(self, callback: Call) -> Call:
+ def add_done_callback(self, callback: Call[Any]) -> None:
  """
  Schedule a call to run when the waiter is done waiting.

@@ -91,11 +92,11 @@ class SyncWaiter(Waiter[T]):

  def __init__(self, call: Call[T]) -> None:
  super().__init__(call=call)
- self._queue: queue.Queue = queue.Queue()
- self._done_callbacks = []
+ self._queue: queue.Queue[Optional[Call[T]]] = queue.Queue()
+ self._done_callbacks: list[Call[Any]] = []
  self._done_event = threading.Event()

- def submit(self, call: Call):
+ def submit(self, call: Call[T]) -> Call[T]:
  """
  Submit a callback to execute while waiting.
  """
@@ -109,7 +110,7 @@
  def _handle_waiting_callbacks(self):
  logger.debug("Waiter %r watching for callbacks", self)
  while True:
- callback: Call = self._queue.get()
+ callback = self._queue.get()
  if callback is None:
  break

@@ -130,13 +131,13 @@
  if callback:
  callback.run()

- def add_done_callback(self, callback: Call):
+ def add_done_callback(self, callback: Call[Any]) -> None:
  if self._done_event.is_set():
  raise RuntimeError("Cannot add done callbacks to done waiters.")
  else:
  self._done_callbacks.append(callback)

- def wait(self) -> T:
+ def wait(self) -> Call[T]:
  # Stop watching for work once the future is done
  self._call.future.add_done_callback(lambda _: self._queue.put_nowait(None))
  self._call.future.add_done_callback(lambda _: self._done_event.set())
@@ -159,13 +160,13 @@

  # Delay instantiating loop and queue as there may not be a loop present yet
  self._loop: Optional[asyncio.AbstractEventLoop] = None
- self._queue: Optional[asyncio.Queue] = None
- self._early_submissions: List[Call] = []
- self._done_callbacks = []
+ self._queue: Optional[asyncio.Queue[Optional[Call[T]]]] = None
+ self._early_submissions: list[Call[T]] = []
+ self._done_callbacks: list[Call[Any]] = []
  self._done_event = Event()
  self._done_waiting = False

- def submit(self, call: Call):
+ def submit(self, call: Call[T]) -> Call[T]:
  """
  Submit a callback to execute while waiting.
  """
@@ -180,11 +181,15 @@
  return call

  # We must put items in the queue from the event loop that owns it
+ if TYPE_CHECKING:
+ assert self._loop is not None
  call_soon_in_loop(self._loop, self._queue.put_nowait, call)
  return call

- def _resubmit_early_submissions(self):
- assert self._queue
+ def _resubmit_early_submissions(self) -> None:
+ if TYPE_CHECKING:
+ assert self._queue is not None
+ assert self._loop is not None
  for call in self._early_submissions:
  # We must put items in the queue from the event loop that owns it
  call_soon_in_loop(self._loop, self._queue.put_nowait, call)
@@ -192,11 +197,11 @@

  async def _handle_waiting_callbacks(self):
  logger.debug("Waiter %r watching for callbacks", self)
- tasks = []
+ tasks: list[Awaitable[None]] = []

  try:
  while True:
- callback: Call = await self._queue.get()
+ callback = await self._queue.get()
  if callback is None:
  break

@@ -228,12 +233,12 @@
  with anyio.CancelScope(shield=True):
  await self._run_done_callback(callback)

- async def _run_done_callback(self, callback: Call):
+ async def _run_done_callback(self, callback: Call[Any]) -> None:
  coro = callback.run()
  if coro:
  await coro

- def add_done_callback(self, callback: Call):
+ def add_done_callback(self, callback: Call[Any]) -> None:
  if self._done_event.is_set():
  raise RuntimeError("Cannot add done callbacks to done waiters.")
  else:
@@ -243,6 +248,8 @@
  # Only send a `None` to the queue if the waiter is still blocked reading from
  # the queue. Otherwise, it's possible that the event loop is stopped.
  if not self._done_waiting:
+ assert self._loop is not None
+ assert self._queue is not None
  call_soon_in_loop(self._loop, self._queue.put_nowait, None)

  async def wait(self) -> Call[T]:
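
Note (illustrative, not part of the diff): the new annotations queue.Queue[Optional[Call[T]]] and asyncio.Queue[Optional[Call[T]]] make the waiters' existing shutdown convention explicit — None is enqueued as a sentinel once the watched call finishes, which unblocks the consumer loop. A self-contained sketch of that sentinel pattern with hypothetical names (watch, q):

    import queue
    from typing import Callable, Optional

    def watch(q: "queue.Queue[Optional[Callable[[], None]]]") -> None:
        # Drain callbacks until the None sentinel arrives.
        while True:
            callback = q.get()
            if callback is None:
                break
            callback()

    q: "queue.Queue[Optional[Callable[[], None]]]" = queue.Queue()
    q.put(lambda: print("callback ran while waiting"))
    q.put(None)  # the real code enqueues None from a future's done callback
    watch(q)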

prefect/_internal/pydantic/v1_schema.py

@@ -6,7 +6,7 @@ import pydantic
  from pydantic.v1 import BaseModel as V1BaseModel


- def is_v1_model(v) -> bool:
+ def is_v1_model(v: typing.Any) -> bool:
  with warnings.catch_warnings():
  warnings.filterwarnings(
  "ignore", category=pydantic.warnings.PydanticDeprecatedSince20
@@ -23,7 +23,7 @@ def is_v1_model(v) -> bool:
  return False


- def is_v1_type(v) -> bool:
+ def is_v1_type(v: typing.Any) -> bool:
  if is_v1_model(v):
  return True


prefect/_internal/pydantic/v2_schema.py

@@ -16,7 +16,7 @@ from prefect._internal.pydantic.annotations.pendulum import (
  from prefect._internal.pydantic.schemas import GenerateEmptySchemaForUserClasses


- def is_v2_model(v) -> bool:
+ def is_v2_model(v: t.Any) -> bool:
  if isinstance(v, V2BaseModel):
  return True
  try:
@@ -28,7 +28,7 @@ def is_v2_model(v) -> bool:
  return False


- def is_v2_type(v) -> bool:
+ def is_v2_type(v: t.Any) -> bool:
  if is_v2_model(v):
  return True

@@ -56,9 +56,9 @@ def process_v2_params(
  param: inspect.Parameter,
  *,
  position: int,
- docstrings: t.Dict[str, str],
- aliases: t.Dict,
- ) -> t.Tuple[str, t.Any, "pydantic.Field"]:
+ docstrings: dict[str, str],
+ aliases: dict[str, str],
+ ) -> tuple[str, t.Any, t.Any]:
  """
  Generate a sanitized name, type, and pydantic.Field for a given parameter.

@@ -72,7 +72,7 @@
  else:
  name = param.name

- type_ = t.Any if param.annotation is inspect._empty else param.annotation
+ type_ = t.Any if param.annotation is inspect.Parameter.empty else param.annotation

  # Replace pendulum type annotations with our own so that they are pydantic compatible
  if type_ == pendulum.DateTime:
@@ -95,12 +95,13 @@
  def create_v2_schema(
  name_: str,
  model_cfg: t.Optional[ConfigDict] = None,
- model_base: t.Optional[t.Type[V2BaseModel]] = None,
- **model_fields,
- ):
+ model_base: t.Optional[type[V2BaseModel]] = None,
+ model_fields: t.Optional[dict[str, t.Any]] = None,
+ ) -> dict[str, t.Any]:
  """
  Create a pydantic v2 model and craft a v1 compatible schema from it.
  """
+ model_fields = model_fields or {}
  model = create_model(
  name_, __config__=model_cfg, __base__=model_base, **model_fields
  )
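
Note (illustrative, not part of the diff): create_v2_schema now takes its field definitions as an explicit model_fields dict instead of **kwargs. The dict follows pydantic.create_model's (type, default) convention and is forwarded unchanged; the field names and values below are made up for illustration:

    from typing import Any

    from pydantic import Field, create_model

    model_fields: dict[str, Any] = {
        "retries": (int, Field(default=0, ge=0)),
        "name": (str, ...),  # required field
    }

    # Roughly what the helper does internally with the supplied dict:
    Demo = create_model("Demo", **model_fields)
    print(Demo(name="example").model_dump())  # {'retries': 0, 'name': 'example'}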

prefect/_internal/pydantic/v2_validated_func.py

@@ -6,16 +6,17 @@ Specifically it allows for us to validate v2 models used as flow/task
  arguments.
  """

- from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union
+ from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union

  # importing directly from v2 to be able to create a v2 model
  from pydantic import BaseModel, ConfigDict, create_model, field_validator
  from pydantic.v1.decorator import ValidatedFunction
  from pydantic.v1.errors import ConfigError
  from pydantic.v1.utils import to_camel
+ from typing_extensions import TypeAlias

  if TYPE_CHECKING:
- ConfigType = Union[None, Type[Any], Dict[str, Any]]
+ ConfigType: TypeAlias = Union[None, type[Any], dict[str, Any]]

  V_POSITIONAL_ONLY_NAME = "v__positional_only"
  V_DUPLICATE_KWARGS = "v__duplicate_kwargs"
@@ -24,13 +25,17 @@ V_DUPLICATE_KWARGS = "v__duplicate_kwargs"
  class V2ValidatedFunction(ValidatedFunction):
  def create_model(
  self,
- fields: Dict[str, Any],
+ fields: dict[str, Any],
  takes_args: bool,
  takes_kwargs: bool,
- config: ConfigDict,
+ config: "ConfigType",
  ) -> None:
  pos_args = len(self.arg_mapping)

+ config = {} if config is None else config
+ if not isinstance(config, dict):
+ raise TypeError(f"config must be None or a dict, got {type(config)}")
+
  if config.get("fields") or config.get("alias_generator"):
  raise ConfigError(
  'Setting the "fields" and "alias_generator" property on custom Config'
@@ -42,11 +47,11 @@ class V2ValidatedFunction(ValidatedFunction):

  # This is the key change -- inheriting the BaseModel class from v2
  class DecoratorBaseModel(BaseModel):
- model_config = config
+ model_config: ClassVar[ConfigDict] = ConfigDict(**config)

  @field_validator(self.v_args_name, check_fields=False)
  @classmethod
- def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]:
+ def check_args(cls, v: Optional[list[Any]]) -> Optional[list[Any]]:
  if takes_args or v is None:
  return v

@@ -58,8 +63,8 @@ class V2ValidatedFunction(ValidatedFunction):
  @field_validator(self.v_kwargs_name, check_fields=False)
  @classmethod
  def check_kwargs(
- cls, v: Optional[Dict[str, Any]]
- ) -> Optional[Dict[str, Any]]:
+ cls, v: Optional[dict[str, Any]]
+ ) -> Optional[dict[str, Any]]:
  if takes_kwargs or v is None:
  return v

@@ -69,7 +74,7 @@

  @field_validator(V_POSITIONAL_ONLY_NAME, check_fields=False)
  @classmethod
- def check_positional_only(cls, v: Optional[List[str]]) -> None:
+ def check_positional_only(cls, v: Optional[list[str]]) -> None:
  if v is None:
  return

@@ -82,7 +87,7 @@

  @field_validator(V_DUPLICATE_KWARGS, check_fields=False)
  @classmethod
- def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None:
+ def check_duplicate_kwargs(cls, v: Optional[list[str]]) -> None:
  if v is None:
  return

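
Note (illustrative, not part of the diff): create_model above now normalizes config before building DecoratorBaseModel — None becomes an empty dict, anything that is not a dict fails fast, and the result is wrapped in ConfigDict. A standalone sketch of just that guard (normalize_config is a hypothetical name):

    from typing import Any, Union

    from pydantic import ConfigDict

    ConfigType = Union[None, type[Any], dict[str, Any]]

    def normalize_config(config: ConfigType) -> ConfigDict:
        # Mirror the guard added in the diff: None -> {}, non-dicts rejected.
        config = {} if config is None else config
        if not isinstance(config, dict):
            raise TypeError(f"config must be None or a dict, got {type(config)}")
        return ConfigDict(**config)

    print(normalize_config(None))                  # {}
    print(normalize_config({"extra": "forbid"}))   # {'extra': 'forbid'}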

prefect/_internal/retries.py

@@ -1,10 +1,15 @@
  import asyncio
  from functools import wraps
- from typing import Any, Callable, Tuple, Type
+ from typing import Callable, Optional, Tuple, Type, TypeVar
+
+ from typing_extensions import ParamSpec

  from prefect._internal._logging import logger
  from prefect.utilities.math import clamped_poisson_interval

+ P = ParamSpec("P")
+ R = TypeVar("R")
+

  def exponential_backoff_with_jitter(
  attempt: int, base_delay: float, max_delay: float
@@ -21,7 +26,8 @@ def retry_async_fn(
  base_delay: float = 1,
  max_delay: float = 10,
  retry_on_exceptions: Tuple[Type[Exception], ...] = (Exception,),
- ):
+ operation_name: Optional[str] = None,
+ ) -> Callable[[Callable[P, R]], Callable[P, R]]:
  """A decorator for retrying an async function.

  Args:
@@ -33,23 +39,26 @@
  max_delay: The maximum delay to use for the last attempt.
  retry_on_exceptions: A tuple of exception types to retry on. Defaults to
  retrying on all exceptions.
+ operation_name: Optional name to use for logging the operation instead of
+ the function name. If None, uses the function name.
  """

- def decorator(func):
+ def decorator(func: Callable[P, R]) -> Callable[P, R]:
  @wraps(func)
- async def wrapper(*args: Any, **kwargs: Any) -> Any:
+ async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+ name = operation_name or func.__name__
  for attempt in range(max_attempts):
  try:
  return await func(*args, **kwargs)
  except retry_on_exceptions as e:
  if attempt == max_attempts - 1:
  logger.exception(
- f"Function {func.__name__!r} failed after {max_attempts} attempts"
+ f"Function {name!r} failed after {max_attempts} attempts"
  )
  raise
  delay = backoff_strategy(attempt, base_delay, max_delay)
  logger.warning(
- f"Attempt {attempt + 1} of function {func.__name__!r} failed with {type(e).__name__}. "
+ f"Attempt {attempt + 1} of function {name!r} failed with {type(e).__name__}: {str(e)}. "
  f"Retrying in {delay:.2f} seconds..."
  )
  await asyncio.sleep(delay)
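
Note (illustrative, not part of the diff): a usage sketch for the new operation_name parameter of this internal decorator. The keyword arguments mirror the signature shown above; max_attempts appears in the function body rather than the hunk, so treat its exact default as an assumption, and the failing coroutine is made up:

    from prefect._internal.retries import retry_async_fn

    @retry_async_fn(
        max_attempts=3,
        base_delay=0.1,
        max_delay=1,
        retry_on_exceptions=(ConnectionError,),
        operation_name="load deployment metadata",  # new in 3.1.7: used in retry logs
    )
    async def load_metadata() -> dict:
        raise ConnectionError("transient network error")

    # asyncio.run(load_metadata()) would log two warnings naming the operation,
    # then re-raise the ConnectionError after the final attempt.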

prefect/_internal/schemas/bases.py

@@ -4,7 +4,7 @@ Utilities for creating and working with Prefect REST API schemas.

  import datetime
  import os
- from typing import Any, ClassVar, Optional, Set, TypeVar
+ from typing import Any, ClassVar, Generator, Optional, Set, TypeVar, cast
  from uuid import UUID, uuid4

  import pendulum
@@ -13,9 +13,10 @@ from pydantic import (
  ConfigDict,
  Field,
  )
- from pydantic_extra_types.pendulum_dt import DateTime
  from typing_extensions import Self

+ from prefect.types import DateTime
+
  T = TypeVar("T")


@@ -33,7 +34,7 @@ class PrefectBaseModel(BaseModel):

  _reset_fields: ClassVar[Set[str]] = set()

- model_config = ConfigDict(
+ model_config: ClassVar[ConfigDict] = ConfigDict(
  ser_json_timedelta="float",
  defer_build=True,
  extra=(
@@ -58,7 +59,7 @@ class PrefectBaseModel(BaseModel):
  else:
  return copy_dict == other

- def __rich_repr__(self):
+ def __rich_repr__(self) -> Generator[tuple[str, Any, Any], None, None]:
  # Display all of the fields in the model if they differ from the default value
  for name, field in self.model_fields.items():
  value = getattr(self, name)
@@ -71,9 +72,11 @@ class PrefectBaseModel(BaseModel):
  and name == "timestamp"
  and value
  ):
- value = pendulum.instance(value).isoformat()
+ value = cast(pendulum.DateTime, pendulum.instance(value)).isoformat()
  elif isinstance(field.annotation, datetime.datetime) and value:
- value = pendulum.instance(value).diff_for_humans()
+ value = cast(
+ pendulum.DateTime, pendulum.instance(value)
+ ).diff_for_humans()

  yield name, value, field.get_default()

@@ -113,11 +116,11 @@ class ObjectBaseModel(IDBaseModel):
  """

  _reset_fields: ClassVar[Set[str]] = {"id", "created", "updated"}
- model_config = ConfigDict(from_attributes=True)
+ model_config: ClassVar[ConfigDict] = ConfigDict(from_attributes=True)

  created: Optional[DateTime] = Field(default=None, repr=False)
  updated: Optional[DateTime] = Field(default=None, repr=False)


  class ActionBaseModel(PrefectBaseModel):
- model_config: ConfigDict = ConfigDict(extra="forbid")
+ model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
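
Note (illustrative, not part of the diff): the recurring change in this file is annotating model_config as ClassVar[ConfigDict], which tells type checkers the attribute is class-level pydantic configuration rather than a model field; runtime behavior is unchanged. A minimal sketch with a hypothetical model:

    from typing import ClassVar

    from pydantic import BaseModel, ConfigDict

    class ExampleSchema(BaseModel):
        model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")

        name: str

    print(ExampleSchema(name="ok"))
    # ExampleSchema(name="ok", extra_field=1) would raise a ValidationError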