prefect-client 3.1.10__py3-none-any.whl → 3.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. prefect/_experimental/lineage.py +7 -8
  2. prefect/_experimental/sla/__init__.py +0 -0
  3. prefect/_experimental/sla/client.py +66 -0
  4. prefect/_experimental/sla/objects.py +53 -0
  5. prefect/_internal/_logging.py +15 -3
  6. prefect/_internal/compatibility/async_dispatch.py +22 -16
  7. prefect/_internal/compatibility/deprecated.py +42 -18
  8. prefect/_internal/compatibility/migration.py +2 -2
  9. prefect/_internal/concurrency/inspection.py +12 -14
  10. prefect/_internal/concurrency/primitives.py +2 -2
  11. prefect/_internal/concurrency/services.py +154 -80
  12. prefect/_internal/concurrency/waiters.py +13 -9
  13. prefect/_internal/pydantic/annotations/pendulum.py +7 -7
  14. prefect/_internal/pytz.py +4 -3
  15. prefect/_internal/retries.py +10 -5
  16. prefect/_internal/schemas/bases.py +19 -10
  17. prefect/_internal/schemas/validators.py +227 -388
  18. prefect/_version.py +3 -3
  19. prefect/automations.py +236 -30
  20. prefect/blocks/__init__.py +3 -3
  21. prefect/blocks/abstract.py +53 -30
  22. prefect/blocks/core.py +183 -84
  23. prefect/blocks/notifications.py +133 -73
  24. prefect/blocks/redis.py +13 -9
  25. prefect/blocks/system.py +24 -11
  26. prefect/blocks/webhook.py +7 -5
  27. prefect/cache_policies.py +3 -2
  28. prefect/client/orchestration/__init__.py +1957 -0
  29. prefect/client/orchestration/_artifacts/__init__.py +0 -0
  30. prefect/client/orchestration/_artifacts/client.py +239 -0
  31. prefect/client/orchestration/_automations/__init__.py +0 -0
  32. prefect/client/orchestration/_automations/client.py +329 -0
  33. prefect/client/orchestration/_blocks_documents/__init__.py +0 -0
  34. prefect/client/orchestration/_blocks_documents/client.py +334 -0
  35. prefect/client/orchestration/_blocks_schemas/__init__.py +0 -0
  36. prefect/client/orchestration/_blocks_schemas/client.py +200 -0
  37. prefect/client/orchestration/_blocks_types/__init__.py +0 -0
  38. prefect/client/orchestration/_blocks_types/client.py +380 -0
  39. prefect/client/orchestration/_concurrency_limits/__init__.py +0 -0
  40. prefect/client/orchestration/_concurrency_limits/client.py +762 -0
  41. prefect/client/orchestration/_deployments/__init__.py +0 -0
  42. prefect/client/orchestration/_deployments/client.py +1128 -0
  43. prefect/client/orchestration/_flow_runs/__init__.py +0 -0
  44. prefect/client/orchestration/_flow_runs/client.py +903 -0
  45. prefect/client/orchestration/_flows/__init__.py +0 -0
  46. prefect/client/orchestration/_flows/client.py +343 -0
  47. prefect/client/orchestration/_logs/__init__.py +0 -0
  48. prefect/client/orchestration/_logs/client.py +97 -0
  49. prefect/client/orchestration/_variables/__init__.py +0 -0
  50. prefect/client/orchestration/_variables/client.py +157 -0
  51. prefect/client/orchestration/base.py +46 -0
  52. prefect/client/orchestration/routes.py +145 -0
  53. prefect/client/schemas/__init__.py +68 -28
  54. prefect/client/schemas/actions.py +2 -2
  55. prefect/client/schemas/filters.py +5 -0
  56. prefect/client/schemas/objects.py +8 -15
  57. prefect/client/schemas/schedules.py +22 -10
  58. prefect/concurrency/_asyncio.py +87 -0
  59. prefect/concurrency/{events.py → _events.py} +10 -10
  60. prefect/concurrency/asyncio.py +20 -104
  61. prefect/concurrency/context.py +6 -4
  62. prefect/concurrency/services.py +26 -74
  63. prefect/concurrency/sync.py +23 -44
  64. prefect/concurrency/v1/_asyncio.py +63 -0
  65. prefect/concurrency/v1/{events.py → _events.py} +13 -15
  66. prefect/concurrency/v1/asyncio.py +27 -80
  67. prefect/concurrency/v1/context.py +6 -4
  68. prefect/concurrency/v1/services.py +33 -79
  69. prefect/concurrency/v1/sync.py +18 -37
  70. prefect/context.py +66 -45
  71. prefect/deployments/base.py +10 -144
  72. prefect/deployments/flow_runs.py +12 -2
  73. prefect/deployments/runner.py +53 -4
  74. prefect/deployments/steps/pull.py +13 -0
  75. prefect/engine.py +17 -4
  76. prefect/events/clients.py +7 -1
  77. prefect/events/schemas/events.py +3 -2
  78. prefect/filesystems.py +6 -2
  79. prefect/flow_engine.py +101 -85
  80. prefect/flows.py +10 -1
  81. prefect/input/run_input.py +2 -1
  82. prefect/logging/logging.yml +1 -1
  83. prefect/main.py +1 -3
  84. prefect/results.py +2 -307
  85. prefect/runner/runner.py +4 -2
  86. prefect/runner/storage.py +87 -21
  87. prefect/serializers.py +32 -25
  88. prefect/settings/legacy.py +4 -4
  89. prefect/settings/models/api.py +3 -3
  90. prefect/settings/models/cli.py +3 -3
  91. prefect/settings/models/client.py +5 -3
  92. prefect/settings/models/cloud.py +8 -3
  93. prefect/settings/models/deployments.py +3 -3
  94. prefect/settings/models/experiments.py +4 -7
  95. prefect/settings/models/flows.py +3 -3
  96. prefect/settings/models/internal.py +4 -2
  97. prefect/settings/models/logging.py +4 -3
  98. prefect/settings/models/results.py +3 -3
  99. prefect/settings/models/root.py +3 -2
  100. prefect/settings/models/runner.py +4 -4
  101. prefect/settings/models/server/api.py +3 -3
  102. prefect/settings/models/server/database.py +11 -4
  103. prefect/settings/models/server/deployments.py +6 -2
  104. prefect/settings/models/server/ephemeral.py +4 -2
  105. prefect/settings/models/server/events.py +3 -2
  106. prefect/settings/models/server/flow_run_graph.py +6 -2
  107. prefect/settings/models/server/root.py +3 -3
  108. prefect/settings/models/server/services.py +26 -11
  109. prefect/settings/models/server/tasks.py +6 -3
  110. prefect/settings/models/server/ui.py +3 -3
  111. prefect/settings/models/tasks.py +5 -5
  112. prefect/settings/models/testing.py +3 -3
  113. prefect/settings/models/worker.py +5 -3
  114. prefect/settings/profiles.py +15 -2
  115. prefect/states.py +61 -45
  116. prefect/task_engine.py +54 -75
  117. prefect/task_runners.py +56 -55
  118. prefect/task_worker.py +2 -2
  119. prefect/tasks.py +90 -36
  120. prefect/telemetry/bootstrap.py +10 -9
  121. prefect/telemetry/run_telemetry.py +13 -8
  122. prefect/telemetry/services.py +4 -0
  123. prefect/transactions.py +4 -15
  124. prefect/utilities/_git.py +34 -0
  125. prefect/utilities/asyncutils.py +1 -1
  126. prefect/utilities/engine.py +3 -19
  127. prefect/utilities/generics.py +18 -0
  128. prefect/utilities/templating.py +25 -1
  129. prefect/workers/base.py +6 -3
  130. prefect/workers/process.py +1 -1
  131. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/METADATA +2 -2
  132. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/RECORD +135 -109
  133. prefect/client/orchestration.py +0 -4523
  134. prefect/records/__init__.py +0 -1
  135. prefect/records/base.py +0 -235
  136. prefect/records/filesystem.py +0 -213
  137. prefect/records/memory.py +0 -184
  138. prefect/records/result_store.py +0 -70
  139. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/LICENSE +0 -0
  140. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/WHEEL +0 -0
  141. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/top_level.txt +0 -0
prefect/_internal/concurrency/services.py CHANGED
@@ -1,15 +1,14 @@
  import abc
  import asyncio
- import atexit
  import concurrent.futures
  import contextlib
  import logging
  import queue
- import sys
  import threading
- from typing import Awaitable, Dict, Generic, List, Optional, Type, TypeVar, Union
+ from collections.abc import AsyncGenerator, Awaitable, Coroutine, Generator, Hashable
+ from typing import TYPE_CHECKING, Any, Generic, NoReturn, Optional, Union, cast

- from typing_extensions import Self
+ from typing_extensions import Self, TypeVar, TypeVarTuple, Unpack

  from prefect._internal.concurrency import logger
  from prefect._internal.concurrency.api import create_call, from_sync
@@ -18,17 +17,19 @@ from prefect._internal.concurrency.event_loop import get_running_loop
  from prefect._internal.concurrency.threads import WorkerThread, get_global_loop

  T = TypeVar("T")
+ Ts = TypeVarTuple("Ts")
+ R = TypeVar("R", infer_variance=True)


- class QueueService(abc.ABC, Generic[T]):
-     _instances: Dict[int, Self] = {}
+ class _QueueServiceBase(abc.ABC, Generic[T]):
+     _instances: dict[int, Self] = {}
      _instance_lock = threading.Lock()

-     def __init__(self, *args) -> None:
-         self._queue: queue.Queue = queue.Queue()
+     def __init__(self, *args: Hashable) -> None:
+         self._queue: queue.Queue[Optional[T]] = queue.Queue()
          self._loop: Optional[asyncio.AbstractEventLoop] = None
          self._done_event: Optional[asyncio.Event] = None
-         self._task: Optional[asyncio.Task] = None
+         self._task: Optional[asyncio.Task[None]] = None
          self._stopped: bool = False
          self._started: bool = False
          self._key = hash(args)
@@ -41,14 +42,14 @@ class QueueService(abc.ABC, Generic[T]):
          )
          self._logger = logging.getLogger(f"{type(self).__name__}")

-     def start(self):
+     def start(self) -> None:
          logger.debug("Starting service %r", self)
          loop_thread = get_global_loop()

-         if not asyncio.get_running_loop() == loop_thread._loop:
+         if not asyncio.get_running_loop() == getattr(loop_thread, "_loop"):
              raise RuntimeError("Services must run on the global loop thread.")

-         self._loop = loop_thread._loop
+         self._loop = asyncio.get_running_loop()
          self._done_event = asyncio.Event()
          self._task = self._loop.create_task(self._run())
          self._queue_get_thread.start()
@@ -58,23 +59,18 @@ class QueueService(abc.ABC, Generic[T]):
          loop_thread.add_shutdown_call(create_call(self.drain))

          # Stop at interpreter exit by default
-         if sys.version_info < (3, 9):
-             atexit.register(self._at_exit)
-         else:
-             # See related issue at https://bugs.python.org/issue42647
-             # Handling items may require spawning a thread and in 3.9 new threads
-             # cannot be spawned after the interpreter finalizes threads which happens
-             # _before_ the normal `atexit` hook is called resulting in failure to
-             # process items. This is particularly relevant for services which use an
-             # httpx client.
-             from threading import _register_atexit
-
-             _register_atexit(self._at_exit)
-
-     def _at_exit(self):
+         # Handling items may require spawning a thread and in 3.9 new threads
+         # cannot be spawned after the interpreter finalizes threads which
+         # happens _before_ the normal `atexit` hook is called resulting in
+         # failure to process items. This is particularly relevant for services
+         # which use an httpx client. See related issue at
+         # https://github.com/python/cpython/issues/86813
+         threading._register_atexit(self._at_exit)  # pyright: ignore[reportUnknownVariableType, reportAttributeAccessIssue]
+
+     def _at_exit(self) -> None:
          self.drain(at_exit=True)

-     def _stop(self, at_exit: bool = False):
+     def _stop(self, at_exit: bool = False) -> None:
          """
          Stop running this instance.

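The hunk above drops the pre-3.9 branch and registers the shutdown hook unconditionally via `threading._register_atexit`. A minimal sketch of that pattern outside Prefect (the callback below is hypothetical): callbacks registered this way run before the interpreter finalizes threads, so, per the comment in the diff, they can still spawn worker threads to finish processing items.

    import threading

    def _flush_pending_work() -> None:
        # Hypothetical drain step: starting a helper thread here is still
        # possible because these callbacks run before threading finalization.
        helper = threading.Thread(target=lambda: None)
        helper.start()
        helper.join()

    # Private CPython API (3.9+), used the same way as in the hunk above.
    threading._register_atexit(_flush_pending_work)  # type: ignore[attr-defined]
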
@@ -100,27 +96,11 @@ class QueueService(abc.ABC, Generic[T]):
          # Signal completion to the loop
          self._queue.put_nowait(None)

-     def send(self, item: T):
-         """
-         Send an item to this instance of the service.
-         """
-         with self._lock:
-             if self._stopped:
-                 raise RuntimeError("Cannot put items in a stopped service instance.")
-
-             logger.debug("Service %r enqueuing item %r", self, item)
-             self._queue.put_nowait(self._prepare_item(item))
-
-     def _prepare_item(self, item: T) -> T:
-         """
-         Prepare an item for submission to the service. This is called before
-         the item is sent to the service.
-
-         The default implementation returns the item unchanged.
-         """
-         return item
+     @abc.abstractmethod
+     def send(self, item: Any) -> Any:
+         raise NotImplementedError

-     async def _run(self):
+     async def _run(self) -> None:
          try:
              async with self._lifespan():
                  await self._main_loop()
@@ -142,14 +122,15 @@ class QueueService(abc.ABC, Generic[T]):
              self._queue_get_thread.shutdown()

              self._stopped = True
+             assert self._done_event is not None
              self._done_event.set()

-     async def _main_loop(self):
+     async def _main_loop(self) -> None:
          last_log_time = 0
          log_interval = 4  # log every 4 seconds

          while True:
-             item: T = await self._queue_get_thread.submit(
+             item: Optional[T] = await self._queue_get_thread.submit(
                  create_call(self._queue.get)
              ).aresult()

@@ -183,19 +164,17 @@ class QueueService(abc.ABC, Generic[T]):
              self._queue.task_done()

      @abc.abstractmethod
-     async def _handle(self, item: T):
-         """
-         Process an item sent to the service.
-         """
+     async def _handle(self, item: Any) -> Any:
+         raise NotImplementedError

      @contextlib.asynccontextmanager
-     async def _lifespan(self):
+     async def _lifespan(self) -> AsyncGenerator[None, Any]:
          """
          Perform any setup and teardown for the service.
          """
          yield

-     def _drain(self, at_exit: bool = False) -> concurrent.futures.Future:
+     def _drain(self, at_exit: bool = False) -> concurrent.futures.Future[bool]:
          """
          Internal implementation for `drain`. Returns a future for sync/async interfaces.

@@ -204,15 +183,17 @@ class QueueService(abc.ABC, Generic[T]):

          self._stop(at_exit=at_exit)

+         assert self._done_event is not None
          if self._done_event.is_set():
-             future = concurrent.futures.Future()
-             future.set_result(None)
+             future: concurrent.futures.Future[bool] = concurrent.futures.Future()
+             future.set_result(False)
              return future

-         future = asyncio.run_coroutine_threadsafe(self._done_event.wait(), self._loop)
-         return future
+         assert self._loop is not None
+         task = cast(Coroutine[Any, Any, bool], self._done_event.wait())
+         return asyncio.run_coroutine_threadsafe(task, self._loop)

-     def drain(self, at_exit: bool = False) -> None:
+     def drain(self, at_exit: bool = False) -> Union[bool, Awaitable[bool]]:
          """
          Stop this instance of the service and wait for remaining work to be completed.

@@ -226,15 +207,24 @@ class QueueService(abc.ABC, Generic[T]):

      @classmethod
      def drain_all(
-         cls, timeout: Optional[float] = None, at_exit=True
-     ) -> Union[Awaitable, None]:
+         cls, timeout: Optional[float] = None, at_exit: bool = True
+     ) -> Union[
+         tuple[
+             set[concurrent.futures.Future[bool]], set[concurrent.futures.Future[bool]]
+         ],
+         Coroutine[
+             Any,
+             Any,
+             Optional[tuple[set[asyncio.Future[bool]], set[asyncio.Future[bool]]]],
+         ],
+     ]:
          """
          Stop all instances of the service and wait for all remaining work to be
          completed.

          Returns an awaitable if called from an async context.
          """
-         futures = []
+         futures: list[concurrent.futures.Future[bool]] = []
          with cls._instance_lock:
              instances = tuple(cls._instances.values())

@@ -242,26 +232,24 @@ class QueueService(abc.ABC, Generic[T]):
              futures.append(instance._drain(at_exit=at_exit))

          if get_running_loop() is not None:
-             return (
-                 asyncio.wait(
+             if futures:
+                 return asyncio.wait(
                      [asyncio.wrap_future(fut) for fut in futures], timeout=timeout
                  )
-                 if futures
-                 # `wait` errors if it receives an empty list but we need to return a
-                 # coroutine still
-                 else asyncio.sleep(0)
-             )
+             # `wait` errors if it receives an empty list but we need to return a
+             # coroutine still
+             return asyncio.sleep(0)
          else:
              return concurrent.futures.wait(futures, timeout=timeout)

-     def wait_until_empty(self):
+     def wait_until_empty(self) -> None:
          """
          Wait until the queue is empty and all items have been processed.
          """
          self._queue.join()

      @classmethod
-     def instance(cls: Type[Self], *args) -> Self:
+     def instance(cls, *args: Hashable) -> Self:
          """
          Get an instance of the service.

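The widened `drain_all` annotation above spells out the existing dual contract: inside a running event loop it returns something to await, otherwise it blocks via `concurrent.futures.wait` and returns the `(done, not_done)` sets. A small sketch under that assumption (function names are illustrative):

    import asyncio

    from prefect._internal.concurrency.services import QueueService

    async def shutdown_from_async() -> None:
        # In an async context drain_all hands back a coroutine
        # (asyncio.wait(...) or asyncio.sleep(0) when nothing is pending).
        await QueueService.drain_all(timeout=5)

    def shutdown_from_sync() -> None:
        # Without a running loop it blocks and returns concurrent.futures.wait's
        # (done, not_done) result.
        QueueService.drain_all(timeout=5)

    asyncio.run(shutdown_from_async())
    shutdown_from_sync()

This mixed return type is also why the `drain_on_exit_async` hunk further down narrows the value with a TYPE_CHECKING-only assert before awaiting it.
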
@@ -278,7 +266,7 @@ class QueueService(abc.ABC, Generic[T]):
          self._instances.pop(self._key, None)

      @classmethod
-     def _new_instance(cls, *args):
+     def _new_instance(cls, *args: Hashable) -> Self:
          """
          Create and start a new instance of the service.
          """
@@ -295,6 +283,87 @@ class QueueService(abc.ABC, Generic[T]):
          return instance


+ class QueueService(_QueueServiceBase[T]):
+     def send(self, item: T) -> None:
+         """
+         Send an item to this instance of the service.
+         """
+         with self._lock:
+             if self._stopped:
+                 raise RuntimeError("Cannot put items in a stopped service instance.")
+
+             logger.debug("Service %r enqueuing item %r", self, item)
+             self._queue.put_nowait(self._prepare_item(item))
+
+     def _prepare_item(self, item: T) -> T:
+         """
+         Prepare an item for submission to the service. This is called before
+         the item is sent to the service.
+
+         The default implementation returns the item unchanged.
+         """
+         return item
+
+     @abc.abstractmethod
+     async def _handle(self, item: T) -> None:
+         """
+         Process an item sent to the service.
+         """
+
+
+ class FutureQueueService(
+     _QueueServiceBase[tuple[Unpack[Ts], concurrent.futures.Future[R]]]
+ ):
+     """Queued service that provides a future that is signalled with the acquired result for each item
+
+     If there was a failure acquiring, the future result is set to the exception.
+
+     Type Parameters:
+         Ts: the tuple of types that make up sent arguments
+         R: the type returned for each item once acquired
+
+     """
+
+     async def _handle(
+         self, item: tuple[Unpack[Ts], concurrent.futures.Future[R]]
+     ) -> None:
+         send_item, future = item[:-1], item[-1]
+         try:
+             response = await self.acquire(*send_item)
+         except Exception as exc:
+             # If the request to the increment endpoint fails in a non-standard
+             # way, we need to set the future's result so it'll be re-raised in
+             # the context of the caller.
+             future.set_exception(exc)
+             raise exc
+         else:
+             future.set_result(response)
+
+     @abc.abstractmethod
+     async def acquire(self, *args: Unpack[Ts]) -> R:
+         raise NotImplementedError
+
+     def send(self, item: tuple[Unpack[Ts]]) -> concurrent.futures.Future[R]:
+         with self._lock:
+             if self._stopped:
+                 raise RuntimeError("Cannot put items in a stopped service instance.")
+
+             logger.debug("Service %r enqueuing item %r", self, item)
+             future: concurrent.futures.Future[R] = concurrent.futures.Future()
+             self._queue.put_nowait((*self._prepare_item(item), future))
+
+         return future
+
+     def _prepare_item(self, item: tuple[Unpack[Ts]]) -> tuple[Unpack[Ts]]:
+         """
+         Prepare an item for submission to the service. This is called before
+         the item is sent to the service.
+
+         The default implementation returns the item unchanged.
+         """
+         return item
+
+
  class BatchedQueueService(QueueService[T]):
      """
      A queue service that handles a batch of items instead of a single item at a time.
@@ -310,7 +379,7 @@ class BatchedQueueService(QueueService[T]):
          done = False

          while not done:
-             batch = []
+             batch: list[T] = []
              batch_size = 0

              # Pull items from the queue until we reach the batch size
@@ -359,13 +428,15 @@ class BatchedQueueService(QueueService[T]):
              )

      @abc.abstractmethod
-     async def _handle_batch(self, items: List[T]):
+     async def _handle_batch(self, items: list[T]) -> None:
          """
          Process a batch of items sent to the service.
          """

-     async def _handle(self, item: T):
-         assert False, "`_handle` should never be called for batched queue services"
+     async def _handle(self, item: T) -> NoReturn:
+         raise AssertionError(
+             "`_handle` should never be called for batched queue services"
+         )

      def _get_size(self, item: T) -> int:
          """
@@ -376,12 +447,15 @@ class BatchedQueueService(QueueService[T]):


  @contextlib.contextmanager
- def drain_on_exit(service: QueueService):
+ def drain_on_exit(service: QueueService[Any]) -> Generator[None, Any, None]:
      yield
      service.drain_all(at_exit=True)


  @contextlib.asynccontextmanager
- async def drain_on_exit_async(service: QueueService):
+ async def drain_on_exit_async(service: QueueService[Any]) -> AsyncGenerator[None, Any]:
      yield
-     await service.drain_all(at_exit=True)
+     drain_all = service.drain_all(at_exit=True)
+     if TYPE_CHECKING:
+         assert not isinstance(drain_all, tuple)
+     await drain_all
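
`FutureQueueService` is the main new piece in this module: callers enqueue an argument tuple and immediately receive a `concurrent.futures.Future` that is resolved (or failed) once the service's `acquire` coroutine runs on the global loop thread. A minimal sketch of a hypothetical subclass; `EchoService`, its arguments, and `submit_and_wait` are invented for illustration:

    import concurrent.futures

    from prefect._internal.concurrency.services import FutureQueueService

    class EchoService(FutureQueueService):
        async def acquire(self, name: str, count: int) -> str:
            # A real subclass would call out to an API here (e.g. the
            # concurrency-limit "increment" endpoint mentioned in the hunk);
            # this one just formats its arguments.
            return f"{name} x{count}"

    def submit_and_wait() -> str:
        service = EchoService.instance()
        # send() enqueues the argument tuple and returns a future for the
        # eventual acquire() result; acquisition errors are re-raised here.
        future: concurrent.futures.Future[str] = service.send(("demo", 3))
        return future.result(timeout=10)
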
prefect/_internal/concurrency/waiters.py CHANGED
@@ -10,7 +10,7 @@ import inspect
  import queue
  import threading
  from collections import deque
- from collections.abc import Awaitable
+ from collections.abc import AsyncGenerator, Awaitable, Generator
  from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar
  from weakref import WeakKeyDictionary

@@ -24,7 +24,7 @@ from prefect._internal.concurrency.primitives import Event
  T = TypeVar("T")


- # Waiters are stored in a stack for each thread
+ # Waiters are stored in a queue for each thread
  _WAITERS_BY_THREAD: "WeakKeyDictionary[threading.Thread, deque[Waiter[Any]]]" = (
      WeakKeyDictionary()
  )
@@ -49,8 +49,9 @@ class Waiter(Portal, abc.ABC, Generic[T]):
      """

      def __init__(self, call: Call[T]) -> None:
-         if not isinstance(call, Call):  # Guard against common mistake
-             raise TypeError(f"Expected call of type `Call`; got {call!r}.")
+         if not TYPE_CHECKING:
+             if not isinstance(call, Call):  # Guard against common mistake
+                 raise TypeError(f"Expected call of type `Call`; got {call!r}.")

          self._call = call
          self._owner_thread = threading.current_thread()
@@ -107,7 +108,7 @@ class SyncWaiter(Waiter[T]):
          call.set_runner(self)
          return call

-     def _handle_waiting_callbacks(self):
+     def _handle_waiting_callbacks(self) -> None:
          logger.debug("Waiter %r watching for callbacks", self)
          while True:
              callback = self._queue.get()
@@ -121,7 +122,7 @@ class SyncWaiter(Waiter[T]):
              del callback

      @contextlib.contextmanager
-     def _handle_done_callbacks(self):
+     def _handle_done_callbacks(self) -> Generator[None, Any, None]:
          try:
              yield
          finally:
@@ -195,10 +196,13 @@ class AsyncWaiter(Waiter[T]):
              call_soon_in_loop(self._loop, self._queue.put_nowait, call)
          self._early_submissions = []

-     async def _handle_waiting_callbacks(self):
+     async def _handle_waiting_callbacks(self) -> None:
          logger.debug("Waiter %r watching for callbacks", self)
          tasks: list[Awaitable[None]] = []

+         if TYPE_CHECKING:
+             assert self._queue is not None
+
          try:
              while True:
                  callback = await self._queue.get()
@@ -221,7 +225,7 @@ class AsyncWaiter(Waiter[T]):
              self._done_waiting = True

      @contextlib.asynccontextmanager
-     async def _handle_done_callbacks(self):
+     async def _handle_done_callbacks(self) -> AsyncGenerator[None, Any]:
          try:
              yield
          finally:
@@ -244,7 +248,7 @@ class AsyncWaiter(Waiter[T]):
          else:
              self._done_callbacks.append(callback)

-     def _signal_stop_waiting(self):
+     def _signal_stop_waiting(self) -> None:
          # Only send a `None` to the queue if the waiter is still blocked reading from
          # the queue. Otherwise, it's possible that the event loop is stopped.
          if not self._done_waiting:
prefect/_internal/pydantic/annotations/pendulum.py CHANGED
@@ -3,39 +3,39 @@ This file contains compat code to handle pendulum.DateTime objects during jsonsc
  generation and validation.
  """

- import typing as t
+ from typing import Annotated, Any, Union

  import pendulum
  from pydantic import GetCoreSchemaHandler, GetJsonSchemaHandler
  from pydantic.json_schema import JsonSchemaValue
  from pydantic_core import core_schema
- from typing_extensions import Annotated


  class _PendulumDateTimeAnnotation:
-     _pendulum_type: t.Type[
-         t.Union[pendulum.DateTime, pendulum.Date, pendulum.Time]
+     _pendulum_type: type[
+         Union[pendulum.DateTime, pendulum.Date, pendulum.Time, pendulum.Duration]
      ] = pendulum.DateTime

      _pendulum_types_to_schemas = {
          pendulum.DateTime: core_schema.datetime_schema(),
          pendulum.Date: core_schema.date_schema(),
+         pendulum.Time: core_schema.time_schema(),
          pendulum.Duration: core_schema.timedelta_schema(),
      }

      @classmethod
      def __get_pydantic_core_schema__(
          cls,
-         _source_type: t.Any,
+         _source_type: Any,
          _handler: GetCoreSchemaHandler,
      ) -> core_schema.CoreSchema:
          def validate_from_str(
              value: str,
-         ) -> t.Union[pendulum.DateTime, pendulum.Date, pendulum.Time]:
+         ) -> Union[pendulum.DateTime, pendulum.Date, pendulum.Time, pendulum.Duration]:
              return pendulum.parse(value)

          def to_str(
-             value: t.Union[pendulum.DateTime, pendulum.Date, pendulum.Time],
+             value: Union[pendulum.DateTime, pendulum.Date, pendulum.Time],
          ) -> str:
              return value.isoformat()

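For context, `_PendulumDateTimeAnnotation` is the metadata class that gets attached to pendulum types via `Annotated` so that ordinary pydantic models can validate and serialize them. A usage sketch under that assumption; the `PendulumDateTime` alias and the `Window` model are invented here, only the private helper and its module path come from the hunk above:

    from typing import Annotated

    import pendulum
    from pydantic import BaseModel

    from prefect._internal.pydantic.annotations.pendulum import (
        _PendulumDateTimeAnnotation,
    )

    # Attach the core-schema hooks shown above to pendulum.DateTime.
    PendulumDateTime = Annotated[pendulum.DateTime, _PendulumDateTimeAnnotation]

    class Window(BaseModel):
        start: PendulumDateTime

    window = Window(start=pendulum.datetime(2025, 1, 1, tz="UTC"))
    print(window.model_dump_json())  # string output produced via to_str()/isoformat()
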
prefect/_internal/pytz.py CHANGED
@@ -14,11 +14,12 @@ License: MIT
  """

  try:
-     import pytz  # noqa: F401
+     import pytz as pytz
  except ImportError:
-     HAS_PYTZ = False
+     _has_pytz = False
  else:
-     HAS_PYTZ = True
+     _has_pytz = True
+ HAS_PYTZ = _has_pytz


  all_timezones_set = {
prefect/_internal/retries.py CHANGED
@@ -1,6 +1,7 @@
  import asyncio
+ from collections.abc import Coroutine
  from functools import wraps
- from typing import Callable, Optional, Tuple, Type, TypeVar
+ from typing import Any, Callable, Optional, TypeVar

  from typing_extensions import ParamSpec

@@ -25,9 +26,11 @@ def retry_async_fn(
      ] = exponential_backoff_with_jitter,
      base_delay: float = 1,
      max_delay: float = 10,
-     retry_on_exceptions: Tuple[Type[Exception], ...] = (Exception,),
+     retry_on_exceptions: tuple[type[Exception], ...] = (Exception,),
      operation_name: Optional[str] = None,
- ) -> Callable[[Callable[P, R]], Callable[P, R]]:
+ ) -> Callable[
+     [Callable[P, Coroutine[Any, Any, R]]], Callable[P, Coroutine[Any, Any, Optional[R]]]
+ ]:
      """A decorator for retrying an async function.

      Args:
@@ -43,9 +46,11 @@
          the function name. If None, uses the function name.
      """

-     def decorator(func: Callable[P, R]) -> Callable[P, R]:
+     def decorator(
+         func: Callable[P, Coroutine[Any, Any, R]],
+     ) -> Callable[P, Coroutine[Any, Any, Optional[R]]]:
          @wraps(func)
-         async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+         async def wrapper(*args: P.args, **kwargs: P.kwargs) -> Optional[R]:
              name = operation_name or func.__name__
              for attempt in range(max_attempts):
                  try:
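
The updated signature above pins down that `retry_async_fn` decorates coroutine functions and that the wrapper is annotated as returning `Optional[R]`. A usage sketch with an invented flaky coroutine; the keyword arguments are the ones visible in (or referenced by) these hunks:

    import asyncio

    from prefect._internal.retries import retry_async_fn

    @retry_async_fn(
        max_attempts=3,
        base_delay=0.1,
        max_delay=1.0,
        retry_on_exceptions=(ConnectionError,),
        operation_name="fetch-data",
    )
    async def fetch_data() -> dict[str, int]:
        # Hypothetical flaky operation; a real caller might hit an HTTP API
        # and raise ConnectionError on transient failures.
        return {"ok": 1}

    print(asyncio.run(fetch_data()))
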
prefect/_internal/schemas/bases.py CHANGED
@@ -4,18 +4,16 @@ Utilities for creating and working with Prefect REST API schemas.

  import datetime
  import os
- from typing import Any, ClassVar, Generator, Optional, Set, TypeVar, cast
+ from typing import Any, ClassVar, Optional, TypeVar, cast
  from uuid import UUID, uuid4

  import pendulum
- from pydantic import (
-     BaseModel,
-     ConfigDict,
-     Field,
- )
+ from pydantic import BaseModel, ConfigDict, Field
+ from rich.repr import RichReprResult
  from typing_extensions import Self

  from prefect.types import DateTime
+ from prefect.utilities.generics import validate_list

  T = TypeVar("T")

@@ -32,7 +30,7 @@ class PrefectBaseModel(BaseModel):
      subtle unintentional testing errors.
      """

-     _reset_fields: ClassVar[Set[str]] = set()
+     _reset_fields: ClassVar[set[str]] = set()

      model_config: ClassVar[ConfigDict] = ConfigDict(
          ser_json_timedelta="float",
@@ -59,7 +57,18 @@ class PrefectBaseModel(BaseModel):
          else:
              return copy_dict == other

-     def __rich_repr__(self) -> Generator[tuple[str, Any, Any], None, None]:
+     @classmethod
+     def model_validate_list(
+         cls,
+         obj: Any,
+         *,
+         strict: Optional[bool] = None,
+         from_attributes: Optional[bool] = None,
+         context: Optional[Any] = None,
+     ) -> list[Self]:
+         return validate_list(cls, obj)
+
+     def __rich_repr__(self) -> RichReprResult:
          # Display all of the fields in the model if they differ from the default value
          for name, field in self.model_fields.items():
              value = getattr(self, name)
@@ -102,7 +111,7 @@ class IDBaseModel(PrefectBaseModel):
      The ID is reset on copy() and not included in equality comparisons.
      """

-     _reset_fields: ClassVar[Set[str]] = {"id"}
+     _reset_fields: ClassVar[set[str]] = {"id"}
      id: UUID = Field(default_factory=uuid4)


@@ -115,7 +124,7 @@ class ObjectBaseModel(IDBaseModel):
      equality comparisons.
      """

-     _reset_fields: ClassVar[Set[str]] = {"id", "created", "updated"}
+     _reset_fields: ClassVar[set[str]] = {"id", "created", "updated"}
      model_config: ClassVar[ConfigDict] = ConfigDict(from_attributes=True)

      created: Optional[DateTime] = Field(default=None, repr=False)
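
`model_validate_list` is the notable addition here: it mirrors pydantic's `model_validate` but accepts a JSON-style list and returns typed instances, delegating to the new `prefect.utilities.generics.validate_list` helper that also appears in this release's file list. A short sketch with an invented model:

    from prefect._internal.schemas.bases import PrefectBaseModel

    class Item(PrefectBaseModel):
        name: str
        count: int = 0

    items = Item.model_validate_list([{"name": "a"}, {"name": "b", "count": 2}])
    assert [item.count for item in items] == [0, 2]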