prefect-client 3.4.1.dev5__py3-none-any.whl → 3.4.2.dev2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prefect/_build_info.py CHANGED
@@ -1,5 +1,5 @@
  # Generated by versioningit
- __version__ = "3.4.1.dev5"
- __build_date__ = "2025-05-08 08:08:59.726919+00:00"
- __git_commit__ = "7e7a18743f5fd0bd4e513ca22c5408e7176390b8"
+ __version__ = "3.4.2.dev2"
+ __build_date__ = "2025-05-13 08:08:51.669064+00:00"
+ __git_commit__ = "10cd426c833753092a85fdcb242b8f92038b7bc0"
  __dirty__ = False
prefect/_internal/compatibility/blocks.py ADDED
@@ -0,0 +1,27 @@
+ import inspect
+ from typing import Any, Union
+
+ from prefect.filesystems import NullFileSystem, WritableFileSystem
+
+
+ async def call_explicitly_async_block_method(
+     block: Union[WritableFileSystem, NullFileSystem],
+     method: str,
+     args: tuple[Any, ...],
+     kwargs: dict[str, Any],
+ ) -> Any:
+     """
+     TODO: remove this once we have explicit async methods on all storage blocks
+
+     see https://github.com/PrefectHQ/prefect/issues/15008
+     """
+     if hasattr(block, f"a{method}"):  # explicit async method
+         return await getattr(block, f"a{method}")(*args, **kwargs)
+     elif hasattr(getattr(block, method, None), "aio"):  # sync_compatible
+         return await getattr(block, method).aio(block, *args, **kwargs)
+     else:  # should not happen in prefect, but users can override impls
+         maybe_coro = getattr(block, method)(*args, **kwargs)
+         if inspect.isawaitable(maybe_coro):
+             return await maybe_coro
+         else:
+             return maybe_coro
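The helper prefers an explicit async method (`aread_path`/`awrite_path`), then a `sync_compatible` wrapper's `.aio`, and finally awaits a plain call if it returns a coroutine. A minimal sketch of exercising it (the `LocalFileSystem` block and basepath are illustrative, not part of this diff):

```python
import asyncio

from prefect._internal.compatibility.blocks import call_explicitly_async_block_method
from prefect.filesystems import LocalFileSystem  # illustrative storage block


async def main() -> None:
    fs = LocalFileSystem(basepath="/tmp/prefect-blocks-demo")
    # Dispatches to an async implementation of write_path/read_path when one
    # exists, otherwise falls back to awaiting the plain method call.
    await call_explicitly_async_block_method(fs, "write_path", ("example.txt",), {"content": b"hello"})
    print(await call_explicitly_async_block_method(fs, "read_path", ("example.txt",), {}))


asyncio.run(main())
```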
prefect/_internal/schemas/bases.py CHANGED
@@ -10,6 +10,7 @@ from pydantic import BaseModel, ConfigDict, Field
  from rich.repr import RichReprResult
  from typing_extensions import Self

+ from prefect._internal.uuid7 import uuid7
  from prefect.types._datetime import (
      DateTime,
      human_friendly_diff,
@@ -100,7 +101,7 @@ class PrefectBaseModel(BaseModel):

  class IDBaseModel(PrefectBaseModel):
      """
-     A PrefectBaseModel with an auto-generated UUID ID value.
+     A PrefectBaseModel with a randomly-generated UUID ID value.

      The ID is reset on copy() and not included in equality comparisons.
      """
@@ -109,6 +110,15 @@ class IDBaseModel(PrefectBaseModel):
      id: UUID = Field(default_factory=uuid4)


+ class TimeSeriesBaseModel(IDBaseModel):
+     """
+     A PrefectBaseModel with a time-oriented UUIDv7 ID value. Used for models that
+     operate like timeseries, such as runs, states, and logs.
+     """
+
+     id: UUID = Field(default_factory=uuid7)
+
+
  class ObjectBaseModel(IDBaseModel):
      """
      A PrefectBaseModel with an auto-generated UUID ID value and created /
prefect/_internal/uuid7.py ADDED
@@ -0,0 +1,11 @@
+ from typing import cast
+ from uuid import UUID
+
+ from uuid_extensions import uuid7 as _uuid7  # pyright: ignore[reportMissingTypeStubs]
+
+
+ def uuid7() -> UUID:
+     return cast(UUID, _uuid7())
+
+
+ __all__ = ["uuid7"]
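For context, a brief illustrative sketch (not part of the diff): UUIDv7 places a millisecond timestamp in its most significant bits, so identifiers generated later compare greater than earlier ones, which keeps run, state, and log IDs roughly insertion-ordered.

```python
import time

from prefect._internal.uuid7 import uuid7

first = uuid7()
time.sleep(0.01)  # move to a later millisecond so the embedded timestamps differ
second = uuid7()

# UUIDs compare by their 128-bit integer value; the timestamp occupies the high
# bits, so later ids sort after earlier ones, numerically and as strings.
assert first < second
assert str(first) < str(second)
```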
prefect/client/schemas/objects.py CHANGED
@@ -15,7 +15,7 @@ from typing import (
      Union,
      overload,
  )
- from uuid import UUID, uuid4
+ from uuid import UUID

  import orjson
  from pydantic import (
@@ -36,7 +36,11 @@ from typing_extensions import Literal, Self, TypeVar

  from prefect._internal.compatibility.async_dispatch import async_dispatch
  from prefect._internal.compatibility.migration import getattr_migration
- from prefect._internal.schemas.bases import ObjectBaseModel, PrefectBaseModel
+ from prefect._internal.schemas.bases import (
+     ObjectBaseModel,
+     PrefectBaseModel,
+     TimeSeriesBaseModel,
+ )
  from prefect._internal.schemas.fields import CreatedBy, UpdatedBy
  from prefect._internal.schemas.validators import (
      get_or_create_run_name,
@@ -48,6 +52,7 @@ from prefect._internal.schemas.validators import (
      validate_not_negative,
      validate_parent_and_ref_diff,
  )
+ from prefect._internal.uuid7 import uuid7
  from prefect._result_records import ResultRecordMetadata
  from prefect.client.schemas.schedules import SCHEDULE_TYPES
  from prefect.settings import PREFECT_CLOUD_API_URL, PREFECT_CLOUD_UI_URL
@@ -184,7 +189,7 @@ def data_discriminator(x: Any) -> str:
      return "Any"


- class State(ObjectBaseModel, Generic[R]):
+ class State(TimeSeriesBaseModel, ObjectBaseModel, Generic[R]):
      """
      The state of a run.
      """
@@ -415,7 +420,7 @@ class State(ObjectBaseModel, Generic[R]):
          """
          return self.model_copy(
              update={
-                 "id": uuid4(),
+                 "id": uuid7(),
                  "created": now("UTC"),
                  "updated": now("UTC"),
                  "timestamp": now("UTC"),
@@ -511,7 +516,7 @@ class FlowRunPolicy(PrefectBaseModel):
          return values


- class FlowRun(ObjectBaseModel):
+ class FlowRun(TimeSeriesBaseModel, ObjectBaseModel):
      name: str = Field(
          default_factory=lambda: generate_slug(2),
          description=(
@@ -767,7 +772,7 @@ class Constant(TaskRunInput):
      type: str


- class TaskRun(ObjectBaseModel):
+ class TaskRun(TimeSeriesBaseModel, ObjectBaseModel):
      name: str = Field(
          default_factory=lambda: generate_slug(2), examples=["my-task-run"]
      )
@@ -1307,7 +1312,7 @@ class SavedSearch(ObjectBaseModel):
      )


- class Log(ObjectBaseModel):
+ class Log(TimeSeriesBaseModel, ObjectBaseModel):
      """An ORM representation of log data."""

      name: str = Field(default=..., description="The logger name.")
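A rough illustration of the user-visible effect on these models (only the required `flow_id` is supplied; everything else is defaulted):

```python
import time
from uuid import uuid4

from prefect.client.schemas.objects import FlowRun

earlier = FlowRun(flow_id=uuid4())
time.sleep(0.01)
later = FlowRun(flow_id=uuid4())

assert earlier.id.version == 7  # default ids now come from uuid7 rather than uuid4
assert earlier.id < later.id    # so they sort by creation time
```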
prefect/events/filters.py CHANGED
@@ -1,5 +1,7 @@
+ from __future__ import annotations
+
  import datetime
- from typing import Optional
+ from typing import Optional, Union
  from uuid import UUID

  from pydantic import Field
@@ -43,11 +45,21 @@ class EventDataFilter(PrefectBaseModel, extra="forbid"):  # type: ignore[call-arg]
      """A base class for filtering event data."""

      def get_filters(self) -> list["EventDataFilter"]:
-         filters: list["EventDataFilter"] = [
-             filter
-             for filter in [getattr(self, name) for name in type(self).model_fields]
-             if isinstance(filter, EventDataFilter)
-         ]
+         filters: list[EventDataFilter] = []
+         for filter in [
+             getattr(self, name) for name in self.__class__.model_fields.keys()
+         ]:
+             # Any embedded list of filters are flattened and thus ANDed together
+             subfilters: list[EventDataFilter] = (
+                 filter if isinstance(filter, list) else [filter]
+             )
+
+             for subfilter in subfilters:
+                 if not isinstance(subfilter, EventDataFilter):
+                     continue
+
+                 filters.append(subfilter)
+
          return filters

      def includes(self, event: Event) -> bool:
@@ -233,18 +245,20 @@ class EventFilter(EventDataFilter):
          default=None,
          description="Filter criteria for the event name",
      )
-     any_resource: Optional[EventAnyResourceFilter] = Field(
-         default=None,
-         description="Filter criteria for any resource involved in the event",
-     )
      resource: Optional[EventResourceFilter] = Field(
          default=None,
          description="Filter criteria for the resource of the event",
      )
-     related: Optional[EventRelatedFilter] = Field(
+     related: Optional[Union[EventRelatedFilter, list[EventRelatedFilter]]] = Field(
          default=None,
          description="Filter criteria for the related resources of the event",
      )
+     any_resource: Optional[
+         Union[EventAnyResourceFilter, list[EventAnyResourceFilter]]
+     ] = Field(
+         default=None,
+         description="Filter criteria for any resource involved in the event",
+     )
      id: EventIDFilter = Field(
          default_factory=lambda: EventIDFilter(id=[]),
          description="Filter criteria for the events' ID",
prefect/events/schemas/automations.py CHANGED
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import abc
  import textwrap
  from datetime import timedelta
@@ -103,7 +105,7 @@ class ResourceTrigger(Trigger, abc.ABC):
          default_factory=lambda: ResourceSpecification.model_validate({}),
          description="Labels for resources which this trigger will match.",
      )
-     match_related: ResourceSpecification = Field(
+     match_related: Union[ResourceSpecification, list[ResourceSpecification]] = Field(
          default_factory=lambda: ResourceSpecification.model_validate({}),
          description="Labels for related resources which this trigger will match.",
      )
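Triggers get the same treatment: `match_related` may now be a list of specifications. A hedged sketch (the `EventTrigger` fields used here are assumptions based on the existing schema, not on this diff):

```python
from prefect.events.schemas.automations import EventTrigger
from prefect.events.schemas.events import ResourceSpecification

# A reactive trigger whose related resources must satisfy both specifications
trigger = EventTrigger(
    expect={"prefect.flow-run.Completed"},
    match_related=[
        ResourceSpecification.model_validate({"prefect.resource.role": "deployment"}),
        ResourceSpecification.model_validate({"prefect.resource.name": "daily-etl"}),
    ],
)
```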
prefect/events/schemas/events.py CHANGED
@@ -13,7 +13,7 @@ from typing import (
      Tuple,
      Union,
  )
- from uuid import UUID, uuid4
+ from uuid import UUID

  from pydantic import (
      AfterValidator,
@@ -26,6 +26,7 @@ from typing_extensions import Annotated, Self

  import prefect.types._datetime
  from prefect._internal.schemas.bases import PrefectBaseModel
+ from prefect._internal.uuid7 import uuid7
  from prefect.logging import get_logger
  from prefect.settings import (
      PREFECT_EVENTS_MAXIMUM_LABELS_PER_RESOURCE,
@@ -135,7 +136,7 @@ class Event(PrefectBaseModel):
          description="An open-ended set of data describing what happened",
      )
      id: UUID = Field(
-         default_factory=uuid4,
+         default_factory=uuid7,
          description="The client-provided identifier of this event",
      )
      follows: Optional[UUID] = Field(
prefect/results.py CHANGED
@@ -1,11 +1,11 @@
  from __future__ import annotations

  import asyncio
- import inspect
  import os
  import socket
  import threading
  import uuid
+ from datetime import datetime
  from functools import partial
  from operator import methodcaller
  from pathlib import Path
@@ -34,6 +34,8 @@ from typing_extensions import ParamSpec, Self
  import prefect
  import prefect.types._datetime
  from prefect._internal.compatibility.async_dispatch import async_dispatch
+ from prefect._internal.compatibility.blocks import call_explicitly_async_block_method
+ from prefect._internal.compatibility.deprecated import deprecated_callable
  from prefect._internal.concurrency.event_loop import get_running_loop
  from prefect._result_records import R, ResultRecord, ResultRecordMetadata
  from prefect.blocks.core import Block
@@ -285,29 +287,6 @@ def _format_user_supplied_storage_key(key: str) -> str:
      return key.format(**runtime_vars, parameters=prefect.runtime.task_run.parameters)


- async def _call_explicitly_async_block_method(
-     block: WritableFileSystem | NullFileSystem,
-     method: str,
-     args: tuple[Any, ...],
-     kwargs: dict[str, Any],
- ) -> Any:
-     """
-     TODO: remove this once we have explicit async methods on all storage blocks
-
-     see https://github.com/PrefectHQ/prefect/issues/15008
-     """
-     if hasattr(block, f"a{method}"):  # explicit async method
-         return await getattr(block, f"a{method}")(*args, **kwargs)
-     elif hasattr(getattr(block, method, None), "aio"):  # sync_compatible
-         return await getattr(block, method).aio(block, *args, **kwargs)
-     else:  # should not happen in prefect, but users can override impls
-         maybe_coro = getattr(block, method)(*args, **kwargs)
-         if inspect.isawaitable(maybe_coro):
-             return await maybe_coro
-         else:
-             return maybe_coro
-
-
  T = TypeVar("T")


@@ -505,7 +484,7 @@ class ResultStore(BaseModel):
              # TODO: Add an `exists` method to commonly used storage blocks
              # so the entire payload doesn't need to be read
              try:
-                 metadata_content = await _call_explicitly_async_block_method(
+                 metadata_content = await call_explicitly_async_block_method(
                      self.metadata_storage, "read_path", (key,), {}
                  )
                  if metadata_content is None:
@@ -516,7 +495,7 @@
                  return False
          else:
              try:
-                 content = await _call_explicitly_async_block_method(
+                 content = await call_explicitly_async_block_method(
                      self.result_storage, "read_path", (key,), {}
                  )
                  if content is None:
@@ -601,7 +580,7 @@
              self.result_storage = await aget_default_result_storage()

          if self.metadata_storage is not None:
-             metadata_content = await _call_explicitly_async_block_method(
+             metadata_content = await call_explicitly_async_block_method(
                  self.metadata_storage,
                  "read_path",
                  (key,),
@@ -611,7 +590,7 @@
              assert metadata.storage_key is not None, (
                  "Did not find storage key in metadata"
              )
-             result_content = await _call_explicitly_async_block_method(
+             result_content = await call_explicitly_async_block_method(
                  self.result_storage,
                  "read_path",
                  (metadata.storage_key,),
@@ -624,7 +603,7 @@
              )
              await emit_result_read_event(self, resolved_key_path)
          else:
-             content = await _call_explicitly_async_block_method(
+             content = await call_explicitly_async_block_method(
                  self.result_storage,
                  "read_path",
                  (key,),
@@ -806,13 +785,13 @@

          # If metadata storage is configured, write result and metadata separately
          if self.metadata_storage is not None:
-             await _call_explicitly_async_block_method(
+             await call_explicitly_async_block_method(
                  self.result_storage,
                  "write_path",
                  (result_record.metadata.storage_key,),
                  {"content": result_record.serialize_result()},
              )
-             await _call_explicitly_async_block_method(
+             await call_explicitly_async_block_method(
                  self.metadata_storage,
                  "write_path",
                  (base_key,),
@@ -821,7 +800,7 @@
              await emit_result_write_event(self, result_record.metadata.storage_key)
          # Otherwise, write the result metadata and result together
          else:
-             await _call_explicitly_async_block_method(
+             await call_explicitly_async_block_method(
                  self.result_storage,
                  "write_path",
                  (result_record.metadata.storage_key,),
@@ -998,6 +977,11 @@ class ResultStore(BaseModel):

      # TODO: These two methods need to find a new home

+     @deprecated_callable(
+         start_date=datetime(2025, 5, 10),
+         end_date=datetime(2025, 11, 10),
+         help="Use `store_parameters` from `prefect.task_worker` instead.",
+     )
      @sync_compatible
      async def store_parameters(self, identifier: UUID, parameters: dict[str, Any]):
          record = ResultRecord(
@@ -1007,21 +991,26 @@
              ),
          )

-         await _call_explicitly_async_block_method(
+         await call_explicitly_async_block_method(
              self.result_storage,
              "write_path",
              (f"parameters/{identifier}",),
              {"content": record.serialize()},
          )

+     @deprecated_callable(
+         start_date=datetime(2025, 5, 10),
+         end_date=datetime(2025, 11, 10),
+         help="Use `read_parameters` from `prefect.task_worker` instead.",
+     )
      @sync_compatible
      async def read_parameters(self, identifier: UUID) -> dict[str, Any]:
          if self.result_storage is None:
              raise ValueError(
                  "Result store is not configured - must have a result storage block to read parameters"
              )
-         record: ResultRecord[Any] = ResultRecord.deserialize(
-             await _call_explicitly_async_block_method(
+         record: ResultRecord[Any] = ResultRecord[Any].deserialize(
+             await call_explicitly_async_block_method(
                  self.result_storage,
                  "read_path",
                  (f"parameters/{identifier}",),
prefect/server/api/workers.py CHANGED
@@ -3,7 +3,7 @@ Routes for interacting with work queue objects.
  """

  from typing import TYPE_CHECKING, List, Optional
- from uuid import UUID, uuid4
+ from uuid import UUID

  import sqlalchemy as sa
  from fastapi import (
@@ -20,6 +20,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
  import prefect.server.api.dependencies as dependencies
  import prefect.server.models as models
  import prefect.server.schemas as schemas
+ from prefect._internal.uuid7 import uuid7
  from prefect.server.api.validation import validate_job_variable_defaults_for_work_pool
  from prefect.server.database import PrefectDBInterface, provide_database_interface
  from prefect.server.models.deployments import mark_deployments_ready
@@ -184,7 +185,7 @@ async def create_work_pool(
          )

          await emit_work_pool_status_event(
-             event_id=uuid4(),
+             event_id=uuid7(),
              occurred=now("UTC"),
              pre_update_work_pool=None,
              work_pool=model,
prefect/task_runners.py CHANGED
@@ -21,6 +21,7 @@ from typing import (
  from typing_extensions import ParamSpec, Self, TypeVar

+ from prefect._internal.uuid7 import uuid7
  from prefect.client.schemas.objects import TaskRunInput
  from prefect.exceptions import MappingLengthMismatch, MappingMissingIterable
  from prefect.futures import (
@@ -290,7 +291,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[R]]):
          from prefect.context import FlowRunContext
          from prefect.task_engine import run_task_async, run_task_sync

-         task_run_id = uuid.uuid4()
+         task_run_id = uuid7()
          cancel_event = threading.Event()
          self._cancel_events[task_run_id] = cancel_event
          context = copy_context()
prefect/task_worker.py CHANGED
@@ -22,17 +22,20 @@ from websockets.exceptions import InvalidStatus
  import prefect.types._datetime
  from prefect import Task
+ from prefect._internal.compatibility.blocks import call_explicitly_async_block_method
  from prefect._internal.concurrency.api import create_call, from_sync
  from prefect.cache_policies import DEFAULT, NO_CACHE
  from prefect.client.orchestration import get_client
  from prefect.client.schemas.objects import TaskRun
  from prefect.client.subscriptions import Subscription
  from prefect.logging.loggers import get_logger
- from prefect.results import ResultStore, get_or_create_default_task_scheduling_storage
- from prefect.settings import (
-     PREFECT_API_URL,
-     PREFECT_TASK_SCHEDULING_DELETE_FAILED_SUBMISSIONS,
+ from prefect.results import (
+     ResultRecord,
+     ResultRecordMetadata,
+     ResultStore,
+     get_or_create_default_task_scheduling_storage,
  )
+ from prefect.settings import get_current_settings
  from prefect.states import Pending
  from prefect.task_engine import run_task_async, run_task_sync
  from prefect.types import DateTime
@@ -43,6 +46,7 @@ from prefect.utilities.processutils import (
      _register_signal,  # pyright: ignore[reportPrivateUsage]
  )
  from prefect.utilities.services import start_client_metrics_server
+ from prefect.utilities.timeout import timeout_async
  from prefect.utilities.urls import url_for

  if TYPE_CHECKING:
@@ -170,9 +174,13 @@ class TaskWorker:
          sys.exit(0)

      @sync_compatible
-     async def start(self) -> None:
+     async def start(self, timeout: Optional[float] = None) -> None:
          """
          Starts a task worker, which runs the tasks provided in the constructor.
+
+         Args:
+             timeout: If provided, the task worker will exit after the given number of
+                 seconds. Defaults to None, meaning the task worker will run indefinitely.
          """
          _register_signal(signal.SIGTERM, self.handle_sigterm)
@@ -181,14 +189,16 @@
          async with asyncnullcontext() if self.started else self:
              logger.info("Starting task worker...")
              try:
-                 await self._subscribe_to_task_scheduling()
+                 with timeout_async(timeout):
+                     await self._subscribe_to_task_scheduling()
              except InvalidStatus as exc:
                  if exc.response.status_code == 403:
                      logger.error(
                          "403: Could not establish a connection to the `/task_runs/subscriptions/scheduled`"
-                         f" endpoint found at:\n\n {PREFECT_API_URL.value()}"
-                         "\n\nPlease double-check the values of your"
-                         " `PREFECT_API_URL` and `PREFECT_API_KEY` environment variables."
+                         f" endpoint found at:\n\n {get_current_settings().api.url}"
+                         "\n\nPlease double-check the values of"
+                         " `PREFECT_API_AUTH_STRING` and `PREFECT_SERVER_API_AUTH_STRING` if running a Prefect server "
+                         "or `PREFECT_API_URL` and `PREFECT_API_KEY` environment variables if using Prefect Cloud."
                      )
                  else:
                      raise
@@ -228,7 +238,7 @@
          return True

      async def _subscribe_to_task_scheduling(self):
-         base_url = PREFECT_API_URL.value()
+         base_url = get_current_settings().api.url
          if base_url is None:
              raise ValueError(
                  "`PREFECT_API_URL` must be set to use the task worker. "
@@ -282,7 +292,7 @@
          task = next((t for t in self.tasks if t.task_key == task_run.task_key), None)

          if not task:
-             if PREFECT_TASK_SCHEDULING_DELETE_FAILED_SUBMISSIONS:
+             if get_current_settings().tasks.scheduling.delete_failed_submissions:
                  logger.warning(
                      f"Task {task_run.name!r} not found in task worker registry."
                  )
@@ -298,12 +308,18 @@
          run_context = None
          if should_try_to_read_parameters(task, task_run):
              parameters_id = task_run.state.state_details.task_parameters_id
+             if parameters_id is None:
+                 logger.warning(
+                     f"Task run {task_run.id!r} has no parameters ID. Skipping parameter retrieval."
+                 )
+                 return
+
              task.persist_result = True
              store = await ResultStore(
                  result_storage=await get_or_create_default_task_scheduling_storage()
              ).update_for_task(task)
              try:
-                 run_data: dict[str, Any] = await store.read_parameters(parameters_id)
+                 run_data: dict[str, Any] = await read_parameters(store, parameters_id)
                  parameters = run_data.get("parameters", {})
                  wait_for = run_data.get("wait_for", [])
                  run_context = run_data.get("context", None)
@@ -312,7 +328,7 @@
                      f"Failed to read parameters for task run {task_run.id!r}",
                      exc_info=exc,
                  )
-                 if PREFECT_TASK_SCHEDULING_DELETE_FAILED_SUBMISSIONS.value():
+                 if get_current_settings().tasks.scheduling.delete_failed_submissions:
                      logger.info(
                          f"Deleting task run {task_run.id!r} because it failed to submit"
                      )
@@ -421,6 +437,7 @@
      *tasks: Task[P, R],
      limit: Optional[int] = 10,
      status_server_port: Optional[int] = None,
+     timeout: Optional[float] = None,
  ):
      """Serve the provided tasks so that their runs may be submitted to
      and executed in the engine. Tasks do not need to be within a flow run context to be
@@ -434,6 +451,8 @@
      - status_server_port: An optional port on which to start an HTTP server
          exposing status information about the task worker. If not provided, no
          status server will run.
+     - timeout: If provided, the task worker will exit after the given number of
+         seconds. Defaults to None, meaning the task worker will run indefinitely.

      Example:
      ```python
@@ -469,7 +488,13 @@
      status_server_task = loop.create_task(server.serve())

      try:
-         await task_worker.start()
+         await task_worker.start(timeout=timeout)
+
+     except TimeoutError:
+         if timeout is not None:
+             logger.info(f"Task worker timed out after {timeout} seconds. Exiting...")
+         else:
+             raise

      except BaseExceptionGroup as exc:  # novermin
          exceptions = exc.exceptions
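The new `timeout` argument threads through `serve()` into `TaskWorker.start()`; a small sketch of starting a bounded worker (the task itself is illustrative):

```python
from prefect import task
from prefect.task_worker import serve


@task
def say(message: str = "Hello") -> None:
    print(message)


if __name__ == "__main__":
    # Listens for scheduled runs of `say`, then exits after roughly 30 seconds
    # instead of running indefinitely.
    serve(say, timeout=30)
```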
@@ -492,3 +517,59 @@
              await status_server_task
          except asyncio.CancelledError:
              pass
+
+
+ async def store_parameters(
+     result_store: ResultStore, identifier: UUID, parameters: dict[str, Any]
+ ) -> None:
+     """Store parameters for a task run in the result store.
+
+     Args:
+         result_store: The result store to store the parameters in.
+         identifier: The identifier of the task run.
+         parameters: The parameters to store.
+     """
+     if result_store.result_storage is None:
+         raise ValueError(
+             "Result store is not configured - must have a result storage block to store parameters"
+         )
+     record = ResultRecord(
+         result=parameters,
+         metadata=ResultRecordMetadata(
+             serializer=result_store.serializer, storage_key=str(identifier)
+         ),
+     )
+
+     await call_explicitly_async_block_method(
+         result_store.result_storage,
+         "write_path",
+         (f"parameters/{identifier}",),
+         {"content": record.serialize()},
+     )
+
+
+ async def read_parameters(
+     result_store: ResultStore, identifier: UUID
+ ) -> dict[str, Any]:
+     """Read parameters for a task run from the result store.
+
+     Args:
+         result_store: The result store to read the parameters from.
+         identifier: The identifier of the task run.
+
+     Returns:
+         The parameters for the task run.
+     """
+     if result_store.result_storage is None:
+         raise ValueError(
+             "Result store is not configured - must have a result storage block to read parameters"
+         )
+     record: ResultRecord[Any] = ResultRecord[Any].deserialize(
+         await call_explicitly_async_block_method(
+             result_store.result_storage,
+             "read_path",
+             (f"parameters/{identifier}",),
+             {},
+         )
+     )
+     return record.result
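These module-level helpers take over from the now-deprecated `ResultStore.store_parameters` / `read_parameters` methods. A hedged usage sketch (the local storage block and basepath are illustrative):

```python
import asyncio
from uuid import uuid4

from prefect.filesystems import LocalFileSystem
from prefect.results import ResultStore
from prefect.task_worker import read_parameters, store_parameters


async def main() -> None:
    store = ResultStore(result_storage=LocalFileSystem(basepath="/tmp/prefect-params-demo"))
    identifier = uuid4()
    await store_parameters(store, identifier, {"parameters": {"x": 1}, "wait_for": []})
    print(await read_parameters(store, identifier))  # {'parameters': {'x': 1}, 'wait_for': []}


asyncio.run(main())
```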
prefect/tasks.py CHANGED
@@ -32,6 +32,7 @@ from uuid import UUID, uuid4
  from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypeIs

  import prefect.states
+ from prefect._internal.uuid7 import uuid7
  from prefect.cache_policies import DEFAULT, NO_CACHE, CachePolicy
  from prefect.client.orchestration import get_client
  from prefect.client.schemas import TaskRun
@@ -809,6 +810,8 @@ class Task(Generic[P, R]):
          # store parameters for background tasks so that task worker
          # can retrieve them at runtime
          if deferred and (parameters or wait_for):
+             from prefect.task_worker import store_parameters
+
              parameters_id = uuid4()
              state.state_details.task_parameters_id = parameters_id

@@ -824,7 +827,7 @@
              data["parameters"] = parameters
              if wait_for:
                  data["wait_for"] = wait_for
-             await store.store_parameters(parameters_id, data)
+             await store_parameters(store, parameters_id, data)

          # collect task inputs
          task_inputs = {
@@ -910,6 +913,8 @@
          # store parameters for background tasks so that task worker
          # can retrieve them at runtime
          if deferred and (parameters or wait_for):
+             from prefect.task_worker import store_parameters
+
              parameters_id = uuid4()
              state.state_details.task_parameters_id = parameters_id

@@ -925,7 +930,7 @@
              data["parameters"] = parameters
              if wait_for:
                  data["wait_for"] = wait_for
-             await store.store_parameters(parameters_id, data)
+             await store_parameters(store, parameters_id, data)

          # collect task inputs
          task_inputs = {
@@ -953,7 +958,7 @@
              if flow_run_context and flow_run_context.flow_run
              else None
          )
-         task_run_id = id or uuid4()
+         task_run_id = id or uuid7()
          state = prefect.states.Pending(
              state_details=StateDetails(
                  task_run_id=task_run_id,
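These call sites are reached when a task is deferred; a brief sketch (illustrative only, since it needs a reachable Prefect API and a task worker to actually execute the run):

```python
from prefect import task


@task
def add(x: int, y: int) -> int:
    return x + y


# Deferring schedules a background run; its parameters are written to storage via
# prefect.task_worker.store_parameters, and the new run id is a UUIDv7.
future = add.delay(1, 2)
print(future.task_run_id)
```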
prefect/types/__init__.py CHANGED
@@ -1,12 +1,12 @@
  from __future__ import annotations

  from functools import partial
- from typing import Annotated, Any, Optional, TypeVar, Union
+ from typing import Annotated, Any, Optional, TypeVar, Union, cast
+ from uuid import UUID
  from typing_extensions import Literal
  import orjson
  import pydantic

-
  from ._datetime import DateTime, Date
  from .names import (
      Name,
prefect_client-3.4.1.dev5.dist-info/METADATA → prefect_client-3.4.2.dev2.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: prefect-client
- Version: 3.4.1.dev5
+ Version: 3.4.2.dev2
  Summary: Workflow orchestration and management.
  Project-URL: Changelog, https://github.com/PrefectHQ/prefect/releases
  Project-URL: Documentation, https://docs.prefect.io
@@ -60,6 +60,7 @@ Requires-Dist: sniffio<2.0.0,>=1.3.0
  Requires-Dist: toml>=0.10.0
  Requires-Dist: typing-extensions<5.0.0,>=4.10.0
  Requires-Dist: ujson<6.0.0,>=5.8.0
+ Requires-Dist: uuid7>=0.1.0
  Requires-Dist: uvicorn!=0.29.0,>=0.14.0
  Requires-Dist: websockets<16.0,>=13.0
  Requires-Dist: whenever<0.9.0,>=0.7.3; python_version >= '3.13'
prefect_client-3.4.1.dev5.dist-info/RECORD → prefect_client-3.4.2.dev2.dist-info/RECORD RENAMED
@@ -1,7 +1,7 @@
  prefect/.prefectignore,sha256=awSprvKT0vI8a64mEOLrMxhxqcO-b0ERQeYpA2rNKVQ,390
  prefect/__init__.py,sha256=iCdcC5ZmeewikCdnPEP6YBAjPNV5dvfxpYCTpw30Hkw,3685
  prefect/__main__.py,sha256=WFjw3kaYJY6pOTA7WDOgqjsz8zUEUZHCcj3P5wyVa-g,66
- prefect/_build_info.py,sha256=X9CjltSx8CpEa5y-u1RGzeOI83l9BsXPZ3myZoXzOTg,185
+ prefect/_build_info.py,sha256=sacBgu0Y8THDSXe5InK6nCR0AQ6RVpU5G5hOtA687Go,185
  prefect/_result_records.py,sha256=S6QmsODkehGVSzbMm6ig022PYbI6gNKz671p_8kBYx4,7789
  prefect/_versioning.py,sha256=YqR5cxXrY4P6LM1Pmhd8iMo7v_G2KJpGNdsf4EvDFQ0,14132
  prefect/_waiters.py,sha256=Ia2ITaXdHzevtyWIgJoOg95lrEXQqNEOquHvw3T33UQ,9026
@@ -20,15 +20,15 @@ prefect/futures.py,sha256=5wVHLtniwG2au0zuxM-ucqo08x0B5l6e8Z1Swbe8R9s,23720
  prefect/main.py,sha256=8V-qLB4GjEVCkGRgGXeaIk-JIXY8Z9FozcNluj4Sm9E,2589
  prefect/plugins.py,sha256=FPRLR2mWVBMuOnlzeiTD9krlHONZH2rtYLD753JQDNQ,2516
  prefect/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/results.py,sha256=3zuO8a4EGtti5AP4zLSnoES1YMKP1CugergxJdZa0-8,37140
+ prefect/results.py,sha256=Amm3TQu8U_oakSn__tCogIJ5DsTj0w_kLzuENWsxK6A,36824
  prefect/schedules.py,sha256=dhq4OhImRvcmtxF7UH1m8RbwYdHT5RQsp_FrxVXfODE,7289
  prefect/serializers.py,sha256=QI0oEal_BO4HQaWSjr6ReSwT55Hn4sbSOXxGgQI1-y0,9249
  prefect/states.py,sha256=rh7l1bnIYpTXdlXt5nnpz66y9KLjBWAJrN9Eo5RwgQs,26023
  prefect/task_engine.py,sha256=j0rr8IyBinJmKPD-486RYWKZakhifkEE9ppPCJ9Es-U,62463
- prefect/task_runners.py,sha256=vzJ1kiW1z90Fkkg21QNhPwjfLoDy6rVsUAToXqb6FUE,16206
+ prefect/task_runners.py,sha256=PozMYXXjiy5pMStifjdBTnLRTtP9uRuBa86KgafpPkQ,16243
  prefect/task_runs.py,sha256=7LIzfo3fondCyEUpU05sYFN5IfpZigBDXrhG5yc-8t0,9039
- prefect/task_worker.py,sha256=gMj_rl4EjTrnJ5YSOXinC6y-7KSK7fRQt_UYbZbrrV8,17879
- prefect/tasks.py,sha256=DODF_1xPDQVvj_paJDWm43RS46Jdx9_7b2huqT_QyiM,74778
+ prefect/task_worker.py,sha256=RifZ3bOl6ppoYPiOAd4TQp2_GEw9eDQoW483rq1q52Q,20805
+ prefect/tasks.py,sha256=s8z5k_3KUC0FXzE10-VWH17Uc36a1GKbMOn3jYGbbjk,74954
  prefect/transactions.py,sha256=uIoPNudzJzH6NrMJhrgr5lyh6JxOJQqT1GvrXt69yNw,26068
  prefect/variables.py,sha256=dCK3vX7TbkqXZhnNT_v7rcGh3ISRqoR6pJVLpoll3Js,8342
  prefect/_experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -43,8 +43,10 @@ prefect/_internal/_logging.py,sha256=Igy2tCM2Hv9wNiDPcee0s5N1fTc6oRP7OffCJBqAekY
  prefect/_internal/integrations.py,sha256=U4cZMDbnilzZSKaMxvzZcSL27a1tzRMjDoTfr2ul_eY,231
  prefect/_internal/pytz.py,sha256=Sy_cD-Hkmo_Yrhx2Jucy7DgTRhvO8ZD0whW1ywbSg_U,13765
  prefect/_internal/retries.py,sha256=pMHofrTQPDSxbVWclDwXbfhFKaDC6sxe1DkUOWugV6k,3040
+ prefect/_internal/uuid7.py,sha256=-Wl5rFozDSKRyhSfa9WT8BK1U5Rq8ehEgZB5aV5lodU,211
  prefect/_internal/compatibility/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/_internal/compatibility/async_dispatch.py,sha256=cUXOqSeseMUaje9oYUzasVPtNttyiHvrqfJl0zK66XI,2949
+ prefect/_internal/compatibility/blocks.py,sha256=SSZXoWVuCMYu1EzjqmTa4lKjDCyxvOFK47XMj6s4hsk,984
  prefect/_internal/compatibility/deprecated.py,sha256=YUK1IGOgZrDh6dYRez-9IYTB1eqNC19QiSKbBDl88Qs,9305
  prefect/_internal/compatibility/migration.py,sha256=Z_r28B90ZQkSngXjr4I_9zA6P74_u48mtp2jYWB9zGg,6797
  prefect/_internal/concurrency/__init__.py,sha256=YlTwU9ryjPNwbJa45adLJY00t_DGCh1QrdtY9WdVFfw,2140
@@ -63,7 +65,7 @@ prefect/_internal/pydantic/v1_schema.py,sha256=wSyQr3LUbIh0R9LsZ6ItmLnQeAS8dxVMN
  prefect/_internal/pydantic/v2_schema.py,sha256=n56GUlGSUeNZLpMphHliN5ksryVdE9OQHoVir2hGXoA,3224
  prefect/_internal/pydantic/v2_validated_func.py,sha256=Ld8OtPFF7Ci-gHHmKhSMizBxzuIBOQ6kuIFNRh0vRVY,3731
  prefect/_internal/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/_internal/schemas/bases.py,sha256=JqcZazL5Cp2hZ8Hssu8R2SVXRxHfbdRbTqmvwDYSzyk,4291
+ prefect/_internal/schemas/bases.py,sha256=wYBIa5f5BwiKid7Rwp90gqxs7mt4qGBURdtv5dgWJxk,4583
  prefect/_internal/schemas/fields.py,sha256=m4LrFNz8rA9uBhMk9VyQT6FIXmV_EVAW92hdXeSvHbY,837
  prefect/_internal/schemas/serializers.py,sha256=G_RGHfObjisUiRvd29p-zc6W4bwt5rE1OdR6TXNrRhQ,825
  prefect/_internal/schemas/validators.py,sha256=bOtuOYHWfRo-i6zqkE-wvCMXQ3Yww-itj86QLp3yu3Y,16681
@@ -114,7 +116,7 @@ prefect/client/orchestration/_work_pools/client.py,sha256=s1DfUQQBgB2sLiVVPhLNTl
  prefect/client/schemas/__init__.py,sha256=InZcDzdeWA2oaV0TlyvoMcyLcbi_aaqU1U9D6Gx-eoU,2747
  prefect/client/schemas/actions.py,sha256=E46Mdq7vAq8hhYmMj6zqUF20uAPXZricViZcIYmgEf0,32443
  prefect/client/schemas/filters.py,sha256=qa--NNZduuSOcL1xw-YMd4FVIKMrDnBwPPY4m5Di0GA,35963
- prefect/client/schemas/objects.py,sha256=pmu3CGQ62LYHgS0bEDS_s2XDwtkuR17BYbM5_6vGcWg,57755
+ prefect/client/schemas/objects.py,sha256=e5CMS6FhuYqTmxXK1U80eH5zEC0YkZ_vS_aJdr0VA5o,57912
  prefect/client/schemas/responses.py,sha256=Zdcx7jlIaluEa2uYIOE5mK1HsJvWPErRAcaWM20oY_I,17336
  prefect/client/schemas/schedules.py,sha256=sxLFk0SmFY7X1Y9R9HyGDqOS3U5NINBWTciUU7vTTic,14836
  prefect/client/schemas/sorting.py,sha256=L-2Mx-igZPtsUoRUguTcG3nIEstMEMPD97NwPM2Ox5s,2579
@@ -149,16 +151,16 @@ prefect/docker/docker_image.py,sha256=bR_pEq5-FDxlwTj8CP_7nwZ_MiGK6KxIi8v7DRjy1K
  prefect/events/__init__.py,sha256=GtKl2bE--pJduTxelH2xy7SadlLJmmis8WR1EYixhuA,2094
  prefect/events/actions.py,sha256=A7jS8bo4zWGnrt3QfSoQs0uYC1xfKXio3IfU0XtTb5s,9129
  prefect/events/clients.py,sha256=e3A6cKxi-fG2TkFedaRuC472hIM3VgaVxI6mcPP41kY,27613
- prefect/events/filters.py,sha256=2hVfzc3Rdgy0mBHDutWxT__LJY0zpVM8greWX3y6kjM,8233
+ prefect/events/filters.py,sha256=tnAbA4Z0Npem8Jbin-qqe38K_4a-4YdpU-Oc4u8Y95Q,8697
  prefect/events/related.py,sha256=CTeexYUmmA93V4gsR33GIFmw-SS-X_ouOpRg-oeq-BU,6672
  prefect/events/utilities.py,sha256=ww34bTMENCNwcp6RhhgzG0KgXOvKGe0MKmBdSJ8NpZY,3043
  prefect/events/worker.py,sha256=HjbibR0_J1W1nnNMZDFTXAbB0cl_cFGaFI87DvNGcnI,4557
  prefect/events/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/events/cli/automations.py,sha256=uCX3NnypoI25TmyAoyL6qYhanWjZbJ2watwv1nfQMxs,11513
  prefect/events/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/events/schemas/automations.py,sha256=5uYx18sVf8Mqx-KtfcSGli8x4GkNPUHC8LZZfsDzeBo,14568
+ prefect/events/schemas/automations.py,sha256=UHrV572HB5Icb1LuOUkMIdDrMDsxW1GhIiST-qzUlFs,14640
  prefect/events/schemas/deployment_triggers.py,sha256=OX9g9eHe0nqJ3PtVEzqs9Ub2LaOHMA4afLZSvSukKGU,3191
- prefect/events/schemas/events.py,sha256=jqZPBXPEnJUCXbk9OM0geJr94trM7jHrk9yvzt6hTbA,9235
+ prefect/events/schemas/events.py,sha256=r8sSx2Q1A0KIofnZR_Bri7YT1wzXKV3YS-LnxpeIXHE,9270
  prefect/events/schemas/labelling.py,sha256=bU-XYaHXhI2MEBIHngth96R9D02m8HHb85KNcHZ_1Gc,3073
  prefect/infrastructure/__init__.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/infrastructure/base.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
@@ -226,7 +228,7 @@ prefect/server/api/templates.py,sha256=92bLFfcahZUp5PVNTZPjl8uJSDj4ZYRTVdmTzZXkE
  prefect/server/api/validation.py,sha256=HxSNyH8yb_tI-kOfjXESRjJp6WQK6hYWBJsaBxUvY34,14490
  prefect/server/api/variables.py,sha256=SJaKuqInfQIEdMlJOemptBDN43KLFhlf_u9QwupDu7A,6185
  prefect/server/api/work_queues.py,sha256=wBcbmkZDaQ5Ddi9wc8tNs6kYG_FdNtYwTCR0VkhPj2o,7588
- prefect/server/api/workers.py,sha256=-y8J9R47zeINvA07wd5P-5PCHjZmJVMm81CdfKMraww,24086
+ prefect/server/api/workers.py,sha256=8EVPnGv9wAC3YBWIoUx70OS7zfhRKjoXAAECFWKBMg0,24121
  prefect/server/api/collections_data/views/aggregate-worker-metadata.json,sha256=f6t13GRkIcLqGYB3OnXluAHEFoSqZM2SQP22vpcu0Mk,79793
  prefect/server/api/static/prefect-logo-mark-gradient.png,sha256=ylRjJkI_JHCw8VbQasNnXQHwZW-sH-IQiUGSD3aWP1E,73430
  prefect/server/api/ui/__init__.py,sha256=TCXO4ZUZCqCbm2QoNvWNTErkzWiX2nSACuO-0Tiomvg,93
@@ -276,7 +278,7 @@ prefect/telemetry/logging.py,sha256=ktIVTXbdZ46v6fUhoHNidFrpvpNJR-Pj-hQ4V9b40W4,
  prefect/telemetry/processors.py,sha256=jw6j6LviOVxw3IBJe7cSjsxFk0zzY43jUmy6C9pcfCE,2272
  prefect/telemetry/run_telemetry.py,sha256=_FbjiPqPemu4xvZuI2YBPwXeRJ2BcKRJ6qgO4UMzKKE,8571
  prefect/telemetry/services.py,sha256=DxgNNDTeWNtHBtioX8cjua4IrCbTiJJdYecx-gugg-w,2358
- prefect/types/__init__.py,sha256=vzFQspL0xeqQVW3rtXdBk1hKi_nlzvg8Qaf4jyQ95v0,4261
+ prefect/types/__init__.py,sha256=SwyWpbxSevAKU9lWpfauD61whUP7kksvfx-mtq3UE6E,4288
  prefect/types/_datetime.py,sha256=ZE-4YK5XJuyxnp5pqldZwtIjkxCpxDGnCSfZiTl7-TU,7566
  prefect/types/entrypoint.py,sha256=2FF03-wLPgtnqR_bKJDB2BsXXINPdu8ptY9ZYEZnXg8,328
  prefect/types/names.py,sha256=CMMZD928iiod2UvB0qrsfXEBC5jj_bO0ge1fFXcrtgM,3450
@@ -320,7 +322,7 @@ prefect/workers/cloud.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/workers/process.py,sha256=Yi5D0U5AQ51wHT86GdwtImXSefe0gJf3LGq4r4z9zwM,11090
  prefect/workers/server.py,sha256=2pmVeJZiVbEK02SO6BEZaBIvHMsn6G8LzjW8BXyiTtk,1952
  prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
- prefect_client-3.4.1.dev5.dist-info/METADATA,sha256=PUcsY0sXpjiBspwz8PP-QToO8lAjsegJsS62iCWM1so,7471
- prefect_client-3.4.1.dev5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- prefect_client-3.4.1.dev5.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
- prefect_client-3.4.1.dev5.dist-info/RECORD,,
+ prefect_client-3.4.2.dev2.dist-info/METADATA,sha256=zrwuovPi1Hx7Nm2RVWuquaAfkKU5F1YRA2wL2aNskTk,7499
+ prefect_client-3.4.2.dev2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ prefect_client-3.4.2.dev2.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+ prefect_client-3.4.2.dev2.dist-info/RECORD,,