prefect-client 3.1.13__py3-none-any.whl → 3.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their respective public registries.
prefect/runner/runner.py CHANGED
@@ -36,6 +36,7 @@ from __future__ import annotations
 
  import asyncio
  import datetime
+ import inspect
  import logging
  import os
  import shutil
@@ -82,15 +83,10 @@ from prefect.client.schemas.filters import (
  )
  from prefect.client.schemas.objects import (
  ConcurrencyLimitConfig,
- FlowRun,
  State,
  StateType,
  )
  from prefect.client.schemas.objects import Flow as APIFlow
- from prefect.concurrency.asyncio import (
- AcquireConcurrencySlotTimeoutError,
- ConcurrencySlotAcquisitionError,
- )
  from prefect.events import DeploymentTriggerTypes, TriggerTypes
  from prefect.events.related import tags_as_related_resources
  from prefect.events.schemas.events import RelatedResource
@@ -117,7 +113,6 @@ from prefect.utilities.asyncutils import (
  )
  from prefect.utilities.engine import propose_state
  from prefect.utilities.processutils import (
- _register_signal,
  get_sys_executable,
  run_process,
  )
@@ -130,9 +125,11 @@ from prefect.utilities.slugify import slugify
  if TYPE_CHECKING:
  import concurrent.futures
 
+ from prefect.client.schemas.objects import FlowRun
  from prefect.client.schemas.responses import DeploymentResponse
  from prefect.client.types.flexible_schedule_list import FlexibleScheduleList
  from prefect.deployments.runner import RunnerDeployment
+
  __all__ = ["Runner"]
 
 
@@ -220,7 +217,7 @@ class Runner:
  self._client: PrefectClient = get_client()
  self._submitting_flow_run_ids: set[UUID] = set()
  self._cancelling_flow_run_ids: set[UUID] = set()
- self._scheduled_task_scopes: set[UUID] = set()
+ self._scheduled_task_scopes: set[anyio.abc.CancelScope] = set()
  self._deployment_ids: set[UUID] = set()
  self._flow_run_process_map: dict[UUID, ProcessMapEntry] = dict()
 
@@ -250,10 +247,16 @@ class Runner:
  Args:
  deployment: A deployment for the runner to register.
  """
- deployment_id = await deployment.apply()
+ apply_coro = deployment.apply()
+ if TYPE_CHECKING:
+ assert inspect.isawaitable(apply_coro)
+ deployment_id = await apply_coro
  storage = deployment.storage
  if storage is not None:
- storage = await self._add_storage(storage)
+ add_storage_coro = self._add_storage(storage)
+ if TYPE_CHECKING:
+ assert inspect.isawaitable(add_storage_coro)
+ storage = await add_storage_coro
  self._deployment_storage_map[deployment_id] = storage
  self._deployment_ids.add(deployment_id)
 
@@ -321,7 +324,7 @@ class Runner:
  )
  name = self.name if name is None else name
 
- deployment = await flow.to_deployment(
+ to_deployment_coro = flow.to_deployment(
  name=name,
  interval=interval,
  cron=cron,
@@ -337,7 +340,14 @@ class Runner:
  entrypoint_type=entrypoint_type,
  concurrency_limit=concurrency_limit,
  )
- return await self.add_deployment(deployment)
+ if TYPE_CHECKING:
+ assert inspect.isawaitable(to_deployment_coro)
+ deployment = await to_deployment_coro
+
+ add_deployment_coro = self.add_deployment(deployment)
+ if TYPE_CHECKING:
+ assert inspect.isawaitable(add_deployment_coro)
+ return await add_deployment_coro
 
  @sync_compatible
  async def _add_storage(self, storage: RunnerStorage) -> RunnerStorage:
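The hunks above replace direct awaits on `sync_compatible` callables (`deployment.apply`, `self._add_storage`, `flow.to_deployment`, `Runner.add_deployment`) with an `if TYPE_CHECKING: assert inspect.isawaitable(...)` guard. Type checkers see a `sync_compatible` callable as returning either a plain value or a coroutine, and the assert (evaluated only during static analysis, never at runtime) narrows that union so the `await` type-checks. A minimal self-contained sketch of the pattern, using hypothetical names (`fetch_value`, `caller`) rather than Prefect's own:

import asyncio
import inspect
from typing import TYPE_CHECKING, Awaitable, Union

def fetch_value() -> Union[int, Awaitable[int]]:
    # Stand-in for a sync_compatible-style callable: typed as value-or-coroutine,
    # and returning a coroutine when used from async code.
    async def _impl() -> int:
        return 42
    return _impl()

async def caller() -> int:
    maybe_coro = fetch_value()
    if TYPE_CHECKING:
        # Seen only by the type checker; narrows the union to Awaitable[int]
        # with zero runtime cost, since TYPE_CHECKING is False at runtime.
        assert inspect.isawaitable(maybe_coro)
    return await maybe_coro

print(asyncio.run(caller()))  # 42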
@@ -364,7 +374,7 @@ class Runner:
  else:
  return next(s for s in self._storage_objs if s == storage)
 
- def handle_sigterm(self, **kwargs: Any) -> None:
+ def handle_sigterm(self, *args: Any, **kwargs: Any) -> None:
  """
  Gracefully shuts down the runner when a SIGTERM is received.
  """
@@ -415,7 +425,8 @@ class Runner:
  """
  from prefect.runner.server import start_webserver
 
- _register_signal(signal.SIGTERM, self.handle_sigterm)
+ if threading.current_thread() is threading.main_thread():
+ signal.signal(signal.SIGTERM, self.handle_sigterm)
 
  webserver = webserver if webserver is not None else self.webserver
 
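The SIGTERM change above drops the `_register_signal` helper and calls `signal.signal` directly, but only from the main thread: `signal.signal` raises `ValueError` when invoked from any other thread, so off the main thread the handler is simply not installed. An illustrative sketch of that guard (the `install_sigterm_handler` helper is hypothetical, not part of Prefect):

import signal
import threading
from typing import Any, Callable

def install_sigterm_handler(handler: Callable[..., Any]) -> bool:
    # signal.signal may only be called from the main thread of the main
    # interpreter; anywhere else it raises ValueError, so skip registration.
    if threading.current_thread() is threading.main_thread():
        signal.signal(signal.SIGTERM, handler)
        return True
    return False

def _shutdown(signum: int, frame: Any) -> None:
    print("received SIGTERM, shutting down")

if __name__ == "__main__":
    print("handler registered:", install_sigterm_handler(_shutdown))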
@@ -492,7 +503,7 @@ class Runner:
  return asyncio.run_coroutine_threadsafe(func(*args, **kwargs), self._loop)
 
  async def cancel_all(self) -> None:
- runs_to_cancel = []
+ runs_to_cancel: list[FlowRun] = []
 
  # done to avoid dictionary size changing during iteration
  for info in self._flow_run_process_map.values():
@@ -555,7 +566,7 @@ class Runner:
  ),
  )
 
- self._flow_run_process_map[flow_run.id] = dict(
+ self._flow_run_process_map[flow_run.id] = ProcessMapEntry(
  pid=pid, flow_run=flow_run
  )
 
@@ -586,7 +597,7 @@ class Runner:
  )
  )
 
- def _get_flow_run_logger(self, flow_run: "FlowRun") -> PrefectLogAdapter:
+ def _get_flow_run_logger(self, flow_run: "FlowRun | FlowRun") -> PrefectLogAdapter:
  return flow_run_logger(flow_run=flow_run).getChild(
  "runner",
  extra={
@@ -597,9 +608,9 @@ class Runner:
  async def _run_process(
  self,
  flow_run: "FlowRun",
- task_status: Optional[anyio.abc.TaskStatus[Any]] = None,
+ task_status: Optional[anyio.abc.TaskStatus[int]] = None,
  entrypoint: Optional[str] = None,
- ):
+ ) -> int:
  """
  Runs the given flow run in a subprocess.
 
@@ -637,7 +648,11 @@ class Runner:
  )
  env.update(**os.environ) # is this really necessary??
 
- storage = self._deployment_storage_map.get(flow_run.deployment_id)
+ storage = (
+ self._deployment_storage_map.get(flow_run.deployment_id)
+ if flow_run.deployment_id
+ else None
+ )
  if storage and storage.pull_interval:
  # perform an adhoc pull of code before running the flow if an
  # adhoc pull hasn't been performed in the last pull_interval
@@ -661,12 +676,14 @@ class Runner:
  command=command,
  stream_output=True,
  task_status=task_status,
+ task_status_handler=None,
  env=env,
- **kwargs,
  cwd=storage.destination if storage else None,
+ **kwargs,
  )
 
- # Use the pid for display if no name was given
+ if process.returncode is None:
+ raise RuntimeError("Process exited with None return code")
 
  if process.returncode:
  help_message = None
@@ -862,9 +879,12 @@ class Runner:
  async def _cancel_run(self, flow_run: "FlowRun", state_msg: Optional[str] = None):
  run_logger = self._get_flow_run_logger(flow_run)
 
- pid = self._flow_run_process_map.get(flow_run.id, {}).get("pid")
+ process_map_entry = self._flow_run_process_map.get(flow_run.id)
+
+ pid = process_map_entry.get("pid") if process_map_entry else None
  if not pid:
- await self._run_on_cancellation_hooks(flow_run, flow_run.state)
+ if flow_run.state:
+ await self._run_on_cancellation_hooks(flow_run, flow_run.state)
  await self._mark_flow_run_as_cancelled(
  flow_run,
  state_updates={
@@ -880,7 +900,8 @@ class Runner:
  await self._kill_process(pid)
  except RuntimeError as exc:
  self._logger.warning(f"{exc} Marking flow run as cancelled.")
- await self._run_on_cancellation_hooks(flow_run, flow_run.state)
+ if flow_run.state:
+ await self._run_on_cancellation_hooks(flow_run, flow_run.state)
  await self._mark_flow_run_as_cancelled(flow_run)
  except Exception:
  run_logger.exception(
@@ -890,7 +911,8 @@ class Runner:
  # We will try again on generic exceptions
  self._cancelling_flow_run_ids.remove(flow_run.id)
  else:
- await self._run_on_cancellation_hooks(flow_run, flow_run.state)
+ if flow_run.state:
+ await self._run_on_cancellation_hooks(flow_run, flow_run.state)
  await self._mark_flow_run_as_cancelled(
  flow_run,
  state_updates={
@@ -1037,7 +1059,7 @@ class Runner:
 
  async def _get_scheduled_flow_runs(
  self,
- ) -> List["FlowRun"]:
+ ) -> list["FlowRun"]:
  """
  Retrieve scheduled flow runs for this runner.
  """
@@ -1062,9 +1084,11 @@ class Runner:
  Returns:
  - bool: True if the limit has not been reached, False otherwise.
  """
+ if not self._limiter:
+ return False
  return self._limiter.available_tokens > 0
 
- def _acquire_limit_slot(self, flow_run_id: str) -> bool:
+ def _acquire_limit_slot(self, flow_run_id: UUID) -> bool:
  """
  Enforces flow run limit set on runner.
 
@@ -1089,6 +1113,8 @@ class Runner:
  else:
  raise
  except anyio.WouldBlock:
+ if TYPE_CHECKING:
+ assert self._limiter is not None
  self._logger.info(
  f"Flow run limit reached; {self._limiter.borrowed_tokens} flow runs"
  " in progress. You can control this limit by passing a `limit` value"
@@ -1096,7 +1122,7 @@ class Runner:
  )
  return False
 
- def _release_limit_slot(self, flow_run_id: str) -> None:
+ def _release_limit_slot(self, flow_run_id: UUID) -> None:
  """
  Frees up a slot taken by the given flow run id.
  """
@@ -1106,15 +1132,17 @@ class Runner:
 
  async def _submit_scheduled_flow_runs(
  self,
- flow_run_response: List["FlowRun"],
- entrypoints: Optional[List[str]] = None,
- ) -> List["FlowRun"]:
+ flow_run_response: list["FlowRun"],
+ entrypoints: list[str] | None = None,
+ ) -> list["FlowRun"]:
  """
  Takes a list of FlowRuns and submits the referenced flow runs
  for execution by the runner.
  """
- submittable_flow_runs = flow_run_response
- submittable_flow_runs.sort(key=lambda run: run.next_scheduled_start_time)
+ submittable_flow_runs = sorted(
+ flow_run_response,
+ key=lambda run: run.next_scheduled_start_time or datetime.datetime.max,
+ )
 
  for i, flow_run in enumerate(submittable_flow_runs):
  if flow_run.id in self._submitting_flow_run_ids:
@@ -1163,7 +1191,7 @@ class Runner:
  )
 
  if readiness_result and not isinstance(readiness_result, Exception):
- self._flow_run_process_map[flow_run.id] = dict(
+ self._flow_run_process_map[flow_run.id] = ProcessMapEntry(
  pid=readiness_result, flow_run=flow_run
  )
  # Heartbeats are opt-in and only emitted if a heartbeat frequency is set
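Swapping the bare `dict(...)` calls for `ProcessMapEntry(...)` gives `self._flow_run_process_map` a concrete value shape that type checkers can verify. A small sketch of the idea, assuming `ProcessMapEntry` is a TypedDict with the two fields used above (the `FlowRunStub` class and surrounding names are illustrative, not Prefect's):

import uuid
from typing import TypedDict

class FlowRunStub:
    # Illustrative stand-in for prefect.client.schemas.objects.FlowRun.
    def __init__(self) -> None:
        self.id = uuid.uuid4()

class ProcessMapEntry(TypedDict):
    # Assumed shape, inferred from the ProcessMapEntry(pid=..., flow_run=...) calls above.
    flow_run: FlowRunStub
    pid: int

process_map: dict[uuid.UUID, ProcessMapEntry] = {}
run = FlowRunStub()
process_map[run.id] = ProcessMapEntry(pid=12345, flow_run=run)
print(process_map[run.id]["pid"])  # 12345

At runtime a TypedDict call still produces a plain dict, so the change is type-only.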
@@ -1180,7 +1208,7 @@ class Runner:
  async def _submit_run_and_capture_errors(
  self,
  flow_run: "FlowRun",
- task_status: Optional[anyio.abc.TaskStatus] = None,
+ task_status: anyio.abc.TaskStatus[int | Exception],
  entrypoint: Optional[str] = None,
  ) -> Union[Optional[int], Exception]:
  run_logger = self._get_flow_run_logger(flow_run)
@@ -1191,24 +1219,8 @@ class Runner:
  task_status=task_status,
  entrypoint=entrypoint,
  )
- except (
- AcquireConcurrencySlotTimeoutError,
- ConcurrencySlotAcquisitionError,
- ) as exc:
- self._logger.info(
- (
- "Deployment %s reached its concurrency limit when attempting to execute flow run %s. Will attempt to execute later."
- ),
- flow_run.deployment_id,
- flow_run.name,
- )
- await self._propose_scheduled_state(flow_run)
-
- if not task_status._future.done():
- task_status.started(exc)
- return exc
  except Exception as exc:
- if not task_status._future.done():
+ if task_status:
  # This flow run was being submitted and did not start successfully
  run_logger.exception(
  f"Failed to start process for flow run '{flow_run.id}'."
@@ -1236,7 +1248,7 @@ class Runner:
 
  api_flow_run = await self._client.read_flow_run(flow_run_id=flow_run.id)
  terminal_state = api_flow_run.state
- if terminal_state.is_crashed():
+ if terminal_state and terminal_state.is_crashed():
  await self._run_on_crashed_hooks(flow_run=flow_run, state=terminal_state)
 
  return status_code
@@ -1311,12 +1323,19 @@ class Runner:
  )
 
  async def _mark_flow_run_as_cancelled(
- self, flow_run: "FlowRun", state_updates: Optional[dict] = None
+ self, flow_run: "FlowRun", state_updates: Optional[dict[str, Any]] = None
  ) -> None:
  state_updates = state_updates or {}
  state_updates.setdefault("name", "Cancelled")
  state_updates.setdefault("type", StateType.CANCELLED)
- state = flow_run.state.model_copy(update=state_updates)
+ state = (
+ flow_run.state.model_copy(update=state_updates) if flow_run.state else None
+ )
+ if not state:
+ self._logger.warning(
+ f"Could not find state for flow run {flow_run.id} and cancellation cannot be guaranteed."
+ )
+ return
 
  await self._client.set_flow_run_state(flow_run.id, state, force=True)
@@ -1327,7 +1346,9 @@ class Runner:
  60 * 10, self._cancelling_flow_run_ids.remove, flow_run.id
  )
 
- async def _schedule_task(self, __in_seconds: int, fn, *args, **kwargs):
+ async def _schedule_task(
+ self, __in_seconds: int, fn: Callable[..., Any], *args: Any, **kwargs: Any
+ ) -> None:
  """
  Schedule a background task to start after some time.
 
@@ -1336,7 +1357,7 @@ class Runner:
  The function may be async or sync. Async functions will be awaited.
  """
 
- async def wrapper(task_status):
+ async def wrapper(task_status: anyio.abc.TaskStatus[None]) -> None:
  # If we are shutting down, do not sleep; otherwise sleep until the scheduled
  # time or shutdown
  if self.started:
@@ -1398,7 +1419,7 @@ class Runner:
  self._client = get_client()
  self._tmp_dir.mkdir(parents=True)
 
- self._limiter = anyio.CapacityLimiter(self.limit)
+ self._limiter = anyio.CapacityLimiter(self.limit) if self.limit else None
 
  if not hasattr(self, "_loop") or not self._loop:
  self._loop = asyncio.get_event_loop()
prefect/tasks.py CHANGED
@@ -1642,7 +1642,43 @@ def task(
  refresh_cache: Optional[bool] = None,
  on_completion: Optional[list[StateHookCallable]] = None,
  on_failure: Optional[list[StateHookCallable]] = None,
- retry_condition_fn: Optional[Callable[[Task[P, Any], TaskRun, State], bool]] = None,
+ retry_condition_fn: Literal[None] = None,
+ viz_return_value: Any = None,
+ ) -> Callable[[Callable[P, R]], Task[P, R]]:
+ ...
+
+
+ # see https://github.com/PrefectHQ/prefect/issues/16380
+ @overload
+ def task(
+ __fn: Literal[None] = None,
+ *,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ tags: Optional[Iterable[str]] = None,
+ version: Optional[str] = None,
+ cache_policy: Union[CachePolicy, type[NotSet]] = NotSet,
+ cache_key_fn: Optional[
+ Callable[["TaskRunContext", dict[str, Any]], Optional[str]]
+ ] = None,
+ cache_expiration: Optional[datetime.timedelta] = None,
+ task_run_name: Optional[TaskRunNameValueOrCallable] = None,
+ retries: int = 0,
+ retry_delay_seconds: Union[
+ float, int, list[float], Callable[[int], list[float]], None
+ ] = None,
+ retry_jitter_factor: Optional[float] = None,
+ persist_result: Optional[bool] = None,
+ result_storage: Optional[ResultStorage] = None,
+ result_storage_key: Optional[str] = None,
+ result_serializer: Optional[ResultSerializer] = None,
+ cache_result_in_memory: bool = True,
+ timeout_seconds: Union[int, float, None] = None,
+ log_prints: Optional[bool] = None,
+ refresh_cache: Optional[bool] = None,
+ on_completion: Optional[list[StateHookCallable]] = None,
+ on_failure: Optional[list[StateHookCallable]] = None,
+ retry_condition_fn: Optional[Callable[[Task[P, R], TaskRun, State], bool]] = None,
  viz_return_value: Any = None,
  ) -> Callable[[Callable[P, R]], Task[P, R]]:
  ...
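The new overload pair (one where `retry_condition_fn` is `Literal[None]`, one typed against `Task[P, R]` instead of `Task[P, Any]`) appears intended to let type checkers relate the retry predicate's `Task` parameter to the decorated function's own signature; see the referenced issue #16380. A hedged usage sketch, with the predicate name and task body invented for illustration rather than taken from the package:

from prefect import task
from prefect.client.schemas.objects import State, TaskRun
from prefect.tasks import Task

def retry_on_failure(tsk: Task, run: TaskRun, state: State) -> bool:
    # Illustrative predicate: retry whenever the run finished in a failed state.
    return state.is_failed()

@task(retries=2, retry_condition_fn=retry_on_failure)
def fetch_length(url: str) -> int:
    return len(url)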
prefect/types/__init__.py CHANGED
@@ -2,11 +2,12 @@ from __future__ import annotations
 
  from functools import partial
  from typing import Annotated, Any, Dict, List, Optional, Set, TypeVar, Union
- from typing_extensions import Literal, TypeAlias
+ from typing_extensions import Literal
  import orjson
  import pydantic
- from pydantic_extra_types.pendulum_dt import DateTime as PydanticDateTime
- from pydantic_extra_types.pendulum_dt import Date as PydanticDate
+
+
+ from ._datetime import DateTime, Date
  from pydantic import (
  BeforeValidator,
  Field,
@@ -37,8 +38,6 @@ TimeZone = Annotated[
  ),
  ]
 
- DateTime: TypeAlias = PydanticDateTime
- Date: TypeAlias = PydanticDate
 
  BANNED_CHARACTERS = ["/", "%", "&", ">", "<"]
 
@@ -171,6 +170,8 @@ KeyValueLabelsField = Annotated[
 
  __all__ = [
  "ClientRetryExtraCodes",
+ "Date",
+ "DateTime",
  "LogLevel",
  "KeyValueLabelsField",
  "NonNegativeInteger",
prefect/types/_datetime.py ADDED
@@ -0,0 +1,19 @@
+ from __future__ import annotations
+
+ import pendulum
+ from pendulum.date import Date as PendulumDate
+ from pendulum.datetime import DateTime as PendulumDateTime
+ from pendulum.duration import Duration as PendulumDuration
+ from pendulum.time import Time as PendulumTime
+ from pydantic_extra_types.pendulum_dt import Date as PydanticDate
+ from pydantic_extra_types.pendulum_dt import DateTime as PydanticDateTime
+ from typing_extensions import TypeAlias
+
+ DateTime: TypeAlias = PydanticDateTime
+ Date: TypeAlias = PydanticDate
+
+
+ def parse_datetime(
+     value: str,
+ ) -> PendulumDateTime | PendulumDate | PendulumTime | PendulumDuration:
+     return pendulum.parse(value)
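prefect/types/__init__.py now pulls `DateTime` and `Date` from this new `_datetime` module and lists them in `__all__`, so the public aliases still resolve to the same pydantic-extra-types classes as before the move. A small illustrative check (assumes prefect 3.1.14 and pydantic-extra-types are installed):

from prefect.types import Date, DateTime
from pydantic_extra_types.pendulum_dt import Date as PydanticDate
from pydantic_extra_types.pendulum_dt import DateTime as PydanticDateTime

# The re-exported names are the same alias objects defined in _datetime.py.
assert DateTime is PydanticDateTime
assert Date is PydanticDate
print("prefect.types re-exports unchanged")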
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: prefect-client
- Version: 3.1.13
+ Version: 3.1.14
  Summary: Workflow orchestration and management.
  Home-page: https://www.prefect.io
  Author: Prefect Technologies, Inc.
@@ -1,17 +1,17 @@
  prefect/.prefectignore,sha256=awSprvKT0vI8a64mEOLrMxhxqcO-b0ERQeYpA2rNKVQ,390
  prefect/__init__.py,sha256=FmdMSNpGH8Mrkn5X0mNZup8_SHdeB_aqEmS5taeOHAQ,3530
- prefect/_version.py,sha256=ov29Y6HAz9nwhwYo5YLUNdEaDeQ3H3lPQcpcUykdI6U,497
+ prefect/_version.py,sha256=Gwft4WAjK7fOlvP_Sv7Yw7Ltrlz2SCGW9t_NCvYnHWg,497
  prefect/agent.py,sha256=qyyUMdiv5ndUIk-O8uwamESJGXXDQ_BmhKiLlm31ue0,286
  prefect/artifacts.py,sha256=3AhzUQg2TS7nQuJuTxuqCpTP8d9GVMCD4BJzLntDQRg,12990
  prefect/automations.py,sha256=JDrHoM6s6wDnauhHG6_oTkRwR9DusPXlCcNzAHvkmAA,12599
- prefect/cache_policies.py,sha256=Dqi-JLQ2VWON6M-od9pIRjVgrm0JcqBfN74a3xrCFhs,9948
- prefect/context.py,sha256=A2kY-jwnUeFmFVKv2RyccXyeG3eKjIajLmPURZ9hk1s,23192
+ prefect/cache_policies.py,sha256=pItSKH2KRFHK9YPw36hTBaFEATKRbl6sN5wAEdF-Uns,11808
+ prefect/context.py,sha256=7SC-trRyfunmPMuM7lod4HhFd5Kc4MHwDWQwenLsh-8,23677
  prefect/engine.py,sha256=vIhgOZfP1Lssa_Vz8uEDx4Y1xoPgZ1_4r581c_Hbwac,2766
  prefect/exceptions.py,sha256=sbphPKQ4yOBUa9w0MsSFoDj_uC8Tlv9WHTjzO3cQKq8,11593
  prefect/filesystems.py,sha256=v5YqGB4uXf9Ew2VuB9VCSkawvYMMVvEtZf7w1VmAmr8,18036
- prefect/flow_engine.py,sha256=gUdczu7LRfk1bYqmgnIyTPKLCWS_188sCg23GVeDgCI,54225
+ prefect/flow_engine.py,sha256=4nGBb7Hdk-WYnDpwhKtQ_OyWQL5xl-A4E3rmhn-Okgw,55609
  prefect/flow_runs.py,sha256=-5udBBYdgdCBCjAMYvELbA1vmrjZ6oREXl-BZdZr6hc,16129
- prefect/flows.py,sha256=bxlKT4H3QCL0kWWsl-nFJM3MEBqWUIm3qFhJBG749HQ,96472
+ prefect/flows.py,sha256=VpW_DCJf5tzpxriHXcBWpySJ4T06hv61zQPFZ7Yk0fE,96489
  prefect/futures.py,sha256=NYWGeC8uRGe1WWB1MxkUshdvAdYibhc32HdFjffdiW0,17217
  prefect/main.py,sha256=6WYIkPTTAoHQ1BPOnbnbvhKpnYiUcStzqYFI6Gqqpvw,2351
  prefect/plugins.py,sha256=FPRLR2mWVBMuOnlzeiTD9krlHONZH2rtYLD753JQDNQ,2516
@@ -23,11 +23,11 @@ prefect/task_engine.py,sha256=gJghxCJzg5kvHulKuMcDQCcvKHPoVYrlNNCd7lGechc,60628
  prefect/task_runners.py,sha256=2rwrnlebVA9LS-tgjZT_sei8sUTXV4CXpVr8R97egW8,15916
  prefect/task_runs.py,sha256=7LIzfo3fondCyEUpU05sYFN5IfpZigBDXrhG5yc-8t0,9039
  prefect/task_worker.py,sha256=4NhcR4ZzCBqfpiLDVuww3p1Pu0YVwjeyqI0BFFegNWA,17791
- prefect/tasks.py,sha256=0qqE34GlCK0VQ66QDTdLHmlaMRzNpSIMKNxCAUKxhhE,72633
+ prefect/tasks.py,sha256=ZdACMzVDh8HGXi-DpU7IHoKTEO0f2tLCvYxqdg30Wjw,74001
  prefect/transactions.py,sha256=kOXwghBW3jM71gg49MkjJPTnImEzXWeTCUE_zpq2MlI,16068
  prefect/variables.py,sha256=dCK3vX7TbkqXZhnNT_v7rcGh3ISRqoR6pJVLpoll3Js,8342
  prefect/_experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/_experimental/lineage.py,sha256=2zI6fFUc3wPa3n50agugqV0Hlc5zgmZFTijU0yY6BKE,6633
+ prefect/_experimental/lineage.py,sha256=uwanTB9ZFYfgHpeqW51wDHogD0Q3UIiayFRpx9Pv4vY,9299
  prefect/_experimental/sla/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/_experimental/sla/client.py,sha256=XTkYHFZiBy_O7RgUyGEdl9MxaHP-6fEAKBk3ksNQobU,3611
  prefect/_experimental/sla/objects.py,sha256=Zdvc_hqdCKlZd4eJTA7T-Czdezk1PPw7kZ6RlNtPeAg,1834
@@ -83,7 +83,7 @@ prefect/client/orchestration/routes.py,sha256=JFG1OWUBfrxPKW8Q7XWItlhOrSZ67IOySS
  prefect/client/orchestration/_artifacts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/client/orchestration/_artifacts/client.py,sha256=0GEM4rJWeedKR2xVgWQcX6DpLyn0zKFJF9nfRCQ4tpM,8855
  prefect/client/orchestration/_automations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/client/orchestration/_automations/client.py,sha256=JoD8VJ8zTpxuTMUPPE8SOHro5bdmjjqzBK63UV58_98,10869
+ prefect/client/orchestration/_automations/client.py,sha256=z4WC7Dov6c75SSmX_awXi4bFYcSxwPwimEbWGEabdkk,10931
  prefect/client/orchestration/_blocks_documents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/client/orchestration/_blocks_documents/client.py,sha256=HTGUIsOkHbe-Vh4hod6oN4VnKNSaOyVuhvToDDGOZ3M,11474
  prefect/client/orchestration/_blocks_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -93,7 +93,7 @@ prefect/client/orchestration/_blocks_types/client.py,sha256=alA4xD-yp3mycAbzMyRu
  prefect/client/orchestration/_concurrency_limits/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/client/orchestration/_concurrency_limits/client.py,sha256=ss73wg8W_dYNTyh8ST6L5DEnLc--PT8yUa4TmPWUuCI,23948
  prefect/client/orchestration/_deployments/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/client/orchestration/_deployments/client.py,sha256=mplDncofcj55b9BKvL2n8Ds4QdguuSi88ZtXD3XxMqg,40551
+ prefect/client/orchestration/_deployments/client.py,sha256=gwfmQqRXaFTKpOYmsDj9zCLpM4tHM0DWsFH2MBBSzQY,40525
  prefect/client/orchestration/_flow_runs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/client/orchestration/_flow_runs/client.py,sha256=zxCnz_lYz1BgCLBhlAEzKw0lAeMkyZ7aE1LxF2134xs,30714
  prefect/client/orchestration/_flows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -140,7 +140,7 @@ prefect/docker/docker_image.py,sha256=0PZjUCTe_20Zsrg-LtADV4HmPnAYzq7QdXRl22WK40
  prefect/events/__init__.py,sha256=GtKl2bE--pJduTxelH2xy7SadlLJmmis8WR1EYixhuA,2094
  prefect/events/actions.py,sha256=A7jS8bo4zWGnrt3QfSoQs0uYC1xfKXio3IfU0XtTb5s,9129
  prefect/events/clients.py,sha256=sGQ8ZHZVFyiI_O9p2gVgWsphpAapXQJVi4WpxufeQE4,24981
- prefect/events/filters.py,sha256=G1bkshD8mtVijIiHZ1lhyB_spGNXn1glZ8FmXCQIeuE,8056
+ prefect/events/filters.py,sha256=h9L6pukS9tD7Y8rGC3dt04KJINu0oJoti-flGLQTQQQ,8086
  prefect/events/related.py,sha256=A-1SVYwHtsxaDurRepnTsYbTWRBJSbtL5O_KffLaTwU,6534
  prefect/events/utilities.py,sha256=gaJEC5mMK9XsCt8wbWzuFhZTRyYYmfnMoR-S4s79zg4,2648
  prefect/events/worker.py,sha256=HjbibR0_J1W1nnNMZDFTXAbB0cl_cFGaFI87DvNGcnI,4557
@@ -167,15 +167,15 @@ prefect/locking/filesystem.py,sha256=zhNwdKroi2kLR6Cut6CMT-rWmFwtTtzuGKSwGH_Iw0s
  prefect/locking/memory.py,sha256=mFUgV750ywEL7aVQuxFjg9gxbjVU4esBQn7bGQYzeMY,7548
  prefect/locking/protocol.py,sha256=RsfvlaHTTEJ0YvYWSqFGoZuT2w4FPPxyQlHqjoyNGuE,4240
  prefect/logging/__init__.py,sha256=zx9f5_dWrR4DbcTOFBpNGOPoCZ1QcPFudr7zxb2XRpA,148
- prefect/logging/configuration.py,sha256=P3WF9JtN1TNildz9ylq6beiXvoku8v4YBrB-Gqs43OY,3395
+ prefect/logging/configuration.py,sha256=QIvmktuAZPteVnh8nd9jUb7vwGGkcUbBLyiti6XmbYM,3242
  prefect/logging/filters.py,sha256=NnRYubh9dMmWcCAjuW32cIVQ37rLxdn8ci26wTtQMyU,1136
  prefect/logging/formatters.py,sha256=BkPykVyOFKdnhDj_1vhhOoWiHiiBeRnWXPcaRIWK3aI,4125
  prefect/logging/handlers.py,sha256=XFqpZbAX6M5imW_87uZgf2NXMFB4ZfMvq5A-WQRRwNM,12250
  prefect/logging/highlighters.py,sha256=BCf_LNhFInIfGPqwuu8YVrGa4wVxNc4YXo2pYgftpg4,1811
- prefect/logging/loggers.py,sha256=XGwDE4raOFkdpZkiVSPZ9fcJ5F5-m-bFw-HxrHxYhYI,12792
+ prefect/logging/loggers.py,sha256=xkmHXsiuoPZZXcrrEgMA-ZQu0E-gW3tNVd4BIxWjnpM,12704
  prefect/logging/logging.yml,sha256=tT7gTyC4NmngFSqFkCdHaw7R0GPNPDDsTCGZQByiJAQ,3169
  prefect/runner/__init__.py,sha256=7U-vAOXFkzMfRz1q8Uv6Otsvc0OrPYLLP44srwkJ_8s,89
- prefect/runner/runner.py,sha256=u8jJRq2wcrXBIebK2O-BgeYc87NQ5-HCx8s623iFLyE,54429
+ prefect/runner/runner.py,sha256=boC5MYVVFQ0U876mzcUfWpNFMIkVELvruUX06MTxwUg,55315
  prefect/runner/server.py,sha256=yWU7jvu0XJYlNvtVsPJr5epCtC-w2umC2pu-fxT9qys,11209
  prefect/runner/storage.py,sha256=wlV8lwxMWJ6vgaQ7So48uUniPlN5hyNxYQ4hfDzHZ_Y,27462
  prefect/runner/submit.py,sha256=HQiNzP75-I3rXGaUKjD5TJjQkWEtbzcxXog0JfmamKw,8361
@@ -228,7 +228,8 @@ prefect/telemetry/logging.py,sha256=yn5D4D2GGRrAv0y8wlHPN7PZDmQucGjQT_YauK9M9Yo,
  prefect/telemetry/processors.py,sha256=jw6j6LviOVxw3IBJe7cSjsxFk0zzY43jUmy6C9pcfCE,2272
  prefect/telemetry/run_telemetry.py,sha256=NcMVqOc_wQVGPlGpE8cfrz-lyCbkG1EOKpcbjsqMnGA,8264
  prefect/telemetry/services.py,sha256=9X42FNth2ZH_RJ1W-Zw5yE6sjksjEdXHd9Ndstw1kGc,2374
- prefect/types/__init__.py,sha256=y8ScNX-f4nLl4Fi_IyLG8zEtQfnoOA5S2knm8LD5oQA,4759
+ prefect/types/__init__.py,sha256=UUsOIdxW61obAk1O2ID9TwlbA5n6EsEEjqkX_WikE68,4606
+ prefect/types/_datetime.py,sha256=5K2-1Wr7bYXMENnUFxlx5wiLE4NjzcxauJGLSsxwbig,658
  prefect/types/entrypoint.py,sha256=2FF03-wLPgtnqR_bKJDB2BsXXINPdu8ptY9ZYEZnXg8,328
  prefect/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/utilities/_deprecated.py,sha256=b3pqRSoFANdVJAc8TJkygBcP-VjZtLJUxVIWC7kwspI,1303
@@ -269,8 +270,8 @@ prefect/workers/cloud.py,sha256=qyyUMdiv5ndUIk-O8uwamESJGXXDQ_BmhKiLlm31ue0,286
  prefect/workers/process.py,sha256=wfVD9rxcGP1PHrppHBxhm0NwypjT9c78aFRUdSh6624,20175
  prefect/workers/server.py,sha256=SEuyScZ5nGm2OotdtbHjpvqJlTRVWCh29ND7FeL_fZA,1974
  prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
- prefect_client-3.1.13.dist-info/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
- prefect_client-3.1.13.dist-info/METADATA,sha256=3kPf642XISA4h-0GLx5uP8tcLiDxjzK0U8XdiLGGhYI,7287
- prefect_client-3.1.13.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- prefect_client-3.1.13.dist-info/top_level.txt,sha256=MJZYJgFdbRc2woQCeB4vM6T33tr01TmkEhRcns6H_H4,8
- prefect_client-3.1.13.dist-info/RECORD,,
+ prefect_client-3.1.14.dist-info/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+ prefect_client-3.1.14.dist-info/METADATA,sha256=5csh-np86AehCBtyfmZornXXzhbrFxYy0Ogzc2NnKA0,7287
+ prefect_client-3.1.14.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
+ prefect_client-3.1.14.dist-info/top_level.txt,sha256=MJZYJgFdbRc2woQCeB4vM6T33tr01TmkEhRcns6H_H4,8
+ prefect_client-3.1.14.dist-info/RECORD,,