prefect-client 3.1.15__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. prefect/_experimental/sla/objects.py +29 -1
  2. prefect/_internal/compatibility/deprecated.py +4 -4
  3. prefect/_internal/compatibility/migration.py +1 -1
  4. prefect/_internal/concurrency/calls.py +1 -2
  5. prefect/_internal/concurrency/cancellation.py +2 -4
  6. prefect/_internal/concurrency/threads.py +3 -3
  7. prefect/_internal/schemas/bases.py +3 -11
  8. prefect/_internal/schemas/validators.py +36 -60
  9. prefect/_result_records.py +235 -0
  10. prefect/_version.py +3 -3
  11. prefect/agent.py +1 -0
  12. prefect/automations.py +4 -8
  13. prefect/blocks/notifications.py +8 -8
  14. prefect/cache_policies.py +2 -0
  15. prefect/client/base.py +7 -8
  16. prefect/client/collections.py +3 -6
  17. prefect/client/orchestration/__init__.py +15 -263
  18. prefect/client/orchestration/_deployments/client.py +14 -6
  19. prefect/client/orchestration/_flow_runs/client.py +10 -6
  20. prefect/client/orchestration/_work_pools/__init__.py +0 -0
  21. prefect/client/orchestration/_work_pools/client.py +598 -0
  22. prefect/client/orchestration/base.py +9 -2
  23. prefect/client/schemas/actions.py +66 -2
  24. prefect/client/schemas/objects.py +22 -50
  25. prefect/client/schemas/schedules.py +7 -18
  26. prefect/client/types/flexible_schedule_list.py +2 -1
  27. prefect/context.py +2 -3
  28. prefect/deployments/flow_runs.py +1 -1
  29. prefect/deployments/runner.py +119 -43
  30. prefect/deployments/schedules.py +7 -1
  31. prefect/engine.py +4 -9
  32. prefect/events/schemas/automations.py +4 -2
  33. prefect/events/utilities.py +15 -13
  34. prefect/exceptions.py +1 -1
  35. prefect/flow_engine.py +8 -8
  36. prefect/flow_runs.py +4 -8
  37. prefect/flows.py +30 -22
  38. prefect/infrastructure/__init__.py +1 -0
  39. prefect/infrastructure/base.py +1 -0
  40. prefect/infrastructure/provisioners/__init__.py +3 -6
  41. prefect/infrastructure/provisioners/coiled.py +3 -3
  42. prefect/infrastructure/provisioners/container_instance.py +1 -0
  43. prefect/infrastructure/provisioners/ecs.py +6 -6
  44. prefect/infrastructure/provisioners/modal.py +3 -3
  45. prefect/input/run_input.py +5 -7
  46. prefect/locking/filesystem.py +4 -3
  47. prefect/main.py +1 -1
  48. prefect/results.py +42 -249
  49. prefect/runner/runner.py +9 -4
  50. prefect/runner/server.py +5 -5
  51. prefect/runner/storage.py +12 -10
  52. prefect/runner/submit.py +2 -4
  53. prefect/schedules.py +231 -0
  54. prefect/serializers.py +5 -5
  55. prefect/settings/__init__.py +2 -1
  56. prefect/settings/base.py +3 -3
  57. prefect/settings/models/root.py +4 -0
  58. prefect/settings/models/server/services.py +50 -9
  59. prefect/settings/sources.py +4 -4
  60. prefect/states.py +42 -11
  61. prefect/task_engine.py +10 -10
  62. prefect/task_runners.py +11 -22
  63. prefect/task_worker.py +9 -9
  64. prefect/tasks.py +22 -41
  65. prefect/telemetry/bootstrap.py +4 -6
  66. prefect/telemetry/services.py +2 -4
  67. prefect/types/__init__.py +2 -1
  68. prefect/types/_datetime.py +28 -1
  69. prefect/utilities/_engine.py +0 -1
  70. prefect/utilities/asyncutils.py +4 -8
  71. prefect/utilities/collections.py +13 -22
  72. prefect/utilities/dispatch.py +2 -4
  73. prefect/utilities/dockerutils.py +6 -6
  74. prefect/utilities/importtools.py +1 -68
  75. prefect/utilities/names.py +1 -1
  76. prefect/utilities/processutils.py +3 -6
  77. prefect/utilities/pydantic.py +4 -6
  78. prefect/utilities/schema_tools/hydration.py +6 -5
  79. prefect/utilities/templating.py +16 -10
  80. prefect/utilities/visualization.py +2 -4
  81. prefect/workers/base.py +3 -3
  82. prefect/workers/block.py +1 -0
  83. prefect/workers/cloud.py +1 -0
  84. prefect/workers/process.py +1 -0
  85. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/METADATA +1 -1
  86. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/RECORD +89 -85
  87. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/LICENSE +0 -0
  88. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/WHEEL +0 -0
  89. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/top_level.txt +0 -0
prefect/results.py CHANGED
@@ -14,14 +14,12 @@ from typing import (
     Any,
     Callable,
     ClassVar,
-    Generic,
     Optional,
     TypeVar,
     Union,
 )
 from uuid import UUID
 
-import pendulum
 from cachetools import LRUCache
 from pydantic import (
     BaseModel,
@@ -29,22 +27,16 @@ from pydantic import (
     Discriminator,
     Field,
     Tag,
-    ValidationError,
-    model_validator,
 )
 from typing_extensions import ParamSpec, Self
 
 import prefect
-from prefect._experimental.lineage import (
-    emit_result_read_event,
-    emit_result_write_event,
-)
 from prefect._internal.compatibility.async_dispatch import async_dispatch
+from prefect._result_records import R, ResultRecord, ResultRecordMetadata
 from prefect.blocks.core import Block
 from prefect.exceptions import (
     ConfigurationError,
     MissingContextError,
-    SerializationError,
 )
 from prefect.filesystems import (
     LocalFileSystem,
@@ -53,7 +45,7 @@ from prefect.filesystems import (
 )
 from prefect.locking.protocol import LockManager
 from prefect.logging import get_logger
-from prefect.serializers import PickleSerializer, Serializer
+from prefect.serializers import Serializer
 from prefect.settings.context import get_current_settings
 from prefect.types import DateTime
 from prefect.utilities.annotations import NotSet
@@ -77,7 +69,6 @@ def DEFAULT_STORAGE_KEY_FN() -> str:
 
 logger: "logging.Logger" = get_logger("results")
 P = ParamSpec("P")
-R = TypeVar("R")
 
 _default_storages: dict[tuple[str, str], WritableFileSystem] = {}
 
@@ -373,6 +364,31 @@ class ResultStore(BaseModel):
             return None
         return getattr(self.result_storage, "_block_document_id", None)
 
+    @classmethod
+    async def _from_metadata(cls, metadata: ResultRecordMetadata) -> "ResultRecord[R]":
+        """
+        Create a result record from metadata.
+
+        Will use the result record metadata to fetch data via a result store.
+
+        Args:
+            metadata: The metadata to create the result record from.
+
+        Returns:
+            ResultRecord: The result record.
+        """
+        if metadata.storage_block_id is None:
+            storage_block = None
+        else:
+            storage_block = await aresolve_result_storage(metadata.storage_block_id)
+        store = cls(result_storage=storage_block, serializer=metadata.serializer)
+        if metadata.storage_key is None:
+            raise ValueError(
+                "storage_key is required to hydrate a result record from metadata"
+            )
+        result = await store.aread(metadata.storage_key)
+        return result
+
     @sync_compatible
     async def update_for_flow(self, flow: "Flow[..., Any]") -> Self:
         """
@@ -503,7 +519,7 @@ class ResultStore(BaseModel):
         if metadata.expiration:
             # if the result has an expiration,
             # check if it is still in the future
-            exists = metadata.expiration > pendulum.now("utc")
+            exists = metadata.expiration > DateTime.now("utc")
         else:
             exists = True
         return exists
@@ -555,6 +571,8 @@ class ResultStore(BaseModel):
             A result record.
         """
 
+        from prefect._experimental.lineage import emit_result_read_event
+
         if self.lock_manager is not None and not self.is_lock_holder(key, holder):
             await self.await_for_lock(key)
 
@@ -576,19 +594,19 @@ class ResultStore(BaseModel):
                 {},
             )
             metadata = ResultRecordMetadata.load_bytes(metadata_content)
-            assert (
-                metadata.storage_key is not None
-            ), "Did not find storage key in metadata"
+            assert metadata.storage_key is not None, (
+                "Did not find storage key in metadata"
+            )
             result_content = await _call_explicitly_async_block_method(
                 self.result_storage,
                 "read_path",
                 (metadata.storage_key,),
                 {},
             )
-            result_record: ResultRecord[
-                Any
-            ] = ResultRecord.deserialize_from_result_and_metadata(
-                result=result_content, metadata=metadata_content
+            result_record: ResultRecord[Any] = (
+                ResultRecord.deserialize_from_result_and_metadata(
+                    result=result_content, metadata=metadata_content
+                )
             )
             await emit_result_read_event(self, resolved_key_path)
         else:
@@ -740,9 +758,11 @@ class ResultStore(BaseModel):
             result_record: The result record to persist.
             holder: The holder of the lock if a lock was set on the record.
         """
-        assert (
-            result_record.metadata.storage_key is not None
-        ), "Storage key is required on result record"
+        assert result_record.metadata.storage_key is not None, (
+            "Storage key is required on result record"
+        )
+
+        from prefect._experimental.lineage import emit_result_write_event
 
         key = result_record.metadata.storage_key
         if result_record.metadata.storage_block_id is None:
@@ -1008,230 +1028,3 @@ def get_result_store() -> ResultStore:
     else:
         result_store = run_context.result_store
     return result_store
-
-
-class ResultRecordMetadata(BaseModel):
-    """
-    Metadata for a result record.
-    """
-
-    storage_key: Optional[str] = Field(
-        default=None
-    )  # optional for backwards compatibility
-    expiration: Optional[DateTime] = Field(default=None)
-    serializer: Serializer = Field(default_factory=PickleSerializer)
-    prefect_version: str = Field(default=prefect.__version__)
-    storage_block_id: Optional[uuid.UUID] = Field(default=None)
-
-    def dump_bytes(self) -> bytes:
-        """
-        Serialize the metadata to bytes.
-
-        Returns:
-            bytes: the serialized metadata
-        """
-        return self.model_dump_json(serialize_as_any=True).encode()
-
-    @classmethod
-    def load_bytes(cls, data: bytes) -> "ResultRecordMetadata":
-        """
-        Deserialize metadata from bytes.
-
-        Args:
-            data: the serialized metadata
-
-        Returns:
-            ResultRecordMetadata: the deserialized metadata
-        """
-        return cls.model_validate_json(data)
-
-    def __eq__(self, other: Any) -> bool:
-        if not isinstance(other, ResultRecordMetadata):
-            return False
-        return (
-            self.storage_key == other.storage_key
-            and self.expiration == other.expiration
-            and self.serializer == other.serializer
-            and self.prefect_version == other.prefect_version
-            and self.storage_block_id == other.storage_block_id
-        )
-
-
-class ResultRecord(BaseModel, Generic[R]):
-    """
-    A record of a result.
-    """
-
-    metadata: ResultRecordMetadata
-    result: R
-
-    @property
-    def expiration(self) -> DateTime | None:
-        return self.metadata.expiration
-
-    @property
-    def serializer(self) -> Serializer:
-        return self.metadata.serializer
-
-    def serialize_result(self) -> bytes:
-        try:
-            data = self.serializer.dumps(self.result)
-        except Exception as exc:
-            extra_info = (
-                'You can try a different serializer (e.g. result_serializer="json") '
-                "or disabling persistence (persist_result=False) for this flow or task."
-            )
-            # check if this is a known issue with cloudpickle and pydantic
-            # and add extra information to help the user recover
-
-            if (
-                isinstance(exc, TypeError)
-                and isinstance(self.result, BaseModel)
-                and str(exc).startswith("cannot pickle")
-            ):
-                try:
-                    from IPython.core.getipython import get_ipython
-
-                    if get_ipython() is not None:
-                        extra_info = inspect.cleandoc(
-                            """
-                            This is a known issue in Pydantic that prevents
-                            locally-defined (non-imported) models from being
-                            serialized by cloudpickle in IPython/Jupyter
-                            environments. Please see
-                            https://github.com/pydantic/pydantic/issues/8232 for
-                            more information. To fix the issue, either: (1) move
-                            your Pydantic class definition to an importable
-                            location, (2) use the JSON serializer for your flow
-                            or task (`result_serializer="json"`), or (3)
-                            disable result persistence for your flow or task
-                            (`persist_result=False`).
-                            """
-                        ).replace("\n", " ")
-                except ImportError:
-                    pass
-            raise SerializationError(
-                f"Failed to serialize object of type {type(self.result).__name__!r} with "
-                f"serializer {self.serializer.type!r}. {extra_info}"
-            ) from exc
-
-        return data
-
-    @model_validator(mode="before")
-    @classmethod
-    def coerce_old_format(cls, value: dict[str, Any] | Any) -> dict[str, Any]:
-        if isinstance(value, dict):
-            if "data" in value:
-                value["result"] = value.pop("data")
-            if "metadata" not in value:
-                value["metadata"] = {}
-            if "expiration" in value:
-                value["metadata"]["expiration"] = value.pop("expiration")
-            if "serializer" in value:
-                value["metadata"]["serializer"] = value.pop("serializer")
-            if "prefect_version" in value:
-                value["metadata"]["prefect_version"] = value.pop("prefect_version")
-        return value
-
-    @classmethod
-    async def _from_metadata(cls, metadata: ResultRecordMetadata) -> "ResultRecord[R]":
-        """
-        Create a result record from metadata.
-
-        Will use the result record metadata to fetch data via a result store.
-
-        Args:
-            metadata: The metadata to create the result record from.
-
-        Returns:
-            ResultRecord: The result record.
-        """
-        if metadata.storage_block_id is None:
-            storage_block = None
-        else:
-            storage_block = await aresolve_result_storage(metadata.storage_block_id)
-        store = ResultStore(
-            result_storage=storage_block, serializer=metadata.serializer
-        )
-        if metadata.storage_key is None:
-            raise ValueError(
-                "storage_key is required to hydrate a result record from metadata"
-            )
-        result = await store.aread(metadata.storage_key)
-        return result
-
-    def serialize_metadata(self) -> bytes:
-        return self.metadata.dump_bytes()
-
-    def serialize(
-        self,
-    ) -> bytes:
-        """
-        Serialize the record to bytes.
-
-        Returns:
-            bytes: the serialized record
-
-        """
-        return (
-            self.model_copy(update={"result": self.serialize_result()})
-            .model_dump_json(serialize_as_any=True)
-            .encode()
-        )
-
-    @classmethod
-    def deserialize(
-        cls, data: bytes, backup_serializer: Serializer | None = None
-    ) -> "ResultRecord[R]":
-        """
-        Deserialize a record from bytes.
-
-        Args:
-            data: the serialized record
-            backup_serializer: The serializer to use to deserialize the result record. Only
-                necessary if the provided data does not specify a serializer.
-
-        Returns:
-            ResultRecord: the deserialized record
-        """
-        try:
-            instance = cls.model_validate_json(data)
-        except ValidationError:
-            if backup_serializer is None:
-                raise
-            else:
-                result = backup_serializer.loads(data)
-                return cls(
-                    metadata=ResultRecordMetadata(serializer=backup_serializer),
-                    result=result,
-                )
-        if isinstance(instance.result, bytes):
-            instance.result = instance.serializer.loads(instance.result)
-        elif isinstance(instance.result, str):
-            instance.result = instance.serializer.loads(instance.result.encode())
-        return instance
-
-    @classmethod
-    def deserialize_from_result_and_metadata(
-        cls, result: bytes, metadata: bytes
-    ) -> "ResultRecord[R]":
-        """
-        Deserialize a record from separate result and metadata bytes.
-
-        Args:
-            result: the result
-            metadata: the serialized metadata
-
-        Returns:
-            ResultRecord: the deserialized record
-        """
-        result_record_metadata = ResultRecordMetadata.load_bytes(metadata)
-        return cls(
-            metadata=result_record_metadata,
-            result=result_record_metadata.serializer.loads(result),
-        )
-
-    def __eq__(self, other: Any | "ResultRecord[Any]") -> bool:
-        if not isinstance(other, ResultRecord):
-            return False
-        return self.metadata == other.metadata and self.result == other.result
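The net effect of this file's changes: `ResultRecord`, `ResultRecordMetadata`, and the `R` type variable moved to the new `prefect/_result_records.py` module, and `prefect/results.py` now imports them at module level, so the old import path should keep resolving. A minimal sketch of the relocation and the serialize/deserialize round trip that moved with the classes (the `JSONSerializer` choice here is illustrative; the metadata default remains `PickleSerializer`):

```python
# Sketch: the record classes now live in prefect._result_records, but the old
# path still resolves because prefect/results.py imports them at module level.
from prefect._result_records import ResultRecord, ResultRecordMetadata
from prefect.results import ResultRecord as ResultRecordViaOldPath
from prefect.serializers import JSONSerializer

assert ResultRecord is ResultRecordViaOldPath

# Round-trip a record through bytes, as ResultStore does when persisting.
record = ResultRecord(
    metadata=ResultRecordMetadata(storage_key="demo-key", serializer=JSONSerializer()),
    result={"answer": 42},
)
restored = ResultRecord.deserialize(record.serialize())
assert restored.result == {"answer": 42}
```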
prefect/runner/runner.py CHANGED
@@ -64,7 +64,6 @@ from uuid import UUID, uuid4
 
 import anyio
 import anyio.abc
-import pendulum
 from cachetools import LRUCache
 from typing_extensions import Self
 
@@ -95,6 +94,7 @@ from prefect.exceptions import Abort, ObjectNotFound
 from prefect.flows import Flow, FlowStateHook, load_flow_from_flow_run
 from prefect.logging.loggers import PrefectLogAdapter, flow_run_logger, get_logger
 from prefect.runner.storage import RunnerStorage
+from prefect.schedules import Schedule
 from prefect.settings import (
     PREFECT_API_URL,
     PREFECT_RUNNER_SERVER_ENABLE,
@@ -105,6 +105,7 @@ from prefect.states import (
     Pending,
     exception_to_failed_state,
 )
+from prefect.types._datetime import DateTime
 from prefect.types.entrypoint import EntrypointType
 from prefect.utilities.asyncutils import (
     asyncnullcontext,
@@ -278,6 +279,7 @@ class Runner:
         cron: Optional[Union[Iterable[str], str]] = None,
         rrule: Optional[Union[Iterable[str], str]] = None,
         paused: Optional[bool] = None,
+        schedule: Optional[Schedule] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
         parameters: Optional[dict[str, Any]] = None,
@@ -303,6 +305,8 @@ class Runner:
             cron: A cron schedule of when to execute runs of this flow.
             rrule: An rrule schedule of when to execute runs of this flow.
             paused: Whether or not to set the created deployment as paused.
+            schedule: A schedule object defining when to execute runs of this deployment.
+                Used to provide additional scheduling options like `timezone` or `parameters`.
             schedules: A list of schedule objects defining when to execute runs of this flow.
                 Used to define multiple schedules or additional scheduling options like `timezone`.
             concurrency_limit: The maximum number of concurrent runs of this flow to allow.
@@ -317,7 +321,7 @@ class Runner:
             entrypoint, ensure that the module will be importable in the execution environment.
         """
         api = PREFECT_API_URL.value()
-        if any([interval, cron, rrule]) and not api:
+        if any([interval, cron, rrule, schedule, schedules]) and not api:
             self._logger.warning(
                 "Cannot schedule flows on an ephemeral server; run `prefect server"
                 " start` to start the scheduler."
@@ -329,6 +333,7 @@ class Runner:
             interval=interval,
             cron=cron,
             rrule=rrule,
+            schedule=schedule,
             schedules=schedules,
             paused=paused,
             triggers=triggers,
@@ -797,7 +802,7 @@ class Runner:
         if self.stopping:
             return
         runs_response = await self._get_scheduled_flow_runs()
-        self.last_polled: pendulum.DateTime = pendulum.now("UTC")
+        self.last_polled: DateTime = DateTime.now("UTC")
         return await self._submit_scheduled_flow_runs(flow_run_response=runs_response)
 
     async def _check_for_cancelled_flow_runs(
@@ -1063,7 +1068,7 @@ class Runner:
         """
         Retrieve scheduled flow runs for this runner.
         """
-        scheduled_before = pendulum.now("utc").add(seconds=int(self._prefetch_seconds))
+        scheduled_before = DateTime.now("utc").add(seconds=int(self._prefetch_seconds))
         self._logger.debug(
             f"Querying for flow runs scheduled before {scheduled_before}"
         )
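The new singular `schedule` parameter pairs with the `Schedule` class from the new `prefect/schedules.py` module (see the file list above). A hedged sketch of how it might be used when serving a flow; the `cron`, `timezone`, and `parameters` fields are inferred from the docstring added in this hunk, not confirmed API, so verify against `prefect/schedules.py`:

```python
from prefect import flow
from prefect.schedules import Schedule


@flow
def daily_report(region: str = "us-east-1"):
    print(f"reporting for {region}")


if __name__ == "__main__":
    # One Schedule object, instead of a bare cron= string, carries extra
    # scheduling options such as a timezone and run parameters (per the
    # docstring added above). Field names here are assumptions.
    daily_report.serve(
        name="daily-report",
        schedule=Schedule(
            cron="0 9 * * *",
            timezone="America/New_York",
            parameters={"region": "eu-west-1"},
        ),
    )
```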
prefect/runner/server.py CHANGED
@@ -1,7 +1,6 @@
 import uuid
 from typing import TYPE_CHECKING, Any, Callable, Coroutine, Hashable, Optional, Tuple
 
-import pendulum
 import uvicorn
 from fastapi import APIRouter, FastAPI, HTTPException, status
 from fastapi.responses import JSONResponse
@@ -22,6 +21,7 @@ from prefect.settings import (
     PREFECT_RUNNER_SERVER_MISSED_POLLS_TOLERANCE,
     PREFECT_RUNNER_SERVER_PORT,
 )
+from prefect.types._datetime import DateTime
 from prefect.utilities.asyncutils import run_coro_as_sync
 from prefect.utilities.importtools import load_script_as_module
 
@@ -54,7 +54,7 @@ def perform_health_check(
         )
 
     def _health_check():
-        now = pendulum.now("utc")
+        now = DateTime.now("utc")
         poll_delay = (now - runner.last_polled).total_seconds()
 
         if poll_delay > delay_threshold:
@@ -139,9 +139,9 @@ async def get_deployment_router(
         )
 
         # Used for updating the route schemas later on
-        schemas[
-            f"{deployment.name}-{deployment_id}"
-        ] = deployment.parameter_openapi_schema
+        schemas[f"{deployment.name}-{deployment_id}"] = (
+            deployment.parameter_openapi_schema
+        )
         schemas[deployment_id] = deployment.name
     return router, schemas
 
prefect/runner/storage.py CHANGED
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import shutil
 import subprocess
 from copy import deepcopy
@@ -61,7 +63,7 @@ class RunnerStorage(Protocol):
         """
         ...
 
-    def to_pull_step(self) -> dict[str, Any]:
+    def to_pull_step(self) -> dict[str, Any] | list[dict[str, Any]]:
         """
         Returns a dictionary representation of the storage object that can be
         used as a deployment pull step.
@@ -347,13 +349,13 @@ class GitRepository:
             }
         }
         if self._include_submodules:
-            pull_step["prefect.deployments.steps.git_clone"][
-                "include_submodules"
-            ] = self._include_submodules
+            pull_step["prefect.deployments.steps.git_clone"]["include_submodules"] = (
+                self._include_submodules
+            )
         if isinstance(self._credentials, Block):
-            pull_step["prefect.deployments.steps.git_clone"][
-                "credentials"
-            ] = f"{{{{ {self._credentials.get_block_placeholder()} }}}}"
+            pull_step["prefect.deployments.steps.git_clone"]["credentials"] = (
+                f"{{{{ {self._credentials.get_block_placeholder()} }}}}"
+            )
         elif isinstance(self._credentials, dict):
             if isinstance(self._credentials.get("access_token"), Secret):
                 pull_step["prefect.deployments.steps.git_clone"]["credentials"] = {
@@ -546,9 +548,9 @@ class RemoteStorage:
             }
         }
         if required_package:
-            step["prefect.deployments.steps.pull_from_remote_storage"][
-                "requires"
-            ] = required_package
+            step["prefect.deployments.steps.pull_from_remote_storage"]["requires"] = (
+                required_package
+            )
         return step
 
     def __eq__(self, __value: Any) -> bool:
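Besides the mechanical formatting changes, the `RunnerStorage.to_pull_step` protocol method now returns either a single pull step or a list of steps. A hypothetical storage class sketching why that matters; only `to_pull_step` appears in this diff, so the rest of the protocol surface shown here is assumed:

```python
from pathlib import Path
from typing import Any


class CloneAndSetupStorage:
    """Hypothetical RunnerStorage implementation emitting two pull steps."""

    def __init__(self, url: str):
        self._url = url
        self._base_path = Path.cwd()

    @property
    def destination(self) -> Path:
        return self._base_path / "clone-and-setup"

    def set_base_path(self, path: Path) -> None:
        self._base_path = path

    async def pull_code(self) -> None:
        ...  # fetch code into self.destination

    def to_pull_step(self) -> list[dict[str, Any]]:
        # Returning a list is valid under the widened signature above:
        # clone first, then run a follow-up setup step.
        return [
            {"prefect.deployments.steps.git_clone": {"repository": self._url}},
            {"prefect.deployments.steps.run_shell_script": {"script": "make setup"}},
        ]
```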
prefect/runner/submit.py CHANGED
@@ -101,8 +101,7 @@ def submit_to_runner(
     prefect_callable: Union[Flow[Any, Any], Task[Any, Any]],
     parameters: Dict[str, Any],
     retry_failed_submissions: bool = True,
-) -> FlowRun:
-    ...
+) -> FlowRun: ...
 
 
 @overload
@@ -110,8 +109,7 @@ def submit_to_runner(
     prefect_callable: Union[Flow[Any, Any], Task[Any, Any]],
     parameters: list[dict[str, Any]],
     retry_failed_submissions: bool = True,
-) -> list[FlowRun]:
-    ...
+) -> list[FlowRun]: ...
 
 
 @sync_compatible
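The two overloads above only changed formatting (the `...` body collapsed onto the signature line), but they document the call shapes: one parameter dict yields one `FlowRun`, a list of dicts yields a list. An illustrative sketch, assuming a runner with its webserver enabled is already serving the flow:

```python
from prefect import flow
from prefect.runner.submit import submit_to_runner


@flow
def etl(source: str) -> None: ...


# One parameter dict -> one FlowRun (first overload).
run = submit_to_runner(etl, {"source": "s3://bucket/a"})

# A list of parameter dicts -> a list of FlowRuns (second overload).
runs = submit_to_runner(
    etl,
    [{"source": "s3://bucket/a"}, {"source": "s3://bucket/b"}],
)
```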