prefect-client 3.1.14__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. prefect/__main__.py +4 -0
  2. prefect/_experimental/lineage.py +40 -22
  3. prefect/_experimental/sla/objects.py +29 -1
  4. prefect/_internal/compatibility/deprecated.py +4 -4
  5. prefect/_internal/compatibility/migration.py +1 -1
  6. prefect/_internal/concurrency/calls.py +1 -2
  7. prefect/_internal/concurrency/cancellation.py +2 -4
  8. prefect/_internal/concurrency/services.py +1 -1
  9. prefect/_internal/concurrency/threads.py +3 -3
  10. prefect/_internal/schemas/bases.py +3 -11
  11. prefect/_internal/schemas/validators.py +36 -60
  12. prefect/_result_records.py +235 -0
  13. prefect/_version.py +3 -3
  14. prefect/agent.py +1 -0
  15. prefect/artifacts.py +408 -105
  16. prefect/automations.py +4 -8
  17. prefect/blocks/core.py +1 -1
  18. prefect/blocks/notifications.py +13 -8
  19. prefect/cache_policies.py +2 -0
  20. prefect/client/base.py +7 -8
  21. prefect/client/collections.py +3 -6
  22. prefect/client/orchestration/__init__.py +15 -263
  23. prefect/client/orchestration/_deployments/client.py +14 -6
  24. prefect/client/orchestration/_flow_runs/client.py +10 -6
  25. prefect/client/orchestration/_work_pools/__init__.py +0 -0
  26. prefect/client/orchestration/_work_pools/client.py +598 -0
  27. prefect/client/orchestration/base.py +9 -2
  28. prefect/client/schemas/actions.py +77 -3
  29. prefect/client/schemas/objects.py +22 -50
  30. prefect/client/schemas/schedules.py +11 -22
  31. prefect/client/types/flexible_schedule_list.py +2 -1
  32. prefect/context.py +2 -3
  33. prefect/deployments/base.py +13 -16
  34. prefect/deployments/flow_runs.py +1 -1
  35. prefect/deployments/runner.py +236 -47
  36. prefect/deployments/schedules.py +7 -1
  37. prefect/engine.py +4 -9
  38. prefect/events/clients.py +39 -0
  39. prefect/events/schemas/automations.py +4 -2
  40. prefect/events/utilities.py +15 -13
  41. prefect/exceptions.py +1 -1
  42. prefect/flow_engine.py +119 -0
  43. prefect/flow_runs.py +4 -8
  44. prefect/flows.py +282 -31
  45. prefect/infrastructure/__init__.py +1 -0
  46. prefect/infrastructure/base.py +1 -0
  47. prefect/infrastructure/provisioners/__init__.py +3 -6
  48. prefect/infrastructure/provisioners/coiled.py +3 -3
  49. prefect/infrastructure/provisioners/container_instance.py +1 -0
  50. prefect/infrastructure/provisioners/ecs.py +6 -6
  51. prefect/infrastructure/provisioners/modal.py +3 -3
  52. prefect/input/run_input.py +5 -7
  53. prefect/locking/filesystem.py +4 -3
  54. prefect/main.py +1 -1
  55. prefect/results.py +42 -249
  56. prefect/runner/runner.py +9 -4
  57. prefect/runner/server.py +5 -5
  58. prefect/runner/storage.py +12 -10
  59. prefect/runner/submit.py +2 -4
  60. prefect/runtime/task_run.py +37 -9
  61. prefect/schedules.py +231 -0
  62. prefect/serializers.py +5 -5
  63. prefect/settings/__init__.py +2 -1
  64. prefect/settings/base.py +3 -3
  65. prefect/settings/models/root.py +4 -0
  66. prefect/settings/models/server/services.py +50 -9
  67. prefect/settings/sources.py +4 -4
  68. prefect/states.py +42 -11
  69. prefect/task_engine.py +10 -10
  70. prefect/task_runners.py +11 -22
  71. prefect/task_worker.py +9 -9
  72. prefect/tasks.py +28 -45
  73. prefect/telemetry/bootstrap.py +4 -6
  74. prefect/telemetry/services.py +2 -4
  75. prefect/types/__init__.py +2 -1
  76. prefect/types/_datetime.py +28 -1
  77. prefect/utilities/_engine.py +0 -1
  78. prefect/utilities/asyncutils.py +4 -8
  79. prefect/utilities/collections.py +13 -22
  80. prefect/utilities/dispatch.py +2 -4
  81. prefect/utilities/dockerutils.py +6 -6
  82. prefect/utilities/importtools.py +1 -68
  83. prefect/utilities/names.py +1 -1
  84. prefect/utilities/processutils.py +3 -6
  85. prefect/utilities/pydantic.py +4 -6
  86. prefect/utilities/render_swagger.py +1 -1
  87. prefect/utilities/schema_tools/hydration.py +6 -5
  88. prefect/utilities/templating.py +21 -8
  89. prefect/utilities/visualization.py +2 -4
  90. prefect/workers/base.py +3 -3
  91. prefect/workers/block.py +1 -0
  92. prefect/workers/cloud.py +1 -0
  93. prefect/workers/process.py +1 -0
  94. {prefect_client-3.1.14.dist-info → prefect_client-3.2.0.dist-info}/METADATA +1 -1
  95. {prefect_client-3.1.14.dist-info → prefect_client-3.2.0.dist-info}/RECORD +98 -93
  96. {prefect_client-3.1.14.dist-info → prefect_client-3.2.0.dist-info}/LICENSE +0 -0
  97. {prefect_client-3.1.14.dist-info → prefect_client-3.2.0.dist-info}/WHEEL +0 -0
  98. {prefect_client-3.1.14.dist-info → prefect_client-3.2.0.dist-info}/top_level.txt +0 -0
prefect/flows.py CHANGED
@@ -49,7 +49,6 @@ from typing_extensions import Literal, ParamSpec
  from prefect._experimental.sla.objects import SlaTypes
  from prefect._internal.concurrency.api import create_call, from_async
  from prefect.blocks.core import Block
- from prefect.client.schemas.actions import DeploymentScheduleCreate
  from prefect.client.schemas.filters import WorkerFilter, WorkerFilterStatus
  from prefect.client.schemas.objects import ConcurrencyLimitConfig, FlowRun
  from prefect.client.utilities import client_injector
@@ -69,6 +68,7 @@ from prefect.futures import PrefectFuture
  from prefect.logging import get_logger
  from prefect.logging.loggers import flow_run_logger
  from prefect.results import ResultSerializer, ResultStorage
+ from prefect.schedules import Schedule
  from prefect.settings import (
  PREFECT_DEFAULT_WORK_POOL_NAME,
  PREFECT_FLOW_DEFAULT_RETRIES,
@@ -82,6 +82,7 @@ from prefect.types import BANNED_CHARACTERS, WITHOUT_BANNED_CHARACTERS
  from prefect.types.entrypoint import EntrypointType
  from prefect.utilities.annotations import NotSet
  from prefect.utilities.asyncutils import (
+ run_coro_as_sync,
  run_sync_in_worker_thread,
  sync_compatible,
  )
@@ -97,7 +98,7 @@ from prefect.utilities.filesystem import relative_path_to_current_platform
  from prefect.utilities.hashing import file_hash
  from prefect.utilities.importtools import import_object, safe_load_namespace

- from ._internal.compatibility.async_dispatch import is_in_async_context
+ from ._internal.compatibility.async_dispatch import async_dispatch, is_in_async_context
  from ._internal.pydantic.v2_schema import is_v2_type
  from ._internal.pydantic.v2_validated_func import V2ValidatedFunction
  from ._internal.pydantic.v2_validated_func import (
@@ -119,8 +120,7 @@ class FlowStateHook(Protocol, Generic[P, R]):

  def __call__(
  self, flow: Flow[P, R], flow_run: FlowRun, state: State
- ) -> Awaitable[None] | None:
- ...
+ ) -> Awaitable[None] | None: ...


  if TYPE_CHECKING:
@@ -654,8 +654,7 @@ class Flow(Generic[P, R]):
  serialized_parameters[key] = f"<{type(value).__name__}>"
  return serialized_parameters

- @sync_compatible
- async def to_deployment(
+ async def ato_deployment(
  self,
  name: str,
  interval: Optional[
@@ -669,6 +668,7 @@ class Flow(Generic[P, R]):
  cron: Optional[Union[Iterable[str], str]] = None,
  rrule: Optional[Union[Iterable[str], str]] = None,
  paused: Optional[bool] = None,
+ schedule: Optional[Schedule] = None,
  schedules: Optional["FlexibleScheduleList"] = None,
  concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
  parameters: Optional[dict[str, Any]] = None,
@@ -684,7 +684,7 @@ class Flow(Generic[P, R]):
  _sla: Optional[Union[SlaTypes, list[SlaTypes]]] = None, # experimental
  ) -> "RunnerDeployment":
  """
- Creates a runner deployment object for this flow.
+ Asynchronously creates a runner deployment object for this flow.

  Args:
  name: The name to give the created deployment.
@@ -693,6 +693,8 @@ class Flow(Generic[P, R]):
  cron: A cron schedule of when to execute runs of this deployment.
  rrule: An rrule schedule of when to execute runs of this deployment.
  paused: Whether or not to set this deployment as paused.
+ schedule: A schedule object defining when to execute runs of this deployment.
+ Used to provide additional scheduling options like `timezone` or `parameters`.
  schedules: A list of schedule objects defining when to execute runs of this deployment.
  Used to define multiple schedules or additional scheduling options such as `timezone`.
  concurrency_limit: The maximum number of runs of this deployment that can run at the same time.
@@ -740,7 +742,7 @@ class Flow(Generic[P, R]):
  _raise_on_name_with_banned_characters(name)

  if self._storage and self._entrypoint:
- return await RunnerDeployment.from_storage(
+ return await RunnerDeployment.afrom_storage(
  storage=self._storage,
  entrypoint=self._entrypoint,
  name=name,
@@ -749,6 +751,7 @@ class Flow(Generic[P, R]):
  cron=cron,
  rrule=rrule,
  paused=paused,
+ schedule=schedule,
  schedules=schedules,
  concurrency_limit=concurrency_limit,
  tags=tags,
@@ -761,7 +764,7 @@ class Flow(Generic[P, R]):
  work_queue_name=work_queue_name,
  job_variables=job_variables,
  _sla=_sla,
- ) # type: ignore # TODO: remove sync_compatible
+ )
  else:
  return RunnerDeployment.from_flow(
  flow=self,
@@ -770,6 +773,147 @@ class Flow(Generic[P, R]):
  cron=cron,
  rrule=rrule,
  paused=paused,
+ schedule=schedule,
+ schedules=schedules,
+ concurrency_limit=concurrency_limit,
+ tags=tags,
+ triggers=triggers,
+ parameters=parameters or {},
+ description=description,
+ version=version,
+ enforce_parameter_schema=enforce_parameter_schema,
+ work_pool_name=work_pool_name,
+ work_queue_name=work_queue_name,
+ job_variables=job_variables,
+ entrypoint_type=entrypoint_type,
+ _sla=_sla,
+ )
+
+ @async_dispatch(ato_deployment)
+ def to_deployment(
+ self,
+ name: str,
+ interval: Optional[
+ Union[
+ Iterable[Union[int, float, datetime.timedelta]],
+ int,
+ float,
+ datetime.timedelta,
+ ]
+ ] = None,
+ cron: Optional[Union[Iterable[str], str]] = None,
+ rrule: Optional[Union[Iterable[str], str]] = None,
+ paused: Optional[bool] = None,
+ schedule: Optional[Schedule] = None,
+ schedules: Optional["FlexibleScheduleList"] = None,
+ concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
+ parameters: Optional[dict[str, Any]] = None,
+ triggers: Optional[list[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
+ description: Optional[str] = None,
+ tags: Optional[list[str]] = None,
+ version: Optional[str] = None,
+ enforce_parameter_schema: bool = True,
+ work_pool_name: Optional[str] = None,
+ work_queue_name: Optional[str] = None,
+ job_variables: Optional[dict[str, Any]] = None,
+ entrypoint_type: EntrypointType = EntrypointType.FILE_PATH,
+ _sla: Optional[Union[SlaTypes, list[SlaTypes]]] = None, # experimental
+ ) -> "RunnerDeployment":
+ """
+ Creates a runner deployment object for this flow.
+
+ Args:
+ name: The name to give the created deployment.
+ interval: An interval on which to execute the new deployment. Accepts either a number
+ or a timedelta object. If a number is given, it will be interpreted as seconds.
+ cron: A cron schedule of when to execute runs of this deployment.
+ rrule: An rrule schedule of when to execute runs of this deployment.
+ paused: Whether or not to set this deployment as paused.
+ schedule: A schedule object defining when to execute runs of this deployment.
+ Used to provide additional scheduling options like `timezone` or `parameters`.
+ schedules: A list of schedule objects defining when to execute runs of this deployment.
+ Used to define multiple schedules or additional scheduling options such as `timezone`.
+ concurrency_limit: The maximum number of runs of this deployment that can run at the same time.
+ parameters: A dictionary of default parameter values to pass to runs of this deployment.
+ triggers: A list of triggers that will kick off runs of this deployment.
+ description: A description for the created deployment. Defaults to the flow's
+ description if not provided.
+ tags: A list of tags to associate with the created deployment for organizational
+ purposes.
+ version: A version for the created deployment. Defaults to the flow's version.
+ enforce_parameter_schema: Whether or not the Prefect API should enforce the
+ parameter schema for the created deployment.
+ work_pool_name: The name of the work pool to use for this deployment.
+ work_queue_name: The name of the work queue to use for this deployment's scheduled runs.
+ If not provided the default work queue for the work pool will be used.
+ job_variables: Settings used to override the values specified default base job template
+ of the chosen work pool. Refer to the base job template of the chosen work pool for
+ entrypoint_type: Type of entrypoint to use for the deployment. When using a module path
+ entrypoint, ensure that the module will be importable in the execution environment.
+ _sla: (Experimental) SLA configuration for the deployment. May be removed or modified at any time. Currently only supported on Prefect Cloud.
+
+ Examples:
+ Prepare two deployments and serve them:
+
+ ```python
+ from prefect import flow, serve
+
+ @flow
+ def my_flow(name):
+ print(f"hello {name}")
+
+ @flow
+ def my_other_flow(name):
+ print(f"goodbye {name}")
+
+ if __name__ == "__main__":
+ hello_deploy = my_flow.to_deployment("hello", tags=["dev"])
+ bye_deploy = my_other_flow.to_deployment("goodbye", tags=["dev"])
+ serve(hello_deploy, bye_deploy)
+ ```
+ """
+ from prefect.deployments.runner import RunnerDeployment
+
+ if not name.endswith(".py"):
+ _raise_on_name_with_banned_characters(name)
+
+ if self._storage and self._entrypoint:
+ return cast(
+ RunnerDeployment,
+ RunnerDeployment.from_storage(
+ storage=self._storage,
+ entrypoint=self._entrypoint,
+ name=name,
+ flow_name=self.name,
+ interval=interval,
+ cron=cron,
+ rrule=rrule,
+ paused=paused,
+ schedule=schedule,
+ schedules=schedules,
+ concurrency_limit=concurrency_limit,
+ tags=tags,
+ triggers=triggers,
+ parameters=parameters or {},
+ description=description,
+ version=version,
+ enforce_parameter_schema=enforce_parameter_schema,
+ work_pool_name=work_pool_name,
+ work_queue_name=work_queue_name,
+ job_variables=job_variables,
+ _sla=_sla,
+ _sync=True, # pyright: ignore[reportCallIssue] _sync is valid because .from_storage is decorated with async_dispatch
+ ),
+ )
+ else:
+ return RunnerDeployment.from_flow(
+ flow=self,
+ name=name,
+ interval=interval,
+ cron=cron,
+ rrule=rrule,
+ paused=paused,
+ schedule=schedule,
  schedules=schedules,
  concurrency_limit=concurrency_limit,
  tags=tags,
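In 3.2 the synchronous `to_deployment` shown above is a plain sync implementation decorated with `@async_dispatch(ato_deployment)`: sync callers get a `RunnerDeployment` back directly, while callers already inside an event loop can use the new `ato_deployment` coroutine. A minimal sketch of both call paths (the flow and deployment names are hypothetical, not taken from this diff):

```python
import asyncio

from prefect import flow


@flow
def etl():
    print("running etl")


def build_sync() -> None:
    # Outside an event loop, to_deployment() returns a RunnerDeployment directly.
    deployment = etl.to_deployment("etl-nightly", tags=["dev"])
    print(deployment.name)


async def build_async() -> None:
    # Inside an event loop, call the explicit async variant and await it.
    deployment = await etl.ato_deployment("etl-nightly", tags=["dev"])
    print(deployment.name)


if __name__ == "__main__":
    build_sync()
    asyncio.run(build_async())
```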
@@ -819,6 +963,7 @@ class Flow(Generic[P, R]):
  cron: Optional[Union[Iterable[str], str]] = None,
  rrule: Optional[Union[Iterable[str], str]] = None,
  paused: Optional[bool] = None,
+ schedule: Optional[Schedule] = None,
  schedules: Optional["FlexibleScheduleList"] = None,
  global_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
  triggers: Optional[list[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
@@ -848,6 +993,8 @@ class Flow(Generic[P, R]):
  Also accepts an iterable of rrule schedule strings to create multiple schedules.
  triggers: A list of triggers that will kick off runs of this deployment.
  paused: Whether or not to set this deployment as paused.
+ schedule: A schedule object defining when to execute runs of this deployment.
+ Used to provide additional scheduling options like `timezone` or `parameters`.
  schedules: A list of schedule objects defining when to execute runs of this deployment.
  Used to define multiple schedules or additional scheduling options like `timezone`.
  global_limit: The maximum number of concurrent runs allowed across all served flow instances associated with the same deployment.
@@ -913,6 +1060,7 @@ class Flow(Generic[P, R]):
  cron=cron,
  rrule=rrule,
  paused=paused,
+ schedule=schedule,
  schedules=schedules,
  concurrency_limit=global_limit,
  parameters=parameters,
@@ -957,14 +1105,13 @@ class Flow(Generic[P, R]):
  loop.stop()

  @classmethod
- @sync_compatible
- async def from_source(
+ async def afrom_source(
  cls,
  source: Union[str, "RunnerStorage", ReadableDeploymentStorage],
  entrypoint: str,
  ) -> "Flow[..., Any]":
  """
- Loads a flow from a remote source.
+ Loads a flow from a remote source asynchronously.

  Args:
  source: Either a URL to a git repository or a storage object.
@@ -1070,6 +1217,115 @@ class Flow(Generic[P, R]):

  return flow

+ @classmethod
+ @async_dispatch(afrom_source)
+ def from_source(
+ cls,
+ source: Union[str, "RunnerStorage", ReadableDeploymentStorage],
+ entrypoint: str,
+ ) -> "Flow[..., Any]":
+ """
+ Loads a flow from a remote source.
+
+ Args:
+ source: Either a URL to a git repository or a storage object.
+ entrypoint: The path to a file containing a flow and the name of the flow function in
+ the format `./path/to/file.py:flow_func_name`.
+
+ Returns:
+ A new `Flow` instance.
+
+ Examples:
+ Load a flow from a public git repository:
+
+
+ ```python
+ from prefect import flow
+ from prefect.runner.storage import GitRepository
+ from prefect.blocks.system import Secret
+
+ my_flow = flow.from_source(
+ source="https://github.com/org/repo.git",
+ entrypoint="flows.py:my_flow",
+ )
+
+ my_flow()
+ ```
+
+ Load a flow from a private git repository using an access token stored in a `Secret` block:
+
+ ```python
+ from prefect import flow
+ from prefect.runner.storage import GitRepository
+ from prefect.blocks.system import Secret
+
+ my_flow = flow.from_source(
+ source=GitRepository(
+ url="https://github.com/org/repo.git",
+ credentials={"access_token": Secret.load("github-access-token")}
+ ),
+ entrypoint="flows.py:my_flow",
+ )
+
+ my_flow()
+ ```
+
+ Load a flow from a local directory:
+
+ ``` python
+ # from_local_source.py
+
+ from pathlib import Path
+ from prefect import flow
+
+ @flow(log_prints=True)
+ def my_flow(name: str = "world"):
+ print(f"Hello {name}! I'm a flow from a Python script!")
+
+ if __name__ == "__main__":
+ my_flow.from_source(
+ source=str(Path(__file__).parent),
+ entrypoint="from_local_source.py:my_flow",
+ ).deploy(
+ name="my-deployment",
+ parameters=dict(name="Marvin"),
+ work_pool_name="local",
+ )
+ ```
+ """
+
+ from prefect.runner.storage import (
+ BlockStorageAdapter,
+ LocalStorage,
+ RunnerStorage,
+ create_storage_from_source,
+ )
+
+ if isinstance(source, (Path, str)):
+ if isinstance(source, Path):
+ source = str(source)
+ storage = create_storage_from_source(source)
+ elif isinstance(source, RunnerStorage):
+ storage = source
+ elif hasattr(source, "get_directory"):
+ storage = BlockStorageAdapter(source)
+ else:
+ raise TypeError(
+ f"Unsupported source type {type(source).__name__!r}. Please provide a"
+ " URL to remote storage or a storage object."
+ )
+ with tempfile.TemporaryDirectory() as tmpdir:
+ if not isinstance(storage, LocalStorage):
+ storage.set_base_path(Path(tmpdir))
+ run_coro_as_sync(storage.pull_code())
+
+ full_entrypoint = str(storage.destination / entrypoint)
+ flow = load_flow_from_entrypoint(full_entrypoint)
+ flow._storage = storage
+ flow._entrypoint = entrypoint
+
+ return flow
+
  @sync_compatible
  async def deploy(
  self,
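`from_source` gets the same treatment: the `@sync_compatible` coroutine is replaced by a sync implementation dispatched against the new `afrom_source` classmethod. A minimal sketch of the async variant (the repository URL and entrypoint are placeholders; whether the `flow` decorator shortcut also gains an async form is not shown in this diff, so `Flow` is used directly):

```python
import asyncio

from prefect.flows import Flow


async def main() -> None:
    # Load the flow without blocking the running event loop.
    my_flow = await Flow.afrom_source(
        source="https://github.com/org/repo.git",  # placeholder repository
        entrypoint="flows.py:my_flow",  # placeholder entrypoint
    )
    # Build a deployment for it with the async counterpart added in this release.
    await my_flow.ato_deployment("my-deployment")


if __name__ == "__main__":
    asyncio.run(main())
```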
@@ -1084,7 +1340,8 @@ class Flow(Generic[P, R]):
  cron: Optional[str] = None,
  rrule: Optional[str] = None,
  paused: Optional[bool] = None,
- schedules: Optional[list[DeploymentScheduleCreate]] = None,
+ schedule: Optional[Schedule] = None,
+ schedules: Optional[list[Schedule]] = None,
  concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
  triggers: Optional[list[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
  parameters: Optional[dict[str, Any]] = None,
@@ -1131,6 +1388,8 @@ class Flow(Generic[P, R]):
  Also accepts an iterable of rrule schedule strings to create multiple schedules.
  triggers: A list of triggers that will kick off runs of this deployment.
  paused: Whether or not to set this deployment as paused.
+ schedule: A schedule object defining when to execute runs of this deployment.
+ Used to provide additional scheduling options like `timezone` or `parameters`.
  schedules: A list of schedule objects defining when to execute runs of this deployment.
  Used to define multiple schedules or additional scheduling options like `timezone`.
  concurrency_limit: The maximum number of runs that can be executed concurrently.
@@ -1215,6 +1474,7 @@ class Flow(Generic[P, R]):
  interval=interval,
  cron=cron,
  rrule=rrule,
+ schedule=schedule,
  schedules=schedules,
  concurrency_limit=concurrency_limit,
  paused=paused,
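`deploy` likewise swaps `DeploymentScheduleCreate` for the new `Schedule` type from `prefect/schedules.py` and accepts a single `schedule` alongside the typed `schedules` list. A minimal sketch of passing one (the work pool, image, and cron string are placeholders, and assuming `Schedule` accepts a `cron` field in addition to the `timezone` option named in the added docstring):

```python
from prefect import flow
from prefect.schedules import Schedule


@flow(log_prints=True)
def nightly_report():
    print("building report")


if __name__ == "__main__":
    nightly_report.deploy(
        name="nightly",
        work_pool_name="my-docker-pool",         # placeholder work pool
        image="my-registry/nightly-report:dev",  # placeholder image
        # Assumed constructor: a cron expression plus a timezone.
        schedule=Schedule(cron="0 2 * * *", timezone="UTC"),
    )
```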
@@ -1292,16 +1552,14 @@ class Flow(Generic[P, R]):
  @overload
  def __call__(
  self: "Flow[P, Coroutine[Any, Any, T]]", *args: P.args, **kwargs: P.kwargs
- ) -> Coroutine[Any, Any, T]:
- ...
+ ) -> Coroutine[Any, Any, T]: ...

  @overload
  def __call__(
  self: "Flow[P, T]",
  *args: P.args,
  **kwargs: P.kwargs,
- ) -> T:
- ...
+ ) -> T: ...

  @overload
  def __call__(
@@ -1309,8 +1567,7 @@ class Flow(Generic[P, R]):
  *args: P.args,
  return_state: Literal[True],
  **kwargs: P.kwargs,
- ) -> Awaitable[State[T]]:
- ...
+ ) -> Awaitable[State[T]]: ...

  @overload
  def __call__(
@@ -1318,8 +1575,7 @@ class Flow(Generic[P, R]):
  *args: P.args,
  return_state: Literal[True],
  **kwargs: P.kwargs,
- ) -> State[T]:
- ...
+ ) -> State[T]: ...

  def __call__(
  self,
@@ -1458,8 +1714,7 @@ class Flow(Generic[P, R]):

  class FlowDecorator:
  @overload
- def __call__(self, __fn: Callable[P, R]) -> Flow[P, R]:
- ...
+ def __call__(self, __fn: Callable[P, R]) -> Flow[P, R]: ...

  @overload
  def __call__(
@@ -1485,8 +1740,7 @@ class FlowDecorator:
  on_cancellation: Optional[list[FlowStateHook[..., Any]]] = None,
  on_crashed: Optional[list[FlowStateHook[..., Any]]] = None,
  on_running: Optional[list[FlowStateHook[..., Any]]] = None,
- ) -> Callable[[Callable[P, R]], Flow[P, R]]:
- ...
+ ) -> Callable[[Callable[P, R]], Flow[P, R]]: ...

  @overload
  def __call__(
@@ -1512,8 +1766,7 @@ class FlowDecorator:
  on_cancellation: Optional[list[FlowStateHook[..., Any]]] = None,
  on_crashed: Optional[list[FlowStateHook[..., Any]]] = None,
  on_running: Optional[list[FlowStateHook[..., Any]]] = None,
- ) -> Callable[[Callable[P, R]], Flow[P, R]]:
- ...
+ ) -> Callable[[Callable[P, R]], Flow[P, R]]: ...

  def __call__(
  self,
@@ -1707,8 +1960,7 @@ class FlowDecorator:
  def from_source(
  source: Union[str, "RunnerStorage", ReadableDeploymentStorage],
  entrypoint: str,
- ) -> Union["Flow[..., Any]", Coroutine[Any, Any, "Flow[..., Any]"]]:
- ...
+ ) -> Union["Flow[..., Any]", Coroutine[Any, Any, "Flow[..., Any]"]]: ...


  flow: FlowDecorator = FlowDecorator()
@@ -1980,8 +2232,7 @@ def _display_serve_start_message(*args: "RunnerDeployment"):
  from rich.table import Table

  help_message_top = (
- "[green]Your deployments are being served and polling for"
- " scheduled runs!\n[/]"
+ "[green]Your deployments are being served and polling for scheduled runs!\n[/]"
  )

  table = Table(title="Deployments", show_header=False)
@@ -1,6 +1,7 @@
  """
  2024-06-27: This surfaces an actionable error message for moved or removed objects in Prefect 3.0 upgrade.
  """
+
  from typing import Any, Callable

  from prefect._internal.compatibility.migration import getattr_migration
@@ -1,6 +1,7 @@
  """
  2024-06-27: This surfaces an actionable error message for moved or removed objects in Prefect 3.0 upgrade.
  """
+
  from typing import Any, Callable

  from prefect._internal.compatibility.migration import getattr_migration
@@ -22,20 +22,17 @@ _provisioners = {

  class Provisioner(Protocol):
  @property
- def console(self) -> rich.console.Console:
- ...
+ def console(self) -> rich.console.Console: ...

  @console.setter
- def console(self, value: rich.console.Console) -> None:
- ...
+ def console(self, value: rich.console.Console) -> None: ...

  async def provision(
  self,
  work_pool_name: str,
  base_job_template: Dict[str, Any],
  client: Optional["PrefectClient"] = None,
- ) -> Dict[str, Any]:
- ...
+ ) -> Dict[str, Any]: ...


  def get_infrastructure_provisioner_for_work_pool_type(
@@ -118,9 +118,9 @@ class CoiledPushProvisioner:
  block_type_id=credentials_block_type.id
  )
  )
- assert (
- credentials_block_schema is not None
- ), f"Unable to find schema for block type {credentials_block_type.slug}"
+ assert credentials_block_schema is not None, (
+ f"Unable to find schema for block type {credentials_block_type.slug}"
+ )

  block_doc = await client.create_block_document(
  block_document=BlockDocumentCreate(
@@ -10,6 +10,7 @@ Classes:
  ContainerInstancePushProvisioner: A class for provisioning infrastructure using Azure Container Instances.

  """
+
  from __future__ import annotations

  import json
@@ -341,9 +341,9 @@ class CredentialsBlockResource:
  block_type_id=credentials_block_type.id
  )
  )
- assert (
- credentials_block_schema is not None
- ), f"Unable to find schema for block type {credentials_block_type.slug}"
+ assert credentials_block_schema is not None, (
+ f"Unable to find schema for block type {credentials_block_type.slug}"
+ )

  block_doc = await client.create_block_document(
  block_document=BlockDocumentCreate(
@@ -597,9 +597,9 @@ class ClusterResource:
  )
  advance()

- base_job_template["variables"]["properties"]["cluster"][
- "default"
- ] = self._cluster_name
+ base_job_template["variables"]["properties"]["cluster"]["default"] = (
+ self._cluster_name
+ )

  @property
  def next_steps(self) -> list[str]:
@@ -124,9 +124,9 @@ class ModalPushProvisioner:
  block_type_id=credentials_block_type.id
  )
  )
- assert (
- credentials_block_schema is not None
- ), f"Unable to find schema for block type {credentials_block_type.slug}"
+ assert credentials_block_schema is not None, (
+ f"Unable to find schema for block type {credentials_block_type.slug}"
+ )

  block_doc = await client.create_block_document(
  block_document=BlockDocumentCreate(
@@ -664,8 +664,7 @@ def receive_input( # type: ignore[overload-overlap]
  key_prefix: Optional[str] = None,
  flow_run_id: Optional[UUID] = None,
  with_metadata: bool = False,
- ) -> GetInputHandler[R]:
- ...
+ ) -> GetInputHandler[R]: ...


  @overload
@@ -678,8 +677,7 @@ def receive_input(
  key_prefix: Optional[str] = None,
  flow_run_id: Optional[UUID] = None,
  with_metadata: bool = False,
- ) -> GetAutomaticInputHandler[T]:
- ...
+ ) -> GetAutomaticInputHandler[T]: ...


  def receive_input(
@@ -697,9 +695,9 @@ def receive_input(
  # the signature is the same as here:
  # Union[Type[R], Type[T], pydantic.BaseModel],
  # Seems like a possible mypy bug, so we'll ignore the type check here.
- input_cls: Union[
- Type[AutomaticRunInput[T]], Type[R]
- ] = run_input_subclass_from_type(input_type) # type: ignore[arg-type]
+ input_cls: Union[Type[AutomaticRunInput[T]], Type[R]] = (
+ run_input_subclass_from_type(input_type)
+ ) # type: ignore[arg-type]

  if issubclass(input_cls, AutomaticRunInput):
  return input_cls.receive(