prefect-client 2.17.0__py3-none-any.whl → 2.18.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (45)
  1. prefect/_internal/compatibility/deprecated.py +2 -0
  2. prefect/_internal/pydantic/_compat.py +1 -0
  3. prefect/_internal/pydantic/utilities/field_validator.py +25 -10
  4. prefect/_internal/pydantic/utilities/model_dump.py +1 -1
  5. prefect/_internal/pydantic/utilities/model_validate.py +1 -1
  6. prefect/_internal/pydantic/utilities/model_validator.py +11 -3
  7. prefect/_internal/schemas/validators.py +0 -6
  8. prefect/_version.py +97 -38
  9. prefect/blocks/abstract.py +34 -1
  10. prefect/blocks/notifications.py +14 -5
  11. prefect/client/base.py +10 -5
  12. prefect/client/orchestration.py +125 -66
  13. prefect/client/schemas/actions.py +4 -3
  14. prefect/client/schemas/objects.py +6 -5
  15. prefect/client/schemas/schedules.py +2 -6
  16. prefect/deployments/__init__.py +0 -2
  17. prefect/deployments/base.py +2 -144
  18. prefect/deployments/deployments.py +2 -2
  19. prefect/deployments/runner.py +2 -2
  20. prefect/deployments/steps/core.py +3 -3
  21. prefect/deprecated/packaging/serializers.py +5 -4
  22. prefect/events/__init__.py +45 -0
  23. prefect/events/actions.py +250 -19
  24. prefect/events/cli/__init__.py +0 -0
  25. prefect/events/cli/automations.py +163 -0
  26. prefect/events/clients.py +133 -7
  27. prefect/events/schemas/automations.py +76 -3
  28. prefect/events/schemas/deployment_triggers.py +17 -59
  29. prefect/events/utilities.py +2 -0
  30. prefect/events/worker.py +12 -2
  31. prefect/exceptions.py +1 -1
  32. prefect/logging/__init__.py +2 -2
  33. prefect/logging/loggers.py +64 -1
  34. prefect/results.py +29 -10
  35. prefect/serializers.py +62 -31
  36. prefect/settings.py +6 -10
  37. prefect/types/__init__.py +90 -0
  38. prefect/utilities/pydantic.py +34 -15
  39. prefect/utilities/schema_tools/hydration.py +88 -19
  40. prefect/variables.py +4 -4
  41. {prefect_client-2.17.0.dist-info → prefect_client-2.18.0.dist-info}/METADATA +1 -1
  42. {prefect_client-2.17.0.dist-info → prefect_client-2.18.0.dist-info}/RECORD +45 -42
  43. {prefect_client-2.17.0.dist-info → prefect_client-2.18.0.dist-info}/LICENSE +0 -0
  44. {prefect_client-2.17.0.dist-info → prefect_client-2.18.0.dist-info}/WHEEL +0 -0
  45. {prefect_client-2.17.0.dist-info → prefect_client-2.18.0.dist-info}/top_level.txt +0 -0
--- a/prefect/client/orchestration.py
+++ b/prefect/client/orchestration.py
@@ -8,6 +8,7 @@ from typing import (
     Dict,
     Iterable,
     List,
+    NoReturn,
     Optional,
     Set,
     Tuple,
@@ -23,15 +24,10 @@ import pendulum
 from prefect._internal.compatibility.deprecated import (
     handle_deprecated_infra_overrides_parameter,
 )
-from prefect._internal.compatibility.experimental import (
-    EXPERIMENTAL_WARNING,
-    ExperimentalFeature,
-    experiment_enabled,
-)
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
 from prefect.settings import (
-    PREFECT_EXPERIMENTAL_WARN,
-    PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES,
+    PREFECT_API_SERVICES_TRIGGERS_ENABLED,
+    PREFECT_EXPERIMENTAL_EVENTS,
 )

 if HAS_PYDANTIC_V2:
@@ -133,7 +129,7 @@ from prefect.client.schemas.sorting import (
     TaskRunSort,
 )
 from prefect.deprecated.data_documents import DataDocument
-from prefect.events.schemas.automations import Automation, ExistingAutomation
+from prefect.events.schemas.automations import Automation, AutomationCore
 from prefect.logging import get_logger
 from prefect.settings import (
     PREFECT_API_DATABASE_CONNECTION_URL,
@@ -161,6 +157,12 @@ class ServerType(AutoEnum):
     SERVER = AutoEnum.auto()
     CLOUD = AutoEnum.auto()

+    def supports_automations(self) -> bool:
+        if self == ServerType.CLOUD:
+            return True
+
+        return PREFECT_EXPERIMENTAL_EVENTS and PREFECT_API_SERVICES_TRIGGERS_ENABLED
+

 def get_client(httpx_settings: Optional[dict] = None) -> "PrefectClient":
     """
@@ -568,21 +570,6 @@ class PrefectClient:
         Returns:
             The flow run model
         """
-        if job_variables is not None and experiment_enabled("flow_run_infra_overrides"):
-            if (
-                PREFECT_EXPERIMENTAL_WARN
-                and PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES
-            ):
-                warnings.warn(
-                    EXPERIMENTAL_WARNING.format(
-                        feature="Flow run job variables",
-                        group="flow_run_infra_overrides",
-                        help="To use this feature, update your workers to Prefect 2.16.4 or later. ",
-                    ),
-                    ExperimentalFeature,
-                    stacklevel=3,
-                )
-
         parameters = parameters or {}
         context = context or {}
         state = state or prefect.states.Scheduled()
@@ -703,21 +690,6 @@ class PrefectClient:
         Returns:
             an `httpx.Response` object from the PATCH request
         """
-        if job_variables is not None and experiment_enabled("flow_run_infra_overrides"):
-            if (
-                PREFECT_EXPERIMENTAL_WARN
-                and PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES
-            ):
-                warnings.warn(
-                    EXPERIMENTAL_WARNING.format(
-                        feature="Flow run job variables",
-                        group="flow_run_infra_overrides",
-                        help="To use this feature, update your workers to Prefect 2.16.4 or later. ",
-                    ),
-                    ExperimentalFeature,
-                    stacklevel=3,
-                )
-
         params = {}
         if flow_version is not None:
             params["flow_version"] = flow_version
@@ -2998,34 +2970,6 @@ class PrefectClient:
         response.raise_for_status()
         return response.json()

-    async def create_automation(self, automation: Automation) -> UUID:
-        """Creates an automation in Prefect Cloud."""
-        if self.server_type != ServerType.CLOUD:
-            raise RuntimeError("Automations are only supported for Prefect Cloud.")
-
-        response = await self._client.post(
-            "/automations/",
-            json=automation.dict(json_compatible=True),
-        )
-
-        return UUID(response.json()["id"])
-
-    async def read_resource_related_automations(
-        self, resource_id: str
-    ) -> List[ExistingAutomation]:
-        if self.server_type != ServerType.CLOUD:
-            raise RuntimeError("Automations are only supported for Prefect Cloud.")
-
-        response = await self._client.get(f"/automations/related-to/{resource_id}")
-        response.raise_for_status()
-        return pydantic.parse_obj_as(List[ExistingAutomation], response.json())
-
-    async def delete_resource_owned_automations(self, resource_id: str):
-        if self.server_type != ServerType.CLOUD:
-            raise RuntimeError("Automations are only supported for Prefect Cloud.")
-
-        await self._client.delete(f"/automations/owned-by/{resource_id}")
-
     async def increment_concurrency_slots(
         self, names: List[str], slots: int, mode: str
     ) -> httpx.Response:
@@ -3165,6 +3109,121 @@ class PrefectClient:
         response = await self._client.delete(f"/flow_runs/{flow_run_id}/input/{key}")
         response.raise_for_status()

+    def _raise_for_unsupported_automations(self) -> NoReturn:
+        if not PREFECT_EXPERIMENTAL_EVENTS:
+            raise RuntimeError(
+                "The current server and client configuration does not support "
+                "events. Enable experimental events support with the "
+                "PREFECT_EXPERIMENTAL_EVENTS setting."
+            )
+        else:
+            raise RuntimeError(
+                "The current server and client configuration does not support "
+                "automations. Enable experimental automations with the "
+                "PREFECT_API_SERVICES_TRIGGERS_ENABLED setting."
+            )
+
+    async def create_automation(self, automation: AutomationCore) -> UUID:
+        """Creates an automation in Prefect Cloud."""
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        response = await self._client.post(
+            "/automations/",
+            json=automation.dict(json_compatible=True),
+        )
+
+        return UUID(response.json()["id"])
+
+    async def read_automations(self) -> List[Automation]:
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        response = await self._client.post("/automations/filter")
+        response.raise_for_status()
+        return pydantic.parse_obj_as(List[Automation], response.json())
+
+    async def find_automation(
+        self, id_or_name: str, exit_if_not_found: bool = True
+    ) -> Optional[Automation]:
+        try:
+            id = UUID(id_or_name)
+        except ValueError:
+            id = None
+
+        if id:
+            automation = await self.read_automation(id)
+            if automation:
+                return automation
+
+        automations = await self.read_automations()
+
+        # Look for it by an exact name
+        for automation in automations:
+            if automation.name == id_or_name:
+                return automation
+
+        # Look for it by a case-insensitive name
+        for automation in automations:
+            if automation.name.lower() == id_or_name.lower():
+                return automation
+
+        return None
+
+    async def read_automation(self, automation_id: UUID) -> Optional[Automation]:
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        response = await self._client.get(f"/automations/{automation_id}")
+        if response.status_code == 404:
+            return None
+        response.raise_for_status()
+        return Automation.parse_obj(response.json())
+
+    async def pause_automation(self, automation_id: UUID):
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        response = await self._client.patch(
+            f"/automations/{automation_id}", json={"enabled": False}
+        )
+        response.raise_for_status()
+
+    async def resume_automation(self, automation_id: UUID):
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        response = await self._client.patch(
+            f"/automations/{automation_id}", json={"enabled": True}
+        )
+        response.raise_for_status()
+
+    async def delete_automation(self, automation_id: UUID):
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        response = await self._client.delete(f"/automations/{automation_id}")
+        if response.status_code == 404:
+            return
+
+        response.raise_for_status()
+
+    async def read_resource_related_automations(
+        self, resource_id: str
+    ) -> List[Automation]:
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        response = await self._client.get(f"/automations/related-to/{resource_id}")
+        response.raise_for_status()
+        return pydantic.parse_obj_as(List[Automation], response.json())
+
+    async def delete_resource_owned_automations(self, resource_id: str):
+        if not self.server_type.supports_automations():
+            self._raise_for_unsupported_automations()
+
+        await self._client.delete(f"/automations/owned-by/{resource_id}")
+
     async def __aenter__(self):
         """
         Start the client.
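
Together these methods give the client full automation CRUD wherever `supports_automations()` holds, not just on Cloud. A rough sketch of how they compose; the `AutomationCore` and `EventTrigger` constructor fields shown here are assumptions based on this diff's imports, not definitions shown in it:

import asyncio

from prefect.client.orchestration import get_client
from prefect.events import AutomationCore, DoNothing, EventTrigger

async def main():
    async with get_client() as client:
        # create_automation takes the new id-less AutomationCore and
        # returns the server-assigned UUID.
        automation_id = await client.create_automation(
            AutomationCore(
                name="example-automation",
                trigger=EventTrigger(expect={"prefect.flow-run.Completed"}),
                actions=[DoNothing()],
            )
        )
        # read_automation returns None on a 404 rather than raising.
        print(await client.read_automation(automation_id))
        await client.pause_automation(automation_id)
        await client.resume_automation(automation_id)
        await client.delete_automation(automation_id)

asyncio.run(main())
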
--- a/prefect/client/schemas/actions.py
+++ b/prefect/client/schemas/actions.py
@@ -8,9 +8,9 @@ from prefect._internal.compatibility.deprecated import DeprecatedInfraOverridesF
 from prefect._internal.pydantic import HAS_PYDANTIC_V2

 if HAS_PYDANTIC_V2:
-    from pydantic.v1 import Field, conint, root_validator, validator
+    from pydantic.v1 import Field, root_validator, validator
 else:
-    from pydantic import Field, conint, root_validator, validator
+    from pydantic import Field, root_validator, validator

 import prefect.client.schemas.objects as objects
 from prefect._internal.schemas.bases import ActionBaseModel
@@ -26,6 +26,7 @@ from prefect._internal.schemas.validators import (
 )
 from prefect.client.schemas.objects import StateDetails, StateType
 from prefect.client.schemas.schedules import SCHEDULE_TYPES
+from prefect.types import NonNegativeInteger
 from prefect.utilities.collections import listrepr
 from prefect.utilities.pydantic import get_class_fields_only

@@ -525,7 +526,7 @@ class WorkPoolCreate(ActionBaseModel):
         default=False,
         description="Whether the work pool is paused.",
     )
-    concurrency_limit: Optional[conint(ge=0)] = Field(
+    concurrency_limit: Optional[NonNegativeInteger] = Field(
         default=None, description="A concurrency limit for the work pool."
     )

--- a/prefect/client/schemas/objects.py
+++ b/prefect/client/schemas/objects.py
@@ -19,11 +19,12 @@ from prefect._internal.compatibility.deprecated import (
     DeprecatedInfraOverridesField,
 )
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
+from prefect.types import NonNegativeInteger, PositiveInteger

 if HAS_PYDANTIC_V2:
-    from pydantic.v1 import Field, HttpUrl, conint, root_validator, validator
+    from pydantic.v1 import Field, HttpUrl, root_validator, validator
 else:
-    from pydantic import Field, HttpUrl, conint, root_validator, validator
+    from pydantic import Field, HttpUrl, root_validator, validator

 from typing_extensions import Literal

@@ -1188,10 +1189,10 @@ class WorkQueue(ObjectBaseModel):
     is_paused: bool = Field(
         default=False, description="Whether or not the work queue is paused."
     )
-    concurrency_limit: Optional[conint(ge=0)] = Field(
+    concurrency_limit: Optional[NonNegativeInteger] = Field(
         default=None, description="An optional concurrency limit for the work queue."
     )
-    priority: conint(ge=1) = Field(
+    priority: PositiveInteger = Field(
         default=1,
         description=(
             "The queue's priority. Lower values are higher priority (1 is the highest)."
@@ -1351,7 +1352,7 @@ class WorkPool(ObjectBaseModel):
         default=False,
         description="Pausing the work pool stops the delivery of all work.",
     )
-    concurrency_limit: Optional[conint(ge=0)] = Field(
+    concurrency_limit: Optional[NonNegativeInteger] = Field(
         default=None, description="A concurrency limit for the work pool."
     )
     status: Optional[WorkPoolStatus] = Field(
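
Both schema modules now pull their constrained integers from the new prefect/types/__init__.py (+90 lines, not shown in this diff). A hypothetical sketch of what the two aliases might reduce to, assuming they stay conint-based under the same pydantic v1/v2 shim used above; the real definitions may differ:

from prefect._internal.pydantic import HAS_PYDANTIC_V2

if HAS_PYDANTIC_V2:
    from pydantic.v1 import conint
else:
    from pydantic import conint

# Hypothetical equivalents of the imported aliases; the actual
# prefect/types definitions are not included in this diff.
NonNegativeInteger = conint(ge=0)
PositiveInteger = conint(ge=1)
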
--- a/prefect/client/schemas/schedules.py
+++ b/prefect/client/schemas/schedules.py
@@ -15,11 +15,11 @@ from prefect._internal.schemas.fields import DateTimeTZ
 from prefect._internal.schemas.validators import (
     default_anchor_date,
     default_timezone,
-    interval_schedule_must_be_positive,
     validate_cron_string,
     validate_rrule_string,
     validate_rrule_timezone,
 )
+from prefect.types import PositiveDuration

 if HAS_PYDANTIC_V2:
     from pydantic.v1 import Field, validator
@@ -64,14 +64,10 @@ class IntervalSchedule(PrefectBaseModel):
         extra = "forbid"
         exclude_none = True

-    interval: datetime.timedelta
+    interval: PositiveDuration
     anchor_date: DateTimeTZ = None
     timezone: Optional[str] = Field(default=None, examples=["America/New_York"])

-    @validator("interval")
-    def validate_interval_schedule(cls, v):
-        return interval_schedule_must_be_positive(v)
-
     @validator("anchor_date", always=True)
     def validate_anchor_date(cls, v):
         return default_anchor_date(v)
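
The standalone validator is gone because `PositiveDuration` enforces positivity at the field-type level. A quick sanity check of the expected behavior, assuming `PositiveDuration` rejects zero and negative timedeltas exactly as the removed `interval_schedule_must_be_positive` did:

import datetime

from prefect.client.schemas.schedules import IntervalSchedule

# A positive interval validates as before.
IntervalSchedule(interval=datetime.timedelta(minutes=10))

# A non-positive interval should now fail pydantic validation through the
# PositiveDuration type rather than the removed validator.
try:
    IntervalSchedule(interval=datetime.timedelta(0))
except Exception as exc:
    print(f"rejected: {exc}")
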
--- a/prefect/deployments/__init__.py
+++ b/prefect/deployments/__init__.py
@@ -1,9 +1,7 @@
 import prefect.deployments.base
 import prefect.deployments.steps
 from prefect.deployments.base import (
-    find_prefect_directory,
     initialize_project,
-    register_flow,
 )

 from prefect.deployments.deployments import (
--- a/prefect/deployments/base.py
+++ b/prefect/deployments/base.py
@@ -1,5 +1,5 @@
 """
-Core primitives for managing Prefect projects. Projects provide a minimally opinionated
+Core primitives for managing Prefect deployments via `prefect deploy`, providing a minimally opinionated
 build system for managing flows and deployments.

 To get started, follow along with [the deloyments tutorial](/tutorials/deployments/).
@@ -7,7 +7,6 @@ To get started, follow along with [the deloyments tutorial](/tutorials/deploymen

 import ast
 import asyncio
-import json
 import math
 import os
 import subprocess
@@ -22,50 +21,13 @@ from ruamel.yaml import YAML

 from prefect.client.schemas.objects import MinimalDeploymentSchedule
 from prefect.client.schemas.schedules import IntervalSchedule
-from prefect.flows import load_flow_from_entrypoint
 from prefect.logging import get_logger
 from prefect.settings import PREFECT_DEBUG_MODE
-from prefect.utilities.asyncutils import LazySemaphore, run_sync_in_worker_thread
+from prefect.utilities.asyncutils import LazySemaphore
 from prefect.utilities.filesystem import create_default_ignore_file, get_open_file_limit
 from prefect.utilities.templating import apply_values


-def find_prefect_directory(path: Path = None) -> Optional[Path]:
-    """
-    Given a path, recurses upward looking for .prefect/ directories.
-
-    Once found, returns absolute path to the ./prefect directory, which is assumed to reside within the
-    root for the current project.
-
-    If one is never found, `None` is returned.
-    """
-    path = Path(path or ".").resolve()
-    parent = path.parent.resolve()
-    while path != parent:
-        prefect_dir = path.joinpath(".prefect")
-        if prefect_dir.is_dir():
-            return prefect_dir
-
-        path = parent.resolve()
-        parent = path.parent.resolve()
-
-
-def set_prefect_hidden_dir(path: str = None) -> bool:
-    """
-    Creates default `.prefect/` directory if one does not already exist.
-    Returns boolean specifying whether or not a directory was created.
-
-    If a path is provided, the directory will be created in that location.
-    """
-    path = Path(path or ".") / ".prefect"
-
-    # use exists so that we dont accidentally overwrite a file
-    if path.exists():
-        return False
-    path.mkdir(mode=0o0700)
-    return True
-
-
 def create_default_prefect_yaml(
     path: str, name: str = None, contents: Optional[Dict[str, Any]] = None
 ) -> bool:
@@ -270,114 +232,10 @@ def initialize_project(
         files.append(".prefectignore")
     if create_default_prefect_yaml(".", name=project_name, contents=configuration):
         files.append("prefect.yaml")
-    if set_prefect_hidden_dir():
-        files.append(".prefect/")

     return files


-async def register_flow(entrypoint: str, force: bool = False):
-    """
-    Register a flow with this project from an entrypoint.
-
-    Args:
-        entrypoint (str): the entrypoint to the flow to register
-        force (bool, optional): whether or not to overwrite an existing flow with the same name
-
-    Raises:
-        ValueError: if `force` is `False` and registration would overwrite an existing flow
-    """
-    try:
-        fpath, obj_name = entrypoint.rsplit(":", 1)
-    except ValueError as exc:
-        if str(exc) == "not enough values to unpack (expected 2, got 1)":
-            missing_flow_name_msg = (
-                "Your flow entrypoint must include the name of the function that is"
-                f" the entrypoint to your flow.\nTry {entrypoint}:<flow_name> as your"
-                f" entrypoint. If you meant to specify '{entrypoint}' as the deployment"
-                f" name, try `prefect deploy -n {entrypoint}`."
-            )
-            raise ValueError(missing_flow_name_msg)
-        else:
-            raise exc
-
-    flow = await run_sync_in_worker_thread(load_flow_from_entrypoint, entrypoint)
-
-    fpath = Path(fpath).absolute()
-    prefect_dir = find_prefect_directory()
-    if not prefect_dir:
-        raise FileNotFoundError(
-            "No .prefect directory could be found - run `prefect project"
-            " init` to create one."
-        )
-
-    entrypoint = f"{fpath.relative_to(prefect_dir.parent)!s}:{obj_name}"
-
-    flows_file = prefect_dir / "flows.json"
-    if flows_file.exists():
-        with flows_file.open(mode="r") as f:
-            flows = json.load(f)
-    else:
-        flows = {}
-
-    ## quality control
-    if flow.name in flows and flows[flow.name] != entrypoint:
-        if not force:
-            raise ValueError(
-                f"Conflicting entry found for flow with name {flow.name!r}.\nExisting"
-                f" entrypoint: {flows[flow.name]}\nAttempted entrypoint:"
-                f" {entrypoint}\n\nYou can try removing the existing entry for"
-                f" {flow.name!r} from your [yellow]~/.prefect/flows.json[/yellow]."
-            )
-
-    flows[flow.name] = entrypoint
-
-    with flows_file.open(mode="w") as f:
-        json.dump(flows, f, sort_keys=True, indent=2)
-
-    return flow
-
-
-def _copy_deployments_into_prefect_file():
-    """
-    Copy deployments from the `deloyment.yaml` file into the `prefect.yaml` file.
-
-    Used to migrate users from the old `prefect.yaml` + `deployment.yaml` structure
-    to a single `prefect.yaml` file.
-    """
-    prefect_file = Path("prefect.yaml")
-    deployment_file = Path("deployment.yaml")
-    if not deployment_file.exists() or not prefect_file.exists():
-        raise FileNotFoundError(
-            "Could not find `prefect.yaml` or `deployment.yaml` files."
-        )
-
-    with deployment_file.open(mode="r") as f:
-        raw_deployment_file_contents = f.read()
-        parsed_deployment_file_contents = yaml.safe_load(raw_deployment_file_contents)
-
-    deployments = parsed_deployment_file_contents.get("deployments")
-
-    with prefect_file.open(mode="a") as f:
-        # If deployment.yaml is empty, write an empty deployments list to prefect.yaml.
-        if not parsed_deployment_file_contents:
-            f.write("\n")
-            f.write(yaml.dump({"deployments": []}, sort_keys=False))
-        # If there is no 'deployments' key in deployment.yaml, assume that the
-        # entire file is a single deployment.
-        elif not deployments:
-            f.write("\n")
-            f.write(
-                yaml.dump(
-                    {"deployments": [parsed_deployment_file_contents]}, sort_keys=False
-                )
-            )
-        # Write all of deployment.yaml to prefect.yaml.
-        else:
-            f.write("\n")
-            f.write(raw_deployment_file_contents)
-
-
 def _format_deployment_for_saving_to_prefect_file(
     deployment: Dict,
 ) -> Dict:
--- a/prefect/deployments/deployments.py
+++ b/prefect/deployments/deployments.py
@@ -41,7 +41,7 @@ else:

 from prefect.blocks.core import Block
 from prefect.blocks.fields import SecretDict
-from prefect.client.orchestration import PrefectClient, ServerType, get_client
+from prefect.client.orchestration import PrefectClient, get_client
 from prefect.client.schemas.objects import (
     FlowRun,
     MinimalDeploymentSchedule,
@@ -901,7 +901,7 @@ class Deployment(DeprecatedInfraOverridesField, BaseModel):
             enforce_parameter_schema=self.enforce_parameter_schema,
         )

-        if client.server_type == ServerType.CLOUD:
+        if client.server_type.supports_automations():
             # The triggers defined in the deployment spec are, essentially,
             # anonymous and attempting truly sync them with cloud is not
             # feasible. Instead, we remove all automations that are owned
--- a/prefect/deployments/runner.py
+++ b/prefect/deployments/runner.py
@@ -62,7 +62,7 @@ if HAS_PYDANTIC_V2:
 else:
     from pydantic import BaseModel, Field, PrivateAttr, root_validator, validator

-from prefect.client.orchestration import ServerType, get_client
+from prefect.client.orchestration import get_client
 from prefect.client.schemas.objects import MinimalDeploymentSchedule
 from prefect.client.schemas.schedules import (
     SCHEDULE_TYPES,
@@ -325,7 +325,7 @@ class RunnerDeployment(BaseModel):
                 f"Error while applying deployment: {str(exc)}"
             ) from exc

-        if client.server_type == ServerType.CLOUD:
+        if client.server_type.supports_automations():
            # The triggers defined in the deployment spec are, essentially,
            # anonymous and attempting truly sync them with cloud is not
            # feasible. Instead, we remove all automations that are owned
--- a/prefect/deployments/steps/core.py
+++ b/prefect/deployments/steps/core.py
@@ -1,11 +1,11 @@
 """
-Core primitives for running Prefect project steps.
+Core primitives for running Prefect deployment steps.

-Project steps are YAML representations of Python functions along with their inputs.
+Deployment steps are YAML representations of Python functions along with their inputs.

 Whenever a step is run, the following actions are taken:

-- The step's inputs and block / variable references are resolved (see [the projects concepts documentation](/concepts/projects/#templating-options) for more details)
+- The step's inputs and block / variable references are resolved (see [the `prefect deploy` documentation](/guides/prefect-deploy/#templating-options) for more details)
 - The step's function is imported; if it cannot be found, the `requires` keyword is used to install the necessary packages
 - The step's function is called with the resolved inputs
 - The step's output is returned and used to resolve inputs for subsequent steps
--- a/prefect/deprecated/packaging/serializers.py
+++ b/prefect/deprecated/packaging/serializers.py
@@ -55,20 +55,21 @@ class PickleSerializer(Serializer):

     picklelib: str = "cloudpickle"
     picklelib_version: str = None
+
     pickle_modules: List[str] = pydantic.Field(default_factory=list)

     @pydantic.validator("picklelib")
     def check_picklelib(cls, value):
         return validate_picklelib(value)

-    @pydantic.root_validator
-    def check_picklelib_version(cls, values):
-        return validate_picklelib_version(values)
-
     @pydantic.root_validator
     def check_picklelib_and_modules(cls, values):
         return validate_picklelib_and_modules(values)

+    @pydantic.root_validator
+    def check_picklelib_version(cls, values):
+        return validate_picklelib_version(values)
+
     def dumps(self, obj: Any) -> bytes:
         pickler = from_qualified_name(self.picklelib)

--- a/prefect/events/__init__.py
+++ b/prefect/events/__init__.py
@@ -2,7 +2,9 @@ from .schemas.events import Event, ReceivedEvent
 from .schemas.events import Resource, RelatedResource, ResourceSpecification
 from .schemas.automations import (
     Automation,
+    AutomationCore,
     Posture,
+    TriggerTypes,
     Trigger,
     ResourceTrigger,
     EventTrigger,
@@ -20,6 +22,27 @@ from .schemas.deployment_triggers import (
     DeploymentCompoundTrigger,
     DeploymentSequenceTrigger,
 )
+from .actions import (
+    ActionTypes,
+    Action,
+    DoNothing,
+    RunDeployment,
+    PauseDeployment,
+    ResumeDeployment,
+    ChangeFlowRunState,
+    CancelFlowRun,
+    SuspendFlowRun,
+    CallWebhook,
+    SendNotification,
+    PauseWorkPool,
+    ResumeWorkPool,
+    PauseWorkQueue,
+    ResumeWorkQueue,
+    PauseAutomation,
+    ResumeAutomation,
+    DeclareIncident,
+)
+from .clients import get_events_client, get_events_subscriber
 from .utilities import emit_event

 __all__ = [
@@ -29,7 +52,9 @@ __all__ = [
     "RelatedResource",
     "ResourceSpecification",
     "Automation",
+    "AutomationCore",
     "Posture",
+    "TriggerTypes",
     "Trigger",
     "ResourceTrigger",
     "EventTrigger",
@@ -44,5 +69,25 @@ __all__ = [
     "DeploymentMetricTrigger",
     "DeploymentCompoundTrigger",
     "DeploymentSequenceTrigger",
+    "ActionTypes",
+    "Action",
+    "DoNothing",
+    "RunDeployment",
+    "PauseDeployment",
+    "ResumeDeployment",
+    "ChangeFlowRunState",
+    "CancelFlowRun",
+    "SuspendFlowRun",
+    "CallWebhook",
+    "SendNotification",
+    "PauseWorkPool",
+    "ResumeWorkPool",
+    "PauseWorkQueue",
+    "ResumeWorkQueue",
+    "PauseAutomation",
+    "ResumeAutomation",
+    "DeclareIncident",
     "emit_event",
+    "get_events_client",
+    "get_events_subscriber",
 ]
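
With these re-exports, event emission and all automation action types are importable from `prefect.events` directly. A minimal call through the re-exported `emit_event` helper (the event name and resource labels here are illustrative):

from prefect.events import emit_event

emit_event(
    event="my-app.order.created",
    resource={"prefect.resource.id": "my-app.order.12345"},
)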