prefect-client 2.16.7__py3-none-any.whl → 2.16.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. prefect/_internal/compatibility/experimental.py +9 -8
  2. prefect/_internal/concurrency/api.py +23 -42
  3. prefect/_internal/concurrency/waiters.py +25 -22
  4. prefect/_internal/pydantic/__init__.py +12 -3
  5. prefect/_internal/pydantic/_base_model.py +7 -4
  6. prefect/_internal/pydantic/_compat.py +39 -453
  7. prefect/_internal/pydantic/_flags.py +2 -0
  8. prefect/_internal/pydantic/_types.py +8 -0
  9. prefect/_internal/pydantic/utilities/__init__.py +0 -0
  10. prefect/_internal/pydantic/utilities/model_construct.py +56 -0
  11. prefect/_internal/pydantic/utilities/model_copy.py +55 -0
  12. prefect/_internal/pydantic/utilities/model_dump.py +136 -0
  13. prefect/_internal/pydantic/utilities/model_dump_json.py +112 -0
  14. prefect/_internal/pydantic/utilities/model_fields.py +50 -0
  15. prefect/_internal/pydantic/utilities/model_json_schema.py +82 -0
  16. prefect/_internal/pydantic/utilities/model_rebuild.py +80 -0
  17. prefect/_internal/pydantic/utilities/model_validate.py +75 -0
  18. prefect/_internal/pydantic/utilities/model_validate_json.py +68 -0
  19. prefect/_internal/pydantic/utilities/type_adapter.py +71 -0
  20. prefect/_internal/schemas/bases.py +1 -17
  21. prefect/_internal/schemas/validators.py +425 -4
  22. prefect/blocks/kubernetes.py +7 -3
  23. prefect/client/cloud.py +1 -1
  24. prefect/client/orchestration.py +8 -8
  25. prefect/client/schemas/actions.py +348 -285
  26. prefect/client/schemas/objects.py +47 -126
  27. prefect/client/schemas/responses.py +231 -57
  28. prefect/concurrency/events.py +2 -2
  29. prefect/context.py +2 -1
  30. prefect/deployments/base.py +4 -3
  31. prefect/deployments/runner.py +7 -25
  32. prefect/deprecated/packaging/base.py +5 -6
  33. prefect/deprecated/packaging/docker.py +19 -25
  34. prefect/deprecated/packaging/file.py +10 -5
  35. prefect/deprecated/packaging/orion.py +9 -4
  36. prefect/deprecated/packaging/serializers.py +8 -58
  37. prefect/engine.py +23 -22
  38. prefect/events/actions.py +16 -1
  39. prefect/events/related.py +4 -4
  40. prefect/events/schemas/automations.py +13 -2
  41. prefect/events/schemas/deployment_triggers.py +73 -5
  42. prefect/events/schemas/events.py +1 -1
  43. prefect/flows.py +3 -0
  44. prefect/infrastructure/provisioners/ecs.py +1 -0
  45. prefect/logging/configuration.py +2 -2
  46. prefect/pydantic/__init__.py +48 -2
  47. prefect/pydantic/main.py +2 -2
  48. prefect/serializers.py +6 -31
  49. prefect/settings.py +40 -17
  50. prefect/software/python.py +3 -5
  51. prefect/utilities/callables.py +1 -1
  52. prefect/utilities/collections.py +2 -1
  53. prefect/utilities/schema_tools/validation.py +2 -2
  54. prefect/workers/base.py +19 -10
  55. prefect/workers/block.py +3 -7
  56. prefect/workers/process.py +2 -5
  57. {prefect_client-2.16.7.dist-info → prefect_client-2.16.9.dist-info}/METADATA +3 -2
  58. {prefect_client-2.16.7.dist-info → prefect_client-2.16.9.dist-info}/RECORD +61 -50
  59. prefect/_internal/schemas/transformations.py +0 -106
  60. {prefect_client-2.16.7.dist-info → prefect_client-2.16.9.dist-info}/LICENSE +0 -0
  61. {prefect_client-2.16.7.dist-info → prefect_client-2.16.9.dist-info}/WHEEL +0 -0
  62. {prefect_client-2.16.7.dist-info → prefect_client-2.16.9.dist-info}/top_level.txt +0 -0
prefect/deployments/base.py CHANGED
@@ -4,6 +4,7 @@ build system for managing flows and deployments.
 
 To get started, follow along with [the deloyments tutorial](/tutorials/deployments/).
 """
+
 import ast
 import asyncio
 import json
@@ -13,7 +14,7 @@ import subprocess
 import sys
 from copy import deepcopy
 from pathlib import Path
-from typing import Dict, List, Optional, cast
+from typing import Any, Dict, List, Optional, cast
 
 import anyio
 import yaml
@@ -74,7 +75,7 @@ def set_prefect_hidden_dir(path: str = None) -> bool:
 
 
 def create_default_prefect_yaml(
-    path: str, name: str = None, contents: dict = None
+    path: str, name: str = None, contents: Optional[Dict[str, Any]] = None
 ) -> bool:
     """
     Creates default `prefect.yaml` file in the provided path if one does not already exist;
@@ -221,7 +222,7 @@ def _get_git_branch() -> Optional[str]:
 
 
 def initialize_project(
-    name: str = None, recipe: str = None, inputs: dict = None
+    name: str = None, recipe: str = None, inputs: Optional[Dict[str, Any]] = None
 ) -> List[str]:
     """
     Initializes a basic project structure with base files. If no name is provided, the name
prefect/deployments/runner.py CHANGED
@@ -44,7 +44,11 @@ from rich.table import Table
 
 from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect._internal.schemas.validators import validate_automation_names
+from prefect._internal.schemas.validators import (
+    reconcile_paused_deployment,
+    reconcile_schedules_runner,
+    validate_automation_names,
+)
 from prefect.runner.storage import RunnerStorage
 from prefect.settings import (
     PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE,
@@ -67,7 +71,6 @@ from prefect.client.schemas.schedules import (
 from prefect.deployments.schedules import (
     FlexibleScheduleList,
     create_minimal_deployment_schedule,
-    normalize_to_minimal_deployment_schedules,
 )
 from prefect.events import DeploymentTriggerTypes
 from prefect.exceptions import (
@@ -236,32 +239,11 @@ class RunnerDeployment(BaseModel):
 
     @root_validator(pre=True)
     def reconcile_paused(cls, values):
-        paused = values.get("paused")
-        is_schedule_active = values.get("is_schedule_active")
-
-        if paused is not None:
-            values["paused"] = paused
-            values["is_schedule_active"] = not paused
-        elif is_schedule_active is not None:
-            values["paused"] = not is_schedule_active
-            values["is_schedule_active"] = is_schedule_active
-        else:
-            values["paused"] = False
-            values["is_schedule_active"] = True
-
-        return values
+        return reconcile_paused_deployment(values)
 
     @root_validator(pre=True)
     def reconcile_schedules(cls, values):
-        schedule = values.get("schedule")
-        schedules = values.get("schedules")
-
-        if schedules is None and schedule is not None:
-            values["schedules"] = [create_minimal_deployment_schedule(schedule)]
-        elif schedules is not None and len(schedules) > 0:
-            values["schedules"] = normalize_to_minimal_deployment_schedules(schedules)
-
-        return values
+        return reconcile_schedules_runner(values)
 
     @sync_compatible
     async def apply(
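
The two root validators above now delegate to shared helpers in prefect/_internal/schemas/validators.py. A minimal sketch of what the relocated reconcile_paused logic does, reconstructed from the deleted lines (not the library's exact code): `paused` takes precedence, then `is_schedule_active`, and the default is an active, unpaused schedule.

    def reconcile_paused_deployment(values: dict) -> dict:
        # `paused` wins; otherwise mirror `is_schedule_active`; default to active
        paused = values.get("paused")
        is_schedule_active = values.get("is_schedule_active")
        if paused is not None:
            values["paused"] = paused
            values["is_schedule_active"] = not paused
        elif is_schedule_active is not None:
            values["paused"] = not is_schedule_active
        else:
            values["paused"] = False
            values["is_schedule_active"] = True
        return values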
prefect/deprecated/packaging/base.py CHANGED
@@ -4,7 +4,7 @@ This module is deprecated as of March 2024 and will not be available after Septe
 """
 
 import abc
-from typing import Generic, TypeVar
+from typing import Generic, Type, TypeVar
 
 from prefect._internal.compatibility.deprecated import deprecated_class
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
@@ -17,7 +17,7 @@ else:
 from prefect.flows import Flow
 from prefect.utilities.callables import ParameterSchema, parameter_schema
 from prefect.utilities.dispatch import lookup_type
-from prefect.utilities.pydantic import PartialModel, add_type_dispatch
+from prefect.utilities.pydantic import add_type_dispatch
 
 D = TypeVar("D")
 
@@ -81,10 +81,9 @@ class Packager(BaseModel, abc.ABC):
 
     type: str
 
-    def base_manifest(self, flow: Flow) -> PartialModel[PackageManifest]:
-        manifest_cls = lookup_type(PackageManifest, self.type)
-        return PartialModel(
-            manifest_cls,
+    def base_manifest(self, flow: Flow) -> PackageManifest:
+        manifest_cls: Type[BaseModel] = lookup_type(PackageManifest, self.type)
+        return manifest_cls.construct(
             type=self.type,
             flow_name=flow.name,
             flow_parameter_schema=parameter_schema(flow.fn),
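
PartialModel supported a two-phase build: collect some fields now, supply the rest later via finalize(). Its replacement leans on pydantic v1's construct(), which skips validation entirely, so a manifest can be created with required fields still missing. A self-contained sketch of the pattern, assuming a toy model (not a Prefect class):

    from typing import Optional

    import pydantic  # the pydantic v1 API (pydantic.v1 when v2 is installed)

    class Manifest(pydantic.BaseModel):
        type: str
        flow_name: str
        image: Optional[str] = None

    # construct() performs no validation, so missing fields are tolerated
    partial = Manifest.construct(type="docker", flow_name="etl")
    # "finalize" by re-validating with the remaining fields merged in
    full = Manifest(**{**partial.dict(), "image": "example/image:tag"})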
prefect/deprecated/packaging/docker.py CHANGED
@@ -10,6 +10,12 @@ from typing import Any, Mapping, Optional, Union
 
 from prefect._internal.compatibility.deprecated import deprecated_class
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
+from prefect._internal.schemas.validators import (
+    assign_default_base_image,
+    base_image_xor_dockerfile,
+    set_default_python_environment,
+    validate_registry_url,
+)
 
 if HAS_PYDANTIC_V2:
     from pydantic.v1 import AnyHttpUrl, root_validator, validator
@@ -25,7 +31,6 @@ from prefect.utilities.asyncutils import run_sync_in_worker_thread
 from prefect.utilities.dockerutils import (
     ImageBuilder,
     build_image,
-    get_prefect_image_name,
     push_image,
     to_run_command,
 )
@@ -63,7 +68,7 @@ class DockerPackager(Packager):
     registry, given by `registry_url`.
     """
 
-    type: Literal["docker"] = "docker"
+    type: str = "docker"
 
     base_image: Optional[str] = None
     python_environment: Optional[Union[PythonEnvironment, CondaEnvironment]] = None
@@ -74,36 +79,19 @@ class DockerPackager(Packager):
 
     @root_validator
     def set_default_base_image(cls, values):
-        if not values.get("base_image") and not values.get("dockerfile"):
-            values["base_image"] = get_prefect_image_name(
-                flavor=(
-                    "conda"
-                    if isinstance(values.get("python_environment"), CondaEnvironment)
-                    else None
-                )
-            )
-        return values
+        return assign_default_base_image(values)
 
     @root_validator
     def base_image_and_dockerfile_exclusive(cls, values: Mapping[str, Any]):
-        if values.get("base_image") and values.get("dockerfile"):
-            raise ValueError(
-                "Either `base_image` or `dockerfile` should be provided, but not both"
-            )
-        return values
+        return base_image_xor_dockerfile(values)
 
     @root_validator
     def default_python_environment(cls, values: Mapping[str, Any]):
-        if values.get("base_image") and not values.get("python_environment"):
-            values["python_environment"] = PythonEnvironment.from_environment()
-        return values
+        return set_default_python_environment(values)
 
     @validator("registry_url", pre=True)
     def ensure_registry_url_is_prefixed(cls, value):
-        if isinstance(value, str):
-            if "://" not in value:
-                return "https://" + value
-        return value
+        validate_registry_url(value)
 
     async def package(self, flow: Flow) -> DockerPackageManifest:
         """
@@ -117,8 +105,14 @@ class DockerPackager(Packager):
             push_image, image_reference, self.registry_url, image_name
         )
 
-        return self.base_manifest(flow).finalize(
-            image=image_reference, image_flow_location=self.image_flow_location
+        return DockerPackageManifest(
+            **{
+                **self.base_manifest(flow).dict(),
+                **{
+                    "image": image_reference,
+                    "image_flow_location": self.image_flow_location,
+                },
+            }
         )
 
     async def _build_image(self, flow: Flow) -> str:
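
With finalize() gone, package() now merges the partially-built manifest into the concrete manifest class by splatting dicts; the same shape recurs in the file.py and orion.py hunks below. The detail the pattern relies on is that later mappings in a dict splat override earlier keys:

    base = {"type": "docker", "flow_name": "etl"}
    overrides = {"image": "example/image:tag", "flow_name": "etl-v2"}
    merged = {**base, **overrides}
    # the override mapping wins on key collision
    assert merged == {"type": "docker", "flow_name": "etl-v2", "image": "example/image:tag"}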
prefect/deprecated/packaging/file.py CHANGED
@@ -34,7 +34,7 @@ class FilePackageManifest(PackageManifest):
     This class is deprecated as of version March 2024 and will not be available after September 2024.
     """
 
-    type: Literal["file"] = "file"
+    type: str = "file"
     serializer: Serializer
     key: str
     filesystem_id: UUID
@@ -80,8 +80,13 @@ class FilePackager(Packager):
             or await self.filesystem._save(is_anonymous=True)
         )
 
-        return self.base_manifest(flow).finalize(
-            serializer=self.serializer,
-            filesystem_id=filesystem_id,
-            key=key,
+        return FilePackageManifest(
+            **{
+                **self.base_manifest(flow).dict(),
+                **{
+                    "serializer": self.serializer,
+                    "filesystem_id": filesystem_id,
+                    "key": key,
+                },
+            }
         )
prefect/deprecated/packaging/orion.py CHANGED
@@ -31,7 +31,7 @@ class OrionPackageManifest(PackageManifest):
     This class is deprecated as of version March 2024 and will not be available after September 2024.
     """
 
-    type: Literal["orion"] = "orion"
+    type: str = "orion"
     serializer: Serializer
     block_document_id: UUID
 
@@ -69,7 +69,12 @@ class OrionPackager(Packager):
             value={"flow": self.serializer.dumps(flow)}
         )._save(is_anonymous=True)
 
-        return self.base_manifest(flow).finalize(
-            serializer=self.serializer,
-            block_document_id=block_document_id,
+        return OrionPackageManifest(
+            **{
+                **self.base_manifest(flow).dict(),
+                **{
+                    "serializer": self.serializer,
+                    "block_document_id": block_document_id,
+                },
+            }
         )
prefect/deprecated/packaging/serializers.py CHANGED
@@ -7,13 +7,17 @@ import base64
 import inspect
 import json
 import os.path
-import warnings
 from pathlib import Path
 from tempfile import TemporaryDirectory
 from typing import Any, List
 
 from prefect._internal.compatibility.deprecated import deprecated_class
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
+from prefect._internal.schemas.validators import (
+    validate_picklelib,
+    validate_picklelib_and_modules,
+    validate_picklelib_version,
+)
 
 if HAS_PYDANTIC_V2:
     import pydantic.v1 as pydantic
@@ -55,69 +59,15 @@ class PickleSerializer(Serializer):
 
     @pydantic.validator("picklelib")
     def check_picklelib(cls, value):
-        """
-        Check that the given pickle library is importable and has dumps/loads methods.
-        """
-        try:
-            pickler = from_qualified_name(value)
-        except (ImportError, AttributeError) as exc:
-            raise ValueError(
-                f"Failed to import requested pickle library: {value!r}."
-            ) from exc
-
-        if not callable(getattr(pickler, "dumps", None)):
-            raise ValueError(
-                f"Pickle library at {value!r} does not have a 'dumps' method."
-            )
-
-        if not callable(getattr(pickler, "loads", None)):
-            raise ValueError(
-                f"Pickle library at {value!r} does not have a 'loads' method."
-            )
-
-        return value
+        return validate_picklelib(value)
 
     @pydantic.root_validator
     def check_picklelib_version(cls, values):
-        """
-        Infers a default value for `picklelib_version` if null or ensures it matches
-        the version retrieved from the `pickelib`.
-        """
-        picklelib = values.get("picklelib")
-        picklelib_version = values.get("picklelib_version")
-
-        if not picklelib:
-            raise ValueError("Unable to check version of unrecognized picklelib module")
-
-        pickler = from_qualified_name(picklelib)
-        pickler_version = getattr(pickler, "__version__", None)
-
-        if not picklelib_version:
-            values["picklelib_version"] = pickler_version
-        elif picklelib_version != pickler_version:
-            warnings.warn(
-                (
-                    f"Mismatched {picklelib!r} versions. Found {pickler_version} in the"
-                    f" environment but {picklelib_version} was requested. This may"
-                    " cause the serializer to fail."
-                ),
-                RuntimeWarning,
-                stacklevel=3,
-            )
-
-        return values
+        return validate_picklelib_version(values)
 
     @pydantic.root_validator
     def check_picklelib_and_modules(cls, values):
-        """
-        Prevents modules from being specified if picklelib is not cloudpickle
-        """
-        if values.get("picklelib") != "cloudpickle" and values.get("pickle_modules"):
-            raise ValueError(
-                "`pickle_modules` cannot be used without 'cloudpickle'. Got"
-                f" {values.get('picklelib')!r}."
-            )
-        return values
+        return validate_picklelib_and_modules(values)
 
     def dumps(self, obj: Any) -> bytes:
         pickler = from_qualified_name(self.picklelib)
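
As in the packaging modules, the validator bodies moved wholesale into prefect/_internal/schemas/validators.py. A sketch of the relocated check_picklelib logic, reconstructed from the deleted lines (it assumes from_qualified_name from prefect.utilities.importtools, already used in this module):

    def validate_picklelib(value: str) -> str:
        # import the requested pickle module and require dumps/loads callables
        try:
            pickler = from_qualified_name(value)
        except (ImportError, AttributeError) as exc:
            raise ValueError(
                f"Failed to import requested pickle library: {value!r}."
            ) from exc
        for method in ("dumps", "loads"):
            if not callable(getattr(pickler, method, None)):
                raise ValueError(
                    f"Pickle library at {value!r} does not have a {method!r} method."
                )
        return value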
prefect/engine.py CHANGED
@@ -209,7 +209,6 @@ from prefect.utilities.callables import (
     parameters_to_args_kwargs,
 )
 from prefect.utilities.collections import StopVisiting, isiterable, visit_collection
-from prefect.utilities.pydantic import PartialModel
 from prefect.utilities.text import truncated_to
 
 R = TypeVar("R")
@@ -509,7 +508,7 @@ async def begin_flow_run(
     logger = flow_run_logger(flow_run, flow)
 
     log_prints = should_log_prints(flow)
-    flow_run_context = PartialModel(FlowRunContext, log_prints=log_prints)
+    flow_run_context = FlowRunContext.construct(log_prints=log_prints)
 
     async with AsyncExitStack() as stack:
         await stack.enter_async_context(
@@ -722,8 +721,7 @@ async def create_and_begin_subflow_run(
         # interruptible as well
         interruptible=parent_flow_run_context.timeout_scope is not None,
         client=client,
-        partial_flow_run_context=PartialModel(
-            FlowRunContext,
+        partial_flow_run_context=FlowRunContext.construct(
             sync_portal=parent_flow_run_context.sync_portal,
             task_runner=task_runner,
             background_tasks=parent_flow_run_context.background_tasks,
@@ -758,7 +756,7 @@ async def orchestrate_flow_run(
     wait_for: Optional[Iterable[PrefectFuture]],
     interruptible: bool,
     client: PrefectClient,
-    partial_flow_run_context: PartialModel[FlowRunContext],
+    partial_flow_run_context: FlowRunContext,
     user_thread: threading.Thread,
 ) -> State:
     """
@@ -809,11 +807,16 @@ async def orchestrate_flow_run(
         # Update the flow run to the latest data
         flow_run = await client.read_flow_run(flow_run.id)
         try:
-            with partial_flow_run_context.finalize(
-                flow=flow,
-                flow_run=flow_run,
-                client=client,
-                parameters=parameters,
+            with FlowRunContext(
+                **{
+                    **partial_flow_run_context.dict(),
+                    **{
+                        "flow_run": flow_run,
+                        "flow": flow,
+                        "client": client,
+                        "parameters": parameters,
+                    },
+                }
             ) as flow_run_context:
                 # update flow run name
                 if not run_name_set and flow.flow_run_name:
@@ -857,7 +860,9 @@ async def orchestrate_flow_run(
                 )
             ):
                 from_async.call_soon_in_waiting_thread(
-                    flow_call, thread=user_thread, timeout=flow.timeout_seconds
+                    flow_call,
+                    thread=user_thread,
+                    timeout=flow.timeout_seconds,
                 )
             else:
                 from_async.call_soon_in_new_thread(
@@ -1395,12 +1400,6 @@ def enter_task_run_engine(
         client=get_client(),
     )
 
-    if TaskRunContext.get():
-        raise RuntimeError(
-            "Tasks cannot be run from within tasks. Did you mean to call this "
-            "task in a flow?"
-        )
-
     if flow_run_context.timeout_scope and flow_run_context.timeout_scope.cancel_called:
         raise TimeoutError("Flow run timed out")
 
@@ -1941,8 +1940,7 @@ async def orchestrate_task_run(
     flow_run = await client.read_flow_run(task_run.flow_run_id)
    logger = task_run_logger(task_run, task=task, flow_run=flow_run)
 
-    partial_task_run_context = PartialModel(
-        TaskRunContext,
+    partial_task_run_context = TaskRunContext.construct(
        task_run=task_run,
        task=task,
        client=client,
@@ -1982,17 +1980,20 @@ async def orchestrate_task_run(
 
     # Generate the cache key to attach to proposed states
     # The cache key uses a TaskRunContext that does not include a `timeout_context``
+
+    task_run_context = TaskRunContext(
+        **partial_task_run_context.dict(), parameters=resolved_parameters
+    )
+
     cache_key = (
         task.cache_key_fn(
-            partial_task_run_context.finalize(parameters=resolved_parameters),
+            task_run_context,
             resolved_parameters,
         )
         if task.cache_key_fn
         else None
     )
 
-    task_run_context = partial_task_run_context.finalize(parameters=resolved_parameters)
-
     # Ignore the cached results for a cache key, default = false
     # Setting on task level overrules the Prefect setting (env var)
     refresh_cache = (
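
The cache-key hunk builds the full TaskRunContext up front and hands it to the user's cache_key_fn, rather than finalizing the partial context inline. A hedged sketch of the callback interface that ordering serves, using a hypothetical function: it receives the finished context plus the resolved parameters.

    def my_cache_key_fn(context, parameters) -> str:
        # derive a stable key from the task name and its inputs
        return f"{context.task.name}:{sorted(parameters.items())!r}"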
prefect/events/actions.py CHANGED
@@ -19,10 +19,17 @@ class Action(PrefectBaseModel):
     type: str
 
 
+class DoNothing(Action):
+    """Do nothing, which may be helpful for testing automations"""
+
+    type: Literal["do-nothing"] = "do-nothing"
+
+
 class RunDeployment(Action):
     """Run the given deployment with the given parameters"""
 
     type: Literal["run-deployment"] = "run-deployment"
+
     source: Literal["selected"] = "selected"
     parameters: Optional[Dict[str, Any]] = Field(
         None,
@@ -32,12 +39,20 @@ class RunDeployment(Action):
         ),
     )
     deployment_id: UUID = Field(..., description="The identifier of the deployment")
+    job_variables: Optional[Dict[str, Any]] = Field(
+        None,
+        description=(
+            "Job variables to pass to the run, or None to use the "
+            "deployment's default job variables"
+        ),
+    )
 
 
 class SendNotification(Action):
     """Send a notification with the given parameters"""
 
     type: Literal["send-notification"] = "send-notification"
+
     block_document_id: UUID = Field(
         ..., description="The identifier of the notification block"
     )
@@ -45,4 +60,4 @@ class SendNotification(Action):
     subject: Optional[str] = Field(None, description="Notification subject")
 
 
-ActionTypes = Union[RunDeployment, SendNotification]
+ActionTypes = Union[DoNothing, RunDeployment, SendNotification]
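
DoNothing joins the ActionTypes union, and each member carries a distinct `type` literal, so pydantic can select the right class from a payload. A quick sketch, assuming pydantic v1's parse_obj_as:

    from pydantic import parse_obj_as  # pydantic.v1 when pydantic 2 is installed

    action = parse_obj_as(ActionTypes, {"type": "do-nothing"})
    assert isinstance(action, DoNothing)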
prefect/events/related.py CHANGED
@@ -31,8 +31,8 @@ RESOURCE_CACHE: RelatedResourceCache = {}
 
 def tags_as_related_resources(tags: Iterable[str]) -> List[RelatedResource]:
     return [
-        RelatedResource(
-            __root__={
+        RelatedResource.parse_obj(
+            {
                 "prefect.resource.id": f"prefect.tag.{tag}",
                 "prefect.resource.role": "tag",
             }
@@ -44,8 +44,8 @@ def tags_as_related_resources(tags: Iterable[str]) -> List[RelatedResource]:
 def object_as_related_resource(kind: str, role: str, object: Any) -> RelatedResource:
     resource_id = f"prefect.{kind}.{object.id}"
 
-    return RelatedResource(
-        __root__={
+    return RelatedResource.parse_obj(
+        {
             "prefect.resource.id": resource_id,
             "prefect.resource.role": role,
             "prefect.resource.name": object.name,
prefect/events/schemas/automations.py CHANGED
@@ -54,11 +54,11 @@ class ResourceTrigger(Trigger, abc.ABC):
     type: str
 
     match: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for resources which this trigger will match.",
     )
     match_related: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for related resources which this trigger will match.",
     )
 
@@ -294,6 +294,17 @@ class Automation(PrefectBaseModel, extra="ignore"):
         ...,
         description="The actions to perform when this Automation triggers",
     )
+
+    actions_on_trigger: List[ActionTypes] = Field(
+        default_factory=list,
+        description="The actions to perform when an Automation goes into a triggered state",
+    )
+
+    actions_on_resolve: List[ActionTypes] = Field(
+        default_factory=list,
+        description="The actions to perform when an Automation goes into a resolving state",
+    )
+
     owner_resource: Optional[str] = Field(
         default=None, description="The owning resource of this automation"
     )
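
The RelatedResource and ResourceSpecification changes above are the same pydantic v1 migration: for models declaring a custom __root__, parse_obj(data) replaces Model(__root__=data). A minimal self-contained sketch with a toy model:

    from typing import Dict

    import pydantic  # the pydantic v1 API (pydantic.v1 when v2 is installed)

    class Spec(pydantic.BaseModel):
        __root__: Dict[str, str]

    # the two spellings build equal models; the diff standardizes on parse_obj
    a = Spec(__root__={"prefect.resource.id": "prefect.tag.etl"})
    b = Spec.parse_obj({"prefect.resource.id": "prefect.tag.etl"})
    assert a == b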