prefect-client 2.16.5__py3-none-any.whl → 2.16.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. prefect/_internal/pydantic/__init__.py +21 -1
  2. prefect/_internal/pydantic/_base_model.py +16 -0
  3. prefect/_internal/pydantic/_compat.py +352 -68
  4. prefect/_internal/pydantic/_flags.py +15 -0
  5. prefect/_internal/pydantic/v1_schema.py +48 -0
  6. prefect/_internal/pydantic/v2_schema.py +6 -2
  7. prefect/_internal/schemas/validators.py +582 -9
  8. prefect/artifacts.py +179 -70
  9. prefect/client/cloud.py +4 -1
  10. prefect/client/orchestration.py +1 -1
  11. prefect/client/schemas/actions.py +2 -2
  12. prefect/client/schemas/objects.py +13 -24
  13. prefect/client/schemas/schedules.py +18 -80
  14. prefect/deployments/deployments.py +22 -86
  15. prefect/deployments/runner.py +8 -11
  16. prefect/events/__init__.py +40 -1
  17. prefect/events/clients.py +17 -20
  18. prefect/events/filters.py +5 -6
  19. prefect/events/related.py +1 -1
  20. prefect/events/schemas/__init__.py +5 -0
  21. prefect/events/schemas/automations.py +303 -0
  22. prefect/events/{schemas.py → schemas/deployment_triggers.py} +146 -270
  23. prefect/events/schemas/events.py +285 -0
  24. prefect/events/schemas/labelling.py +106 -0
  25. prefect/events/utilities.py +2 -2
  26. prefect/events/worker.py +1 -1
  27. prefect/filesystems.py +8 -37
  28. prefect/flows.py +4 -4
  29. prefect/infrastructure/kubernetes.py +12 -56
  30. prefect/infrastructure/provisioners/__init__.py +1 -0
  31. prefect/pydantic/__init__.py +4 -0
  32. prefect/pydantic/main.py +15 -0
  33. prefect/runner/runner.py +2 -2
  34. prefect/runner/server.py +1 -1
  35. prefect/serializers.py +13 -61
  36. prefect/settings.py +35 -13
  37. prefect/task_server.py +21 -7
  38. prefect/utilities/asyncutils.py +1 -1
  39. prefect/utilities/callables.py +2 -2
  40. prefect/utilities/context.py +33 -1
  41. prefect/utilities/schema_tools/hydration.py +14 -6
  42. prefect/workers/base.py +1 -2
  43. prefect/workers/block.py +3 -7
  44. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/METADATA +2 -2
  45. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/RECORD +48 -40
  46. prefect/utilities/validation.py +0 -63
  47. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/LICENSE +0 -0
  48. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/WHEEL +0 -0
  49. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/top_level.txt +0 -0
prefect/deployments/deployments.py CHANGED
@@ -21,6 +21,14 @@ from prefect._internal.compatibility.deprecated import (
     deprecated_class,
 )
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
+from prefect._internal.schemas.validators import (
+    handle_openapi_schema,
+    infrastructure_must_have_capabilities,
+    reconcile_schedules,
+    storage_must_have_capabilities,
+    validate_automation_names,
+    validate_deprecated_schedule_fields,
+)
 from prefect.client.schemas.actions import DeploymentScheduleCreate
 
 if HAS_PYDANTIC_V2:
@@ -40,11 +48,9 @@ from prefect.client.utilities import inject_client
 from prefect.context import FlowRunContext, PrefectObjectRegistry, TaskRunContext
 from prefect.deployments.schedules import (
     FlexibleScheduleList,
-    create_minimal_deployment_schedule,
-    normalize_to_minimal_deployment_schedules,
 )
 from prefect.deployments.steps.core import run_steps
-from prefect.events.schemas import DeploymentTrigger
+from prefect.events import DeploymentTriggerTypes
 from prefect.exceptions import (
     BlockMissingCapabilities,
     ObjectAlreadyExists,
@@ -56,7 +62,6 @@ from prefect.infrastructure import Infrastructure, Process
 from prefect.logging.loggers import flow_run_logger, get_logger
 from prefect.states import Scheduled
 from prefect.tasks import Task
-from prefect.utilities.annotations import NotSet
 from prefect.utilities.asyncutils import run_sync_in_worker_thread, sync_compatible
 from prefect.utilities.callables import ParameterSchema, parameter_schema
 from prefect.utilities.filesystem import relative_path_to_current_platform, tmpchdir
@@ -579,7 +584,7 @@ class Deployment(BaseModel):
         description="The parameter schema of the flow, including defaults.",
     )
     timestamp: datetime = Field(default_factory=partial(pendulum.now, "UTC"))
-    triggers: List[DeploymentTrigger] = Field(
+    triggers: List[DeploymentTriggerTypes] = Field(
         default_factory=list,
         description="The triggers that should cause this deployment to run.",
     )
@@ -593,97 +598,28 @@ class Deployment(BaseModel):
     )
 
     @validator("infrastructure", pre=True)
-    def infrastructure_must_have_capabilities(cls, value):
-        if isinstance(value, dict):
-            if "_block_type_slug" in value:
-                # Replace private attribute with public for dispatch
-                value["block_type_slug"] = value.pop("_block_type_slug")
-            block = Block(**value)
-        elif value is None:
-            return value
-        else:
-            block = value
-
-        if "run-infrastructure" not in block.get_block_capabilities():
-            raise ValueError(
-                "Infrastructure block must have 'run-infrastructure' capabilities."
-            )
-        return block
+    def validate_infrastructure_capabilities(cls, value):
+        return infrastructure_must_have_capabilities(value)
 
     @validator("storage", pre=True)
-    def storage_must_have_capabilities(cls, value):
-        if isinstance(value, dict):
-            block_type = Block.get_block_class_from_key(value.pop("_block_type_slug"))
-            block = block_type(**value)
-        elif value is None:
-            return value
-        else:
-            block = value
-
-        capabilities = block.get_block_capabilities()
-        if "get-directory" not in capabilities:
-            raise ValueError(
-                "Remote Storage block must have 'get-directory' capabilities."
-            )
-        return block
+    def validate_storage(cls, value):
+        return storage_must_have_capabilities(value)
 
     @validator("parameter_openapi_schema", pre=True)
-    def handle_openapi_schema(cls, value):
-        """
-        This method ensures setting a value of `None` is handled gracefully.
-        """
-        if value is None:
-            return ParameterSchema()
-        return value
+    def validate_parameter_openapi_schema(cls, value):
+        return handle_openapi_schema(value)
 
     @validator("triggers")
-    def validate_automation_names(cls, field_value, values, field, config):
-        """Ensure that each trigger has a name for its automation if none is provided."""
-        for i, trigger in enumerate(field_value, start=1):
-            if trigger.name is None:
-                trigger.name = f"{values['name']}__automation_{i}"
-
-        return field_value
+    def validate_triggers(cls, field_value, values):
+        return validate_automation_names(field_value, values)
 
     @root_validator(pre=True)
-    def validate_deprecated_schedule_fields(cls, values):
-        if values.get("schedule") and not values.get("schedules"):
-            logger.warning(
-                "The field 'schedule' in 'Deployment' has been deprecated. It will not be "
-                "available after Sep 2024. Define schedules in the `schedules` list instead."
-            )
-        elif values.get("is_schedule_active") and not values.get("schedules"):
-            logger.warning(
-                "The field 'is_schedule_active' in 'Deployment' has been deprecated. It will "
-                "not be available after Sep 2024. Use the `active` flag within a schedule in "
-                "the `schedules` list instead and the `pause` flag in 'Deployment' to pause "
-                "all schedules."
-            )
-        return values
+    def validate_schedule(cls, values):
+        return validate_deprecated_schedule_fields(values, logger)
 
     @root_validator(pre=True)
-    def reconcile_schedules(cls, values):
-        schedule = values.get("schedule", NotSet)
-        schedules = values.get("schedules", NotSet)
-
-        if schedules is not NotSet:
-            values["schedules"] = normalize_to_minimal_deployment_schedules(schedules)
-        elif schedule is not NotSet:
-            values["schedule"] = None
-
-            if schedule is None:
-                values["schedules"] = []
-            else:
-                values["schedules"] = [
-                    create_minimal_deployment_schedule(
-                        schedule=schedule, active=values.get("is_schedule_active")
-                    )
-                ]
-
-        for schedule in values.get("schedules", []):
-            cls._validate_schedule(schedule.schedule)
-
-        return values
+    def validate_backwards_compatibility_for_schedule(cls, values):
+        return reconcile_schedules(cls, values)
 
     @classmethod
     @sync_compatible
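
The `Deployment` validators above are now thin wrappers around shared helpers in `prefect._internal.schemas.validators`. A minimal sketch of that delegation pattern, using hypothetical names (`Widget`, `ensure_non_empty_name`) rather than Prefect's own models and helpers:

    # Pydantic v1-style validator that delegates to a reusable helper function.
    from pydantic import BaseModel, validator  # Pydantic v1 API


    def ensure_non_empty_name(value: str) -> str:
        # Shared logic, reusable by any model with a `name` field.
        if not value or not value.strip():
            raise ValueError("name must not be empty")
        return value


    class Widget(BaseModel):
        name: str

        @validator("name", pre=True)
        def validate_name(cls, value):
            # The model-level validator only forwards to the shared helper.
            return ensure_non_empty_name(value)

Centralizing the checks this way lets several models share identical validation logic instead of repeating the bodies inline.
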
prefect/deployments/runner.py CHANGED
@@ -44,6 +44,7 @@ from rich.table import Table
 
 from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
+from prefect._internal.schemas.validators import validate_automation_names
 from prefect.runner.storage import RunnerStorage
 from prefect.settings import (
     PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE,
@@ -68,7 +69,7 @@ from prefect.deployments.schedules import (
     create_minimal_deployment_schedule,
     normalize_to_minimal_deployment_schedules,
 )
-from prefect.events.schemas import DeploymentTrigger
+from prefect.events import DeploymentTriggerTypes
 from prefect.exceptions import (
     ObjectNotFound,
     PrefectHTTPStatusError,
@@ -175,7 +176,7 @@ class RunnerDeployment(BaseModel):
             "The path to the entrypoint for the workflow, relative to the `path`."
         ),
     )
-    triggers: List[DeploymentTrigger] = Field(
+    triggers: List[DeploymentTriggerTypes] = Field(
         default_factory=list,
         description="The triggers that should cause this deployment to run.",
     )
@@ -229,13 +230,9 @@
         return self._entrypoint_type
 
     @validator("triggers", allow_reuse=True)
-    def validate_automation_names(cls, field_value, values, field, config):
+    def validate_automation_names(cls, field_value, values):
         """Ensure that each trigger has a name for its automation if none is provided."""
-        for i, trigger in enumerate(field_value, start=1):
-            if trigger.name is None:
-                trigger.name = f"{values['name']}__automation_{i}"
-
-        return field_value
+        return validate_automation_names(field_value, values)
 
     @root_validator(pre=True)
     def reconcile_paused(cls, values):
@@ -467,7 +464,7 @@
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTrigger]] = None,
+        triggers: Optional[List[DeploymentTriggerTypes]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
@@ -603,7 +600,7 @@
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTrigger]] = None,
+        triggers: Optional[List[DeploymentTriggerTypes]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
@@ -701,7 +698,7 @@
         schedule: Optional[SCHEDULE_TYPES] = None,
        is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTrigger]] = None,
+        triggers: Optional[List[DeploymentTriggerTypes]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
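
Both `Deployment` and `RunnerDeployment` now call the shared `validate_automation_names` helper, which preserves the behavior of the removed inline loop: triggers without a name get one derived from the deployment. A standalone sketch of that naming rule, with a hypothetical `FakeTrigger` standing in for the real trigger models:

    from dataclasses import dataclass
    from typing import List, Optional


    @dataclass
    class FakeTrigger:
        name: Optional[str] = None


    def default_automation_names(triggers: List[FakeTrigger], deployment_name: str):
        # Mirrors the removed inline logic: unnamed triggers become
        # "<deployment>__automation_<n>", numbered from 1.
        for i, trigger in enumerate(triggers, start=1):
            if trigger.name is None:
                trigger.name = f"{deployment_name}__automation_{i}"
        return triggers


    named = default_automation_names([FakeTrigger(), FakeTrigger(name="alert")], "etl")
    assert [t.name for t in named] == ["etl__automation_1", "alert"]
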
prefect/events/__init__.py CHANGED
@@ -1,9 +1,48 @@
-from .schemas import Event, RelatedResource, Resource
+from .schemas.events import Event, ReceivedEvent
+from .schemas.events import Resource, RelatedResource, ResourceSpecification
+from .schemas.automations import (
+    Automation,
+    Posture,
+    Trigger,
+    ResourceTrigger,
+    EventTrigger,
+    MetricTrigger,
+    MetricTriggerOperator,
+    MetricTriggerQuery,
+    CompositeTrigger,
+    CompoundTrigger,
+    SequenceTrigger,
+)
+from .schemas.deployment_triggers import (
+    DeploymentTriggerTypes,
+    DeploymentEventTrigger,
+    DeploymentMetricTrigger,
+    DeploymentCompoundTrigger,
+    DeploymentSequenceTrigger,
+)
 from .utilities import emit_event
 
 __all__ = [
     "Event",
+    "ReceivedEvent",
     "Resource",
     "RelatedResource",
+    "ResourceSpecification",
+    "Automation",
+    "Posture",
+    "Trigger",
+    "ResourceTrigger",
+    "EventTrigger",
+    "MetricTrigger",
+    "MetricTriggerOperator",
+    "MetricTriggerQuery",
+    "CompositeTrigger",
+    "CompoundTrigger",
+    "SequenceTrigger",
+    "DeploymentTriggerTypes",
+    "DeploymentEventTrigger",
+    "DeploymentMetricTrigger",
+    "DeploymentCompoundTrigger",
+    "DeploymentSequenceTrigger",
    "emit_event",
 ]
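
These re-exports make the trigger and automation schemas importable from the package root, while the old `prefect.events.schemas` module keeps resolving through the small compatibility shim added later in this diff. A sketch of both import paths, assuming the installed 2.16.7 layout:

    # New-style imports, directly from the package root:
    from prefect.events import (
        Automation,
        DeploymentEventTrigger,
        DeploymentTriggerTypes,
        EventTrigger,
        Posture,
    )

    # The old module path still works via prefect/events/schemas/__init__.py:
    from prefect.events.schemas import DeploymentTrigger
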
prefect/events/clients.py CHANGED
@@ -16,12 +16,7 @@ from uuid import UUID
 
 import orjson
 import pendulum
-
-try:
-    from cachetools import TTLCache
-except ImportError:
-    pass
-from prefect._vendor.starlette.status import WS_1008_POLICY_VIOLATION
+from cachetools import TTLCache
 from websockets.client import WebSocketClientProtocol, connect
 from websockets.exceptions import (
     ConnectionClosed,
@@ -304,6 +299,8 @@ class PrefectCloudEventSubscriber:
         )
         self._websocket = None
         self._reconnection_attempts = reconnection_attempts
+        if self._reconnection_attempts < 0:
+            raise ValueError("reconnection_attempts must be a non-negative integer")
 
     async def __aenter__(self) -> "PrefectCloudEventSubscriber":
         # Don't handle any errors in the initial connection, because these are most
@@ -333,19 +330,18 @@ class PrefectCloudEventSubscriber:
             message: Dict[str, Any] = orjson.loads(await self._websocket.recv())
             logger.debug(" auth result %s", message)
             assert message["type"] == "auth_success", message.get("reason", "")
-        except (AssertionError, ConnectionClosedError) as e:
-            if isinstance(e, AssertionError) or e.code == WS_1008_POLICY_VIOLATION:
-                if isinstance(e, AssertionError):
-                    reason = e.args[0]
-                elif isinstance(e, ConnectionClosedError):
-                    reason = e.reason
-
-                raise Exception(
-                    "Unable to authenticate to the event stream. Please ensure the "
-                    "provided api_key you are using is valid for this environment. "
-                    f"Reason: {reason}"
-                ) from e
-            raise
+        except AssertionError as e:
+            raise Exception(
+                "Unable to authenticate to the event stream. Please ensure the "
+                "provided api_key you are using is valid for this environment. "
+                f"Reason: {e.args[0]}"
+            )
+        except ConnectionClosedError as e:
+            raise Exception(
+                "Unable to authenticate to the event stream. Please ensure the "
+                "provided api_key you are using is valid for this environment. "
+                f"Reason: {e.reason}"
+            ) from e
 
         from prefect.events.filters import EventOccurredFilter
@@ -374,7 +370,8 @@ class PrefectCloudEventSubscriber:
         return self
 
     async def __anext__(self) -> Event:
-        for i in range(self._reconnection_attempts + 1):
+        assert self._reconnection_attempts >= 0
+        for i in range(self._reconnection_attempts + 1):  # pragma: no branch
             try:
                 # If we're here and the websocket is None, then we've had a failure in a
                 # previous reconnection attempt.
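
The subscriber now rejects a negative `reconnection_attempts` at construction time, which is what allows `__anext__` to assert the value and loop a fixed `attempts + 1` times. A self-contained sketch of that bounded-retry shape, using generic `_connect`/`_receive` placeholders rather than the real websocket calls:

    import asyncio
    from typing import Optional


    class BoundedSubscriber:
        def __init__(self, reconnection_attempts: int = 10):
            if reconnection_attempts < 0:
                raise ValueError("reconnection_attempts must be a non-negative integer")
            self._reconnection_attempts = reconnection_attempts
            self._connection: Optional[object] = None

        async def next_message(self) -> str:
            # One initial attempt plus up to `reconnection_attempts` reconnects.
            for _ in range(self._reconnection_attempts + 1):
                try:
                    if self._connection is None:
                        self._connection = await self._connect()
                    return await self._receive()
                except ConnectionError:
                    # Drop the broken connection; the next iteration reconnects.
                    self._connection = None
            raise StopAsyncIteration

        async def _connect(self) -> object:
            return object()  # placeholder for the real websocket handshake

        async def _receive(self) -> str:
            return "event"  # placeholder for reading a frame


    print(asyncio.run(BoundedSubscriber(reconnection_attempts=2).next_message()))
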
prefect/events/filters.py CHANGED
@@ -4,8 +4,10 @@ from uuid import UUID
 import pendulum
 
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect.events.schemas import Event, Resource, ResourceSpecification
-from prefect.server.utilities.schemas import DateTimeTZ, PrefectBaseModel
+from prefect._internal.schemas.bases import PrefectBaseModel
+from prefect._internal.schemas.fields import DateTimeTZ
+
+from .schemas.events import Event, Resource, ResourceSpecification
 
 if HAS_PYDANTIC_V2:
     from pydantic.v1 import Field
@@ -13,12 +15,9 @@ else:
     from pydantic import Field
 
 
-class EventDataFilter(PrefectBaseModel):
+class EventDataFilter(PrefectBaseModel, extra="forbid"):
     """A base class for filtering event data."""
 
-    class Config:
-        extra = "forbid"
-
     def get_filters(self) -> List["EventDataFilter"]:
         return [
             filter
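
Passing `extra="forbid"` as a class keyword is the Pydantic v1 shorthand for the inner `Config` class it replaces, so `EventDataFilter` keeps rejecting unknown fields. A minimal sketch of the equivalence, assuming Pydantic v1 (or the `pydantic.v1` compatibility layer):

    from pydantic import BaseModel, ValidationError  # Pydantic v1 API


    class StrictKwarg(BaseModel, extra="forbid"):
        value: int


    class StrictConfig(BaseModel):
        value: int

        class Config:
            extra = "forbid"


    # Both forms reject unexpected fields in the same way.
    for model in (StrictKwarg, StrictConfig):
        try:
            model(value=1, unexpected="nope")
        except ValidationError:
            print(model.__name__, "rejected the extra field")
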
prefect/events/related.py CHANGED
@@ -17,7 +17,7 @@ from uuid import UUID
 import pendulum
 from pendulum.datetime import DateTime
 
-from .schemas import RelatedResource
+from .schemas.events import RelatedResource
 
 if TYPE_CHECKING:
     from prefect._internal.schemas.bases import ObjectBaseModel
prefect/events/schemas/__init__.py ADDED
@@ -0,0 +1,5 @@
+# TODO: these are just for backward compatibility and can be removed in the future
+
+from .deployment_triggers import DeploymentTrigger
+
+__all__ = ["DeploymentTrigger"]
prefect/events/schemas/automations.py ADDED
@@ -0,0 +1,303 @@
+import abc
+from datetime import timedelta
+from enum import Enum
+from typing import (
+    Any,
+    Dict,
+    List,
+    Literal,
+    Optional,
+    Set,
+    Union,
+)
+from uuid import UUID
+
+from typing_extensions import TypeAlias
+
+from prefect._internal.pydantic import HAS_PYDANTIC_V2
+from prefect._internal.schemas.validators import validate_trigger_within
+
+if HAS_PYDANTIC_V2:
+    from pydantic.v1 import Field, root_validator, validator
+    from pydantic.v1.fields import ModelField
+else:
+    from pydantic import Field, root_validator, validator
+    from pydantic.fields import ModelField
+
+from prefect._internal.schemas.bases import PrefectBaseModel
+from prefect.events.actions import ActionTypes
+from prefect.utilities.collections import AutoEnum
+
+from .events import ResourceSpecification
+
+
+class Posture(AutoEnum):
+    Reactive = "Reactive"
+    Proactive = "Proactive"
+    Metric = "Metric"
+
+
+class Trigger(PrefectBaseModel, abc.ABC, extra="ignore"):
+    """
+    Base class describing a set of criteria that must be satisfied in order to trigger
+    an automation.
+    """
+
+    type: str
+
+
+class ResourceTrigger(Trigger, abc.ABC):
+    """
+    Base class for triggers that may filter by the labels of resources.
+    """
+
+    type: str
+
+    match: ResourceSpecification = Field(
+        default_factory=lambda: ResourceSpecification(__root__={}),
+        description="Labels for resources which this trigger will match.",
+    )
+    match_related: ResourceSpecification = Field(
+        default_factory=lambda: ResourceSpecification(__root__={}),
+        description="Labels for related resources which this trigger will match.",
+    )
+
+
+class EventTrigger(ResourceTrigger):
+    """
+    A trigger that fires based on the presence or absence of events within a given
+    period of time.
+    """
+
+    type: Literal["event"] = "event"
+
+    after: Set[str] = Field(
+        default_factory=set,
+        description=(
+            "The event(s) which must first be seen to fire this trigger. If "
+            "empty, then fire this trigger immediately. Events may include "
+            "trailing wildcards, like `prefect.flow-run.*`"
+        ),
+    )
+    expect: Set[str] = Field(
+        default_factory=set,
+        description=(
+            "The event(s) this trigger is expecting to see. If empty, this "
+            "trigger will match any event. Events may include trailing wildcards, "
+            "like `prefect.flow-run.*`"
+        ),
+    )
+
+    for_each: Set[str] = Field(
+        default_factory=set,
+        description=(
+            "Evaluate the trigger separately for each distinct value of these labels "
+            "on the resource. By default, labels refer to the primary resource of the "
+            "triggering event. You may also refer to labels from related "
+            "resources by specifying `related:<role>:<label>`. This will use the "
+            "value of that label for the first related resource in that role. For "
+            'example, `"for_each": ["related:flow:prefect.resource.id"]` would '
+            "evaluate the trigger for each flow."
+        ),
+    )
+    posture: Literal[Posture.Reactive, Posture.Proactive] = Field(  # type: ignore[valid-type]
+        ...,
+        description=(
+            "The posture of this trigger, either Reactive or Proactive. Reactive "
+            "triggers respond to the _presence_ of the expected events, while "
+            "Proactive triggers respond to the _absence_ of those expected events."
+        ),
+    )
+    threshold: int = Field(
+        1,
+        description=(
+            "The number of events required for this trigger to fire (for "
+            "Reactive triggers), or the number of events expected (for Proactive "
+            "triggers)"
+        ),
+    )
+    within: timedelta = Field(
+        timedelta(0),
+        minimum=0.0,
+        exclusiveMinimum=False,
+        description=(
+            "The time period over which the events must occur. For Reactive triggers, "
+            "this may be as low as 0 seconds, but must be at least 10 seconds for "
+            "Proactive triggers"
+        ),
+    )
+
+    @validator("within")
+    def enforce_minimum_within(
+        cls, value: timedelta, values, config, field: ModelField
+    ):
+        return validate_trigger_within(value, field)
+
+    @root_validator(skip_on_failure=True)
+    def enforce_minimum_within_for_proactive_triggers(cls, values: Dict[str, Any]):
+        posture: Optional[Posture] = values.get("posture")
+        within: Optional[timedelta] = values.get("within")
+
+        if posture == Posture.Proactive:
+            if not within or within == timedelta(0):
+                values["within"] = timedelta(seconds=10.0)
+            elif within < timedelta(seconds=10.0):
+                raise ValueError(
+                    "The minimum within for Proactive triggers is 10 seconds"
+                )
+
+        return values
+
+
+class MetricTriggerOperator(Enum):
+    LT = "<"
+    LTE = "<="
+    GT = ">"
+    GTE = ">="
+
+
+class PrefectMetric(Enum):
+    lateness = "lateness"
+    duration = "duration"
+    successes = "successes"
+
+
+class MetricTriggerQuery(PrefectBaseModel):
+    """Defines a subset of the Trigger subclass, which is specific
+    to Metric automations, that specify the query configurations
+    and breaching conditions for the Automation"""
+
+    name: PrefectMetric = Field(
+        ...,
+        description="The name of the metric to query.",
+    )
+    threshold: float = Field(
+        ...,
+        description=(
+            "The threshold value against which we'll compare the query result."
+        ),
+    )
+    operator: MetricTriggerOperator = Field(
+        ...,
+        description=(
+            "The comparative operator (LT / LTE / GT / GTE) used to compare "
+            "the query result against the threshold value."
+        ),
+    )
+    range: timedelta = Field(
+        timedelta(seconds=300),  # defaults to 5 minutes
+        minimum=300.0,
+        exclusiveMinimum=False,
+        description=(
+            "The lookback duration (seconds) for a metric query. This duration is "
+            "used to determine the time range over which the query will be executed. "
+            "The minimum value is 300 seconds (5 minutes)."
+        ),
+    )
+    firing_for: timedelta = Field(
+        timedelta(seconds=300),  # defaults to 5 minutes
+        minimum=300.0,
+        exclusiveMinimum=False,
+        description=(
+            "The duration (seconds) for which the metric query must breach "
+            "or resolve continuously before the state is updated and the "
+            "automation is triggered. "
+            "The minimum value is 300 seconds (5 minutes)."
+        ),
+    )
+
+
+class MetricTrigger(ResourceTrigger):
+    """
+    A trigger that fires based on the results of a metric query.
+    """
+
+    type: Literal["metric"] = "metric"
+
+    posture: Literal[Posture.Metric] = Field(  # type: ignore[valid-type]
+        Posture.Metric,
+        description="Periodically evaluate the configured metric query.",
+    )
+
+    metric: MetricTriggerQuery = Field(
+        ...,
+        description="The metric query to evaluate for this trigger.",
+    )
+
+
+class CompositeTrigger(Trigger, abc.ABC):
+    """
+    Requires some number of triggers to have fired within the given time period.
+    """
+
+    type: Literal["compound", "sequence"]
+    triggers: List["TriggerTypes"]
+    within: Optional[timedelta]
+
+
+class CompoundTrigger(CompositeTrigger):
+    """A composite trigger that requires some number of triggers to have
+    fired within the given time period"""
+
+    type: Literal["compound"] = "compound"
+    require: Union[int, Literal["any", "all"]]
+
+    @root_validator
+    def validate_require(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        require = values.get("require")
+
+        if isinstance(require, int):
+            if require < 1:
+                raise ValueError("required must be at least 1")
+            if require > len(values["triggers"]):
+                raise ValueError(
+                    "required must be less than or equal to the number of triggers"
+                )
+
+        return values
+
+
+class SequenceTrigger(CompositeTrigger):
+    """A composite trigger that requires some number of triggers to have fired
+    within the given time period in a specific order"""
+
+    type: Literal["sequence"] = "sequence"
+
+
+TriggerTypes: TypeAlias = Union[
+    EventTrigger, MetricTrigger, CompoundTrigger, SequenceTrigger
+]
+"""The union of all concrete trigger types that a user may actually create"""
+
+CompoundTrigger.update_forward_refs()
+SequenceTrigger.update_forward_refs()
+
+
+class Automation(PrefectBaseModel, extra="ignore"):
+    """Defines an action a user wants to take when a certain number of events
+    do or don't happen to the matching resources"""
+
+    name: str = Field(..., description="The name of this automation")
+    description: str = Field("", description="A longer description of this automation")
+
+    enabled: bool = Field(True, description="Whether this automation will be evaluated")
+
+    trigger: TriggerTypes = Field(
+        ...,
+        description=(
+            "The criteria for which events this Automation covers and how it will "
+            "respond to the presence or absence of those events"
+        ),
+    )
+
+    actions: List[ActionTypes] = Field(
+        ...,
+        description="The actions to perform when this Automation triggers",
+    )
+    owner_resource: Optional[str] = Field(
+        default=None, description="The owning resource of this automation"
+    )
+
+
+class ExistingAutomation(Automation):
+    id: UUID = Field(..., description="The ID of this automation")
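
Taken together, these schemas let a client describe an automation declaratively. A hedged usage sketch built only from the fields defined above; the event name and resource label are illustrative:

    from datetime import timedelta

    from prefect.events import Automation, EventTrigger, Posture, ResourceSpecification

    # Fire when three matching failure events are seen within ten minutes,
    # evaluated separately per flow.
    trigger = EventTrigger(
        expect={"prefect.flow-run.Failed"},
        match=ResourceSpecification(__root__={"prefect.resource.id": "prefect.flow-run.*"}),
        for_each={"related:flow:prefect.resource.id"},
        posture=Posture.Reactive,
        threshold=3,
        within=timedelta(minutes=10),
    )

    automation = Automation(
        name="alert-on-repeated-failures",
        description="Escalate when a flow keeps failing",
        trigger=trigger,
        actions=[],  # instances of prefect.events.actions.ActionTypes would go here
    )

For a Proactive posture, the `enforce_minimum_within_for_proactive_triggers` validator above bumps `within` to at least ten seconds.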