prefect-client 2.16.5__py3-none-any.whl → 2.16.7__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their public registries.
Files changed (49)
  1. prefect/_internal/pydantic/__init__.py +21 -1
  2. prefect/_internal/pydantic/_base_model.py +16 -0
  3. prefect/_internal/pydantic/_compat.py +352 -68
  4. prefect/_internal/pydantic/_flags.py +15 -0
  5. prefect/_internal/pydantic/v1_schema.py +48 -0
  6. prefect/_internal/pydantic/v2_schema.py +6 -2
  7. prefect/_internal/schemas/validators.py +582 -9
  8. prefect/artifacts.py +179 -70
  9. prefect/client/cloud.py +4 -1
  10. prefect/client/orchestration.py +1 -1
  11. prefect/client/schemas/actions.py +2 -2
  12. prefect/client/schemas/objects.py +13 -24
  13. prefect/client/schemas/schedules.py +18 -80
  14. prefect/deployments/deployments.py +22 -86
  15. prefect/deployments/runner.py +8 -11
  16. prefect/events/__init__.py +40 -1
  17. prefect/events/clients.py +17 -20
  18. prefect/events/filters.py +5 -6
  19. prefect/events/related.py +1 -1
  20. prefect/events/schemas/__init__.py +5 -0
  21. prefect/events/schemas/automations.py +303 -0
  22. prefect/events/{schemas.py → schemas/deployment_triggers.py} +146 -270
  23. prefect/events/schemas/events.py +285 -0
  24. prefect/events/schemas/labelling.py +106 -0
  25. prefect/events/utilities.py +2 -2
  26. prefect/events/worker.py +1 -1
  27. prefect/filesystems.py +8 -37
  28. prefect/flows.py +4 -4
  29. prefect/infrastructure/kubernetes.py +12 -56
  30. prefect/infrastructure/provisioners/__init__.py +1 -0
  31. prefect/pydantic/__init__.py +4 -0
  32. prefect/pydantic/main.py +15 -0
  33. prefect/runner/runner.py +2 -2
  34. prefect/runner/server.py +1 -1
  35. prefect/serializers.py +13 -61
  36. prefect/settings.py +35 -13
  37. prefect/task_server.py +21 -7
  38. prefect/utilities/asyncutils.py +1 -1
  39. prefect/utilities/callables.py +2 -2
  40. prefect/utilities/context.py +33 -1
  41. prefect/utilities/schema_tools/hydration.py +14 -6
  42. prefect/workers/base.py +1 -2
  43. prefect/workers/block.py +3 -7
  44. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/METADATA +2 -2
  45. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/RECORD +48 -40
  46. prefect/utilities/validation.py +0 -63
  47. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/LICENSE +0 -0
  48. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/WHEEL +0 -0
  49. {prefect_client-2.16.5.dist-info → prefect_client-2.16.7.dist-info}/top_level.txt +0 -0
prefect/artifacts.py CHANGED
@@ -2,21 +2,173 @@
  Interface for creating and reading artifacts.
  """
 
- import json
+ from __future__ import annotations
+
+ import json  # noqa: I001
  import math
- from typing import Any, Dict, List, Optional, Union
+ from typing import Any, Dict, List, Optional, Tuple, Union
  from uuid import UUID
 
+ from typing_extensions import Self
+
  from prefect.client.orchestration import PrefectClient
- from prefect.client.schemas.actions import ArtifactCreate
- from prefect.client.utilities import inject_client
- from prefect.context import FlowRunContext, TaskRunContext
+ from prefect.client.schemas.actions import ArtifactCreate as ArtifactRequest
+ from prefect.client.schemas.filters import ArtifactFilter, ArtifactFilterKey
+ from prefect.client.schemas.objects import Artifact as ArtifactResponse
+ from prefect.client.schemas.sorting import ArtifactSort
+ from prefect.client.utilities import get_or_create_client, inject_client
  from prefect.utilities.asyncutils import sync_compatible
+ from prefect.utilities.context import get_task_and_flow_run_ids
+
+
+ class Artifact(ArtifactRequest):
+     """
+     An artifact is a piece of data that is created by a flow or task run.
+     https://docs.prefect.io/latest/concepts/artifacts/
+
+     Arguments:
+         type: A string identifying the type of artifact.
+         key: A user-provided string identifier.
+             The key must only contain lowercase letters, numbers, and dashes.
+         description: A user-specified description of the artifact.
+         data: A JSON payload that allows for a result to be retrieved.
+     """
+
+     @sync_compatible
+     async def create(
+         self: Self,
+         client: Optional[PrefectClient] = None,
+     ) -> ArtifactResponse:
+         """
+         A method to create an artifact.
+
+         Arguments:
+             client: The PrefectClient
+
+         Returns:
+             - The created artifact.
+         """
+         client, _ = get_or_create_client(client)
+         task_run_id, flow_run_id = get_task_and_flow_run_ids()
+         return await client.create_artifact(
+             artifact=ArtifactRequest(
+                 type=self.type,
+                 key=self.key,
+                 description=self.description,
+                 task_run_id=self.task_run_id or task_run_id,
+                 flow_run_id=self.flow_run_id or flow_run_id,
+                 data=await self.format(),
+             )
+         )
+
+     @classmethod
+     @sync_compatible
+     async def get(
+         cls, key: Optional[str] = None, client: Optional[PrefectClient] = None
+     ) -> Optional[ArtifactResponse]:
+         """
+         A method to get an artifact.
+
+         Arguments:
+             key (str, optional): The key of the artifact to get.
+             client (PrefectClient, optional): The PrefectClient
+
+         Returns:
+             (ArtifactResponse, optional): The artifact (if found).
+         """
+         client, _ = get_or_create_client(client)
+         return next(
+             iter(
+                 await client.read_artifacts(
+                     limit=1,
+                     sort=ArtifactSort.UPDATED_DESC,
+                     artifact_filter=ArtifactFilter(key=ArtifactFilterKey(any_=[key])),
+                 )
+             ),
+             None,
+         )
+
+     @classmethod
+     @sync_compatible
+     async def get_or_create(
+         cls,
+         key: Optional[str] = None,
+         description: Optional[str] = None,
+         data: Optional[Union[Dict[str, Any], Any]] = None,
+         client: Optional[PrefectClient] = None,
+         **kwargs: Any,
+     ) -> Tuple[ArtifactResponse, bool]:
+         """
+         A method to get or create an artifact.
+
+         Arguments:
+             key (str, optional): The key of the artifact to get or create.
+             description (str, optional): The description of the artifact to create.
+             data (Union[Dict[str, Any], Any], optional): The data of the artifact to create.
+             client (PrefectClient, optional): The PrefectClient
+
+         Returns:
+             (ArtifactResponse): The artifact, either retrieved or created.
+         """
+         artifact = await cls.get(key, client)
+         if artifact:
+             return artifact, False
+         else:
+             return (
+                 await cls(key=key, description=description, data=data, **kwargs).create(
+                     client
+                 ),
+                 True,
+             )
+
+     async def format(self) -> Optional[Union[Dict[str, Any], Any]]:
+         return json.dumps(self.data)
+
+
+ class LinkArtifact(Artifact):
+     link: str
+     link_text: Optional[str] = None
+     type: Optional[str] = "markdown"
+
+     async def format(self) -> str:
+         return (
+             f"[{self.link_text}]({self.link})"
+             if self.link_text
+             else f"[{self.link}]({self.link})"
+         )
+
+
+ class MarkdownArtifact(Artifact):
+     markdown: str
+     type: Optional[str] = "markdown"
+
+     async def format(self) -> str:
+         return self.markdown
 
- INVALID_TABLE_TYPE_ERROR = (
-     "`create_table_artifact` requires a `table` argument of type `dict[list]` or"
-     " `list[dict]`."
- )
+
+ class TableArtifact(Artifact):
+     table: Union[Dict[str, List[Any]], List[Dict[str, Any]], List[List[Any]]]
+     type: Optional[str] = "table"
+
+     @classmethod
+     def _sanitize(
+         cls, item: Union[Dict[str, Any], List[Any], float]
+     ) -> Union[Dict[str, Any], List[Any], int, float, None]:
+         """
+         Sanitize NaN values in a given item.
+         The item can be a dict, list or float.
+         """
+         if isinstance(item, list):
+             return [cls._sanitize(sub_item) for sub_item in item]
+         elif isinstance(item, dict):
+             return {k: cls._sanitize(v) for k, v in item.items()}
+         elif isinstance(item, float) and math.isnan(item):
+             return None
+         else:
+             return item
+
+     async def format(self) -> str:
+         return json.dumps(self._sanitize(self.table))
 
 
  @inject_client
@@ -41,28 +193,15 @@ async def _create_artifact(
      Returns:
          - The table artifact ID.
      """
-     artifact_args = {}
-     task_run_ctx = TaskRunContext.get()
-     flow_run_ctx = FlowRunContext.get()
-
-     if task_run_ctx:
-         artifact_args["task_run_id"] = task_run_ctx.task_run.id
-         artifact_args["flow_run_id"] = task_run_ctx.task_run.flow_run_id
-     elif flow_run_ctx:
-         artifact_args["flow_run_id"] = flow_run_ctx.flow_run.id
-
-     if key is not None:
-         artifact_args["key"] = key
-     if type is not None:
-         artifact_args["type"] = type
-     if description is not None:
-         artifact_args["description"] = description
-     if data is not None:
-         artifact_args["data"] = data
 
-     artifact = ArtifactCreate(**artifact_args)
+     artifact = await Artifact(
+         type=type,
+         key=key,
+         description=description,
+         data=data,
+     ).create(client)
 
-     return await client.create_artifact(artifact=artifact)
+     return artifact.id
 
 
  @sync_compatible
@@ -71,6 +210,7 @@ async def create_link_artifact(
      link_text: Optional[str] = None,
      key: Optional[str] = None,
      description: Optional[str] = None,
+     client: Optional[PrefectClient] = None,
  ) -> UUID:
      """
      Create a link artifact.
@@ -87,13 +227,12 @@ async def create_link_artifact(
      Returns:
          The table artifact ID.
      """
-     formatted_link = f"[{link_text}]({link})" if link_text else f"[{link}]({link})"
-     artifact = await _create_artifact(
+     artifact = await LinkArtifact(
          key=key,
-         type="markdown",
          description=description,
-         data=formatted_link,
-     )
+         link=link,
+         link_text=link_text,
+     ).create(client)
 
      return artifact.id
 
@@ -117,12 +256,11 @@ async def create_markdown_artifact(
      Returns:
          The table artifact ID.
      """
-     artifact = await _create_artifact(
+     artifact = await MarkdownArtifact(
          key=key,
-         type="markdown",
          description=description,
-         data=markdown,
-     )
+         markdown=markdown,
+     ).create()
 
      return artifact.id
 
@@ -147,39 +285,10 @@ async def create_table_artifact(
          The table artifact ID.
      """
 
-     def _sanitize_nan_values(item):
-         """
-         Sanitize NaN values in a given item. The item can be a dict, list or float.
-         """
-
-         if isinstance(item, list):
-             return [_sanitize_nan_values(sub_item) for sub_item in item]
-
-         elif isinstance(item, dict):
-             return {k: _sanitize_nan_values(v) for k, v in item.items()}
-
-         elif isinstance(item, float) and math.isnan(item):
-             return None
-
-         else:
-             return item
-
-     sanitized_table = _sanitize_nan_values(table)
-
-     if isinstance(table, dict) and all(isinstance(v, list) for v in table.values()):
-         pass
-     elif isinstance(table, list) and all(isinstance(v, (list, dict)) for v in table):
-         pass
-     else:
-         raise TypeError(INVALID_TABLE_TYPE_ERROR)
-
-     formatted_table = json.dumps(sanitized_table)
-
-     artifact = await _create_artifact(
+     artifact = await TableArtifact(
          key=key,
-         type="table",
          description=description,
-         data=formatted_table,
-     )
+         table=table,
+     ).create()
 
      return artifact.id
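
Note: taken together, the artifacts.py changes replace the dict-building `_create_artifact` helper with class-based models (`Artifact`, `LinkArtifact`, `MarkdownArtifact`, `TableArtifact`) that carry their own `format()` logic, while the module-level helpers keep returning the artifact UUID. A minimal usage sketch of the new surface, assuming a reachable Prefect API; the keys, links, and table values below are illustrative:

from prefect import flow
from prefect.artifacts import Artifact, create_link_artifact, create_table_artifact


@flow
def report():
    # The helpers still return the artifact UUID, but now build the class-based
    # models shown above under the hood.
    create_link_artifact(
        key="dashboard-link",
        link="https://example.com/dashboard",
        link_text="Dashboard",
    )
    create_table_artifact(
        key="daily-totals",
        table=[{"region": "us-east", "total": 123}, {"region": "eu-west", "total": 456}],
    )
    # The new Artifact class exposes sync-compatible get/get_or_create/create.
    return Artifact.get(key="dashboard-link")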
prefect/client/cloud.py CHANGED
@@ -85,7 +85,10 @@ class CloudClient:
          await self.read_workspaces()
 
      async def read_workspaces(self) -> List[Workspace]:
-         return pydantic.parse_obj_as(List[Workspace], await self.get("/me/workspaces"))
+         workspaces = pydantic.parse_obj_as(
+             List[Workspace], await self.get("/me/workspaces")
+         )
+         return workspaces
 
      async def read_worker_metadata(self) -> Dict[str, Any]:
          configured_url = prefect.settings.PREFECT_API_URL.value()
prefect/client/orchestration.py CHANGED
@@ -128,7 +128,7 @@ from prefect.client.schemas.sorting import (
      TaskRunSort,
  )
  from prefect.deprecated.data_documents import DataDocument
- from prefect.events.schemas import Automation, ExistingAutomation
+ from prefect.events.schemas.automations import Automation, ExistingAutomation
  from prefect.logging import get_logger
  from prefect.settings import (
      PREFECT_API_DATABASE_CONNECTION_URL,
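
Note: the one-line change above follows from prefect.events.schemas being split from a single module into a package (items 20-24 in the file list). A sketch of the updated import path; only the automations import is confirmed by this diff, and the sibling modules (deployment_triggers, events, labelling) are inferred from the new file names:

# Automations now live in a dedicated module (shown in the hunk above).
from prefect.events.schemas.automations import Automation, ExistingAutomation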
prefect/client/schemas/actions.py CHANGED
@@ -652,7 +652,7 @@ class GlobalConcurrencyLimitCreate(ActionBaseModel):
      limit: int = FieldFrom(objects.GlobalConcurrencyLimit)
      active: Optional[bool] = FieldFrom(objects.GlobalConcurrencyLimit)
      active_slots: Optional[int] = FieldFrom(objects.GlobalConcurrencyLimit)
-     slot_decay_per_second: Optional[int] = FieldFrom(objects.GlobalConcurrencyLimit)
+     slot_decay_per_second: Optional[float] = FieldFrom(objects.GlobalConcurrencyLimit)
 
 
  @copy_model_fields
@@ -663,4 +663,4 @@ class GlobalConcurrencyLimitUpdate(ActionBaseModel):
      limit: Optional[int] = FieldFrom(objects.GlobalConcurrencyLimit)
      active: Optional[bool] = FieldFrom(objects.GlobalConcurrencyLimit)
      active_slots: Optional[int] = FieldFrom(objects.GlobalConcurrencyLimit)
-     slot_decay_per_second: Optional[int] = FieldFrom(objects.GlobalConcurrencyLimit)
+     slot_decay_per_second: Optional[float] = FieldFrom(objects.GlobalConcurrencyLimit)
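
Note: both action models (and the GlobalConcurrencyLimit object below) now type slot_decay_per_second as a float, so fractional decay rates are representable. A minimal sketch with an illustrative value:

from prefect.client.schemas.actions import GlobalConcurrencyLimitUpdate

# A decay of 0.5 releases one slot every two seconds; under the previous
# Optional[int] annotation a fractional rate could not be expressed.
update = GlobalConcurrencyLimitUpdate(active=True, slot_decay_per_second=0.5)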
prefect/client/schemas/objects.py CHANGED
@@ -27,6 +27,8 @@ from typing_extensions import Literal
  from prefect._internal.schemas.bases import ObjectBaseModel, PrefectBaseModel
  from prefect._internal.schemas.fields import CreatedBy, DateTimeTZ, UpdatedBy
  from prefect._internal.schemas.validators import (
+     get_or_create_run_name,
+     get_or_create_state_name,
      raise_on_name_alphanumeric_dashes_only,
      raise_on_name_with_banned_characters,
  )
@@ -242,13 +244,7 @@ class State(ObjectBaseModel, Generic[R]):
 
      @validator("name", always=True)
      def default_name_from_type(cls, v, *, values, **kwargs):
-         """If a name is not provided, use the type"""
-
-         # if `type` is not in `values` it means the `type` didn't pass its own
-         # validation check and an error will be raised after this function is called
-         if v is None and values.get("type"):
-             v = " ".join([v.capitalize() for v in values.get("type").value.split("_")])
-         return v
+         return get_or_create_state_name(v, values)
 
      @root_validator
      def default_scheduled_start_time(cls, values):
@@ -546,7 +542,7 @@ class FlowRun(ObjectBaseModel):
 
      @validator("name", pre=True)
      def set_default_name(cls, name):
-         return name or generate_slug(2)
+         return get_or_create_run_name(name)
 
      # These are server-side optimizations and should not be present on client models
      # TODO: Deprecate these fields
@@ -754,7 +750,7 @@ class TaskRun(ObjectBaseModel):
 
      @validator("name", pre=True)
      def set_default_name(cls, name):
-         return name or generate_slug(2)
+         return get_or_create_run_name(name)
 
 
  class Workspace(PrefectBaseModel):
@@ -831,8 +827,7 @@ class BlockType(ObjectBaseModel):
 
      @validator("name", check_fields=False)
      def validate_name_characters(cls, v):
-         raise_on_name_with_banned_characters(v)
-         return v
+         return raise_on_name_with_banned_characters(v)
 
 
  class BlockSchema(ObjectBaseModel):
@@ -890,9 +885,7 @@ class BlockDocument(ObjectBaseModel):
      def validate_name_characters(cls, v):
          # the BlockDocumentCreate subclass allows name=None
          # and will inherit this validator
-         if v is not None:
-             raise_on_name_with_banned_characters(v)
-         return v
+         return raise_on_name_with_banned_characters(v)
 
      @root_validator
      def validate_name_is_present_if_not_anonymous(cls, values):
@@ -917,8 +910,7 @@ class Flow(ObjectBaseModel):
 
      @validator("name", check_fields=False)
      def validate_name_characters(cls, v):
-         raise_on_name_with_banned_characters(v)
-         return v
+         return raise_on_name_with_banned_characters(v)
 
 
  class FlowRunnerSettings(PrefectBaseModel):
@@ -1079,8 +1071,7 @@ class Deployment(ObjectBaseModel):
 
      @validator("name", check_fields=False)
      def validate_name_characters(cls, v):
-         raise_on_name_with_banned_characters(v)
-         return v
+         return raise_on_name_with_banned_characters(v)
 
 
  class ConcurrencyLimit(ObjectBaseModel):
@@ -1267,8 +1258,7 @@ class WorkQueue(ObjectBaseModel):
 
      @validator("name", check_fields=False)
      def validate_name_characters(cls, v):
-         raise_on_name_with_banned_characters(v)
-         return v
+         return raise_on_name_with_banned_characters(v)
 
 
  class WorkQueueHealthPolicy(PrefectBaseModel):
@@ -1431,8 +1421,7 @@ class WorkPool(ObjectBaseModel):
 
      @validator("name", check_fields=False)
      def validate_name_characters(cls, v):
-         raise_on_name_with_banned_characters(v)
-         return v
+         return raise_on_name_with_banned_characters(v)
 
      @validator("default_queue_id", always=True)
      def helpful_error_for_missing_default_queue_id(cls, v):
@@ -1625,8 +1614,8 @@ class GlobalConcurrencyLimit(ObjectBaseModel):
          default=0,
          description="Number of tasks currently using a concurrency slot.",
      )
-     slot_decay_per_second: Optional[int] = Field(
-         default=0,
+     slot_decay_per_second: Optional[float] = Field(
+         default=0.0,
          description=(
              "Controls the rate at which slots are released when the concurrency limit"
              " is used as a rate limit."
prefect/client/schemas/schedules.py CHANGED
@@ -8,22 +8,22 @@ from typing import Optional, Union
  import dateutil
  import dateutil.rrule
  import pendulum
- from croniter import croniter
 
  from prefect._internal.pydantic import HAS_PYDANTIC_V2
+ from prefect._internal.schemas.validators import (
+     default_anchor_date,
+     default_timezone,
+     interval_schedule_must_be_positive,
+     validate_cron_string,
+     validate_rrule_string,
+     validate_rrule_timezone,
+ )
 
  if HAS_PYDANTIC_V2:
      from pydantic.v1 import Field, validator
  else:
      from pydantic import Field, validator
 
- from prefect._internal.pytz import HAS_PYTZ
-
- if HAS_PYTZ:
-     import pytz
- else:
-     from prefect._internal import pytz
-
 
  from prefect._internal.schemas.bases import PrefectBaseModel
  from prefect._internal.schemas.fields import DateTimeTZ
@@ -71,41 +71,16 @@ class IntervalSchedule(PrefectBaseModel):
      timezone: Optional[str] = Field(default=None, example="America/New_York")
 
      @validator("interval")
-     def interval_must_be_positive(cls, v):
-         if v.total_seconds() <= 0:
-             raise ValueError("The interval must be positive")
-         return v
+     def validate_interval_schedule(cls, v):
+         return interval_schedule_must_be_positive(v)
 
      @validator("anchor_date", always=True)
-     def default_anchor_date(cls, v):
-         if v is None:
-             return pendulum.now("UTC")
-         return pendulum.instance(v)
+     def validate_anchor_date(cls, v):
+         return default_anchor_date(v)
 
      @validator("timezone", always=True)
-     def default_timezone(cls, v, *, values, **kwargs):
-         # pendulum.tz.timezones is a callable in 3.0 and above
-         # https://github.com/PrefectHQ/prefect/issues/11619
-         if callable(pendulum.tz.timezones):
-             timezones = pendulum.tz.timezones()
-         else:
-             timezones = pendulum.tz.timezones
-         # if was provided, make sure its a valid IANA string
-         if v and v not in timezones:
-             raise ValueError(f'Invalid timezone: "{v}"')
-
-         # otherwise infer the timezone from the anchor date
-         elif v is None and values.get("anchor_date"):
-             tz = values["anchor_date"].tz.name
-             if tz in timezones:
-                 return tz
-             # sometimes anchor dates have "timezones" that are UTC offsets
-             # like "-04:00". This happens when parsing ISO8601 strings.
-             # In this case we, the correct inferred localization is "UTC".
-         else:
-             return "UTC"
-
-         return v
+     def validate_default_timezone(cls, v, values):
+         return default_timezone(v, values=values)
 
 
  class CronSchedule(PrefectBaseModel):
@@ -147,31 +122,11 @@ class CronSchedule(PrefectBaseModel):
 
      @validator("timezone")
      def valid_timezone(cls, v):
-         # pendulum.tz.timezones is a callable in 3.0 and above
-         # https://github.com/PrefectHQ/prefect/issues/11619
-         if callable(pendulum.tz.timezones):
-             timezones = pendulum.tz.timezones()
-         else:
-             timezones = pendulum.tz.timezones
-
-         if v and v not in timezones:
-             raise ValueError(
-                 f'Invalid timezone: "{v}" (specify in IANA tzdata format, for example,'
-                 " America/New_York)"
-             )
-         return v
+         return default_timezone(v)
 
      @validator("cron")
      def valid_cron_string(cls, v):
-         # croniter allows "random" and "hashed" expressions
-         # which we do not support https://github.com/kiorky/croniter
-         if not croniter.is_valid(v):
-             raise ValueError(f'Invalid cron string: "{v}"')
-         elif any(c for c in v.split() if c.casefold() in ["R", "H", "r", "h"]):
-             raise ValueError(
-                 f'Random and Hashed expressions are unsupported, received: "{v}"'
-             )
-         return v
+         return validate_cron_string(v)
 
 
  DEFAULT_ANCHOR_DATE = pendulum.date(2020, 1, 1)
@@ -205,20 +160,7 @@ class RRuleSchedule(PrefectBaseModel):
 
      @validator("rrule")
      def validate_rrule_str(cls, v):
-         # attempt to parse the rrule string as an rrule object
-         # this will error if the string is invalid
-         try:
-             dateutil.rrule.rrulestr(v, cache=True)
-         except ValueError as exc:
-             # rrules errors are a mix of cryptic and informative
-             # so reraise to be clear that the string was invalid
-             raise ValueError(f'Invalid RRule string "{v}": {exc}')
-         if len(v) > MAX_RRULE_LENGTH:
-             raise ValueError(
-                 f'Invalid RRule string "{v[:40]}..."\n'
-                 f"Max length is {MAX_RRULE_LENGTH}, got {len(v)}"
-             )
-         return v
+         return validate_rrule_string(v)
 
      @classmethod
      def from_rrule(cls, rrule: dateutil.rrule.rrule):
@@ -325,11 +267,7 @@ class RRuleSchedule(PrefectBaseModel):
 
      @validator("timezone", always=True)
      def valid_timezone(cls, v):
-         if v and v not in pytz.all_timezones_set:
-             raise ValueError(f'Invalid timezone: "{v}"')
-         elif v is None:
-             return "UTC"
-         return v
+         return validate_rrule_timezone(v)
 
 
  class NoSchedule(PrefectBaseModel):
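
Note: as in objects.py, the schedule validators now delegate to the shared helpers imported at the top of the file; construction behaviour is unchanged for callers. A minimal sketch with illustrative schedule values:

from datetime import timedelta

from prefect.client.schemas.schedules import CronSchedule, IntervalSchedule

# Valid schedules construct exactly as before.
every_ten_minutes = IntervalSchedule(interval=timedelta(minutes=10), timezone="UTC")
nightly = CronSchedule(cron="0 2 * * *", timezone="America/New_York")

# Invalid input still raises a pydantic ValidationError, now produced by the
# shared validators (e.g. a non-positive interval or a malformed cron string).
try:
    IntervalSchedule(interval=timedelta(seconds=0))
except ValueError as exc:
    print(exc)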