prefect-client 3.1.9__py3-none-any.whl → 3.1.11__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (113)
  1. prefect/_experimental/lineage.py +7 -8
  2. prefect/_internal/_logging.py +15 -3
  3. prefect/_internal/compatibility/async_dispatch.py +22 -16
  4. prefect/_internal/compatibility/deprecated.py +42 -18
  5. prefect/_internal/compatibility/migration.py +2 -2
  6. prefect/_internal/concurrency/inspection.py +12 -14
  7. prefect/_internal/concurrency/primitives.py +2 -2
  8. prefect/_internal/concurrency/services.py +154 -80
  9. prefect/_internal/concurrency/waiters.py +13 -9
  10. prefect/_internal/pydantic/annotations/pendulum.py +7 -7
  11. prefect/_internal/pytz.py +4 -3
  12. prefect/_internal/retries.py +10 -5
  13. prefect/_internal/schemas/bases.py +19 -10
  14. prefect/_internal/schemas/validators.py +227 -388
  15. prefect/_version.py +3 -3
  16. prefect/artifacts.py +61 -74
  17. prefect/automations.py +27 -7
  18. prefect/blocks/core.py +3 -3
  19. prefect/client/{orchestration.py → orchestration/__init__.py} +38 -701
  20. prefect/client/orchestration/_artifacts/__init__.py +0 -0
  21. prefect/client/orchestration/_artifacts/client.py +239 -0
  22. prefect/client/orchestration/_concurrency_limits/__init__.py +0 -0
  23. prefect/client/orchestration/_concurrency_limits/client.py +762 -0
  24. prefect/client/orchestration/_logs/__init__.py +0 -0
  25. prefect/client/orchestration/_logs/client.py +95 -0
  26. prefect/client/orchestration/_variables/__init__.py +0 -0
  27. prefect/client/orchestration/_variables/client.py +157 -0
  28. prefect/client/orchestration/base.py +46 -0
  29. prefect/client/orchestration/routes.py +145 -0
  30. prefect/client/schemas/actions.py +2 -2
  31. prefect/client/schemas/filters.py +5 -0
  32. prefect/client/schemas/objects.py +3 -10
  33. prefect/client/schemas/schedules.py +22 -10
  34. prefect/concurrency/_asyncio.py +87 -0
  35. prefect/concurrency/{events.py → _events.py} +10 -10
  36. prefect/concurrency/asyncio.py +20 -104
  37. prefect/concurrency/context.py +6 -4
  38. prefect/concurrency/services.py +26 -74
  39. prefect/concurrency/sync.py +23 -44
  40. prefect/concurrency/v1/_asyncio.py +63 -0
  41. prefect/concurrency/v1/{events.py → _events.py} +13 -15
  42. prefect/concurrency/v1/asyncio.py +27 -80
  43. prefect/concurrency/v1/context.py +6 -4
  44. prefect/concurrency/v1/services.py +33 -79
  45. prefect/concurrency/v1/sync.py +18 -37
  46. prefect/context.py +66 -70
  47. prefect/deployments/base.py +4 -144
  48. prefect/deployments/flow_runs.py +12 -2
  49. prefect/deployments/runner.py +11 -3
  50. prefect/deployments/steps/pull.py +13 -0
  51. prefect/events/clients.py +7 -1
  52. prefect/events/schemas/events.py +3 -2
  53. prefect/flow_engine.py +54 -47
  54. prefect/flows.py +2 -1
  55. prefect/futures.py +42 -27
  56. prefect/input/run_input.py +2 -1
  57. prefect/locking/filesystem.py +8 -7
  58. prefect/locking/memory.py +5 -3
  59. prefect/locking/protocol.py +1 -1
  60. prefect/main.py +1 -3
  61. prefect/plugins.py +12 -10
  62. prefect/results.py +3 -308
  63. prefect/runner/storage.py +87 -21
  64. prefect/serializers.py +32 -25
  65. prefect/settings/legacy.py +4 -4
  66. prefect/settings/models/api.py +3 -3
  67. prefect/settings/models/cli.py +3 -3
  68. prefect/settings/models/client.py +5 -3
  69. prefect/settings/models/cloud.py +3 -3
  70. prefect/settings/models/deployments.py +3 -3
  71. prefect/settings/models/experiments.py +4 -2
  72. prefect/settings/models/flows.py +3 -3
  73. prefect/settings/models/internal.py +4 -2
  74. prefect/settings/models/logging.py +4 -3
  75. prefect/settings/models/results.py +3 -3
  76. prefect/settings/models/root.py +3 -2
  77. prefect/settings/models/runner.py +4 -4
  78. prefect/settings/models/server/api.py +3 -3
  79. prefect/settings/models/server/database.py +11 -4
  80. prefect/settings/models/server/deployments.py +6 -2
  81. prefect/settings/models/server/ephemeral.py +4 -2
  82. prefect/settings/models/server/events.py +3 -2
  83. prefect/settings/models/server/flow_run_graph.py +6 -2
  84. prefect/settings/models/server/root.py +3 -3
  85. prefect/settings/models/server/services.py +26 -11
  86. prefect/settings/models/server/tasks.py +6 -3
  87. prefect/settings/models/server/ui.py +3 -3
  88. prefect/settings/models/tasks.py +5 -5
  89. prefect/settings/models/testing.py +3 -3
  90. prefect/settings/models/worker.py +5 -3
  91. prefect/settings/profiles.py +15 -2
  92. prefect/states.py +4 -7
  93. prefect/task_engine.py +54 -75
  94. prefect/tasks.py +84 -32
  95. prefect/telemetry/processors.py +6 -6
  96. prefect/telemetry/run_telemetry.py +13 -8
  97. prefect/telemetry/services.py +32 -31
  98. prefect/transactions.py +4 -15
  99. prefect/utilities/_git.py +34 -0
  100. prefect/utilities/asyncutils.py +1 -1
  101. prefect/utilities/engine.py +3 -19
  102. prefect/utilities/generics.py +18 -0
  103. prefect/workers/__init__.py +2 -0
  104. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/METADATA +1 -1
  105. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/RECORD +108 -99
  106. prefect/records/__init__.py +0 -1
  107. prefect/records/base.py +0 -235
  108. prefect/records/filesystem.py +0 -213
  109. prefect/records/memory.py +0 -184
  110. prefect/records/result_store.py +0 -70
  111. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/LICENSE +0 -0
  112. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/WHEEL +0 -0
  113. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/top_level.txt +0 -0
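Note on item 19 above: prefect/client/orchestration.py becomes the package prefect/client/orchestration/__init__.py, with artifact, concurrency-limit, log, and variable operations split into the new _artifacts, _concurrency_limits, _logs, and _variables client modules (items 20-29). Because the import path is unchanged, existing code importing from prefect.client.orchestration should keep working; a minimal sketch, assuming the package continues to re-export the public client names as the old module did:

    # Smoke test for the module-to-package move (names assumed to still be re-exported)
    from prefect.client.orchestration import PrefectClient, get_client

    print(PrefectClient)  # the async orchestration client class
    print(get_client)     # factory returning a client for the configured API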
@@ -6,41 +6,51 @@ format.
  This will be subject to consolidation and refactoring over the next few months.
  """

- import json
+ import os
  import re
  import urllib.parse
  import warnings
+ from collections.abc import Iterable, Mapping, MutableMapping
  from copy import copy
  from pathlib import Path
- from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union
+ from typing import TYPE_CHECKING, Any, Optional, TypeVar, Union, overload
  from uuid import UUID

  import jsonschema
  import pendulum
- import yaml
+ import pendulum.tz

- from prefect.exceptions import InvalidRepositoryURLError
- from prefect.types import DateTime
  from prefect.utilities.collections import isiterable
- from prefect.utilities.dockerutils import get_prefect_image_name
  from prefect.utilities.filesystem import relative_path_to_current_platform
  from prefect.utilities.importtools import from_qualified_name
  from prefect.utilities.names import generate_slug
- from prefect.utilities.pydantic import JsonPatch
+
+ if TYPE_CHECKING:
+     from prefect.serializers import Serializer
+
+ T = TypeVar("T")
+ M = TypeVar("M", bound=Mapping[str, Any])
+ MM = TypeVar("MM", bound=MutableMapping[str, Any])
+

  LOWERCASE_LETTERS_NUMBERS_AND_DASHES_ONLY_REGEX = "^[a-z0-9-]*$"
  LOWERCASE_LETTERS_NUMBERS_AND_UNDERSCORES_REGEX = "^[a-z0-9_]*$"

- if TYPE_CHECKING:
-     from prefect.blocks.core import Block
-     from prefect.serializers import Serializer
-     from prefect.utilities.callables import ParameterSchema
+
+ @overload
+ def raise_on_name_alphanumeric_dashes_only(value: str, field_name: str = ...) -> str:
+     ...
+
+
+ @overload
+ def raise_on_name_alphanumeric_dashes_only(value: None, field_name: str = ...) -> None:
+     ...


  def raise_on_name_alphanumeric_dashes_only(
      value: Optional[str], field_name: str = "value"
- ):
-     if value and not bool(
+ ) -> Optional[str]:
+     if value is not None and not bool(
          re.match(LOWERCASE_LETTERS_NUMBERS_AND_DASHES_ONLY_REGEX, value)
      ):
          raise ValueError(
@@ -49,40 +59,38 @@ def raise_on_name_alphanumeric_dashes_only(
      return value


- def raise_on_name_alphanumeric_underscores_only(value, field_name: str = "value"):
-     if not bool(re.match(LOWERCASE_LETTERS_NUMBERS_AND_UNDERSCORES_REGEX, value)):
-         raise ValueError(
-             f"{field_name} must only contain lowercase letters, numbers, and"
-             " underscores."
-         )
-     return value
+ @overload
+ def raise_on_name_alphanumeric_underscores_only(
+     value: str, field_name: str = ...
+ ) -> str:
+     ...


- def validate_schema(schema: dict):
-     """
-     Validate that the provided schema is a valid json schema.
+ @overload
+ def raise_on_name_alphanumeric_underscores_only(
+     value: None, field_name: str = ...
+ ) -> None:
+     ...

-     Args:
-         schema: The schema to validate.

-     Raises:
-         ValueError: If the provided schema is not a valid json schema.
-
-     """
-     try:
-         if schema is not None:
-             # Most closely matches the schemas generated by pydantic
-             jsonschema.Draft202012Validator.check_schema(schema)
-     except jsonschema.SchemaError as exc:
+ def raise_on_name_alphanumeric_underscores_only(
+     value: Optional[str], field_name: str = "value"
+ ) -> Optional[str]:
+     if value is not None and not re.match(
+         LOWERCASE_LETTERS_NUMBERS_AND_UNDERSCORES_REGEX, value
+     ):
          raise ValueError(
-             "The provided schema is not a valid json schema. Schema error:"
-             f" {exc.message}"
-         ) from exc
+             f"{field_name} must only contain lowercase letters, numbers, and"
+             " underscores."
+         )
+     return value


  def validate_values_conform_to_schema(
-     values: dict, schema: dict, ignore_required: bool = False
- ):
+     values: Optional[Mapping[str, Any]],
+     schema: Optional[Mapping[str, Any]],
+     ignore_required: bool = False,
+ ) -> None:
      """
      Validate that the provided values conform to the provided json schema.

@@ -127,90 +135,46 @@ def validate_values_conform_to_schema(
  ### DEPLOYMENT SCHEMA VALIDATORS ###


- def infrastructure_must_have_capabilities(
-     value: Union[Dict[str, Any], "Block", None],
- ) -> Optional["Block"]:
-     """
-     Ensure that the provided value is an infrastructure block with the required capabilities.
-     """
-
-     from prefect.blocks.core import Block
-
-     if isinstance(value, dict):
-         if "_block_type_slug" in value:
-             # Replace private attribute with public for dispatch
-             value["block_type_slug"] = value.pop("_block_type_slug")
-         block = Block(**value)
-     elif value is None:
-         return value
-     else:
-         block = value
-
-     if "run-infrastructure" not in block.get_block_capabilities():
-         raise ValueError(
-             "Infrastructure block must have 'run-infrastructure' capabilities."
+ def validate_parameters_conform_to_schema(
+     parameters: M, values: Mapping[str, Any]
+ ) -> M:
+     """Validate that the parameters conform to the parameter schema."""
+     if values.get("enforce_parameter_schema"):
+         validate_values_conform_to_schema(
+             parameters, values.get("parameter_openapi_schema"), ignore_required=True
          )
-     return block
+     return parameters


- def storage_must_have_capabilities(
-     value: Union[Dict[str, Any], "Block", None],
- ) -> Optional["Block"]:
-     """
-     Ensure that the provided value is a storage block with the required capabilities.
-     """
-     from prefect.blocks.core import Block
-
-     if isinstance(value, dict):
-         block_type = Block.get_block_class_from_key(value.pop("_block_type_slug"))
-         block = block_type(**value)
-     elif value is None:
-         return value
-     else:
-         block = value
+ @overload
+ def validate_parameter_openapi_schema(schema: M, values: Mapping[str, Any]) -> M:
+     ...

-     capabilities = block.get_block_capabilities()
-     if "get-directory" not in capabilities:
-         raise ValueError("Remote Storage block must have 'get-directory' capabilities.")
-     return block

-
- def handle_openapi_schema(value: Optional["ParameterSchema"]) -> "ParameterSchema":
-     """
-     This method ensures setting a value of `None` is handled gracefully.
-     """
-     from prefect.utilities.callables import ParameterSchema
-
-     if value is None:
-         return ParameterSchema()
-     return value
-
-
- def validate_parameters_conform_to_schema(value: dict, values: dict) -> dict:
-     """Validate that the parameters conform to the parameter schema."""
-     if values.get("enforce_parameter_schema"):
-         validate_values_conform_to_schema(
-             value, values.get("parameter_openapi_schema"), ignore_required=True
-         )
-     return value
+ @overload
+ def validate_parameter_openapi_schema(schema: None, values: Mapping[str, Any]) -> None:
+     ...


- def validate_parameter_openapi_schema(value: dict, values: dict) -> dict:
+ def validate_parameter_openapi_schema(
+     schema: Optional[M], values: Mapping[str, Any]
+ ) -> Optional[M]:
      """Validate that the parameter_openapi_schema is a valid json schema."""
      if values.get("enforce_parameter_schema"):
-         validate_schema(value)
-     return value
-
-
- def return_none_schedule(v: Optional[Union[str, dict]]) -> Optional[Union[str, dict]]:
-     from prefect.client.schemas.schedules import NoSchedule
+         try:
+             if schema is not None:
+                 # Most closely matches the schemas generated by pydantic
+                 jsonschema.Draft202012Validator.check_schema(schema)
+         except jsonschema.SchemaError as exc:
+             raise ValueError(
+                 "The provided schema is not a valid json schema. Schema error:"
+                 f" {exc.message}"
+             ) from exc

-     if isinstance(v, NoSchedule):
-         return None
-     return v
+     return schema


- def convert_to_strings(value: Union[Any, List[Any]]) -> Union[str, List[str]]:
+ def convert_to_strings(value: Union[Any, Iterable[Any]]) -> Union[str, list[str]]:
      if isiterable(value):
          return [str(item) for item in value]
      return str(value)
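The standalone validate_schema helper removed above is folded into validate_parameter_openapi_schema, which still defers to jsonschema's Draft 2020-12 meta-schema check and re-raises SchemaError as a ValueError. A minimal sketch of that underlying check, using only the jsonschema dependency this module already imports:

    import jsonschema

    valid = {"type": "object", "properties": {"x": {"type": "integer"}}}
    invalid = {"type": "not-a-json-schema-type"}

    jsonschema.Draft202012Validator.check_schema(valid)  # returns None, no error

    try:
        jsonschema.Draft202012Validator.check_schema(invalid)
    except jsonschema.SchemaError as exc:
        # exc.message is what the validator surfaces inside its ValueError
        print(f"The provided schema is not a valid json schema. Schema error: {exc.message}")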
@@ -219,7 +183,7 @@ def convert_to_strings(value: Union[Any, List[Any]]) -> Union[str, List[str]]:
  ### SCHEDULE SCHEMA VALIDATORS ###


- def reconcile_schedules_runner(values: dict) -> dict:
+ def reconcile_schedules_runner(values: MM) -> MM:
      from prefect.deployments.schedules import (
          normalize_to_deployment_schedule_create,
      )
@@ -231,13 +195,23 @@ def reconcile_schedules_runner(values: dict) -> dict:
      return values


+ @overload
+ def validate_schedule_max_scheduled_runs(v: int, limit: int) -> int:
+     ...
+
+
+ @overload
+ def validate_schedule_max_scheduled_runs(v: None, limit: int) -> None:
+     ...
+
+
  def validate_schedule_max_scheduled_runs(v: Optional[int], limit: int) -> Optional[int]:
      if v is not None and v > limit:
          raise ValueError(f"`max_scheduled_runs` must be less than or equal to {limit}.")
      return v


- def remove_old_deployment_fields(values: dict) -> dict:
+ def remove_old_deployment_fields(values: MM) -> MM:
      # 2.7.7 removed worker_pool_queue_id in lieu of worker_pool_name and
      # worker_pool_queue_name. Those fields were later renamed to work_pool_name
      # and work_queue_name. This validator removes old fields provided
@@ -270,7 +244,7 @@ def remove_old_deployment_fields(values: dict) -> dict:
      return values_copy


- def reconcile_paused_deployment(values):
+ def reconcile_paused_deployment(values: MM) -> MM:
      paused = values.get("paused")

      if paused is None:
@@ -279,45 +253,44 @@ def reconcile_paused_deployment(values):
      return values


- def default_anchor_date(v: DateTime) -> DateTime:
+ def default_anchor_date(v: pendulum.DateTime) -> pendulum.DateTime:
      return pendulum.instance(v)


- def get_valid_timezones(v: Optional[str]) -> Tuple[str, ...]:
-     # pendulum.tz.timezones is a callable in 3.0 and above
-     # https://github.com/PrefectHQ/prefect/issues/11619
-     if callable(pendulum.tz.timezones):
-         return pendulum.tz.timezones()
-     else:
-         return pendulum.tz.timezones
+ @overload
+ def default_timezone(v: str, values: Optional[Mapping[str, Any]] = ...) -> str:
+     ...


- def validate_timezone(v: str, timezones: Tuple[str, ...]) -> str:
-     if v and v not in timezones:
-         raise ValueError(
-             f'Invalid timezone: "{v}" (specify in IANA tzdata format, for example,'
-             " America/New_York)"
-         )
-     return v
+ @overload
+ def default_timezone(
+     v: None, values: Optional[Mapping[str, Any]] = ...
+ ) -> Optional[str]:
+     ...


- def default_timezone(v: Optional[str], values: Optional[dict] = None) -> str:
+ def default_timezone(
+     v: Optional[str], values: Optional[Mapping[str, Any]] = None
+ ) -> Optional[str]:
      values = values or {}
-     timezones = get_valid_timezones(v)
+     timezones = pendulum.tz.timezones()

      if v is not None:
-         return validate_timezone(v, timezones)
+         if v and v not in timezones:
+             raise ValueError(
+                 f'Invalid timezone: "{v}" (specify in IANA tzdata format, for example,'
+                 " America/New_York)"
+             )
+         return v

      # anchor schedules
-     elif v is None and values and values.get("anchor_date"):
-         tz = getattr(values["anchor_date"].tz, "name", None) or "UTC"
-         if tz in timezones:
-             return tz
+     elif "anchor_date" in values:
+         anchor_date: pendulum.DateTime = values["anchor_date"]
+         tz = "UTC" if anchor_date.tz is None else anchor_date.tz.name
          # sometimes anchor dates have "timezones" that are UTC offsets
          # like "-04:00". This happens when parsing ISO8601 strings.
          # In this case we, the correct inferred localization is "UTC".
-     else:
-         return "UTC"
+         return tz if tz in timezones else "UTC"

      # cron schedules
      return v
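default_timezone now assumes pendulum 3's callable pendulum.tz.timezones() (the pendulum 2 fallback removed above is gone) and inlines both the IANA-name check and the anchor-date fallback. A minimal sketch of the two branches, assuming pendulum 3.x is installed; the exact name reported for a fixed UTC offset can vary by pendulum version:

    import pendulum
    import pendulum.tz

    names = pendulum.tz.timezones()  # collection of IANA tzdata names in pendulum 3.x
    print("America/New_York" in names)  # True, so an explicit value like this is accepted

    # An ISO 8601 string with a bare offset yields a fixed-offset timezone whose
    # name (e.g. "-04:00") is not an IANA entry, so the validator falls back to "UTC".
    anchor = pendulum.parse("2025-06-01T09:00:00-04:00")
    print(anchor.tz.name, anchor.tz.name in names)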
@@ -360,119 +333,18 @@ def validate_rrule_string(v: str) -> str:
      return v


- ### INFRASTRUCTURE SCHEMA VALIDATORS ###
-
-
- def validate_k8s_job_required_components(cls, value: Dict[str, Any]):
-     """
-     Validate that a Kubernetes job manifest has all required components.
-     """
-     from prefect.utilities.pydantic import JsonPatch
-
-     patch = JsonPatch.from_diff(value, cls.base_job_manifest())
-     missing_paths = sorted([op["path"] for op in patch if op["op"] == "add"])
-     if missing_paths:
-         raise ValueError(
-             "Job is missing required attributes at the following paths: "
-             f"{', '.join(missing_paths)}"
-         )
-     return value
-
-
- def validate_k8s_job_compatible_values(cls, value: Dict[str, Any]):
-     """
-     Validate that the provided job values are compatible with the job type.
-     """
-     from prefect.utilities.pydantic import JsonPatch
-
-     patch = JsonPatch.from_diff(value, cls.base_job_manifest())
-     incompatible = sorted(
-         [
-             f"{op['path']} must have value {op['value']!r}"
-             for op in patch
-             if op["op"] == "replace"
-         ]
-     )
-     if incompatible:
-         raise ValueError(
-             "Job has incompatible values for the following attributes: "
-             f"{', '.join(incompatible)}"
-         )
-     return value
-
-
- def cast_k8s_job_customizations(
-     cls, value: Union[JsonPatch, str, List[Dict[str, Any]]]
- ):
-     if isinstance(value, list):
-         return JsonPatch(value)
-     elif isinstance(value, str):
-         try:
-             return JsonPatch(json.loads(value))
-         except json.JSONDecodeError as exc:
-             raise ValueError(
-                 f"Unable to parse customizations as JSON: {value}. Please make sure"
-                 " that the provided value is a valid JSON string."
-             ) from exc
-     return value
-
-
- def set_default_namespace(values: dict) -> dict:
-     """
-     Set the default namespace for a Kubernetes job if not provided.
-     """
-     job = values.get("job")
-
-     namespace = values.get("namespace")
-     job_namespace = job["metadata"].get("namespace") if job else None
-
-     if not namespace and not job_namespace:
-         values["namespace"] = "default"
-
-     return values
-
-
- def set_default_image(values: dict) -> dict:
-     """
-     Set the default image for a Kubernetes job if not provided.
-     """
-
-     job = values.get("job")
-     image = values.get("image")
-     job_image = (
-         job["spec"]["template"]["spec"]["containers"][0].get("image") if job else None
-     )
-
-     if not image and not job_image:
-         values["image"] = get_prefect_image_name()
-
-     return values
-
-
  ### STATE SCHEMA VALIDATORS ###


- def get_or_create_state_name(v: str, values: dict) -> str:
-     """If a name is not provided, use the type"""
-
-     # if `type` is not in `values` it means the `type` didn't pass its own
-     # validation check and an error will be raised after this function is called
-     if v is None and values.get("type"):
-         v = " ".join([v.capitalize() for v in values.get("type").value.split("_")])
-     return v
-
-
- def get_or_create_run_name(name):
+ def get_or_create_run_name(name: Optional[str]) -> str:
      return name or generate_slug(2)


  ### FILESYSTEM SCHEMA VALIDATORS ###


- def stringify_path(value: Union[str, Path]) -> str:
-     if isinstance(value, Path):
-         return str(value)
-     return value
+ def stringify_path(value: Union[str, os.PathLike[str]]) -> str:
+     return os.fspath(value)


  def validate_basepath(value: str) -> str:
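stringify_path above now accepts any os.PathLike[str] rather than only pathlib.Path, and delegates to os.fspath, which passes strings through unchanged and calls __fspath__ on path objects. A small illustration of that standard-library behavior:

    import os
    from pathlib import Path

    print(os.fspath(Path("flows") / "etl.py"))  # 'flows/etl.py' (backslashes on Windows)
    print(os.fspath("s3://bucket/prefix"))      # plain strings pass through unchanged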
@@ -495,25 +367,6 @@ def validate_basepath(value: str) -> str:
      return value


- def validate_github_access_token(v: str, values: dict) -> str:
-     """Ensure that credentials are not provided with 'SSH' formatted GitHub URLs.
-
-     Note: validates `access_token` specifically so that it only fires when
-     private repositories are used.
-     """
-     if v is not None:
-         if urllib.parse.urlparse(values["repository"]).scheme != "https":
-             raise InvalidRepositoryURLError(
-                 "Crendentials can only be used with GitHub repositories "
-                 "using the 'HTTPS' format. You must either remove the "
-                 "credential if you wish to use the 'SSH' format and are not "
-                 "using a private repository, or you must change the repository "
-                 "URL to the 'HTTPS' format. "
-             )
-
-     return v
-
-
  ### SERIALIZER SCHEMA VALIDATORS ###

@@ -537,49 +390,7 @@ def validate_picklelib(value: str) -> str:
      return value


- def validate_picklelib_version(values: dict) -> dict:
-     """
-     Infers a default value for `picklelib_version` if null or ensures it matches
-     the version retrieved from the `pickelib`.
-     """
-     picklelib = values.get("picklelib")
-     picklelib_version = values.get("picklelib_version")
-
-     if not picklelib:
-         raise ValueError("Unable to check version of unrecognized picklelib module")
-
-     pickler = from_qualified_name(picklelib)
-     pickler_version = getattr(pickler, "__version__", None)
-
-     if not picklelib_version:
-         values["picklelib_version"] = pickler_version
-     elif picklelib_version != pickler_version:
-         warnings.warn(
-             (
-                 f"Mismatched {picklelib!r} versions. Found {pickler_version} in the"
-                 f" environment but {picklelib_version} was requested. This may"
-                 " cause the serializer to fail."
-             ),
-             RuntimeWarning,
-             stacklevel=3,
-         )
-
-     return values
-
-
- def validate_picklelib_and_modules(values: dict) -> dict:
-     """
-     Prevents modules from being specified if picklelib is not cloudpickle
-     """
-     if values.get("picklelib") != "cloudpickle" and values.get("pickle_modules"):
-         raise ValueError(
-             "`pickle_modules` cannot be used without 'cloudpickle'. Got"
-             f" {values.get('picklelib')!r}."
-         )
-     return values
-
-
- def validate_dump_kwargs(value: dict[str, Any]) -> dict[str, Any]:
+ def validate_dump_kwargs(value: M) -> M:
      # `default` is set by `object_encoder`. A user provided callable would make this
      # class unserializable anyway.
      if "default" in value:
@@ -587,7 +398,7 @@ def validate_dump_kwargs(value: dict[str, Any]) -> dict[str, Any]:
      return value


- def validate_load_kwargs(value: dict[str, Any]) -> dict[str, Any]:
+ def validate_load_kwargs(value: M) -> M:
      # `object_hook` is set by `object_decoder`. A user provided callable would make
      # this class unserializable anyway.
      if "object_hook" in value:
@@ -597,7 +408,19 @@ def validate_load_kwargs(value: dict[str, Any]) -> dict[str, Any]:
      return value


- def cast_type_names_to_serializers(value: Union[str, "Serializer"]) -> "Serializer":
+ @overload
+ def cast_type_names_to_serializers(value: str) -> "Serializer[Any]":
+     ...
+
+
+ @overload
+ def cast_type_names_to_serializers(value: "Serializer[T]") -> "Serializer[T]":
+     ...
+
+
+ def cast_type_names_to_serializers(
+     value: Union[str, "Serializer[Any]"],
+ ) -> "Serializer[Any]":
      from prefect.serializers import Serializer

      if isinstance(value, str):
@@ -631,19 +454,49 @@ def validate_compressionlib(value: str) -> str:


  # TODO: if we use this elsewhere we can change the error message to be more generic
- def list_length_50_or_less(v: Optional[List[float]]) -> Optional[List[float]]:
+ @overload
+ def list_length_50_or_less(v: list[float]) -> list[float]:
+     ...
+
+
+ @overload
+ def list_length_50_or_less(v: None) -> None:
+     ...
+
+
+ def list_length_50_or_less(v: Optional[list[float]]) -> Optional[list[float]]:
      if isinstance(v, list) and (len(v) > 50):
          raise ValueError("Can not configure more than 50 retry delays per task.")
      return v


  # TODO: if we use this elsewhere we can change the error message to be more generic
+ @overload
+ def validate_not_negative(v: float) -> float:
+     ...
+
+
+ @overload
+ def validate_not_negative(v: None) -> None:
+     ...
+
+
  def validate_not_negative(v: Optional[float]) -> Optional[float]:
      if v is not None and v < 0:
          raise ValueError("`retry_jitter_factor` must be >= 0.")
      return v


+ @overload
+ def validate_message_template_variables(v: str) -> str:
+     ...
+
+
+ @overload
+ def validate_message_template_variables(v: None) -> None:
+     ...
+
+
  def validate_message_template_variables(v: Optional[str]) -> Optional[str]:
      from prefect.client.schemas.objects import FLOW_RUN_NOTIFICATION_TEMPLATE_KWARGS

@@ -665,11 +518,19 @@ def validate_default_queue_id_not_none(v: Optional[UUID]) -> UUID:
      return v


- def validate_max_metadata_length(
-     v: Optional[Dict[str, Any]],
- ) -> Optional[Dict[str, Any]]:
+ @overload
+ def validate_max_metadata_length(v: MM) -> MM:
+     ...
+
+
+ @overload
+ def validate_max_metadata_length(v: None) -> None:
+     ...
+
+
+ def validate_max_metadata_length(v: Optional[MM]) -> Optional[MM]:
      max_metadata_length = 500
-     if not isinstance(v, dict):
+     if v is None:
          return v
      for key in v.keys():
          if len(str(v[key])) > max_metadata_length:
@@ -677,79 +538,17 @@ def validate_max_metadata_length(
      return v


- ### DOCKER SCHEMA VALIDATORS ###
-
-
- def validate_registry_url(value: Optional[str]) -> Optional[str]:
-     if isinstance(value, str):
-         if "://" not in value:
-             return "https://" + value
-     return value
-
-
- def convert_labels_to_docker_format(labels: Dict[str, str]) -> Dict[str, str]:
-     labels = labels or {}
-     new_labels = {}
-     for name, value in labels.items():
-         if "/" in name:
-             namespace, key = name.split("/", maxsplit=1)
-             new_namespace = ".".join(reversed(namespace.split(".")))
-             new_labels[f"{new_namespace}.{key}"] = value
-         else:
-             new_labels[name] = value
-     return new_labels
-
-
- def check_volume_format(volumes: List[str]) -> List[str]:
-     for volume in volumes:
-         if ":" not in volume:
-             raise ValueError(
-                 "Invalid volume specification. "
-                 f"Expected format 'path:container_path', but got {volume!r}"
-             )
-
-     return volumes
-
-
- def base_image_xor_dockerfile(values: Mapping[str, Any]):
-     if values.get("base_image") and values.get("dockerfile"):
-         raise ValueError(
-             "Either `base_image` or `dockerfile` should be provided, but not both"
-         )
-     return values
-
-
- ### SETTINGS SCHEMA VALIDATORS ###
-
+ ### TASK RUN SCHEMA VALIDATORS ###

- def validate_settings(value: dict) -> dict:
-     from prefect.settings import Setting, Settings
-     from prefect.settings.legacy import _get_settings_fields

-     if value is None:
-         return value
+ @overload
+ def validate_cache_key_length(cache_key: str) -> str:
+     ...

-     # Cast string setting names to variables
-     validated = {}
-     for setting, val in value.items():
-         settings_fields = _get_settings_fields(Settings)
-         if isinstance(setting, str) and setting in settings_fields:
-             validated[settings_fields[setting]] = val
-         elif isinstance(setting, Setting):
-             validated[setting] = val
-         else:
-             warnings.warn(f"Setting {setting!r} is not recognized and will be ignored.")

-     return validated
-
-
- def validate_yaml(value: Union[str, dict]) -> dict:
-     if isinstance(value, str):
-         return yaml.safe_load(value)
-     return value
-
-
- ### TASK RUN SCHEMA VALIDATORS ###
+ @overload
+ def validate_cache_key_length(cache_key: None) -> None:
+     ...


  def validate_cache_key_length(cache_key: Optional[str]) -> Optional[str]:
@@ -765,7 +564,7 @@ def validate_cache_key_length(cache_key: Optional[str]) -> Optional[str]:
      return cache_key


- def set_run_policy_deprecated_fields(values: dict) -> dict:
+ def set_run_policy_deprecated_fields(values: MM) -> MM:
      """
      If deprecated fields are provided, populate the corresponding new fields
      to preserve orchestration behavior.
@@ -785,6 +584,16 @@ def set_run_policy_deprecated_fields(values: dict) -> dict:
  ### PYTHON ENVIRONMENT SCHEMA VALIDATORS ###


+ @overload
+ def return_v_or_none(v: str) -> str:
+     ...
+
+
+ @overload
+ def return_v_or_none(v: None) -> None:
+     ...
+
+
  def return_v_or_none(v: Optional[str]) -> Optional[str]:
      """Make sure that empty strings are treated as None"""
      if not v:
@@ -795,7 +604,7 @@ def return_v_or_none(v: Optional[str]) -> Optional[str]:
  ### BLOCK SCHEMA VALIDATORS ###


- def validate_parent_and_ref_diff(values: dict) -> dict:
+ def validate_parent_and_ref_diff(values: M) -> M:
      parent_id = values.get("parent_block_document_id")
      ref_id = values.get("reference_block_document_id")
      if parent_id and ref_id and parent_id == ref_id:
@@ -806,7 +615,7 @@ def validate_parent_and_ref_diff(values: dict) -> dict:
      return values


- def validate_name_present_on_nonanonymous_blocks(values: dict) -> dict:
+ def validate_name_present_on_nonanonymous_blocks(values: M) -> M:
      # anonymous blocks may have no name prior to actually being
      # stored in the database
      if not values.get("is_anonymous") and not values.get("name"):
@@ -817,9 +626,19 @@ def validate_name_present_on_nonanonymous_blocks(values: dict) -> dict:
  ### PROCESS JOB CONFIGURATION VALIDATORS ###


+ @overload
  def validate_command(v: str) -> Path:
+     ...
+
+
+ @overload
+ def validate_command(v: None) -> None:
+     ...
+
+
+ def validate_command(v: Optional[str]) -> Optional[Path]:
      """Make sure that the working directory is formatted for the current platform."""
-     if v:
+     if v is not None:
          return relative_path_to_current_platform(v)
      return v

@@ -830,23 +649,43 @@ def validate_command(v: str) -> Path:
  # catch-all for validators until we organize these into files


- def validate_block_document_name(value):
+ @overload
+ def validate_block_document_name(value: str) -> str:
+     ...
+
+
+ @overload
+ def validate_block_document_name(value: None) -> None:
+     ...
+
+
+ def validate_block_document_name(value: Optional[str]) -> Optional[str]:
      if value is not None:
          raise_on_name_alphanumeric_dashes_only(value, field_name="Block document name")
      return value


- def validate_artifact_key(value):
+ def validate_artifact_key(value: str) -> str:
      raise_on_name_alphanumeric_dashes_only(value, field_name="Artifact key")
      return value


- def validate_variable_name(value):
+ @overload
+ def validate_variable_name(value: str) -> str:
+     ...
+
+
+ @overload
+ def validate_variable_name(value: None) -> None:
+     ...
+
+
+ def validate_variable_name(value: Optional[str]) -> Optional[str]:
      if value is not None:
          raise_on_name_alphanumeric_underscores_only(value, field_name="Variable name")
      return value


- def validate_block_type_slug(value):
+ def validate_block_type_slug(value: str):
      raise_on_name_alphanumeric_dashes_only(value, field_name="Block type slug")
      return value
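The recurring change throughout this file is replacing loosely typed validators with typing.overload declarations over a single implementation, so type checkers see that None passes through as None while concrete inputs keep their concrete types. A minimal sketch of the pattern with a hypothetical normalize helper (not part of Prefect):

    from typing import Optional, overload

    @overload
    def normalize(value: str) -> str: ...
    @overload
    def normalize(value: None) -> None: ...

    def normalize(value: Optional[str]) -> Optional[str]:
        # Single runtime implementation backing both declared signatures
        if value is not None and not value.islower():
            raise ValueError("value must be lowercase")
        return value

    print(normalize("ok"))   # a type checker infers 'str' here
    print(normalize(None))   # and 'None' here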