prefect-client 3.1.15__py3-none-any.whl → 3.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. prefect/_experimental/sla/objects.py +29 -1
  2. prefect/_internal/compatibility/deprecated.py +4 -4
  3. prefect/_internal/compatibility/migration.py +1 -1
  4. prefect/_internal/concurrency/calls.py +1 -2
  5. prefect/_internal/concurrency/cancellation.py +2 -4
  6. prefect/_internal/concurrency/threads.py +3 -3
  7. prefect/_internal/schemas/bases.py +3 -11
  8. prefect/_internal/schemas/validators.py +36 -60
  9. prefect/_result_records.py +235 -0
  10. prefect/_version.py +3 -3
  11. prefect/agent.py +1 -0
  12. prefect/automations.py +4 -8
  13. prefect/blocks/notifications.py +8 -8
  14. prefect/cache_policies.py +2 -0
  15. prefect/client/base.py +7 -8
  16. prefect/client/collections.py +3 -6
  17. prefect/client/orchestration/__init__.py +15 -263
  18. prefect/client/orchestration/_deployments/client.py +14 -6
  19. prefect/client/orchestration/_flow_runs/client.py +10 -6
  20. prefect/client/orchestration/_work_pools/__init__.py +0 -0
  21. prefect/client/orchestration/_work_pools/client.py +598 -0
  22. prefect/client/orchestration/base.py +9 -2
  23. prefect/client/schemas/actions.py +66 -2
  24. prefect/client/schemas/objects.py +22 -50
  25. prefect/client/schemas/schedules.py +7 -18
  26. prefect/client/types/flexible_schedule_list.py +2 -1
  27. prefect/context.py +2 -3
  28. prefect/deployments/flow_runs.py +1 -1
  29. prefect/deployments/runner.py +119 -43
  30. prefect/deployments/schedules.py +7 -1
  31. prefect/engine.py +4 -9
  32. prefect/events/schemas/automations.py +4 -2
  33. prefect/events/utilities.py +15 -13
  34. prefect/exceptions.py +1 -1
  35. prefect/flow_engine.py +8 -8
  36. prefect/flow_runs.py +4 -8
  37. prefect/flows.py +30 -22
  38. prefect/infrastructure/__init__.py +1 -0
  39. prefect/infrastructure/base.py +1 -0
  40. prefect/infrastructure/provisioners/__init__.py +3 -6
  41. prefect/infrastructure/provisioners/coiled.py +3 -3
  42. prefect/infrastructure/provisioners/container_instance.py +1 -0
  43. prefect/infrastructure/provisioners/ecs.py +6 -6
  44. prefect/infrastructure/provisioners/modal.py +3 -3
  45. prefect/input/run_input.py +5 -7
  46. prefect/locking/filesystem.py +4 -3
  47. prefect/main.py +1 -1
  48. prefect/results.py +42 -249
  49. prefect/runner/runner.py +9 -4
  50. prefect/runner/server.py +5 -5
  51. prefect/runner/storage.py +12 -10
  52. prefect/runner/submit.py +2 -4
  53. prefect/schedules.py +231 -0
  54. prefect/serializers.py +5 -5
  55. prefect/settings/__init__.py +2 -1
  56. prefect/settings/base.py +3 -3
  57. prefect/settings/models/root.py +4 -0
  58. prefect/settings/models/server/services.py +50 -9
  59. prefect/settings/sources.py +4 -4
  60. prefect/states.py +42 -11
  61. prefect/task_engine.py +10 -10
  62. prefect/task_runners.py +11 -22
  63. prefect/task_worker.py +9 -9
  64. prefect/tasks.py +22 -41
  65. prefect/telemetry/bootstrap.py +4 -6
  66. prefect/telemetry/services.py +2 -4
  67. prefect/types/__init__.py +2 -1
  68. prefect/types/_datetime.py +28 -1
  69. prefect/utilities/_engine.py +0 -1
  70. prefect/utilities/asyncutils.py +4 -8
  71. prefect/utilities/collections.py +13 -22
  72. prefect/utilities/dispatch.py +2 -4
  73. prefect/utilities/dockerutils.py +6 -6
  74. prefect/utilities/importtools.py +1 -68
  75. prefect/utilities/names.py +1 -1
  76. prefect/utilities/processutils.py +3 -6
  77. prefect/utilities/pydantic.py +4 -6
  78. prefect/utilities/schema_tools/hydration.py +6 -5
  79. prefect/utilities/templating.py +16 -10
  80. prefect/utilities/visualization.py +2 -4
  81. prefect/workers/base.py +3 -3
  82. prefect/workers/block.py +1 -0
  83. prefect/workers/cloud.py +1 -0
  84. prefect/workers/process.py +1 -0
  85. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/METADATA +1 -1
  86. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/RECORD +89 -85
  87. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/LICENSE +0 -0
  88. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/WHEEL +0 -0
  89. {prefect_client-3.1.15.dist-info → prefect_client-3.2.0.dist-info}/top_level.txt +0 -0
prefect/_experimental/sla/objects.py CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import abc
+from datetime import timedelta
 from typing import Literal, Optional, Union
 from uuid import UUID
 
@@ -49,6 +50,33 @@ class TimeToCompletionSla(ServiceLevelAgreement):
     )
 
 
+class FrequencySla(ServiceLevelAgreement):
+    """An SLA that triggers when a completed flow run is not detected in the specified time.
+
+    For example, if stale_after is 1 hour, if a flow run does not complete
+    within an hour of the previous flow run, the SLA will trigger.
+    """
+
+    stale_after: timedelta = Field(
+        default=...,
+        description="The amount of time after which a flow run is considered in violation.",
+    )
+
+
+class LatenessSla(ServiceLevelAgreement):
+    """An SLA that triggers when a flow run does not start within the specified window.
+
+    For example, if you schedule the deployment to run every day at 2:00pm and you pass
+    within=timedelta(minutes=10) to this SLA, if a run hasn't started by 2:10pm the SLA
+    violation will be recorded.
+    """
+
+    within: timedelta = Field(
+        default=...,
+        description="The amount of time before a flow run is considered in violation.",
+    )
+
+
 class SlaMergeResponse(PrefectBaseModel):
     """A response object for the apply_slas_for_deployment method. Contains the names of the created, updated, and deleted SLAs."""
 
@@ -58,4 +86,4 @@ class SlaMergeResponse(PrefectBaseModel):
 
 
 # Concrete SLA types
-SlaTypes: TypeAlias = Union[TimeToCompletionSla]
+SlaTypes: TypeAlias = Union[TimeToCompletionSla, LatenessSla, FrequencySla]
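Two new SLA types join `TimeToCompletionSla` in the `SlaTypes` union. A minimal construction sketch based only on the fields shown in this hunk; the `name` argument comes from the `ServiceLevelAgreement` base class and is an assumption here, as is how the objects are ultimately attached to a deployment:

```python
from datetime import timedelta

from prefect._experimental.sla.objects import FrequencySla, LatenessSla

# Violation if a run hasn't started within 10 minutes of its scheduled time.
# `name` is assumed to be a base-class field; it is not shown in this hunk.
lateness = LatenessSla(name="starts-on-time", within=timedelta(minutes=10))

# Violation if no completed run is detected within an hour of the previous one.
freshness = FrequencySla(name="hourly-freshness", stale_after=timedelta(hours=1))

# Both are members of SlaTypes and would be handed to a deployment-level SLA
# API (not part of this file) for the server to evaluate.
slas = [lateness, freshness]
```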
prefect/_internal/compatibility/deprecated.py CHANGED
@@ -15,10 +15,10 @@ import sys
 import warnings
 from typing import TYPE_CHECKING, Any, Callable, Optional, Union
 
-import pendulum
 from pydantic import BaseModel
 from typing_extensions import ParamSpec, TypeAlias, TypeVar
 
+from prefect.types._datetime import DateTime, from_format
 from prefect.utilities.callables import get_call_parameters
 from prefect.utilities.importtools import (
     AliasedModuleDefinition,
@@ -60,18 +60,18 @@ def generate_deprecation_message(
     if not start_date and not end_date:
         raise ValueError(
             "A start date is required if an end date is not provided. Suggested start"
-            f" date is {pendulum.now('UTC').format(DEPRECATED_DATEFMT)!r}"
+            f" date is {DateTime.now('UTC').format(DEPRECATED_DATEFMT)!r}"
         )
 
     if not end_date:
         if TYPE_CHECKING:
             assert start_date is not None
-        parsed_start_date = pendulum.from_format(start_date, DEPRECATED_DATEFMT)
+        parsed_start_date = from_format(start_date, DEPRECATED_DATEFMT)
         parsed_end_date = parsed_start_date.add(months=6)
         end_date = parsed_end_date.format(DEPRECATED_DATEFMT)
     else:
         # Validate format
-        pendulum.from_format(end_date, DEPRECATED_DATEFMT)
+        from_format(end_date, DEPRECATED_DATEFMT)
 
     if when:
         when = " when " + when
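This hunk swaps `pendulum` for helpers in `prefect.types._datetime` one-for-one, so the call shapes stay the same. A rough sketch of the equivalence it relies on; the `"MMM YYYY"` value of `DEPRECATED_DATEFMT` is an assumption (the hunk only references the constant):

```python
from prefect.types._datetime import DateTime, from_format

DEPRECATED_DATEFMT = "MMM YYYY"  # assumed value; defined elsewhere in deprecated.py

# Previously pendulum.now("UTC").format(...)
suggested_start = DateTime.now("UTC").format(DEPRECATED_DATEFMT)

# Previously pendulum.from_format(...); the returned object still supports the
# pendulum-style .add() and .format() calls seen in the unchanged lines above.
parsed_start = from_format(suggested_start, DEPRECATED_DATEFMT)
suggested_end = parsed_start.add(months=6).format(DEPRECATED_DATEFMT)
print(suggested_start, "->", suggested_end)
```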
prefect/_internal/compatibility/migration.py CHANGED
@@ -34,7 +34,7 @@ Removed objects:
 ```python
 # at top
 from prefect._internal.compatibility.migration import getattr_migration
-
+
 # at bottom
 __getattr__ = getattr_migration(__name__)
 ```
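For context, the docstring touched above describes the shim pattern that several modules in this release (e.g. `prefect/agent.py` further down) use. From the caller's side it behaves roughly like this; the attribute name below is illustrative only:

```python
# Inside a shim module such as prefect/agent.py:
from prefect._internal.compatibility.migration import getattr_migration

__getattr__ = getattr_migration(__name__)

# From user code, touching a moved or removed name raises an actionable error
# pointing at the Prefect 3.x replacement instead of a bare AttributeError,
# e.g. (illustrative):
#
#   import prefect.agent
#   prefect.agent.PrefectAgent  # -> error explaining the object moved or was removed
```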
prefect/_internal/concurrency/calls.py CHANGED
@@ -155,8 +155,7 @@ class Future(concurrent.futures.Future[T]):
 
     if TYPE_CHECKING:
 
-        def __get_result(self) -> T:
-            ...
+        def __get_result(self) -> T: ...
 
     def result(self, timeout: Optional[float] = None) -> T:
         """Return the result of the call that the future represents.
prefect/_internal/concurrency/cancellation.py CHANGED
@@ -473,13 +473,11 @@ class WatcherThreadCancelScope(CancelScope):
 
 
 @overload
-def get_deadline(timeout: float) -> float:
-    ...
+def get_deadline(timeout: float) -> float: ...
 
 
 @overload
-def get_deadline(timeout: None) -> None:
-    ...
+def get_deadline(timeout: None) -> None: ...
 
 
 def get_deadline(timeout: Optional[float]) -> Optional[float]:
prefect/_internal/concurrency/threads.py CHANGED
@@ -135,9 +135,9 @@ class EventLoopThread(Portal):
         self.thread = threading.Thread(
             name=name, daemon=daemon, target=self._entrypoint
         )
-        self._ready_future: concurrent.futures.Future[
-            bool
-        ] = concurrent.futures.Future()
+        self._ready_future: concurrent.futures.Future[bool] = (
+            concurrent.futures.Future()
+        )
         self._loop: Optional[asyncio.AbstractEventLoop] = None
         self._shutdown_event: Event = Event()
         self._run_once: bool = run_once
prefect/_internal/schemas/bases.py CHANGED
@@ -3,7 +3,6 @@ Utilities for creating and working with Prefect REST API schemas.
 """
 
 import datetime
-import os
 from typing import Any, ClassVar, Optional, TypeVar, cast
 from uuid import UUID, uuid4
 
@@ -25,9 +24,7 @@ class PrefectBaseModel(BaseModel):
     fields that are passed to it at instantiation. Because adding new fields to
     API payloads is not considered a breaking change, this ensures that any
     Prefect client loading data from a server running a possibly-newer version
-    of Prefect will be able to process those new fields gracefully. However,
-    when PREFECT_TEST_MODE is on, extra fields are forbidden in order to catch
-    subtle unintentional testing errors.
+    of Prefect will be able to process those new fields gracefully.
     """
 
     _reset_fields: ClassVar[set[str]] = set()
@@ -35,16 +32,11 @@ class PrefectBaseModel(BaseModel):
     model_config: ClassVar[ConfigDict] = ConfigDict(
         ser_json_timedelta="float",
         defer_build=True,
-        extra=(
-            "ignore"
-            if os.getenv("PREFECT_TEST_MODE", "0").lower() not in ["true", "1"]
-            and os.getenv("PREFECT_TESTING_TEST_MODE", "0").lower() not in ["true", "1"]
-            else "forbid"
-        ),
+        extra="ignore",
     )
 
     def __eq__(self, other: Any) -> bool:
-        """Equaltiy operator that ignores the resettable fields of the PrefectBaseModel.
+        """Equality operator that ignores the resettable fields of the PrefectBaseModel.
 
         NOTE: this equality operator will only be applied if the PrefectBaseModel is
         the left-hand operand. This is a limitation of Python.
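With the `os.getenv` branch gone, `PrefectBaseModel` now ignores unknown fields unconditionally instead of forbidding them under `PREFECT_TEST_MODE`. A small sketch of the client-side effect; the `Thing` model is hypothetical:

```python
from prefect._internal.schemas.bases import PrefectBaseModel


class Thing(PrefectBaseModel):  # hypothetical example model
    name: str


# A payload from a newer server may include fields this client doesn't know
# about. They are now silently dropped in every environment, including tests.
thing = Thing.model_validate({"name": "demo", "brand_new_field": 123})
print(thing.model_dump())  # {'name': 'demo'}
```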
prefect/_internal/schemas/validators.py CHANGED
@@ -40,13 +40,15 @@ LOWERCASE_LETTERS_NUMBERS_AND_UNDERSCORES_REGEX = "^[a-z0-9_]*$"
 
 
 @overload
-def raise_on_name_alphanumeric_dashes_only(value: str, field_name: str = ...) -> str:
-    ...
+def raise_on_name_alphanumeric_dashes_only(
+    value: str, field_name: str = ...
+) -> str: ...
 
 
 @overload
-def raise_on_name_alphanumeric_dashes_only(value: None, field_name: str = ...) -> None:
-    ...
+def raise_on_name_alphanumeric_dashes_only(
+    value: None, field_name: str = ...
+) -> None: ...
 
 
 def raise_on_name_alphanumeric_dashes_only(
@@ -64,15 +66,13 @@ def raise_on_name_alphanumeric_dashes_only(
 @overload
 def raise_on_name_alphanumeric_underscores_only(
     value: str, field_name: str = ...
-) -> str:
-    ...
+) -> str: ...
 
 
 @overload
 def raise_on_name_alphanumeric_underscores_only(
     value: None, field_name: str = ...
-) -> None:
-    ...
+) -> None: ...
 
 
 def raise_on_name_alphanumeric_underscores_only(
@@ -149,13 +149,13 @@ def validate_parameters_conform_to_schema(
 
 
 @overload
-def validate_parameter_openapi_schema(schema: M, values: Mapping[str, Any]) -> M:
-    ...
+def validate_parameter_openapi_schema(schema: M, values: Mapping[str, Any]) -> M: ...
 
 
 @overload
-def validate_parameter_openapi_schema(schema: None, values: Mapping[str, Any]) -> None:
-    ...
+def validate_parameter_openapi_schema(
+    schema: None, values: Mapping[str, Any]
+) -> None: ...
 
 
 def validate_parameter_openapi_schema(
@@ -198,13 +198,11 @@ def reconcile_schedules_runner(values: MM) -> MM:
 
 
 @overload
-def validate_schedule_max_scheduled_runs(v: int, limit: int) -> int:
-    ...
+def validate_schedule_max_scheduled_runs(v: int, limit: int) -> int: ...
 
 
 @overload
-def validate_schedule_max_scheduled_runs(v: None, limit: int) -> None:
-    ...
+def validate_schedule_max_scheduled_runs(v: None, limit: int) -> None: ...
 
 
 def validate_schedule_max_scheduled_runs(v: Optional[int], limit: int) -> Optional[int]:
@@ -260,15 +258,13 @@ def default_anchor_date(v: pendulum.DateTime) -> pendulum.DateTime:
 
 
 @overload
-def default_timezone(v: str, values: Optional[Mapping[str, Any]] = ...) -> str:
-    ...
+def default_timezone(v: str, values: Optional[Mapping[str, Any]] = ...) -> str: ...
 
 
 @overload
 def default_timezone(
     v: None, values: Optional[Mapping[str, Any]] = ...
-) -> Optional[str]:
-    ...
+) -> Optional[str]: ...
 
 
 def default_timezone(
@@ -411,13 +407,11 @@ def validate_load_kwargs(value: M) -> M:
 
 
 @overload
-def cast_type_names_to_serializers(value: str) -> "Serializer[Any]":
-    ...
+def cast_type_names_to_serializers(value: str) -> "Serializer[Any]": ...
 
 
 @overload
-def cast_type_names_to_serializers(value: "Serializer[T]") -> "Serializer[T]":
-    ...
+def cast_type_names_to_serializers(value: "Serializer[T]") -> "Serializer[T]": ...
 
 
 def cast_type_names_to_serializers(
@@ -457,13 +451,11 @@ def validate_compressionlib(value: str) -> str:
 
 # TODO: if we use this elsewhere we can change the error message to be more generic
 @overload
-def list_length_50_or_less(v: list[float]) -> list[float]:
-    ...
+def list_length_50_or_less(v: list[float]) -> list[float]: ...
 
 
 @overload
-def list_length_50_or_less(v: None) -> None:
-    ...
+def list_length_50_or_less(v: None) -> None: ...
 
 
 def list_length_50_or_less(v: Optional[list[float]]) -> Optional[list[float]]:
@@ -474,13 +466,11 @@ def list_length_50_or_less(v: Optional[list[float]]) -> Optional[list[float]]:
 
 # TODO: if we use this elsewhere we can change the error message to be more generic
 @overload
-def validate_not_negative(v: float) -> float:
-    ...
+def validate_not_negative(v: float) -> float: ...
 
 
 @overload
-def validate_not_negative(v: None) -> None:
-    ...
+def validate_not_negative(v: None) -> None: ...
 
 
 def validate_not_negative(v: Optional[float]) -> Optional[float]:
@@ -490,13 +480,11 @@ def validate_not_negative(v: Optional[float]) -> Optional[float]:
 
 
 @overload
-def validate_message_template_variables(v: str) -> str:
-    ...
+def validate_message_template_variables(v: str) -> str: ...
 
 
 @overload
-def validate_message_template_variables(v: None) -> None:
-    ...
+def validate_message_template_variables(v: None) -> None: ...
 
 
 def validate_message_template_variables(v: Optional[str]) -> Optional[str]:
@@ -521,13 +509,11 @@ def validate_default_queue_id_not_none(v: Optional[UUID]) -> UUID:
 
 
 @overload
-def validate_max_metadata_length(v: MM) -> MM:
-    ...
+def validate_max_metadata_length(v: MM) -> MM: ...
 
 
 @overload
-def validate_max_metadata_length(v: None) -> None:
-    ...
+def validate_max_metadata_length(v: None) -> None: ...
 
 
 def validate_max_metadata_length(v: Optional[MM]) -> Optional[MM]:
@@ -544,13 +530,11 @@ def validate_max_metadata_length(v: Optional[MM]) -> Optional[MM]:
 
 
 @overload
-def validate_cache_key_length(cache_key: str) -> str:
-    ...
+def validate_cache_key_length(cache_key: str) -> str: ...
 
 
 @overload
-def validate_cache_key_length(cache_key: None) -> None:
-    ...
+def validate_cache_key_length(cache_key: None) -> None: ...
 
 
 def validate_cache_key_length(cache_key: Optional[str]) -> Optional[str]:
@@ -587,13 +571,11 @@ def set_run_policy_deprecated_fields(values: MM) -> MM:
 
 
 @overload
-def return_v_or_none(v: str) -> str:
-    ...
+def return_v_or_none(v: str) -> str: ...
 
 
 @overload
-def return_v_or_none(v: None) -> None:
-    ...
+def return_v_or_none(v: None) -> None: ...
 
 
 def return_v_or_none(v: Optional[str]) -> Optional[str]:
@@ -629,13 +611,11 @@ def validate_name_present_on_nonanonymous_blocks(values: M) -> M:
 
 
 @overload
-def validate_working_dir(v: str) -> Path:
-    ...
+def validate_working_dir(v: str) -> Path: ...
 
 
 @overload
-def validate_working_dir(v: None) -> None:
-    ...
+def validate_working_dir(v: None) -> None: ...
 
 
 def validate_working_dir(v: Optional[Path | str]) -> Optional[Path]:
@@ -652,13 +632,11 @@ def validate_working_dir(v: Optional[Path | str]) -> Optional[Path]:
 
 
 @overload
-def validate_block_document_name(value: str) -> str:
-    ...
+def validate_block_document_name(value: str) -> str: ...
 
 
 @overload
-def validate_block_document_name(value: None) -> None:
-    ...
+def validate_block_document_name(value: None) -> None: ...
 
 
 def validate_block_document_name(value: Optional[str]) -> Optional[str]:
@@ -673,13 +651,11 @@ def validate_artifact_key(value: str) -> str:
 
 
 @overload
-def validate_variable_name(value: str) -> str:
-    ...
+def validate_variable_name(value: str) -> str: ...
 
 
 @overload
-def validate_variable_name(value: None) -> None:
-    ...
+def validate_variable_name(value: None) -> None: ...
 
 
 def validate_variable_name(value: Optional[str]) -> Optional[str]:
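Every hunk in this file is the same mechanical reformatting: the `...` body of each `@overload` stub moves onto the signature line, the compact style used by newer Black/ruff releases. The two spellings are equivalent to a type checker; a generic illustration of the pattern, not Prefect code:

```python
from typing import Optional, overload


@overload
def double(v: int) -> int: ...


@overload
def double(v: None) -> None: ...


def double(v: Optional[int]) -> Optional[int]:
    # The single runtime implementation backing both overload signatures.
    return None if v is None else v * 2
```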
prefect/_result_records.py ADDED
@@ -0,0 +1,235 @@
+from __future__ import annotations
+
+import inspect
+import uuid
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Generic,
+    Optional,
+    TypeVar,
+    Union,
+)
+from uuid import UUID
+
+from pydantic import (
+    BaseModel,
+    Field,
+    ValidationError,
+    model_validator,
+)
+
+import prefect
+from prefect.exceptions import (
+    SerializationError,
+)
+from prefect.serializers import PickleSerializer, Serializer
+from prefect.types import DateTime
+
+if TYPE_CHECKING:
+    pass
+
+
+ResultSerializer = Union[Serializer, str]
+LITERAL_TYPES: set[type] = {type(None), bool, UUID}
+R = TypeVar("R")
+
+
+class ResultRecordMetadata(BaseModel):
+    """
+    Metadata for a result record.
+    """
+
+    storage_key: Optional[str] = Field(
+        default=None
+    )  # optional for backwards compatibility
+    expiration: Optional[DateTime] = Field(default=None)
+    serializer: Serializer = Field(default_factory=PickleSerializer)
+    prefect_version: str = Field(default=prefect.__version__)
+    storage_block_id: Optional[uuid.UUID] = Field(default=None)
+
+    def dump_bytes(self) -> bytes:
+        """
+        Serialize the metadata to bytes.
+
+        Returns:
+            bytes: the serialized metadata
+        """
+        return self.model_dump_json(serialize_as_any=True).encode()
+
+    @classmethod
+    def load_bytes(cls, data: bytes) -> "ResultRecordMetadata":
+        """
+        Deserialize metadata from bytes.
+
+        Args:
+            data: the serialized metadata
+
+        Returns:
+            ResultRecordMetadata: the deserialized metadata
+        """
+        return cls.model_validate_json(data)
+
+    def __eq__(self, other: Any) -> bool:
+        if not isinstance(other, ResultRecordMetadata):
+            return False
+        return (
+            self.storage_key == other.storage_key
+            and self.expiration == other.expiration
+            and self.serializer == other.serializer
+            and self.prefect_version == other.prefect_version
+            and self.storage_block_id == other.storage_block_id
+        )
+
+
+class ResultRecord(BaseModel, Generic[R]):
+    """
+    A record of a result.
+    """
+
+    metadata: ResultRecordMetadata
+    result: R
+
+    @property
+    def expiration(self) -> DateTime | None:
+        return self.metadata.expiration
+
+    @property
+    def serializer(self) -> Serializer:
+        return self.metadata.serializer
+
+    def serialize_result(self) -> bytes:
+        try:
+            data = self.serializer.dumps(self.result)
+        except Exception as exc:
+            extra_info = (
+                'You can try a different serializer (e.g. result_serializer="json") '
+                "or disabling persistence (persist_result=False) for this flow or task."
+            )
+            # check if this is a known issue with cloudpickle and pydantic
+            # and add extra information to help the user recover
+
+            if (
+                isinstance(exc, TypeError)
+                and isinstance(self.result, BaseModel)
+                and str(exc).startswith("cannot pickle")
+            ):
+                try:
+                    from IPython.core.getipython import get_ipython
+
+                    if get_ipython() is not None:
+                        extra_info = inspect.cleandoc(
+                            """
+                            This is a known issue in Pydantic that prevents
+                            locally-defined (non-imported) models from being
+                            serialized by cloudpickle in IPython/Jupyter
+                            environments. Please see
+                            https://github.com/pydantic/pydantic/issues/8232 for
+                            more information. To fix the issue, either: (1) move
+                            your Pydantic class definition to an importable
+                            location, (2) use the JSON serializer for your flow
+                            or task (`result_serializer="json"`), or (3)
+                            disable result persistence for your flow or task
+                            (`persist_result=False`).
+                            """
+                        ).replace("\n", " ")
+                except ImportError:
+                    pass
+            raise SerializationError(
+                f"Failed to serialize object of type {type(self.result).__name__!r} with "
+                f"serializer {self.serializer.type!r}. {extra_info}"
+            ) from exc
+
+        return data
+
+    @model_validator(mode="before")
+    @classmethod
+    def coerce_old_format(cls, value: dict[str, Any] | Any) -> dict[str, Any]:
+        if isinstance(value, dict):
+            if "data" in value:
+                value["result"] = value.pop("data")
+            if "metadata" not in value:
+                value["metadata"] = {}
+            if "expiration" in value:
+                value["metadata"]["expiration"] = value.pop("expiration")
+            if "serializer" in value:
+                value["metadata"]["serializer"] = value.pop("serializer")
+            if "prefect_version" in value:
+                value["metadata"]["prefect_version"] = value.pop("prefect_version")
+        return value
+
+    def serialize_metadata(self) -> bytes:
+        return self.metadata.dump_bytes()
+
+    def serialize(
+        self,
+    ) -> bytes:
+        """
+        Serialize the record to bytes.
+
+        Returns:
+            bytes: the serialized record
+
+        """
+        return (
+            self.model_copy(update={"result": self.serialize_result()})
+            .model_dump_json(serialize_as_any=True)
+            .encode()
+        )
+
+    @classmethod
+    def deserialize(
+        cls, data: bytes, backup_serializer: Serializer | None = None
+    ) -> "ResultRecord[R]":
+        """
+        Deserialize a record from bytes.
+
+        Args:
+            data: the serialized record
+            backup_serializer: The serializer to use to deserialize the result record. Only
+                necessary if the provided data does not specify a serializer.
+
+        Returns:
+            ResultRecord: the deserialized record
+        """
+        try:
+            instance = cls.model_validate_json(data)
+        except ValidationError:
+            if backup_serializer is None:
+                raise
+            else:
+                result = backup_serializer.loads(data)
+                return cls(
+                    metadata=ResultRecordMetadata(serializer=backup_serializer),
+                    result=result,
+                )
+        if isinstance(instance.result, bytes):
+            instance.result = instance.serializer.loads(instance.result)
+        elif isinstance(instance.result, str):
+            instance.result = instance.serializer.loads(instance.result.encode())
+        return instance
+
+    @classmethod
+    def deserialize_from_result_and_metadata(
+        cls, result: bytes, metadata: bytes
+    ) -> "ResultRecord[R]":
+        """
+        Deserialize a record from separate result and metadata bytes.
+
+        Args:
+            result: the result
+            metadata: the serialized metadata
+
+        Returns:
+            ResultRecord: the deserialized record
+        """
+        result_record_metadata = ResultRecordMetadata.load_bytes(metadata)
+        return cls(
+            metadata=result_record_metadata,
+            result=result_record_metadata.serializer.loads(result),
+        )
+
+    def __eq__(self, other: Any | "ResultRecord[Any]") -> bool:
+        if not isinstance(other, ResultRecord):
+            return False
+        return self.metadata == other.metadata and self.result == other.result
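This new `prefect._result_records` module pulls `ResultRecord` and `ResultRecordMetadata` out into their own file (note `prefect/results.py` shrinking by 249 lines in the file list above). A minimal round-trip sketch using only the methods defined in the hunk; the choice of `JSONSerializer` is just for illustration:

```python
from prefect._result_records import ResultRecord, ResultRecordMetadata
from prefect.serializers import JSONSerializer

metadata = ResultRecordMetadata(storage_key="demo-key", serializer=JSONSerializer())
record = ResultRecord(metadata=metadata, result={"answer": 42})

# serialize() embeds the serialized result next to its metadata...
payload = record.serialize()

# ...and deserialize() restores it, running the stored serializer over the
# embedded result before handing the record back.
restored = ResultRecord.deserialize(payload)
assert restored == record
assert restored.result == {"answer": 42}
```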
prefect/_version.py CHANGED
@@ -8,11 +8,11 @@ import json
 
 version_json = '''
 {
- "date": "2025-01-30T11:31:29-0800",
+ "date": "2025-02-07T18:02:21-0800",
  "dirty": true,
  "error": null,
- "full-revisionid": "3ac3d54885a6157989efd79cbfc0d681b4bb7e0c",
- "version": "3.1.15"
+ "full-revisionid": "c8986edebb2dde3e2a931adbe24d2eaefcb799cb",
+ "version": "3.2.0"
 }
 ''' # END VERSION_JSON
 
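The version bump itself lives in the versioneer-style `version_json` blob, which the module parses with `json.loads`. A quick sanity check of the new metadata:

```python
import json

from prefect._version import version_json

info = json.loads(version_json)
print(info["version"])          # 3.2.0
print(info["full-revisionid"])  # c8986edebb2dde3e2a931adbe24d2eaefcb799cb
```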
prefect/agent.py CHANGED
@@ -1,6 +1,7 @@
 """
 2024-06-27: This surfaces an actionable error message for moved or removed objects in Prefect 3.0 upgrade.
 """
+
 from typing import Any, Callable
 
 from prefect._internal.compatibility.migration import getattr_migration