prefect-client 3.1.10__py3-none-any.whl → 3.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. prefect/_experimental/lineage.py +7 -8
  2. prefect/_experimental/sla/__init__.py +0 -0
  3. prefect/_experimental/sla/client.py +66 -0
  4. prefect/_experimental/sla/objects.py +53 -0
  5. prefect/_internal/_logging.py +15 -3
  6. prefect/_internal/compatibility/async_dispatch.py +22 -16
  7. prefect/_internal/compatibility/deprecated.py +42 -18
  8. prefect/_internal/compatibility/migration.py +2 -2
  9. prefect/_internal/concurrency/inspection.py +12 -14
  10. prefect/_internal/concurrency/primitives.py +2 -2
  11. prefect/_internal/concurrency/services.py +154 -80
  12. prefect/_internal/concurrency/waiters.py +13 -9
  13. prefect/_internal/pydantic/annotations/pendulum.py +7 -7
  14. prefect/_internal/pytz.py +4 -3
  15. prefect/_internal/retries.py +10 -5
  16. prefect/_internal/schemas/bases.py +19 -10
  17. prefect/_internal/schemas/validators.py +227 -388
  18. prefect/_version.py +3 -3
  19. prefect/automations.py +236 -30
  20. prefect/blocks/__init__.py +3 -3
  21. prefect/blocks/abstract.py +53 -30
  22. prefect/blocks/core.py +183 -84
  23. prefect/blocks/notifications.py +133 -73
  24. prefect/blocks/redis.py +13 -9
  25. prefect/blocks/system.py +24 -11
  26. prefect/blocks/webhook.py +7 -5
  27. prefect/cache_policies.py +3 -2
  28. prefect/client/orchestration/__init__.py +1957 -0
  29. prefect/client/orchestration/_artifacts/__init__.py +0 -0
  30. prefect/client/orchestration/_artifacts/client.py +239 -0
  31. prefect/client/orchestration/_automations/__init__.py +0 -0
  32. prefect/client/orchestration/_automations/client.py +329 -0
  33. prefect/client/orchestration/_blocks_documents/__init__.py +0 -0
  34. prefect/client/orchestration/_blocks_documents/client.py +334 -0
  35. prefect/client/orchestration/_blocks_schemas/__init__.py +0 -0
  36. prefect/client/orchestration/_blocks_schemas/client.py +200 -0
  37. prefect/client/orchestration/_blocks_types/__init__.py +0 -0
  38. prefect/client/orchestration/_blocks_types/client.py +380 -0
  39. prefect/client/orchestration/_concurrency_limits/__init__.py +0 -0
  40. prefect/client/orchestration/_concurrency_limits/client.py +762 -0
  41. prefect/client/orchestration/_deployments/__init__.py +0 -0
  42. prefect/client/orchestration/_deployments/client.py +1128 -0
  43. prefect/client/orchestration/_flow_runs/__init__.py +0 -0
  44. prefect/client/orchestration/_flow_runs/client.py +903 -0
  45. prefect/client/orchestration/_flows/__init__.py +0 -0
  46. prefect/client/orchestration/_flows/client.py +343 -0
  47. prefect/client/orchestration/_logs/__init__.py +0 -0
  48. prefect/client/orchestration/_logs/client.py +97 -0
  49. prefect/client/orchestration/_variables/__init__.py +0 -0
  50. prefect/client/orchestration/_variables/client.py +157 -0
  51. prefect/client/orchestration/base.py +46 -0
  52. prefect/client/orchestration/routes.py +145 -0
  53. prefect/client/schemas/__init__.py +68 -28
  54. prefect/client/schemas/actions.py +2 -2
  55. prefect/client/schemas/filters.py +5 -0
  56. prefect/client/schemas/objects.py +8 -15
  57. prefect/client/schemas/schedules.py +22 -10
  58. prefect/concurrency/_asyncio.py +87 -0
  59. prefect/concurrency/{events.py → _events.py} +10 -10
  60. prefect/concurrency/asyncio.py +20 -104
  61. prefect/concurrency/context.py +6 -4
  62. prefect/concurrency/services.py +26 -74
  63. prefect/concurrency/sync.py +23 -44
  64. prefect/concurrency/v1/_asyncio.py +63 -0
  65. prefect/concurrency/v1/{events.py → _events.py} +13 -15
  66. prefect/concurrency/v1/asyncio.py +27 -80
  67. prefect/concurrency/v1/context.py +6 -4
  68. prefect/concurrency/v1/services.py +33 -79
  69. prefect/concurrency/v1/sync.py +18 -37
  70. prefect/context.py +66 -45
  71. prefect/deployments/base.py +10 -144
  72. prefect/deployments/flow_runs.py +12 -2
  73. prefect/deployments/runner.py +53 -4
  74. prefect/deployments/steps/pull.py +13 -0
  75. prefect/engine.py +17 -4
  76. prefect/events/clients.py +7 -1
  77. prefect/events/schemas/events.py +3 -2
  78. prefect/filesystems.py +6 -2
  79. prefect/flow_engine.py +101 -85
  80. prefect/flows.py +10 -1
  81. prefect/input/run_input.py +2 -1
  82. prefect/logging/logging.yml +1 -1
  83. prefect/main.py +1 -3
  84. prefect/results.py +2 -307
  85. prefect/runner/runner.py +4 -2
  86. prefect/runner/storage.py +87 -21
  87. prefect/serializers.py +32 -25
  88. prefect/settings/legacy.py +4 -4
  89. prefect/settings/models/api.py +3 -3
  90. prefect/settings/models/cli.py +3 -3
  91. prefect/settings/models/client.py +5 -3
  92. prefect/settings/models/cloud.py +8 -3
  93. prefect/settings/models/deployments.py +3 -3
  94. prefect/settings/models/experiments.py +4 -7
  95. prefect/settings/models/flows.py +3 -3
  96. prefect/settings/models/internal.py +4 -2
  97. prefect/settings/models/logging.py +4 -3
  98. prefect/settings/models/results.py +3 -3
  99. prefect/settings/models/root.py +3 -2
  100. prefect/settings/models/runner.py +4 -4
  101. prefect/settings/models/server/api.py +3 -3
  102. prefect/settings/models/server/database.py +11 -4
  103. prefect/settings/models/server/deployments.py +6 -2
  104. prefect/settings/models/server/ephemeral.py +4 -2
  105. prefect/settings/models/server/events.py +3 -2
  106. prefect/settings/models/server/flow_run_graph.py +6 -2
  107. prefect/settings/models/server/root.py +3 -3
  108. prefect/settings/models/server/services.py +26 -11
  109. prefect/settings/models/server/tasks.py +6 -3
  110. prefect/settings/models/server/ui.py +3 -3
  111. prefect/settings/models/tasks.py +5 -5
  112. prefect/settings/models/testing.py +3 -3
  113. prefect/settings/models/worker.py +5 -3
  114. prefect/settings/profiles.py +15 -2
  115. prefect/states.py +61 -45
  116. prefect/task_engine.py +54 -75
  117. prefect/task_runners.py +56 -55
  118. prefect/task_worker.py +2 -2
  119. prefect/tasks.py +90 -36
  120. prefect/telemetry/bootstrap.py +10 -9
  121. prefect/telemetry/run_telemetry.py +13 -8
  122. prefect/telemetry/services.py +4 -0
  123. prefect/transactions.py +4 -15
  124. prefect/utilities/_git.py +34 -0
  125. prefect/utilities/asyncutils.py +1 -1
  126. prefect/utilities/engine.py +3 -19
  127. prefect/utilities/generics.py +18 -0
  128. prefect/utilities/templating.py +25 -1
  129. prefect/workers/base.py +6 -3
  130. prefect/workers/process.py +1 -1
  131. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/METADATA +2 -2
  132. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/RECORD +135 -109
  133. prefect/client/orchestration.py +0 -4523
  134. prefect/records/__init__.py +0 -1
  135. prefect/records/base.py +0 -235
  136. prefect/records/filesystem.py +0 -213
  137. prefect/records/memory.py +0 -184
  138. prefect/records/result_store.py +0 -70
  139. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/LICENSE +0 -0
  140. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/WHEEL +0 -0
  141. {prefect_client-3.1.10.dist-info → prefect_client-3.1.12.dist-info}/top_level.txt +0 -0
prefect/tasks.py CHANGED
@@ -30,7 +30,7 @@ from uuid import UUID, uuid4
  from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypeIs

  import prefect.states
- from prefect.cache_policies import DEFAULT, NONE, CachePolicy
+ from prefect.cache_policies import DEFAULT, NO_CACHE, CachePolicy
  from prefect.client.orchestration import get_client
  from prefect.client.schemas import TaskRun
  from prefect.client.schemas.objects import (
@@ -74,7 +74,7 @@ if TYPE_CHECKING:
  from prefect.context import TaskRunContext
  from prefect.transactions import Transaction

- T = TypeVar("T") # Generic type var for capturing the inner return type of async funcs
+ T = TypeVar("T")
  R = TypeVar("R")  # The return type of the user's function
  P = ParamSpec("P")  # The parameters of the task

@@ -82,6 +82,11 @@ NUM_CHARS_DYNAMIC_KEY = 8

  logger = get_logger("tasks")

+ FutureOrResult: TypeAlias = Union[PrefectFuture[T], T]
+ OneOrManyFutureOrResult: TypeAlias = Union[
+     FutureOrResult[T], Iterable[FutureOrResult[T]]
+ ]
+

  def task_input_hash(
      context: "TaskRunContext", arguments: dict[str, Any]
@@ -436,7 +441,9 @@ class Task(Generic[P, R]):
          if persist_result is None:
              if any(
                  [
-                     cache_policy and cache_policy != NONE and cache_policy != NotSet,
+                     cache_policy
+                     and cache_policy != NO_CACHE
+                     and cache_policy != NotSet,
                      cache_key_fn is not None,
                      result_storage_key is not None,
                      result_storage is not None,
@@ -446,8 +453,8 @@ class Task(Generic[P, R]):
                  persist_result = True

          if persist_result is False:
-             self.cache_policy = None if cache_policy is None else NONE
-             if cache_policy and cache_policy is not NotSet and cache_policy != NONE:
+             self.cache_policy = None if cache_policy is None else NO_CACHE
+             if cache_policy and cache_policy is not NotSet and cache_policy != NO_CACHE:
                  logger.warning(
                      "Ignoring `cache_policy` because `persist_result` is False"
                  )
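
The two hunks above track the rename of the built-in cache policy NONE to NO_CACHE. A minimal sketch of the corresponding user-facing change, assuming only the import path shown in this diff (the task itself is hypothetical):

from prefect import task
from prefect.cache_policies import NO_CACHE

@task(cache_policy=NO_CACHE)  # was cache_policy=NONE in 3.1.10
def fetch_payload(url: str) -> str:
    # With caching disabled, no cache key is computed for this task and the
    # cache configuration no longer forces `persist_result` on.
    return f"payload from {url}"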
@@ -737,7 +744,7 @@ class Task(Generic[P, R]):
          parameters: Optional[dict[str, Any]] = None,
          flow_run_context: Optional[FlowRunContext] = None,
          parent_task_run_context: Optional[TaskRunContext] = None,
-         wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          extra_task_inputs: Optional[dict[str, set[TaskRunInput]]] = None,
          deferred: bool = False,
      ) -> TaskRun:
@@ -838,7 +845,7 @@ class Task(Generic[P, R]):
          parameters: Optional[dict[str, Any]] = None,
          flow_run_context: Optional[FlowRunContext] = None,
          parent_task_run_context: Optional[TaskRunContext] = None,
-         wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          extra_task_inputs: Optional[dict[str, set[TaskRunInput]]] = None,
          deferred: bool = False,
      ) -> TaskRun:
@@ -952,6 +959,8 @@ class Task(Generic[P, R]):
      def __call__(
          self: "Task[P, NoReturn]",
          *args: P.args,
+         return_state: Literal[False],
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
      ) -> None:
          # `NoReturn` matches if a type can't be inferred for the function which stops a
@@ -960,28 +969,41 @@ class Task(Generic[P, R]):

      @overload
      def __call__(
-         self: "Task[P, T]",
+         self: "Task[P, R]",
          *args: P.args,
+         return_state: Literal[True],
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
-     ) -> T:
+     ) -> State[R]:
          ...

      @overload
      def __call__(
-         self: "Task[P, T]",
+         self: "Task[P, R]",
          *args: P.args,
-         return_state: Literal[True],
+         return_state: Literal[False],
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
-     ) -> State[T]:
+     ) -> R:
          ...

+     @overload
      def __call__(
-         self,
+         self: "Task[P, R]",
+         *args: P.args,
+         return_state: Literal[False] = False,
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
+         **kwargs: P.kwargs,
+     ) -> R:
+         ...
+
+     def __call__(
+         self: "Union[Task[P, R], Task[P, NoReturn]]",
          *args: P.args,
          return_state: bool = False,
-         wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
-     ):
+     ) -> Union[R, State[R], None]:
          """
          Run the task and return the result. If `return_state` is True returns
          the result is wrapped in a Prefect State which provides error handling.
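
The reworked overloads above type a plain call as R and a call with return_state=True as State[R]. A short illustrative sketch with a hypothetical task (not taken from the diff):

from prefect import flow, task

@task
def add(x: int, y: int) -> int:
    return x + y

@flow
def demo() -> None:
    value = add(1, 2)                     # checked as int
    state = add(1, 2, return_state=True)  # checked as State[int]
    print(value, state.is_completed())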
@@ -1013,53 +1035,57 @@ class Task(Generic[P, R]):

      @overload
      def submit(
-         self: "Task[P, NoReturn]",
+         self: "Task[P, R]",
          *args: P.args,
          **kwargs: P.kwargs,
-     ) -> PrefectFuture[NoReturn]:
-         # `NoReturn` matches if a type can't be inferred for the function which stops a
-         # sync function from matching the `Coroutine` overload
+     ) -> PrefectFuture[R]:
          ...

      @overload
      def submit(
-         self: "Task[P, Coroutine[Any, Any, T]]",
+         self: "Task[P, Coroutine[Any, Any, R]]",
          *args: P.args,
+         return_state: Literal[False],
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
-     ) -> PrefectFuture[T]:
+     ) -> PrefectFuture[R]:
          ...

      @overload
      def submit(
-         self: "Task[P, T]",
+         self: "Task[P, R]",
          *args: P.args,
+         return_state: Literal[False],
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
-     ) -> PrefectFuture[T]:
+     ) -> PrefectFuture[R]:
          ...

      @overload
      def submit(
-         self: "Task[P, Coroutine[Any, Any, T]]",
+         self: "Task[P, Coroutine[Any, Any, R]]",
          *args: P.args,
          return_state: Literal[True],
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
-     ) -> State[T]:
+     ) -> State[R]:
          ...

      @overload
      def submit(
-         self: "Task[P, T]",
+         self: "Task[P, R]",
          *args: P.args,
          return_state: Literal[True],
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: P.kwargs,
-     ) -> State[T]:
+     ) -> State[R]:
          ...

      def submit(
-         self,
+         self: "Union[Task[P, R], Task[P, Coroutine[Any, Any, R]]]",
          *args: Any,
          return_state: bool = False,
-         wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
+         wait_for: Optional[OneOrManyFutureOrResult[Any]] = None,
          **kwargs: Any,
      ):
          """
@@ -1584,11 +1610,39 @@ def task(__fn: Callable[P, R]) -> Task[P, R]:
  @overload
  def task(
      __fn: Literal[None] = None,
+     *,
+     name: Optional[str] = None,
+     description: Optional[str] = None,
+     tags: Optional[Iterable[str]] = None,
+     version: Optional[str] = None,
+     cache_policy: Union[CachePolicy, type[NotSet]] = NotSet,
+     cache_key_fn: Optional[
+         Callable[["TaskRunContext", dict[str, Any]], Optional[str]]
+     ] = None,
+     cache_expiration: Optional[datetime.timedelta] = None,
+     task_run_name: Optional[TaskRunNameValueOrCallable] = None,
+     retries: int = 0,
+     retry_delay_seconds: Union[
+         float, int, list[float], Callable[[int], list[float]], None
+     ] = None,
+     retry_jitter_factor: Optional[float] = None,
+     persist_result: Optional[bool] = None,
+     result_storage: Optional[ResultStorage] = None,
+     result_storage_key: Optional[str] = None,
+     result_serializer: Optional[ResultSerializer] = None,
+     cache_result_in_memory: bool = True,
+     timeout_seconds: Union[int, float, None] = None,
+     log_prints: Optional[bool] = None,
+     refresh_cache: Optional[bool] = None,
+     on_completion: Optional[list[StateHookCallable]] = None,
+     on_failure: Optional[list[StateHookCallable]] = None,
+     retry_condition_fn: Optional[Callable[[Task[P, Any], TaskRun, State], bool]] = None,
+     viz_return_value: Any = None,
  ) -> Callable[[Callable[P, R]], Task[P, R]]:
      ...


- @overload
+ @overload # TODO: do we need this overload?
  def task(
      *,
      name: Optional[str] = None,
@@ -1619,7 +1673,7 @@ def task(
      refresh_cache: Optional[bool] = None,
      on_completion: Optional[list[StateHookCallable]] = None,
      on_failure: Optional[list[StateHookCallable]] = None,
-     retry_condition_fn: Optional[Callable[["Task[P, R]", TaskRun, State], bool]] = None,
+     retry_condition_fn: Optional[Callable[[Task[P, Any], TaskRun, State], bool]] = None,
      viz_return_value: Any = None,
  ) -> Callable[[Callable[P, R]], Task[P, R]]:
      ...
@@ -1653,7 +1707,7 @@ def task(
      refresh_cache: Optional[bool] = None,
      on_completion: Optional[list[StateHookCallable]] = None,
      on_failure: Optional[list[StateHookCallable]] = None,
-     retry_condition_fn: Optional[Callable[["Task[P, R]", TaskRun, State], bool]] = None,
+     retry_condition_fn: Optional[Callable[[Task[P, Any], TaskRun, State], bool]] = None,
      viz_return_value: Any = None,
  ):
      """
@@ -1685,10 +1739,10 @@ def task(
              callable that, given the total number of retries, generates a list of retry
              delays. If a number of seconds, that delay will be applied to all retries.
              If a list, each retry will wait for the corresponding delay before retrying.
-             When passing a callable or a list, the number of configured retry delays
-             cannot exceed 50.
-         retry_jitter_factor: An optional factor that defines the factor to which a retry
-             can be jittered in order to avoid a "thundering herd".
+             When passing a callable or a list, the number of
+             configured retry delays cannot exceed 50.
+         retry_jitter_factor: An optional factor that defines the factor to which a
+             retry can be jittered in order to avoid a "thundering herd".
          persist_result: A toggle indicating whether the result of this task
              should be persisted to result storage. Defaults to `None`, which
              indicates that the global default should be used (which is `True` by
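
For reference, a short sketch of the retry parameters documented above (hypothetical task; the values are illustrative only):

from prefect import task

@task(
    retries=3,
    # A number, a list of numbers (at most 50 entries), or a callable is accepted.
    retry_delay_seconds=[1, 10, 100],
    # Each configured delay may be jittered by up to 50% to avoid a thundering herd.
    retry_jitter_factor=0.5,
)
def flaky_call() -> str:
    raise RuntimeError("transient failure")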
prefect/telemetry/bootstrap.py CHANGED
@@ -2,6 +2,9 @@ from typing import TYPE_CHECKING, Union

  import prefect.settings
  from prefect.client.base import ServerType, determine_server_type
+ from prefect.logging.loggers import get_logger
+
+ logger = get_logger(__name__)

  if TYPE_CHECKING:
      from opentelemetry.sdk._logs import LoggerProvider
@@ -16,30 +19,28 @@ def setup_telemetry() -> (
      ]
  ):
      settings = prefect.settings.get_current_settings()
-     if not settings.experiments.telemetry_enabled:
-         return None, None, None

      server_type = determine_server_type()
      if server_type != ServerType.CLOUD:
          return None, None, None

+     if not settings.cloud.enable_orchestration_telemetry:
+         return None, None, None
+
      if not settings.api.key:
-         raise ValueError(
+         logger.warning(
              "A Prefect Cloud API key is required to enable telemetry. Please set "
              "the `PREFECT_API_KEY` environment variable or authenticate with "
              "Prefect Cloud via the `prefect cloud login` command."
          )
+         return None, None, None

      assert settings.api.url

      # This import is here to defer importing of the `opentelemetry` packages.
      try:
          from .instrumentation import setup_exporters
-     except ImportError as exc:
-         raise ValueError(
-             "Unable to import OpenTelemetry instrumentation libraries. Please "
-             "ensure you have installed the `otel` extra when installing Prefect: "
-             "`pip install 'prefect[otel]'`"
-         ) from exc
+     except ImportError:
+         return None, None, None

      return setup_exporters(settings.api.url, settings.api.key.get_secret_value())
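
With this change setup_telemetry() degrades gracefully: when telemetry cannot be enabled (non-Cloud API, telemetry disabled, missing API key, or a missing otel extra) it logs at most a warning and returns None values instead of raising. A minimal calling sketch, assuming the three optional providers implied by the signature above:

from prefect.telemetry.bootstrap import setup_telemetry

# Each element is an OpenTelemetry provider, or None when telemetry is not enabled.
trace_provider, meter_provider, logger_provider = setup_telemetry()
if trace_provider is None:
    print("Telemetry is not enabled; continuing without instrumentation.")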
prefect/telemetry/run_telemetry.py CHANGED
@@ -53,10 +53,9 @@ class RunTelemetry:
          self,
          run: FlowOrTaskRun,
          client: PrefectClient,
-         name: Optional[str] = None,
          parameters: Optional[dict[str, Any]] = None,
      ):
-         traceparent, span = self._start_span(run, name, parameters)
+         traceparent, span = self._start_span(run, parameters)

          if self._run_type(run) == "flow" and traceparent:
              # Only explicitly update labels if the run is a flow as task runs
@@ -71,10 +70,9 @@ class RunTelemetry:
          self,
          run: FlowOrTaskRun,
          client: SyncPrefectClient,
-         name: Optional[str] = None,
          parameters: Optional[dict[str, Any]] = None,
      ):
-         traceparent, span = self._start_span(run, name, parameters)
+         traceparent, span = self._start_span(run, parameters)

          if self._run_type(run) == "flow" and traceparent:
              # Only explicitly update labels if the run is a flow as task runs
@@ -86,7 +84,6 @@ class RunTelemetry:
      def _start_span(
          self,
          run: FlowOrTaskRun,
-         name: Optional[str] = None,
          parameters: Optional[dict[str, Any]] = None,
      ) -> tuple[Optional[str], Span]:
          """
@@ -117,10 +114,10 @@ class RunTelemetry:
          run_type = self._run_type(run)

          self.span = self._tracer.start_span(
-             name=name or run.name,
+             name=run.name,
              context=context,
              attributes={
-                 "prefect.run.name": name or run.name,
+                 "prefect.run.name": run.name,
                  "prefect.run.type": run_type,
                  "prefect.run.id": str(run.id),
                  "prefect.tags": run.tags,
@@ -152,7 +149,7 @@ class RunTelemetry:
          return propagate.extract(carrier)

      def _traceparent_from_span(self, span: Span) -> Optional[str]:
-         carrier = {}
+         carrier: dict[str, Any] = {}
          propagate.inject(carrier, context=trace.set_span_in_context(span))
          return carrier.get(TRACEPARENT_KEY)

@@ -198,6 +195,14 @@ class RunTelemetry:
              },
          )

+     def update_run_name(self, name: str) -> None:
+         """
+         Update the name of the run.
+         """
+         if self.span:
+             self.span.update_name(name=name)
+             self.span.set_attribute("prefect.run.name", name)
+
      def _parent_run(self) -> Union[FlowOrTaskRun, None]:
          """
          Identify the "parent run" for the current execution context.
prefect/telemetry/services.py CHANGED
@@ -53,6 +53,8 @@ class QueueingSpanExporter(BaseQueueingExporter[ReadableSpan], SpanExporter):

      def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
          for item in spans:
+             if self._stopped:
+                 break
              self.send(item)
          return SpanExportResult.SUCCESS

@@ -65,4 +67,6 @@ class QueueingLogExporter(BaseQueueingExporter[LogData], LogExporter):

      def export(self, batch: Sequence[LogData]) -> None:
          for item in batch:
+             if self._stopped:
+                 break
              self.send(item)
prefect/transactions.py CHANGED
@@ -24,10 +24,7 @@ from prefect.exceptions import (
      SerializationError,
  )
  from prefect.logging.loggers import LoggingAdapter, get_logger, get_run_logger
- from prefect.records import RecordStore
- from prefect.records.base import TransactionRecord
  from prefect.results import (
-     BaseResult,
      ResultRecord,
      ResultStore,
      get_result_store,
@@ -61,7 +58,7 @@ class Transaction(ContextModel):
      A base model for transaction state.
      """

-     store: Union[RecordStore, ResultStore, None] = None
+     store: Optional[ResultStore] = None
      key: Optional[str] = None
      children: List["Transaction"] = Field(default_factory=list)
      commit_mode: Optional[CommitMode] = None
@@ -254,15 +251,11 @@ class Transaction(ContextModel):
      ):
          self.state = TransactionState.COMMITTED

-     def read(self) -> Union["BaseResult[Any]", ResultRecord[Any], None]:
+     def read(self) -> Optional[ResultRecord[Any]]:
          if self.store and self.key:
              record = self.store.read(key=self.key)
              if isinstance(record, ResultRecord):
                  return record
-             # for backwards compatibility, if we encounter a transaction record, return the result
-             # This happens when the transaction is using a `ResultStore`
-             if isinstance(record, TransactionRecord):
-                 return record.result
          return None

      def reset(self) -> None:
@@ -315,11 +308,7 @@ class Transaction(ContextModel):

          if self.store and self.key and self.write_on_commit:
              if isinstance(self.store, ResultStore):
-                 if isinstance(self._staged_value, BaseResult):
-                     self.store.write(
-                         key=self.key, obj=self._staged_value.get(_sync=True)
-                     )
-                 elif isinstance(self._staged_value, ResultRecord):
+                 if isinstance(self._staged_value, ResultRecord):
                      self.store.persist_result_record(
                          result_record=self._staged_value
                      )
@@ -436,7 +425,7 @@ def get_transaction() -> Optional[Transaction]:
  @contextmanager
  def transaction(
      key: Optional[str] = None,
-     store: Union[RecordStore, ResultStore, None] = None,
+     store: Optional[ResultStore] = None,
      commit_mode: Optional[CommitMode] = None,
      isolation_level: Optional[IsolationLevel] = None,
      overwrite: bool = False,
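
A minimal sketch of the narrowed transaction API: store now accepts only a ResultStore (the RecordStore code path was removed), and read() returns an optional ResultRecord. The key and workload are hypothetical:

from prefect.results import get_result_store
from prefect.transactions import transaction

with transaction(key="daily-export-2024-01-01", store=get_result_store()) as txn:
    if txn.read() is None:  # Optional[ResultRecord]
        ...  # perform the guarded work and stage its result here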
prefect/utilities/_git.py ADDED
@@ -0,0 +1,34 @@
+ from __future__ import annotations
+
+ import subprocess
+ import sys
+
+
+ def get_git_remote_origin_url() -> str | None:
+     """
+     Returns the git remote origin URL for the current directory.
+     """
+     try:
+         origin_url = subprocess.check_output(
+             ["git", "config", "--get", "remote.origin.url"],
+             shell=sys.platform == "win32",
+             stderr=subprocess.DEVNULL,
+         )
+         origin_url = origin_url.decode().strip()
+     except subprocess.CalledProcessError:
+         return None
+
+     return origin_url
+
+
+ def get_git_branch() -> str | None:
+     """
+     Returns the git branch for the current directory.
+     """
+     try:
+         branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
+         branch = branch.decode().strip()
+     except subprocess.CalledProcessError:
+         return None
+
+     return branch
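
These helpers are new in 3.1.12; they shell out to git and return None when the information cannot be read (for example, when the command exits non-zero outside a repository). A short usage sketch:

from prefect.utilities._git import get_git_branch, get_git_remote_origin_url

print(get_git_remote_origin_url())  # e.g. "https://github.com/org/repo.git", or None
print(get_git_branch())             # e.g. "main", or None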
prefect/utilities/asyncutils.py CHANGED
@@ -77,7 +77,7 @@ def get_thread_limiter() -> anyio.CapacityLimiter:

  def is_async_fn(
      func: _SyncOrAsyncCallable[P, R],
- ) -> TypeGuard[Callable[P, Awaitable[R]]]:
+ ) -> TypeGuard[Callable[P, Coroutine[Any, Any, Any]]]:
      """
      Returns `True` if a function returns a coroutine.

@@ -45,7 +45,7 @@ from prefect.exceptions import (
prefect/utilities/engine.py CHANGED
@@ -45,7 +45,7 @@ from prefect.exceptions import (
  from prefect.flows import Flow
  from prefect.futures import PrefectFuture
  from prefect.logging.loggers import get_logger
- from prefect.results import BaseResult, ResultRecord, should_persist_result
+ from prefect.results import ResultRecord, should_persist_result
  from prefect.settings import PREFECT_LOGGING_LOG_PRINTS
  from prefect.states import State
  from prefect.tasks import Task
@@ -283,10 +283,6 @@ async def resolve_inputs(
      return resolved_parameters


- def _is_base_result(data: Any) -> TypeIs[BaseResult[Any]]:
-     return isinstance(data, BaseResult)
-
-
  def _is_result_record(data: Any) -> TypeIs[ResultRecord[Any]]:
      return isinstance(data, ResultRecord)

@@ -335,11 +331,7 @@ async def propose_state(
      # Handle task and sub-flow tracing
      if state.is_final():
          result: Any
-         if _is_base_result(state.data) and state.data.has_cached_object():
-             # Avoid fetching the result unless it is cached, otherwise we defeat
-             # the purpose of disabling `cache_result_in_memory`
-             result = state.result(raise_on_failure=False, fetch=True)
-         elif _is_result_record(state.data):
+         if _is_result_record(state.data):
              result = state.data.result
          else:
              result = state.data
@@ -451,13 +443,7 @@ def propose_state_sync(

      # Handle task and sub-flow tracing
      if state.is_final():
-         if _is_base_result(state.data) and state.data.has_cached_object():
-             # Avoid fetching the result unless it is cached, otherwise we defeat
-             # the purpose of disabling `cache_result_in_memory`
-             result = state.result(raise_on_failure=False, fetch=True)
-             if asyncio.iscoroutine(result):
-                 result = run_coro_as_sync(result)
-         elif _is_result_record(state.data):
+         if _is_result_record(state.data):
              result = state.data.result
          else:
              result = state.data
@@ -636,8 +622,6 @@ def emit_task_run_state_change_event(

      if _is_result_record(validated_state.data) and should_persist_result():
          data = validated_state.data.metadata.model_dump(mode="json")
-     elif _is_base_result(validated_state.data):
-         data = validated_state.data.model_dump(mode="json")
      else:
          data = None

prefect/utilities/generics.py ADDED
@@ -0,0 +1,18 @@
+ from typing import Any, TypeVar
+
+ from pydantic import BaseModel
+ from pydantic_core import SchemaValidator, core_schema
+
+ T = TypeVar("T", bound=BaseModel)
+
+ ListValidator = SchemaValidator(
+     schema=core_schema.list_schema(
+         items_schema=core_schema.dict_schema(
+             keys_schema=core_schema.str_schema(), values_schema=core_schema.any_schema()
+         )
+     )
+ )
+
+
+ def validate_list(model: type[T], input: Any) -> list[T]:
+     return [model.model_validate(item) for item in ListValidator.validate_python(input)]
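
validate_list validates a list of dicts into model instances in a single pass over the input. A usage sketch with a hypothetical Pydantic model (not part of Prefect):

from pydantic import BaseModel

from prefect.utilities.generics import validate_list

class Item(BaseModel):
    name: str
    quantity: int

items = validate_list(Item, [{"name": "bolt", "quantity": 3}, {"name": "nut", "quantity": 5}])
print(items[0].quantity)  # 3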
prefect/utilities/templating.py CHANGED
@@ -1,7 +1,17 @@
  import enum
  import os
  import re
- from typing import TYPE_CHECKING, Any, NamedTuple, Optional, TypeVar, Union, cast
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     Literal,
+     NamedTuple,
+     Optional,
+     TypeVar,
+     Union,
+     cast,
+     overload,
+ )

  from prefect.client.utilities import inject_client
  from prefect.utilities.annotations import NotSet
@@ -79,6 +89,20 @@ def find_placeholders(template: T) -> set[Placeholder]:
      raise ValueError(f"Unexpected type: {type(template)}")


+ @overload
+ def apply_values(
+     template: T, values: dict[str, Any], remove_notset: Literal[True] = True
+ ) -> T:
+     ...
+
+
+ @overload
+ def apply_values(
+     template: T, values: dict[str, Any], remove_notset: Literal[False] = False
+ ) -> Union[T, type[NotSet]]:
+     ...
+
+
  def apply_values(
      template: T, values: dict[str, Any], remove_notset: bool = True
  ) -> Union[T, type[NotSet]]:
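
The new overloads only refine the return type: with remove_notset=True (the default) the result is typed as the template's own type T, while remove_notset=False keeps the NotSet sentinel in the signature. A behavioral sketch with illustrative values, assuming apply_values substitutes "{{ placeholder }}" references from the supplied mapping:

from prefect.utilities.templating import apply_values

template = {"image": "{{ image_name }}", "cpu": "{{ cpu }}"}
values = {"image_name": "python:3.12-slim", "cpu": 2}

hydrated = apply_values(template, values)  # typed via the first overload
print(hydrated)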
prefect/workers/base.py CHANGED
@@ -866,6 +866,9 @@ class BaseWorker(abc.ABC):

          for flow_run in submittable_flow_runs:
              if flow_run.id in self._submitting_flow_run_ids:
+                 self._logger.debug(
+                     f"Skipping {flow_run.id} because it's already being submitted"
+                 )
                  continue
              try:
                  if self._limiter:
@@ -945,7 +948,7 @@ class BaseWorker(abc.ABC):
              return

          ready_to_submit = await self._propose_pending_state(flow_run)
-
+         self._logger.debug(f"Ready to submit {flow_run.id}: {ready_to_submit}")
          if ready_to_submit:
              readiness_result = await self._runs_task_group.start(
                  self._submit_run_and_capture_errors, flow_run
@@ -969,10 +972,9 @@ class BaseWorker(abc.ABC):
              else:
                  # If the run is not ready to submit, release the concurrency slot
                  self._release_limit_slot(flow_run.id)
-
-             self._submitting_flow_run_ids.remove(flow_run.id)
          else:
              self._release_limit_slot(flow_run.id)
+         self._submitting_flow_run_ids.remove(flow_run.id)

      async def _submit_run_and_capture_errors(
          self, flow_run: "FlowRun", task_status: Optional[anyio.abc.TaskStatus] = None
@@ -1102,6 +1104,7 @@ class BaseWorker(abc.ABC):
                      f"Server sent an abort signal: {exc}"
                  ),
              )
+
              return False
          except Exception:
              run_logger.exception(
prefect/workers/process.py CHANGED
@@ -85,7 +85,7 @@ class ProcessJobConfiguration(BaseJobConfiguration):

      @field_validator("working_dir")
      @classmethod
-     def validate_command(cls, v):
+     def validate_command(cls, v: str) -> str:
          return validate_command(v)

      def prepare_for_flow_run(