prefect-client 3.1.5__py3-none-any.whl → 3.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. prefect/__init__.py +3 -0
  2. prefect/_experimental/__init__.py +0 -0
  3. prefect/_experimental/lineage.py +181 -0
  4. prefect/_internal/compatibility/async_dispatch.py +38 -9
  5. prefect/_internal/compatibility/migration.py +1 -1
  6. prefect/_internal/concurrency/api.py +52 -52
  7. prefect/_internal/concurrency/calls.py +59 -35
  8. prefect/_internal/concurrency/cancellation.py +34 -18
  9. prefect/_internal/concurrency/event_loop.py +7 -6
  10. prefect/_internal/concurrency/threads.py +41 -33
  11. prefect/_internal/concurrency/waiters.py +28 -21
  12. prefect/_internal/pydantic/v1_schema.py +2 -2
  13. prefect/_internal/pydantic/v2_schema.py +10 -9
  14. prefect/_internal/pydantic/v2_validated_func.py +15 -10
  15. prefect/_internal/retries.py +15 -6
  16. prefect/_internal/schemas/bases.py +11 -8
  17. prefect/_internal/schemas/validators.py +7 -5
  18. prefect/_version.py +3 -3
  19. prefect/automations.py +53 -47
  20. prefect/blocks/abstract.py +12 -10
  21. prefect/blocks/core.py +148 -19
  22. prefect/blocks/system.py +2 -1
  23. prefect/cache_policies.py +11 -11
  24. prefect/client/__init__.py +3 -1
  25. prefect/client/base.py +36 -37
  26. prefect/client/cloud.py +26 -19
  27. prefect/client/collections.py +2 -2
  28. prefect/client/orchestration.py +430 -273
  29. prefect/client/schemas/__init__.py +24 -0
  30. prefect/client/schemas/actions.py +128 -121
  31. prefect/client/schemas/filters.py +1 -1
  32. prefect/client/schemas/objects.py +114 -85
  33. prefect/client/schemas/responses.py +19 -20
  34. prefect/client/schemas/schedules.py +136 -93
  35. prefect/client/subscriptions.py +30 -15
  36. prefect/client/utilities.py +46 -36
  37. prefect/concurrency/asyncio.py +6 -9
  38. prefect/concurrency/sync.py +35 -5
  39. prefect/context.py +40 -32
  40. prefect/deployments/flow_runs.py +6 -8
  41. prefect/deployments/runner.py +14 -14
  42. prefect/deployments/steps/core.py +3 -1
  43. prefect/deployments/steps/pull.py +60 -12
  44. prefect/docker/__init__.py +1 -1
  45. prefect/events/clients.py +55 -4
  46. prefect/events/filters.py +1 -1
  47. prefect/events/related.py +2 -1
  48. prefect/events/schemas/events.py +26 -21
  49. prefect/events/utilities.py +3 -2
  50. prefect/events/worker.py +8 -0
  51. prefect/filesystems.py +3 -3
  52. prefect/flow_engine.py +87 -87
  53. prefect/flow_runs.py +7 -5
  54. prefect/flows.py +218 -176
  55. prefect/logging/configuration.py +1 -1
  56. prefect/logging/highlighters.py +1 -2
  57. prefect/logging/loggers.py +30 -20
  58. prefect/main.py +17 -24
  59. prefect/results.py +43 -22
  60. prefect/runner/runner.py +43 -21
  61. prefect/runner/server.py +30 -32
  62. prefect/runner/storage.py +3 -3
  63. prefect/runner/submit.py +3 -6
  64. prefect/runner/utils.py +6 -6
  65. prefect/runtime/flow_run.py +7 -0
  66. prefect/serializers.py +28 -24
  67. prefect/settings/constants.py +2 -2
  68. prefect/settings/legacy.py +1 -1
  69. prefect/settings/models/experiments.py +5 -0
  70. prefect/settings/models/server/events.py +10 -0
  71. prefect/task_engine.py +87 -26
  72. prefect/task_runners.py +2 -2
  73. prefect/task_worker.py +43 -25
  74. prefect/tasks.py +148 -142
  75. prefect/telemetry/bootstrap.py +15 -2
  76. prefect/telemetry/instrumentation.py +1 -1
  77. prefect/telemetry/processors.py +10 -7
  78. prefect/telemetry/run_telemetry.py +231 -0
  79. prefect/transactions.py +14 -14
  80. prefect/types/__init__.py +5 -5
  81. prefect/utilities/_engine.py +96 -0
  82. prefect/utilities/annotations.py +25 -18
  83. prefect/utilities/asyncutils.py +126 -140
  84. prefect/utilities/callables.py +87 -78
  85. prefect/utilities/collections.py +278 -117
  86. prefect/utilities/compat.py +13 -21
  87. prefect/utilities/context.py +6 -5
  88. prefect/utilities/dispatch.py +23 -12
  89. prefect/utilities/dockerutils.py +33 -32
  90. prefect/utilities/engine.py +126 -239
  91. prefect/utilities/filesystem.py +18 -15
  92. prefect/utilities/hashing.py +10 -11
  93. prefect/utilities/importtools.py +40 -27
  94. prefect/utilities/math.py +9 -5
  95. prefect/utilities/names.py +3 -3
  96. prefect/utilities/processutils.py +121 -57
  97. prefect/utilities/pydantic.py +41 -36
  98. prefect/utilities/render_swagger.py +22 -12
  99. prefect/utilities/schema_tools/__init__.py +2 -1
  100. prefect/utilities/schema_tools/hydration.py +50 -43
  101. prefect/utilities/schema_tools/validation.py +52 -42
  102. prefect/utilities/services.py +13 -12
  103. prefect/utilities/templating.py +45 -45
  104. prefect/utilities/text.py +2 -1
  105. prefect/utilities/timeout.py +4 -4
  106. prefect/utilities/urls.py +9 -4
  107. prefect/utilities/visualization.py +46 -24
  108. prefect/variables.py +136 -27
  109. prefect/workers/base.py +15 -8
  110. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/METADATA +5 -2
  111. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/RECORD +114 -110
  112. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/LICENSE +0 -0
  113. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/WHEEL +0 -0
  114. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/top_level.txt +0 -0
prefect/runner/storage.py CHANGED
@@ -53,14 +53,14 @@ class RunnerStorage(Protocol):
53
53
  """
54
54
  ...
55
55
 
56
- def to_pull_step(self) -> dict:
56
+ def to_pull_step(self) -> dict[str, Any]:
57
57
  """
58
58
  Returns a dictionary representation of the storage object that can be
59
59
  used as a deployment pull step.
60
60
  """
61
61
  ...
62
62
 
63
- def __eq__(self, __value) -> bool:
63
+ def __eq__(self, __value: Any) -> bool:
64
64
  """
65
65
  Equality check for runner storage objects.
66
66
  """
@@ -69,7 +69,7 @@ class RunnerStorage(Protocol):
69
69
 
70
70
  class GitCredentials(TypedDict, total=False):
71
71
  username: str
72
- access_token: Union[str, Secret]
72
+ access_token: Union[str, Secret[str]]
73
73
 
74
74
 
75
75
  class GitRepository:
prefect/runner/submit.py CHANGED
@@ -42,11 +42,8 @@ async def _submit_flow_to_runner(
42
42
  Returns:
43
43
  A `FlowRun` object representing the flow run that was submitted.
44
44
  """
45
- from prefect.utilities.engine import (
46
- _dynamic_key_for_task_run,
47
- collect_task_run_inputs,
48
- resolve_inputs,
49
- )
45
+ from prefect.utilities._engine import dynamic_key_for_task_run
46
+ from prefect.utilities.engine import collect_task_run_inputs, resolve_inputs
50
47
 
51
48
  async with get_client() as client:
52
49
  if not retry_failed_submissions:
@@ -67,7 +64,7 @@ async def _submit_flow_to_runner(
67
64
  parent_flow_run_context.flow_run.id if parent_flow_run_context else None
68
65
  ),
69
66
  dynamic_key=(
70
- _dynamic_key_for_task_run(parent_flow_run_context, dummy_task)
67
+ dynamic_key_for_task_run(parent_flow_run_context, dummy_task)
71
68
  if parent_flow_run_context
72
69
  else str(uuid.uuid4())
73
70
  ),
prefect/runner/utils.py CHANGED
@@ -1,5 +1,5 @@
1
1
  from copy import deepcopy
2
- from typing import Any, Dict
2
+ from typing import Any
3
3
 
4
4
  from fastapi import FastAPI
5
5
  from fastapi.openapi.utils import get_openapi
@@ -8,8 +8,8 @@ from prefect import __version__ as PREFECT_VERSION
8
8
 
9
9
 
10
10
  def inject_schemas_into_openapi(
11
- webserver: FastAPI, schemas_to_inject: Dict[str, Any]
12
- ) -> Dict[str, Any]:
11
+ webserver: FastAPI, schemas_to_inject: dict[str, Any]
12
+ ) -> dict[str, Any]:
13
13
  """
14
14
  Augments the webserver's OpenAPI schema with additional schemas from deployments / flows / tasks.
15
15
 
@@ -29,8 +29,8 @@ def inject_schemas_into_openapi(
29
29
 
30
30
 
31
31
  def merge_definitions(
32
- injected_schemas: Dict[str, Any], openapi_schema: Dict[str, Any]
33
- ) -> Dict[str, Any]:
32
+ injected_schemas: dict[str, Any], openapi_schema: dict[str, Any]
33
+ ) -> dict[str, Any]:
34
34
  """
35
35
  Integrates definitions from injected schemas into the OpenAPI components.
36
36
 
@@ -69,7 +69,7 @@ def update_refs_in_schema(schema_item: Any, new_ref: str) -> None:
69
69
  update_refs_in_schema(item, new_ref)
70
70
 
71
71
 
72
- def update_refs_to_components(openapi_schema: Dict[str, Any]) -> Dict[str, Any]:
72
+ def update_refs_to_components(openapi_schema: dict[str, Any]) -> dict[str, Any]:
73
73
  """
74
74
  Updates all `$ref` fields in the OpenAPI schema to reference the components section.
75
75
 
@@ -44,6 +44,7 @@ __all__ = [
44
44
  "run_count",
45
45
  "api_url",
46
46
  "ui_url",
47
+ "job_variables",
47
48
  ]
48
49
 
49
50
 
@@ -317,6 +318,11 @@ def get_flow_run_ui_url() -> Optional[str]:
317
318
  return f"{PREFECT_UI_URL.value()}/flow-runs/flow-run/{flow_run_id}"
318
319
 
319
320
 
321
+ def get_job_variables() -> Optional[Dict[str, Any]]:
322
+ flow_run_ctx = FlowRunContext.get()
323
+ return flow_run_ctx.flow_run.job_variables if flow_run_ctx else None
324
+
325
+
320
326
  FIELDS = {
321
327
  "id": get_id,
322
328
  "tags": get_tags,
@@ -331,4 +337,5 @@ FIELDS = {
331
337
  "api_url": get_flow_run_api_url,
332
338
  "ui_url": get_flow_run_ui_url,
333
339
  "flow_version": get_flow_version,
340
+ "job_variables": get_job_variables,
334
341
  }
prefect/serializers.py CHANGED
@@ -13,7 +13,7 @@ bytes to an object respectively.
13
13
 
14
14
  import abc
15
15
  import base64
16
- from typing import Any, Dict, Generic, Optional, Type
16
+ from typing import Any, Generic, Optional, Type, Union
17
17
 
18
18
  from pydantic import (
19
19
  BaseModel,
@@ -23,7 +23,7 @@ from pydantic import (
23
23
  ValidationError,
24
24
  field_validator,
25
25
  )
26
- from typing_extensions import Literal, Self, TypeVar
26
+ from typing_extensions import Self, TypeVar
27
27
 
28
28
  from prefect._internal.schemas.validators import (
29
29
  cast_type_names_to_serializers,
@@ -54,7 +54,7 @@ def prefect_json_object_encoder(obj: Any) -> Any:
54
54
  }
55
55
 
56
56
 
57
- def prefect_json_object_decoder(result: dict):
57
+ def prefect_json_object_decoder(result: dict[str, Any]):
58
58
  """
59
59
  `JSONDecoder.object_hook` for decoding objects from JSON when previously encoded
60
60
  with `prefect_json_object_encoder`
@@ -80,12 +80,16 @@ class Serializer(BaseModel, Generic[D], abc.ABC):
80
80
  data.setdefault("type", type_string)
81
81
  super().__init__(**data)
82
82
 
83
- def __new__(cls: Type[Self], **kwargs) -> Self:
83
+ def __new__(cls: Type[Self], **kwargs: Any) -> Self:
84
84
  if "type" in kwargs:
85
85
  try:
86
86
  subcls = lookup_type(cls, dispatch_key=kwargs["type"])
87
87
  except KeyError as exc:
88
- raise ValidationError(errors=[exc], model=cls)
88
+ raise ValidationError.from_exception_data(
89
+ title=cls.__name__,
90
+ line_errors=[{"type": str(exc), "input": kwargs["type"]}],
91
+ input_type="python",
92
+ )
89
93
 
90
94
  return super().__new__(subcls)
91
95
  else:
@@ -104,7 +108,7 @@ class Serializer(BaseModel, Generic[D], abc.ABC):
104
108
  model_config = ConfigDict(extra="forbid")
105
109
 
106
110
  @classmethod
107
- def __dispatch_key__(cls) -> str:
111
+ def __dispatch_key__(cls) -> Optional[str]:
108
112
  type_str = cls.model_fields["type"].default
109
113
  return type_str if isinstance(type_str, str) else None
110
114
 
@@ -119,19 +123,15 @@ class PickleSerializer(Serializer):
119
123
  - Wraps pickles in base64 for safe transmission.
120
124
  """
121
125
 
122
- type: Literal["pickle"] = "pickle"
126
+ type: str = Field(default="pickle", frozen=True)
123
127
 
124
128
  picklelib: str = "cloudpickle"
125
129
  picklelib_version: Optional[str] = None
126
130
 
127
131
  @field_validator("picklelib")
128
- def check_picklelib(cls, value):
132
+ def check_picklelib(cls, value: str) -> str:
129
133
  return validate_picklelib(value)
130
134
 
131
- # @model_validator(mode="before")
132
- # def check_picklelib_version(cls, values):
133
- # return validate_picklelib_version(values)
134
-
135
135
  def dumps(self, obj: Any) -> bytes:
136
136
  pickler = from_qualified_name(self.picklelib)
137
137
  blob = pickler.dumps(obj)
@@ -151,7 +151,7 @@ class JSONSerializer(Serializer):
151
151
  Wraps the `json` library to serialize to UTF-8 bytes instead of string types.
152
152
  """
153
153
 
154
- type: Literal["json"] = "json"
154
+ type: str = Field(default="json", frozen=True)
155
155
 
156
156
  jsonlib: str = "json"
157
157
  object_encoder: Optional[str] = Field(
@@ -171,23 +171,27 @@ class JSONSerializer(Serializer):
171
171
  "by our default `object_encoder`."
172
172
  ),
173
173
  )
174
- dumps_kwargs: Dict[str, Any] = Field(default_factory=dict)
175
- loads_kwargs: Dict[str, Any] = Field(default_factory=dict)
174
+ dumps_kwargs: dict[str, Any] = Field(default_factory=dict)
175
+ loads_kwargs: dict[str, Any] = Field(default_factory=dict)
176
176
 
177
177
  @field_validator("dumps_kwargs")
178
- def dumps_kwargs_cannot_contain_default(cls, value):
178
+ def dumps_kwargs_cannot_contain_default(
179
+ cls, value: dict[str, Any]
180
+ ) -> dict[str, Any]:
179
181
  return validate_dump_kwargs(value)
180
182
 
181
183
  @field_validator("loads_kwargs")
182
- def loads_kwargs_cannot_contain_object_hook(cls, value):
184
+ def loads_kwargs_cannot_contain_object_hook(
185
+ cls, value: dict[str, Any]
186
+ ) -> dict[str, Any]:
183
187
  return validate_load_kwargs(value)
184
188
 
185
- def dumps(self, data: Any) -> bytes:
189
+ def dumps(self, obj: Any) -> bytes:
186
190
  json = from_qualified_name(self.jsonlib)
187
191
  kwargs = self.dumps_kwargs.copy()
188
192
  if self.object_encoder:
189
193
  kwargs["default"] = from_qualified_name(self.object_encoder)
190
- result = json.dumps(data, **kwargs)
194
+ result = json.dumps(obj, **kwargs)
191
195
  if isinstance(result, str):
192
196
  # The standard library returns str but others may return bytes directly
193
197
  result = result.encode()
@@ -213,17 +217,17 @@ class CompressedSerializer(Serializer):
213
217
  level: If not null, the level of compression to pass to `compress`.
214
218
  """
215
219
 
216
- type: Literal["compressed"] = "compressed"
220
+ type: str = Field(default="compressed", frozen=True)
217
221
 
218
222
  serializer: Serializer
219
223
  compressionlib: str = "lzma"
220
224
 
221
225
  @field_validator("serializer", mode="before")
222
- def validate_serializer(cls, value):
226
+ def validate_serializer(cls, value: Union[str, Serializer]) -> Serializer:
223
227
  return cast_type_names_to_serializers(value)
224
228
 
225
229
  @field_validator("compressionlib")
226
- def check_compressionlib(cls, value):
230
+ def check_compressionlib(cls, value: str) -> str:
227
231
  return validate_compressionlib(value)
228
232
 
229
233
  def dumps(self, obj: Any) -> bytes:
@@ -242,7 +246,7 @@ class CompressedPickleSerializer(CompressedSerializer):
242
246
  A compressed serializer preconfigured to use the pickle serializer.
243
247
  """
244
248
 
245
- type: Literal["compressed/pickle"] = "compressed/pickle"
249
+ type: str = Field(default="compressed/pickle", frozen=True)
246
250
 
247
251
  serializer: Serializer = Field(default_factory=PickleSerializer)
248
252
 
@@ -252,6 +256,6 @@ class CompressedJSONSerializer(CompressedSerializer):
252
256
  A compressed serializer preconfigured to use the json serializer.
253
257
  """
254
258
 
255
- type: Literal["compressed/json"] = "compressed/json"
259
+ type: str = Field(default="compressed/json", frozen=True)
256
260
 
257
261
  serializer: Serializer = Field(default_factory=JSONSerializer)
@@ -1,8 +1,8 @@
1
1
  from pathlib import Path
2
- from typing import Tuple, Type
2
+ from typing import Any, Tuple, Type
3
3
 
4
4
  from pydantic import Secret, SecretStr
5
5
 
6
6
  DEFAULT_PREFECT_HOME = Path.home() / ".prefect"
7
7
  DEFAULT_PROFILES_PATH = Path(__file__).parent.joinpath("profiles.toml")
8
- _SECRET_TYPES: Tuple[Type, ...] = (Secret, SecretStr)
8
+ _SECRET_TYPES: Tuple[Type[Any], ...] = (Secret, SecretStr)
@@ -8,7 +8,7 @@ from pydantic_settings import BaseSettings
8
8
  from typing_extensions import Self
9
9
 
10
10
  from prefect.settings.base import PrefectBaseSettings
11
- from prefect.settings.constants import _SECRET_TYPES
11
+ from prefect.settings.constants import _SECRET_TYPES # type: ignore[reportPrivateUsage]
12
12
  from prefect.settings.context import get_current_settings
13
13
  from prefect.settings.models.root import Settings
14
14
 
@@ -22,3 +22,8 @@ class ExperimentsSettings(PrefectBaseSettings):
22
22
  default=False,
23
23
  description="Enables sending telemetry to Prefect Cloud.",
24
24
  )
25
+
26
+ lineage_events_enabled: bool = Field(
27
+ default=False,
28
+ description="If `True`, enables emitting lineage events. Set to `False` to disable lineage event emission.",
29
+ )
@@ -135,3 +135,13 @@ class ServerEventsSettings(PrefectBaseSettings):
135
135
  "prefect_messaging_cache",
136
136
  ),
137
137
  )
138
+
139
+ maximum_event_name_length: int = Field(
140
+ default=1024,
141
+ gt=0,
142
+ description="The maximum length of an event name.",
143
+ validation_alias=AliasChoices(
144
+ AliasPath("maximum_event_name_length"),
145
+ "prefect_server_events_maximum_event_name_length",
146
+ ),
147
+ )
prefect/task_engine.py CHANGED
@@ -4,7 +4,7 @@ import logging
4
4
  import threading
5
5
  import time
6
6
  from asyncio import CancelledError
7
- from contextlib import ExitStack, asynccontextmanager, contextmanager
7
+ from contextlib import ExitStack, asynccontextmanager, contextmanager, nullcontext
8
8
  from dataclasses import dataclass, field
9
9
  from functools import partial
10
10
  from textwrap import dedent
@@ -29,6 +29,7 @@ from uuid import UUID
29
29
 
30
30
  import anyio
31
31
  import pendulum
32
+ from opentelemetry import trace
32
33
  from typing_extensions import ParamSpec
33
34
 
34
35
  from prefect import Task
@@ -79,13 +80,14 @@ from prefect.states import (
79
80
  exception_to_failed_state,
80
81
  return_value_to_state,
81
82
  )
83
+ from prefect.telemetry.run_telemetry import RunTelemetry
82
84
  from prefect.transactions import IsolationLevel, Transaction, transaction
85
+ from prefect.utilities._engine import get_hook_name
83
86
  from prefect.utilities.annotations import NotSet
84
87
  from prefect.utilities.asyncutils import run_coro_as_sync
85
88
  from prefect.utilities.callables import call_with_parameters, parameters_to_args_kwargs
86
89
  from prefect.utilities.collections import visit_collection
87
90
  from prefect.utilities.engine import (
88
- _get_hook_name,
89
91
  emit_task_run_state_change_event,
90
92
  link_state_to_result,
91
93
  resolve_to_final_result,
@@ -120,6 +122,7 @@ class BaseTaskRunEngine(Generic[P, R]):
120
122
  _is_started: bool = False
121
123
  _task_name_set: bool = False
122
124
  _last_event: Optional[PrefectEvent] = None
125
+ _telemetry: RunTelemetry = field(default_factory=RunTelemetry)
123
126
 
124
127
  def __post_init__(self):
125
128
  if self.parameters is None:
@@ -193,11 +196,11 @@ class BaseTaskRunEngine(Generic[P, R]):
193
196
  self.parameters = resolved_parameters
194
197
 
195
198
  def _set_custom_task_run_name(self):
196
- from prefect.utilities.engine import _resolve_custom_task_run_name
199
+ from prefect.utilities._engine import resolve_custom_task_run_name
197
200
 
198
201
  # update the task run name if necessary
199
202
  if not self._task_name_set and self.task.task_run_name:
200
- task_run_name = _resolve_custom_task_run_name(
203
+ task_run_name = resolve_custom_task_run_name(
201
204
  task=self.task, parameters=self.parameters or {}
202
205
  )
203
206
 
@@ -351,7 +354,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
351
354
  hooks = None
352
355
 
353
356
  for hook in hooks or []:
354
- hook_name = _get_hook_name(hook)
357
+ hook_name = get_hook_name(hook)
355
358
 
356
359
  try:
357
360
  self.logger.info(
@@ -427,6 +430,11 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
427
430
 
428
431
  self.task_run.state = new_state = state
429
432
 
433
+ if last_state.timestamp == new_state.timestamp:
434
+ # Ensure that the state timestamp is unique, or at least not equal to the last state.
435
+ # This might occur especially on Windows where the timestamp resolution is limited.
436
+ new_state.timestamp += pendulum.duration(microseconds=1)
437
+
430
438
  # Ensure that the state_details are populated with the current run IDs
431
439
  new_state.state_details.task_run_id = self.task_run.id
432
440
  new_state.state_details.flow_run_id = self.task_run.flow_run_id
@@ -460,7 +468,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
460
468
  validated_state=self.task_run.state,
461
469
  follows=self._last_event,
462
470
  )
463
-
471
+ self._telemetry.update_state(new_state)
464
472
  return new_state
465
473
 
466
474
  def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
@@ -514,6 +522,8 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
514
522
  self.record_terminal_state_timing(terminal_state)
515
523
  self.set_state(terminal_state)
516
524
  self._return_value = result
525
+
526
+ self._telemetry.end_span_on_success()
517
527
  return result
518
528
 
519
529
  def handle_retry(self, exc: Exception) -> bool:
@@ -562,6 +572,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
562
572
 
563
573
  def handle_exception(self, exc: Exception) -> None:
564
574
  # If the task fails, and we have retries left, set the task to retrying.
575
+ self._telemetry.record_exception(exc)
565
576
  if not self.handle_retry(exc):
566
577
  # If the task has no retries left, or the retry condition is not met, set the task to failed.
567
578
  state = run_coro_as_sync(
@@ -575,6 +586,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
575
586
  self.record_terminal_state_timing(state)
576
587
  self.set_state(state)
577
588
  self._raised = exc
589
+ self._telemetry.end_span_on_failure(state.message if state else None)
578
590
 
579
591
  def handle_timeout(self, exc: TimeoutError) -> None:
580
592
  if not self.handle_retry(exc):
@@ -588,6 +600,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
588
600
  message=message,
589
601
  name="TimedOut",
590
602
  )
603
+ self.record_terminal_state_timing(state)
591
604
  self.set_state(state)
592
605
  self._raised = exc
593
606
 
@@ -598,6 +611,8 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
598
611
  self.record_terminal_state_timing(state)
599
612
  self.set_state(state, force=True)
600
613
  self._raised = exc
614
+ self._telemetry.record_exception(exc)
615
+ self._telemetry.end_span_on_failure(state.message if state else None)
601
616
 
602
617
  @contextmanager
603
618
  def setup_run_context(self, client: Optional[SyncPrefectClient] = None):
@@ -655,14 +670,17 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
655
670
  with SyncClientContext.get_or_create() as client_ctx:
656
671
  self._client = client_ctx.client
657
672
  self._is_started = True
673
+ parent_flow_run_context = FlowRunContext.get()
674
+ parent_task_run_context = TaskRunContext.get()
675
+
658
676
  try:
659
677
  if not self.task_run:
660
678
  self.task_run = run_coro_as_sync(
661
679
  self.task.create_local_run(
662
680
  id=task_run_id,
663
681
  parameters=self.parameters,
664
- flow_run_context=FlowRunContext.get(),
665
- parent_task_run_context=TaskRunContext.get(),
682
+ flow_run_context=parent_flow_run_context,
683
+ parent_task_run_context=parent_task_run_context,
666
684
  wait_for=self.wait_for,
667
685
  extra_task_inputs=dependencies,
668
686
  )
@@ -679,6 +697,14 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
679
697
  self.logger.debug(
680
698
  f"Created task run {self.task_run.name!r} for task {self.task.name!r}"
681
699
  )
700
+
701
+ self._telemetry.start_span(
702
+ run=self.task_run,
703
+ name=self.task.name,
704
+ client=self.client,
705
+ parameters=self.parameters,
706
+ )
707
+
682
708
  yield self
683
709
 
684
710
  except TerminationSignal as exc:
@@ -730,11 +756,14 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
730
756
  dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
731
757
  ) -> Generator[None, None, None]:
732
758
  with self.initialize_run(task_run_id=task_run_id, dependencies=dependencies):
733
- self.begin_run()
734
- try:
735
- yield
736
- finally:
737
- self.call_hooks()
759
+ with trace.use_span(
760
+ self._telemetry.span
761
+ ) if self._telemetry.span else nullcontext():
762
+ self.begin_run()
763
+ try:
764
+ yield
765
+ finally:
766
+ self.call_hooks()
738
767
 
739
768
  @contextmanager
740
769
  def transaction_context(self) -> Generator[Transaction, None, None]:
@@ -866,7 +895,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
866
895
  hooks = None
867
896
 
868
897
  for hook in hooks or []:
869
- hook_name = _get_hook_name(hook)
898
+ hook_name = get_hook_name(hook)
870
899
 
871
900
  try:
872
901
  self.logger.info(
@@ -942,6 +971,11 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
942
971
 
943
972
  self.task_run.state = new_state = state
944
973
 
974
+ if last_state.timestamp == new_state.timestamp:
975
+ # Ensure that the state timestamp is unique, or at least not equal to the last state.
976
+ # This might occur especially on Windows where the timestamp resolution is limited.
977
+ new_state.timestamp += pendulum.duration(microseconds=1)
978
+
945
979
  # Ensure that the state_details are populated with the current run IDs
946
980
  new_state.state_details.task_run_id = self.task_run.id
947
981
  new_state.state_details.flow_run_id = self.task_run.flow_run_id
@@ -977,6 +1011,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
977
1011
  follows=self._last_event,
978
1012
  )
979
1013
 
1014
+ self._telemetry.update_state(new_state)
980
1015
  return new_state
981
1016
 
982
1017
  async def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
@@ -1025,6 +1060,9 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1025
1060
  self.record_terminal_state_timing(terminal_state)
1026
1061
  await self.set_state(terminal_state)
1027
1062
  self._return_value = result
1063
+
1064
+ self._telemetry.end_span_on_success()
1065
+
1028
1066
  return result
1029
1067
 
1030
1068
  async def handle_retry(self, exc: Exception) -> bool:
@@ -1073,6 +1111,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1073
1111
 
1074
1112
  async def handle_exception(self, exc: Exception) -> None:
1075
1113
  # If the task fails, and we have retries left, set the task to retrying.
1114
+ self._telemetry.record_exception(exc)
1076
1115
  if not await self.handle_retry(exc):
1077
1116
  # If the task has no retries left, or the retry condition is not met, set the task to failed.
1078
1117
  state = await exception_to_failed_state(
@@ -1084,7 +1123,10 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1084
1123
  await self.set_state(state)
1085
1124
  self._raised = exc
1086
1125
 
1126
+ self._telemetry.end_span_on_failure(state.message)
1127
+
1087
1128
  async def handle_timeout(self, exc: TimeoutError) -> None:
1129
+ self._telemetry.record_exception(exc)
1088
1130
  if not await self.handle_retry(exc):
1089
1131
  if isinstance(exc, TaskRunTimeoutError):
1090
1132
  message = f"Task run exceeded timeout of {self.task.timeout_seconds} second(s)"
@@ -1096,8 +1138,10 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1096
1138
  message=message,
1097
1139
  name="TimedOut",
1098
1140
  )
1141
+ self.record_terminal_state_timing(state)
1099
1142
  await self.set_state(state)
1100
1143
  self._raised = exc
1144
+ self._telemetry.end_span_on_failure(state.message)
1101
1145
 
1102
1146
  async def handle_crash(self, exc: BaseException) -> None:
1103
1147
  state = await exception_to_crashed_state(exc)
@@ -1107,6 +1151,9 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1107
1151
  await self.set_state(state, force=True)
1108
1152
  self._raised = exc
1109
1153
 
1154
+ self._telemetry.record_exception(exc)
1155
+ self._telemetry.end_span_on_failure(state.message)
1156
+
1110
1157
  @asynccontextmanager
1111
1158
  async def setup_run_context(self, client: Optional[PrefectClient] = None):
1112
1159
  from prefect.utilities.engine import (
@@ -1162,13 +1209,16 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1162
1209
  async with AsyncClientContext.get_or_create():
1163
1210
  self._client = get_client()
1164
1211
  self._is_started = True
1212
+ parent_flow_run_context = FlowRunContext.get()
1213
+ parent_task_run_context = TaskRunContext.get()
1214
+
1165
1215
  try:
1166
1216
  if not self.task_run:
1167
1217
  self.task_run = await self.task.create_local_run(
1168
1218
  id=task_run_id,
1169
1219
  parameters=self.parameters,
1170
- flow_run_context=FlowRunContext.get(),
1171
- parent_task_run_context=TaskRunContext.get(),
1220
+ flow_run_context=parent_flow_run_context,
1221
+ parent_task_run_context=parent_task_run_context,
1172
1222
  wait_for=self.wait_for,
1173
1223
  extra_task_inputs=dependencies,
1174
1224
  )
@@ -1184,6 +1234,14 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1184
1234
  self.logger.debug(
1185
1235
  f"Created task run {self.task_run.name!r} for task {self.task.name!r}"
1186
1236
  )
1237
+
1238
+ await self._telemetry.async_start_span(
1239
+ run=self.task_run,
1240
+ name=self.task.name,
1241
+ client=self.client,
1242
+ parameters=self.parameters,
1243
+ )
1244
+
1187
1245
  yield self
1188
1246
 
1189
1247
  except TerminationSignal as exc:
@@ -1237,11 +1295,14 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
1237
1295
  async with self.initialize_run(
1238
1296
  task_run_id=task_run_id, dependencies=dependencies
1239
1297
  ):
1240
- await self.begin_run()
1241
- try:
1242
- yield
1243
- finally:
1244
- await self.call_hooks()
1298
+ with trace.use_span(
1299
+ self._telemetry.span
1300
+ ) if self._telemetry.span else nullcontext():
1301
+ await self.begin_run()
1302
+ try:
1303
+ yield
1304
+ finally:
1305
+ await self.call_hooks()
1245
1306
 
1246
1307
  @asynccontextmanager
1247
1308
  async def transaction_context(self) -> AsyncGenerator[Transaction, None]:
@@ -1317,7 +1378,7 @@ def run_task_sync(
1317
1378
  task_run_id: Optional[UUID] = None,
1318
1379
  task_run: Optional[TaskRun] = None,
1319
1380
  parameters: Optional[Dict[str, Any]] = None,
1320
- wait_for: Optional[Iterable[PrefectFuture]] = None,
1381
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
1321
1382
  return_type: Literal["state", "result"] = "result",
1322
1383
  dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
1323
1384
  context: Optional[Dict[str, Any]] = None,
@@ -1344,7 +1405,7 @@ async def run_task_async(
1344
1405
  task_run_id: Optional[UUID] = None,
1345
1406
  task_run: Optional[TaskRun] = None,
1346
1407
  parameters: Optional[Dict[str, Any]] = None,
1347
- wait_for: Optional[Iterable[PrefectFuture]] = None,
1408
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
1348
1409
  return_type: Literal["state", "result"] = "result",
1349
1410
  dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
1350
1411
  context: Optional[Dict[str, Any]] = None,
@@ -1371,7 +1432,7 @@ def run_generator_task_sync(
1371
1432
  task_run_id: Optional[UUID] = None,
1372
1433
  task_run: Optional[TaskRun] = None,
1373
1434
  parameters: Optional[Dict[str, Any]] = None,
1374
- wait_for: Optional[Iterable[PrefectFuture]] = None,
1435
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
1375
1436
  return_type: Literal["state", "result"] = "result",
1376
1437
  dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
1377
1438
  context: Optional[Dict[str, Any]] = None,
@@ -1426,7 +1487,7 @@ async def run_generator_task_async(
1426
1487
  task_run_id: Optional[UUID] = None,
1427
1488
  task_run: Optional[TaskRun] = None,
1428
1489
  parameters: Optional[Dict[str, Any]] = None,
1429
- wait_for: Optional[Iterable[PrefectFuture]] = None,
1490
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
1430
1491
  return_type: Literal["state", "result"] = "result",
1431
1492
  dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
1432
1493
  context: Optional[Dict[str, Any]] = None,
@@ -1482,7 +1543,7 @@ def run_task(
1482
1543
  task_run_id: Optional[UUID] = None,
1483
1544
  task_run: Optional[TaskRun] = None,
1484
1545
  parameters: Optional[Dict[str, Any]] = None,
1485
- wait_for: Optional[Iterable[PrefectFuture]] = None,
1546
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
1486
1547
  return_type: Literal["state", "result"] = "result",
1487
1548
  dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
1488
1549
  context: Optional[Dict[str, Any]] = None,
prefect/task_runners.py CHANGED
@@ -97,9 +97,9 @@ class TaskRunner(abc.ABC, Generic[F]):
97
97
 
98
98
  def map(
99
99
  self,
100
- task: "Task",
100
+ task: "Task[P, R]",
101
101
  parameters: Dict[str, Any],
102
- wait_for: Optional[Iterable[PrefectFuture]] = None,
102
+ wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
103
103
  ) -> PrefectFutureList[F]:
104
104
  """
105
105
  Submit multiple tasks to the task run engine.