prefect-client 3.1.5__py3-none-any.whl → 3.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. prefect/__init__.py +3 -0
  2. prefect/_experimental/__init__.py +0 -0
  3. prefect/_experimental/lineage.py +181 -0
  4. prefect/_internal/compatibility/async_dispatch.py +38 -9
  5. prefect/_internal/compatibility/migration.py +1 -1
  6. prefect/_internal/concurrency/api.py +52 -52
  7. prefect/_internal/concurrency/calls.py +59 -35
  8. prefect/_internal/concurrency/cancellation.py +34 -18
  9. prefect/_internal/concurrency/event_loop.py +7 -6
  10. prefect/_internal/concurrency/threads.py +41 -33
  11. prefect/_internal/concurrency/waiters.py +28 -21
  12. prefect/_internal/pydantic/v1_schema.py +2 -2
  13. prefect/_internal/pydantic/v2_schema.py +10 -9
  14. prefect/_internal/pydantic/v2_validated_func.py +15 -10
  15. prefect/_internal/retries.py +15 -6
  16. prefect/_internal/schemas/bases.py +11 -8
  17. prefect/_internal/schemas/validators.py +7 -5
  18. prefect/_version.py +3 -3
  19. prefect/automations.py +53 -47
  20. prefect/blocks/abstract.py +12 -10
  21. prefect/blocks/core.py +148 -19
  22. prefect/blocks/system.py +2 -1
  23. prefect/cache_policies.py +11 -11
  24. prefect/client/__init__.py +3 -1
  25. prefect/client/base.py +36 -37
  26. prefect/client/cloud.py +26 -19
  27. prefect/client/collections.py +2 -2
  28. prefect/client/orchestration.py +430 -273
  29. prefect/client/schemas/__init__.py +24 -0
  30. prefect/client/schemas/actions.py +128 -121
  31. prefect/client/schemas/filters.py +1 -1
  32. prefect/client/schemas/objects.py +114 -85
  33. prefect/client/schemas/responses.py +19 -20
  34. prefect/client/schemas/schedules.py +136 -93
  35. prefect/client/subscriptions.py +30 -15
  36. prefect/client/utilities.py +46 -36
  37. prefect/concurrency/asyncio.py +6 -9
  38. prefect/concurrency/sync.py +35 -5
  39. prefect/context.py +40 -32
  40. prefect/deployments/flow_runs.py +6 -8
  41. prefect/deployments/runner.py +14 -14
  42. prefect/deployments/steps/core.py +3 -1
  43. prefect/deployments/steps/pull.py +60 -12
  44. prefect/docker/__init__.py +1 -1
  45. prefect/events/clients.py +55 -4
  46. prefect/events/filters.py +1 -1
  47. prefect/events/related.py +2 -1
  48. prefect/events/schemas/events.py +26 -21
  49. prefect/events/utilities.py +3 -2
  50. prefect/events/worker.py +8 -0
  51. prefect/filesystems.py +3 -3
  52. prefect/flow_engine.py +87 -87
  53. prefect/flow_runs.py +7 -5
  54. prefect/flows.py +218 -176
  55. prefect/logging/configuration.py +1 -1
  56. prefect/logging/highlighters.py +1 -2
  57. prefect/logging/loggers.py +30 -20
  58. prefect/main.py +17 -24
  59. prefect/results.py +43 -22
  60. prefect/runner/runner.py +43 -21
  61. prefect/runner/server.py +30 -32
  62. prefect/runner/storage.py +3 -3
  63. prefect/runner/submit.py +3 -6
  64. prefect/runner/utils.py +6 -6
  65. prefect/runtime/flow_run.py +7 -0
  66. prefect/serializers.py +28 -24
  67. prefect/settings/constants.py +2 -2
  68. prefect/settings/legacy.py +1 -1
  69. prefect/settings/models/experiments.py +5 -0
  70. prefect/settings/models/server/events.py +10 -0
  71. prefect/task_engine.py +87 -26
  72. prefect/task_runners.py +2 -2
  73. prefect/task_worker.py +43 -25
  74. prefect/tasks.py +148 -142
  75. prefect/telemetry/bootstrap.py +15 -2
  76. prefect/telemetry/instrumentation.py +1 -1
  77. prefect/telemetry/processors.py +10 -7
  78. prefect/telemetry/run_telemetry.py +231 -0
  79. prefect/transactions.py +14 -14
  80. prefect/types/__init__.py +5 -5
  81. prefect/utilities/_engine.py +96 -0
  82. prefect/utilities/annotations.py +25 -18
  83. prefect/utilities/asyncutils.py +126 -140
  84. prefect/utilities/callables.py +87 -78
  85. prefect/utilities/collections.py +278 -117
  86. prefect/utilities/compat.py +13 -21
  87. prefect/utilities/context.py +6 -5
  88. prefect/utilities/dispatch.py +23 -12
  89. prefect/utilities/dockerutils.py +33 -32
  90. prefect/utilities/engine.py +126 -239
  91. prefect/utilities/filesystem.py +18 -15
  92. prefect/utilities/hashing.py +10 -11
  93. prefect/utilities/importtools.py +40 -27
  94. prefect/utilities/math.py +9 -5
  95. prefect/utilities/names.py +3 -3
  96. prefect/utilities/processutils.py +121 -57
  97. prefect/utilities/pydantic.py +41 -36
  98. prefect/utilities/render_swagger.py +22 -12
  99. prefect/utilities/schema_tools/__init__.py +2 -1
  100. prefect/utilities/schema_tools/hydration.py +50 -43
  101. prefect/utilities/schema_tools/validation.py +52 -42
  102. prefect/utilities/services.py +13 -12
  103. prefect/utilities/templating.py +45 -45
  104. prefect/utilities/text.py +2 -1
  105. prefect/utilities/timeout.py +4 -4
  106. prefect/utilities/urls.py +9 -4
  107. prefect/utilities/visualization.py +46 -24
  108. prefect/variables.py +136 -27
  109. prefect/workers/base.py +15 -8
  110. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/METADATA +5 -2
  111. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/RECORD +114 -110
  112. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/LICENSE +0 -0
  113. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/WHEEL +0 -0
  114. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/top_level.txt +0 -0
prefect/client/utilities.py CHANGED
@@ -5,31 +5,32 @@ Utilities for working with clients.
 # This module must not import from `prefect.client` when it is imported to avoid
 # circular imports for decorators such as `inject_client` which are widely used.
 
+from collections.abc import Awaitable, Coroutine
 from functools import wraps
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Awaitable,
-    Callable,
-    Coroutine,
-    Optional,
-    Tuple,
-    TypeVar,
-    cast,
-)
-
-from typing_extensions import Concatenate, ParamSpec
+from typing import TYPE_CHECKING, Any, Callable, Optional, Union, overload
+
+from typing_extensions import Concatenate, ParamSpec, TypeGuard, TypeVar
 
 if TYPE_CHECKING:
-    from prefect.client.orchestration import PrefectClient
+    from prefect.client.orchestration import PrefectClient, SyncPrefectClient
 
 P = ParamSpec("P")
-R = TypeVar("R")
+R = TypeVar("R", infer_variance=True)
+
+
+def _current_async_client(
+    client: Union["PrefectClient", "SyncPrefectClient"],
+) -> TypeGuard["PrefectClient"]:
+    """Determine if the client is a PrefectClient instance attached to the current loop"""
+    from prefect._internal.concurrency.event_loop import get_running_loop
+
+    # Only a PrefectClient will have a _loop attribute that is the current loop
+    return getattr(client, "_loop", None) == get_running_loop()
 
 
 def get_or_create_client(
     client: Optional["PrefectClient"] = None,
-) -> Tuple["PrefectClient", bool]:
+) -> tuple["PrefectClient", bool]:
     """
     Returns provided client, infers a client from context if available, or creates a new client.
 
@@ -41,29 +42,22 @@ def get_or_create_client(
     """
     if client is not None:
         return client, True
-    from prefect._internal.concurrency.event_loop import get_running_loop
+
     from prefect.context import AsyncClientContext, FlowRunContext, TaskRunContext
 
     async_client_context = AsyncClientContext.get()
     flow_run_context = FlowRunContext.get()
     task_run_context = TaskRunContext.get()
 
-    if async_client_context and async_client_context.client._loop == get_running_loop():
-        return async_client_context.client, True
-    elif (
-        flow_run_context
-        and getattr(flow_run_context.client, "_loop", None) == get_running_loop()
-    ):
-        return flow_run_context.client, True
-    elif (
-        task_run_context
-        and getattr(task_run_context.client, "_loop", None) == get_running_loop()
-    ):
-        return task_run_context.client, True
-    else:
-        from prefect.client.orchestration import get_client as get_httpx_client
+    for context in (async_client_context, flow_run_context, task_run_context):
+        if context is None:
+            continue
+        if _current_async_client(context_client := context.client):
+            return context_client, True
 
-        return get_httpx_client(), False
+    from prefect.client.orchestration import get_client as get_httpx_client
+
+    return get_httpx_client(), False
 
 
 def client_injector(
@@ -77,9 +71,23 @@ def client_injector(
     return wrapper
 
 
+@overload
 def inject_client(
     fn: Callable[P, Coroutine[Any, Any, R]],
 ) -> Callable[P, Coroutine[Any, Any, R]]:
+    ...
+
+
+@overload
+def inject_client(
+    fn: Callable[P, R],
+) -> Callable[P, R]:
+    ...
+
+
+def inject_client(
+    fn: Callable[P, Union[Coroutine[Any, Any, R], R]],
+) -> Callable[P, Union[Coroutine[Any, Any, R], R]]:
     """
     Simple helper to provide a context managed client to an asynchronous function.
 
@@ -90,16 +98,18 @@ def inject_client(
 
     @wraps(fn)
     async def with_injected_client(*args: P.args, **kwargs: P.kwargs) -> R:
-        client = cast(Optional["PrefectClient"], kwargs.pop("client", None))
-        client, inferred = get_or_create_client(client)
+        given = kwargs.pop("client", None)
+        if TYPE_CHECKING:
+            assert given is None or isinstance(given, PrefectClient)
+        client, inferred = get_or_create_client(given)
         if not inferred:
            context = client
         else:
            from prefect.utilities.asyncutils import asyncnullcontext
 
-            context = asyncnullcontext()
+            context = asyncnullcontext(client)
        async with context as new_client:
-            kwargs.setdefault("client", new_client or client)
+            kwargs |= {"client": new_client}
            return await fn(*args, **kwargs)
 
    return with_injected_client
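
The two `@overload` signatures above only refine how type checkers see `inject_client`; at runtime it still wraps an async callable, pops any `client` keyword argument, and supplies a context-managed client when none was given. A minimal usage sketch (the decorated function and its body are illustrative, not part of the package):

```python
from typing import Optional

from prefect.client.orchestration import PrefectClient
from prefect.client.utilities import inject_client


@inject_client
async def count_flows(client: Optional[PrefectClient] = None) -> int:
    # If the caller passes no client, one is created and closed around this call;
    # a client inferred from the surrounding context is reused without being closed.
    flows = await client.read_flows()
    return len(flows)
```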
prefect/concurrency/asyncio.py CHANGED
@@ -17,7 +17,6 @@ except ImportError:
 from prefect.client.orchestration import get_client
 from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse
 from prefect.logging.loggers import get_run_logger
-from prefect.utilities.asyncutils import sync_compatible
 
 from .context import ConcurrencyContext
 from .events import (
@@ -79,7 +78,7 @@ async def concurrency(
 
     names = names if isinstance(names, list) else [names]
 
-    limits = await _acquire_concurrency_slots(
+    limits = await _aacquire_concurrency_slots(
         names,
         occupy,
         timeout_seconds=timeout_seconds,
@@ -95,7 +94,7 @@ async def concurrency(
     finally:
         occupancy_period = cast(Interval, (pendulum.now("UTC") - acquisition_time))
         try:
-            await _release_concurrency_slots(
+            await _arelease_concurrency_slots(
                 names, occupy, occupancy_period.total_seconds()
             )
         except anyio.get_cancelled_exc_class():
@@ -138,7 +137,7 @@ async def rate_limit(
 
     names = names if isinstance(names, list) else [names]
 
-    limits = await _acquire_concurrency_slots(
+    limits = await _aacquire_concurrency_slots(
         names,
         occupy,
         mode="rate_limit",
@@ -149,7 +148,6 @@ async def rate_limit(
     _emit_concurrency_acquisition_events(limits, occupy)
 
 
-@sync_compatible
 @deprecated_parameter(
     name="create_if_missing",
     start_date="Sep 2024",
@@ -157,10 +155,10 @@ async def rate_limit(
     when=lambda x: x is not None,
     help="Limits must be explicitly created before acquiring concurrency slots; see `strict` if you want to enforce this behavior.",
 )
-async def _acquire_concurrency_slots(
+async def _aacquire_concurrency_slots(
     names: List[str],
     slots: int,
-    mode: Union[Literal["concurrency"], Literal["rate_limit"]] = "concurrency",
+    mode: Literal["concurrency", "rate_limit"] = "concurrency",
     timeout_seconds: Optional[float] = None,
     create_if_missing: Optional[bool] = None,
     max_retries: Optional[int] = None,
@@ -199,8 +197,7 @@ async def _acquire_concurrency_slots(
     return retval
 
 
-@sync_compatible
-async def _release_concurrency_slots(
+async def _arelease_concurrency_slots(
     names: List[str], slots: int, occupancy_seconds: float
 ) -> List[MinimalConcurrencyLimitResponse]:
     async with get_client() as client:
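
The internal coroutines gain an `a` prefix (`_aacquire_concurrency_slots`, `_arelease_concurrency_slots`) and drop `@sync_compatible`, but the public async API is unchanged. A hedged sketch of the async context manager (the limit name and workload are illustrative; the limit is assumed to already exist):

```python
import asyncio

from prefect.concurrency.asyncio import concurrency


async def process_batch(items: list[str]) -> None:
    # Occupies one slot on the "database" global concurrency limit for the
    # duration of the block; the slot is released in the finally path shown above.
    async with concurrency("database", occupy=1):
        await asyncio.sleep(0.1)  # stand-in for real work
```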
prefect/concurrency/sync.py CHANGED
@@ -9,6 +9,9 @@ from typing import (
 )
 
 import pendulum
+from typing_extensions import Literal
+
+from prefect.utilities.asyncutils import run_coro_as_sync
 
 try:
     from pendulum import Interval
@@ -19,8 +22,8 @@ except ImportError:
 from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse
 
 from .asyncio import (
-    _acquire_concurrency_slots,
-    _release_concurrency_slots,
+    _aacquire_concurrency_slots,
+    _arelease_concurrency_slots,
 )
 from .events import (
     _emit_concurrency_acquisition_events,
@@ -30,6 +33,36 @@ from .events import (
 T = TypeVar("T")
 
 
+def _release_concurrency_slots(
+    names: List[str], slots: int, occupancy_seconds: float
+) -> List[MinimalConcurrencyLimitResponse]:
+    result = run_coro_as_sync(
+        _arelease_concurrency_slots(names, slots, occupancy_seconds)
+    )
+    if result is None:
+        raise RuntimeError("Failed to release concurrency slots")
+    return result
+
+
+def _acquire_concurrency_slots(
+    names: List[str],
+    slots: int,
+    mode: Literal["concurrency", "rate_limit"] = "concurrency",
+    timeout_seconds: Optional[float] = None,
+    create_if_missing: Optional[bool] = None,
+    max_retries: Optional[int] = None,
+    strict: bool = False,
+) -> List[MinimalConcurrencyLimitResponse]:
+    result = run_coro_as_sync(
+        _aacquire_concurrency_slots(
+            names, slots, mode, timeout_seconds, create_if_missing, max_retries, strict
+        )
+    )
+    if result is None:
+        raise RuntimeError("Failed to acquire concurrency slots")
+    return result
+
+
 @contextmanager
 def concurrency(
     names: Union[str, List[str]],
@@ -81,7 +114,6 @@ def concurrency(
         create_if_missing=create_if_missing,
         strict=strict,
         max_retries=max_retries,
-        _sync=True,
     )
     acquisition_time = pendulum.now("UTC")
     emitted_events = _emit_concurrency_acquisition_events(limits, occupy)
@@ -94,7 +126,6 @@ def concurrency(
            names,
            occupy,
            occupancy_period.total_seconds(),
-            _sync=True,
        )
        _emit_concurrency_release_events(limits, occupy, emitted_events)
 
@@ -134,6 +165,5 @@ def rate_limit(
         timeout_seconds=timeout_seconds,
         create_if_missing=create_if_missing,
         strict=strict,
-        _sync=True,
     )
     _emit_concurrency_acquisition_events(limits, occupy)
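
Instead of calling the async implementations with `_sync=True` (removed along with `@sync_compatible`), the sync module now defines its own `_acquire_concurrency_slots` / `_release_concurrency_slots` wrappers that drive the renamed coroutines through `run_coro_as_sync`. From a caller's perspective the sync API is unchanged; a hedged sketch (the limit names and the `write_rows` helper are illustrative):

```python
from prefect.concurrency.sync import concurrency, rate_limit


def ingest(rows: list[dict]) -> None:
    # Hold one slot on the "database" limit while writing; the slot is released
    # and release events are emitted when the block exits.
    with concurrency("database", occupy=1):
        write_rows(rows)  # hypothetical downstream call


def poll_api() -> None:
    # Blocks until a "rate_limit"-mode slot is available, then returns.
    rate_limit("external-api")
```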
prefect/context.py CHANGED
@@ -25,9 +25,7 @@ from typing import (
     Union,
 )
 
-import pendulum
 from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
-from pydantic_extra_types.pendulum_dt import DateTime
 from typing_extensions import Self
 
 import prefect.logging
@@ -44,12 +42,17 @@ from prefect.results import (
     get_default_persist_setting_for_tasks,
 )
 from prefect.settings import Profile, Settings
-from prefect.settings.legacy import _get_settings_fields
+from prefect.settings.legacy import (
+    _get_settings_fields,  # type: ignore[reportPrivateUsage]
+)
 from prefect.states import State
 from prefect.task_runners import TaskRunner
+from prefect.types import DateTime
 from prefect.utilities.services import start_client_metrics_server
 
 T = TypeVar("T")
+P = TypeVar("P")
+R = TypeVar("R")
 
 if TYPE_CHECKING:
     from prefect.flows import Flow
@@ -121,8 +124,8 @@ class ContextModel(BaseModel):
     """
 
     # The context variable for storing data must be defined by the child class
-    __var__: ContextVar
-    _token: Optional[Token] = PrivateAttr(None)
+    __var__: ContextVar[Self]
+    _token: Optional[Token[Self]] = PrivateAttr(None)
     model_config = ConfigDict(
         arbitrary_types_allowed=True,
         extra="forbid",
@@ -150,7 +153,7 @@ class ContextModel(BaseModel):
         return cls.__var__.get(None)
 
     def model_copy(
-        self: Self, *, update: Optional[Dict[str, Any]] = None, deep: bool = False
+        self: Self, *, update: Optional[Mapping[str, Any]] = None, deep: bool = False
     ):
         """
         Duplicate the context model, optionally choosing which fields to include, exclude, or change.
@@ -199,14 +202,14 @@ class SyncClientContext(ContextModel):
         assert c1 is ctx.client
     """
 
-    __var__ = ContextVar("sync-client-context")
+    __var__: ContextVar[Self] = ContextVar("sync-client-context")
     client: SyncPrefectClient
     _httpx_settings: Optional[dict[str, Any]] = PrivateAttr(None)
     _context_stack: int = PrivateAttr(0)
 
     def __init__(self, httpx_settings: Optional[dict[str, Any]] = None):
         super().__init__(
-            client=get_client(sync_client=True, httpx_settings=httpx_settings),
+            client=get_client(sync_client=True, httpx_settings=httpx_settings),  # type: ignore[reportCallIssue]
         )
         self._httpx_settings = httpx_settings
         self._context_stack = 0
@@ -220,11 +223,11 @@ class SyncClientContext(ContextModel):
         else:
             return self
 
-    def __exit__(self, *exc_info):
+    def __exit__(self, *exc_info: Any):
         self._context_stack -= 1
         if self._context_stack == 0:
-            self.client.__exit__(*exc_info)
-        return super().__exit__(*exc_info)
+            self.client.__exit__(*exc_info)  # type: ignore[reportUnknownMemberType]
+        return super().__exit__(*exc_info)  # type: ignore[reportUnknownMemberType]
 
     @classmethod
     @contextmanager
@@ -264,12 +267,12 @@ class AsyncClientContext(ContextModel):
 
     def __init__(self, httpx_settings: Optional[dict[str, Any]] = None):
         super().__init__(
-            client=get_client(sync_client=False, httpx_settings=httpx_settings),
+            client=get_client(sync_client=False, httpx_settings=httpx_settings),  # type: ignore[reportCallIssue]
        )
        self._httpx_settings = httpx_settings
        self._context_stack = 0
 
-    async def __aenter__(self):
+    async def __aenter__(self: Self) -> Self:
         self._context_stack += 1
         if self._context_stack == 1:
             await self.client.__aenter__()
@@ -278,11 +281,11 @@ class AsyncClientContext(ContextModel):
         else:
             return self
 
-    async def __aexit__(self, *exc_info):
+    async def __aexit__(self: Self, *exc_info: Any) -> None:
         self._context_stack -= 1
         if self._context_stack == 0:
-            await self.client.__aexit__(*exc_info)
-        return super().__exit__(*exc_info)
+            await self.client.__aexit__(*exc_info)  # type: ignore[reportUnknownMemberType]
+        return super().__exit__(*exc_info)  # type: ignore[reportUnknownMemberType]
 
     @classmethod
     @asynccontextmanager
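
Both client contexts keep the reference-counted `_context_stack`, so re-entering the same context reuses one client and only the outermost exit closes it. A hedged sketch of that behavior, assuming the `get_or_create` classmethod that the `@classmethod`/`@contextmanager` pair above defines:

```python
from prefect.context import SyncClientContext

# Nested get_or_create() calls share the same SyncPrefectClient; the client is
# closed only when the outermost context exits (_context_stack back to 0).
with SyncClientContext.get_or_create() as outer:
    with SyncClientContext.get_or_create() as inner:
        assert inner.client is outer.client
```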
@@ -305,19 +308,20 @@ class RunContext(ContextModel):
         client: The Prefect client instance being used for API communication
     """
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: Any, **kwargs: Any):
         super().__init__(*args, **kwargs)
 
         start_client_metrics_server()
 
-    start_time: DateTime = Field(default_factory=lambda: pendulum.now("UTC"))
+    start_time: DateTime = Field(default_factory=lambda: DateTime.now("UTC"))
     input_keyset: Optional[Dict[str, Dict[str, str]]] = None
     client: Union[PrefectClient, SyncPrefectClient]
 
-    def serialize(self):
+    def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
         return self.model_dump(
             include={"start_time", "input_keyset"},
             exclude_unset=True,
+            context={"include_secrets": include_secrets},
         )
 
 
@@ -336,9 +340,9 @@ class EngineContext(RunContext):
         flow_run_states: A list of states for flow runs created within this flow run
     """
 
-    flow: Optional["Flow"] = None
+    flow: Optional["Flow[Any, Any]"] = None
     flow_run: Optional[FlowRun] = None
-    task_runner: TaskRunner
+    task_runner: TaskRunner[Any]
     log_prints: bool = False
     parameters: Optional[Dict[str, Any]] = None
 
@@ -351,21 +355,21 @@ class EngineContext(RunContext):
     persist_result: bool = Field(default_factory=get_default_persist_setting)
 
     # Counter for task calls allowing unique
-    task_run_dynamic_keys: Dict[str, int] = Field(default_factory=dict)
+    task_run_dynamic_keys: Dict[str, Union[str, int]] = Field(default_factory=dict)
 
     # Counter for flow pauses
     observed_flow_pauses: Dict[str, int] = Field(default_factory=dict)
 
     # Tracking for result from task runs in this flow run for dependency tracking
     # Holds the ID of the object returned by the task run and task run state
-    task_run_results: Mapping[int, State] = Field(default_factory=dict)
+    task_run_results: dict[int, State] = Field(default_factory=dict)
 
     # Events worker to emit events
     events: Optional[EventsWorker] = None
 
-    __var__: ContextVar = ContextVar("flow_run")
+    __var__: ContextVar[Self] = ContextVar("flow_run")
 
-    def serialize(self):
+    def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
         return self.model_dump(
             include={
                 "flow_run",
@@ -378,6 +382,8 @@ class EngineContext(RunContext):
                 "persist_result",
             },
             exclude_unset=True,
+            serialize_as_any=True,
+            context={"include_secrets": include_secrets},
         )
 
 
@@ -394,7 +400,7 @@ class TaskRunContext(RunContext):
         task_run: The API metadata for this task run
     """
 
-    task: "Task"
+    task: "Task[Any, Any]"
     task_run: TaskRun
     log_prints: bool = False
     parameters: Dict[str, Any]
@@ -405,7 +411,7 @@ class TaskRunContext(RunContext):
 
     __var__ = ContextVar("task_run")
 
-    def serialize(self):
+    def serialize(self: Self, include_secrets: bool = True) -> Dict[str, Any]:
         return self.model_dump(
             include={
                 "task_run",
@@ -418,6 +424,8 @@ class TaskRunContext(RunContext):
                 "persist_result",
             },
             exclude_unset=True,
+            serialize_as_any=True,
+            context={"include_secrets": include_secrets},
         )
 
 
@@ -436,7 +444,7 @@ class TagsContext(ContextModel):
         # Return an empty `TagsContext` instead of `None` if no context exists
         return cls.__var__.get(TagsContext())
 
-    __var__: ContextVar = ContextVar("tags")
+    __var__: ContextVar[Self] = ContextVar("tags")
 
 
 class SettingsContext(ContextModel):
@@ -453,9 +461,9 @@ class SettingsContext(ContextModel):
     profile: Profile
     settings: Settings
 
-    __var__: ContextVar = ContextVar("settings")
+    __var__: ContextVar[Self] = ContextVar("settings")
 
-    def __hash__(self) -> int:
+    def __hash__(self: Self) -> int:
         return hash(self.settings)
 
     @classmethod
@@ -562,7 +570,7 @@ def tags(*new_tags: str) -> Generator[Set[str], None, None]:
 
 @contextmanager
 def use_profile(
-    profile: Union[Profile, str],
+    profile: Union[Profile, str, Any],
     override_environment_variables: bool = False,
     include_current_context: bool = True,
 ):
@@ -662,7 +670,7 @@ def root_settings_context():
     # an override in the `SettingsContext.get` method.
 
 
-GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context()
+GLOBAL_SETTINGS_CONTEXT: SettingsContext = root_settings_context()  # type: ignore[reportConstantRedefinition]
 
 
 # 2024-07-02: This surfaces an actionable error message for removed objects
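
All three run contexts now accept `include_secrets` in `serialize()` and dump nested models with `serialize_as_any=True`. A hedged sketch of reading and serializing the active context from inside a task (the task body is illustrative):

```python
from typing import Any

from prefect import task
from prefect.context import TaskRunContext


@task
def snapshot_context() -> dict[str, Any]:
    ctx = TaskRunContext.get()
    assert ctx is not None
    # Secrets can now be excluded from the serialized context payload.
    return ctx.serialize(include_secrets=False)
```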
prefect/deployments/flow_runs.py CHANGED
@@ -1,5 +1,5 @@
 from datetime import datetime
-from typing import TYPE_CHECKING, Iterable, Optional, Union
+from typing import TYPE_CHECKING, Any, Iterable, Optional, Union
 from uuid import UUID
 
 import anyio
@@ -35,7 +35,7 @@ logger = get_logger(__name__)
 async def run_deployment(
     name: Union[str, UUID],
     client: Optional["PrefectClient"] = None,
-    parameters: Optional[dict] = None,
+    parameters: Optional[dict[str, Any]] = None,
     scheduled_time: Optional[datetime] = None,
     flow_run_name: Optional[str] = None,
     timeout: Optional[float] = None,
@@ -44,7 +44,7 @@ async def run_deployment(
     idempotency_key: Optional[str] = None,
     work_queue_name: Optional[str] = None,
     as_subflow: Optional[bool] = True,
-    job_variables: Optional[dict] = None,
+    job_variables: Optional[dict[str, Any]] = None,
 ) -> "FlowRun":
     """
     Create a flow run for a deployment and return it after completion or a timeout.
@@ -113,10 +113,8 @@ async def run_deployment(
     task_run_ctx = TaskRunContext.get()
     if as_subflow and (flow_run_ctx or task_run_ctx):
         # TODO: this logic can likely be simplified by using `Task.create_run`
-        from prefect.utilities.engine import (
-            _dynamic_key_for_task_run,
-            collect_task_run_inputs,
-        )
+        from prefect.utilities._engine import dynamic_key_for_task_run
+        from prefect.utilities.engine import collect_task_run_inputs
 
         # This was called from a flow. Link the flow run as a subflow.
         task_inputs = {
@@ -143,7 +141,7 @@ async def run_deployment(
             else task_run_ctx.task_run.flow_run_id
         )
         dynamic_key = (
-            _dynamic_key_for_task_run(flow_run_ctx, dummy_task)
+            dynamic_key_for_task_run(flow_run_ctx, dummy_task)
             if flow_run_ctx
             else task_run_ctx.task_run.dynamic_key
         )
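
`run_deployment` now types `parameters` and `job_variables` as `dict[str, Any]` and pulls the dynamic-key helper from the new `prefect.utilities._engine` module; the calling convention is unchanged. A hedged sketch (the deployment name and values are illustrative):

```python
from prefect.deployments import run_deployment


async def trigger() -> None:
    flow_run = await run_deployment(
        name="my-flow/my-deployment",                   # illustrative name
        parameters={"limit": 10},                       # dict[str, Any]
        job_variables={"env": {"LOG_LEVEL": "DEBUG"}},  # dict[str, Any]
        timeout=0,  # return immediately instead of waiting for completion
    )
    print(flow_run.id)
```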
prefect/deployments/runner.py CHANGED
@@ -33,7 +33,7 @@ import importlib
 import tempfile
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Union
 from uuid import UUID
 
 from pydantic import (
@@ -160,7 +160,7 @@ class RunnerDeployment(BaseModel):
     paused: Optional[bool] = Field(
         default=None, description="Whether or not the deployment is paused."
     )
-    parameters: Dict[str, Any] = Field(default_factory=dict)
+    parameters: dict[str, Any] = Field(default_factory=dict)
     entrypoint: Optional[str] = Field(
         default=None,
         description=(
@@ -198,7 +198,7 @@ class RunnerDeployment(BaseModel):
             " the deployment is registered with a built runner."
         ),
     )
-    job_variables: Dict[str, Any] = Field(
+    job_variables: dict[str, Any] = Field(
         default_factory=dict,
         description=(
             "Job variables used to override the default values of a work pool"
@@ -280,7 +280,7 @@ class RunnerDeployment(BaseModel):
         async with get_client() as client:
             flow_id = await client.create_flow_from_name(self.flow_name)
 
-            create_payload = dict(
+            create_payload: dict[str, Any] = dict(
                 flow_id=flow_id,
                 name=self.name,
                 work_queue_name=self.work_queue_name,
@@ -428,7 +428,7 @@ class RunnerDeployment(BaseModel):
         else:
             return [create_deployment_schedule_create(schedule)]
 
-    def _set_defaults_from_flow(self, flow: "Flow"):
+    def _set_defaults_from_flow(self, flow: "Flow[..., Any]"):
         self._parameter_openapi_schema = parameter_schema(flow)
 
         if not self.version:
@@ -439,7 +439,7 @@ class RunnerDeployment(BaseModel):
     @classmethod
     def from_flow(
         cls,
-        flow: "Flow",
+        flow: "Flow[..., Any]",
         name: str,
         interval: Optional[
             Union[Iterable[Union[int, float, timedelta]], int, float, timedelta]
@@ -449,7 +449,7 @@ class RunnerDeployment(BaseModel):
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -457,7 +457,7 @@ class RunnerDeployment(BaseModel):
         enforce_parameter_schema: bool = True,
         work_pool_name: Optional[str] = None,
         work_queue_name: Optional[str] = None,
-        job_variables: Optional[Dict[str, Any]] = None,
+        job_variables: Optional[dict[str, Any]] = None,
         entrypoint_type: EntrypointType = EntrypointType.FILE_PATH,
     ) -> "RunnerDeployment":
         """
@@ -588,7 +588,7 @@ class RunnerDeployment(BaseModel):
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -596,7 +596,7 @@ class RunnerDeployment(BaseModel):
         enforce_parameter_schema: bool = True,
         work_pool_name: Optional[str] = None,
         work_queue_name: Optional[str] = None,
-        job_variables: Optional[Dict[str, Any]] = None,
+        job_variables: Optional[dict[str, Any]] = None,
     ) -> "RunnerDeployment":
         """
         Configure a deployment for a given flow located at a given entrypoint.
@@ -689,7 +689,7 @@ class RunnerDeployment(BaseModel):
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -697,7 +697,7 @@ class RunnerDeployment(BaseModel):
         enforce_parameter_schema: bool = True,
         work_pool_name: Optional[str] = None,
         work_queue_name: Optional[str] = None,
-        job_variables: Optional[Dict[str, Any]] = None,
+        job_variables: Optional[dict[str, Any]] = None,
     ):
         """
         Create a RunnerDeployment from a flow located at a given entrypoint and stored in a
@@ -945,8 +945,8 @@ async def deploy(
 
     console.print(f"Successfully pushed image {image.reference!r}", style="green")
 
-    deployment_exceptions = []
-    deployment_ids = []
+    deployment_exceptions: list[dict[str, Any]] = []
+    deployment_ids: list[UUID] = []
     image_ref = image.reference if image else None
     for deployment in track(
         deployments,
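
Across `RunnerDeployment`, `parameters` and `job_variables` move to lowercase `dict[str, Any]` and flow arguments are typed `Flow[..., Any]`; existing call sites are unaffected. A hedged sketch of building a deployment from a flow (the flow, names, and schedule are illustrative, and the `cron` argument is assumed from the constructor's scheduling options):

```python
from prefect import flow
from prefect.deployments.runner import RunnerDeployment


@flow
def etl(limit: int = 100) -> None:
    ...


deployment = RunnerDeployment.from_flow(
    flow=etl,                              # typed as Flow[..., Any] in 3.1.7
    name="nightly-etl",                    # illustrative deployment name
    parameters={"limit": 500},             # dict[str, Any]
    job_variables={"env": {"TZ": "UTC"}},  # dict[str, Any]
    cron="0 2 * * *",                      # assumed schedule parameter
)
```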