prefect-client 3.1.9__py3-none-any.whl → 3.1.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. prefect/_experimental/lineage.py +7 -8
  2. prefect/_internal/_logging.py +15 -3
  3. prefect/_internal/compatibility/async_dispatch.py +22 -16
  4. prefect/_internal/compatibility/deprecated.py +42 -18
  5. prefect/_internal/compatibility/migration.py +2 -2
  6. prefect/_internal/concurrency/inspection.py +12 -14
  7. prefect/_internal/concurrency/primitives.py +2 -2
  8. prefect/_internal/concurrency/services.py +154 -80
  9. prefect/_internal/concurrency/waiters.py +13 -9
  10. prefect/_internal/pydantic/annotations/pendulum.py +7 -7
  11. prefect/_internal/pytz.py +4 -3
  12. prefect/_internal/retries.py +10 -5
  13. prefect/_internal/schemas/bases.py +19 -10
  14. prefect/_internal/schemas/validators.py +227 -388
  15. prefect/_version.py +3 -3
  16. prefect/artifacts.py +61 -74
  17. prefect/automations.py +27 -7
  18. prefect/blocks/core.py +3 -3
  19. prefect/client/{orchestration.py → orchestration/__init__.py} +38 -701
  20. prefect/client/orchestration/_artifacts/__init__.py +0 -0
  21. prefect/client/orchestration/_artifacts/client.py +239 -0
  22. prefect/client/orchestration/_concurrency_limits/__init__.py +0 -0
  23. prefect/client/orchestration/_concurrency_limits/client.py +762 -0
  24. prefect/client/orchestration/_logs/__init__.py +0 -0
  25. prefect/client/orchestration/_logs/client.py +95 -0
  26. prefect/client/orchestration/_variables/__init__.py +0 -0
  27. prefect/client/orchestration/_variables/client.py +157 -0
  28. prefect/client/orchestration/base.py +46 -0
  29. prefect/client/orchestration/routes.py +145 -0
  30. prefect/client/schemas/actions.py +2 -2
  31. prefect/client/schemas/filters.py +5 -0
  32. prefect/client/schemas/objects.py +3 -10
  33. prefect/client/schemas/schedules.py +22 -10
  34. prefect/concurrency/_asyncio.py +87 -0
  35. prefect/concurrency/{events.py → _events.py} +10 -10
  36. prefect/concurrency/asyncio.py +20 -104
  37. prefect/concurrency/context.py +6 -4
  38. prefect/concurrency/services.py +26 -74
  39. prefect/concurrency/sync.py +23 -44
  40. prefect/concurrency/v1/_asyncio.py +63 -0
  41. prefect/concurrency/v1/{events.py → _events.py} +13 -15
  42. prefect/concurrency/v1/asyncio.py +27 -80
  43. prefect/concurrency/v1/context.py +6 -4
  44. prefect/concurrency/v1/services.py +33 -79
  45. prefect/concurrency/v1/sync.py +18 -37
  46. prefect/context.py +66 -70
  47. prefect/deployments/base.py +4 -144
  48. prefect/deployments/flow_runs.py +12 -2
  49. prefect/deployments/runner.py +11 -3
  50. prefect/deployments/steps/pull.py +13 -0
  51. prefect/events/clients.py +7 -1
  52. prefect/events/schemas/events.py +3 -2
  53. prefect/flow_engine.py +54 -47
  54. prefect/flows.py +2 -1
  55. prefect/futures.py +42 -27
  56. prefect/input/run_input.py +2 -1
  57. prefect/locking/filesystem.py +8 -7
  58. prefect/locking/memory.py +5 -3
  59. prefect/locking/protocol.py +1 -1
  60. prefect/main.py +1 -3
  61. prefect/plugins.py +12 -10
  62. prefect/results.py +3 -308
  63. prefect/runner/storage.py +87 -21
  64. prefect/serializers.py +32 -25
  65. prefect/settings/legacy.py +4 -4
  66. prefect/settings/models/api.py +3 -3
  67. prefect/settings/models/cli.py +3 -3
  68. prefect/settings/models/client.py +5 -3
  69. prefect/settings/models/cloud.py +3 -3
  70. prefect/settings/models/deployments.py +3 -3
  71. prefect/settings/models/experiments.py +4 -2
  72. prefect/settings/models/flows.py +3 -3
  73. prefect/settings/models/internal.py +4 -2
  74. prefect/settings/models/logging.py +4 -3
  75. prefect/settings/models/results.py +3 -3
  76. prefect/settings/models/root.py +3 -2
  77. prefect/settings/models/runner.py +4 -4
  78. prefect/settings/models/server/api.py +3 -3
  79. prefect/settings/models/server/database.py +11 -4
  80. prefect/settings/models/server/deployments.py +6 -2
  81. prefect/settings/models/server/ephemeral.py +4 -2
  82. prefect/settings/models/server/events.py +3 -2
  83. prefect/settings/models/server/flow_run_graph.py +6 -2
  84. prefect/settings/models/server/root.py +3 -3
  85. prefect/settings/models/server/services.py +26 -11
  86. prefect/settings/models/server/tasks.py +6 -3
  87. prefect/settings/models/server/ui.py +3 -3
  88. prefect/settings/models/tasks.py +5 -5
  89. prefect/settings/models/testing.py +3 -3
  90. prefect/settings/models/worker.py +5 -3
  91. prefect/settings/profiles.py +15 -2
  92. prefect/states.py +4 -7
  93. prefect/task_engine.py +54 -75
  94. prefect/tasks.py +84 -32
  95. prefect/telemetry/processors.py +6 -6
  96. prefect/telemetry/run_telemetry.py +13 -8
  97. prefect/telemetry/services.py +32 -31
  98. prefect/transactions.py +4 -15
  99. prefect/utilities/_git.py +34 -0
  100. prefect/utilities/asyncutils.py +1 -1
  101. prefect/utilities/engine.py +3 -19
  102. prefect/utilities/generics.py +18 -0
  103. prefect/workers/__init__.py +2 -0
  104. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/METADATA +1 -1
  105. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/RECORD +108 -99
  106. prefect/records/__init__.py +0 -1
  107. prefect/records/base.py +0 -235
  108. prefect/records/filesystem.py +0 -213
  109. prefect/records/memory.py +0 -184
  110. prefect/records/result_store.py +0 -70
  111. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/LICENSE +0 -0
  112. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/WHEEL +0 -0
  113. {prefect_client-3.1.9.dist-info → prefect_client-3.1.11.dist-info}/top_level.txt +0 -0
prefect/events/clients.py CHANGED
@@ -16,6 +16,7 @@ from typing import (
     cast,
 )
 from urllib.parse import urlparse
+from urllib.request import proxy_bypass
 from uuid import UUID
 
 import orjson
@@ -95,6 +96,9 @@ class WebsocketProxyConnect(Connect):
         u = urlparse(uri)
         host = u.hostname
 
+        if not host:
+            raise ValueError(f"Invalid URI {uri}, no hostname found")
+
         if u.scheme == "ws":
             port = u.port or 80
             proxy_url = os.environ.get("HTTP_PROXY")
@@ -107,7 +111,9 @@ class WebsocketProxyConnect(Connect):
                 "Unsupported scheme %s. Expected 'ws' or 'wss'. " % u.scheme
             )
 
-        self._proxy = Proxy.from_url(proxy_url) if proxy_url else None
+        self._proxy = (
+            Proxy.from_url(proxy_url) if proxy_url and not proxy_bypass(host) else None
+        )
         self._host = host
         self._port = port
 
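Note on the change above: the websocket proxy is now skipped for hosts that match the standard proxy-bypass rules. The sketch below is illustrative only (not Prefect code) and shows how `urllib.request.proxy_bypass` consults the `no_proxy`/`NO_PROXY` environment settings on most platforms:

    import os
    from urllib.request import proxy_bypass

    os.environ["HTTP_PROXY"] = "http://proxy.internal:3128"  # placeholder proxy
    os.environ["no_proxy"] = "localhost,.example.com"

    for host in ("localhost", "api.example.com", "events.prefect.cloud"):
        # proxy_bypass() is truthy when the host matches a no_proxy rule,
        # in which case no Proxy object should be created for it.
        use_proxy = bool(os.environ.get("HTTP_PROXY")) and not proxy_bypass(host)
        print(f"{host}: {'via proxy' if use_proxy else 'direct connection'}")
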
prefect/events/schemas/events.py CHANGED
@@ -2,6 +2,7 @@ import copy
 from collections import defaultdict
 from typing import (
     Any,
+    ClassVar,
     Dict,
     Iterable,
     List,
@@ -108,7 +109,7 @@ def _validate_related_resources(value) -> List:
 class Event(PrefectBaseModel):
     """The client-side view of an event that has happened to a Resource"""
 
-    model_config = ConfigDict(extra="ignore")
+    model_config: ClassVar[ConfigDict] = ConfigDict(extra="ignore")
 
     occurred: DateTime = Field(
         default_factory=lambda: DateTime.now("UTC"),
@@ -177,7 +178,7 @@ class ReceivedEvent(Event):
     """The server-side view of an event that has happened to a Resource after it has
     been received by the server"""
 
-    model_config = ConfigDict(from_attributes=True)
+    model_config: ClassVar[ConfigDict] = ConfigDict(from_attributes=True)
 
     received: DateTime = Field(
         ...,
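The `ClassVar[ConfigDict]` annotations above are a typing refinement rather than a behavior change. A minimal, standalone sketch of the pattern (not the actual Prefect model):

    from typing import ClassVar

    from pydantic import BaseModel, ConfigDict

    class ExampleEvent(BaseModel):
        # Annotating model_config as a ClassVar marks it as class-level
        # configuration rather than a model field, which satisfies strict
        # type checkers without changing runtime behavior.
        model_config: ClassVar[ConfigDict] = ConfigDict(extra="ignore")

        name: str

    # Unknown keys are ignored instead of raising a validation error.
    print(ExampleEvent.model_validate({"name": "flow-run.completed", "extra_key": 1}))
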
prefect/flow_engine.py CHANGED
@@ -55,7 +55,6 @@ from prefect.logging.loggers import (
     patch_print,
 )
 from prefect.results import (
-    BaseResult,
     ResultStore,
     get_result_store,
     should_persist_result,
@@ -307,10 +306,7 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):
         if self._return_value is not NotSet and not isinstance(
             self._return_value, State
         ):
-            if isinstance(self._return_value, BaseResult):
-                _result = self._return_value.get()
-            else:
-                _result = self._return_value
+            _result = self._return_value
 
             if asyncio.iscoroutine(_result):
                 # getting the value for a BaseResult may return an awaitable
@@ -490,24 +486,13 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):
         ):
             return subflow_run
 
-        flow_run = client.create_flow_run(
+        return client.create_flow_run(
             flow=self.flow,
             parameters=self.flow.serialize_parameters(parameters),
             state=Pending(),
             parent_task_run_id=getattr(parent_task_run, "id", None),
             tags=TagsContext.get().current_tags,
         )
-        if flow_run_ctx:
-            parent_logger = get_run_logger(flow_run_ctx)
-            parent_logger.info(
-                f"Created subflow run {flow_run.name!r} for flow {self.flow.name!r}"
-            )
-        else:
-            self.logger.info(
-                f"Created flow run {flow_run.name!r} for flow {self.flow.name!r}"
-            )
-
-        return flow_run
 
     def call_hooks(self, state: Optional[State] = None):
         if state is None:
@@ -606,6 +591,7 @@ class FlowRunEngine(BaseFlowRunEngine[P, R]):
             stack.enter_context(ConcurrencyContext())
 
             # set the logger to the flow run logger
+
             self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
 
             # update the flow run name if necessary
@@ -616,12 +602,32 @@
                 self.client.set_flow_run_name(
                     flow_run_id=self.flow_run.id, name=flow_run_name
                 )
+
                 self.logger.extra["flow_run_name"] = flow_run_name
                 self.logger.debug(
                     f"Renamed flow run {self.flow_run.name!r} to {flow_run_name!r}"
                 )
                 self.flow_run.name = flow_run_name
                 self._flow_run_name_set = True
+
+                self._telemetry.update_run_name(name=flow_run_name)
+
+            if self.flow_run.parent_task_run_id:
+                _logger = get_run_logger(FlowRunContext.get())
+                run_type = "subflow"
+            else:
+                _logger = self.logger
+                run_type = "flow"
+
+            _logger.info(
+                f"Beginning {run_type} run {self.flow_run.name!r} for flow {self.flow.name!r}"
+            )
+
+            if flow_run_url := url_for(self.flow_run):
+                self.logger.info(
+                    f"View at {flow_run_url}", extra={"send_to_api": False}
+                )
+
             yield
 
     @contextmanager
@@ -635,12 +641,6 @@
 
         if not self.flow_run:
             self.flow_run = self.create_flow_run(self.client)
-            flow_run_url = url_for(self.flow_run)
-
-            if flow_run_url:
-                self.logger.info(
-                    f"View at {flow_run_url}", extra={"send_to_api": False}
-                )
         else:
             # Update the empirical policy to match the flow if it is not set
             if self.flow_run.empirical_policy.retry_delay is None:
@@ -658,7 +658,6 @@
             )
 
         self._telemetry.start_span(
-            name=self.flow.name,
             run=self.flow_run,
             client=self.client,
             parameters=self.parameters,
@@ -705,9 +704,11 @@
     @contextmanager
     def start(self) -> Generator[None, None, None]:
         with self.initialize_run():
-            with trace.use_span(
-                self._telemetry.span
-            ) if self._telemetry.span else nullcontext():
+            with (
+                trace.use_span(self._telemetry.span)
+                if self._telemetry.span
+                else nullcontext()
+            ):
                 self.begin_run()
 
                 if self.state.is_running():
@@ -870,10 +871,7 @@ class AsyncFlowRunEngine(BaseFlowRunEngine[P, R]):
         if self._return_value is not NotSet and not isinstance(
             self._return_value, State
         ):
-            if isinstance(self._return_value, BaseResult):
-                _result = self._return_value.get()
-            else:
-                _result = self._return_value
+            _result = self._return_value
 
             if asyncio.iscoroutine(_result):
                 # getting the value for a BaseResult may return an awaitable
@@ -1052,24 +1050,13 @@
         ):
             return subflow_run
 
-        flow_run = await client.create_flow_run(
+        return await client.create_flow_run(
             flow=self.flow,
             parameters=self.flow.serialize_parameters(parameters),
             state=Pending(),
             parent_task_run_id=getattr(parent_task_run, "id", None),
             tags=TagsContext.get().current_tags,
         )
-        if flow_run_ctx:
-            parent_logger = get_run_logger(flow_run_ctx)
-            parent_logger.info(
-                f"Created subflow run {flow_run.name!r} for flow {self.flow.name!r}"
-            )
-        else:
-            self.logger.info(
-                f"Created flow run {flow_run.name!r} for flow {self.flow.name!r}"
-            )
-
-        return flow_run
 
     async def call_hooks(self, state: Optional[State] = None):
         if state is None:
@@ -1171,6 +1158,7 @@
             self.logger = flow_run_logger(flow_run=self.flow_run, flow=self.flow)
 
             # update the flow run name if necessary
+
             if not self._flow_run_name_set and self.flow.flow_run_name:
                 flow_run_name = resolve_custom_flow_run_name(
                     flow=self.flow, parameters=self.parameters
@@ -1184,6 +1172,24 @@
                 )
                 self.flow_run.name = flow_run_name
                 self._flow_run_name_set = True
+
+                self._telemetry.update_run_name(name=flow_run_name)
+            if self.flow_run.parent_task_run_id:
+                _logger = get_run_logger(FlowRunContext.get())
+                run_type = "subflow"
+            else:
+                _logger = self.logger
+                run_type = "flow"
+
+            _logger.info(
+                f"Beginning {run_type} run {self.flow_run.name!r} for flow {self.flow.name!r}"
+            )
+
+            if flow_run_url := url_for(self.flow_run):
+                self.logger.info(
+                    f"View at {flow_run_url}", extra={"send_to_api": False}
+                )
+
             yield
 
     @asynccontextmanager
@@ -1220,7 +1226,6 @@
             )
 
         await self._telemetry.async_start_span(
-            name=self.flow.name,
             run=self.flow_run,
             client=self.client,
             parameters=self.parameters,
@@ -1267,9 +1272,11 @@
     @asynccontextmanager
     async def start(self) -> AsyncGenerator[None, None]:
         async with self.initialize_run():
-            with trace.use_span(
-                self._telemetry.span
-            ) if self._telemetry.span else nullcontext():
+            with (
+                trace.use_span(self._telemetry.span)
+                if self._telemetry.span
+                else nullcontext()
+            ):
                 await self.begin_run()
 
                 if self.state.is_running():
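The `start()` hunks above only reformat the conditional context manager into the parenthesized `with` form. A standalone sketch of the same pattern, with a stand-in for `trace.use_span` (names here are illustrative):

    from contextlib import contextmanager, nullcontext

    @contextmanager
    def use_span(name: str):
        # Stand-in for trace.use_span(): prints instead of recording a span.
        print(f"span start: {name}")
        try:
            yield
        finally:
            print(f"span end: {name}")

    span = "flow-run"  # set to None to simulate telemetry being disabled

    # Parentheses let the conditional expression span multiple lines; when no
    # span is active, nullcontext() acts as a no-op context manager.
    with (
        use_span(span)
        if span
        else nullcontext()
    ):
        print("running flow body")
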
prefect/flows.py CHANGED
@@ -37,7 +37,6 @@ from typing import (
 from uuid import UUID
 
 import pydantic
-from fastapi.encoders import jsonable_encoder
 from pydantic.v1 import BaseModel as V1BaseModel
 from pydantic.v1.decorator import ValidatedFunction as V1ValidatedFunction
 from pydantic.v1.errors import ConfigError  # TODO
@@ -613,6 +612,8 @@ class Flow(Generic[P, R]):
                 serialized_parameters[key] = f"<{type(value).__name__}>"
                 continue
             try:
+                from fastapi.encoders import jsonable_encoder
+
                 serialized_parameters[key] = jsonable_encoder(value)
             except (TypeError, ValueError):
                 logger.debug(
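Moving the `jsonable_encoder` import into the `try` block defers loading FastAPI until parameters are actually serialized. A generic sketch of the deferred-import pattern (the fallback below is illustrative, not Prefect's behavior):

    from typing import Any

    def serialize_value(value: Any) -> Any:
        # Importing inside the function keeps the heavy dependency off the
        # module import path; Python caches it in sys.modules after the
        # first call, so later calls pay no extra cost.
        try:
            from fastapi.encoders import jsonable_encoder
        except ImportError:
            return repr(value)  # fall back if the optional dependency is absent
        return jsonable_encoder(value)
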
prefect/futures.py CHANGED
@@ -1,17 +1,15 @@
 import abc
 import asyncio
-import collections
 import concurrent.futures
 import threading
 import uuid
 from collections.abc import Generator, Iterator
 from functools import partial
-from typing import Any, Callable, Generic, List, Optional, Set, Union, cast
+from typing import Any, Callable, Generic, Optional, Union
 
-from typing_extensions import TypeVar
+from typing_extensions import NamedTuple, Self, TypeVar
 
 from prefect.client.orchestration import get_client
-from prefect.client.schemas.objects import TaskRun
 from prefect.exceptions import ObjectNotFound
 from prefect.logging.loggers import get_logger, get_run_logger
 from prefect.states import Pending, State
@@ -50,7 +48,7 @@ class PrefectFuture(abc.ABC, Generic[R]):
             return self._final_state
         client = get_client(sync_client=True)
         try:
-            task_run = cast(TaskRun, client.read_task_run(task_run_id=self.task_run_id))
+            task_run = client.read_task_run(task_run_id=self.task_run_id)
         except ObjectNotFound:
             # We'll be optimistic and assume this task will eventually start
             # TODO: Consider using task run events to wait for the task to start
@@ -92,7 +90,7 @@
         """
 
     @abc.abstractmethod
-    def add_done_callback(self, fn):
+    def add_done_callback(self, fn: Callable[["PrefectFuture[R]"], None]):
         """
         Add a callback to be run when the future completes or is cancelled.
 
@@ -102,13 +100,17 @@
         ...
 
 
-class PrefectWrappedFuture(PrefectFuture, abc.ABC, Generic[R, F]):
+class PrefectWrappedFuture(PrefectFuture[R], abc.ABC, Generic[R, F]):
     """
     A Prefect future that wraps another future object.
+
+    Type Parameters:
+        R: The return type of the future
+        F: The type of the wrapped future
     """
 
     def __init__(self, task_run_id: uuid.UUID, wrapped_future: F):
-        self._wrapped_future = wrapped_future
+        self._wrapped_future: F = wrapped_future
         super().__init__(task_run_id)
 
     @property
@@ -116,10 +118,11 @@ class PrefectWrappedFuture(PrefectFuture, abc.ABC, Generic[R, F]):
         """The underlying future object wrapped by this Prefect future"""
         return self._wrapped_future
 
-    def add_done_callback(self, fn: Callable[[PrefectFuture[R]], None]):
+    def add_done_callback(self, fn: Callable[[PrefectFuture[R]], None]) -> None:
+        """Add a callback to be executed when the future completes."""
         if not self._final_state:
 
-            def call_with_self(future):
+            def call_with_self(future: F):
                 """Call the callback with self as the argument, this is necessary to ensure we remove the future from the pending set"""
                 fn(self)
 
@@ -128,7 +131,7 @@
             fn(self)
 
 
-class PrefectConcurrentFuture(PrefectWrappedFuture[R, concurrent.futures.Future]):
+class PrefectConcurrentFuture(PrefectWrappedFuture[R, concurrent.futures.Future[R]]):
     """
     A Prefect future that wraps a concurrent.futures.Future. This future is used
     when the task run is submitted to a ThreadPoolExecutor.
@@ -193,7 +196,7 @@ class PrefectDistributedFuture(PrefectFuture[R]):
     any task run scheduled in Prefect's API.
     """
 
-    done_callbacks: List[Callable[[PrefectFuture[R]], None]] = []
+    done_callbacks: list[Callable[[PrefectFuture[R]], None]] = []
     waiter = None
 
     def wait(self, timeout: Optional[float] = None) -> None:
@@ -270,7 +273,7 @@
             return
         TaskRunWaiter.add_done_callback(self._task_run_id, partial(fn, self))
 
-    def __eq__(self, other):
+    def __eq__(self, other: Any) -> bool:
         if not isinstance(other, PrefectDistributedFuture):
             return False
         return self.task_run_id == other.task_run_id
@@ -279,7 +282,7 @@
         return hash(self.task_run_id)
 
 
-class PrefectFutureList(list, Iterator, Generic[F]):
+class PrefectFutureList(list[PrefectFuture[R]], Iterator[PrefectFuture[R]]):
     """
     A list of Prefect futures.
 
@@ -298,10 +301,10 @@
         wait(self, timeout=timeout)
 
     def result(
-        self: "PrefectFutureList[R]",
+        self: Self,
         timeout: Optional[float] = None,
         raise_on_failure: bool = True,
-    ) -> List[R]:
+    ) -> list[R]:
         """
         Get the results of all task runs associated with the futures in the list.
 
@@ -331,21 +334,22 @@
 
 
 def as_completed(
-    futures: List[PrefectFuture[R]], timeout: Optional[float] = None
+    futures: list[PrefectFuture[R]], timeout: Optional[float] = None
 ) -> Generator[PrefectFuture[R], None]:
-    unique_futures: Set[PrefectFuture[R]] = set(futures)
+    unique_futures: set[PrefectFuture[R]] = set(futures)
     total_futures = len(unique_futures)
+    pending = unique_futures
     try:
         with timeout_context(timeout):
-            done = {f for f in unique_futures if f._final_state}
+            done = {f for f in unique_futures if f._final_state}  # type: ignore[privateUsage]
             pending = unique_futures - done
             yield from done
 
             finished_event = threading.Event()
             finished_lock = threading.Lock()
-            finished_futures = []
+            finished_futures: list[PrefectFuture[R]] = []
 
-            def add_to_done(future):
+            def add_to_done(future: PrefectFuture[R]):
                 with finished_lock:
                     finished_futures.append(future)
                     finished_event.set()
@@ -370,10 +374,19 @@ def as_completed(
     )
 
 
-DoneAndNotDoneFutures = collections.namedtuple("DoneAndNotDoneFutures", "done not_done")
+class DoneAndNotDoneFutures(NamedTuple, Generic[R]):
+    """A named 2-tuple of sets.
+
+    multiple inheritance supported in 3.11+, use typing_extensions.NamedTuple
+    """
+
+    done: set[PrefectFuture[R]]
+    not_done: set[PrefectFuture[R]]
 
 
-def wait(futures: List[PrefectFuture[R]], timeout=None) -> DoneAndNotDoneFutures:
+def wait(
+    futures: list[PrefectFuture[R]], timeout: Optional[float] = None
+) -> DoneAndNotDoneFutures[R]:
     """
     Wait for the futures in the given sequence to complete.
 
@@ -431,9 +444,11 @@ def resolve_futures_to_states(
 
     Unsupported object types will be returned without modification.
     """
-    futures: Set[PrefectFuture[R]] = set()
+    futures: set[PrefectFuture[R]] = set()
 
-    def _collect_futures(futures, expr, context):
+    def _collect_futures(
+        futures: set[PrefectFuture[R]], expr: Any, context: Any
+    ) -> Union[PrefectFuture[R], Any]:
         # Expressions inside quotes should not be traversed
         if isinstance(context.get("annotation"), quote):
             raise StopVisiting()
@@ -455,14 +470,14 @@
         return expr
 
     # Get final states for each future
-    states = []
+    states: list[State] = []
     for future in futures:
        future.wait()
        states.append(future.state)
 
     states_by_future = dict(zip(futures, states))
 
-    def replace_futures_with_states(expr, context):
+    def replace_futures_with_states(expr: Any, context: Any) -> Any:
         # Expressions inside quotes should not be modified
         if isinstance(context.get("annotation"), quote):
             raise StopVisiting()
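The `DoneAndNotDoneFutures` change above replaces `collections.namedtuple` with a typed, generic `NamedTuple`. A simplified, standalone sketch of that pattern:

    from typing import Generic, TypeVar

    # typing_extensions.NamedTuple supports combining NamedTuple with Generic
    # on Python versions before 3.11; on 3.11+ typing.NamedTuple also works.
    from typing_extensions import NamedTuple

    T = TypeVar("T")

    class DoneAndNotDone(NamedTuple, Generic[T]):
        done: set[T]
        not_done: set[T]

    result: DoneAndNotDone[int] = DoneAndNotDone(done={1, 2}, not_done={3})
    print(result.done, result.not_done)
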
prefect/input/run_input.py CHANGED
@@ -64,6 +64,7 @@ from inspect import isclass
 from typing import (
     TYPE_CHECKING,
     Any,
+    ClassVar,
     Dict,
     Generic,
     Literal,
@@ -144,7 +145,7 @@ class RunInputMetadata(pydantic.BaseModel):
 
 
 class RunInput(pydantic.BaseModel):
-    model_config = ConfigDict(extra="forbid")
+    model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
 
     _description: Optional[str] = pydantic.PrivateAttr(default=None)
     _metadata: RunInputMetadata = pydantic.PrivateAttr()
prefect/locking/filesystem.py CHANGED
@@ -1,6 +1,7 @@
 import time
+from logging import Logger
 from pathlib import Path
-from typing import Dict, Optional
+from typing import Optional
 
 import anyio
 import pendulum
@@ -11,7 +12,7 @@ from prefect.logging.loggers import get_logger
 
 from .protocol import LockManager
 
-logger = get_logger(__name__)
+logger: Logger = get_logger(__name__)
 
 
 class _LockInfo(TypedDict):
@@ -37,11 +38,11 @@ class FileSystemLockManager(LockManager):
         lock_files_directory: the directory where lock files are stored
     """
 
-    def __init__(self, lock_files_directory: Path):
-        self.lock_files_directory = lock_files_directory.expanduser().resolve()
-        self._locks: Dict[str, _LockInfo] = {}
+    def __init__(self, lock_files_directory: Path) -> None:
+        self.lock_files_directory: Path = lock_files_directory.expanduser().resolve()
+        self._locks: dict[str, _LockInfo] = {}
 
-    def _ensure_lock_files_directory_exists(self):
+    def _ensure_lock_files_directory_exists(self) -> None:
         self.lock_files_directory.mkdir(parents=True, exist_ok=True)
 
     def _lock_path_for_key(self, key: str) -> Path:
@@ -49,7 +50,7 @@ class FileSystemLockManager(LockManager):
             return lock_info["path"]
         return self.lock_files_directory.joinpath(key).with_suffix(".lock")
 
-    def _get_lock_info(self, key: str, use_cache=True) -> Optional[_LockInfo]:
+    def _get_lock_info(self, key: str, use_cache: bool = True) -> Optional[_LockInfo]:
         if use_cache:
             if (lock_info := self._locks.get(key)) is not None:
                 return lock_info
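The lock manager above stores `_LockInfo` entries (a `TypedDict`) in a plain dict and looks them up with the walrus operator. A standalone sketch of that lookup pattern (field names here are illustrative, not the real `_LockInfo`):

    from pathlib import Path
    from typing import Optional, TypedDict

    class LockInfo(TypedDict):
        holder: str
        path: Path

    _locks: dict[str, LockInfo] = {}

    def get_lock_info(key: str, use_cache: bool = True) -> Optional[LockInfo]:
        # The walrus operator fetches and tests the cached entry in one step.
        if use_cache and (info := _locks.get(key)) is not None:
            return info
        return None  # the real manager would fall back to reading the lock file
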
prefect/locking/memory.py CHANGED
@@ -1,6 +1,8 @@
 import asyncio
 import threading
-from typing import Dict, Optional, TypedDict
+from typing import Any, Optional, TypedDict
+
+from typing_extensions import Self
 
 from .protocol import LockManager
 
@@ -30,14 +32,14 @@ class MemoryLockManager(LockManager):
 
     _instance = None
 
-    def __new__(cls, *args, **kwargs):
+    def __new__(cls, *args: Any, **kwargs: Any) -> Self:
         if cls._instance is None:
             cls._instance = super().__new__(cls)
         return cls._instance
 
     def __init__(self):
         self._locks_dict_lock = threading.Lock()
-        self._locks: Dict[str, _LockInfo] = {}
+        self._locks: dict[str, _LockInfo] = {}
 
     def _expire_lock(self, key: str):
         """
prefect/locking/protocol.py CHANGED
@@ -57,7 +57,7 @@ class LockManager(Protocol):
         """
         ...
 
-    def release_lock(self, key: str, holder: str):
+    def release_lock(self, key: str, holder: str) -> None:
         """
         Releases the lock on the corresponding transaction record.
 
prefect/main.py CHANGED
@@ -1,6 +1,5 @@
 # Import user-facing API
 from typing import Any
-
 from prefect.deployments import deploy
 from prefect.states import State
 from prefect.logging import get_run_logger
@@ -9,7 +8,7 @@ from prefect.transactions import Transaction
 from prefect.tasks import task, Task
 from prefect.context import tags
 from prefect.utilities.annotations import unmapped, allow_failure
-from prefect.results import BaseResult, ResultRecordMetadata
+from prefect.results import ResultRecordMetadata
 from prefect.flow_runs import pause_flow_run, resume_flow_run, suspend_flow_run
 from prefect.client.orchestration import get_client
 from prefect.client.cloud import get_cloud_client
@@ -30,7 +29,6 @@ import prefect.client.schemas
 _types: dict[str, Any] = dict(
     Task=Task,
     Flow=Flow,
-    BaseResult=BaseResult,
     ResultRecordMetadata=ResultRecordMetadata,
 )
 prefect.context.FlowRunContext.model_rebuild(_types_namespace=_types)
prefect/plugins.py CHANGED
@@ -9,15 +9,15 @@ Currently supported entrypoints:
 """
 
 from types import ModuleType
-from typing import Any, Dict, Union
+from typing import Any, Union
 
 import prefect.settings
 from prefect.utilities.compat import EntryPoints, entry_points
 
-COLLECTIONS: Union[None, Dict[str, Union[ModuleType, Exception]]] = None
+_collections: Union[None, dict[str, Union[ModuleType, Exception]]] = None
 
 
-def safe_load_entrypoints(entrypoints: EntryPoints) -> Dict[str, Union[Exception, Any]]:
+def safe_load_entrypoints(entrypoints: EntryPoints) -> dict[str, Union[Exception, Any]]:
     """
     Load entry points for a group capturing any exceptions that occur.
     """
@@ -26,7 +26,7 @@ def safe_load_entrypoints(entrypoints: EntryPoints) -> Dict[str, Union[Exception
     # also want to validate the type for the group for entrypoints that have
     # a specific type we expect.
 
-    results = {}
+    results: dict[str, Union[Exception, Any]] = {}
 
     for entrypoint in entrypoints:
         result = None
@@ -40,18 +40,20 @@ def safe_load_entrypoints(entrypoints: EntryPoints) -> Dict[str, Union[Exception
     return results
 
 
-def load_prefect_collections() -> Dict[str, Union[ModuleType, Exception]]:
+def load_prefect_collections() -> dict[str, Union[ModuleType, Exception]]:
     """
     Load all Prefect collections that define an entrypoint in the group
     `prefect.collections`.
     """
-    global COLLECTIONS
+    global _collections
 
-    if COLLECTIONS is not None:
-        return COLLECTIONS
+    if _collections is not None:
+        return _collections
 
     collection_entrypoints: EntryPoints = entry_points(group="prefect.collections")
-    collections = safe_load_entrypoints(collection_entrypoints)
+    collections: dict[str, Union[Exception, Any]] = safe_load_entrypoints(
+        collection_entrypoints
+    )
 
     # TODO: Consider the utility of this once we've established this pattern.
     # We cannot use a logger here because logging is not yet initialized.
@@ -68,5 +70,5 @@ def load_prefect_collections() -> Dict[str, Union[ModuleType, Exception]]:
     if prefect.settings.PREFECT_DEBUG_MODE:
         print(f"Loaded collection {name!r}.")
 
-    COLLECTIONS = collections
+    _collections = collections
     return collections
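The rename from `COLLECTIONS` to `_collections` marks the module-level cache as private; the memoized loading pattern itself is unchanged. A generic sketch of the same idea (group name and helper are illustrative; assumes Python 3.10+ for `entry_points(group=...)`):

    from importlib.metadata import entry_points
    from types import ModuleType
    from typing import Optional, Union

    _plugins: Optional[dict[str, Union[ModuleType, Exception]]] = None

    def load_plugins() -> dict[str, Union[ModuleType, Exception]]:
        global _plugins
        if _plugins is not None:
            return _plugins  # cached result on subsequent calls

        loaded: dict[str, Union[ModuleType, Exception]] = {}
        for ep in entry_points(group="example.plugins"):
            try:
                loaded[ep.name] = ep.load()
            except Exception as exc:
                loaded[ep.name] = exc  # record the failure instead of raising

        _plugins = loaded
        return loaded
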