prefect-client 3.1.12__py3-none-any.whl → 3.1.14__py3-none-any.whl

This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (111)
  1. prefect/_experimental/lineage.py +63 -0
  2. prefect/_experimental/sla/client.py +53 -27
  3. prefect/_experimental/sla/objects.py +10 -2
  4. prefect/_internal/concurrency/services.py +2 -2
  5. prefect/_internal/concurrency/threads.py +6 -0
  6. prefect/_internal/retries.py +6 -3
  7. prefect/_internal/schemas/validators.py +6 -4
  8. prefect/_version.py +3 -3
  9. prefect/artifacts.py +4 -1
  10. prefect/automations.py +1 -1
  11. prefect/blocks/abstract.py +5 -2
  12. prefect/blocks/notifications.py +1 -0
  13. prefect/cache_policies.py +70 -22
  14. prefect/client/orchestration/_automations/client.py +4 -0
  15. prefect/client/orchestration/_deployments/client.py +3 -3
  16. prefect/client/utilities.py +3 -3
  17. prefect/context.py +16 -6
  18. prefect/deployments/base.py +7 -4
  19. prefect/deployments/flow_runs.py +5 -1
  20. prefect/deployments/runner.py +6 -11
  21. prefect/deployments/steps/core.py +1 -1
  22. prefect/deployments/steps/pull.py +8 -3
  23. prefect/deployments/steps/utility.py +2 -2
  24. prefect/docker/docker_image.py +13 -9
  25. prefect/engine.py +19 -10
  26. prefect/events/cli/automations.py +4 -4
  27. prefect/events/clients.py +17 -14
  28. prefect/events/filters.py +34 -34
  29. prefect/events/schemas/automations.py +12 -8
  30. prefect/events/schemas/events.py +5 -1
  31. prefect/events/worker.py +1 -1
  32. prefect/filesystems.py +1 -1
  33. prefect/flow_engine.py +172 -123
  34. prefect/flows.py +119 -74
  35. prefect/futures.py +14 -7
  36. prefect/infrastructure/provisioners/__init__.py +2 -0
  37. prefect/infrastructure/provisioners/cloud_run.py +4 -4
  38. prefect/infrastructure/provisioners/coiled.py +249 -0
  39. prefect/infrastructure/provisioners/container_instance.py +4 -3
  40. prefect/infrastructure/provisioners/ecs.py +55 -43
  41. prefect/infrastructure/provisioners/modal.py +5 -4
  42. prefect/input/actions.py +5 -1
  43. prefect/input/run_input.py +157 -43
  44. prefect/logging/configuration.py +5 -8
  45. prefect/logging/filters.py +2 -2
  46. prefect/logging/formatters.py +15 -11
  47. prefect/logging/handlers.py +24 -14
  48. prefect/logging/highlighters.py +5 -5
  49. prefect/logging/loggers.py +29 -20
  50. prefect/main.py +3 -1
  51. prefect/results.py +166 -86
  52. prefect/runner/runner.py +112 -84
  53. prefect/runner/server.py +3 -1
  54. prefect/runner/storage.py +18 -18
  55. prefect/runner/submit.py +19 -12
  56. prefect/runtime/deployment.py +15 -8
  57. prefect/runtime/flow_run.py +19 -6
  58. prefect/runtime/task_run.py +7 -3
  59. prefect/settings/base.py +17 -7
  60. prefect/settings/legacy.py +4 -4
  61. prefect/settings/models/api.py +4 -3
  62. prefect/settings/models/cli.py +4 -3
  63. prefect/settings/models/client.py +7 -4
  64. prefect/settings/models/cloud.py +4 -3
  65. prefect/settings/models/deployments.py +4 -3
  66. prefect/settings/models/experiments.py +4 -3
  67. prefect/settings/models/flows.py +4 -3
  68. prefect/settings/models/internal.py +4 -3
  69. prefect/settings/models/logging.py +8 -6
  70. prefect/settings/models/results.py +4 -3
  71. prefect/settings/models/root.py +11 -16
  72. prefect/settings/models/runner.py +8 -5
  73. prefect/settings/models/server/api.py +6 -3
  74. prefect/settings/models/server/database.py +120 -25
  75. prefect/settings/models/server/deployments.py +4 -3
  76. prefect/settings/models/server/ephemeral.py +7 -4
  77. prefect/settings/models/server/events.py +6 -3
  78. prefect/settings/models/server/flow_run_graph.py +4 -3
  79. prefect/settings/models/server/root.py +4 -3
  80. prefect/settings/models/server/services.py +15 -12
  81. prefect/settings/models/server/tasks.py +7 -4
  82. prefect/settings/models/server/ui.py +4 -3
  83. prefect/settings/models/tasks.py +10 -5
  84. prefect/settings/models/testing.py +4 -3
  85. prefect/settings/models/worker.py +7 -4
  86. prefect/settings/profiles.py +13 -12
  87. prefect/settings/sources.py +20 -19
  88. prefect/states.py +17 -13
  89. prefect/task_engine.py +43 -33
  90. prefect/task_runners.py +35 -23
  91. prefect/task_runs.py +20 -11
  92. prefect/task_worker.py +12 -7
  93. prefect/tasks.py +67 -25
  94. prefect/telemetry/bootstrap.py +4 -1
  95. prefect/telemetry/run_telemetry.py +15 -13
  96. prefect/transactions.py +3 -3
  97. prefect/types/__init__.py +9 -6
  98. prefect/types/_datetime.py +19 -0
  99. prefect/utilities/_deprecated.py +38 -0
  100. prefect/utilities/engine.py +11 -4
  101. prefect/utilities/filesystem.py +2 -2
  102. prefect/utilities/generics.py +1 -1
  103. prefect/utilities/pydantic.py +21 -36
  104. prefect/workers/base.py +52 -30
  105. prefect/workers/process.py +20 -15
  106. prefect/workers/server.py +4 -5
  107. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/METADATA +2 -2
  108. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/RECORD +111 -108
  109. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/LICENSE +0 -0
  110. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/WHEEL +0 -0
  111. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/top_level.txt +0 -0
prefect/results.py CHANGED
@@ -1,9 +1,12 @@
+ from __future__ import annotations
+
  import inspect
  import os
  import socket
  import threading
  import uuid
  from functools import partial
+ from operator import methodcaller
  from pathlib import Path
  from typing import (
      TYPE_CHECKING,
@@ -11,10 +14,8 @@ from typing import (
      Any,
      Callable,
      ClassVar,
-     Dict,
      Generic,
      Optional,
-     Tuple,
      TypeVar,
      Union,
  )
@@ -38,6 +39,7 @@ from prefect._experimental.lineage import (
      emit_result_read_event,
      emit_result_write_event,
  )
+ from prefect._internal.compatibility.async_dispatch import async_dispatch
  from prefect.blocks.core import Block
  from prefect.exceptions import (
      ConfigurationError,
@@ -58,28 +60,29 @@ from prefect.utilities.annotations import NotSet
  from prefect.utilities.asyncutils import sync_compatible

  if TYPE_CHECKING:
+     import logging
+
      from prefect import Flow, Task
      from prefect.transactions import IsolationLevel


  ResultStorage = Union[WritableFileSystem, str]
  ResultSerializer = Union[Serializer, str]
- LITERAL_TYPES = {type(None), bool, UUID}
+ LITERAL_TYPES: set[type] = {type(None), bool, UUID}


- def DEFAULT_STORAGE_KEY_FN():
+ def DEFAULT_STORAGE_KEY_FN() -> str:
      return uuid.uuid4().hex


- logger = get_logger("results")
+ logger: "logging.Logger" = get_logger("results")
  P = ParamSpec("P")
  R = TypeVar("R")

- _default_storages: Dict[Tuple[str, str], WritableFileSystem] = {}
+ _default_storages: dict[tuple[str, str], WritableFileSystem] = {}


- @sync_compatible
- async def get_default_result_storage() -> WritableFileSystem:
+ async def aget_default_result_storage() -> WritableFileSystem:
      """
      Generate a default file system for result storage.
      """
@@ -93,7 +96,7 @@ async def get_default_result_storage() -> WritableFileSystem:
          return _default_storages[cache_key]

      if default_block is not None:
-         storage = await resolve_result_storage(default_block)
+         storage = await aresolve_result_storage(default_block)
      else:
          # Use the local file system
          storage = LocalFileSystem(basepath=str(basepath))
@@ -102,9 +105,34 @@ async def get_default_result_storage() -> WritableFileSystem:
      return storage


- @sync_compatible
- async def resolve_result_storage(
-     result_storage: Union[ResultStorage, UUID, Path],
+ @async_dispatch(aget_default_result_storage)
+ def get_default_result_storage() -> WritableFileSystem:
+     """
+     Generate a default file system for result storage.
+     """
+     settings = get_current_settings()
+     default_block = settings.results.default_storage_block
+     basepath = settings.results.local_storage_path
+
+     cache_key = (str(default_block), str(basepath))
+
+     if cache_key in _default_storages:
+         return _default_storages[cache_key]
+
+     if default_block is not None:
+         storage = resolve_result_storage(default_block, _sync=True)
+         if TYPE_CHECKING:
+             assert isinstance(storage, WritableFileSystem)
+     else:
+         # Use the local file system
+         storage = LocalFileSystem(basepath=str(basepath))
+
+     _default_storages[cache_key] = storage
+     return storage
+
+
+ async def aresolve_result_storage(
+     result_storage: ResultStorage | UUID | Path,
  ) -> WritableFileSystem:
      """
      Resolve one of the valid `ResultStorage` input types into a saved block
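The change above is the pattern applied throughout this release: a `@sync_compatible` coroutine is split into an explicit async implementation (prefixed with `a`) and a plain synchronous function wired to it with `@async_dispatch`. A minimal sketch of how such a pair behaves, assuming `async_dispatch` routes calls made from a running event loop to the async implementation (the `load_config`/`aload_config` names are illustrative, not part of Prefect):

from prefect._internal.compatibility.async_dispatch import async_dispatch


async def aload_config() -> dict:
    # explicit async implementation, awaited directly by async callers
    return {"mode": "async"}


@async_dispatch(aload_config)
def load_config() -> dict:
    # synchronous implementation, used when no event loop is running
    return {"mode": "sync"}


print(load_config())  # called from sync code -> {'mode': 'sync'}
# In async code the same name resolves to the async implementation:
#     config = await load_config()

The `_sync=True` keyword seen in the diff (for example `resolve_result_storage(default_block, _sync=True)`) is used internally to pin a call to the synchronous path.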
@@ -113,23 +141,23 @@ async def resolve_result_storage(
      from prefect.client.orchestration import get_client

      client = get_client()
+     storage_block: WritableFileSystem
      if isinstance(result_storage, Block):
          storage_block = result_storage
-
-         if storage_block._block_document_id is not None:
-             # Avoid saving the block if it already has an identifier assigned
-             storage_block_id = storage_block._block_document_id
-         else:
-             storage_block_id = None
      elif isinstance(result_storage, Path):
          storage_block = LocalFileSystem(basepath=str(result_storage))
      elif isinstance(result_storage, str):
-         storage_block = await Block.aload(result_storage, client=client)
-         storage_block_id = storage_block._block_document_id
-         assert storage_block_id is not None, "Loaded storage blocks must have ids"
-     elif isinstance(result_storage, UUID):
+         block = await Block.aload(result_storage, client=client)
+         if TYPE_CHECKING:
+             assert isinstance(block, WritableFileSystem)
+         storage_block = block
+     elif isinstance(result_storage, UUID):  # pyright: ignore[reportUnnecessaryIsInstance]
          block_document = await client.read_block_document(result_storage)
-         storage_block = Block._from_block_document(block_document)
+         from_block_document = methodcaller("_from_block_document", block_document)
+         block = from_block_document(Block)
+         if TYPE_CHECKING:
+             assert isinstance(block, WritableFileSystem)
+         storage_block = block
      else:
          raise TypeError(
              "Result storage must be one of the following types: 'UUID', 'Block', "
@@ -139,6 +167,42 @@ async def resolve_result_storage(
      return storage_block


+ @async_dispatch(aresolve_result_storage)
+ def resolve_result_storage(
+     result_storage: ResultStorage | UUID | Path,
+ ) -> WritableFileSystem:
+     """
+     Resolve one of the valid `ResultStorage` input types into a saved block
+     document id and an instance of the block.
+     """
+     from prefect.client.orchestration import get_client
+
+     client = get_client(sync_client=True)
+     storage_block: WritableFileSystem
+     if isinstance(result_storage, Block):
+         storage_block = result_storage
+     elif isinstance(result_storage, Path):
+         storage_block = LocalFileSystem(basepath=str(result_storage))
+     elif isinstance(result_storage, str):
+         block = Block.load(result_storage, _sync=True)
+         if TYPE_CHECKING:
+             assert isinstance(block, WritableFileSystem)
+         storage_block = block
+     elif isinstance(result_storage, UUID):  # pyright: ignore[reportUnnecessaryIsInstance]
+         block_document = client.read_block_document(result_storage)
+         from_block_document = methodcaller("_from_block_document", block_document)
+         block = from_block_document(Block)
+         if TYPE_CHECKING:
+             assert isinstance(block, WritableFileSystem)
+         storage_block = block
+     else:
+         raise TypeError(
+             "Result storage must be one of the following types: 'UUID', 'Block', "
+             f"'str'. Got unsupported type {type(result_storage).__name__!r}."
+         )
+     return storage_block
+
+
  def resolve_serializer(serializer: ResultSerializer) -> Serializer:
      """
      Resolve one of the valid `ResultSerializer` input types into a serializer
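The synchronous `resolve_result_storage` added above (paired with `aresolve_result_storage` via `@async_dispatch`) accepts the same inputs as before: a `Block` instance is returned as-is, a `Path` is wrapped in a `LocalFileSystem`, a string is treated as a block document slug and loaded, and a `UUID` is read from the API. A rough usage sketch under those assumptions (the basepath and slug are illustrative):

from pathlib import Path

from prefect.filesystems import LocalFileSystem
from prefect.results import resolve_result_storage

# A Block instance passes through unchanged:
storage = resolve_result_storage(LocalFileSystem(basepath="/tmp/results"))

# A Path is wrapped in a LocalFileSystem rooted at that path:
storage = resolve_result_storage(Path("/tmp/results"))

# A block slug or a block document UUID is loaded via the Prefect API,
# so those forms need a reachable server or workspace:
#     storage = resolve_result_storage("s3-bucket/my-results")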
@@ -146,7 +210,7 @@ def resolve_serializer(serializer: ResultSerializer) -> Serializer:
      """
      if isinstance(serializer, Serializer):
          return serializer
-     elif isinstance(serializer, str):
+     elif isinstance(serializer, str):  # pyright: ignore[reportUnnecessaryIsInstance]
          return Serializer(type=serializer)
      else:
          raise TypeError(
@@ -163,11 +227,14 @@ async def get_or_create_default_task_scheduling_storage() -> ResultStorage:
      default_block = settings.tasks.scheduling.default_storage_block

      if default_block is not None:
-         return await Block.aload(default_block)
+         block = await Block.aload(default_block)
+         if TYPE_CHECKING:
+             assert isinstance(block, WritableFileSystem)
+         return block

      # otherwise, use the local file system
      basepath = settings.results.local_storage_path
-     return LocalFileSystem(basepath=basepath)
+     return LocalFileSystem(basepath=str(basepath))


  def get_default_result_serializer() -> Serializer:
@@ -225,7 +292,7 @@ def _format_user_supplied_storage_key(key: str) -> str:


  async def _call_explicitly_async_block_method(
-     block: Union[WritableFileSystem, NullFileSystem],
+     block: WritableFileSystem | NullFileSystem,
      method: str,
      args: tuple[Any, ...],
      kwargs: dict[str, Any],
@@ -301,13 +368,13 @@ class ResultStore(BaseModel):
      cache: LRUCache[str, "ResultRecord[Any]"] = Field(default_factory=default_cache)

      @property
-     def result_storage_block_id(self) -> Optional[UUID]:
+     def result_storage_block_id(self) -> UUID | None:
          if self.result_storage is None:
              return None
-         return self.result_storage._block_document_id
+         return getattr(self.result_storage, "_block_document_id", None)

      @sync_compatible
-     async def update_for_flow(self, flow: "Flow") -> Self:
+     async def update_for_flow(self, flow: "Flow[..., Any]") -> Self:
          """
          Create a new result store for a flow with updated settings.

@@ -317,15 +384,16 @@ class ResultStore(BaseModel):
          Returns:
              An updated result store.
          """
-         update = {}
+         update: dict[str, Any] = {}
+         update["cache_result_in_memory"] = flow.cache_result_in_memory
          if flow.result_storage is not None:
-             update["result_storage"] = await resolve_result_storage(flow.result_storage)
+             update["result_storage"] = await aresolve_result_storage(
+                 flow.result_storage
+             )
          if flow.result_serializer is not None:
              update["serializer"] = resolve_serializer(flow.result_serializer)
-         if flow.cache_result_in_memory is not None:
-             update["cache_result_in_memory"] = flow.cache_result_in_memory
          if self.result_storage is None and update.get("result_storage") is None:
-             update["result_storage"] = await get_default_result_storage()
+             update["result_storage"] = await aget_default_result_storage()
          update["metadata_storage"] = NullFileSystem()
          return self.model_copy(update=update)

@@ -342,13 +410,14 @@ class ResultStore(BaseModel):
          """
          from prefect.transactions import get_transaction

-         update = {}
+         update: dict[str, Any] = {}
+         update["cache_result_in_memory"] = task.cache_result_in_memory
          if task.result_storage is not None:
-             update["result_storage"] = await resolve_result_storage(task.result_storage)
+             update["result_storage"] = await aresolve_result_storage(
+                 task.result_storage
+             )
          if task.result_serializer is not None:
              update["serializer"] = resolve_serializer(task.result_serializer)
-         if task.cache_result_in_memory is not None:
-             update["cache_result_in_memory"] = task.cache_result_in_memory
          if task.result_storage_key is not None:
              update["storage_key_fn"] = partial(
                  _format_user_supplied_storage_key, task.result_storage_key
@@ -360,18 +429,20 @@ class ResultStore(BaseModel):
          ):
              update["lock_manager"] = current_txn.store.lock_manager

-         if task.cache_policy is not None and task.cache_policy is not NotSet:
+         from prefect.cache_policies import CachePolicy
+
+         if isinstance(task.cache_policy, CachePolicy):
              if task.cache_policy.key_storage is not None:
                  storage = task.cache_policy.key_storage
                  if isinstance(storage, str) and not len(storage.split("/")) == 2:
                      storage = Path(storage)
-                 update["metadata_storage"] = await resolve_result_storage(storage)
+                 update["metadata_storage"] = await aresolve_result_storage(storage)
              # if the cache policy has a lock manager, it takes precedence over the parent transaction
              if task.cache_policy.lock_manager is not None:
                  update["lock_manager"] = task.cache_policy.lock_manager

          if self.result_storage is None and update.get("result_storage") is None:
-             update["result_storage"] = await get_default_result_storage()
+             update["result_storage"] = await aget_default_result_storage()
          if (
              isinstance(self.metadata_storage, NullFileSystem)
              and update.get("metadata_storage", NotSet) is NotSet
@@ -424,7 +495,7 @@ class ResultStore(BaseModel):
                  )
                  if content is None:
                      return False
-                 record = ResultRecord.deserialize(content)
+                 record: ResultRecord[Any] = ResultRecord.deserialize(content)
                  metadata = record.metadata
              except Exception:
                  return False
@@ -462,10 +533,10 @@ class ResultStore(BaseModel):
          return await self._exists(key=key, _sync=False)

      def _resolved_key_path(self, key: str) -> str:
-         if self.result_storage_block_id is None and hasattr(
-             self.result_storage, "_resolve_path"
+         if self.result_storage_block_id is None and (
+             _resolve_path := getattr(self.result_storage, "_resolve_path", None)
          ):
-             return str(self.result_storage._resolve_path(key))
+             return str(_resolve_path(key))
          return key

      @sync_compatible
@@ -490,12 +561,12 @@ class ResultStore(BaseModel):
          resolved_key_path = self._resolved_key_path(key)

          if resolved_key_path in self.cache:
-             cached_result = self.cache[resolved_key_path]
+             cached_result: ResultRecord[Any] = self.cache[resolved_key_path]
              await emit_result_read_event(self, resolved_key_path, cached=True)
              return cached_result

          if self.result_storage is None:
-             self.result_storage = await get_default_result_storage()
+             self.result_storage = await aget_default_result_storage()

          if self.metadata_storage is not None:
              metadata_content = await _call_explicitly_async_block_method(
@@ -539,7 +610,7 @@ class ResultStore(BaseModel):
      def read(
          self,
          key: str,
-         holder: Optional[str] = None,
+         holder: str | None = None,
      ) -> "ResultRecord[Any]":
          """
          Read a result record from storage.
@@ -557,7 +628,7 @@ class ResultStore(BaseModel):
      async def aread(
          self,
          key: str,
-         holder: Optional[str] = None,
+         holder: str | None = None,
      ) -> "ResultRecord[Any]":
          """
          Read a result record from storage.
@@ -575,8 +646,8 @@ class ResultStore(BaseModel):
      def create_result_record(
          self,
          obj: Any,
-         key: Optional[str] = None,
-         expiration: Optional[DateTime] = None,
+         key: str | None = None,
+         expiration: DateTime | None = None,
      ) -> "ResultRecord[Any]":
          """
          Create a result record.
@@ -590,10 +661,12 @@ class ResultStore(BaseModel):

          if self.result_storage is None:
              self.result_storage = get_default_result_storage(_sync=True)
+             if TYPE_CHECKING:
+                 assert isinstance(self.result_storage, WritableFileSystem)

          if self.result_storage_block_id is None:
-             if hasattr(self.result_storage, "_resolve_path"):
-                 key = str(self.result_storage._resolve_path(key))
+             if _resolve_path := getattr(self.result_storage, "_resolve_path", None):
+                 key = str(_resolve_path(key))

          return ResultRecord(
              result=obj,
@@ -608,10 +681,10 @@ class ResultStore(BaseModel):
      def write(
          self,
          obj: Any,
-         key: Optional[str] = None,
-         expiration: Optional[DateTime] = None,
-         holder: Optional[str] = None,
-     ):
+         key: str | None = None,
+         expiration: DateTime | None = None,
+         holder: str | None = None,
+     ) -> None:
          """
          Write a result to storage.

@@ -635,10 +708,10 @@ class ResultStore(BaseModel):
      async def awrite(
          self,
          obj: Any,
-         key: Optional[str] = None,
-         expiration: Optional[DateTime] = None,
-         holder: Optional[str] = None,
-     ):
+         key: str | None = None,
+         expiration: DateTime | None = None,
+         holder: str | None = None,
+     ) -> None:
          """
          Write a result to storage.

@@ -657,7 +730,9 @@ class ResultStore(BaseModel):
          )

      @sync_compatible
-     async def _persist_result_record(self, result_record: "ResultRecord", holder: str):
+     async def _persist_result_record(
+         self, result_record: "ResultRecord[Any]", holder: str
+     ) -> None:
          """
          Persist a result record to storage.

@@ -672,8 +747,10 @@ class ResultStore(BaseModel):
          key = result_record.metadata.storage_key
          if result_record.metadata.storage_block_id is None:
              basepath = (
-                 self.result_storage._resolve_path("")
-                 if hasattr(self.result_storage, "_resolve_path")
+                 _resolve_path("")
+                 if (
+                     _resolve_path := getattr(self.result_storage, "_resolve_path", None)
+                 )
                  else Path(".").resolve()
              )
              base_key = str(Path(key).relative_to(basepath))
@@ -689,7 +766,7 @@ class ResultStore(BaseModel):
                  f"another holder."
              )
          if self.result_storage is None:
-             self.result_storage = await get_default_result_storage()
+             self.result_storage = await aget_default_result_storage()

          # If metadata storage is configured, write result and metadata separately
          if self.metadata_storage is not None:
@@ -719,8 +796,8 @@ class ResultStore(BaseModel):
              self.cache[key] = result_record

      def persist_result_record(
-         self, result_record: "ResultRecord", holder: Optional[str] = None
-     ):
+         self, result_record: "ResultRecord[Any]", holder: str | None = None
+     ) -> None:
          """
          Persist a result record to storage.

@@ -733,8 +810,8 @@ class ResultStore(BaseModel):
          )

      async def apersist_result_record(
-         self, result_record: "ResultRecord", holder: Optional[str] = None
-     ):
+         self, result_record: "ResultRecord[Any]", holder: str | None = None
+     ) -> None:
          """
          Persist a result record to storage.

@@ -766,7 +843,7 @@ class ResultStore(BaseModel):
              raise ValueError(f"Unsupported isolation level: {level}")

      def acquire_lock(
-         self, key: str, holder: Optional[str] = None, timeout: Optional[float] = None
+         self, key: str, holder: str | None = None, timeout: float | None = None
      ) -> bool:
          """
          Acquire a lock for a result record.
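The lock-related signatures in this and the following hunks change only their annotations (`Optional[...]` becomes `... | None`). For context, a rough sketch of guarding a write with a lock manager; it assumes the in-memory lock manager at `prefect.locking.memory.MemoryLockManager`, and the key, holder, and basepath are illustrative:

from prefect.filesystems import LocalFileSystem
from prefect.locking.memory import MemoryLockManager
from prefect.results import ResultStore

store = ResultStore(
    result_storage=LocalFileSystem(basepath="/tmp/prefect-results"),
    lock_manager=MemoryLockManager(),
)

# Guard a write so concurrent writers cannot clobber the same key:
if store.acquire_lock("my-key", holder="worker-1", timeout=5):
    try:
        store.write(obj={"value": 1}, key="my-key", holder="worker-1")
    finally:
        store.release_lock("my-key", holder="worker-1")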
@@ -789,7 +866,7 @@ class ResultStore(BaseModel):
          return self.lock_manager.acquire_lock(key, holder, timeout)

      async def aacquire_lock(
-         self, key: str, holder: Optional[str] = None, timeout: Optional[float] = None
+         self, key: str, holder: str | None = None, timeout: float | None = None
      ) -> bool:
          """
          Acquire a lock for a result record.
@@ -812,7 +889,7 @@ class ResultStore(BaseModel):

          return await self.lock_manager.aacquire_lock(key, holder, timeout)

-     def release_lock(self, key: str, holder: Optional[str] = None):
+     def release_lock(self, key: str, holder: str | None = None) -> None:
          """
          Release a lock for a result record.

@@ -841,7 +918,7 @@ class ResultStore(BaseModel):
          )
          return self.lock_manager.is_locked(key)

-     def is_lock_holder(self, key: str, holder: Optional[str] = None) -> bool:
+     def is_lock_holder(self, key: str, holder: str | None = None) -> bool:
          """
          Check if the current holder is the lock holder for the result record.

@@ -861,7 +938,7 @@ class ResultStore(BaseModel):
          )
          return self.lock_manager.is_lock_holder(key, holder)

-     def wait_for_lock(self, key: str, timeout: Optional[float] = None) -> bool:
+     def wait_for_lock(self, key: str, timeout: float | None = None) -> bool:
          """
          Wait for the corresponding transaction record to become free.
          """
@@ -872,7 +949,7 @@ class ResultStore(BaseModel):
          )
          return self.lock_manager.wait_for_lock(key, timeout)

-     async def await_for_lock(self, key: str, timeout: Optional[float] = None) -> bool:
+     async def await_for_lock(self, key: str, timeout: float | None = None) -> bool:
          """
          Wait for the corresponding transaction record to become free.
          """
@@ -886,13 +963,14 @@ class ResultStore(BaseModel):
      # TODO: These two methods need to find a new home

      @sync_compatible
-     async def store_parameters(self, identifier: UUID, parameters: Dict[str, Any]):
+     async def store_parameters(self, identifier: UUID, parameters: dict[str, Any]):
          record = ResultRecord(
              result=parameters,
              metadata=ResultRecordMetadata(
                  serializer=self.serializer, storage_key=str(identifier)
              ),
          )
+
          await _call_explicitly_async_block_method(
              self.result_storage,
              "write_path",
@@ -906,7 +984,7 @@ class ResultStore(BaseModel):
              raise ValueError(
                  "Result store is not configured - must have a result storage block to read parameters"
              )
-         record = ResultRecord.deserialize(
+         record: ResultRecord[Any] = ResultRecord.deserialize(
              await _call_explicitly_async_block_method(
                  self.result_storage,
                  "read_path",
@@ -988,7 +1066,7 @@ class ResultRecord(BaseModel, Generic[R]):
      result: R

      @property
-     def expiration(self) -> Optional[DateTime]:
+     def expiration(self) -> DateTime | None:
          return self.metadata.expiration

      @property
@@ -1012,7 +1090,7 @@ class ResultRecord(BaseModel, Generic[R]):
                  and str(exc).startswith("cannot pickle")
              ):
                  try:
-                     from IPython import get_ipython
+                     from IPython.core.getipython import get_ipython

                      if get_ipython() is not None:
                          extra_info = inspect.cleandoc(
@@ -1041,7 +1119,7 @@ class ResultRecord(BaseModel, Generic[R]):

      @model_validator(mode="before")
      @classmethod
-     def coerce_old_format(cls, value: Any) -> Any:
+     def coerce_old_format(cls, value: dict[str, Any] | Any) -> dict[str, Any]:
          if isinstance(value, dict):
              if "data" in value:
                  value["result"] = value.pop("data")
@@ -1071,12 +1149,14 @@ class ResultRecord(BaseModel, Generic[R]):
          if metadata.storage_block_id is None:
              storage_block = None
          else:
-             storage_block = await resolve_result_storage(
-                 metadata.storage_block_id, _sync=False
-             )
+             storage_block = await aresolve_result_storage(metadata.storage_block_id)
          store = ResultStore(
              result_storage=storage_block, serializer=metadata.serializer
          )
+         if metadata.storage_key is None:
+             raise ValueError(
+                 "storage_key is required to hydrate a result record from metadata"
+             )
          result = await store.aread(metadata.storage_key)
          return result

@@ -1101,7 +1181,7 @@ class ResultRecord(BaseModel, Generic[R]):

      @classmethod
      def deserialize(
-         cls, data: bytes, backup_serializer: Optional[Serializer] = None
+         cls, data: bytes, backup_serializer: Serializer | None = None
      ) -> "ResultRecord[R]":
          """
          Deserialize a record from bytes.
@@ -1151,7 +1231,7 @@ class ResultRecord(BaseModel, Generic[R]):
              result=result_record_metadata.serializer.loads(result),
          )

-     def __eq__(self, other):
+     def __eq__(self, other: Any | "ResultRecord[Any]") -> bool:
          if not isinstance(other, ResultRecord):
              return False
          return self.metadata == other.metadata and self.result == other.result
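Overall, the changes to prefect/results.py are typing modernization (`Optional[X]` to `X | None`, `Dict`/`Tuple` to builtin generics), the `async_dispatch` split described above, and small guards such as requiring `storage_key` when hydrating a record from metadata. The `ResultStore` read/write surface keeps its shape apart from annotations, so a basic round trip still looks like the sketch below (basepath and key are illustrative):

from prefect.filesystems import LocalFileSystem
from prefect.results import ResultStore

store = ResultStore(result_storage=LocalFileSystem(basepath="/tmp/prefect-results"))

# Synchronous write and read:
store.write(obj={"answer": 42}, key="example-key")
record = store.read("example-key")
print(record.result)  # {'answer': 42}

# From async code, the awaitable variants are used instead:
#     await store.awrite(obj={"answer": 42}, key="example-key")
#     record = await store.aread("example-key")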