prefect-client 3.0.0rc1__py3-none-any.whl → 3.0.0rc3__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Files changed (72)
  1. prefect/_internal/compatibility/migration.py +124 -0
  2. prefect/_internal/concurrency/__init__.py +2 -2
  3. prefect/_internal/concurrency/primitives.py +1 -0
  4. prefect/_internal/pydantic/annotations/pendulum.py +2 -2
  5. prefect/_internal/pytz.py +1 -1
  6. prefect/blocks/core.py +1 -1
  7. prefect/blocks/redis.py +168 -0
  8. prefect/client/orchestration.py +113 -23
  9. prefect/client/schemas/actions.py +1 -1
  10. prefect/client/schemas/filters.py +6 -0
  11. prefect/client/schemas/objects.py +22 -11
  12. prefect/client/subscriptions.py +3 -2
  13. prefect/concurrency/asyncio.py +1 -1
  14. prefect/concurrency/services.py +1 -1
  15. prefect/context.py +1 -27
  16. prefect/deployments/__init__.py +3 -0
  17. prefect/deployments/base.py +11 -3
  18. prefect/deployments/deployments.py +3 -0
  19. prefect/deployments/steps/pull.py +1 -0
  20. prefect/deployments/steps/utility.py +2 -1
  21. prefect/engine.py +3 -0
  22. prefect/events/cli/automations.py +1 -1
  23. prefect/events/clients.py +7 -1
  24. prefect/events/schemas/events.py +2 -0
  25. prefect/exceptions.py +9 -0
  26. prefect/filesystems.py +22 -11
  27. prefect/flow_engine.py +118 -156
  28. prefect/flow_runs.py +2 -2
  29. prefect/flows.py +91 -35
  30. prefect/futures.py +44 -43
  31. prefect/infrastructure/provisioners/container_instance.py +1 -0
  32. prefect/infrastructure/provisioners/ecs.py +2 -2
  33. prefect/input/__init__.py +4 -0
  34. prefect/input/run_input.py +4 -2
  35. prefect/logging/formatters.py +2 -2
  36. prefect/logging/handlers.py +2 -2
  37. prefect/logging/loggers.py +1 -1
  38. prefect/plugins.py +1 -0
  39. prefect/records/cache_policies.py +179 -0
  40. prefect/records/result_store.py +10 -3
  41. prefect/results.py +27 -55
  42. prefect/runner/runner.py +1 -1
  43. prefect/runner/server.py +1 -1
  44. prefect/runtime/__init__.py +1 -0
  45. prefect/runtime/deployment.py +1 -0
  46. prefect/runtime/flow_run.py +1 -0
  47. prefect/runtime/task_run.py +1 -0
  48. prefect/settings.py +21 -5
  49. prefect/states.py +17 -4
  50. prefect/task_engine.py +337 -209
  51. prefect/task_runners.py +15 -5
  52. prefect/task_runs.py +203 -0
  53. prefect/{task_server.py → task_worker.py} +66 -36
  54. prefect/tasks.py +180 -77
  55. prefect/transactions.py +92 -16
  56. prefect/types/__init__.py +1 -1
  57. prefect/utilities/asyncutils.py +3 -3
  58. prefect/utilities/callables.py +90 -7
  59. prefect/utilities/dockerutils.py +5 -3
  60. prefect/utilities/engine.py +11 -0
  61. prefect/utilities/filesystem.py +4 -5
  62. prefect/utilities/importtools.py +34 -5
  63. prefect/utilities/services.py +2 -2
  64. prefect/utilities/urls.py +195 -0
  65. prefect/utilities/visualization.py +1 -0
  66. prefect/variables.py +19 -10
  67. prefect/workers/base.py +46 -1
  68. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/METADATA +3 -2
  69. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/RECORD +72 -66
  70. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/LICENSE +0 -0
  71. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/WHEEL +0 -0
  72. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/top_level.txt +0 -0
prefect/records/cache_policies.py ADDED
@@ -0,0 +1,179 @@
+import inspect
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, Optional
+
+from prefect.context import TaskRunContext
+from prefect.utilities.hashing import hash_objects
+
+
+@dataclass
+class CachePolicy:
+    @classmethod
+    def from_cache_key_fn(
+        cls, cache_key_fn: Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
+    ) -> "CacheKeyFnPolicy":
+        """
+        Given a function, generates a key policy.
+        """
+        return CacheKeyFnPolicy(cache_key_fn=cache_key_fn)
+
+    def compute_key(
+        self,
+        task_ctx: TaskRunContext,
+        inputs: Dict[str, Any],
+        flow_parameters: Dict[str, Any],
+        **kwargs,
+    ) -> Optional[str]:
+        raise NotImplementedError
+
+    def __sub__(self, other: str) -> "CompoundCachePolicy":
+        if not isinstance(other, str):
+            raise TypeError("Can only subtract strings from key policies.")
+        if isinstance(self, Inputs):
+            exclude = self.exclude or []
+            return Inputs(exclude=exclude + [other])
+        elif isinstance(self, CompoundCachePolicy):
+            new = Inputs(exclude=[other])
+            policies = self.policies or []
+            return CompoundCachePolicy(policies=policies + [new])
+        else:
+            new = Inputs(exclude=[other])
+            return CompoundCachePolicy(policies=[self, new])
+
+    def __add__(self, other: "CachePolicy") -> "CompoundCachePolicy":
+        # adding _None is a no-op
+        if isinstance(other, _None):
+            return self
+        elif isinstance(self, _None):
+            return other
+
+        if isinstance(self, CompoundCachePolicy):
+            policies = self.policies or []
+            return CompoundCachePolicy(policies=policies + [other])
+        elif isinstance(other, CompoundCachePolicy):
+            policies = other.policies or []
+            return CompoundCachePolicy(policies=policies + [self])
+        else:
+            return CompoundCachePolicy(policies=[self, other])
+
+
+@dataclass
+class CacheKeyFnPolicy(CachePolicy):
+    # making it optional for tests
+    cache_key_fn: Optional[
+        Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
+    ] = None
+
+    def compute_key(
+        self,
+        task_ctx: TaskRunContext,
+        inputs: Dict[str, Any],
+        flow_parameters: Dict[str, Any],
+        **kwargs,
+    ) -> Optional[str]:
+        if self.cache_key_fn:
+            return self.cache_key_fn(task_ctx, inputs)
+
+
+@dataclass
+class CompoundCachePolicy(CachePolicy):
+    policies: Optional[list] = None
+
+    def compute_key(
+        self,
+        task_ctx: TaskRunContext,
+        inputs: Dict[str, Any],
+        flow_parameters: Dict[str, Any],
+        **kwargs,
+    ) -> Optional[str]:
+        keys = []
+        for policy in self.policies or []:
+            keys.append(
+                policy.compute_key(
+                    task_ctx=task_ctx,
+                    inputs=inputs,
+                    flow_parameters=flow_parameters,
+                    **kwargs,
+                )
+            )
+        return hash_objects(*keys)
+
+
+@dataclass
+class Default(CachePolicy):
+    "Execution run ID only"
+
+    def compute_key(
+        self,
+        task_ctx: TaskRunContext,
+        inputs: Dict[str, Any],
+        flow_parameters: Dict[str, Any],
+        **kwargs,
+    ) -> Optional[str]:
+        return str(task_ctx.task_run.id)
+
+
+@dataclass
+class _None(CachePolicy):
+    "ignore key policies altogether, always run - prevents persistence"
+
+    def compute_key(
+        self,
+        task_ctx: TaskRunContext,
+        inputs: Dict[str, Any],
+        flow_parameters: Dict[str, Any],
+        **kwargs,
+    ) -> Optional[str]:
+        return None
+
+
+@dataclass
+class TaskDef(CachePolicy):
+    def compute_key(
+        self,
+        task_ctx: TaskRunContext,
+        inputs: Dict[str, Any],
+        flow_parameters: Dict[str, Any],
+        **kwargs,
+    ) -> Optional[str]:
+        lines = inspect.getsource(task_ctx.task)
+        return hash_objects(lines)
+
+
+@dataclass
+class FlowParameters(CachePolicy):
+    pass
+
+
+@dataclass
+class Inputs(CachePolicy):
+    """
+    Exposes flag for whether to include flow parameters as well.
+
+    And exclude/include config.
+    """
+
+    exclude: Optional[list] = None
+
+    def compute_key(
+        self,
+        task_ctx: TaskRunContext,
+        inputs: Dict[str, Any],
+        flow_parameters: Dict[str, Any],
+        **kwargs,
+    ) -> Optional[str]:
+        hashed_inputs = {}
+        inputs = inputs or {}
+        exclude = self.exclude or []
+
+        for key, val in inputs.items():
+            if key not in exclude:
+                hashed_inputs[key] = val
+
+        return hash_objects(hashed_inputs)
+
+
+DEFAULT = Default()
+INPUTS = Inputs()
+NONE = _None()
+TASKDEF = TaskDef()
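The new cache policies compose through operator overloading: `+` merges two policies into a `CompoundCachePolicy` (with `NONE` as the identity element), and `-` excludes a named input from hashing. A minimal sketch of composing the module-level constants, assuming the module path shown in the file list; the input name "debug" is illustrative:

    from prefect.records.cache_policies import INPUTS, NONE, TASKDEF

    # `+` wraps both policies in a CompoundCachePolicy; its compute_key()
    # then hashes each member policy's key together.
    policy = INPUTS + TASKDEF

    # `-` appends Inputs(exclude=["debug"]), so that input is skipped
    # when the cache key is computed.
    policy = policy - "debug"

    # NONE is the identity for `+`, so this returns INPUTS unchanged.
    assert (INPUTS + NONE) is INPUTS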
prefect/records/result_store.py CHANGED
@@ -1,7 +1,8 @@
 from dataclasses import dataclass
 from typing import Any

-from prefect.exceptions import ObjectNotFound
+import pendulum
+
 from prefect.results import BaseResult, PersistedResult, ResultFactory
 from prefect.utilities.asyncutils import run_coro_as_sync

@@ -17,9 +18,15 @@ class ResultFactoryStore(RecordStore):
         try:
             result = self.read(key)
             result.get(_sync=True)
+            if result.expiration:
+                # if the result has an expiration,
+                # check if it is still in the future
+                exists = result.expiration > pendulum.now("utc")
+            else:
+                exists = True
             self.cache = result
-            return True
-        except (ObjectNotFound, ValueError):
+            return exists
+        except Exception:
             return False

     def read(self, key: str) -> BaseResult:
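A record now counts as present only while its `expiration`, if set, lies in the future; the check is ordinary pendulum datetime comparison, sketched here in isolation:

    import pendulum

    expiration = pendulum.now("utc").add(hours=1)
    exists = expiration > pendulum.now("utc")  # True until the hour elapses

Note that the broadened `except Exception` also means any read or deserialization failure now reports the record as absent, where previously only `ObjectNotFound` and `ValueError` were caught.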
prefect/results.py CHANGED
@@ -18,6 +18,7 @@ from uuid import UUID
 from pydantic import BaseModel, ConfigDict, Field, PrivateAttr, ValidationError
 from pydantic_core import PydanticUndefinedType
+from pydantic_extra_types.pendulum_dt import DateTime
 from typing_extensions import ParamSpec, Self

 import prefect
@@ -37,6 +38,7 @@ from prefect.settings import (
     PREFECT_RESULTS_DEFAULT_SERIALIZER,
     PREFECT_RESULTS_PERSIST_BY_DEFAULT,
     PREFECT_TASK_SCHEDULING_DEFAULT_STORAGE_BLOCK,
+    default_result_storage_block_name,
 )
 from prefect.utilities.annotations import NotSet
 from prefect.utilities.asyncutils import sync_compatible
@@ -66,11 +68,18 @@ async def get_default_result_storage() -> ResultStorage:
     """
     Generate a default file system for result storage.
     """
-    return (
-        await Block.load(PREFECT_DEFAULT_RESULT_STORAGE_BLOCK.value())
-        if PREFECT_DEFAULT_RESULT_STORAGE_BLOCK.value() is not None
-        else LocalFileSystem(basepath=PREFECT_LOCAL_STORAGE_PATH.value())
-    )
+    try:
+        return await Block.load(PREFECT_DEFAULT_RESULT_STORAGE_BLOCK.value())
+    except ValueError as e:
+        if "Unable to find" not in str(e):
+            raise e
+        elif (
+            PREFECT_DEFAULT_RESULT_STORAGE_BLOCK.value()
+            == default_result_storage_block_name()
+        ):
+            return LocalFileSystem(basepath=PREFECT_LOCAL_STORAGE_PATH.value())
+        else:
+            raise


 _default_task_scheduling_storages: Dict[Tuple[str, str], WritableFileSystem] = {}
@@ -271,13 +280,6 @@ class ResultFactory(BaseModel):
         """
         Create a new result factory for a task.
         """
-        from prefect.context import FlowRunContext
-
-        ctx = FlowRunContext.get()
-
-        if ctx and ctx.autonomous_task_run:
-            return await cls.from_autonomous_task(task, client=client)
-
         return await cls._from_task(task, get_default_result_storage, client=client)

     @classmethod
@@ -426,16 +428,16 @@ class ResultFactory(BaseModel):
         )

     @sync_compatible
-    async def create_result(self, obj: R, key: str = None) -> Union[R, "BaseResult[R]"]:
+    async def create_result(
+        self, obj: R, key: Optional[str] = None, expiration: Optional[DateTime] = None
+    ) -> Union[R, "BaseResult[R]"]:
         """
         Create a result type for the given object.

         If persistence is disabled, the object is wrapped in an `UnpersistedResult` and
         returned.

-        If persistence is enabled:
-        - Bool and null types are converted into `LiteralResult`.
-        - Other types are serialized, persisted to storage, and a reference is returned.
+        If persistence is enabled, the object is serialized, persisted to storage, and a reference is returned.
         """
         # Null objects are "cached" in memory at no cost
         should_cache_object = self.cache_result_in_memory or obj is None
@@ -443,9 +445,6 @@ class ResultFactory(BaseModel):
         if not self.persist_result:
             return await UnpersistedResult.create(obj, cache_object=should_cache_object)

-        if type(obj) in LITERAL_TYPES:
-            return await LiteralResult.create(obj)
-
         if key:

             def key_fn():
@@ -462,6 +461,7 @@ class ResultFactory(BaseModel):
             storage_key_fn=storage_key_fn,
             serializer=self.serializer,
             cache_object=should_cache_object,
+            expiration=expiration,
         )

     @sync_compatible
@@ -569,41 +569,6 @@ class UnpersistedResult(BaseResult):
         return result


-class LiteralResult(BaseResult):
-    """
-    Result type for literal values like `None`, `True`, `False`.
-
-    These values are stored inline and JSON serialized when sent to the Prefect API.
-    They are not persisted to external result storage.
-    """
-
-    type: str = "literal"
-    value: Any = None
-
-    def has_cached_object(self) -> bool:
-        # This result type always has the object cached in memory
-        return True
-
-    @sync_compatible
-    async def get(self) -> R:
-        return self.value
-
-    @classmethod
-    @sync_compatible
-    async def create(
-        cls: "Type[LiteralResult]",
-        obj: R,
-    ) -> "LiteralResult[R]":
-        if type(obj) not in LITERAL_TYPES:
-            raise TypeError(
-                f"Unsupported type {type(obj).__name__!r} for result literal. Expected"
-                f" one of: {', '.join(type_.__name__ for type_ in LITERAL_TYPES)}"
-            )
-
-        description = f"Result with value `{obj}` persisted to Prefect."
-        return cls(value=obj, artifact_type="result", artifact_description=description)
-
-
 class PersistedResult(BaseResult):
     """
     Result type which stores a reference to a persisted result.
@@ -619,6 +584,7 @@ class PersistedResult(BaseResult):
     serializer_type: str
     storage_block_id: uuid.UUID
     storage_key: str
+    expiration: Optional[DateTime] = None

     _should_cache_object: bool = PrivateAttr(default=True)

@@ -634,6 +600,7 @@

         blob = await self._read_blob(client=client)
         obj = blob.serializer.loads(blob.data)
+        self.expiration = blob.expiration

         if self._should_cache_object:
             self._cache_object(obj)
@@ -673,6 +640,7 @@
         storage_key_fn: Callable[[], str],
         serializer: Serializer,
         cache_object: bool = True,
+        expiration: Optional[DateTime] = None,
     ) -> "PersistedResult[R]":
         """
         Create a new result reference from a user's object.
@@ -684,7 +652,9 @@
             storage_block_id is not None
         ), "Unexpected storage block ID. Was it persisted?"
         data = serializer.dumps(obj)
-        blob = PersistedResultBlob(serializer=serializer, data=data)
+        blob = PersistedResultBlob(
+            serializer=serializer, data=data, expiration=expiration
+        )

         key = storage_key_fn()
         if not isinstance(key, str):
@@ -709,6 +679,7 @@
             storage_key=key,
             artifact_type="result",
             artifact_description=description,
+            expiration=expiration,
         )

@@ -730,6 +701,7 @@ class PersistedResultBlob(BaseModel):
     serializer: Serializer
     data: bytes
     prefect_version: str = Field(default=prefect.__version__)
+    expiration: Optional[DateTime] = None

     def to_bytes(self) -> bytes:
         return self.model_dump_json(serialize_as_any=True).encode()
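Taken together, these changes drop the special-cased `LiteralResult` in favor of a single persistence path and thread an optional `expiration` from `create_result` into the stored `PersistedResultBlob` and back out on read. A hedged usage sketch, assuming `factory` is an already-configured `ResultFactory` and the call runs inside a coroutine:

    import pendulum

    result = await factory.create_result(
        {"answer": 42},
        key="my-result",                             # used as the storage key
        expiration=pendulum.now("utc").add(days=1),  # persisted in the blob
    )
    # On a later read, PersistedResult.expiration is repopulated from the
    # blob, which is exactly what ResultFactoryStore.exists() checks above.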
prefect/runner/runner.py CHANGED
@@ -209,7 +209,7 @@ class Runner:
     async def add_flow(
         self,
         flow: Flow,
-        name: str = None,
+        name: Optional[str] = None,
         interval: Optional[
             Union[
                 Iterable[Union[int, float, datetime.timedelta]],
prefect/runner/server.py CHANGED
@@ -42,7 +42,7 @@ class RunnerGenericFlowRunRequest(BaseModel):
     parent_task_run_id: Optional[uuid.UUID] = None


-def perform_health_check(runner, delay_threshold: int = None) -> JSONResponse:
+def perform_health_check(runner, delay_threshold: Optional[int] = None) -> JSONResponse:
     if delay_threshold is None:
         delay_threshold = (
             PREFECT_RUNNER_SERVER_MISSED_POLLS_TOLERANCE.value()
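Both runner fixes are the same hygiene change: under PEP 484, a `None` default no longer implies an optional type, so the annotation must say so explicitly or strict type checkers will flag the signature. A sketch of the pattern:

    from typing import Optional

    def perform_health_check(runner, delay_threshold: Optional[int] = None): ...
    # previously `delay_threshold: int = None`, which recent mypy versions
    # reject by default (no implicit Optional)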
prefect/runtime/__init__.py CHANGED
@@ -8,6 +8,7 @@ Example usage:
     print(f"This script is running from deployment {deployment.id} with parameters {deployment.parameters}")
     ```
 """
+
 import prefect.runtime.deployment
 import prefect.runtime.flow_run
 import prefect.runtime.task_run
prefect/runtime/deployment.py CHANGED
@@ -24,6 +24,7 @@ Available attributes:
     include default values set on the flow function, only the parameter values set on the deployment
     object or those directly provided via API for this run
 """
+
 import os
 from typing import Any, Dict, List, Optional

prefect/runtime/flow_run.py CHANGED
@@ -18,6 +18,7 @@ Available attributes:
 - `parent_deployment_id`: the ID of the deployment that triggered this run, if any
 - `run_count`: the number of times this flow run has been run
 """
+
 import os
 from typing import Any, Dict, List, Optional

prefect/runtime/task_run.py CHANGED
@@ -14,6 +14,7 @@ Available attributes:
 - `run_count`: the number of times this task run has been run
 - `task_name`: the name of the task
 """
+
 import os
 from typing import Any, Dict, List, Optional

prefect/settings.py CHANGED
@@ -42,6 +42,7 @@ dependent on the value of other settings or perform other dynamic effects.

 import logging
 import os
+import socket
 import string
 import warnings
 from contextlib import contextmanager
@@ -84,6 +85,7 @@ from prefect._internal.schemas.validators import validate_settings
 from prefect.exceptions import MissingProfileError
 from prefect.utilities.names import OBFUSCATED_PREFIX, obfuscate
 from prefect.utilities.pydantic import add_cloudpickle_reduction
+from prefect.utilities.slugify import slugify

 T = TypeVar("T")

@@ -417,6 +419,18 @@ def warn_on_misconfigured_api_url(values):
     return values


+def default_result_storage_block_name(
+    settings: Optional["Settings"] = None, value: Optional[str] = None
+):
+    """
+    `value_callback` for `PREFECT_DEFAULT_RESULT_STORAGE_BLOCK` that sets the default
+    value to a block name derived from the machine's hostname.
+    """
+    if value is None:
+        return f"local-file-system/{slugify(socket.gethostname())}-storage"
+    return value
+
+
 def default_database_connection_url(settings, value):
     templater = template_with_settings(PREFECT_HOME, PREFECT_API_DATABASE_PASSWORD)

@@ -1208,6 +1222,9 @@ PREFECT_API_SERVICES_FOREMAN_WORK_QUEUE_LAST_POLLED_TIMEOUT_SECONDS = Setting(
 """The number of seconds before a work queue is marked as not ready if it has not been
 polled."""

+PREFECT_API_LOG_RETRYABLE_ERRORS = Setting(bool, default=False)
+"""If `True`, log retryable errors in the API and its services."""
+

 PREFECT_API_DEFAULT_LIMIT = Setting(
     int,
@@ -1527,9 +1544,9 @@ PREFECT_TASK_SCHEDULING_PENDING_TASK_TIMEOUT = Setting(
     default=timedelta(seconds=30),
 )
 """
-How long before a PENDING task are made available to another task server. In practice,
-a task server should move a task from PENDING to RUNNING very quickly, so runs stuck in
-PENDING for a while is a sign that the task server may have crashed.
+How long before a PENDING task is made available to another task worker. In practice,
+a task worker should move a task from PENDING to RUNNING very quickly, so runs stuck in
+PENDING for a while are a sign that the task worker may have crashed.
 """

 PREFECT_EXPERIMENTAL_ENABLE_EXTRA_RUNNER_ENDPOINTS = Setting(bool, default=False)
@@ -1572,8 +1589,7 @@ PREFECT_EXPERIMENTAL_ENABLE_SCHEDULE_CONCURRENCY = Setting(bool, default=False)
 # Defaults -----------------------------------------------------------------------------

 PREFECT_DEFAULT_RESULT_STORAGE_BLOCK = Setting(
-    Optional[str],
-    default=None,
+    Optional[str], default=None, value_callback=default_result_storage_block_name
 )
 """The `block-type/block-document` slug of a block to use as the default result storage."""

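The net effect: `PREFECT_DEFAULT_RESULT_STORAGE_BLOCK` now defaults to a per-machine block slug rather than `None`. A sketch of what the callback computes (the hostname is illustrative):

    import socket
    from prefect.utilities.slugify import slugify

    slug = f"local-file-system/{slugify(socket.gethostname())}-storage"
    # e.g. "local-file-system/my-laptop-storage"

This pairs with the `get_default_result_storage` change in results.py above: when the block bearing this generated default name does not exist yet, Prefect falls back to a `LocalFileSystem`, while a missing user-specified block still raises.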
prefect/states.py CHANGED
@@ -204,7 +204,12 @@ async def exception_to_failed_state(
     return state


-async def return_value_to_state(retval: R, result_factory: ResultFactory) -> State[R]:
+async def return_value_to_state(
+    retval: R,
+    result_factory: ResultFactory,
+    key: Optional[str] = None,
+    expiration: Optional[datetime.datetime] = None,
+) -> State[R]:
     """
     Given a return value from a user's function, create a `State` the run should
     be placed in.
@@ -236,7 +241,9 @@ async def return_value_to_state(retval: R, result_factory: ResultFactory) -> Sta
     # Unless the user has already constructed a result explicitly, use the factory
     # to update the data to the correct type
     if not isinstance(state.data, BaseResult):
-        state.data = await result_factory.create_result(state.data)
+        state.data = await result_factory.create_result(
+            state.data, key=key, expiration=expiration
+        )

     return state

@@ -276,7 +283,9 @@ async def return_value_to_state(retval: R, result_factory: ResultFactory) -> Sta
         return State(
             type=new_state_type,
             message=message,
-            data=await result_factory.create_result(retval),
+            data=await result_factory.create_result(
+                retval, key=key, expiration=expiration
+            ),
         )

     # Generators aren't portable, implicitly convert them to a list.
@@ -289,7 +298,11 @@ async def return_value_to_state(retval: R, result_factory: ResultFactory) -> Sta
     if isinstance(data, BaseResult):
         return Completed(data=data)
     else:
-        return Completed(data=await result_factory.create_result(data))
+        return Completed(
+            data=await result_factory.create_result(
+                data, key=key, expiration=expiration
+            )
+        )


 @sync_compatible
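`return_value_to_state` now forwards `key` and `expiration` into every `create_result` call, letting the caller stamp the result it is about to persist. A hedged sketch of the call shape; `retval`, `factory`, and `cache_key` stand in for values a task engine would supply:

    import pendulum

    state = await return_value_to_state(
        retval,
        result_factory=factory,
        key=cache_key,  # e.g. produced by a CachePolicy.compute_key(...)
        expiration=pendulum.now("utc").add(hours=12),
    )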