prefect-client 3.0.0rc3__py3-none-any.whl → 3.0.0rc5__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (42)
  1. prefect/__init__.py +0 -3
  2. prefect/client/schemas/schedules.py +9 -2
  3. prefect/client/subscriptions.py +3 -3
  4. prefect/client/types/__init__.py +0 -0
  5. prefect/client/types/flexible_schedule_list.py +11 -0
  6. prefect/concurrency/asyncio.py +14 -4
  7. prefect/concurrency/services.py +29 -22
  8. prefect/concurrency/sync.py +3 -5
  9. prefect/context.py +0 -114
  10. prefect/deployments/__init__.py +1 -1
  11. prefect/deployments/runner.py +11 -93
  12. prefect/deployments/schedules.py +5 -7
  13. prefect/docker/__init__.py +20 -0
  14. prefect/docker/docker_image.py +82 -0
  15. prefect/flow_engine.py +96 -20
  16. prefect/flows.py +36 -95
  17. prefect/futures.py +22 -2
  18. prefect/infrastructure/provisioners/cloud_run.py +2 -2
  19. prefect/infrastructure/provisioners/container_instance.py +2 -2
  20. prefect/infrastructure/provisioners/ecs.py +2 -2
  21. prefect/records/result_store.py +5 -1
  22. prefect/results.py +111 -42
  23. prefect/runner/runner.py +5 -3
  24. prefect/runner/server.py +6 -2
  25. prefect/settings.py +1 -1
  26. prefect/states.py +13 -3
  27. prefect/task_engine.py +7 -6
  28. prefect/task_runs.py +23 -9
  29. prefect/task_worker.py +128 -19
  30. prefect/tasks.py +20 -16
  31. prefect/transactions.py +8 -10
  32. prefect/types/__init__.py +10 -3
  33. prefect/types/entrypoint.py +13 -0
  34. prefect/utilities/collections.py +120 -57
  35. prefect/utilities/dockerutils.py +2 -1
  36. prefect/utilities/urls.py +5 -5
  37. {prefect_client-3.0.0rc3.dist-info → prefect_client-3.0.0rc5.dist-info}/METADATA +2 -2
  38. {prefect_client-3.0.0rc3.dist-info → prefect_client-3.0.0rc5.dist-info}/RECORD +41 -37
  39. prefect/blocks/kubernetes.py +0 -115
  40. {prefect_client-3.0.0rc3.dist-info → prefect_client-3.0.0rc5.dist-info}/LICENSE +0 -0
  41. {prefect_client-3.0.0rc3.dist-info → prefect_client-3.0.0rc5.dist-info}/WHEEL +0 -0
  42. {prefect_client-3.0.0rc3.dist-info → prefect_client-3.0.0rc5.dist-info}/top_level.txt +0 -0
prefect/docker/docker_image.py ADDED
@@ -0,0 +1,82 @@
+ from pathlib import Path
+ from typing import Optional
+
+ from pendulum import now as pendulum_now
+
+ from prefect.settings import (
+     PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE,
+ )
+ from prefect.utilities.dockerutils import (
+     PushError,
+     build_image,
+     docker_client,
+     generate_default_dockerfile,
+     parse_image_tag,
+     split_repository_path,
+ )
+ from prefect.utilities.slugify import slugify
+
+
+ class DockerImage:
+     """
+     Configuration used to build and push a Docker image for a deployment.
+
+     Attributes:
+         name: The name of the Docker image to build, including the registry and
+             repository.
+         tag: The tag to apply to the built image.
+         dockerfile: The path to the Dockerfile to use for building the image. If
+             not provided, a default Dockerfile will be generated.
+         **build_kwargs: Additional keyword arguments to pass to the Docker build request.
+             See the [`docker-py` documentation](https://docker-py.readthedocs.io/en/stable/images.html#docker.models.images.ImageCollection.build)
+             for more information.
+
+     """
+
+     def __init__(
+         self, name: str, tag: Optional[str] = None, dockerfile="auto", **build_kwargs
+     ):
+         image_name, image_tag = parse_image_tag(name)
+         if tag and image_tag:
+             raise ValueError(
+                 f"Only one tag can be provided - both {image_tag!r} and {tag!r} were"
+                 " provided as tags."
+             )
+         namespace, repository = split_repository_path(image_name)
+         # if the provided image name does not include a namespace (registry URL or user/org name),
+         # use the default namespace
+         if not namespace:
+             namespace = PREFECT_DEFAULT_DOCKER_BUILD_NAMESPACE.value()
+         # join the namespace and repository to create the full image name
+         # ignore namespace if it is None
+         self.name = "/".join(filter(None, [namespace, repository]))
+         self.tag = tag or image_tag or slugify(pendulum_now("utc").isoformat())
+         self.dockerfile = dockerfile
+         self.build_kwargs = build_kwargs
+
+     @property
+     def reference(self):
+         return f"{self.name}:{self.tag}"
+
+     def build(self):
+         full_image_name = self.reference
+         build_kwargs = self.build_kwargs.copy()
+         build_kwargs["context"] = Path.cwd()
+         build_kwargs["tag"] = full_image_name
+         build_kwargs["pull"] = build_kwargs.get("pull", True)
+
+         if self.dockerfile == "auto":
+             with generate_default_dockerfile():
+                 build_image(**build_kwargs)
+         else:
+             build_kwargs["dockerfile"] = self.dockerfile
+             build_image(**build_kwargs)
+
+     def push(self):
+         with docker_client() as client:
+             events = client.api.push(
+                 repository=self.name, tag=self.tag, stream=True, decode=True
+             )
+             for event in events:
+                 if "error" in event:
+                     raise PushError(event["error"])
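For orientation, a minimal usage sketch of the new class. The registry and image names below are hypothetical, and `platform` is just one example of a keyword argument forwarded to the Docker build:

    from prefect.docker import DockerImage

    # hypothetical image name; if neither `name` nor `tag` carries a tag,
    # the tag defaults to a slugified UTC timestamp
    image = DockerImage(
        name="registry.example.com/my-org/my-image",
        dockerfile="auto",          # generate a default Dockerfile at build time
        platform="linux/amd64",     # extra kwargs are forwarded to the Docker build
    )
    image.build()            # builds with the current working directory as the build context
    image.push()             # streams push events and raises PushError on failure
    print(image.reference)   # "<name>:<tag>"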
prefect/flow_engine.py CHANGED
@@ -6,6 +6,7 @@ from contextlib import ExitStack, contextmanager
  from dataclasses import dataclass, field
  from typing import (
      Any,
+     AsyncGenerator,
      Callable,
      Coroutine,
      Dict,
@@ -50,12 +51,13 @@ from prefect.states import (
      return_value_to_state,
  )
  from prefect.utilities.asyncutils import run_coro_as_sync
- from prefect.utilities.callables import call_with_parameters
+ from prefect.utilities.callables import call_with_parameters, parameters_to_args_kwargs
  from prefect.utilities.collections import visit_collection
  from prefect.utilities.engine import (
      _get_hook_name,
      _resolve_custom_flow_run_name,
      capture_sigterm,
+     link_state_to_result,
      propose_state_sync,
      resolve_to_final_result,
  )
@@ -166,6 +168,20 @@ class FlowRunEngine(Generic[P, R]):
              )
              return state
 
+         # validate prior to context so that context receives validated params
+         if self.flow.should_validate_parameters:
+             try:
+                 self.parameters = self.flow.validate_parameters(self.parameters or {})
+             except Exception as exc:
+                 message = "Validation of flow parameters failed with error:"
+                 self.logger.error("%s %s", message, exc)
+                 self.handle_exception(
+                     exc,
+                     msg=message,
+                     result_factory=run_coro_as_sync(ResultFactory.from_flow(self.flow)),
+                 )
+                 self.short_circuit = True
+
          new_state = Running()
          state = self.set_state(new_state)
          while state.is_pending():
@@ -482,24 +498,6 @@ class FlowRunEngine(Generic[P, R]):
              flow_version=self.flow.version,
              empirical_policy=self.flow_run.empirical_policy,
          )
-
-         # validate prior to context so that context receives validated params
-         if self.flow.should_validate_parameters:
-             try:
-                 self.parameters = self.flow.validate_parameters(
-                     self.parameters or {}
-                 )
-             except Exception as exc:
-                 message = "Validation of flow parameters failed with error:"
-                 self.logger.error("%s %s", message, exc)
-                 self.handle_exception(
-                     exc,
-                     msg=message,
-                     result_factory=run_coro_as_sync(
-                         ResultFactory.from_flow(self.flow)
-                     ),
-                 )
-                 self.short_circuit = True
          try:
              yield self
          except Exception:
@@ -632,6 +630,80 @@ async def run_flow_async(
      return engine.state if return_type == "state" else engine.result()
 
 
+ def run_generator_flow_sync(
+     flow: Flow[P, R],
+     flow_run: Optional[FlowRun] = None,
+     parameters: Optional[Dict[str, Any]] = None,
+     wait_for: Optional[Iterable[PrefectFuture]] = None,
+     return_type: Literal["state", "result"] = "result",
+ ) -> Generator[R, None, None]:
+     if return_type != "result":
+         raise ValueError("The return_type for a generator flow must be 'result'")
+
+     engine = FlowRunEngine[P, R](
+         flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+     )
+
+     with engine.start():
+         while engine.is_running():
+             with engine.run_context():
+                 call_args, call_kwargs = parameters_to_args_kwargs(
+                     flow.fn, engine.parameters or {}
+                 )
+                 gen = flow.fn(*call_args, **call_kwargs)
+                 try:
+                     while True:
+                         gen_result = next(gen)
+                         # link the current state to the result for dependency tracking
+                         link_state_to_result(engine.state, gen_result)
+                         yield gen_result
+                 except StopIteration as exc:
+                     engine.handle_success(exc.value)
+                 except GeneratorExit as exc:
+                     engine.handle_success(None)
+                     gen.throw(exc)
+
+     return engine.result()
+
+
+ async def run_generator_flow_async(
+     flow: Flow[P, R],
+     flow_run: Optional[FlowRun] = None,
+     parameters: Optional[Dict[str, Any]] = None,
+     wait_for: Optional[Iterable[PrefectFuture]] = None,
+     return_type: Literal["state", "result"] = "result",
+ ) -> AsyncGenerator[R, None]:
+     if return_type != "result":
+         raise ValueError("The return_type for a generator flow must be 'result'")
+
+     engine = FlowRunEngine[P, R](
+         flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+     )
+
+     with engine.start():
+         while engine.is_running():
+             with engine.run_context():
+                 call_args, call_kwargs = parameters_to_args_kwargs(
+                     flow.fn, engine.parameters or {}
+                 )
+                 gen = flow.fn(*call_args, **call_kwargs)
+                 try:
+                     while True:
+                         # can't use anext in Python < 3.10
+                         gen_result = await gen.__anext__()
+                         # link the current state to the result for dependency tracking
+                         link_state_to_result(engine.state, gen_result)
+                         yield gen_result
+                 except (StopAsyncIteration, GeneratorExit) as exc:
+                     engine.handle_success(None)
+                     if isinstance(exc, GeneratorExit):
+                         gen.throw(exc)
+
+     # async generators can't return, but we can raise failures here
+     if engine.state.is_failed():
+         engine.result()
+
+
  def run_flow(
      flow: Flow[P, R],
      flow_run: Optional[FlowRun] = None,
@@ -646,7 +718,11 @@ def run_flow(
          wait_for=wait_for,
          return_type=return_type,
      )
-     if flow.isasync:
+     if flow.isasync and flow.isgenerator:
+         return run_generator_flow_async(**kwargs)
+     elif flow.isgenerator:
+         return run_generator_flow_sync(**kwargs)
+     elif flow.isasync:
          return run_flow_async(**kwargs)
      else:
          return run_flow_sync(**kwargs)
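Together these hunks let `run_flow` route generator and async-generator flow functions to the new `run_generator_flow_sync` / `run_generator_flow_async` paths, which yield each value back to the caller (linking it to the run's state) and mark the run successful once the generator is exhausted. A minimal sketch of what this enables; the flow below is illustrative:

    from prefect import flow

    @flow
    def stream_numbers(limit: int = 3):
        # each yielded value is linked to the flow run's state before being handed back
        for i in range(limit):
            yield i

    # consuming the generator drives the engine; exhausting it completes the run
    for value in stream_numbers():
        print(value)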
prefect/flows.py CHANGED
@@ -17,11 +17,9 @@ import warnings
  from copy import copy
  from functools import partial, update_wrapper
  from pathlib import Path
- from tempfile import NamedTemporaryFile
  from typing import (
      TYPE_CHECKING,
      Any,
-     AnyStr,
      Awaitable,
      Callable,
      Coroutine,
@@ -56,9 +54,9 @@ from prefect.client.schemas.objects import Flow as FlowSchema
  from prefect.client.schemas.objects import FlowRun
  from prefect.client.schemas.schedules import SCHEDULE_TYPES
  from prefect.client.utilities import client_injector
- from prefect.context import PrefectObjectRegistry, registry_from_script
- from prefect.deployments.runner import DeploymentImage, EntrypointType, deploy
+ from prefect.deployments.runner import deploy
  from prefect.deployments.steps.core import run_steps
+ from prefect.docker.docker_image import DockerImage
  from prefect.events import DeploymentTriggerTypes, TriggerTypes
  from prefect.exceptions import (
      InvalidNameError,
@@ -87,9 +85,9 @@ from prefect.settings import (
  from prefect.states import State
  from prefect.task_runners import TaskRunner, ThreadPoolTaskRunner
  from prefect.types import BANNED_CHARACTERS, WITHOUT_BANNED_CHARACTERS
+ from prefect.types.entrypoint import EntrypointType
  from prefect.utilities.annotations import NotSet
  from prefect.utilities.asyncutils import (
-     is_async_fn,
      run_sync_in_worker_thread,
      sync_compatible,
  )
@@ -119,11 +117,11 @@ logger = get_logger("flows")
 
  if TYPE_CHECKING:
      from prefect.client.orchestration import PrefectClient
-     from prefect.deployments.runner import FlexibleScheduleList, RunnerDeployment
+     from prefect.client.types.flexible_schedule_list import FlexibleScheduleList
+     from prefect.deployments.runner import RunnerDeployment
      from prefect.flows import FlowRun
 
 
- @PrefectObjectRegistry.register_instances
  class Flow(Generic[P, R]):
      """
      A Prefect workflow definition.
@@ -146,7 +144,7 @@ class Flow(Generic[P, R]):
          be provided as a string template with the flow's parameters as variables,
          or a function that returns a string.
      task_runner: An optional task runner to use for task execution within the flow;
-         if not provided, a `ConcurrentTaskRunner` will be used.
+         if not provided, a `ThreadPoolTaskRunner` will be used.
      description: An optional string description for the flow; if not provided, the
          description will be pulled from the docstring for the decorated function.
      timeout_seconds: An optional number of seconds indicating a maximum runtime for
@@ -289,7 +287,18 @@ class Flow(Generic[P, R]):
          self.description = description or inspect.getdoc(fn)
          update_wrapper(self, fn)
          self.fn = fn
-         self.isasync = is_async_fn(self.fn)
+
+         # the flow is considered async if its function is async or an async
+         # generator
+         self.isasync = inspect.iscoroutinefunction(
+             self.fn
+         ) or inspect.isasyncgenfunction(self.fn)
+
+         # the flow is considered a generator if its function is a generator or
+         # an async generator
+         self.isgenerator = inspect.isgeneratorfunction(
+             self.fn
+         ) or inspect.isasyncgenfunction(self.fn)
 
          raise_for_reserved_arguments(self.fn, ["return_state", "wait_for"])
 
@@ -988,7 +997,7 @@ class Flow(Generic[P, R]):
          self,
          name: str,
          work_pool_name: Optional[str] = None,
-         image: Optional[Union[str, DeploymentImage]] = None,
+         image: Optional[Union[str, DockerImage]] = None,
          build: bool = True,
          push: bool = True,
          work_queue_name: Optional[str] = None,
@@ -1024,7 +1033,7 @@ class Flow(Generic[P, R]):
          work_pool_name: The name of the work pool to use for this deployment. Defaults to
              the value of `PREFECT_DEFAULT_WORK_POOL_NAME`.
          image: The name of the Docker image to build, including the registry and
-             repository. Pass a DeploymentImage instance to customize the Dockerfile used
+             repository. Pass a DockerImage instance to customize the Dockerfile used
              and build arguments.
          build: Whether or not to build a new image for the flow. If False, the provided
              image will be used as-is and pulled at runtime.
@@ -1628,47 +1637,6 @@ def select_flow(
      return list(flows_dict.values())[0]
 
 
- def load_flows_from_script(path: str) -> List[Flow]:
-     """
-     Load all flow objects from the given python script. All of the code in the file
-     will be executed.
-
-     Returns:
-         A list of flows
-
-     Raises:
-         FlowScriptError: If an exception is encountered while running the script
-     """
-     return registry_from_script(path).get_instances(Flow)
-
-
- def load_flow_from_script(path: str, flow_name: Optional[str] = None) -> Flow:
-     """
-     Extract a flow object from a script by running all of the code in the file.
-
-     If the script has multiple flows in it, a flow name must be provided to specify
-     the flow to return.
-
-     Args:
-         path: A path to a Python script containing flows
-         flow_name: An optional flow name to look for in the script
-
-     Returns:
-         The flow object from the script
-
-     Raises:
-         FlowScriptError: If an exception is encountered while running the script
-         MissingFlowError: If no flows exist in the iterable
-         MissingFlowError: If a flow name is provided and that flow does not exist
-         UnspecifiedFlowError: If multiple flows exist but no flow name was provided
-     """
-     return select_flow(
-         load_flows_from_script(path),
-         flow_name=flow_name,
-         from_message=f"in script '{path}'",
-     )
-
-
  def load_flow_from_entrypoint(
      entrypoint: str,
  ) -> Flow:
@@ -1686,52 +1654,25 @@ def load_flow_from_entrypoint(
          FlowScriptError: If an exception is encountered while running the script
          MissingFlowError: If the flow function specified in the entrypoint does not exist
      """
-     with PrefectObjectRegistry(  # type: ignore
-         block_code_execution=True,
-         capture_failures=True,
-     ):
-         if ":" in entrypoint:
-             # split by the last colon once to handle Windows paths with drive letters i.e C:\path\to\file.py:do_stuff
-             path, func_name = entrypoint.rsplit(":", maxsplit=1)
-         else:
-             path, func_name = entrypoint.rsplit(".", maxsplit=1)
-         try:
-             flow = import_object(entrypoint)
-         except AttributeError as exc:
-             raise MissingFlowError(
-                 f"Flow function with name {func_name!r} not found in {path!r}. "
-             ) from exc
-
-         if not isinstance(flow, Flow):
-             raise MissingFlowError(
-                 f"Function with name {func_name!r} is not a flow. Make sure that it is "
-                 "decorated with '@flow'."
-             )
-
-         return flow
 
+     if ":" in entrypoint:
+         # split by the last colon once to handle Windows paths with drive letters i.e C:\path\to\file.py:do_stuff
+         path, func_name = entrypoint.rsplit(":", maxsplit=1)
+     else:
+         path, func_name = entrypoint.rsplit(".", maxsplit=1)
+     try:
+         flow = import_object(entrypoint)
+     except AttributeError as exc:
+         raise MissingFlowError(
+             f"Flow function with name {func_name!r} not found in {path!r}. "
+         ) from exc
 
- def load_flow_from_text(script_contents: AnyStr, flow_name: str) -> Flow:
-     """
-     Load a flow from a text script.
+     if not isinstance(flow, Flow):
+         raise MissingFlowError(
+             f"Function with name {func_name!r} is not a flow. Make sure that it is "
+             "decorated with '@flow'."
+         )
 
-     The script will be written to a temporary local file path so errors can refer
-     to line numbers and contextual tracebacks can be provided.
-     """
-     with NamedTemporaryFile(
-         mode="wt" if isinstance(script_contents, str) else "wb",
-         prefix=f"flow-script-{flow_name}",
-         suffix=".py",
-         delete=False,
-     ) as tmpfile:
-         tmpfile.write(script_contents)
-         tmpfile.flush()
-         try:
-             flow = load_flow_from_script(tmpfile.name, flow_name=flow_name)
-         finally:
-             # windows compat
-             tmpfile.close()
-             os.remove(tmpfile.name)
      return flow
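The replacement for `is_async_fn` classifies flows with `inspect` alone; an async generator function counts as both async and a generator, which is why the `run_flow` dispatch shown earlier checks `isasync and isgenerator` first. A small sketch of the classification (the function name is illustrative):

    import inspect

    async def my_streaming_flow_fn():
        yield 1

    # async generator functions are not coroutine functions...
    print(inspect.iscoroutinefunction(my_streaming_flow_fn))  # False
    # ...but they are async generator functions, so both flags end up True
    print(inspect.isasyncgenfunction(my_streaming_flow_fn))   # True
    print(inspect.isgeneratorfunction(my_streaming_flow_fn))  # False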
 
prefect/futures.py CHANGED
@@ -10,7 +10,7 @@ from typing_extensions import TypeVar
  from prefect.client.orchestration import get_client
  from prefect.client.schemas.objects import TaskRun
  from prefect.exceptions import ObjectNotFound
- from prefect.logging.loggers import get_logger
+ from prefect.logging.loggers import get_logger, get_run_logger
  from prefect.states import Pending, State
  from prefect.task_runs import TaskRunWaiter
  from prefect.utilities.annotations import quote
@@ -56,7 +56,7 @@ class PrefectFuture(abc.ABC):
      def wait(self, timeout: Optional[float] = None) -> None:
          ...
          """
-         Wait for the task run to complete.
+         Wait for the task run to complete.
 
          If the task run has already completed, this method will return immediately.
 
@@ -143,6 +143,18 @@ class PrefectConcurrentFuture(PrefectWrappedFuture[concurrent.futures.Future]):
              _result = run_coro_as_sync(_result)
          return _result
 
+     def __del__(self):
+         if self._final_state or self._wrapped_future.done():
+             return
+         try:
+             local_logger = get_run_logger()
+         except Exception:
+             local_logger = logger
+         local_logger.warning(
+             "A future was garbage collected before it resolved."
+             " Please call `.wait()` or `.result()` on futures to ensure they resolve.",
+         )
+
 
  class PrefectDistributedFuture(PrefectFuture):
      """
@@ -163,6 +175,10 @@ class PrefectDistributedFuture(PrefectFuture):
              )
              return
 
+         # Ask for the instance of TaskRunWaiter _now_ so that it's already running and
+         # can catch the completion event if it happens before we start listening for it.
+         TaskRunWaiter.instance()
+
          # Read task run to see if it is still running
          async with get_client() as client:
              task_run = await client.read_task_run(task_run_id=self._task_run_id)
@@ -245,6 +261,10 @@ def resolve_futures_to_states(
              context={},
          )
 
+     # if no futures were found, return the original expression
+     if not futures:
+         return expr
+
      # Get final states for each future
      states = []
      for future in futures:
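The new `__del__` hook on `PrefectConcurrentFuture` logs a warning whenever a future is garbage collected before it has resolved. A sketch of the pattern the warning nudges toward (the task and flow here are illustrative):

    from prefect import flow, task

    @task
    def add(x: int, y: int) -> int:
        return x + y

    @flow
    def my_flow() -> int:
        future = add.submit(1, 2)
        # resolve the future explicitly; dropping it unresolved now logs a warning
        return future.result()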
prefect/infrastructure/provisioners/cloud_run.py CHANGED
@@ -404,7 +404,7 @@ class CloudRunPushProvisioner:
              dedent(
                  f"""\
                  from prefect import flow
-                 from prefect.deployments import DeploymentImage
+                 from prefect.docker import DockerImage
 
 
                  @flow(log_prints=True)
@@ -416,7 +416,7 @@ class CloudRunPushProvisioner:
                  my_flow.deploy(
                      name="my-deployment",
                      work_pool_name="{work_pool_name}",
-                     image=DeploymentImage(
+                     image=DockerImage(
                          name="my-image:latest",
                          platform="linux/amd64",
                      )
prefect/infrastructure/provisioners/container_instance.py CHANGED
@@ -1042,7 +1042,7 @@ class ContainerInstancePushProvisioner:
              dedent(
                  f"""\
                  from prefect import flow
-                 from prefect.deployments import DeploymentImage
+                 from prefect.docker import DockerImage
 
 
                  @flow(log_prints=True)
@@ -1054,7 +1054,7 @@ class ContainerInstancePushProvisioner:
                  my_flow.deploy(
                      name="my-deployment",
                      work_pool_name="{work_pool_name}",
-                     image=DeploymentImage(
+                     image=DockerImage(
                          name="my-image:latest",
                          platform="linux/amd64",
                      )
prefect/infrastructure/provisioners/ecs.py CHANGED
@@ -950,7 +950,7 @@ class ContainerRepositoryResource:
              dedent(
                  f"""\
                  from prefect import flow
-                 from prefect.deployments import DeploymentImage
+                 from prefect.docker import DockerImage
 
 
                  @flow(log_prints=True)
@@ -962,7 +962,7 @@ class ContainerRepositoryResource:
                  my_flow.deploy(
                      name="my-deployment",
                      work_pool_name="{self._work_pool_name}",
-                     image=DeploymentImage(
+                     image=DockerImage(
                          name="{self._repository_name}:latest",
                          platform="linux/amd64",
                      )
prefect/records/result_store.py CHANGED
@@ -44,6 +44,10 @@ class ResultFactoryStore(RecordStore):
          raise ValueError("Result could not be read")
 
      def write(self, key: str, value: Any) -> BaseResult:
-         if isinstance(value, BaseResult):
+         if isinstance(value, PersistedResult):
+             # if the value is already a persisted result, write it
+             value.write(_sync=True)
+             return value
+         elif isinstance(value, BaseResult):
              return value
          return run_coro_as_sync(self.result_factory.create_result(obj=value, key=key))
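Since `PersistedResult` is the more specific type (it subclasses `BaseResult`), the new check has to come first; the effect is that already-materialized results are flushed to storage synchronously instead of being passed through untouched. An annotated restatement of the resulting method, for readability only:

    def write(self, key: str, value: Any) -> BaseResult:
        if isinstance(value, PersistedResult):
            # already a persisted result: write it to storage now, then return it
            value.write(_sync=True)
            return value
        elif isinstance(value, BaseResult):
            # some other result object: nothing further to persist
            return value
        # plain value: let the result factory create a result for it under `key`
        return run_coro_as_sync(self.result_factory.create_result(obj=value, key=key))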