prefect-client 3.0.0rc7__py3-none-any.whl → 3.0.0rc9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prefect/__init__.py CHANGED
@@ -5,6 +5,7 @@
  from . import _version
  import importlib
  import pathlib
+ from typing import TYPE_CHECKING, Any

  __version_info__ = _version.get_versions()
  __version__ = __version_info__["version"]
@@ -23,63 +24,55 @@ __ui_static_path__ = __module_path__ / "server" / "ui"

  del _version, pathlib

-
- # Import user-facing API
- from prefect.deployments import deploy
- from prefect.states import State
- from prefect.logging import get_run_logger
- from prefect.flows import flow, Flow, serve
- from prefect.transactions import Transaction
- from prefect.tasks import task, Task
- from prefect.context import tags
- from prefect.manifests import Manifest
- from prefect.utilities.annotations import unmapped, allow_failure
- from prefect.results import BaseResult
- from prefect.flow_runs import pause_flow_run, resume_flow_run, suspend_flow_run
- from prefect.client.orchestration import get_client, PrefectClient
- from prefect.client.cloud import get_cloud_client, CloudClient
- import prefect.variables
- import prefect.runtime
-
- # Import modules that register types
- import prefect.serializers
- import prefect.blocks.notifications
- import prefect.blocks.system
-
- # Initialize the process-wide profile and registry at import time
- import prefect.context
-
- # Perform any forward-ref updates needed for Pydantic models
- import prefect.client.schemas
-
- prefect.context.FlowRunContext.model_rebuild()
- prefect.context.TaskRunContext.model_rebuild()
- prefect.client.schemas.State.model_rebuild()
- prefect.client.schemas.StateCreate.model_rebuild()
- Transaction.model_rebuild()
-
-
- prefect.plugins.load_extra_entrypoints()
-
- # Configure logging
- import prefect.logging.configuration
-
- prefect.logging.configuration.setup_logging()
- prefect.logging.get_logger("profiles").debug(
-     f"Using profile {prefect.context.get_settings_context().profile.name!r}"
- )
-
- # Ensure moved names are accessible at old locations
- prefect.client.get_client = get_client
- prefect.client.PrefectClient = PrefectClient
-
-
- from prefect._internal.compatibility.deprecated import (
-     inject_renamed_module_alias_finder,
- )
-
- inject_renamed_module_alias_finder()
-
+ if TYPE_CHECKING:
+     from .main import (
+         allow_failure,
+         flow,
+         Flow,
+         get_client,
+         get_run_logger,
+         Manifest,
+         State,
+         tags,
+         task,
+         Task,
+         Transaction,
+         unmapped,
+         serve,
+         deploy,
+         pause_flow_run,
+         resume_flow_run,
+         suspend_flow_run,
+     )
+
+ _slots: dict[str, Any] = {
+     "__version_info__": __version_info__,
+     "__version__": __version__,
+     "__module_path__": __module_path__,
+     "__development_base_path__": __development_base_path__,
+     "__ui_static_subpath__": __ui_static_subpath__,
+     "__ui_static_path__": __ui_static_path__,
+ }
+
+ _public_api: dict[str, tuple[str, str]] = {
+     "allow_failure": (__spec__.parent, ".main"),
+     "flow": (__spec__.parent, ".main"),
+     "Flow": (__spec__.parent, ".main"),
+     "get_client": (__spec__.parent, ".main"),
+     "get_run_logger": (__spec__.parent, ".main"),
+     "Manifest": (__spec__.parent, ".main"),
+     "State": (__spec__.parent, ".main"),
+     "tags": (__spec__.parent, ".main"),
+     "task": (__spec__.parent, ".main"),
+     "Task": (__spec__.parent, ".main"),
+     "Transaction": (__spec__.parent, ".main"),
+     "unmapped": (__spec__.parent, ".main"),
+     "serve": (__spec__.parent, ".main"),
+     "deploy": (__spec__.parent, ".main"),
+     "pause_flow_run": (__spec__.parent, ".main"),
+     "resume_flow_run": (__spec__.parent, ".main"),
+     "suspend_flow_run": (__spec__.parent, ".main"),
+ }

  # Declare API for type-checkers
  __all__ = [
@@ -100,4 +93,29 @@ __all__ = [
      "pause_flow_run",
      "resume_flow_run",
      "suspend_flow_run",
+     "__version_info__",
+     "__version__",
+     "__module_path__",
+     "__development_base_path__",
+     "__ui_static_subpath__",
+     "__ui_static_path__",
  ]
+
+
+ def __getattr__(attr_name: str) -> object:
+     if attr_name in _slots:
+         return _slots[attr_name]
+
+     dynamic_attr = _public_api.get(attr_name)
+     if dynamic_attr is None:
+         return importlib.import_module(f".{attr_name}", package=__name__)
+
+     package, module_name = dynamic_attr
+
+     from importlib import import_module
+
+     if module_name == "__module__":
+         return import_module(f".{attr_name}", package=package)
+     else:
+         module = import_module(module_name, package=package)
+         return getattr(module, attr_name)
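
Note: The replacement of eager imports with `_public_api` and a module-level `__getattr__` is the PEP 562 lazy-import pattern: `import prefect` no longer pulls in the heavy user-facing modules, and names such as `prefect.flow` are resolved on first attribute access. A minimal sketch of the same mechanism, using a hypothetical package and submodule rather than Prefect's actual layout:

```python
# mypackage/__init__.py -- illustrative PEP 562 sketch; names are hypothetical
import importlib
from typing import Any

_public_api: dict[str, tuple[str, str]] = {
    # attribute name -> (anchor package, module that defines it)
    "heavy_function": (__name__, ".heavy"),
}


def __getattr__(attr_name: str) -> Any:
    try:
        package, module_name = _public_api[attr_name]
    except KeyError:
        raise AttributeError(f"module {__name__!r} has no attribute {attr_name!r}") from None
    # The submodule is imported only the first time the attribute is requested.
    module = importlib.import_module(module_name, package=package)
    return getattr(module, attr_name)
```

With such a hook in place, `import mypackage` stays cheap, and accessing `mypackage.heavy_function` triggers the real import of `mypackage.heavy` on first use.
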
@@ -16,6 +16,7 @@ import warnings
  from typing import Any, Callable, List, Optional, Type, TypeVar

  import pendulum
+ import wrapt
  from pydantic import BaseModel

  from prefect.utilities.callables import get_call_parameters
@@ -272,3 +273,55 @@ def register_renamed_module(old_name: str, new_name: str, start_date: str):
      DEPRECATED_MODULE_ALIASES.append(
          AliasedModuleDefinition(old_name, new_name, callback)
      )
+
+
+ class AsyncCompatProxy(wrapt.ObjectProxy):
+     """
+     A proxy object that allows for awaiting a method that is no longer async.
+
+     See https://wrapt.readthedocs.io/en/master/wrappers.html#object-proxy for more
+     """
+
+     def __init__(self, wrapped, class_name: str, method_name: str):
+         super().__init__(wrapped)
+         self._self_class_name = class_name
+         self._self_method_name = method_name
+         self._self_already_awaited = False
+
+     def __await__(self):
+         if not self._self_already_awaited:
+             warnings.warn(
+                 (
+                     f"The {self._self_method_name!r} method on {self._self_class_name!r}"
+                     " is no longer async and awaiting it will raise an error after Dec 2024"
+                     " - please remove the `await` keyword."
+                 ),
+                 DeprecationWarning,
+                 stacklevel=2,
+             )
+             self._self_already_awaited = True
+         yield
+         return self.__wrapped__
+
+     def __repr__(self):
+         return repr(self.__wrapped__)
+
+     def __reduce_ex__(self, protocol):
+         return (
+             type(self),
+             (self.__wrapped__,),
+             {"_self_already_awaited": self._self_already_awaited},
+         )
+
+
+ def deprecated_async_method(wrapped):
+     """Decorator that wraps a sync method to allow awaiting it even though it is no longer async."""
+
+     @wrapt.decorator
+     def wrapper(wrapped, instance, args, kwargs):
+         result = wrapped(*args, **kwargs)
+         return AsyncCompatProxy(
+             result, class_name=instance.__class__.__name__, method_name=wrapped.__name__
+         )
+
+     return wrapper(wrapped)
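
Note: `AsyncCompatProxy` wraps the return value of a now-synchronous method so that legacy `await` call sites keep working: the proxy's `__await__` warns once, yields control, and hands back the wrapped value. A rough usage sketch; the `Client.read_thing` method is made up for illustration, and the import path assumes this hunk lives in `prefect._internal.compatibility.deprecated` (the diff does not show the file name):

```python
import asyncio

# assumed location of the decorator added in the hunk above
from prefect._internal.compatibility.deprecated import deprecated_async_method


class Client:
    @deprecated_async_method
    def read_thing(self) -> str:  # hypothetical method that used to be async
        return "thing"


async def main() -> None:
    client = Client()
    print(client.read_thing())        # new style: plain synchronous call
    print(await client.read_thing())  # old style still works, but emits DeprecationWarning


asyncio.run(main())
```
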
@@ -27,15 +27,17 @@ MOVED_IN_V3 = {
      "prefect.engine:resume_flow_run": "prefect.flow_runs:resume_flow_run",
      "prefect.engine:suspend_flow_run": "prefect.flow_runs:suspend_flow_run",
      "prefect.engine:_in_process_pause": "prefect.flow_runs:_in_process_pause",
+     "prefect.client:get_client": "prefect.client.orchestration:get_client",
  }

  REMOVED_IN_V3 = {
-     "prefect.deployments.deployments:Deployment": "Use 'flow.serve()' or `prefect deploy` instead.",
-     "prefect.deployments:Deployment": "Use 'flow.serve()' or `prefect deploy` instead.",
-     "prefect.filesystems:GCS": "Use 'prefect_gcp' instead.",
-     "prefect.filesystems:Azure": "Use 'prefect_azure' instead.",
-     "prefect.filesystems:S3": "Use 'prefect_aws' instead.",
-     "prefect.engine:_out_of_process_pause": "Use 'prefect.flow_runs.pause_flow_run' instead.",
+     "prefect.client.schemas.objects:MinimalDeploymentSchedule": "Use `prefect.client.schemas.actions.DeploymentScheduleCreate` instead.",
+     "prefect.deployments.deployments:Deployment": "Use `flow.serve()`, `flow.deploy()`, or `prefect deploy` instead.",
+     "prefect.deployments:Deployment": "Use `flow.serve()`, `flow.deploy()`, or `prefect deploy` instead.",
+     "prefect.filesystems:GCS": "Use `prefect_gcp` instead.",
+     "prefect.filesystems:Azure": "Use `prefect_azure` instead.",
+     "prefect.filesystems:S3": "Use `prefect_aws` instead.",
+     "prefect.engine:_out_of_process_pause": "Use `prefect.flow_runs.pause_flow_run` instead.",
  }

  # IMPORTANT FOR USAGE: When adding new modules to MOVED_IN_V3 or REMOVED_IN_V3, include the following lines at the bottom of that module:
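
Note: The comment above refers to the module-level hook shown in later hunks of this diff: a moved or gutted module imports `getattr_migration` and installs it as its `__getattr__`, so old import paths resolve through the `MOVED_IN_V3` table, while names in `REMOVED_IN_V3` surface the listed replacement guidance. The pattern, as it appears at the bottom of the affected modules:

```python
from prefect._internal.compatibility.migration import getattr_migration

__getattr__ = getattr_migration(__name__)
```

With the new `MOVED_IN_V3` entry, for example, existing code that reads `prefect.client.get_client` should continue to resolve to `prefect.client.orchestration.get_client`.
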
@@ -0,0 +1,7 @@
+ KNOWN_EXTRAS_FOR_PACKAGES = {
+     "prefect-kubernetes": "prefect[kubernetes]",
+     "prefect-aws": "prefect[aws]",
+     "prefect-gcp": "prefect[gcp]",
+     "prefect-azure": "prefect[azure]",
+     "prefect-docker": "prefect[docker]",
+ }
prefect/blocks/core.py CHANGED
@@ -798,7 +798,7 @@ class Block(BaseModel, ABC):
          name: str,
          validate: bool = True,
          client: Optional["PrefectClient"] = None,
-     ):
+     ) -> "Self":
          """
          Retrieves data from the block document with the given name for the block type
          that corresponds with the current class and returns an instantiated version of
@@ -15,3 +15,7 @@ $ python -m asyncio
  ```
  </div>
  """
+
+ from prefect._internal.compatibility.migration import getattr_migration
+
+ __getattr__ = getattr_migration(__name__)
@@ -1601,7 +1601,6 @@ class PrefectClient:
          work_pool_name: Optional[str] = None,
          tags: Optional[List[str]] = None,
          storage_document_id: Optional[UUID] = None,
-         manifest_path: Optional[str] = None,
          path: Optional[str] = None,
          entrypoint: Optional[str] = None,
          infrastructure_document_id: Optional[UUID] = None,
@@ -1650,7 +1649,6 @@
              storage_document_id=storage_document_id,
              path=path,
              entrypoint=entrypoint,
-             manifest_path=manifest_path,  # for backwards compat
              infrastructure_document_id=infrastructure_document_id,
              job_variables=dict(job_variables or {}),
              parameter_openapi_schema=parameter_openapi_schema,
@@ -1723,7 +1721,6 @@
              description=deployment.description,
              work_queue_name=deployment.work_queue_name,
              tags=deployment.tags,
-             manifest_path=deployment.manifest_path,
              path=deployment.path,
              entrypoint=deployment.entrypoint,
              parameters=deployment.parameters,
@@ -173,7 +173,6 @@ class DeploymentCreate(ActionBaseModel):
      tags: List[str] = Field(default_factory=list)
      pull_steps: Optional[List[dict]] = Field(None)

-     manifest_path: Optional[str] = Field(None)
      work_queue_name: Optional[str] = Field(None)
      work_pool_name: Optional[str] = Field(
          default=None,
@@ -246,7 +245,6 @@ class DeploymentUpdate(ActionBaseModel):
          description="Overrides to apply to flow run infrastructure at runtime.",
      )
      entrypoint: Optional[str] = Field(None)
-     manifest_path: Optional[str] = Field(None)
      storage_document_id: Optional[UUID] = Field(None)
      infrastructure_document_id: Optional[UUID] = Field(None)
      enforce_parameter_schema: Optional[bool] = Field(
@@ -28,6 +28,7 @@ from pydantic import (
  from pydantic_extra_types.pendulum_dt import DateTime
  from typing_extensions import Literal, Self

+ from prefect._internal.compatibility.migration import getattr_migration
  from prefect._internal.schemas.bases import ObjectBaseModel, PrefectBaseModel
  from prefect._internal.schemas.fields import CreatedBy, UpdatedBy
  from prefect._internal.schemas.validators import (
@@ -179,7 +180,10 @@ class State(ObjectBaseModel, Generic[R]):
          ...

      def result(
-         self, raise_on_failure: bool = True, fetch: Optional[bool] = None
+         self,
+         raise_on_failure: bool = True,
+         fetch: Optional[bool] = None,
+         retry_result_failure: bool = True,
      ) -> Union[R, Exception]:
          """
          Retrieve the result attached to this state.
@@ -191,6 +195,8 @@ class State(ObjectBaseModel, Generic[R]):
                  results into data. For synchronous users, this defaults to `True`.
                  For asynchronous users, this defaults to `False` for backwards
                  compatibility.
+             retry_result_failure: a boolean specifying whether to retry on failures to
+                 load the result from result storage

          Raises:
              TypeError: If the state is failed but the result is not an exception.
@@ -253,7 +259,12 @@
          """
          from prefect.states import get_state_result

-         return get_state_result(self, raise_on_failure=raise_on_failure, fetch=fetch)
+         return get_state_result(
+             self,
+             raise_on_failure=raise_on_failure,
+             fetch=fetch,
+             retry_result_failure=retry_result_failure,
+         )

      def to_state_create(self):
          """
@@ -1040,12 +1051,6 @@ class Deployment(ObjectBaseModel):
              "The path to the entrypoint for the workflow, relative to the `path`."
          ),
      )
-     manifest_path: Optional[str] = Field(
-         default=None,
-         description=(
-             "The path to the flow's manifest file, relative to the chosen storage."
-         ),
-     )
      storage_document_id: Optional[UUID] = Field(
          default=None,
          description="The block document defining storage used for this flow.",
@@ -1600,3 +1605,6 @@ class CsrfToken(ObjectBaseModel):
      expiration: datetime.datetime = Field(
          default=..., description="The expiration time of the CSRF token"
      )
+
+
+ __getattr__ = getattr_migration(__name__)
@@ -371,12 +371,6 @@ class DeploymentResponse(ObjectBaseModel):
              "The path to the entrypoint for the workflow, relative to the `path`."
          ),
      )
-     manifest_path: Optional[str] = Field(
-         default=None,
-         description=(
-             "The path to the flow's manifest file, relative to the chosen storage."
-         ),
-     )
      storage_document_id: Optional[UUID] = Field(
          default=None,
          description="The block document defining storage used for this flow.",
@@ -78,10 +78,10 @@ def client_injector(


  def inject_client(
-     fn: Callable[P, Coroutine[Any, Any, Any]],
- ) -> Callable[P, Coroutine[Any, Any, Any]]:
+     fn: Callable[P, Coroutine[Any, Any, R]],
+ ) -> Callable[P, Coroutine[Any, Any, R]]:
      """
-     Simple helper to provide a context managed client to a asynchronous function.
+     Simple helper to provide a context managed client to an asynchronous function.

      The decorated function _must_ take a `client` kwarg and if a client is passed when
      called it will be used instead of creating a new one, but it will not be context
@@ -89,7 +89,7 @@ def inject_client(
      """

      @wraps(fn)
-     async def with_injected_client(*args: P.args, **kwargs: P.kwargs) -> Any:
+     async def with_injected_client(*args: P.args, **kwargs: P.kwargs) -> R:
          client = cast(Optional["PrefectClient"], kwargs.pop("client", None))
          client, inferred = get_or_create_client(client)
          if not inferred:
@@ -22,6 +22,7 @@ from typing import Any, Dict, List, Optional, Tuple, Union

  from prefect._internal.compatibility.deprecated import PrefectDeprecationWarning
  from prefect._internal.concurrency.api import Call, from_async
+ from prefect._internal.integrations import KNOWN_EXTRAS_FOR_PACKAGES
  from prefect.logging.loggers import get_logger
  from prefect.settings import PREFECT_DEBUG_MODE
  from prefect.utilities.importtools import import_object
@@ -84,6 +85,11 @@ def _get_function_for_step(
              raise

      try:
+         packages = [
+             KNOWN_EXTRAS_FOR_PACKAGES.get(package, package)
+             for package in packages
+             if package
+         ]
          subprocess.check_call([sys.executable, "-m", "pip", "install", *packages])
      except subprocess.CalledProcessError:
          get_logger("deployments.steps.core").warning(
prefect/engine.py CHANGED
@@ -31,16 +31,16 @@ if __name__ == "__main__":
      try:
          from prefect.flow_engine import (
              load_flow_and_flow_run,
-             run_flow_async,
-             run_flow_sync,
+             run_flow,
          )

          flow_run, flow = load_flow_and_flow_run(flow_run_id=flow_run_id)
          # run the flow
          if flow.isasync:
-             run_coro_as_sync(run_flow_async(flow, flow_run=flow_run))
+             run_coro_as_sync(run_flow(flow, flow_run=flow_run))
          else:
-             run_flow_sync(flow, flow_run=flow_run)
+             run_flow(flow, flow_run=flow_run)
+
      except Abort as exc:
          engine_logger.info(
              f"Engine execution of flow run '{flow_run_id}' aborted by orchestrator:"
prefect/flow_engine.py CHANGED
@@ -51,7 +51,11 @@ from prefect.states import (
      return_value_to_state,
  )
  from prefect.utilities.asyncutils import run_coro_as_sync
- from prefect.utilities.callables import call_with_parameters, parameters_to_args_kwargs
+ from prefect.utilities.callables import (
+     call_with_parameters,
+     get_call_parameters,
+     parameters_to_args_kwargs,
+ )
  from prefect.utilities.collections import visit_collection
  from prefect.utilities.engine import (
      _get_hook_name,
@@ -595,10 +599,11 @@ def run_flow_sync(
      wait_for: Optional[Iterable[PrefectFuture]] = None,
      return_type: Literal["state", "result"] = "result",
  ) -> Union[R, State, None]:
-     parameters = flow_run.parameters if flow_run else parameters
-
      engine = FlowRunEngine[P, R](
-         flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
+         flow=flow,
+         parameters=parameters,
+         flow_run=flow_run,
+         wait_for=wait_for,
      )

      with engine.start():
@@ -616,8 +621,6 @@
      wait_for: Optional[Iterable[PrefectFuture]] = None,
      return_type: Literal["state", "result"] = "result",
  ) -> Union[R, State, None]:
-     parameters = flow_run.parameters if flow_run else parameters
-
      engine = FlowRunEngine[P, R](
          flow=flow, parameters=parameters, flow_run=flow_run, wait_for=wait_for
      )
@@ -714,10 +717,13 @@ def run_flow(
      kwargs = dict(
          flow=flow,
          flow_run=flow_run,
-         parameters=parameters,
+         parameters=_flow_parameters(
+             flow=flow, flow_run=flow_run, parameters=parameters
+         ),
          wait_for=wait_for,
          return_type=return_type,
      )
+
      if flow.isasync and flow.isgenerator:
          return run_generator_flow_async(**kwargs)
      elif flow.isgenerator:
@@ -726,3 +732,20 @@
          return run_flow_async(**kwargs)
      else:
          return run_flow_sync(**kwargs)
+
+
+ def _flow_parameters(
+     flow: Flow[P, R], flow_run: Optional[FlowRun], parameters: Optional[Dict[str, Any]]
+ ) -> Dict[str, Any]:
+     if parameters:
+         # This path is taken when a flow is being called directly with
+         # parameters, in that case just return the parameters as-is.
+         return parameters
+
+     # Otherwise the flow is being executed indirectly and we may need to grab
+     # the parameters from the flow run. We also need to resolve any default
+     # parameters that are defined on the flow function itself.
+
+     parameters = flow_run.parameters if flow_run else {}
+     call_args, call_kwargs = parameters_to_args_kwargs(flow.fn, parameters)
+     return get_call_parameters(flow.fn, call_args, call_kwargs)
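
Note: The new `_flow_parameters` helper centralizes parameter resolution for `run_flow`: parameters passed directly are returned untouched, while runs driven by a flow run record also pick up defaults declared on the flow function. A sketch of the default-filling step it relies on, using the two `prefect.utilities.callables` helpers it imports (the example function is made up):

```python
from prefect.utilities.callables import get_call_parameters, parameters_to_args_kwargs


def my_flow_fn(x: int, y: int = 10) -> int:  # stand-in for flow.fn
    return x + y


stored = {"x": 1}  # roughly what a flow run record might carry

# Same two-step resolution as _flow_parameters: bind the stored values,
# then re-derive the full parameter dict, including declared defaults.
call_args, call_kwargs = parameters_to_args_kwargs(my_flow_fn, stored)
print(get_call_parameters(my_flow_fn, call_args, call_kwargs))  # expected: {'x': 1, 'y': 10}
```
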
prefect/flow_runs.py CHANGED
@@ -340,7 +340,7 @@ async def suspend_flow_run(
      already started will run until completion. When resumed, the flow run will
      be rescheduled to finish execution. In order suspend a flow run in this
      way, the flow needs to have an associated deployment and results need to be
-     configured with the `persist_results` option.
+     configured with the `persist_result` option.

      Args:
          flow_run_id: a flow run id. If supplied, this function will attempt to
prefect/flows.py CHANGED
@@ -8,7 +8,6 @@ import ast
  import datetime
  import importlib.util
  import inspect
- import json
  import os
  import re
  import sys
@@ -54,8 +53,6 @@ from prefect.client.schemas.objects import Flow as FlowSchema
  from prefect.client.schemas.objects import FlowRun
  from prefect.client.schemas.schedules import SCHEDULE_TYPES
  from prefect.client.utilities import client_injector
- from prefect.deployments.runner import deploy
- from prefect.deployments.steps.core import run_steps
  from prefect.docker.docker_image import DockerImage
  from prefect.events import DeploymentTriggerTypes, TriggerTypes
  from prefect.exceptions import (
@@ -70,11 +67,6 @@ from prefect.futures import PrefectFuture
  from prefect.logging import get_logger
  from prefect.logging.loggers import flow_run_logger
  from prefect.results import ResultSerializer, ResultStorage
- from prefect.runner.storage import (
-     BlockStorageAdapter,
-     RunnerStorage,
-     create_storage_from_url,
- )
  from prefect.settings import (
      PREFECT_DEFAULT_WORK_POOL_NAME,
      PREFECT_FLOW_DEFAULT_RETRIES,
@@ -120,6 +112,7 @@ if TYPE_CHECKING:
      from prefect.client.types.flexible_schedule_list import FlexibleScheduleList
      from prefect.deployments.runner import RunnerDeployment
      from prefect.flows import FlowRun
+     from prefect.runner.storage import RunnerStorage


  class Flow(Generic[P, R]):
@@ -353,7 +346,7 @@
          self.on_running_hooks = on_running or []

          # Used for flows loaded from remote storage
-         self._storage: Optional[RunnerStorage] = None
+         self._storage: Optional["RunnerStorage"] = None
          self._entrypoint: Optional[str] = None

          module = fn.__module__
@@ -919,7 +912,7 @@
      @sync_compatible
      async def from_source(
          cls: Type[F],
-         source: Union[str, RunnerStorage, ReadableDeploymentStorage],
+         source: Union[str, "RunnerStorage", ReadableDeploymentStorage],
          entrypoint: str,
      ) -> F:
          """
@@ -968,8 +961,16 @@
          my_flow()
          ```
          """
+
+         from prefect.runner.storage import (
+             BlockStorageAdapter,
+             LocalStorage,
+             RunnerStorage,
+             create_storage_from_source,
+         )
+
          if isinstance(source, str):
-             storage = create_storage_from_url(source)
+             storage = create_storage_from_source(source)
          elif isinstance(source, RunnerStorage):
              storage = source
          elif hasattr(source, "get_directory"):
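
Note: `Flow.from_source` now resolves string sources through `create_storage_from_source` (previously `create_storage_from_url`), and the next hunk skips re-basing into the temporary directory when the resolved storage is `LocalStorage`. A hedged usage sketch; the URL, path, and entrypoint below are placeholders, not values taken from this diff:

```python
from prefect import flow

# A remote source is pulled into a temporary directory before loading.
remote_flow = flow.from_source(
    source="https://github.com/example/repo.git",
    entrypoint="flows/etl.py:my_flow",
)

# A local path should resolve to LocalStorage and be loaded in place.
local_flow = flow.from_source(
    source="/path/to/local/project",
    entrypoint="flows/etl.py:my_flow",
)
```
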
@@ -980,6 +981,9 @@
              " URL to remote storage or a storage object."
          )
          with tempfile.TemporaryDirectory() as tmpdir:
+             if not isinstance(storage, LocalStorage):
+                 storage.set_base_path(Path(tmpdir))
+                 await storage.pull_code()
              storage.set_base_path(Path(tmpdir))
              await storage.pull_code()

@@ -1142,7 +1146,9 @@
              entrypoint_type=entrypoint_type,
          )

-         deployment_ids = await deploy(
+         from prefect.deployments import runner
+
+         deployment_ids = await runner.deploy(
              deployment,
              work_pool_name=work_pool_name,
              image=image,
@@ -1811,7 +1817,7 @@ async def load_flow_from_flow_run(
          )
          storage_block = Block._from_block_document(storage_document)
      else:
-         basepath = deployment.path or Path(deployment.manifest_path).parent
+         basepath = deployment.path
          if runner_storage_base_path:
              basepath = str(basepath).replace(
                  "$STORAGE_BASE_PATH", runner_storage_base_path
@@ -1830,19 +1836,15 @@
          run_logger.debug(
              f"Running {len(deployment.pull_steps)} deployment pull step(s)"
          )
+
+         from prefect.deployments.steps.core import run_steps
+
          output = await run_steps(deployment.pull_steps)
          if output.get("directory"):
              run_logger.debug(f"Changing working directory to {output['directory']!r}")
              os.chdir(output["directory"])

      import_path = relative_path_to_current_platform(deployment.entrypoint)
-     # for backwards compat
-     if deployment.manifest_path:
-         with open(deployment.manifest_path, "r") as f:
-             import_path = json.load(f)["import_path"]
-             import_path = (
-                 Path(deployment.manifest_path).parent / import_path
-             ).absolute()
      run_logger.debug(f"Importing flow code from '{import_path}'")

      flow = await run_sync_in_worker_thread(load_flow_from_entrypoint, str(import_path))