prefect-client 3.0.0rc1__py3-none-any.whl → 3.0.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. prefect/_internal/compatibility/migration.py +124 -0
  2. prefect/_internal/concurrency/__init__.py +2 -2
  3. prefect/_internal/concurrency/primitives.py +1 -0
  4. prefect/_internal/pydantic/annotations/pendulum.py +2 -2
  5. prefect/_internal/pytz.py +1 -1
  6. prefect/blocks/core.py +1 -1
  7. prefect/blocks/redis.py +168 -0
  8. prefect/client/orchestration.py +113 -23
  9. prefect/client/schemas/actions.py +1 -1
  10. prefect/client/schemas/filters.py +6 -0
  11. prefect/client/schemas/objects.py +22 -11
  12. prefect/client/subscriptions.py +3 -2
  13. prefect/concurrency/asyncio.py +1 -1
  14. prefect/concurrency/services.py +1 -1
  15. prefect/context.py +1 -27
  16. prefect/deployments/__init__.py +3 -0
  17. prefect/deployments/base.py +11 -3
  18. prefect/deployments/deployments.py +3 -0
  19. prefect/deployments/steps/pull.py +1 -0
  20. prefect/deployments/steps/utility.py +2 -1
  21. prefect/engine.py +3 -0
  22. prefect/events/cli/automations.py +1 -1
  23. prefect/events/clients.py +7 -1
  24. prefect/events/schemas/events.py +2 -0
  25. prefect/exceptions.py +9 -0
  26. prefect/filesystems.py +22 -11
  27. prefect/flow_engine.py +118 -156
  28. prefect/flow_runs.py +2 -2
  29. prefect/flows.py +91 -35
  30. prefect/futures.py +44 -43
  31. prefect/infrastructure/provisioners/container_instance.py +1 -0
  32. prefect/infrastructure/provisioners/ecs.py +2 -2
  33. prefect/input/__init__.py +4 -0
  34. prefect/input/run_input.py +4 -2
  35. prefect/logging/formatters.py +2 -2
  36. prefect/logging/handlers.py +2 -2
  37. prefect/logging/loggers.py +1 -1
  38. prefect/plugins.py +1 -0
  39. prefect/records/cache_policies.py +179 -0
  40. prefect/records/result_store.py +10 -3
  41. prefect/results.py +27 -55
  42. prefect/runner/runner.py +1 -1
  43. prefect/runner/server.py +1 -1
  44. prefect/runtime/__init__.py +1 -0
  45. prefect/runtime/deployment.py +1 -0
  46. prefect/runtime/flow_run.py +1 -0
  47. prefect/runtime/task_run.py +1 -0
  48. prefect/settings.py +21 -5
  49. prefect/states.py +17 -4
  50. prefect/task_engine.py +337 -209
  51. prefect/task_runners.py +15 -5
  52. prefect/task_runs.py +203 -0
  53. prefect/{task_server.py → task_worker.py} +66 -36
  54. prefect/tasks.py +180 -77
  55. prefect/transactions.py +92 -16
  56. prefect/types/__init__.py +1 -1
  57. prefect/utilities/asyncutils.py +3 -3
  58. prefect/utilities/callables.py +90 -7
  59. prefect/utilities/dockerutils.py +5 -3
  60. prefect/utilities/engine.py +11 -0
  61. prefect/utilities/filesystem.py +4 -5
  62. prefect/utilities/importtools.py +34 -5
  63. prefect/utilities/services.py +2 -2
  64. prefect/utilities/urls.py +195 -0
  65. prefect/utilities/visualization.py +1 -0
  66. prefect/variables.py +19 -10
  67. prefect/workers/base.py +46 -1
  68. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/METADATA +3 -2
  69. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/RECORD +72 -66
  70. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/LICENSE +0 -0
  71. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/WHEEL +0 -0
  72. {prefect_client-3.0.0rc1.dist-info → prefect_client-3.0.0rc3.dist-info}/top_level.txt +0 -0
prefect/client/schemas/objects.py CHANGED
@@ -94,6 +94,14 @@ class StateType(AutoEnum):
     CANCELLING = AutoEnum.auto()


+TERMINAL_STATES = {
+    StateType.COMPLETED,
+    StateType.CANCELLED,
+    StateType.FAILED,
+    StateType.CRASHED,
+}
+
+
 class WorkPoolStatus(AutoEnum):
     """Enumeration of work pool statuses."""

@@ -280,7 +288,7 @@ class State(ObjectBaseModel, Generic[R]):
     def default_scheduled_start_time(self) -> Self:
         if self.type == StateType.SCHEDULED:
             if not self.state_details.scheduled_time:
-                self.state_details.scheduled_time = pendulum.now("utc")
+                self.state_details.scheduled_time = DateTime.now("utc")
         return self

     def is_scheduled(self) -> bool:
@@ -308,12 +316,7 @@ class State(ObjectBaseModel, Generic[R]):
         return self.type == StateType.CANCELLING

     def is_final(self) -> bool:
-        return self.type in {
-            StateType.CANCELLED,
-            StateType.FAILED,
-            StateType.COMPLETED,
-            StateType.CRASHED,
-        }
+        return self.type in TERMINAL_STATES

     def is_paused(self) -> bool:
         return self.type == StateType.PAUSED
@@ -419,8 +422,11 @@ class FlowRunPolicy(PrefectBaseModel):
     )

     @model_validator(mode="before")
-    def populate_deprecated_fields(cls, values):
-        return set_run_policy_deprecated_fields(values)
+    @classmethod
+    def populate_deprecated_fields(cls, values: Any):
+        if isinstance(values, dict):
+            return set_run_policy_deprecated_fields(values)
+        return values


 class FlowRun(ObjectBaseModel):
@@ -550,7 +556,8 @@ class FlowRun(ObjectBaseModel):
         examples=["State(type=StateType.COMPLETED)"],
     )
     job_variables: Optional[dict] = Field(
-        default=None, description="Job variables for the flow run."
+        default=None,
+        description="Job variables for the flow run.",
     )

     # These are server-side optimizations and should not be present on client models
@@ -911,6 +918,7 @@ class BlockDocument(ObjectBaseModel):
     _validate_name_format = field_validator("name")(validate_block_document_name)

     @model_validator(mode="before")
+    @classmethod
     def validate_name_is_present_if_not_anonymous(cls, values):
         return validate_name_present_on_nonanonymous_blocks(values)

@@ -1142,8 +1150,11 @@ class BlockDocumentReference(ObjectBaseModel):
     )

     @model_validator(mode="before")
+    @classmethod
     def validate_parent_and_ref_are_different(cls, values):
-        return validate_parent_and_ref_diff(values)
+        if isinstance(values, dict):
+            return validate_parent_and_ref_diff(values)
+        return values


 class Configuration(ObjectBaseModel):
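The hunks above introduce a module-level `TERMINAL_STATES` set (reused by `State.is_final()`) and tighten the `mode="before"` validators: they are now declared as classmethods and only rewrite dict payloads, passing anything else through untouched. A minimal, self-contained sketch of that validator pattern, using a hypothetical `RunPolicy` model and a made-up `max_retries` deprecated key rather than Prefect's actual fields:

```python
from typing import Any

from pydantic import BaseModel, model_validator


class RunPolicy(BaseModel):  # hypothetical stand-in, not Prefect's FlowRunPolicy
    retries: int = 0

    @model_validator(mode="before")
    @classmethod
    def populate_deprecated_fields(cls, values: Any) -> Any:
        # Only rewrite plain dict payloads; already-built models pass through.
        if isinstance(values, dict):
            if "max_retries" in values and "retries" not in values:
                values["retries"] = values.pop("max_retries")
        return values


print(RunPolicy(max_retries=3).retries)  # -> 3
```

Guarding on `isinstance(values, dict)` keeps the validator from choking when pydantic revalidates an existing model instance instead of a raw payload.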
prefect/client/subscriptions.py CHANGED
@@ -9,7 +9,7 @@ from typing_extensions import Self

 from prefect._internal.schemas.bases import IDBaseModel
 from prefect.logging import get_logger
-from prefect.settings import PREFECT_API_KEY, PREFECT_API_URL
+from prefect.settings import PREFECT_API_KEY

 logger = get_logger(__name__)

@@ -23,10 +23,11 @@ class Subscription(Generic[S]):
         path: str,
         keys: List[str],
         client_id: Optional[str] = None,
+        base_url: Optional[str] = None,
     ):
         self.model = model
         self.client_id = client_id
-        base_url = PREFECT_API_URL.value().replace("http", "ws", 1)
+        base_url = base_url.replace("http", "ws", 1)
         self.subscription_url = f"{base_url}{path}"

         self.keys = keys
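With this change the caller supplies `base_url` explicitly instead of the subscription reading `PREFECT_API_URL` itself; the websocket URL is still derived by swapping the scheme prefix. A tiny illustration of that derivation (the URL and path below are made up):

```python
# Illustrative only: mirrors the `.replace("http", "ws", 1)` line in the hunk above.
def subscription_url(base_url: str, path: str) -> str:
    ws_base = base_url.replace("http", "ws", 1)  # http -> ws, https -> wss
    return f"{ws_base}{path}"


print(subscription_url("https://example.invalid/api", "/some/subscription/path"))
# -> wss://example.invalid/api/some/subscription/path
```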
prefect/concurrency/asyncio.py CHANGED
@@ -11,7 +11,7 @@ except ImportError:
     # pendulum < 3
     from pendulum.period import Period as Interval  # type: ignore

-from prefect import get_client
+from prefect.client.orchestration import get_client
 from prefect.client.schemas.responses import MinimalConcurrencyLimitResponse
 from prefect.utilities.timeout import timeout_async

prefect/concurrency/services.py CHANGED
@@ -10,9 +10,9 @@ from typing import (
 import httpx
 from starlette import status

-from prefect import get_client
 from prefect._internal.concurrency import logger
 from prefect._internal.concurrency.services import QueueService
+from prefect.client.orchestration import get_client

 if TYPE_CHECKING:
     from prefect.client.orchestration import PrefectClient
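Both concurrency modules now import `get_client` from `prefect.client.orchestration` rather than the top-level `prefect` package, presumably to avoid importing the whole namespace at import time. Usage is unchanged; a short sketch (the surrounding coroutine is hypothetical):

```python
from prefect.client.orchestration import get_client


async def example() -> None:
    # The async client is used as a context manager; get_client(sync_client=True),
    # seen verbatim in the prefect/context.py hunk below, returns a sync client instead.
    async with get_client() as client:
        ...  # issue orchestration API calls here
```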
prefect/context.py CHANGED
@@ -29,13 +29,9 @@ from typing import (
     Union,
 )

-import anyio
-import anyio._backends._asyncio
-import anyio.abc
 import pendulum
 from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
 from pydantic_extra_types.pendulum_dt import DateTime
-from sniffio import AsyncLibraryNotFoundError
 from typing_extensions import Self

 import prefect.logging
@@ -45,7 +41,6 @@ from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_c
 from prefect.client.schemas import FlowRun, TaskRun
 from prefect.events.worker import EventsWorker
 from prefect.exceptions import MissingContextError
-from prefect.futures import PrefectFuture
 from prefect.results import ResultFactory
 from prefect.settings import PREFECT_HOME, Profile, Settings
 from prefect.states import State
@@ -94,19 +89,12 @@ def hydrated_context(
     if settings_context := serialized_context.get("settings_context"):
         stack.enter_context(SettingsContext(**settings_context))
     # Set up parent flow run context
-    # TODO: This task group isn't necessary in the new engine. Remove the background tasks
-    # attribute from FlowRunContext.
     client = client or get_client(sync_client=True)
     if flow_run_context := serialized_context.get("flow_run_context"):
-        try:
-            task_group = anyio.create_task_group()
-        except AsyncLibraryNotFoundError:
-            task_group = anyio._backends._asyncio.TaskGroup()
         flow = flow_run_context["flow"]
         flow_run_context = FlowRunContext(
             **flow_run_context,
             client=client,
-            background_tasks=task_group,
             result_factory=run_coro_as_sync(ResultFactory.from_flow(flow)),
             task_runner=flow.task_runner.duplicate(),
             detached=True,
@@ -367,13 +355,10 @@ class EngineContext(RunContext):
         task_run_states: A list of states for task runs created within this flow run
         task_run_results: A mapping of result ids to task run states for this flow run
         flow_run_states: A list of states for flow runs created within this flow run
-        sync_portal: A blocking portal for sync task/flow runs in an async flow
-        timeout_scope: The cancellation scope for flow level timeouts
     """

     flow: Optional["Flow"] = None
     flow_run: Optional[FlowRun] = None
-    autonomous_task_run: Optional[TaskRun] = None
     task_runner: TaskRunner
     log_prints: bool = False
     parameters: Optional[Dict[str, Any]] = None
@@ -391,19 +376,8 @@ class EngineContext(RunContext):
     # Counter for flow pauses
     observed_flow_pauses: Dict[str, int] = Field(default_factory=dict)

-    # Tracking for objects created by this flow run
-    task_run_futures: List[PrefectFuture] = Field(default_factory=list)
-    task_run_states: List[State] = Field(default_factory=list)
+    # Tracking for result from task runs in this flow run
     task_run_results: Dict[int, State] = Field(default_factory=dict)
-    flow_run_states: List[State] = Field(default_factory=list)
-
-    # The synchronous portal is only created for async flows for creating engine calls
-    # from synchronous task and subflow calls
-    sync_portal: Optional[anyio.abc.BlockingPortal] = None
-    timeout_scope: Optional[anyio.abc.CancelScope] = None
-
-    # Task group that can be used for background tasks during the flow run
-    background_tasks: anyio.abc.TaskGroup

     # Events worker to emit events to Prefect Cloud
     events: Optional[EventsWorker] = None
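These hunks drop the anyio task-group plumbing, so rehydrating a flow run context no longer needs an event loop or background task group. A hedged sketch of the hydration round trip; `serialize_context` is assumed to be the counterpart helper in `prefect.context` (only `hydrated_context` appears in this diff):

```python
from prefect.context import hydrated_context, serialize_context

# Capture the current settings/flow-run/task-run contexts as a plain dict...
serialized = serialize_context()

# ...and re-enter them elsewhere, e.g. in a worker process.
with hydrated_context(serialized):
    ...  # code here sees the same Prefect contexts as the serializing process
```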
prefect/deployments/__init__.py CHANGED
@@ -1,3 +1,4 @@
+from prefect._internal.compatibility.migration import getattr_migration
 import prefect.deployments.base
 import prefect.deployments.steps
 from prefect.deployments.base import (
@@ -15,3 +16,5 @@ from prefect.deployments.runner import (
 from prefect.deployments.flow_runs import (
     run_deployment,
 )
+
+__getattr__ = getattr_migration(__name__)
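`getattr_migration` hooks the module-level `__getattr__` mechanism (PEP 562), so lookups of names that moved between releases can be redirected or turned into clear errors lazily. A generic sketch of that mechanism, not Prefect's implementation; the `MOVED_NAMES` mapping is only an example:

```python
import importlib
from typing import Any

MOVED_NAMES = {"run_deployment": "prefect.deployments.flow_runs"}  # example mapping


def __getattr__(name: str) -> Any:
    if name in MOVED_NAMES:
        # Lazily import the new home and hand back the relocated object.
        module = importlib.import_module(MOVED_NAMES[name])
        return getattr(module, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```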
prefect/deployments/base.py CHANGED
@@ -29,7 +29,7 @@ from prefect.utilities.templating import apply_values


 def create_default_prefect_yaml(
-    path: str, name: str = None, contents: Optional[Dict[str, Any]] = None
+    path: str, name: Optional[str] = None, contents: Optional[Dict[str, Any]] = None
 ) -> bool:
     """
     Creates default `prefect.yaml` file in the provided path if one does not already exist;
@@ -176,7 +176,9 @@ def _get_git_branch() -> Optional[str]:


 def initialize_project(
-    name: str = None, recipe: str = None, inputs: Optional[Dict[str, Any]] = None
+    name: Optional[str] = None,
+    recipe: Optional[str] = None,
+    inputs: Optional[Dict[str, Any]] = None,
 ) -> List[str]:
     """
     Initializes a basic project structure with base files. If no name is provided, the name
@@ -398,7 +400,13 @@ async def _find_flow_functions_in_file(filename: str) -> List[Dict]:
         return decorated_functions

     for node in ast.walk(tree):
-        if isinstance(node, ast.FunctionDef):
+        if isinstance(
+            node,
+            (
+                ast.FunctionDef,
+                ast.AsyncFunctionDef,
+            ),
+        ):
             for decorator in node.decorator_list:
                 # handles @flow
                 is_name_match = (
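The `_find_flow_functions_in_file` change makes the AST scan match `async def` flows as well as plain functions. A standalone sketch of the same matching logic, with a simplified decorator check and made-up sample source:

```python
import ast

source = """
from prefect import flow

@flow
async def my_async_flow():
    ...
"""

tree = ast.parse(source)
for node in ast.walk(tree):
    # Match both sync and async function definitions, as in the hunk above.
    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
        for decorator in node.decorator_list:
            if isinstance(decorator, ast.Name) and decorator.id == "flow":
                print(node.name)  # -> my_async_flow
```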
prefect/deployments/deployments.py CHANGED
@@ -0,0 +1,3 @@
+from .._internal.compatibility.migration import getattr_migration
+
+__getattr__ = getattr_migration(__name__)
prefect/deployments/steps/pull.py CHANGED
@@ -1,6 +1,7 @@
 """
 Core set of steps for specifying a Prefect project pull step.
 """
+
 import os
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional
prefect/deployments/steps/utility.py CHANGED
@@ -4,7 +4,7 @@ Utility project steps that are useful for managing a project's deployment lifecy
 Steps within this module can be used within a `build`, `push`, or `pull` deployment action.

 Example:
-    Use the `run_shell_script` setp to retrieve the short Git commit hash of the current
+    Use the `run_shell_script` setp to retrieve the short Git commit hash of the current
        repository and use it as a Docker image tag:
    ```yaml
    build:
@@ -19,6 +19,7 @@ Example:
            dockerfile: auto
    ```
 """
+
 import io
 import os
 import shlex
prefect/engine.py CHANGED
@@ -2,6 +2,7 @@ import os
 import sys
 from uuid import UUID

+from prefect._internal.compatibility.migration import getattr_migration
 from prefect.exceptions import (
     Abort,
     Pause,
@@ -70,3 +71,5 @@ if __name__ == "__main__":
         )
         # Let the exit code be determined by the base exception type
         raise
+
+__getattr__ = getattr_migration(__name__)
prefect/events/cli/automations.py CHANGED
@@ -23,7 +23,7 @@ from prefect.exceptions import PrefectHTTPStatusError

 automations_app = PrefectTyper(
     name="automation",
-    help="Commands for managing automations.",
+    help="Manage automations.",
 )
 app.add_typer(automations_app, aliases=["automations"])

prefect/events/clients.py CHANGED
@@ -63,6 +63,12 @@ def get_events_subscriber(
     reconnection_attempts: int = 10,
 ) -> "PrefectEventSubscriber":
     api_url = PREFECT_API_URL.value()
+    if not api_url:
+        raise ValueError(
+            "A Prefect server or Prefect Cloud is required to start an event "
+            "subscriber. Please check the PREFECT_API_URL setting in your profile."
+        )
+
     if isinstance(api_url, str) and api_url.startswith(PREFECT_CLOUD_API_URL.value()):
         return PrefectCloudEventSubscriber(
             filter=filter, reconnection_attempts=reconnection_attempts
@@ -168,7 +174,7 @@ class PrefectEphemeralEventsClient(EventsClient):
         )
         from prefect.server.api.server import create_app

-        app = create_app()
+        app = create_app(ephemeral=True)

         self._http_client = PrefectHttpxAsyncClient(
             transport=httpx.ASGITransport(app=app, raise_app_exceptions=False),
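`get_events_subscriber` now fails fast with a `ValueError` when `PREFECT_API_URL` is unset, instead of failing later while connecting. A hedged usage sketch; the async-iteration interface of the returned subscriber is an assumption not shown in this diff:

```python
from prefect.events.clients import get_events_subscriber


async def tail_events() -> None:
    # Raises ValueError immediately if PREFECT_API_URL is not configured.
    async with get_events_subscriber() as subscriber:
        async for event in subscriber:
            print(event.occurred, event.event)
```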
prefect/events/schemas/events.py CHANGED
@@ -83,6 +83,8 @@ class RelatedResource(Resource):

 class Event(PrefectBaseModel):
     """The client-side view of an event that has happened to a Resource"""
+    model_config = ConfigDict(extra="ignore")
+
     occurred: DateTime = Field(
         default_factory=lambda: pendulum.now("UTC"),
         description="When the event happened from the sender's perspective",
prefect/exceptions.py CHANGED
@@ -400,3 +400,12 @@ class FlowPauseTimeout(PrefectException):

 class FlowRunWaitTimeout(PrefectException):
     """Raised when a flow run takes longer than a given timeout"""
+
+
+class PrefectImportError(ImportError):
+    """
+    An error raised when a Prefect object cannot be imported due to a move or removal.
+    """
+
+    def __init__(self, message: str) -> None:
+        super().__init__(message)
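Because `PrefectImportError` subclasses `ImportError`, existing `except ImportError` handling keeps working when a lookup is rejected by the migration shims added elsewhere in this release. Minimal illustration (the message below is made up):

```python
from prefect.exceptions import PrefectImportError

try:
    raise PrefectImportError("this object moved in 3.0; import it from its new module")
except ImportError as exc:  # PrefectImportError is still an ImportError
    print(type(exc).__name__, "-", exc)
```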
prefect/filesystems.py CHANGED
@@ -16,6 +16,8 @@ from prefect.utilities.asyncutils import run_sync_in_worker_thread, sync_compati
 from prefect.utilities.compat import copytree
 from prefect.utilities.filesystem import filter_files

+from ._internal.compatibility.migration import getattr_migration
+

 class ReadableFileSystem(Block, abc.ABC):
     _block_schema_capabilities = ["read-path"]
@@ -42,7 +44,7 @@ class ReadableDeploymentStorage(Block, abc.ABC):

     @abc.abstractmethod
     async def get_directory(
-        self, from_path: str = None, local_path: str = None
+        self, from_path: Optional[str] = None, local_path: Optional[str] = None
     ) -> None:
         pass

@@ -52,13 +54,16 @@ class WritableDeploymentStorage(Block, abc.ABC):

     @abc.abstractmethod
     async def get_directory(
-        self, from_path: str = None, local_path: str = None
+        self, from_path: Optional[str] = None, local_path: Optional[str] = None
     ) -> None:
         pass

     @abc.abstractmethod
     async def put_directory(
-        self, local_path: str = None, to_path: str = None, ignore_file: str = None
+        self,
+        local_path: Optional[str] = None,
+        to_path: Optional[str] = None,
+        ignore_file: Optional[str] = None,
     ) -> None:
         pass

@@ -103,18 +108,18 @@ class LocalFileSystem(WritableFileSystem, WritableDeploymentStorage):
         if path is None:
             return basepath

-        path: Path = Path(path).expanduser()
+        resolved_path: Path = Path(path).expanduser()

-        if not path.is_absolute():
-            path = basepath / path
+        if not resolved_path.is_absolute():
+            resolved_path = basepath / resolved_path
         else:
-            path = path.resolve()
-            if basepath not in path.parents and (basepath != path):
+            resolved_path = resolved_path.resolve()
+            if basepath not in resolved_path.parents and (basepath != resolved_path):
                 raise ValueError(
-                    f"Provided path {path} is outside of the base path {basepath}."
+                    f"Provided path {resolved_path} is outside of the base path {basepath}."
                 )

-        return path
+        return resolved_path

     @sync_compatible
     async def get_directory(
@@ -168,7 +173,10 @@ class LocalFileSystem(WritableFileSystem, WritableDeploymentStorage):

     @sync_compatible
     async def put_directory(
-        self, local_path: str = None, to_path: str = None, ignore_file: str = None
+        self,
+        local_path: Optional[str] = None,
+        to_path: Optional[str] = None,
+        ignore_file: Optional[str] = None,
     ) -> None:
         """
         Copies a directory from one place to another on the local filesystem.
@@ -506,3 +514,6 @@ class SMB(WritableFileSystem, WritableDeploymentStorage):
     @sync_compatible
     async def write_path(self, path: str, content: bytes) -> str:
         return await self.filesystem.write_path(path=path, content=content)
+
+
+__getattr__ = getattr_migration(__name__)
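The `LocalFileSystem` change renames the local variable so the `path` argument is no longer shadowed while it is resolved against the base path. A standalone sketch of the same resolve-and-contain check; the helper name is mine, not Prefect's:

```python
from pathlib import Path


def resolve_under(basepath: Path, path: str) -> Path:
    resolved_path = Path(path).expanduser()
    if not resolved_path.is_absolute():
        # Relative paths are anchored under the base path.
        resolved_path = basepath / resolved_path
    else:
        # Absolute paths must stay inside the base path.
        resolved_path = resolved_path.resolve()
        if basepath not in resolved_path.parents and basepath != resolved_path:
            raise ValueError(
                f"Provided path {resolved_path} is outside of the base path {basepath}."
            )
    return resolved_path


print(resolve_under(Path("/data/prefect"), "results/output.json"))
# -> /data/prefect/results/output.json
```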