prefect-client 3.1.6__py3-none-any.whl → 3.1.8__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their public registries. It is provided for informational purposes only.
Files changed (55)
  1. prefect/_experimental/__init__.py +0 -0
  2. prefect/_experimental/lineage.py +181 -0
  3. prefect/_internal/compatibility/async_dispatch.py +38 -9
  4. prefect/_internal/pydantic/v2_validated_func.py +15 -10
  5. prefect/_internal/retries.py +15 -6
  6. prefect/_internal/schemas/bases.py +2 -1
  7. prefect/_internal/schemas/validators.py +5 -4
  8. prefect/_version.py +3 -3
  9. prefect/blocks/core.py +144 -17
  10. prefect/blocks/system.py +2 -1
  11. prefect/client/orchestration.py +106 -0
  12. prefect/client/schemas/actions.py +5 -5
  13. prefect/client/schemas/filters.py +1 -1
  14. prefect/client/schemas/objects.py +5 -5
  15. prefect/client/schemas/responses.py +1 -2
  16. prefect/client/schemas/schedules.py +1 -1
  17. prefect/client/subscriptions.py +2 -1
  18. prefect/client/utilities.py +15 -1
  19. prefect/context.py +1 -1
  20. prefect/deployments/flow_runs.py +3 -3
  21. prefect/deployments/runner.py +14 -14
  22. prefect/deployments/steps/core.py +3 -1
  23. prefect/deployments/steps/pull.py +60 -12
  24. prefect/events/clients.py +55 -4
  25. prefect/events/filters.py +1 -1
  26. prefect/events/related.py +2 -1
  27. prefect/events/schemas/events.py +1 -1
  28. prefect/events/utilities.py +2 -0
  29. prefect/events/worker.py +8 -0
  30. prefect/flow_engine.py +41 -81
  31. prefect/flow_runs.py +4 -2
  32. prefect/flows.py +4 -6
  33. prefect/results.py +43 -22
  34. prefect/runner/runner.py +129 -18
  35. prefect/runner/storage.py +3 -3
  36. prefect/serializers.py +28 -24
  37. prefect/settings/__init__.py +1 -0
  38. prefect/settings/base.py +3 -2
  39. prefect/settings/models/api.py +4 -0
  40. prefect/settings/models/experiments.py +5 -0
  41. prefect/settings/models/runner.py +8 -0
  42. prefect/settings/models/server/api.py +7 -1
  43. prefect/task_engine.py +34 -26
  44. prefect/task_worker.py +43 -25
  45. prefect/tasks.py +118 -125
  46. prefect/telemetry/instrumentation.py +1 -1
  47. prefect/telemetry/processors.py +10 -7
  48. prefect/telemetry/run_telemetry.py +157 -33
  49. prefect/types/__init__.py +4 -1
  50. prefect/variables.py +127 -19
  51. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/METADATA +2 -1
  52. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/RECORD +55 -53
  53. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/LICENSE +0 -0
  54. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/WHEEL +0 -0
  55. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/top_level.txt +0 -0
prefect/deployments/runner.py CHANGED
@@ -33,7 +33,7 @@ import importlib
 import tempfile
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Union
 from uuid import UUID

 from pydantic import (
@@ -160,7 +160,7 @@ class RunnerDeployment(BaseModel):
     paused: Optional[bool] = Field(
         default=None, description="Whether or not the deployment is paused."
     )
-    parameters: Dict[str, Any] = Field(default_factory=dict)
+    parameters: dict[str, Any] = Field(default_factory=dict)
     entrypoint: Optional[str] = Field(
         default=None,
         description=(
@@ -198,7 +198,7 @@ class RunnerDeployment(BaseModel):
             " the deployment is registered with a built runner."
         ),
     )
-    job_variables: Dict[str, Any] = Field(
+    job_variables: dict[str, Any] = Field(
         default_factory=dict,
         description=(
             "Job variables used to override the default values of a work pool"
@@ -280,7 +280,7 @@ class RunnerDeployment(BaseModel):
         async with get_client() as client:
             flow_id = await client.create_flow_from_name(self.flow_name)

-            create_payload = dict(
+            create_payload: dict[str, Any] = dict(
                 flow_id=flow_id,
                 name=self.name,
                 work_queue_name=self.work_queue_name,
@@ -428,7 +428,7 @@ class RunnerDeployment(BaseModel):
         else:
             return [create_deployment_schedule_create(schedule)]

-    def _set_defaults_from_flow(self, flow: "Flow"):
+    def _set_defaults_from_flow(self, flow: "Flow[..., Any]"):
         self._parameter_openapi_schema = parameter_schema(flow)

         if not self.version:
@@ -439,7 +439,7 @@ class RunnerDeployment(BaseModel):
     @classmethod
     def from_flow(
         cls,
-        flow: "Flow",
+        flow: "Flow[..., Any]",
         name: str,
         interval: Optional[
             Union[Iterable[Union[int, float, timedelta]], int, float, timedelta]
@@ -449,7 +449,7 @@ class RunnerDeployment(BaseModel):
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -457,7 +457,7 @@ class RunnerDeployment(BaseModel):
         enforce_parameter_schema: bool = True,
         work_pool_name: Optional[str] = None,
         work_queue_name: Optional[str] = None,
-        job_variables: Optional[Dict[str, Any]] = None,
+        job_variables: Optional[dict[str, Any]] = None,
         entrypoint_type: EntrypointType = EntrypointType.FILE_PATH,
     ) -> "RunnerDeployment":
         """
@@ -588,7 +588,7 @@ class RunnerDeployment(BaseModel):
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -596,7 +596,7 @@ class RunnerDeployment(BaseModel):
         enforce_parameter_schema: bool = True,
         work_pool_name: Optional[str] = None,
         work_queue_name: Optional[str] = None,
-        job_variables: Optional[Dict[str, Any]] = None,
+        job_variables: Optional[dict[str, Any]] = None,
     ) -> "RunnerDeployment":
         """
         Configure a deployment for a given flow located at a given entrypoint.
@@ -689,7 +689,7 @@ class RunnerDeployment(BaseModel):
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -697,7 +697,7 @@ class RunnerDeployment(BaseModel):
         enforce_parameter_schema: bool = True,
         work_pool_name: Optional[str] = None,
         work_queue_name: Optional[str] = None,
-        job_variables: Optional[Dict[str, Any]] = None,
+        job_variables: Optional[dict[str, Any]] = None,
     ):
         """
         Create a RunnerDeployment from a flow located at a given entrypoint and stored in a
@@ -945,8 +945,8 @@ async def deploy(

     console.print(f"Successfully pushed image {image.reference!r}", style="green")

-    deployment_exceptions = []
-    deployment_ids = []
+    deployment_exceptions: list[dict[str, Any]] = []
+    deployment_ids: list[UUID] = []
     image_ref = image.reference if image else None
     for deployment in track(
         deployments,
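
For reference, a minimal sketch of passing the now dict[str, Any]-typed parameters and job_variables to RunnerDeployment.from_flow; the flow, deployment name, and values below are illustrative and not taken from this diff:

    from prefect import flow
    from prefect.deployments.runner import RunnerDeployment


    @flow
    def greet(name: str = "world") -> None:
        print(f"hello {name}")


    # Both mappings are plain dict[str, Any] values (previously typing.Dict).
    deployment = RunnerDeployment.from_flow(
        flow=greet,
        name="greet-nightly",  # illustrative deployment name
        parameters={"name": "prefect"},
        job_variables={"env": {"LOG_LEVEL": "DEBUG"}},
    )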
prefect/deployments/steps/core.py CHANGED
@@ -99,7 +99,9 @@ def _get_function_for_step(
     return step_func


-async def run_step(step: Dict, upstream_outputs: Optional[Dict] = None) -> Dict:
+async def run_step(
+    step: dict[str, Any], upstream_outputs: Optional[dict[str, Any]] = None
+) -> dict[str, Any]:
     """
     Runs a step, returns the step's output.

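
As a rough illustration of the mapping run_step expects (a single fully qualified step function keyed to its keyword arguments); the specific step shown is an assumption for the example, not part of this diff:

    import asyncio

    from prefect.deployments.steps.core import run_step

    # One step: {"<fully qualified step function>": {<keyword arguments>}}
    step = {"prefect.deployments.steps.set_working_directory": {"directory": "/tmp"}}

    # Returns the step's output as a dict[str, Any], e.g. {"directory": "/tmp"}.
    outputs = asyncio.run(run_step(step))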
prefect/deployments/steps/pull.py CHANGED
@@ -6,10 +6,11 @@ import os
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional

+from prefect._internal.compatibility.async_dispatch import async_dispatch
 from prefect._internal.retries import retry_async_fn
 from prefect.logging.loggers import get_logger
 from prefect.runner.storage import BlockStorageAdapter, GitRepository, RemoteStorage
-from prefect.utilities.asyncutils import sync_compatible
+from prefect.utilities.asyncutils import run_coro_as_sync

 deployment_logger = get_logger("deployment")

@@ -17,7 +18,7 @@ if TYPE_CHECKING:
     from prefect.blocks.core import Block


-def set_working_directory(directory: str) -> dict:
+def set_working_directory(directory: str) -> dict[str, str]:
     """
     Sets the working directory; works with both absolute and relative paths.

@@ -37,15 +38,64 @@ def set_working_directory(directory: str) -> dict:
     base_delay=1,
     max_delay=10,
     retry_on_exceptions=(RuntimeError,),
+    operation_name="git_clone",
 )
-@sync_compatible
-async def git_clone(
+async def _pull_git_repository_with_retries(repo: GitRepository):
+    await repo.pull_code()
+
+
+async def agit_clone(
+    repository: str,
+    branch: Optional[str] = None,
+    include_submodules: bool = False,
+    access_token: Optional[str] = None,
+    credentials: Optional["Block"] = None,
+) -> dict[str, str]:
+    """
+    Asynchronously clones a git repository into the current working directory.
+
+    Args:
+        repository: the URL of the repository to clone
+        branch: the branch to clone; if not provided, the default branch will be used
+        include_submodules (bool): whether to include git submodules when cloning the repository
+        access_token: an access token to use for cloning the repository; if not provided
+            the repository will be cloned using the default git credentials
+        credentials: a GitHubCredentials, GitLabCredentials, or BitBucketCredentials block can be used to specify the
+            credentials to use for cloning the repository.
+
+    Returns:
+        dict: a dictionary containing a `directory` key of the new directory that was created
+
+    Raises:
+        subprocess.CalledProcessError: if the git clone command fails for any reason
+    """
+    if access_token and credentials:
+        raise ValueError(
+            "Please provide either an access token or credentials but not both."
+        )
+
+    _credentials = {"access_token": access_token} if access_token else credentials
+
+    storage = GitRepository(
+        url=repository,
+        credentials=_credentials,
+        branch=branch,
+        include_submodules=include_submodules,
+    )
+
+    await _pull_git_repository_with_retries(storage)
+
+    return dict(directory=str(storage.destination.relative_to(Path.cwd())))
+
+
+@async_dispatch(agit_clone)
+def git_clone(
     repository: str,
     branch: Optional[str] = None,
     include_submodules: bool = False,
     access_token: Optional[str] = None,
     credentials: Optional["Block"] = None,
-) -> dict:
+) -> dict[str, str]:
     """
     Clones a git repository into the current working directory.

@@ -120,20 +170,18 @@ async def git_clone(
             "Please provide either an access token or credentials but not both."
         )

-    credentials = {"access_token": access_token} if access_token else credentials
+    _credentials = {"access_token": access_token} if access_token else credentials

     storage = GitRepository(
         url=repository,
-        credentials=credentials,
+        credentials=_credentials,
         branch=branch,
         include_submodules=include_submodules,
     )

-    await storage.pull_code()
+    run_coro_as_sync(_pull_git_repository_with_retries(storage))

-    directory = str(storage.destination.relative_to(Path.cwd()))
-    deployment_logger.info(f"Cloned repository {repository!r} into {directory!r}")
-    return {"directory": directory}
+    return dict(directory=str(storage.destination.relative_to(Path.cwd())))


 async def pull_from_remote_storage(url: str, **settings: Any):
@@ -190,7 +238,7 @@ async def pull_with_block(block_document_name: str, block_type_slug: str):

     full_slug = f"{block_type_slug}/{block_document_name}"
     try:
-        block = await Block.load(full_slug)
+        block = await Block.aload(full_slug)
     except Exception:
         deployment_logger.exception("Unable to load block '%s'", full_slug)
         raise
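
Here the `@sync_compatible` wrapper is replaced by an explicit coroutine (`agit_clone`) plus `@async_dispatch`, so callers get whichever path matches their calling context. A hedged usage sketch; the repository URL and branch are placeholders:

    import asyncio

    from prefect.deployments.steps.pull import agit_clone, git_clone

    # Synchronous callers keep calling git_clone(); the clone is driven through
    # run_coro_as_sync(_pull_git_repository_with_retries(...)) internally.
    output = git_clone(repository="https://github.com/org/repo.git", branch="main")
    print(output["directory"])


    async def main() -> None:
        # Asynchronous callers can await the new coroutine directly.
        output = await agit_clone(
            repository="https://github.com/org/repo.git", branch="main"
        )
        print(output["directory"])


    asyncio.run(main())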
prefect/events/clients.py CHANGED
@@ -1,11 +1,13 @@
 import abc
 import asyncio
+import os
 from types import TracebackType
 from typing import (
     TYPE_CHECKING,
     Any,
     ClassVar,
     Dict,
+    Generator,
     List,
     MutableMapping,
     Optional,
@@ -13,20 +15,22 @@ from typing import (
     Type,
     cast,
 )
+from urllib.parse import urlparse
 from uuid import UUID

 import orjson
 import pendulum
 from cachetools import TTLCache
 from prometheus_client import Counter
+from python_socks.async_.asyncio import Proxy
 from typing_extensions import Self
 from websockets import Subprotocol
-from websockets.client import WebSocketClientProtocol, connect
 from websockets.exceptions import (
     ConnectionClosed,
     ConnectionClosedError,
     ConnectionClosedOK,
 )
+from websockets.legacy.client import Connect, WebSocketClientProtocol

 from prefect.events import Event
 from prefect.logging import get_logger
@@ -80,6 +84,53 @@ def events_out_socket_from_api_url(url: str):
     return http_to_ws(url) + "/events/out"


+class WebsocketProxyConnect(Connect):
+    def __init__(self: Self, uri: str, **kwargs: Any):
+        # super() is intentionally deferred to the _proxy_connect method
+        # to allow for the socket to be established first
+
+        self.uri = uri
+        self._kwargs = kwargs
+
+        u = urlparse(uri)
+        host = u.hostname
+
+        if u.scheme == "ws":
+            port = u.port or 80
+            proxy_url = os.environ.get("HTTP_PROXY")
+        elif u.scheme == "wss":
+            port = u.port or 443
+            proxy_url = os.environ.get("HTTPS_PROXY")
+            kwargs["server_hostname"] = host
+        else:
+            raise ValueError(
+                "Unsupported scheme %s. Expected 'ws' or 'wss'. " % u.scheme
+            )
+
+        self._proxy = Proxy.from_url(proxy_url) if proxy_url else None
+        self._host = host
+        self._port = port
+
+    async def _proxy_connect(self: Self) -> WebSocketClientProtocol:
+        if self._proxy:
+            sock = await self._proxy.connect(
+                dest_host=self._host,
+                dest_port=self._port,
+            )
+            self._kwargs["sock"] = sock
+
+        super().__init__(self.uri, **self._kwargs)
+        proto = await self.__await_impl__()
+        return proto
+
+    def __await__(self: Self) -> Generator[Any, None, WebSocketClientProtocol]:
+        return self._proxy_connect().__await__()
+
+
+def websocket_connect(uri: str, **kwargs: Any) -> WebsocketProxyConnect:
+    return WebsocketProxyConnect(uri, **kwargs)
+
+
 def get_events_client(
     reconnection_attempts: int = 10,
     checkpoint_every: int = 700,
@@ -265,7 +316,7 @@ class PrefectEventsClient(EventsClient):
         )

         self._events_socket_url = events_in_socket_from_api_url(api_url)
-        self._connect = connect(self._events_socket_url)
+        self._connect = websocket_connect(self._events_socket_url)
         self._websocket = None
         self._reconnection_attempts = reconnection_attempts
         self._unconfirmed_events = []
@@ -435,7 +486,7 @@ class PrefectCloudEventsClient(PrefectEventsClient):
             reconnection_attempts=reconnection_attempts,
             checkpoint_every=checkpoint_every,
         )
-        self._connect = connect(
+        self._connect = websocket_connect(
             self._events_socket_url,
             extra_headers={"Authorization": f"bearer {api_key}"},
         )
@@ -494,7 +545,7 @@ class PrefectEventSubscriber:

         logger.debug("Connecting to %s", socket_url)

-        self._connect = connect(
+        self._connect = websocket_connect(
             socket_url,
             subprotocols=[Subprotocol("prefect")],
         )
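
The practical effect is that the events websocket clients now honor the standard proxy environment variables. A brief sketch under assumed values (the proxy address and API URL are illustrative):

    import os

    from prefect.events.clients import websocket_connect

    # wss:// URLs consult HTTPS_PROXY; ws:// URLs consult HTTP_PROXY.
    os.environ["HTTPS_PROXY"] = "http://proxy.internal:3128"

    connect = websocket_connect("wss://api.prefect.cloud/api/events/in")
    # Awaiting `connect` inside an async context first opens a TCP socket through
    # the proxy (via python-socks), then completes the websocket handshake over it.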
prefect/events/filters.py CHANGED
@@ -3,9 +3,9 @@ from uuid import UUID

 import pendulum
 from pydantic import Field, PrivateAttr
-from pydantic_extra_types.pendulum_dt import DateTime

 from prefect._internal.schemas.bases import PrefectBaseModel
+from prefect.types import DateTime
 from prefect.utilities.collections import AutoEnum

 from .schemas.events import Event, Resource, ResourceSpecification
prefect/events/related.py CHANGED
@@ -15,7 +15,8 @@ from typing import (
 from uuid import UUID

 import pendulum
-from pendulum.datetime import DateTime
+
+from prefect.types import DateTime

 from .schemas.events import RelatedResource

prefect/events/schemas/events.py CHANGED
@@ -20,7 +20,6 @@ from pydantic import (
     RootModel,
     model_validator,
 )
-from pydantic_extra_types.pendulum_dt import DateTime
 from typing_extensions import Annotated, Self

 from prefect._internal.schemas.bases import PrefectBaseModel
@@ -28,6 +27,7 @@ from prefect.logging import get_logger
 from prefect.settings import (
     PREFECT_EVENTS_MAXIMUM_LABELS_PER_RESOURCE,
 )
+from prefect.types import DateTime

 from .labelling import Labelled

prefect/events/utilities.py CHANGED
@@ -24,6 +24,7 @@ def emit_event(
     payload: Optional[Dict[str, Any]] = None,
     id: Optional[UUID] = None,
     follows: Optional[Event] = None,
+    **kwargs: Optional[Dict[str, Any]],
 ) -> Optional[Event]:
     """
     Send an event to Prefect Cloud.
@@ -62,6 +63,7 @@ def emit_event(
     event_kwargs: Dict[str, Any] = {
         "event": event,
         "resource": resource,
+        **kwargs,
     }

     if occurred is None:
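
With this change, any extra keyword arguments are merged into `event_kwargs` and forwarded to the `Event` constructor. A minimal sketch of the call site; the event name and resource labels are illustrative:

    from prefect.events import emit_event

    emit_event(
        event="acme.order.processed",
        resource={"prefect.resource.id": "acme.order.12345"},
        # Additional Event fields supplied as keyword arguments are now
        # forwarded through event_kwargs to the Event constructor.
    )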
prefect/events/worker.py CHANGED
@@ -83,6 +83,14 @@ class EventsWorker(QueueService[Event]):
         await self._client.emit(event)

     async def attach_related_resources_from_context(self, event: Event):
+        if "prefect.resource.lineage-group" in event.resource:
+            # We attach related resources to lineage events in `emit_lineage_event`,
+            # instead of the worker, because not all run-related resources are
+            # upstream from every lineage event (they might be downstream).
+            # The "related" field in the event schema tracks upstream resources
+            # only.
+            return
+
         exclude = {resource.id for resource in event.involved_resources}
         event.related += await related_resources_from_run_context(
             client=self._orchestration_client, exclude=exclude