prefect-client 2.16.8__py3-none-any.whl → 2.17.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/__init__.py +0 -18
- prefect/_internal/compatibility/deprecated.py +108 -5
- prefect/_internal/compatibility/experimental.py +9 -8
- prefect/_internal/concurrency/api.py +23 -42
- prefect/_internal/concurrency/waiters.py +25 -22
- prefect/_internal/pydantic/__init__.py +16 -3
- prefect/_internal/pydantic/_base_model.py +39 -4
- prefect/_internal/pydantic/_compat.py +69 -452
- prefect/_internal/pydantic/_flags.py +5 -0
- prefect/_internal/pydantic/_types.py +8 -0
- prefect/_internal/pydantic/utilities/__init__.py +0 -0
- prefect/_internal/pydantic/utilities/config_dict.py +72 -0
- prefect/_internal/pydantic/utilities/field_validator.py +135 -0
- prefect/_internal/pydantic/utilities/model_construct.py +56 -0
- prefect/_internal/pydantic/utilities/model_copy.py +55 -0
- prefect/_internal/pydantic/utilities/model_dump.py +136 -0
- prefect/_internal/pydantic/utilities/model_dump_json.py +112 -0
- prefect/_internal/pydantic/utilities/model_fields.py +50 -0
- prefect/_internal/pydantic/utilities/model_fields_set.py +29 -0
- prefect/_internal/pydantic/utilities/model_json_schema.py +82 -0
- prefect/_internal/pydantic/utilities/model_rebuild.py +80 -0
- prefect/_internal/pydantic/utilities/model_validate.py +75 -0
- prefect/_internal/pydantic/utilities/model_validate_json.py +68 -0
- prefect/_internal/pydantic/utilities/model_validator.py +79 -0
- prefect/_internal/pydantic/utilities/type_adapter.py +71 -0
- prefect/_internal/schemas/bases.py +1 -17
- prefect/_internal/schemas/validators.py +425 -4
- prefect/agent.py +1 -1
- prefect/blocks/kubernetes.py +7 -3
- prefect/blocks/notifications.py +18 -18
- prefect/blocks/webhook.py +1 -1
- prefect/client/base.py +7 -0
- prefect/client/cloud.py +1 -1
- prefect/client/orchestration.py +51 -11
- prefect/client/schemas/actions.py +367 -297
- prefect/client/schemas/filters.py +28 -28
- prefect/client/schemas/objects.py +78 -147
- prefect/client/schemas/responses.py +240 -60
- prefect/client/schemas/schedules.py +6 -8
- prefect/concurrency/events.py +2 -2
- prefect/context.py +4 -2
- prefect/deployments/base.py +6 -13
- prefect/deployments/deployments.py +34 -9
- prefect/deployments/runner.py +9 -27
- prefect/deprecated/packaging/base.py +5 -6
- prefect/deprecated/packaging/docker.py +19 -25
- prefect/deprecated/packaging/file.py +10 -5
- prefect/deprecated/packaging/orion.py +9 -4
- prefect/deprecated/packaging/serializers.py +8 -58
- prefect/engine.py +55 -618
- prefect/events/actions.py +16 -1
- prefect/events/clients.py +45 -13
- prefect/events/filters.py +19 -2
- prefect/events/related.py +4 -4
- prefect/events/schemas/automations.py +13 -2
- prefect/events/schemas/deployment_triggers.py +73 -5
- prefect/events/schemas/events.py +1 -1
- prefect/events/utilities.py +12 -4
- prefect/events/worker.py +26 -8
- prefect/exceptions.py +3 -8
- prefect/filesystems.py +7 -7
- prefect/flows.py +7 -3
- prefect/infrastructure/provisioners/ecs.py +1 -0
- prefect/logging/configuration.py +2 -2
- prefect/manifests.py +1 -8
- prefect/profiles.toml +1 -1
- prefect/pydantic/__init__.py +74 -2
- prefect/pydantic/main.py +26 -2
- prefect/serializers.py +6 -31
- prefect/settings.py +72 -26
- prefect/software/python.py +3 -5
- prefect/task_server.py +2 -2
- prefect/utilities/callables.py +1 -1
- prefect/utilities/collections.py +2 -1
- prefect/utilities/dispatch.py +1 -0
- prefect/utilities/engine.py +629 -0
- prefect/utilities/pydantic.py +1 -1
- prefect/utilities/schema_tools/validation.py +2 -2
- prefect/utilities/visualization.py +1 -1
- prefect/variables.py +88 -12
- prefect/workers/base.py +20 -11
- prefect/workers/block.py +4 -8
- prefect/workers/process.py +2 -5
- {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/METADATA +4 -3
- {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/RECORD +88 -72
- prefect/_internal/schemas/transformations.py +0 -106
- {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/LICENSE +0 -0
- {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/WHEEL +0 -0
- {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/top_level.txt +0 -0
prefect/events/actions.py
CHANGED
```diff
@@ -19,10 +19,17 @@ class Action(PrefectBaseModel):
     type: str
 
 
+class DoNothing(Action):
+    """Do nothing, which may be helpful for testing automations"""
+
+    type: Literal["do-nothing"] = "do-nothing"
+
+
 class RunDeployment(Action):
     """Run the given deployment with the given parameters"""
 
     type: Literal["run-deployment"] = "run-deployment"
+
     source: Literal["selected"] = "selected"
     parameters: Optional[Dict[str, Any]] = Field(
         None,
@@ -32,12 +39,20 @@ class RunDeployment(Action):
         ),
     )
     deployment_id: UUID = Field(..., description="The identifier of the deployment")
+    job_variables: Optional[Dict[str, Any]] = Field(
+        None,
+        description=(
+            "Job variables to pass to the run, or None to use the "
+            "deployment's default job variables"
+        ),
+    )
 
 
 class SendNotification(Action):
     """Send a notification with the given parameters"""
 
     type: Literal["send-notification"] = "send-notification"
+
     block_document_id: UUID = Field(
         ..., description="The identifier of the notification block"
     )
@@ -45,4 +60,4 @@ class SendNotification(Action):
     subject: Optional[str] = Field(None, description="Notification subject")
 
 
-ActionTypes = Union[RunDeployment, SendNotification]
+ActionTypes = Union[DoNothing, RunDeployment, SendNotification]
```
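For consumers of the actions schema, the additions are the `DoNothing` action and the `job_variables` field on `RunDeployment`. A minimal sketch of constructing the new models (the deployment ID is a throwaway placeholder):

```python
from uuid import uuid4

from prefect.events.actions import DoNothing, RunDeployment

# New in 2.17.0: a no-op action, handy when testing automations end to end.
noop = DoNothing()

# RunDeployment now takes job_variables, which override the deployment's
# default job variables for the triggered run (None keeps the defaults).
run = RunDeployment(
    deployment_id=uuid4(),  # placeholder ID, purely for illustration
    parameters={"name": "example"},
    job_variables={"env": {"LOG_LEVEL": "DEBUG"}},
)
```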
prefect/events/clients.py
CHANGED
```diff
@@ -17,6 +17,7 @@ from uuid import UUID
 import orjson
 import pendulum
 from cachetools import TTLCache
+from typing_extensions import Self
 from websockets.client import WebSocketClientProtocol, connect
 from websockets.exceptions import (
     ConnectionClosed,
@@ -118,8 +119,8 @@ def _get_api_url_and_key(
     return api_url, api_key
 
 
-class PrefectCloudEventsClient(EventsClient):
-    """A Prefect Events client that streams events to a Prefect Cloud Workspace"""
+class PrefectEventsClient(EventsClient):
+    """A Prefect Events client that streams events to a Prefect server"""
 
     _websocket: Optional[WebSocketClientProtocol]
     _unconfirmed_events: List[Event]
@@ -127,37 +128,36 @@ class PrefectCloudEventsClient(EventsClient):
     def __init__(
         self,
         api_url: str = None,
-        api_key: str = None,
         reconnection_attempts: int = 10,
         checkpoint_every: int = 20,
     ):
         """
         Args:
-            api_url: The base URL for a Prefect Cloud workspace
-            api_key: The API of an actor with the manage_events scope
+            api_url: The base URL for a Prefect server
             reconnection_attempts: When the client is disconnected, how many times
                 the client should attempt to reconnect
             checkpoint_every: How often the client should sync with the server to
                 confirm receipt of all previously sent events
         """
-        api_url, api_key = _get_api_url_and_key(api_url, api_key)
+        api_url = api_url or PREFECT_API_URL.value()
+        if not api_url:
+            raise ValueError(
+                "api_url must be provided or set in the Prefect configuration"
+            )
 
-        socket_url = (
+        self._events_socket_url = (
             api_url.replace("https://", "wss://")
             .replace("http://", "ws://")
             .rstrip("/")
+            + "/events/in"
         )
-
-        self._connect = connect(
-            socket_url + "/events/in",
-            extra_headers={"Authorization": f"bearer {api_key}"},
-        )
+        self._connect = connect(self._events_socket_url)
         self._websocket = None
         self._reconnection_attempts = reconnection_attempts
         self._unconfirmed_events = []
         self._checkpoint_every = checkpoint_every
 
-    async def __aenter__(self) -> "PrefectCloudEventsClient":
+    async def __aenter__(self) -> Self:
         # Don't handle any errors in the initial connection, because these are most
         # likely a permission or configuration issue that should propagate
         await super().__aenter__()
@@ -238,6 +238,38 @@ class PrefectCloudEventsClient(EventsClient):
             await asyncio.sleep(1)
 
 
+class PrefectCloudEventsClient(PrefectEventsClient):
+    """A Prefect Events client that streams events to a Prefect Cloud Workspace"""
+
+    def __init__(
+        self,
+        api_url: str = None,
+        api_key: str = None,
+        reconnection_attempts: int = 10,
+        checkpoint_every: int = 20,
+    ):
+        """
+        Args:
+            api_url: The base URL for a Prefect Cloud workspace
+            api_key: The API of an actor with the manage_events scope
+            reconnection_attempts: When the client is disconnected, how many times
+                the client should attempt to reconnect
+            checkpoint_every: How often the client should sync with the server to
+                confirm receipt of all previously sent events
+        """
+        api_url, api_key = _get_api_url_and_key(api_url, api_key)
+        super().__init__(
+            api_url=api_url,
+            reconnection_attempts=reconnection_attempts,
+            checkpoint_every=checkpoint_every,
+        )
+
+        self._connect = connect(
+            self._events_socket_url,
+            extra_headers={"Authorization": f"bearer {api_key}"},
+        )
+
+
 SEEN_EVENTS_SIZE = 500_000
 SEEN_EVENTS_TTL = 120
 
```
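The practical effect is a split client hierarchy: `PrefectEventsClient` streams to whatever server `PREFECT_API_URL` points at, and `PrefectCloudEventsClient` adds the bearer-token header on top of it. A rough sketch, assuming a reachable API at the placeholder URLs and that the `emit` coroutine and dict-to-resource coercion behave as elsewhere in the 2.x events API:

```python
import asyncio

from prefect.events.clients import PrefectCloudEventsClient, PrefectEventsClient
from prefect.events.schemas.events import Event


async def main() -> None:
    # Against a self-hosted Prefect server: no API key; api_url falls back to
    # PREFECT_API_URL when omitted.
    async with PrefectEventsClient(api_url="http://127.0.0.1:4200/api") as client:
        await client.emit(
            Event(event="my.app.started", resource={"prefect.resource.id": "my.app"})
        )

    # Against Prefect Cloud: the subclass reuses the same socket URL logic and
    # adds the Authorization header (URL and key below are placeholders).
    async with PrefectCloudEventsClient(
        api_url="https://api.prefect.cloud/api/accounts/.../workspaces/...",
        api_key="pnu_placeholder",
    ) as client:
        await client.emit(
            Event(event="my.app.started", resource={"prefect.resource.id": "my.app"})
        )


if __name__ == "__main__":
    asyncio.run(main())
```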
prefect/events/filters.py
CHANGED
```diff
@@ -6,18 +6,21 @@ import pendulum
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
 from prefect._internal.schemas.bases import PrefectBaseModel
 from prefect._internal.schemas.fields import DateTimeTZ
+from prefect.utilities.collections import AutoEnum
 
 from .schemas.events import Event, Resource, ResourceSpecification
 
 if HAS_PYDANTIC_V2:
-    from pydantic.v1 import Field
+    from pydantic.v1 import Field, PrivateAttr
 else:
-    from pydantic import Field
+    from pydantic import Field, PrivateAttr
 
 
 class EventDataFilter(PrefectBaseModel, extra="forbid"):
     """A base class for filtering event data."""
 
+    _top_level_filter: "EventFilter | None" = PrivateAttr(None)
+
     def get_filters(self) -> List["EventDataFilter"]:
         return [
             filter
@@ -103,6 +106,10 @@ class EventResourceFilter(EventDataFilter):
     labels: Optional[ResourceSpecification] = Field(
         None, description="Only include events for resources with these labels"
     )
+    distinct: bool = Field(
+        False,
+        description="Only include events for distinct resources",
+    )
 
     def includes(self, event: Event) -> bool:
         if self.id:
@@ -189,6 +196,11 @@ class EventIDFilter(EventDataFilter):
         return True
 
 
+class EventOrder(AutoEnum):
+    ASC = "ASC"
+    DESC = "DESC"
+
+
 class EventFilter(EventDataFilter):
     occurred: EventOccurredFilter = Field(
         default_factory=EventOccurredFilter,
@@ -211,3 +223,8 @@ class EventFilter(EventDataFilter):
         default_factory=EventIDFilter,
         description="Filter criteria for the events' ID",
     )
+
+    order: EventOrder = Field(
+        EventOrder.DESC,
+        description="The order to return filtered events",
+    )
```
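The user-facing additions are the `EventOrder` enum and the `order` and `distinct` fields. A sketch of an oldest-first, de-duplicated query; the surrounding filter fields (`EventOccurredFilter.since`, `EventResourceFilter.id`) follow the existing 2.x schema and are shown for illustration only:

```python
import pendulum

from prefect.events.filters import (
    EventFilter,
    EventOccurredFilter,
    EventOrder,
    EventResourceFilter,
)

# Ask for events from the last hour, oldest first, de-duplicated by resource.
event_filter = EventFilter(
    occurred=EventOccurredFilter(since=pendulum.now("UTC").subtract(hours=1)),
    resource=EventResourceFilter(
        id=["prefect.flow-run.00000000-0000-0000-0000-000000000000"],  # placeholder
        distinct=True,
    ),
    order=EventOrder.ASC,
)
```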
prefect/events/related.py
CHANGED
```diff
@@ -31,8 +31,8 @@ RESOURCE_CACHE: RelatedResourceCache = {}
 
 def tags_as_related_resources(tags: Iterable[str]) -> List[RelatedResource]:
     return [
-        RelatedResource(
-            __root__={
+        RelatedResource.parse_obj(
+            {
                 "prefect.resource.id": f"prefect.tag.{tag}",
                 "prefect.resource.role": "tag",
             }
@@ -44,8 +44,8 @@ def tags_as_related_resources(tags: Iterable[str]) -> List[RelatedResource]:
 def object_as_related_resource(kind: str, role: str, object: Any) -> RelatedResource:
     resource_id = f"prefect.{kind}.{object.id}"
 
-    return RelatedResource(
-        __root__={
+    return RelatedResource.parse_obj(
+        {
             "prefect.resource.id": resource_id,
             "prefect.resource.role": role,
             "prefect.resource.name": object.name,
```
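These helpers now build root-model schemas with `parse_obj` rather than keyword construction, which behaves identically under pydantic v1 and the v1 shim bundled with pydantic v2. The same pattern applies to hand-built resources, e.g.:

```python
from prefect.events.schemas.events import RelatedResource

# Root-model schemas are constructed from a plain dict of labels.
tag_resource = RelatedResource.parse_obj(
    {
        "prefect.resource.id": "prefect.tag.production",
        "prefect.resource.role": "tag",
    }
)
print(tag_resource)
```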
prefect/events/schemas/automations.py
CHANGED
```diff
@@ -54,11 +54,11 @@ class ResourceTrigger(Trigger, abc.ABC):
     type: str
 
     match: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for resources which this trigger will match.",
     )
     match_related: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for related resources which this trigger will match.",
     )
 
@@ -294,6 +294,17 @@ class Automation(PrefectBaseModel, extra="ignore"):
         ...,
         description="The actions to perform when this Automation triggers",
     )
+
+    actions_on_trigger: List[ActionTypes] = Field(
+        default_factory=list,
+        description="The actions to perform when an Automation goes into a triggered state",
+    )
+
+    actions_on_resolve: List[ActionTypes] = Field(
+        default_factory=list,
+        description="The actions to perform when an Automation goes into a resolving state",
+    )
+
     owner_resource: Optional[str] = Field(
         default=None, description="The owning resource of this automation"
     )
```
prefect/events/schemas/deployment_triggers.py
CHANGED
```diff
@@ -11,6 +11,7 @@ create them from YAML.
 
 import abc
 import textwrap
+import warnings
 from datetime import timedelta
 from typing import (
     Any,
@@ -36,8 +37,17 @@ else:
     from pydantic import Field, PrivateAttr, root_validator, validator
     from pydantic.fields import ModelField
 
+from prefect._internal.compatibility.experimental import (
+    EXPERIMENTAL_WARNING,
+    PREFECT_EXPERIMENTAL_WARN,
+    ExperimentalFeature,
+    experiment_enabled,
+)
 from prefect._internal.schemas.bases import PrefectBaseModel
 from prefect.events.actions import RunDeployment
+from prefect.settings import (
+    PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES,
+)
 
 from .automations import (
     Automation,
@@ -80,7 +90,13 @@ class BaseDeploymentTrigger(PrefectBaseModel, abc.ABC, extra="ignore"):
             "deployment's default parameters"
         ),
     )
-
+    job_variables: Optional[Dict[str, Any]] = Field(
+        None,
+        description=(
+            "Job variables to pass to the deployment, or None to use the "
+            "deployment's default job variables"
+        ),
+    )
     _deployment_id: Optional[UUID] = PrivateAttr(default=None)
 
     def set_deployment_id(self, deployment_id: UUID):
@@ -90,11 +106,32 @@ class BaseDeploymentTrigger(PrefectBaseModel, abc.ABC, extra="ignore"):
         return f"prefect.deployment.{self._deployment_id}"
 
     def actions(self) -> List[RunDeployment]:
+        if self.job_variables is not None and experiment_enabled(
+            "flow_run_infra_overrides"
+        ):
+            if (
+                PREFECT_EXPERIMENTAL_WARN
+                and PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES
+            ):
+                warnings.warn(
+                    EXPERIMENTAL_WARNING.format(
+                        feature="Flow run job variables",
+                        group="flow_run_infra_overrides",
+                        help="To use this feature, update your workers to Prefect 2.16.4 or later. ",
+                    ),
+                    ExperimentalFeature,
+                    stacklevel=3,
+                )
+        if not experiment_enabled("flow_run_infra_overrides"):
+            # nullify job_variables if the flag is disabled
+            self.job_variables = None
+
         assert self._deployment_id
         return [
             RunDeployment(
                 parameters=self.parameters,
                 deployment_id=self._deployment_id,
+                job_variables=self.job_variables,
             )
         ]
 
@@ -124,11 +161,11 @@ class DeploymentResourceTrigger(BaseDeploymentTrigger, abc.ABC):
     type: str
 
     match: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for resources which this trigger will match.",
     )
     match_related: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for related resources which this trigger will match.",
     )
 
@@ -254,6 +291,7 @@ class DeploymentMetricTrigger(DeploymentResourceTrigger):
             match_related=self.match_related,
             posture=self.posture,
             metric=self.metric,
+            job_variables=self.job_variables,
         )
 
 
@@ -293,6 +331,7 @@ class DeploymentCompoundTrigger(DeploymentCompositeTrigger):
             require=self.require,
             triggers=self.triggers,
             within=self.within,
+            job_variables=self.job_variables,
         )
 
 
@@ -306,6 +345,7 @@ class DeploymentSequenceTrigger(DeploymentCompositeTrigger):
         return SequenceTrigger(
             triggers=self.triggers,
             within=self.within,
+            job_variables=self.job_variables,
         )
 
 
@@ -343,15 +383,22 @@ class DeploymentTrigger(PrefectBaseModel):
     )
     description: str = Field("", description="A longer description of this automation")
     enabled: bool = Field(True, description="Whether this automation will be evaluated")
+    job_variables: Optional[Dict[str, Any]] = Field(
+        None,
+        description=(
+            "Job variables to pass to the run, or None to use the "
+            "deployment's default job variables"
+        ),
+    )
 
     # from ResourceTrigger
 
     match: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for resources which this trigger will match.",
     )
     match_related: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification(__root__={}),
+        default_factory=lambda: ResourceSpecification.parse_obj({}),
         description="Labels for related resources which this trigger will match.",
     )
 
@@ -472,10 +519,31 @@ class DeploymentTrigger(PrefectBaseModel):
         return f"prefect.deployment.{self._deployment_id}"
 
     def actions(self) -> List[RunDeployment]:
+        if self.job_variables is not None and experiment_enabled(
+            "flow_run_infra_overrides"
+        ):
+            if (
+                PREFECT_EXPERIMENTAL_WARN
+                and PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES
+            ):
+                warnings.warn(
+                    EXPERIMENTAL_WARNING.format(
+                        feature="Flow run job variables",
+                        group="flow_run_infra_overrides",
+                        help="To use this feature, update your workers to Prefect 2.16.4 or later. ",
+                    ),
+                    ExperimentalFeature,
+                    stacklevel=3,
+                )
+        if not experiment_enabled("flow_run_infra_overrides"):
+            # nullify job_variables if the flag is disabled
+            self.job_variables = None
+
         assert self._deployment_id
         return [
             RunDeployment(
                 parameters=self.parameters,
                 deployment_id=self._deployment_id,
+                job_variables=self.job_variables,
             )
         ]
```
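For deployment authors, the visible change is that triggers can carry `job_variables`, which are forwarded to the generated `RunDeployment` action when the `flow_run_infra_overrides` experiment is enabled. A sketch in the style of the Prefect trigger docs, assuming the module's `DeploymentEventTrigger` class and using placeholder resource and event names:

```python
from prefect.events.schemas.deployment_triggers import DeploymentEventTrigger

trigger = DeploymentEventTrigger(
    enabled=True,
    match={"prefect.resource.id": "my.external.resource"},  # placeholder resource
    expect=["external.resource.pinged"],                     # placeholder event name
    parameters={"name": "from-trigger"},
    # New in 2.17.0: forwarded to RunDeployment.job_variables (experimental,
    # gated behind the flow_run_infra_overrides flag).
    job_variables={"env": {"LOG_LEVEL": "DEBUG"}},
)
```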
prefect/events/schemas/events.py
CHANGED
```diff
@@ -282,4 +282,4 @@ class ResourceSpecification(PrefectBaseModel):
         return len(self.__root__)
 
     def deepcopy(self) -> "ResourceSpecification":
-        return ResourceSpecification(__root__=copy.deepcopy(self.__root__))
+        return ResourceSpecification.parse_obj(copy.deepcopy(self.__root__))
```
prefect/events/utilities.py
CHANGED
```diff
@@ -6,9 +6,13 @@ import pendulum
 
 from prefect._internal.schemas.fields import DateTimeTZ
 
-from .clients import AssertingEventsClient, PrefectCloudEventsClient
+from .clients import (
+    AssertingEventsClient,
+    PrefectCloudEventsClient,
+    PrefectEventsClient,
+)
 from .schemas.events import Event, RelatedResource
-from .worker import EventsWorker, emit_events_to_cloud
+from .worker import EventsWorker, should_emit_events
 
 TIGHT_TIMING = timedelta(minutes=5)
 
@@ -42,10 +46,14 @@ def emit_event(
         The event that was emitted if worker is using a client that emit
         events, otherwise None
     """
-    if not emit_events_to_cloud():
+    if not should_emit_events():
         return None
 
-    operational_clients = [AssertingEventsClient, PrefectCloudEventsClient]
+    operational_clients = [
+        AssertingEventsClient,
+        PrefectCloudEventsClient,
+        PrefectEventsClient,
+    ]
     worker_instance = EventsWorker.instance()
 
     if worker_instance.client_type not in operational_clients:
```
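The net effect for `emit_event` callers is that events are no longer Cloud-only: they are also shipped to a plain Prefect server when `PREFECT_API_URL` points at one and the experimental events setting is enabled. The call itself is unchanged; a small sketch with made-up event, resource, and payload values:

```python
from prefect.events.utilities import emit_event

emit_event(
    event="my.pipeline.stage.completed",                      # illustrative event name
    resource={"prefect.resource.id": "my.pipeline.stage-1"},  # illustrative resource
    payload={"rows_processed": 1024},
)
```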
prefect/events/worker.py
CHANGED
```diff
@@ -4,25 +4,41 @@ from typing import Any, Optional, Tuple, Type
 
 from typing_extensions import Self
 
-from prefect._internal.compatibility.experimental import experiment_enabled
 from prefect._internal.concurrency.services import QueueService
-from prefect.settings import PREFECT_API_KEY, PREFECT_API_URL, PREFECT_CLOUD_API_URL
+from prefect.settings import (
+    PREFECT_API_KEY,
+    PREFECT_API_URL,
+    PREFECT_CLOUD_API_URL,
+    PREFECT_EXPERIMENTAL_EVENTS,
+)
 from prefect.utilities.context import temporary_context
 
-from .clients import EventsClient, NullEventsClient, PrefectCloudEventsClient
+from .clients import (
+    EventsClient,
+    NullEventsClient,
+    PrefectCloudEventsClient,
+    PrefectEventsClient,
+)
 from .related import related_resources_from_run_context
 from .schemas.events import Event
 
 
+def should_emit_events() -> bool:
+    return emit_events_to_cloud() or should_emit_events_to_running_server()
+
+
 def emit_events_to_cloud() -> bool:
-    api = PREFECT_API_URL.value()
-    return (
-        experiment_enabled("events_client")
-        and api
-        and api.startswith(PREFECT_CLOUD_API_URL.value())
+    api_url = PREFECT_API_URL.value()
+    return isinstance(api_url, str) and api_url.startswith(
+        PREFECT_CLOUD_API_URL.value()
     )
 
 
+def should_emit_events_to_running_server() -> bool:
+    api_url = PREFECT_API_URL.value()
+    return isinstance(api_url, str) and PREFECT_EXPERIMENTAL_EVENTS
+
+
 class EventsWorker(QueueService[Event]):
     def __init__(
         self, client_type: Type[EventsClient], client_options: Tuple[Tuple[str, Any]]
@@ -67,6 +83,8 @@ class EventsWorker(QueueService[Event]):
                 "api_url": PREFECT_API_URL.value(),
                 "api_key": PREFECT_API_KEY.value(),
             }
+        elif should_emit_events_to_running_server():
+            client_type = PrefectEventsClient
 
         else:
             client_type = NullEventsClient
```
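The worker now makes a three-way choice instead of Cloud-or-nothing. A sketch that inspects the same helpers the worker uses (names taken directly from this diff):

```python
from prefect.events.worker import (
    emit_events_to_cloud,
    should_emit_events,
    should_emit_events_to_running_server,
)

# The worker picks its client in three tiers: Prefect Cloud, a running Prefect
# server with PREFECT_EXPERIMENTAL_EVENTS enabled, or a no-op NullEventsClient.
if not should_emit_events():
    print("Events are disabled; the NullEventsClient will be used.")
elif emit_events_to_cloud():
    print("Events will be streamed to Prefect Cloud.")
elif should_emit_events_to_running_server():
    print("Events will be streamed to the running Prefect server.")
```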
prefect/exceptions.py
CHANGED
```diff
@@ -1,19 +1,14 @@
 """
 Prefect-specific exceptions.
 """
+
 import inspect
 import traceback
 from types import ModuleType, TracebackType
 from typing import Callable, Dict, Iterable, List, Optional, Type
 
-from prefect._internal.pydantic import HAS_PYDANTIC_V2
-
-if HAS_PYDANTIC_V2:
-    import pydantic.v1 as pydantic
-else:
-    import pydantic
-
 from httpx._exceptions import HTTPStatusError
+from pydantic import ValidationError
 from rich.traceback import Traceback
 from typing_extensions import Self
 
@@ -182,7 +177,7 @@ class ParameterTypeError(PrefectException):
         super().__init__(msg)
 
     @classmethod
-    def from_validation_error(cls, exc: pydantic.ValidationError) -> Self:
+    def from_validation_error(cls, exc: ValidationError) -> Self:
         bad_params = [f'{err["loc"][0]}: {err["msg"]}' for err in exc.errors()]
         msg = "Flow run received invalid parameters:\n - " + "\n - ".join(bad_params)
         return cls(msg)
```
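Since the module now annotates against the top-level `pydantic.ValidationError`, and `from_validation_error` only reads `loc` and `msg` from `exc.errors()`, it works with errors from whichever pydantic major is installed. A minimal sketch with a throwaway model:

```python
from pydantic import BaseModel, ValidationError

from prefect.exceptions import ParameterTypeError


class Params(BaseModel):
    retries: int


try:
    Params(retries="not-a-number")
except ValidationError as exc:
    # Prints "Flow run received invalid parameters: ..." built from the error list.
    print(ParameterTypeError.from_validation_error(exc))
```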
prefect/filesystems.py
CHANGED
```diff
@@ -270,7 +270,7 @@ class RemoteFileSystem(WritableFileSystem, WritableDeploymentStorage):
     basepath: str = Field(
         default=...,
         description="Default path for this block to write to.",
-        example="s3://my-bucket/my-folder/",
+        examples=["s3://my-bucket/my-folder/"],
     )
     settings: Dict[str, Any] = Field(
         default_factory=dict,
@@ -451,19 +451,19 @@ class S3(WritableFileSystem, WritableDeploymentStorage):
     bucket_path: str = Field(
         default=...,
         description="An S3 bucket path.",
-        example="my-bucket/a-directory-within",
+        examples=["my-bucket/a-directory-within"],
     )
     aws_access_key_id: Optional[SecretStr] = Field(
         default=None,
         title="AWS Access Key ID",
         description="Equivalent to the AWS_ACCESS_KEY_ID environment variable.",
-        example="AKIAIOSFODNN7EXAMPLE",
+        examples=["AKIAIOSFODNN7EXAMPLE"],
     )
     aws_secret_access_key: Optional[SecretStr] = Field(
         default=None,
         title="AWS Secret Access Key",
         description="Equivalent to the AWS_SECRET_ACCESS_KEY environment variable.",
-        example="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
+        examples=["wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"],
     )
 
     _remote_file_system: RemoteFileSystem = None
@@ -549,7 +549,7 @@ class GCS(WritableFileSystem, WritableDeploymentStorage):
     bucket_path: str = Field(
         default=...,
         description="A GCS bucket path.",
-        example="my-bucket/a-directory-within",
+        examples=["my-bucket/a-directory-within"],
     )
     service_account_info: Optional[SecretStr] = Field(
         default=None,
@@ -653,7 +653,7 @@ class Azure(WritableFileSystem, WritableDeploymentStorage):
     bucket_path: str = Field(
         default=...,
         description="An Azure storage bucket path.",
-        example="my-bucket/a-directory-within",
+        examples=["my-bucket/a-directory-within"],
     )
     azure_storage_connection_string: Optional[SecretStr] = Field(
         default=None,
@@ -804,7 +804,7 @@ class SMB(WritableFileSystem, WritableDeploymentStorage):
     share_path: str = Field(
         default=...,
         description="SMB target (requires <SHARE>, followed by <PATH>).",
-        example="/SHARE/dir/subdir",
+        examples=["/SHARE/dir/subdir"],
     )
     smb_username: Optional[SecretStr] = Field(
         default=None,
```
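Every `Field(example=...)` becomes the plural `Field(examples=[...])`, the keyword current pydantic expects for JSON-schema examples. A generic sketch of the same pattern on a plain pydantic model (not one of Prefect's own classes):

```python
from pydantic import BaseModel, Field


class BucketConfig(BaseModel):
    # 2.16.x used Field(example="my-bucket/a-directory-within"); 2.17.0 moves to
    # the plural keyword so the examples land in the generated JSON schema.
    bucket_path: str = Field(
        default=...,
        description="An S3-style bucket path.",
        examples=["my-bucket/a-directory-within"],
    )


print(BucketConfig.schema()["properties"]["bucket_path"])
```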
prefect/flows.py
CHANGED
```diff
@@ -590,13 +590,13 @@ class Flow(Generic[P, R]):
     @sync_compatible
     @deprecated_parameter(
         "schedule",
-        start_date="Mar 
+        start_date="Mar 2024",
         when=lambda p: p is not None,
         help="Use `schedules` instead.",
     )
     @deprecated_parameter(
         "is_schedule_active",
-        start_date="Mar 
+        start_date="Mar 2024",
         when=lambda p: p is not None,
         help="Use `paused` instead.",
     )
@@ -970,6 +970,7 @@ class Flow(Generic[P, R]):
         enforce_parameter_schema: bool = False,
         entrypoint_type: EntrypointType = EntrypointType.FILE_PATH,
         print_next_steps: bool = True,
+        ignore_warnings: bool = False,
     ) -> UUID:
         """
         Deploys a flow to run on dynamic infrastructure via a work pool.
@@ -1024,6 +1025,7 @@ class Flow(Generic[P, R]):
                 entrypoint, ensure that the module will be importable in the execution environment.
             print_next_steps_message: Whether or not to print a message with next steps
                 after deploying the deployments.
+            ignore_warnings: Whether or not to ignore warnings about the work pool type.
 
         Returns:
             The ID of the created/updated deployment.
@@ -1100,6 +1102,7 @@ class Flow(Generic[P, R]):
             build=build,
             push=push,
             print_next_steps_message=False,
+            ignore_warnings=ignore_warnings,
         )
 
         if print_next_steps:
@@ -1122,10 +1125,11 @@ class Flow(Generic[P, R]):
                     style="blue",
                 )
             if PREFECT_UI_URL:
-                console.print(
+                message = (
                     "\nYou can also run your flow via the Prefect UI:"
                     f" [blue]{PREFECT_UI_URL.value()}/deployments/deployment/{deployment_ids[0]}[/]\n"
                 )
+                console.print(message, soft_wrap=True)
 
         return deployment_ids[0]
 
```
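`Flow.deploy` gains an `ignore_warnings` flag that it forwards to the underlying deploy call to silence warnings about the work pool type. A sketch, assuming a work pool named `my-docker-pool` and a pushable image tag already exist:

```python
from prefect import flow


@flow(log_prints=True)
def etl():
    print("running etl")


if __name__ == "__main__":
    etl.deploy(
        name="etl-nightly",
        work_pool_name="my-docker-pool",              # assumed to exist
        image="registry.example.com/etl:latest",      # assumed registry/image
        # New in 2.17.0: suppress warnings about the work pool type.
        ignore_warnings=True,
    )
```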