prefect-client 2.18.0__py3-none-any.whl → 2.18.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (45)
  1. prefect/_internal/schemas/fields.py +31 -12
  2. prefect/blocks/core.py +1 -1
  3. prefect/blocks/notifications.py +2 -2
  4. prefect/blocks/system.py +2 -3
  5. prefect/client/orchestration.py +283 -22
  6. prefect/client/schemas/sorting.py +9 -0
  7. prefect/client/utilities.py +25 -3
  8. prefect/concurrency/asyncio.py +11 -5
  9. prefect/concurrency/events.py +3 -3
  10. prefect/concurrency/services.py +1 -1
  11. prefect/concurrency/sync.py +9 -5
  12. prefect/deployments/deployments.py +27 -18
  13. prefect/deployments/runner.py +34 -26
  14. prefect/engine.py +3 -1
  15. prefect/events/actions.py +2 -1
  16. prefect/events/cli/automations.py +47 -9
  17. prefect/events/clients.py +50 -18
  18. prefect/events/filters.py +30 -3
  19. prefect/events/instrument.py +40 -40
  20. prefect/events/related.py +2 -1
  21. prefect/events/schemas/automations.py +50 -5
  22. prefect/events/schemas/deployment_triggers.py +15 -227
  23. prefect/events/schemas/events.py +7 -7
  24. prefect/events/utilities.py +1 -1
  25. prefect/events/worker.py +10 -7
  26. prefect/flows.py +33 -18
  27. prefect/input/actions.py +9 -9
  28. prefect/input/run_input.py +49 -37
  29. prefect/new_flow_engine.py +293 -0
  30. prefect/new_task_engine.py +374 -0
  31. prefect/results.py +3 -2
  32. prefect/runner/runner.py +3 -2
  33. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +44 -3
  34. prefect/settings.py +26 -0
  35. prefect/states.py +25 -19
  36. prefect/tasks.py +17 -0
  37. prefect/utilities/asyncutils.py +37 -0
  38. prefect/utilities/engine.py +6 -4
  39. prefect/utilities/schema_tools/validation.py +1 -1
  40. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/METADATA +1 -1
  41. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/RECORD +44 -43
  42. prefect/concurrency/common.py +0 -0
  43. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/LICENSE +0 -0
  44. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/WHEEL +0 -0
  45. {prefect_client-2.18.0.dist-info → prefect_client-2.18.1.dist-info}/top_level.txt +0 -0
prefect/events/schemas/automations.py CHANGED
@@ -10,23 +10,24 @@ from typing import (
     Optional,
     Set,
     Union,
+    cast,
 )
 from uuid import UUID

 from typing_extensions import TypeAlias

 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect._internal.schemas.validators import validate_trigger_within

 if HAS_PYDANTIC_V2:
-    from pydantic.v1 import Field, root_validator, validator
+    from pydantic.v1 import Field, PrivateAttr, root_validator, validator
     from pydantic.v1.fields import ModelField
 else:
-    from pydantic import Field, root_validator, validator
-    from pydantic.fields import ModelField
+    from pydantic import Field, PrivateAttr, root_validator, validator  # type: ignore
+    from pydantic.fields import ModelField  # type: ignore

 from prefect._internal.schemas.bases import PrefectBaseModel
-from prefect.events.actions import ActionTypes
+from prefect._internal.schemas.validators import validate_trigger_within
+from prefect.events.actions import ActionTypes, RunDeployment
 from prefect.utilities.collections import AutoEnum

 from .events import ResourceSpecification
@@ -50,6 +51,50 @@ class Trigger(PrefectBaseModel, abc.ABC, extra="ignore"):
     def describe_for_cli(self, indent: int = 0) -> str:
         """Return a human-readable description of this trigger for the CLI"""

+    # The following allows the regular Trigger class to be used when serving or
+    # deploying flows, analogous to how the Deployment*Trigger classes work
+
+    _deployment_id: Optional[UUID] = PrivateAttr(default=None)
+
+    def set_deployment_id(self, deployment_id: UUID):
+        self._deployment_id = deployment_id
+
+    def owner_resource(self) -> Optional[str]:
+        return f"prefect.deployment.{self._deployment_id}"
+
+    def actions(self) -> List[ActionTypes]:
+        assert self._deployment_id
+        return [
+            RunDeployment(
+                source="selected",
+                deployment_id=self._deployment_id,
+                parameters=getattr(self, "parameters", None),
+                job_variables=getattr(self, "job_variables", None),
+            )
+        ]
+
+    def as_automation(self) -> "AutomationCore":
+        assert self._deployment_id
+
+        trigger: TriggerTypes = cast(TriggerTypes, self)
+
+        # This is one of the Deployment*Trigger classes, so translate it over to a
+        # plain Trigger
+        if hasattr(self, "trigger_type"):
+            trigger = self.trigger_type(**self.dict())
+
+        return AutomationCore(
+            name=(
+                getattr(self, "name", None)
+                or f"Automation for deployment {self._deployment_id}"
+            ),
+            description="",
+            enabled=getattr(self, "enabled", True),
+            trigger=trigger,
+            actions=self.actions(),
+            owner_resource=self.owner_resource(),
+        )
+

 class ResourceTrigger(Trigger, abc.ABC):
     """
prefect/events/schemas/deployment_triggers.py CHANGED
@@ -16,7 +16,6 @@ from typing import (
     Any,
     Dict,
     List,
-    Literal,
     Optional,
     Set,
     Union,
@@ -25,19 +24,16 @@ from uuid import UUID

 from typing_extensions import TypeAlias

-from prefect._internal.compatibility.deprecated import deprecated_class
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect._internal.schemas.validators import validate_trigger_within

 if HAS_PYDANTIC_V2:
-    from pydantic.v1 import Field, PrivateAttr, root_validator, validator
-    from pydantic.v1.fields import ModelField
+    from pydantic.v1 import Field, PrivateAttr
 else:
-    from pydantic import Field, PrivateAttr, root_validator, validator
-    from pydantic.fields import ModelField
+    from pydantic import Field, PrivateAttr  # type: ignore

+from prefect._internal.compatibility.deprecated import deprecated_class
 from prefect._internal.schemas.bases import PrefectBaseModel
-from prefect.events.actions import RunDeployment
+from prefect.events.actions import ActionTypes, RunDeployment

 from .automations import (
     AutomationCore,
@@ -47,7 +43,6 @@ from .automations import (
     MetricTriggerQuery,
     Posture,
     SequenceTrigger,
-    Trigger,
     TriggerTypes,
 )
 from .events import ResourceSpecification
@@ -67,11 +62,7 @@ class BaseDeploymentTrigger(PrefectBaseModel, abc.ABC, extra="ignore"):
     description: str = Field("", description="A longer description of this automation")
     enabled: bool = Field(True, description="Whether this automation will be evaluated")

-    # Fields from Trigger
-
-    type: str
-
-    # Fields from Deployment
+    # Fields from the RunDeployment action

     parameters: Optional[Dict[str, Any]] = Field(
         None,
@@ -87,60 +78,9 @@ class BaseDeploymentTrigger(PrefectBaseModel, abc.ABC, extra="ignore"):
             "deployment's default job variables"
         ),
     )
-    _deployment_id: Optional[UUID] = PrivateAttr(default=None)
-
-    def set_deployment_id(self, deployment_id: UUID):
-        self._deployment_id = deployment_id
-
-    def owner_resource(self) -> Optional[str]:
-        return f"prefect.deployment.{self._deployment_id}"
-
-    def actions(self) -> List[RunDeployment]:
-        assert self._deployment_id
-        return [
-            RunDeployment(
-                parameters=self.parameters,
-                deployment_id=self._deployment_id,
-                job_variables=self.job_variables,
-            )
-        ]
-
-    def as_automation(self) -> AutomationCore:
-        if not self.name:
-            raise ValueError("name is required")
-
-        return AutomationCore(
-            name=self.name,
-            description=self.description,
-            enabled=self.enabled,
-            trigger=self.as_trigger(),
-            actions=self.actions(),
-            owner_resource=self.owner_resource(),
-        )
-
-    @abc.abstractmethod
-    def as_trigger(self) -> Trigger:
-        ...


-class DeploymentResourceTrigger(BaseDeploymentTrigger, abc.ABC):
-    """
-    Base class for triggers that may filter by the labels of resources.
-    """
-
-    type: str
-
-    match: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification.parse_obj({}),
-        description="Labels for resources which this trigger will match.",
-    )
-    match_related: ResourceSpecification = Field(
-        default_factory=lambda: ResourceSpecification.parse_obj({}),
-        description="Labels for related resources which this trigger will match.",
-    )
-
-
-class DeploymentEventTrigger(DeploymentResourceTrigger):
+class DeploymentEventTrigger(BaseDeploymentTrigger, EventTrigger):
     """
     A trigger that fires based on the presence or absence of events within a given
     period of time.
@@ -148,184 +88,28 @@ class DeploymentEventTrigger(DeploymentResourceTrigger):

     trigger_type = EventTrigger

-    type: Literal["event"] = "event"

-    after: Set[str] = Field(
-        default_factory=set,
-        description=(
-            "The event(s) which must first been seen to fire this trigger. If "
-            "empty, then fire this trigger immediately. Events may include "
-            "trailing wildcards, like `prefect.flow-run.*`"
-        ),
-    )
-    expect: Set[str] = Field(
-        default_factory=set,
-        description=(
-            "The event(s) this trigger is expecting to see. If empty, this "
-            "trigger will match any event. Events may include trailing wildcards, "
-            "like `prefect.flow-run.*`"
-        ),
-    )
-
-    for_each: Set[str] = Field(
-        default_factory=set,
-        description=(
-            "Evaluate the trigger separately for each distinct value of these labels "
-            "on the resource. By default, labels refer to the primary resource of the "
-            "triggering event. You may also refer to labels from related "
-            "resources by specifying `related:<role>:<label>`. This will use the "
-            "value of that label for the first related resource in that role. For "
-            'example, `"for_each": ["related:flow:prefect.resource.id"]` would '
-            "evaluate the trigger for each flow."
-        ),
-    )
-    posture: Literal[Posture.Reactive, Posture.Proactive] = Field(  # type: ignore[valid-type]
-        Posture.Reactive,
-        description=(
-            "The posture of this trigger, either Reactive or Proactive. Reactive "
-            "triggers respond to the _presence_ of the expected events, while "
-            "Proactive triggers respond to the _absence_ of those expected events."
-        ),
-    )
-    threshold: int = Field(
-        1,
-        description=(
-            "The number of events required for this trigger to fire (for "
-            "Reactive triggers), or the number of events expected (for Proactive "
-            "triggers)"
-        ),
-    )
-    within: timedelta = Field(
-        timedelta(0),
-        minimum=0.0,
-        exclusiveMinimum=False,
-        description=(
-            "The time period over which the events must occur. For Reactive triggers, "
-            "this may be as low as 0 seconds, but must be at least 10 seconds for "
-            "Proactive triggers"
-        ),
-    )
-
-    @validator("within")
-    def enforce_minimum_within(
-        cls, value: timedelta, values, config, field: ModelField
-    ):
-        return validate_trigger_within(value, field)
-
-    @root_validator(skip_on_failure=True)
-    def enforce_minimum_within_for_proactive_triggers(cls, values: Dict[str, Any]):
-        posture: Optional[Posture] = values.get("posture")
-        within: Optional[timedelta] = values.get("within")
-
-        if posture == Posture.Proactive:
-            if not within or within == timedelta(0):
-                values["within"] = timedelta(seconds=10.0)
-            elif within < timedelta(seconds=10.0):
-                raise ValueError(
-                    "The minimum within for Proactive triggers is 10 seconds"
-                )
-
-        return values
-
-    def as_trigger(self) -> Trigger:
-        return self.trigger_type(
-            match=self.match,
-            match_related=self.match_related,
-            after=self.after,
-            expect=self.expect,
-            for_each=self.for_each,
-            posture=self.posture,
-            threshold=self.threshold,
-            within=self.within,
-        )
-
-
-class DeploymentMetricTrigger(DeploymentResourceTrigger):
+class DeploymentMetricTrigger(BaseDeploymentTrigger, MetricTrigger):
     """
     A trigger that fires based on the results of a metric query.
     """

     trigger_type = MetricTrigger

-    type: Literal["metric"] = "metric"
-
-    posture: Literal[Posture.Metric] = Field(  # type: ignore[valid-type]
-        Posture.Metric,
-        description="Periodically evaluate the configured metric query.",
-    )
-
-    metric: MetricTriggerQuery = Field(
-        ...,
-        description="The metric query to evaluate for this trigger. ",
-    )
-
-    def as_trigger(self) -> Trigger:
-        return self.trigger_type(
-            match=self.match,
-            match_related=self.match_related,
-            posture=self.posture,
-            metric=self.metric,
-            job_variables=self.job_variables,
-        )
-
-
-class DeploymentCompositeTrigger(BaseDeploymentTrigger, abc.ABC):
-    """
-    Requires some number of triggers to have fired within the given time period.
-    """
-
-    type: Literal["compound", "sequence"]
-    triggers: List["TriggerTypes"]
-    within: Optional[timedelta]

-
-class DeploymentCompoundTrigger(DeploymentCompositeTrigger):
+class DeploymentCompoundTrigger(BaseDeploymentTrigger, CompoundTrigger):
     """A composite trigger that requires some number of triggers to have
     fired within the given time period"""

     trigger_type = CompoundTrigger

-    type: Literal["compound"] = "compound"
-    require: Union[int, Literal["any", "all"]]
-
-    @root_validator
-    def validate_require(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        require = values.get("require")
-
-        if isinstance(require, int):
-            if require < 1:
-                raise ValueError("required must be at least 1")
-            if require > len(values["triggers"]):
-                raise ValueError(
-                    "required must be less than or equal to the number of triggers"
-                )
-
-        return values
-
-    def as_trigger(self) -> Trigger:
-        return self.trigger_type(
-            require=self.require,
-            triggers=self.triggers,
-            within=self.within,
-            job_variables=self.job_variables,
-        )

-
-class DeploymentSequenceTrigger(DeploymentCompositeTrigger):
+class DeploymentSequenceTrigger(BaseDeploymentTrigger, SequenceTrigger):
     """A composite trigger that requires some number of triggers to have fired
     within the given time period in a specific order"""

     trigger_type = SequenceTrigger

-    type: Literal["sequence"] = "sequence"
-
-    def as_trigger(self) -> Trigger:
-        return self.trigger_type(
-            triggers=self.triggers,
-            within=self.within,
-            job_variables=self.job_variables,
-        )
-

 # Concrete deployment trigger types
 DeploymentTriggerTypes: TypeAlias = Union[
@@ -461,7 +245,10 @@ class DeploymentTrigger(PrefectBaseModel):
     def as_automation(self) -> AutomationCore:
         assert self.name

+        trigger: TriggerTypes
+
         if self.posture == Posture.Metric:
+            assert self.metric
             trigger = MetricTrigger(
                 type="metric",
                 match=self.match,
@@ -496,12 +283,13 @@ class DeploymentTrigger(PrefectBaseModel):
     def owner_resource(self) -> Optional[str]:
         return f"prefect.deployment.{self._deployment_id}"

-    def actions(self) -> List[RunDeployment]:
+    def actions(self) -> List[ActionTypes]:
         assert self._deployment_id
         return [
             RunDeployment(
-                parameters=self.parameters,
+                source="selected",
                 deployment_id=self._deployment_id,
+                parameters=self.parameters,
                 job_variables=self.job_variables,
             )
         ]
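Taken together, the changes in this file replace the hand-copied field definitions with multiple inheritance: each Deployment*Trigger now mixes BaseDeploymentTrigger with its plain counterpart (EventTrigger, MetricTrigger, CompoundTrigger, SequenceTrigger) and relies on the as_automation()/actions() plumbing added to Trigger in automations.py. A hedged sketch of the resulting usage (the deployment id is illustrative; Prefect normally sets it while serving or deploying the flow):

from uuid import uuid4

from prefect.events.schemas.deployment_triggers import DeploymentEventTrigger

# Fields such as expect, posture, threshold, and within now come from
# EventTrigger; name, parameters, and job_variables come from BaseDeploymentTrigger
trigger = DeploymentEventTrigger(
    name="Rerun on failure",
    expect={"prefect.flow-run.Failed"},
)
trigger.set_deployment_id(uuid4())  # illustrative; normally done internally

# as_automation() is inherited from Trigger and converts this instance to a
# plain EventTrigger via its trigger_type before building the automation
automation = trigger.as_automation()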
prefect/events/schemas/events.py CHANGED
@@ -15,9 +15,14 @@ from typing import (
 from uuid import UUID, uuid4

 import pendulum
-from pydantic import Field, root_validator, validator

 from prefect._internal.pydantic import HAS_PYDANTIC_V2
+
+if HAS_PYDANTIC_V2:
+    from pydantic.v1 import Field, root_validator, validator
+else:
+    from pydantic import Field, root_validator, validator  # type: ignore
+
 from prefect._internal.schemas.bases import PrefectBaseModel
 from prefect._internal.schemas.fields import DateTimeTZ
 from prefect.logging import get_logger
@@ -26,11 +31,6 @@ from prefect.settings import (
     PREFECT_EVENTS_MAXIMUM_RELATED_RESOURCES,
 )

-if HAS_PYDANTIC_V2:
-    from pydantic.v1 import Field, root_validator, validator
-else:
-    from pydantic import Field, root_validator, validator
-
 from .labelling import Labelled

 logger = get_logger(__name__)
@@ -128,7 +128,7 @@ class Event(PrefectBaseModel):
         description="The client-provided identifier of this event",
     )
     follows: Optional[UUID] = Field(
-        None,
+        default=None,
         description=(
             "The ID of an event that is known to have occurred prior to this one. "
             "If set, this may be used to establish a more precise ordering of causally-"
prefect/events/utilities.py CHANGED
@@ -61,7 +61,7 @@ def emit_event(
     if worker_instance.client_type not in operational_clients:
         return None

-    event_kwargs = {
+    event_kwargs: Dict[str, Any] = {
         "event": event,
         "resource": resource,
     }
prefect/events/worker.py CHANGED
@@ -1,6 +1,7 @@
 from contextlib import asynccontextmanager
 from contextvars import Context, copy_context
-from typing import Any, Optional, Tuple, Type
+from typing import Any, Dict, Optional, Tuple, Type
+from uuid import UUID

 from typing_extensions import Self

@@ -41,11 +42,11 @@ def emit_events_to_cloud() -> bool:

 def should_emit_events_to_running_server() -> bool:
     api_url = PREFECT_API_URL.value()
-    return isinstance(api_url, str) and PREFECT_EXPERIMENTAL_EVENTS
+    return isinstance(api_url, str) and PREFECT_EXPERIMENTAL_EVENTS.value()


 def should_emit_events_to_ephemeral_server() -> bool:
-    return PREFECT_API_KEY.value() is None and PREFECT_EXPERIMENTAL_EVENTS
+    return PREFECT_API_KEY.value() is None and PREFECT_EXPERIMENTAL_EVENTS.value()


 class EventsWorker(QueueService[Event]):
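The two return statements above previously evaluated the setting object itself rather than its configured value; reading the fix, the issue appears to be that a Setting instance is always truthy, so the experimental-events check could never come out False. An illustrative sketch of the distinction (assumed behavior of the prefect.settings Setting wrapper):

from prefect.settings import PREFECT_EXPERIMENTAL_EVENTS

# The bare Setting object is truthy even when the flag is disabled...
bool(PREFECT_EXPERIMENTAL_EVENTS)          # True: it is a Setting instance

# ...while .value() resolves the actual configured boolean
bool(PREFECT_EXPERIMENTAL_EVENTS.value())  # False unless the flag is enabled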
@@ -56,6 +57,7 @@ class EventsWorker(QueueService[Event]):
         self.client_type = client_type
         self.client_options = client_options
         self._client: EventsClient
+        self._context_cache: Dict[UUID, Context] = {}

     @asynccontextmanager
     async def _lifespan(self):
@@ -64,11 +66,12 @@ class EventsWorker(QueueService[Event]):
         async with self._client:
             yield

-    def _prepare_item(self, event: Event) -> Tuple[Event, Context]:
-        return (event, copy_context())
+    def _prepare_item(self, event: Event) -> Event:
+        self._context_cache[event.id] = copy_context()
+        return event

-    async def _handle(self, event_and_context: Tuple[Event, Context]):
-        event, context = event_and_context
+    async def _handle(self, event: Event):
+        context = self._context_cache.pop(event.id)
         with temporary_context(context=context):
             await self.attach_related_resources_from_context(event)

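With this change the queued item is just the Event; the submitting context is stashed in _context_cache keyed by the event id in _prepare_item and popped back in _handle. A generic sketch of the pattern with illustrative names (not Prefect's API):

from contextvars import Context, copy_context
from typing import Dict
from uuid import UUID

_context_cache: Dict[UUID, Context] = {}

def prepare(item_id: UUID) -> UUID:
    # Capture the caller's context at submission time
    _context_cache[item_id] = copy_context()
    return item_id

def handle(item_id: UUID) -> None:
    # Restore that context when the item is eventually processed
    context = _context_cache.pop(item_id)
    context.run(lambda: None)  # the real handler would run inside the context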
prefect/flows.py CHANGED
@@ -34,20 +34,10 @@ from typing import (
 )
 from uuid import UUID

-from prefect._vendor.fastapi.encoders import jsonable_encoder
-from typing_extensions import Self
+from rich.console import Console
+from typing_extensions import Literal, ParamSpec, Self

-from prefect._internal.compatibility.deprecated import deprecated_parameter
-from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect.client.orchestration import get_client
-from prefect.deployments.runner import DeploymentImage, EntrypointType, deploy
-from prefect.filesystems import ReadableDeploymentStorage
-from prefect.runner.storage import (
-    BlockStorageAdapter,
-    RunnerStorage,
-    create_storage_from_url,
-)

 if HAS_PYDANTIC_V2:
     import pydantic.v1 as pydantic
@@ -67,26 +57,36 @@ else:

     V2ValidationError = None

-from rich.console import Console
-from typing_extensions import Literal, ParamSpec
+from prefect._vendor.fastapi.encoders import jsonable_encoder

+from prefect._internal.compatibility.deprecated import deprecated_parameter
+from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.schemas.validators import raise_on_name_with_banned_characters
+from prefect.client.orchestration import get_client
 from prefect.client.schemas.objects import Flow as FlowSchema
 from prefect.client.schemas.objects import FlowRun, MinimalDeploymentSchedule
 from prefect.client.schemas.schedules import SCHEDULE_TYPES
 from prefect.context import PrefectObjectRegistry, registry_from_script
-from prefect.events import DeploymentTriggerTypes
+from prefect.deployments.runner import DeploymentImage, EntrypointType, deploy
+from prefect.events import DeploymentTriggerTypes, TriggerTypes
 from prefect.exceptions import (
     MissingFlowError,
     ObjectNotFound,
     ParameterTypeError,
     UnspecifiedFlowError,
 )
+from prefect.filesystems import ReadableDeploymentStorage
 from prefect.futures import PrefectFuture
 from prefect.logging import get_logger
 from prefect.results import ResultSerializer, ResultStorage
+from prefect.runner.storage import (
+    BlockStorageAdapter,
+    RunnerStorage,
+    create_storage_from_url,
+)
 from prefect.settings import (
     PREFECT_DEFAULT_WORK_POOL_NAME,
+    PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE,
     PREFECT_FLOW_DEFAULT_RETRIES,
     PREFECT_FLOW_DEFAULT_RETRY_DELAY_SECONDS,
     PREFECT_UI_URL,
@@ -618,7 +618,7 @@ class Flow(Generic[P, R]):
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
@@ -748,7 +748,7 @@ class Flow(Generic[P, R]):
         schedules: Optional[List["FlexibleScheduleList"]] = None,
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         parameters: Optional[dict] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -962,7 +962,7 @@ class Flow(Generic[P, R]):
         schedules: Optional[List[MinimalDeploymentSchedule]] = None,
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         parameters: Optional[dict] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -1225,6 +1225,21 @@ class Flow(Generic[P, R]):
             # we can add support for exploring subflows for tasks in the future.
             return track_viz_task(self.isasync, self.name, parameters)

+        if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE.value():
+            from prefect.new_flow_engine import run_flow
+            from prefect.utilities.asyncutils import run_sync
+
+            awaitable = run_flow(
+                flow=self,
+                parameters=parameters,
+                wait_for=wait_for,
+                return_type=return_type,
+            )
+            if self.isasync:
+                return awaitable
+            else:
+                return run_sync(awaitable)
+
         return enter_flow_run_engine_from_flow_call(
             self,
             parameters,
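The added branch routes flow calls through prefect.new_flow_engine when the experimental setting is on, returning the coroutine directly for async flows and driving it with run_sync otherwise. A hedged opt-in sketch (the setting name is taken from the diff; the routing behavior noted in the comment is an inference from the code above, not documented API):

from prefect import flow
from prefect.settings import PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE, temporary_settings

@flow
def greet(name: str) -> str:
    return f"Hello, {name}!"

# With the flag enabled, calling the flow goes through run_flow()/run_sync()
# from prefect.new_flow_engine instead of the classic engine entrypoint
with temporary_settings({PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE: True}):
    print(greet("Marvin"))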
prefect/input/actions.py CHANGED
@@ -5,7 +5,7 @@ import orjson
 import pydantic

 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect.client.utilities import inject_client
+from prefect.client.utilities import client_injector
 from prefect.context import FlowRunContext
 from prefect.exceptions import PrefectHTTPStatusError
 from prefect.utilities.asyncutils import sync_compatible
@@ -52,13 +52,13 @@ async def create_flow_run_input_from_model(


 @sync_compatible
-@inject_client
+@client_injector
 async def create_flow_run_input(
+    client: "PrefectClient",
     key: str,
     value: Any,
     flow_run_id: Optional[UUID] = None,
     sender: Optional[str] = None,
-    client: "PrefectClient" = None,
 ):
     """
     Create a new flow run input. The given `value` will be serialized to JSON
@@ -81,13 +81,13 @@ async def create_flow_run_input(


 @sync_compatible
-@inject_client
+@client_injector
 async def filter_flow_run_input(
+    client: "PrefectClient",
     key_prefix: str,
     limit: int = 1,
     exclude_keys: Optional[Set[str]] = None,
     flow_run_id: Optional[UUID] = None,
-    client: "PrefectClient" = None,
 ):
     if exclude_keys is None:
         exclude_keys = set()
@@ -103,9 +103,9 @@ async def filter_flow_run_input(


 @sync_compatible
-@inject_client
+@client_injector
 async def read_flow_run_input(
-    key: str, flow_run_id: Optional[UUID] = None, client: "PrefectClient" = None
+    client: "PrefectClient", key: str, flow_run_id: Optional[UUID] = None
 ) -> Any:
     """Read a flow run input.

@@ -126,9 +126,9 @@ async def read_flow_run_input(


 @sync_compatible
-@inject_client
+@client_injector
 async def delete_flow_run_input(
-    key: str, flow_run_id: Optional[UUID] = None, client: "PrefectClient" = None
+    client: "PrefectClient", key: str, flow_run_id: Optional[UUID] = None
 ):
     """Delete a flow run input.

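Across this file the injected client moves from a trailing client= keyword argument (the old inject_client decorator) to the first positional parameter (the new client_injector decorator). A hedged sketch of the contract these signatures imply, using a hypothetical function name:

from typing import Optional
from uuid import UUID

from prefect.client.orchestration import PrefectClient
from prefect.client.utilities import client_injector

@client_injector
async def read_something(client: PrefectClient, key: str, flow_run_id: Optional[UUID] = None):
    # `client` is supplied by the decorator as the first argument; callers invoke
    # read_something(key, flow_run_id=...) without passing a client themselves
    ...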