prefect-client 2.17.1__py3-none-any.whl → 2.18.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (71)
  1. prefect/_internal/compatibility/deprecated.py +2 -0
  2. prefect/_internal/pydantic/_compat.py +1 -0
  3. prefect/_internal/pydantic/utilities/field_validator.py +25 -10
  4. prefect/_internal/pydantic/utilities/model_dump.py +1 -1
  5. prefect/_internal/pydantic/utilities/model_validate.py +1 -1
  6. prefect/_internal/pydantic/utilities/model_validator.py +11 -3
  7. prefect/_internal/schemas/fields.py +31 -12
  8. prefect/_internal/schemas/validators.py +0 -6
  9. prefect/_version.py +97 -38
  10. prefect/blocks/abstract.py +34 -1
  11. prefect/blocks/core.py +1 -1
  12. prefect/blocks/notifications.py +16 -7
  13. prefect/blocks/system.py +2 -3
  14. prefect/client/base.py +10 -5
  15. prefect/client/orchestration.py +405 -85
  16. prefect/client/schemas/actions.py +4 -3
  17. prefect/client/schemas/objects.py +6 -5
  18. prefect/client/schemas/schedules.py +2 -6
  19. prefect/client/schemas/sorting.py +9 -0
  20. prefect/client/utilities.py +25 -3
  21. prefect/concurrency/asyncio.py +11 -5
  22. prefect/concurrency/events.py +3 -3
  23. prefect/concurrency/services.py +1 -1
  24. prefect/concurrency/sync.py +9 -5
  25. prefect/deployments/__init__.py +0 -2
  26. prefect/deployments/base.py +2 -144
  27. prefect/deployments/deployments.py +29 -20
  28. prefect/deployments/runner.py +36 -28
  29. prefect/deployments/steps/core.py +3 -3
  30. prefect/deprecated/packaging/serializers.py +5 -4
  31. prefect/engine.py +3 -1
  32. prefect/events/__init__.py +45 -0
  33. prefect/events/actions.py +250 -18
  34. prefect/events/cli/automations.py +201 -0
  35. prefect/events/clients.py +179 -21
  36. prefect/events/filters.py +30 -3
  37. prefect/events/instrument.py +40 -40
  38. prefect/events/related.py +2 -1
  39. prefect/events/schemas/automations.py +126 -8
  40. prefect/events/schemas/deployment_triggers.py +23 -277
  41. prefect/events/schemas/events.py +7 -7
  42. prefect/events/utilities.py +3 -1
  43. prefect/events/worker.py +21 -8
  44. prefect/exceptions.py +1 -1
  45. prefect/flows.py +33 -18
  46. prefect/input/actions.py +9 -9
  47. prefect/input/run_input.py +49 -37
  48. prefect/logging/__init__.py +2 -2
  49. prefect/logging/loggers.py +64 -1
  50. prefect/new_flow_engine.py +293 -0
  51. prefect/new_task_engine.py +374 -0
  52. prefect/results.py +32 -12
  53. prefect/runner/runner.py +3 -2
  54. prefect/serializers.py +62 -31
  55. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +44 -3
  56. prefect/settings.py +32 -10
  57. prefect/states.py +25 -19
  58. prefect/tasks.py +17 -0
  59. prefect/types/__init__.py +90 -0
  60. prefect/utilities/asyncutils.py +37 -0
  61. prefect/utilities/engine.py +6 -4
  62. prefect/utilities/pydantic.py +34 -15
  63. prefect/utilities/schema_tools/hydration.py +88 -19
  64. prefect/utilities/schema_tools/validation.py +1 -1
  65. prefect/variables.py +4 -4
  66. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/METADATA +1 -1
  67. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/RECORD +71 -67
  68. /prefect/{concurrency/common.py → events/cli/__init__.py} +0 -0
  69. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/LICENSE +0 -0
  70. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/WHEEL +0 -0
  71. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/top_level.txt +0 -0
prefect/results.py CHANGED
@@ -16,17 +16,10 @@ from typing import (
 )
 from uuid import UUID
 
-from typing_extensions import Self
+from typing_extensions import ParamSpec, Self
 
 import prefect
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-
-if HAS_PYDANTIC_V2:
-    import pydantic.v1 as pydantic
-
-else:
-    import pydantic
-
 from prefect.blocks.core import Block
 from prefect.client.utilities import inject_client
 from prefect.exceptions import MissingResult
@@ -46,7 +39,14 @@ from prefect.settings import (
 )
 from prefect.utilities.annotations import NotSet
 from prefect.utilities.asyncutils import sync_compatible
-from prefect.utilities.pydantic import add_type_dispatch
+from prefect.utilities.pydantic import get_dispatch_key, lookup_type, register_base_type
+
+if HAS_PYDANTIC_V2:
+    import pydantic.v1 as pydantic
+
+else:
+    import pydantic
+
 
 if TYPE_CHECKING:
     from prefect import Flow, Task
@@ -63,6 +63,7 @@ def DEFAULT_STORAGE_KEY_FN():
 
 
 logger = get_logger("results")
+P = ParamSpec("P")
 R = TypeVar("R")
 
 
@@ -286,7 +287,7 @@ class ResultFactory(pydantic.BaseModel):
     @classmethod
     @inject_client
     async def from_autonomous_task(
-        cls: Type[Self], task: "Task", client: "PrefectClient" = None
+        cls: Type[Self], task: "Task[P, R]", client: "PrefectClient" = None
     ) -> Self:
         """
         Create a new result factory for an autonomous task.
@@ -480,12 +481,27 @@ class ResultFactory(pydantic.BaseModel):
         return self.serializer.loads(blob.data)
 
 
-@add_type_dispatch
+@register_base_type
 class BaseResult(pydantic.BaseModel, abc.ABC, Generic[R]):
     type: str
    artifact_type: Optional[str]
    artifact_description: Optional[str]
 
+    def __init__(self, **data: Any) -> None:
+        type_string = get_dispatch_key(self) if type(self) != BaseResult else "__base__"
+        data.setdefault("type", type_string)
+        super().__init__(**data)
+
+    def __new__(cls: Type[Self], **kwargs) -> Self:
+        if "type" in kwargs:
+            try:
+                subcls = lookup_type(cls, dispatch_key=kwargs["type"])
+            except KeyError as exc:
+                raise pydantic.ValidationError(errors=[exc], model=cls)
+            return super().__new__(subcls)
+        else:
+            return super().__new__(cls)
+
     _cache: Any = pydantic.PrivateAttr(NotSet)
 
     def _cache_object(self, obj: Any) -> None:
@@ -511,6 +527,10 @@ class BaseResult(pydantic.BaseModel, abc.ABC, Generic[R]):
     class Config:
         extra = "forbid"
 
+    @classmethod
+    def __dispatch_key__(cls, **kwargs):
+        return cls.__fields__.get("type").get_default()
+
 
 class UnpersistedResult(BaseResult):
     """
@@ -713,7 +733,7 @@ class PersistedResultBlob(pydantic.BaseModel):
 
 class UnknownResult(BaseResult):
     """
-    Result type for unknown results. Typipcally used to represent the result
+    Result type for unknown results. Typically used to represent the result
     of tasks that were forced from a failure state into a completed state.
 
     The value for this result is always None and is not persisted to external
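The net effect of this change is that `BaseResult` now performs type dispatch itself: `__new__` looks up the concrete subclass registered under the incoming `type` key, and `__init__` back-fills `type` from the dispatch key. The sketch below re-implements the pattern in plain Python to show the mechanics; the registry and class names are invented for illustration and are not Prefect's actual helpers.

```python
from typing import Any, Dict, Type

# Invented registry; stands in for register_base_type/lookup_type.
_REGISTRY: Dict[str, Type["Base"]] = {}


class Base:
    type: str = "__base__"

    def __init_subclass__(cls, **kwargs: Any) -> None:
        super().__init_subclass__(**kwargs)
        _REGISTRY[cls.type] = cls  # each subclass registers its dispatch key

    def __new__(cls, **kwargs: Any) -> "Base":
        # Route construction to the subclass named by the "type" kwarg,
        # mirroring BaseResult.__new__ in the diff above.
        key = kwargs.get("type")
        if key is not None and key in _REGISTRY:
            return super().__new__(_REGISTRY[key])
        return super().__new__(cls)

    def __init__(self, **kwargs: Any) -> None:
        # Back-fill "type" from the class default, mirroring __init__ above.
        self.type = kwargs.get("type", type(self).type)


class Persisted(Base):
    type = "reference"


# Constructing via the base class with a dispatch key yields the subclass.
assert isinstance(Base(type="reference"), Persisted)
```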
prefect/runner/runner.py CHANGED
@@ -29,6 +29,7 @@ Example:
     ```
 
 """
+
 import asyncio
 import datetime
 import inspect
@@ -80,7 +81,7 @@ from prefect.deployments.runner import (
 )
 from prefect.deployments.schedules import FlexibleScheduleList
 from prefect.engine import propose_state
-from prefect.events import DeploymentTriggerTypes
+from prefect.events import DeploymentTriggerTypes, TriggerTypes
 from prefect.exceptions import (
     Abort,
 )
@@ -232,7 +233,7 @@ class Runner:
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
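`TriggerTypes` is the union of the composite automation triggers added under `prefect.events` in this release, so deployments registered through the `Runner` can now accept those as well. A hedged sketch of passing one through `serve()` (the flow and event name are invented; `DeploymentEventTrigger` is exported by `prefect.events` in 2.18):

```python
from prefect import flow
from prefect.events import DeploymentEventTrigger


@flow
def refresh_dashboard():  # invented example flow
    print("refreshing")


if __name__ == "__main__":
    # serve() forwards triggers to Runner.add_flow(); the widened
    # annotation above is what permits the richer trigger types here.
    refresh_dashboard.serve(
        name="on-upstream-data",
        triggers=[
            DeploymentEventTrigger(expect={"external.data.updated"})  # invented event
        ],
    )
```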
prefect/serializers.py CHANGED
@@ -13,9 +13,10 @@ bytes to an object respectively.
 
 import abc
 import base64
-from typing import Any, Dict, Generic, Optional, TypeVar
+from typing import Any, Dict, Generic, Optional, Type, TypeVar
+
+from typing_extensions import Literal, Self
 
-from prefect._internal.pydantic import HAS_PYDANTIC_V2
 from prefect._internal.schemas.validators import (
     cast_type_names_to_serializers,
     validate_compressionlib,
@@ -24,20 +25,29 @@ from prefect._internal.schemas.validators import (
     validate_picklelib,
     validate_picklelib_version,
 )
+from prefect.pydantic import HAS_PYDANTIC_V2
+from prefect.utilities.dispatch import get_dispatch_key, lookup_type, register_base_type
+from prefect.utilities.importtools import from_qualified_name, to_qualified_name
+from prefect.utilities.pydantic import custom_pydantic_encoder
 
 if HAS_PYDANTIC_V2:
-    import pydantic.v1 as pydantic
-    from pydantic.v1 import BaseModel
-    from pydantic.v1.json import pydantic_encoder
+    from pydantic.v1 import (
+        BaseModel,
+        Field,
+        ValidationError,
+        parse_obj_as,
+        root_validator,
+        validator,
+    )
 else:
-    import pydantic
-    from pydantic import BaseModel
-    from pydantic.json import pydantic_encoder
-
-from typing_extensions import Literal
-
-from prefect.utilities.importtools import from_qualified_name, to_qualified_name
-from prefect.utilities.pydantic import add_type_dispatch
+    from pydantic import (
+        BaseModel,
+        Field,
+        ValidationError,
+        parse_obj_as,
+        root_validator,
+        validator,
+    )
 
 D = TypeVar("D")
 
@@ -53,7 +63,7 @@ def prefect_json_object_encoder(obj: Any) -> Any:
     else:
         return {
             "__class__": to_qualified_name(obj.__class__),
-            "data": pydantic_encoder(obj),
+            "data": custom_pydantic_encoder({}, obj),
         }
 
 
@@ -63,21 +73,35 @@ def prefect_json_object_decoder(result: dict):
     with `prefect_json_object_encoder`
     """
     if "__class__" in result:
-        return pydantic.parse_obj_as(
-            from_qualified_name(result["__class__"]), result["data"]
-        )
+        return parse_obj_as(from_qualified_name(result["__class__"]), result["data"])
     elif "__exc_type__" in result:
         return from_qualified_name(result["__exc_type__"])(result["message"])
     else:
         return result
 
 
-@add_type_dispatch
+@register_base_type
 class Serializer(BaseModel, Generic[D], abc.ABC):
     """
     A serializer that can encode objects of type 'D' into bytes.
     """
 
+    def __init__(self, **data: Any) -> None:
+        type_string = get_dispatch_key(self) if type(self) != Serializer else "__base__"
+        data.setdefault("type", type_string)
+        super().__init__(**data)
+
+    def __new__(cls: Type[Self], **kwargs) -> Self:
+        if "type" in kwargs:
+            try:
+                subcls = lookup_type(cls, dispatch_key=kwargs["type"])
+            except KeyError as exc:
+                raise ValidationError(errors=[exc], model=cls)
+
+            return super().__new__(subcls)
+        else:
+            return super().__new__(cls)
+
     type: str
 
     @abc.abstractmethod
@@ -91,6 +115,10 @@ class Serializer(BaseModel, Generic[D], abc.ABC):
     class Config:
         extra = "forbid"
 
+    @classmethod
+    def __dispatch_key__(cls):
+        return cls.__fields__.get("type").get_default()
+
 
 class PickleSerializer(Serializer):
     """
@@ -107,11 +135,11 @@ class PickleSerializer(Serializer):
     picklelib: str = "cloudpickle"
     picklelib_version: str = None
 
-    @pydantic.validator("picklelib")
+    @validator("picklelib")
     def check_picklelib(cls, value):
         return validate_picklelib(value)
 
-    @pydantic.root_validator
+    @root_validator
     def check_picklelib_version(cls, values):
         return validate_picklelib_version(values)
 
@@ -135,16 +163,17 @@ class JSONSerializer(Serializer):
     """
 
     type: Literal["json"] = "json"
+
     jsonlib: str = "json"
-    object_encoder: Optional[str] = pydantic.Field(
+    object_encoder: Optional[str] = Field(
         default="prefect.serializers.prefect_json_object_encoder",
         description=(
             "An optional callable to use when serializing objects that are not "
             "supported by the JSON encoder. By default, this is set to a callable that "
-            "adds support for all types supported by Pydantic."
+            "adds support for all types supported by "
         ),
     )
-    object_decoder: Optional[str] = pydantic.Field(
+    object_decoder: Optional[str] = Field(
         default="prefect.serializers.prefect_json_object_decoder",
         description=(
             "An optional callable to use when deserializing objects. This callable "
@@ -153,14 +182,14 @@ class JSONSerializer(Serializer):
             "by our default `object_encoder`."
         ),
     )
-    dumps_kwargs: Dict[str, Any] = pydantic.Field(default_factory=dict)
-    loads_kwargs: Dict[str, Any] = pydantic.Field(default_factory=dict)
+    dumps_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    loads_kwargs: Dict[str, Any] = Field(default_factory=dict)
 
-    @pydantic.validator("dumps_kwargs")
+    @validator("dumps_kwargs")
     def dumps_kwargs_cannot_contain_default(cls, value):
         return validate_dump_kwargs(value)
 
-    @pydantic.validator("loads_kwargs")
+    @validator("loads_kwargs")
     def loads_kwargs_cannot_contain_object_hook(cls, value):
         return validate_load_kwargs(value)
 
@@ -200,11 +229,11 @@ class CompressedSerializer(Serializer):
     serializer: Serializer
     compressionlib: str = "lzma"
 
-    @pydantic.validator("serializer", pre=True)
+    @validator("serializer", pre=True)
     def validate_serializer(cls, value):
         return cast_type_names_to_serializers(value)
 
-    @pydantic.validator("compressionlib")
+    @validator("compressionlib")
     def check_compressionlib(cls, value):
         return validate_compressionlib(value)
 
@@ -225,7 +254,8 @@ class CompressedPickleSerializer(CompressedSerializer):
     """
 
     type: Literal["compressed/pickle"] = "compressed/pickle"
-    serializer: Serializer = pydantic.Field(default_factory=PickleSerializer)
+
+    serializer: Serializer = Field(default_factory=PickleSerializer)
 
 
 class CompressedJSONSerializer(CompressedSerializer):
@@ -234,4 +264,5 @@ class CompressedJSONSerializer(CompressedSerializer):
     """
 
     type: Literal["compressed/json"] = "compressed/json"
-    serializer: Serializer = pydantic.Field(default_factory=JSONSerializer)
+
+    serializer: Serializer = Field(default_factory=JSONSerializer)
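Because the validators and fields only trade the `pydantic.`-prefixed names for direct imports, serializer behavior should be unchanged. A quick round-trip, plus the dispatch behavior the new `__new__` preserves (a sketch against the public `prefect.serializers` API):

```python
from prefect.serializers import JSONSerializer, Serializer

s = JSONSerializer()
blob = s.dumps({"answer": 42})  # -> bytes
assert s.loads(blob) == {"answer": 42}

# The "type" field doubles as the dispatch key: parsing the base class
# with type="json" should construct a JSONSerializer via Serializer.__new__.
parsed = Serializer.parse_obj({"type": "json"})
assert isinstance(parsed, JSONSerializer)
```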
prefect/server/api/collections_data/views/aggregate-worker-metadata.json CHANGED
@@ -111,10 +111,12 @@
           "taskRoleArn": "{{ task_role_arn }}"
         },
         "tags": "{{ labels }}",
-        "taskDefinition": "{{ task_definition_arn }}"
+        "taskDefinition": "{{ task_definition_arn }}",
+        "capacityProviderStrategy": "{{ capacity_provider_strategy }}"
       },
       "configure_cloudwatch_logs": "{{ configure_cloudwatch_logs }}",
       "cloudwatch_logs_options": "{{ cloudwatch_logs_options }}",
+      "cloudwatch_logs_prefix": "{{ cloudwatch_logs_prefix }}",
       "network_configuration": "{{ network_configuration }}",
       "stream_output": "{{ stream_output }}",
       "task_start_timeout_seconds": "{{ task_start_timeout_seconds }}",
@@ -191,6 +193,14 @@
         ],
         "type": "string"
       },
+      "capacity_provider_strategy": {
+        "title": "Capacity Provider Strategy",
+        "description": "The capacity provider strategy to use when running the task. If a capacity provider strategy is specified, the selected launch type will be ignored.",
+        "type": "array",
+        "items": {
+          "$ref": "#/definitions/CapacityProvider"
+        }
+      },
       "image": {
         "title": "Image",
         "description": "The image to use for the Prefect container in the task. If this value is not null, it will override the value in the task definition. This value defaults to a Prefect base image matching your local versions.",
@@ -239,6 +249,11 @@
         "type": "string"
       }
      },
+      "cloudwatch_logs_prefix": {
+        "title": "Cloudwatch Logs Prefix",
+        "description": "When `configure_cloudwatch_logs` is enabled, this setting may be used to set a prefix for the log group. If not provided, the default prefix will be `prefect-logs_<work_pool_name>_<deployment_id>`. If `awslogs-stream-prefix` is present in `Cloudwatch logs options` this setting will be ignored.",
+        "type": "string"
+      },
       "network_configuration": {
         "title": "Network Configuration",
         "description": "When `network_configuration` is supplied it will override ECS Worker's awsvpcConfiguration that defined in the ECS task executing your workload. See the [AWS documentation](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ecs-service-awsvpcconfiguration.html) for available options.",
@@ -370,6 +385,30 @@
         "aws_secret_access_key"
       ],
       "block_schema_references": {}
+    },
+    "CapacityProvider": {
+      "title": "CapacityProvider",
+      "description": "The capacity provider strategy to use when running the task.",
+      "type": "object",
+      "properties": {
+        "capacityProvider": {
+          "title": "Capacityprovider",
+          "type": "string"
+        },
+        "weight": {
+          "title": "Weight",
+          "type": "integer"
+        },
+        "base": {
+          "title": "Base",
+          "type": "integer"
+        }
+      },
+      "required": [
+        "capacityProvider",
+        "weight",
+        "base"
+      ]
     }
   }
 }
@@ -1100,7 +1139,9 @@
       "serviceAccount": "{{ service_account_name }}",
       "maxRetries": "{{ max_retries }}",
       "timeout": "{{ timeout }}",
-      "vpcAccess": "{{ vpc_connector_name }}",
+      "vpcAccess": {
+        "connector": "{{ vpc_connector_name }}"
+      },
       "containers": [
         {
           "env": [],
@@ -1647,4 +1688,4 @@
       "type": "kubernetes"
     }
   }
-}
+}
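In practice the two new ECS fields surface as deployment job variables. A hypothetical override matching the schema above (field names from the schema; the values are invented):

```python
# Hypothetical job_variables for a deployment on an ECS work pool.
job_variables = {
    # Array of objects matching the CapacityProvider definition above;
    # when set, the selected launch type is ignored.
    "capacity_provider_strategy": [
        {"capacityProvider": "FARGATE_SPOT", "weight": 1, "base": 0},
    ],
    # Log group prefix used when configure_cloudwatch_logs is enabled.
    "cloudwatch_logs_prefix": "my-team",
}
```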
prefect/settings.py CHANGED
@@ -109,6 +109,8 @@ REMOVED_EXPERIMENTAL_FLAGS = {
     "PREFECT_EXPERIMENTAL_ENABLE_ENHANCED_DEPLOYMENT_PARAMETERS",
     "PREFECT_EXPERIMENTAL_ENABLE_EVENTS_CLIENT",
     "PREFECT_EXPERIMENTAL_WARN_EVENTS_CLIENT",
+    "PREFECT_EXPERIMENTAL_ENABLE_FLOW_RUN_INFRA_OVERRIDES",
+    "PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES",
 }
 
 
@@ -1212,6 +1214,20 @@ PREFECT_API_SERVICES_CANCELLATION_CLEANUP_LOOP_SECONDS = Setting(
 this often. Defaults to `20`.
 """
 
+PREFECT_API_SERVICES_FOREMAN_ENABLED = Setting(bool, default=True)
+"""Whether or not to start the Foreman service in the server application."""
+
+PREFECT_API_SERVICES_FOREMAN_LOOP_SECONDS = Setting(float, default=15)
+"""The number of seconds to wait between each iteration of the Foreman loop which checks
+for offline workers and updates work pool status."""
+
+PREFECT_API_SERVICES_FOREMAN_DEPLOYMENT_LAST_POLLED_TIMEOUT_SECONDS = Setting(
+    int, default=60
+)
+"""The number of seconds before a deployment is marked as not ready if it has not been
+polled."""
+
+
 PREFECT_API_DEFAULT_LIMIT = Setting(
     int,
     default=200,
@@ -1542,16 +1558,6 @@ a task server should move a task from PENDING to RUNNING very quickly, so runs s
 PENDING for a while is a sign that the task server may have crashed.
 """
 
-PREFECT_EXPERIMENTAL_ENABLE_FLOW_RUN_INFRA_OVERRIDES = Setting(bool, default=False)
-"""
-Whether or not to enable infrastructure overrides made on flow runs.
-"""
-
-PREFECT_EXPERIMENTAL_WARN_FLOW_RUN_INFRA_OVERRIDES = Setting(bool, default=True)
-"""
-Whether or not to warn infrastructure when experimental flow runs overrides are used.
-"""
-
 PREFECT_EXPERIMENTAL_ENABLE_EXTRA_RUNNER_ENDPOINTS = Setting(bool, default=False)
 """
 Whether or not to enable experimental worker webserver endpoints.
@@ -1587,6 +1593,11 @@ PREFECT_EXPERIMENTAL_ENABLE_WORK_QUEUE_STATUS = Setting(bool, default=True)
 Whether or not to enable experimental work queue status in-place of work queue health.
 """
 
+PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE = Setting(bool, default=False)
+"""
+Whether or not to enable the experimental new engine.
+"""
+
 
 # Defaults -----------------------------------------------------------------------------
 
@@ -1701,6 +1712,17 @@ PREFECT_API_SERVICES_EVENT_PERSISTER_FLUSH_INTERVAL = Setting(float, default=5,
 The maximum number of seconds between flushes of the event persister.
 """
 
+PREFECT_API_EVENTS_STREAM_OUT_ENABLED = Setting(bool, default=True)
+"""
+Whether or not to allow streaming events out via websockets.
+"""
+
+PREFECT_API_EVENTS_RELATED_RESOURCE_CACHE_TTL = Setting(
+    timedelta, default=timedelta(minutes=5)
+)
+"""
+How long to cache related resource data for emitting server-side events.
+"""
 
 # Deprecated settings ------------------------------------------------------------------
 
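Like any other Prefect setting, the new Foreman knobs can be read and scoped in code; a sketch using `temporary_settings` (the override value is arbitrary):

```python
from prefect.settings import (
    PREFECT_API_SERVICES_FOREMAN_LOOP_SECONDS,
    temporary_settings,
)

# Shipped default is 15 seconds, per the diff above.
print(PREFECT_API_SERVICES_FOREMAN_LOOP_SECONDS.value())

# Scope an override, e.g. in a test.
with temporary_settings(updates={PREFECT_API_SERVICES_FOREMAN_LOOP_SECONDS: 5}):
    assert PREFECT_API_SERVICES_FOREMAN_LOOP_SECONDS.value() == 5
```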
prefect/states.py CHANGED
@@ -486,8 +486,10 @@ class StateGroup:
 
 
 def Scheduled(
-    cls: Type[State] = State, scheduled_time: datetime.datetime = None, **kwargs
-) -> State:
+    cls: Type[State[R]] = State,
+    scheduled_time: Optional[datetime.datetime] = None,
+    **kwargs: Any,
+) -> State[R]:
     """Convenience function for creating `Scheduled` states.
 
     Returns:
@@ -503,7 +505,7 @@ def Scheduled(
     return cls(type=StateType.SCHEDULED, state_details=state_details, **kwargs)
 
 
-def Completed(cls: Type[State] = State, **kwargs) -> State:
+def Completed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Completed` states.
 
     Returns:
@@ -512,7 +514,7 @@ def Completed(cls: Type[State] = State, **kwargs) -> State:
     return cls(type=StateType.COMPLETED, **kwargs)
 
 
-def Running(cls: Type[State] = State, **kwargs) -> State:
+def Running(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Running` states.
 
     Returns:
@@ -521,7 +523,7 @@ def Running(cls: Type[State] = State, **kwargs) -> State:
     return cls(type=StateType.RUNNING, **kwargs)
 
 
-def Failed(cls: Type[State] = State, **kwargs) -> State:
+def Failed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Failed` states.
 
     Returns:
@@ -530,7 +532,7 @@ def Failed(cls: Type[State] = State, **kwargs) -> State:
     return cls(type=StateType.FAILED, **kwargs)
 
 
-def Crashed(cls: Type[State] = State, **kwargs) -> State:
+def Crashed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Crashed` states.
 
     Returns:
@@ -539,7 +541,7 @@ def Crashed(cls: Type[State] = State, **kwargs) -> State:
     return cls(type=StateType.CRASHED, **kwargs)
 
 
-def Cancelling(cls: Type[State] = State, **kwargs) -> State:
+def Cancelling(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Cancelling` states.
 
     Returns:
@@ -548,7 +550,7 @@ def Cancelling(cls: Type[State] = State, **kwargs) -> State:
     return cls(type=StateType.CANCELLING, **kwargs)
 
 
-def Cancelled(cls: Type[State] = State, **kwargs) -> State:
+def Cancelled(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Cancelled` states.
 
     Returns:
@@ -557,7 +559,7 @@ def Cancelled(cls: Type[State] = State, **kwargs) -> State:
     return cls(type=StateType.CANCELLED, **kwargs)
 
 
-def Pending(cls: Type[State] = State, **kwargs) -> State:
+def Pending(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Pending` states.
 
     Returns:
@@ -567,13 +569,13 @@ def Pending(cls: Type[State] = State, **kwargs) -> State:
 
 
 def Paused(
-    cls: Type[State] = State,
+    cls: Type[State[R]] = State,
     timeout_seconds: Optional[int] = None,
     pause_expiration_time: Optional[datetime.datetime] = None,
     reschedule: bool = False,
     pause_key: Optional[str] = None,
-    **kwargs,
-) -> State:
+    **kwargs: Any,
+) -> State[R]:
     """Convenience function for creating `Paused` states.
 
     Returns:
@@ -603,11 +605,11 @@
 
 
 def Suspended(
-    cls: Type[State] = State,
+    cls: Type[State[R]] = State,
     timeout_seconds: Optional[int] = None,
     pause_expiration_time: Optional[datetime.datetime] = None,
     pause_key: Optional[str] = None,
-    **kwargs,
+    **kwargs: Any,
 ):
     """Convenience function for creating `Suspended` states.
 
@@ -626,8 +628,10 @@
 
 
 def AwaitingRetry(
-    cls: Type[State] = State, scheduled_time: datetime.datetime = None, **kwargs
-) -> State:
+    cls: Type[State[R]] = State,
+    scheduled_time: Optional[datetime.datetime] = None,
+    **kwargs: Any,
+) -> State[R]:
     """Convenience function for creating `AwaitingRetry` states.
 
     Returns:
@@ -638,7 +642,7 @@ def AwaitingRetry(
     )
 
 
-def Retrying(cls: Type[State] = State, **kwargs) -> State:
+def Retrying(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     """Convenience function for creating `Retrying` states.
 
     Returns:
@@ -648,8 +652,10 @@ def Retrying(cls: Type[State] = State, **kwargs) -> State:
 
 
 def Late(
-    cls: Type[State] = State, scheduled_time: datetime.datetime = None, **kwargs
-) -> State:
+    cls: Type[State[R]] = State,
+    scheduled_time: Optional[datetime.datetime] = None,
+    **kwargs: Any,
+) -> State[R]:
     """Convenience function for creating `Late` states.
 
     Returns:
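These signature changes are typing-only: each helper now threads the result type variable `R` through so the return value is a `State[R]` rather than a bare `State`; runtime behavior is unchanged. For example:

```python
from prefect.states import Completed, Failed

done = Completed(message="finished cleanly")
assert done.is_completed()

oops = Failed(message="boom")
assert oops.is_failed()
```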
prefect/tasks.py CHANGED
@@ -36,6 +36,7 @@ from prefect.context import FlowRunContext, PrefectObjectRegistry
 from prefect.futures import PrefectFuture
 from prefect.results import ResultSerializer, ResultStorage
 from prefect.settings import (
+    PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE,
     PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING,
     PREFECT_TASK_DEFAULT_RETRIES,
     PREFECT_TASK_DEFAULT_RETRY_DELAY_SECONDS,
@@ -582,6 +583,22 @@ class Task(Generic[P, R]):
             self.isasync, self.name, parameters, self.viz_return_value
         )
 
+        # new engine currently only compatible with async tasks
+        if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE.value():
+            from prefect.new_task_engine import run_task
+            from prefect.utilities.asyncutils import run_sync
+
+            awaitable = run_task(
+                task=self,
+                parameters=parameters,
+                wait_for=wait_for,
+                return_type=return_type,
+            )
+            if self.isasync:
+                return awaitable
+            else:
+                return run_sync(awaitable)
+
         if (
             PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
             and not FlowRunContext.get()
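Per the in-line comment, the experimental path returns the `run_task` coroutine directly for async tasks and drives it with `run_sync` for sync ones. A hedged sketch of opting in (the flag comes from the settings diff above; the task is invented, and whether a bare task call works outside a flow depends on the experimental engine's behavior):

```python
import asyncio
import os

# Opt in before importing prefect so the flag is set when it is read.
os.environ["PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE"] = "true"

from prefect import task


@task
async def add(x: int, y: int) -> int:
    # async: the path the diff's comment says the new engine supports
    return x + y


async def main() -> None:
    # With the flag on, Task.__call__ routes through run_task() and
    # hands back an awaitable for async tasks.
    print(await add(1, 2))


if __name__ == "__main__":
    asyncio.run(main())
```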