prefect-client 3.4.5.dev4__py3-none-any.whl → 3.4.5.dev5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_build_info.py +3 -3
- prefect/assets/__init__.py +4 -0
- prefect/assets/core.py +71 -0
- prefect/assets/materialize.py +42 -0
- prefect/context.py +226 -2
- prefect/deployments/runner.py +2 -2
- prefect/runner/server.py +2 -2
- prefect/runner/submit.py +2 -2
- prefect/server/api/events.py +1 -1
- prefect/server/api/task_workers.py +1 -1
- prefect/task_engine.py +73 -25
- prefect/tasks.py +100 -3
- prefect/types/__init__.py +2 -0
- prefect/types/names.py +23 -0
- prefect/utilities/engine.py +15 -3
- {prefect_client-3.4.5.dev4.dist-info → prefect_client-3.4.5.dev5.dist-info}/METADATA +1 -1
- {prefect_client-3.4.5.dev4.dist-info → prefect_client-3.4.5.dev5.dist-info}/RECORD +19 -16
- {prefect_client-3.4.5.dev4.dist-info → prefect_client-3.4.5.dev5.dist-info}/WHEEL +0 -0
- {prefect_client-3.4.5.dev4.dist-info → prefect_client-3.4.5.dev5.dist-info}/licenses/LICENSE +0 -0
prefect/_build_info.py
CHANGED
@@ -1,5 +1,5 @@
 # Generated by versioningit
-__version__ = "3.4.5.dev4"
-__build_date__ = "2025-06-…"
-__git_commit__ = "…"
+__version__ = "3.4.5.dev5"
+__build_date__ = "2025-06-06 08:09:21.189765+00:00"
+__git_commit__ = "6fd843adf3aa002e8907b0ba1b96be0af8259b2d"
 __dirty__ = False

(An ellipsis "…" marks a value that is truncated in the source rendering of this diff.)
prefect/assets/core.py
ADDED
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from typing import Any, ClassVar, Optional
+
+from pydantic import ConfigDict, Field
+
+from prefect._internal.schemas.bases import PrefectBaseModel
+from prefect.types import URILike
+
+
+class AssetProperties(PrefectBaseModel):
+    """
+    Metadata properties to configure on an Asset
+    """
+
+    model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+    name: Optional[str] = Field(
+        default=None, description="Human readable name of the Asset."
+    )
+    url: Optional[str] = Field(
+        default=None, description="Visitable url to view the Asset."
+    )
+    description: Optional[str] = Field(
+        default=None, description="Description of the Asset."
+    )
+    owners: Optional[list[str]] = Field(
+        default=None, description="Owners of the Asset."
+    )
+
+
+class Asset(PrefectBaseModel):
+    """
+    Assets are objects that represent materialized data,
+    providing a way to track lineage and dependencies.
+    """
+
+    model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+    key: URILike
+    properties: Optional[AssetProperties] = Field(
+        default=None,
+        description="Properties of the asset. "
+        "Setting this will overwrite properties of a known asset.",
+    )
+
+    def __repr__(self) -> str:
+        return f"Asset(key={self.key!r})"
+
+    def add_metadata(self, metadata: dict[str, Any]) -> None:
+        from prefect.context import AssetContext
+
+        asset_ctx = AssetContext.get()
+        if not asset_ctx:
+            raise RuntimeError(
+                "Unable add Asset metadata when not inside of an AssetContext"
+            )
+
+        asset_ctx.add_asset_metadata(self.key, metadata)
+
+
+def add_asset_metadata(asset_key: str, metadata: dict[str, Any]) -> None:
+    from prefect.context import AssetContext
+
+    asset_ctx = AssetContext.get()
+    if not asset_ctx:
+        raise RuntimeError(
+            "Unable to call `add_asset_metadata` when not inside of an AssetContext"
+        )
+
+    asset_ctx.add_asset_metadata(asset_key, metadata)
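For orientation (not part of the diff): a minimal sketch of constructing the new models, importing them from `prefect.assets.core` as defined above. The key and property values are illustrative, not taken from the package.

```python
from prefect.assets.core import Asset, AssetProperties

# Keys must be URI-like (see the URILike type added in prefect/types/names.py below).
raw_orders = Asset(
    key="s3://example-bucket/raw/orders.csv",  # illustrative key
    properties=AssetProperties(
        name="Raw orders",
        description="Daily orders export",
        owners=["data-eng"],  # illustrative owner handle
    ),
)
```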
prefect/assets/materialize.py
ADDED

@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable, TypeVar, Union
+
+from typing_extensions import ParamSpec, Unpack
+
+from .core import Asset
+
+T = TypeVar("T")
+P = ParamSpec("P")
+R = TypeVar("R")
+
+if TYPE_CHECKING:
+    from prefect.tasks import MaterializingTask, TaskOptions
+
+
+def materialize(
+    *assets: Union[str, Asset],
+    by: str | None = None,
+    **task_kwargs: Unpack[TaskOptions],
+) -> Callable[[Callable[P, R]], MaterializingTask[P, R]]:
+    """
+    Decorator for materializing assets.
+
+    Args:
+        *assets: Assets to materialize
+        by: An optional tool that is ultimately responsible for materializing the asset e.g. "dbt" or "spark"
+        **task_kwargs: Additional task configuration
+    """
+    if not assets:
+        raise TypeError(
+            "materialize requires at least one asset argument, e.g. `@materialize(asset)`"
+        )
+
+    from prefect.tasks import MaterializingTask
+
+    def decorator(fn: Callable[P, R]) -> MaterializingTask[P, R]:
+        return MaterializingTask(
+            fn=fn, assets=assets, materialized_by=by, **task_kwargs
+        )
+
+    return decorator
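A hedged usage sketch (not from the diff): it assumes `materialize` and `add_asset_metadata` are re-exported from `prefect.assets` (the new `prefect/assets/__init__.py` is listed above but its contents are not shown); the bucket, tool, and metadata are illustrative.

```python
from prefect import flow
from prefect.assets import add_asset_metadata, materialize


@materialize("s3://example-bucket/marts/orders_summary.parquet", by="dbt")
def build_orders_summary():
    # ...build the table with your tool of choice, then attach metadata to the
    # materialization event that is emitted when the task run finishes.
    add_asset_metadata(
        "s3://example-bucket/marts/orders_summary.parquet",
        {"row_count": 1234},  # illustrative metadata payload
    )


@flow
def pipeline():
    build_orders_summary()
```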
prefect/context.py
CHANGED
@@ -7,13 +7,23 @@ For more user-accessible information about the current run, see [`prefect.runtim…
 """

 import asyncio
+import json
 import os
 import sys
 import warnings
 from collections.abc import AsyncGenerator, Generator, Mapping
 from contextlib import ExitStack, asynccontextmanager, contextmanager
 from contextvars import ContextVar, Token
-from typing import …
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Optional,
+    TypeVar,
+    Union,
+)
+from uuid import UUID

 from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
 from typing_extensions import Self

@@ -21,6 +31,7 @@ from typing_extensions import Self
 import prefect.settings
 import prefect.types._datetime
 from prefect._internal.compatibility.migration import getattr_migration
+from prefect.assets import Asset
 from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
 from prefect.client.schemas import FlowRun, TaskRun
 from prefect.events.worker import EventsWorker

@@ -48,9 +59,15 @@ if TYPE_CHECKING:
     from prefect.tasks import Task


-def serialize_context() -> dict[str, Any]:
+def serialize_context(
+    asset_ctx_kwargs: Union[dict[str, Any], None] = None,
+) -> dict[str, Any]:
     """
     Serialize the current context for use in a remote execution environment.
+
+    Optionally provide asset_ctx_kwargs to create new AssetContext, that will be used
+    in the remote execution environment. This is useful for TaskRunners, who rely on creating the
+    task run in the remote environment.
     """
     flow_run_context = EngineContext.get()
     task_run_context = TaskRunContext.get()

@@ -62,6 +79,11 @@ def serialize_context() -> dict[str, Any]:
         "task_run_context": task_run_context.serialize() if task_run_context else {},
         "tags_context": tags_context.serialize() if tags_context else {},
         "settings_context": settings_context.serialize() if settings_context else {},
+        "asset_context": AssetContext.from_task_and_inputs(
+            **asset_ctx_kwargs
+        ).serialize()
+        if asset_ctx_kwargs
+        else {},
     }


@@ -112,6 +134,9 @@ def hydrated_context(
         # Set up tags context
         if tags_context := serialized_context.get("tags_context"):
             stack.enter_context(tags(*tags_context["current_tags"]))
+        # Set up asset context
+        if asset_context := serialized_context.get("asset_context"):
+            stack.enter_context(AssetContext(**asset_context))
         yield


@@ -373,6 +398,10 @@ class EngineContext(RunContext):
     # Holds the ID of the object returned by the task run and task run state
     task_run_results: dict[int, State] = Field(default_factory=dict)

+    # Tracking information needed to track asset linage between
+    # tasks and materialization
+    task_run_assets: dict[UUID, list[Asset]] = Field(default_factory=dict)
+
     # Events worker to emit events
     events: Optional[EventsWorker] = None

@@ -443,6 +472,201 @@ class TaskRunContext(RunContext):
     )


+class AssetContext(ContextModel):
+    """
+    The asset context for a materializing task run. Contains all asset-related information needed
+    for asset event emission and downstream asset dependency propagation.
+
+    Attributes:
+        direct_asset_dependencies: Assets that this task directly depends on (from task.asset_deps)
+        downstream_assets: Assets that this task will create/materialize (from MaterializingTask.assets)
+        upstream_assets: Assets from upstream task dependencies
+        materialized_by: Tool that materialized the assets (from MaterializingTask.materialized_by)
+        task_run_id: ID of the associated task run
+        materialization_metadata: Metadata for materialized assets
+    """
+
+    direct_asset_dependencies: list[Asset] = Field(default_factory=list)
+    downstream_assets: list[Asset] = Field(default_factory=list)
+    upstream_assets: list[Asset] = Field(default_factory=list)
+    materialized_by: Optional[str] = None
+    task_run_id: Optional[UUID] = None
+    materialization_metadata: dict[str, dict[str, Any]] = Field(default_factory=dict)
+
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("asset_context")
+
+    @classmethod
+    def from_task_and_inputs(
+        cls,
+        task: "Task[Any, Any]",
+        task_run_id: UUID,
+        task_inputs: Optional[dict[str, set[Any]]] = None,
+    ) -> "AssetContext":
+        """
+        Create an AssetContext from a task and its resolved inputs.
+
+        Args:
+            task: The task instance
+            task_run_id: The task run ID
+            task_inputs: The resolved task inputs (TaskRunResult objects)
+
+        Returns:
+            Configured AssetContext
+        """
+        from prefect.client.schemas import TaskRunResult
+        from prefect.tasks import MaterializingTask
+
+        upstream_assets: list[Asset] = []
+
+        # Get upstream assets from engine context instead of TaskRunResult.assets
+        flow_ctx = FlowRunContext.get()
+        if task_inputs and flow_ctx:
+            for inputs in task_inputs.values():
+                for task_input in inputs:
+                    if isinstance(task_input, TaskRunResult):
+                        # Look up assets in the engine context
+                        task_assets = flow_ctx.task_run_assets.get(task_input.id)
+                        if task_assets:
+                            upstream_assets.extend(task_assets)
+
+        ctx = cls(
+            direct_asset_dependencies=task.asset_deps[:] if task.asset_deps else [],
+            downstream_assets=task.assets[:]
+            if isinstance(task, MaterializingTask) and task.assets
+            else [],
+            upstream_assets=upstream_assets,
+            materialized_by=task.materialized_by
+            if isinstance(task, MaterializingTask)
+            else None,
+            task_run_id=task_run_id,
+        )
+        ctx.update_tracked_assets()
+
+        return ctx
+
+    def add_asset_metadata(self, asset_key: str, metadata: dict[str, Any]) -> None:
+        """
+        Add metadata for a materialized asset.
+
+        Args:
+            asset_key: The asset key
+            metadata: Metadata dictionary to add
+        """
+
+        existing = self.materialization_metadata.get(asset_key, {})
+        self.materialization_metadata[asset_key] = existing | metadata
+
+    @staticmethod
+    def asset_as_resource(asset: Asset) -> dict[str, str]:
+        """Convert Asset to event resource format."""
+        resource = {"prefect.resource.id": asset.key}
+
+        if asset.properties:
+            properties_dict = asset.properties.model_dump(exclude_unset=True)
+
+            if "name" in properties_dict:
+                resource["prefect.resource.name"] = properties_dict["name"]
+
+            if "description" in properties_dict:
+                resource["prefect.asset.description"] = properties_dict["description"]
+
+            if "url" in properties_dict:
+                resource["prefect.asset.url"] = properties_dict["url"]
+
+            if "owners" in properties_dict:
+                resource["prefect.asset.owners"] = json.dumps(properties_dict["owners"])
+
+        return resource
+
+    @staticmethod
+    def asset_as_related(asset: Asset) -> dict[str, str]:
+        """Convert Asset to event related format."""
+        return {
+            "prefect.resource.id": asset.key,
+            "prefect.resource.role": "asset",
+        }
+
+    @staticmethod
+    def related_materialized_by(by: str) -> dict[str, str]:
+        """Create a related resource for the tool that performed the materialization"""
+        return {
+            "prefect.resource.id": by,
+            "prefect.resource.role": "asset-materialized-by",
+        }
+
+    def emit_events(self, state: State) -> None:
+        """
+        Emit asset reference and materialization events based on task completion.
+        """
+
+        from prefect.events import emit_event
+
+        if state.name == "Cached":
+            return
+        if state.is_failed():
+            event_status = "failed"
+        elif state.is_completed():
+            event_status = "succeeded"
+        else:
+            return
+
+        asset_deps_related: list[Asset] = []
+
+        # Emit reference events for direct asset dependencies
+        for asset in self.direct_asset_dependencies:
+            emit_event(
+                event=f"prefect.asset.reference.{event_status}",
+                resource=self.asset_as_resource(asset),
+                related=[],
+            )
+            asset_deps_related.append(self.asset_as_related(asset))
+
+        # Emit materialization events for downstream assets
+        if self.downstream_assets:
+            upstream_related = [self.asset_as_related(a) for a in self.upstream_assets]
+            all_related = upstream_related + asset_deps_related
+
+            if self.materialized_by:
+                all_related.append(self.related_materialized_by(self.materialized_by))
+
+            for asset in self.downstream_assets:
+                emit_event(
+                    event=f"prefect.asset.materialization.{event_status}",
+                    resource=self.asset_as_resource(asset),
+                    related=all_related,
+                    payload=self.materialization_metadata.get(asset.key),
+                )
+
+    def update_tracked_assets(self) -> None:
+        """
+        Update the flow run context with assets that should be propagated downstream.
+        """
+        if not (flow_run_context := FlowRunContext.get()):
+            return
+
+        if not self.task_run_id:
+            return
+
+        if self.downstream_assets:
+            # MaterializingTask: propagate the downstream assets (what we create)
+            assets_for_downstream = self.downstream_assets[:]
+        else:
+            # Regular task: propagate upstream assets + direct dependencies
+            assets_for_downstream = (
+                list(self.upstream_assets) + self.direct_asset_dependencies
+            )
+
+        flow_run_context.task_run_assets[self.task_run_id] = assets_for_downstream
+
+    def serialize(self: Self, include_secrets: bool = True) -> dict[str, Any]:
+        """Serialize the AssetContext for distributed execution."""
+        return self.model_dump(
+            exclude_unset=True,
+            serialize_as_any=True,
+            context={"include_secrets": include_secrets},
+        )
+
+
 class TagsContext(ContextModel):
     """
     The context for `prefect.tags` management.
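The new `asset_ctx_kwargs` hook on `serialize_context` is aimed at task runners that create the task run in a remote environment. A rough, illustrative sketch of that round trip under that assumption (the task, run ID, and empty `task_inputs` are placeholders, not taken from this diff):

```python
from uuid import uuid4

from prefect import task
from prefect.context import hydrated_context, serialize_context


@task
def transform():
    ...


# Runner side: serialize the current context plus a freshly built AssetContext
# for the task run that is about to be executed elsewhere (placeholder values).
serialized = serialize_context(
    asset_ctx_kwargs={"task": transform, "task_run_id": uuid4(), "task_inputs": {}}
)

# Remote side: re-enter everything, including the AssetContext.
with hydrated_context(serialized):
    ...
```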
prefect/deployments/runner.py
CHANGED
@@ -1222,14 +1222,14 @@ async def deploy(
                 " or specify a remote storage location for the flow with `.from_source`."
                 " If you are attempting to deploy a flow to a local process work pool,"
                 " consider using `flow.serve` instead. See the documentation for more"
-                " information: https://docs.prefect.io/latest/…
+                " information: https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes"
             )
         elif work_pool.type == "process" and not ignore_warnings:
             console.print(
                 "Looks like you're deploying to a process work pool. If you're creating a"
                 " deployment for local development, calling `.serve` on your flow is a great"
                 " way to get started. See the documentation for more information:"
-                " https://docs.prefect.io/latest/…
+                " https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes "
                 " Set `ignore_warnings=True` to suppress this message.",
                 style="yellow",
             )
prefect/runner/server.py
CHANGED
@@ -257,7 +257,7 @@ def _build_generic_endpoint_for_flows(
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/…
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 async def build_server(runner: "Runner") -> FastAPI:
     """

@@ -306,7 +306,7 @@ async def build_server(runner: "Runner") -> FastAPI:
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/…
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 def start_webserver(runner: "Runner", log_level: str | None = None) -> None:
     """
prefect/runner/submit.py
CHANGED
@@ -124,7 +124,7 @@ def submit_to_runner(
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/…
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 @sync_compatible
 async def submit_to_runner(

@@ -196,7 +196,7 @@ async def submit_to_runner(
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/…
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 @sync_compatible
 async def wait_for_submitted_runs(
prefect/server/api/events.py
CHANGED
@@ -51,7 +51,7 @@ async def create_events(
     """
     Record a batch of Events.

-    For more information, see https://docs.prefect.io/v3/…
+    For more information, see https://docs.prefect.io/v3/concepts/events.
     """
     if ephemeral_request:
         await EventsPipeline().process_events(events)

prefect/server/api/task_workers.py
CHANGED

@@ -23,7 +23,7 @@ async def read_task_workers(
     """
     Read active task workers. Optionally filter by task keys.

-    For more information, see https://docs.prefect.io/v3/…
+    For more information, see https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks.
     """

     if task_worker_filter and task_worker_filter.task_keys:
prefect/task_engine.py
CHANGED
@@ -43,6 +43,7 @@ from prefect.concurrency.v1.asyncio import concurrency as aconcurrency
 from prefect.concurrency.v1.context import ConcurrencyContext as ConcurrencyContextV1
 from prefect.concurrency.v1.sync import concurrency
 from prefect.context import (
+    AssetContext,
     AsyncClientContext,
     FlowRunContext,
     SyncClientContext,

@@ -314,10 +315,13 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
             raise RuntimeError("Engine has not started.")
         return self._client

-    def can_retry(self, …
+    def can_retry(self, exc_or_state: Exception | State[R]) -> bool:
         retry_condition: Optional[
-            Callable[["Task[P, Coroutine[Any, Any, R]]", TaskRun, State], bool]
+            Callable[["Task[P, Coroutine[Any, Any, R]]", TaskRun, State[R]], bool]
         ] = self.task.retry_condition_fn
+
+        failure_type = "exception" if isinstance(exc_or_state, Exception) else "state"
+
         if not self.task_run:
             raise ValueError("Task run is not set")
         try:

@@ -326,8 +330,8 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
                 f" {self.task.name!r}"
             )
             state = Failed(
-                data=…
-                message=f"Task run encountered unexpected …
+                data=exc_or_state,
+                message=f"Task run encountered unexpected {failure_type}: {repr(exc_or_state)}",
             )
             if asyncio.iscoroutinefunction(retry_condition):
                 should_retry = run_coro_as_sync(

@@ -449,7 +453,9 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
         else:
             result = state.data

-        link_state_to_result(…
+        link_state_to_result(new_state, result)
+        if asset_context := AssetContext.get():
+            asset_context.emit_events(new_state)

         # emit a state change event
         self._last_event = emit_task_run_state_change_event(

@@ -476,7 +482,15 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
         # otherwise, return the exception
         return self._raised

-    def handle_success(…
+    def handle_success(
+        self, result: R, transaction: Transaction
+    ) -> Union[ResultRecord[R], None, Coroutine[Any, Any, R], R]:
+        # Handle the case where the task explicitly returns a failed state, in
+        # which case we should retry the task if it has retries left.
+        if isinstance(result, State) and result.is_failed():
+            if self.handle_retry(result):
+                return None
+
         if self.task.cache_expiration is not None:
             expiration = prefect.types._datetime.now("UTC") + self.task.cache_expiration
         else:

@@ -508,16 +522,16 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
         self._return_value = result

         self._telemetry.end_span_on_success()
-        return result

-    def handle_retry(self, …
+    def handle_retry(self, exc_or_state: Exception | State[R]) -> bool:
         """Handle any task run retries.

         - If the task has retries left, and the retry condition is met, set the task to retrying and return True.
         - If the task has a retry delay, place in AwaitingRetry state with a delayed scheduled time.
         - If the task has no retries left, or the retry condition is not met, return False.
         """
-        …
+        failure_type = "exception" if isinstance(exc_or_state, Exception) else "state"
+        if self.retries < self.task.retries and self.can_retry(exc_or_state):
             if self.task.retry_delay_seconds:
                 delay = (
                     self.task.retry_delay_seconds[

@@ -535,8 +549,9 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
             new_state = Retrying()

             self.logger.info(
-                "Task run failed with …
-                …
+                "Task run failed with %s: %r - Retry %s/%s will start %s",
+                failure_type,
+                exc_or_state,
                 self.retries + 1,
                 self.task.retries,
                 str(delay) + " second(s) from now" if delay else "immediately",

@@ -552,7 +567,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
             else "No retries configured for this task."
         )
         self.logger.error(
-            f"Task run failed with …
+            f"Task run failed with {failure_type}: {exc_or_state!r} - {retry_message_suffix}",
             exc_info=True,
         )
         return False

@@ -625,6 +640,16 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
                 persist_result = settings.tasks.default_persist_result
             else:
                 persist_result = should_persist_result()
+
+            asset_context = AssetContext.get()
+            if not asset_context:
+                asset_context = AssetContext.from_task_and_inputs(
+                    task=self.task,
+                    task_run_id=self.task_run.id,
+                    task_inputs=self.task_run.task_inputs,
+                )
+            stack.enter_context(asset_context)
+
             stack.enter_context(
                 TaskRunContext(
                     task=self.task,

@@ -830,7 +855,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):

     def call_task_fn(
         self, transaction: Transaction
-    ) -> Union[…
+    ) -> Union[ResultRecord[Any], None, Coroutine[Any, Any, R], R]:
         """
         Convenience method to call the task function. Returns a coroutine if the
         task is async.

@@ -855,10 +880,13 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
             raise RuntimeError("Engine has not started.")
         return self._client

-    async def can_retry(self, …
+    async def can_retry(self, exc_or_state: Exception | State[R]) -> bool:
         retry_condition: Optional[
-            Callable[["Task[P, Coroutine[Any, Any, R]]", TaskRun, State], bool]
+            Callable[["Task[P, Coroutine[Any, Any, R]]", TaskRun, State[R]], bool]
         ] = self.task.retry_condition_fn
+
+        failure_type = "exception" if isinstance(exc_or_state, Exception) else "state"
+
         if not self.task_run:
             raise ValueError("Task run is not set")
         try:

@@ -867,8 +895,8 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
                 f" {self.task.name!r}"
             )
             state = Failed(
-                data=…
-                message=f"Task run encountered unexpected …
+                data=exc_or_state,
+                message=f"Task run encountered unexpected {failure_type}: {repr(exc_or_state)}",
             )
             if asyncio.iscoroutinefunction(retry_condition):
                 should_retry = await retry_condition(self.task, self.task_run, state)

@@ -1004,6 +1032,8 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
             result = new_state.data

         link_state_to_result(new_state, result)
+        if asset_context := AssetContext.get():
+            asset_context.emit_events(new_state)

         # emit a state change event
         self._last_event = emit_task_run_state_change_event(

@@ -1031,7 +1061,13 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
         # otherwise, return the exception
         return self._raised

-    async def handle_success(…
+    async def handle_success(
+        self, result: R, transaction: AsyncTransaction
+    ) -> Union[ResultRecord[R], None, Coroutine[Any, Any, R], R]:
+        if isinstance(result, State) and result.is_failed():
+            if await self.handle_retry(result):
+                return None
+
         if self.task.cache_expiration is not None:
             expiration = prefect.types._datetime.now("UTC") + self.task.cache_expiration
         else:

@@ -1059,19 +1095,20 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
         self.record_terminal_state_timing(terminal_state)
         await self.set_state(terminal_state)
         self._return_value = result
-
         self._telemetry.end_span_on_success()

         return result

-    async def handle_retry(self, …
+    async def handle_retry(self, exc_or_state: Exception | State[R]) -> bool:
         """Handle any task run retries.

         - If the task has retries left, and the retry condition is met, set the task to retrying and return True.
         - If the task has a retry delay, place in AwaitingRetry state with a delayed scheduled time.
         - If the task has no retries left, or the retry condition is not met, return False.
         """
-        …
+        failure_type = "exception" if isinstance(exc_or_state, Exception) else "state"
+
+        if self.retries < self.task.retries and await self.can_retry(exc_or_state):
             if self.task.retry_delay_seconds:
                 delay = (
                     self.task.retry_delay_seconds[

@@ -1089,8 +1126,9 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
             new_state = Retrying()

             self.logger.info(
-                "Task run failed with …
-                …
+                "Task run failed with %s: %r - Retry %s/%s will start %s",
+                failure_type,
+                exc_or_state,
                 self.retries + 1,
                 self.task.retries,
                 str(delay) + " second(s) from now" if delay else "immediately",

@@ -1106,7 +1144,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
             else "No retries configured for this task."
         )
         self.logger.error(
-            f"Task run failed with …
+            f"Task run failed with {failure_type}: {exc_or_state!r} - {retry_message_suffix}",
             exc_info=True,
         )
         return False

@@ -1180,6 +1218,16 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
                 persist_result = settings.tasks.default_persist_result
             else:
                 persist_result = should_persist_result()
+
+            asset_context = AssetContext.get()
+            if not asset_context:
+                asset_context = AssetContext.from_task_and_inputs(
+                    task=self.task,
+                    task_run_id=self.task_run.id,
+                    task_inputs=self.task_run.task_inputs,
+                )
+            stack.enter_context(asset_context)
+
             stack.enter_context(
                 TaskRunContext(
                     task=self.task,

@@ -1382,7 +1430,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):

     async def call_task_fn(
         self, transaction: AsyncTransaction
-    ) -> Union[…
+    ) -> Union[ResultRecord[Any], None, Coroutine[Any, Any, R], R]:
         """
         Convenience method to call the task function. Returns a coroutine if the
         task is async.
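One behavioral consequence of the `handle_success` change above: a task that returns a failed state (rather than raising) now also consults its retry policy. A small illustrative sketch:

```python
from prefect import flow, task
from prefect.states import Failed


@task(retries=2)
def flaky():
    # Returning Failed (instead of raising) is now routed through handle_retry,
    # so the task is retried while retries remain.
    return Failed(message="upstream data not ready")


@flow
def demo():
    return flaky(return_state=True)
```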
prefect/tasks.py
CHANGED
@@ -29,10 +29,20 @@ from typing import (
 )
 from uuid import UUID, uuid4

-from typing_extensions import …
+from typing_extensions import (
+    Literal,
+    ParamSpec,
+    Self,
+    Sequence,
+    TypeAlias,
+    TypedDict,
+    TypeIs,
+    Unpack,
+)

 import prefect.states
 from prefect._internal.uuid7 import uuid7
+from prefect.assets import Asset
 from prefect.cache_policies import DEFAULT, NO_CACHE, CachePolicy
 from prefect.client.orchestration import get_client
 from prefect.client.schemas import TaskRun

@@ -90,6 +100,45 @@ OneOrManyFutureOrResult: TypeAlias = Union[
 ]


+class TaskOptions(TypedDict, total=False):
+    """
+    A TypedDict representing all available task configuration options.
+
+    This can be used with `Unpack` to provide type hints for **kwargs.
+    """
+
+    name: Optional[str]
+    description: Optional[str]
+    tags: Optional[Iterable[str]]
+    version: Optional[str]
+    cache_policy: Union[CachePolicy, type[NotSet]]
+    cache_key_fn: Union[
+        Callable[["TaskRunContext", dict[str, Any]], Optional[str]], None
+    ]
+    cache_expiration: Optional[datetime.timedelta]
+    task_run_name: Optional[TaskRunNameValueOrCallable]
+    retries: Optional[int]
+    retry_delay_seconds: Union[
+        float, int, list[float], Callable[[int], list[float]], None
+    ]
+    retry_jitter_factor: Optional[float]
+    persist_result: Optional[bool]
+    result_storage: Optional[ResultStorage]
+    result_serializer: Optional[ResultSerializer]
+    result_storage_key: Optional[str]
+    cache_result_in_memory: bool
+    timeout_seconds: Union[int, float, None]
+    log_prints: Optional[bool]
+    refresh_cache: Optional[bool]
+    on_completion: Optional[list[StateHookCallable]]
+    on_failure: Optional[list[StateHookCallable]]
+    on_rollback: Optional[list[Callable[["Transaction"], None]]]
+    on_commit: Optional[list[Callable[["Transaction"], None]]]
+    retry_condition_fn: Optional[Callable[["Task[..., Any]", TaskRun, State], bool]]
+    viz_return_value: Any
+    asset_deps: Optional[list[Union[Asset, str]]]
+
+
 def task_input_hash(
     context: "TaskRunContext", arguments: dict[str, Any]
 ) -> Optional[str]:

@@ -311,6 +360,7 @@ class Task(Generic[P, R]):
             should end as failed. Defaults to `None`, indicating the task should always continue
             to its retry policy.
         viz_return_value: An optional value to return when the task dependency tree is visualized.
+        asset_deps: An optional list of upstream assets that this task depends on.
     """

     # NOTE: These parameters (types, defaults, and docstrings) should be duplicated

@@ -354,6 +404,7 @@ class Task(Generic[P, R]):
             Callable[["Task[..., Any]", TaskRun, State], bool]
         ] = None,
         viz_return_value: Optional[Any] = None,
+        asset_deps: Optional[list[Union[str, Asset]]] = None,
     ):
         # Validate if hook passed is list and contains callables
         hook_categories = [on_completion, on_failure]

@@ -547,6 +598,14 @@ class Task(Generic[P, R]):
         self.retry_condition_fn = retry_condition_fn
         self.viz_return_value = viz_return_value

+        from prefect.assets import Asset
+
+        self.asset_deps: list[Asset] = (
+            [Asset(key=a) if isinstance(a, str) else a for a in asset_deps]
+            if asset_deps
+            else []
+        )
+
     @property
     def ismethod(self) -> bool:
         return hasattr(self.fn, "__prefect_self__")

@@ -617,6 +676,7 @@ class Task(Generic[P, R]):
             Callable[["Task[..., Any]", TaskRun, State], bool]
         ] = None,
         viz_return_value: Optional[Any] = None,
+        asset_deps: Optional[list[Union[str, Asset]]] = None,
     ) -> "Task[P, R]":
         """
         Create a new task from the current object, updating provided options.

@@ -750,6 +810,7 @@ class Task(Generic[P, R]):
             on_failure=on_failure or self.on_failure_hooks,
             retry_condition_fn=retry_condition_fn or self.retry_condition_fn,
             viz_return_value=viz_return_value or self.viz_return_value,
+            asset_deps=asset_deps or self.asset_deps,
         )

     def on_completion(self, fn: StateHookCallable) -> StateHookCallable:

@@ -887,7 +948,9 @@ class Task(Generic[P, R]):
         deferred: bool = False,
     ) -> TaskRun:
         from prefect.utilities._engine import dynamic_key_for_task_run
-        from prefect.utilities.engine import …
+        from prefect.utilities.engine import (
+            collect_task_run_inputs_sync,
+        )

         if flow_run_context is None:
             flow_run_context = FlowRunContext.get()

@@ -927,7 +990,7 @@ class Task(Generic[P, R]):

         store = await ResultStore(
             result_storage=await get_or_create_default_task_scheduling_storage()
-        ).update_for_task(…
+        ).update_for_task(self)
         context = serialize_context()
         data: dict[str, Any] = {"context": context}
         if parameters:

@@ -963,6 +1026,7 @@ class Task(Generic[P, R]):
             else None
         )
         task_run_id = id or uuid7()
+
         state = prefect.states.Pending(
             state_details=StateDetails(
                 task_run_id=task_run_id,

@@ -1666,6 +1730,7 @@ def task(
     on_failure: Optional[list[StateHookCallable]] = None,
     retry_condition_fn: Literal[None] = None,
     viz_return_value: Any = None,
+    asset_deps: Optional[list[Union[str, Asset]]] = None,
 ) -> Callable[[Callable[P, R]], Task[P, R]]: ...


@@ -1701,6 +1766,7 @@ def task(
     on_failure: Optional[list[StateHookCallable]] = None,
     retry_condition_fn: Optional[Callable[[Task[P, R], TaskRun, State], bool]] = None,
     viz_return_value: Any = None,
+    asset_deps: Optional[list[Union[str, Asset]]] = None,
 ) -> Callable[[Callable[P, R]], Task[P, R]]: ...


@@ -1737,6 +1803,7 @@ def task(
     on_failure: Optional[list[StateHookCallable]] = None,
     retry_condition_fn: Optional[Callable[[Task[P, Any], TaskRun, State], bool]] = None,
     viz_return_value: Any = None,
+    asset_deps: Optional[list[Union[str, Asset]]] = None,
 ) -> Callable[[Callable[P, R]], Task[P, R]]: ...


@@ -1770,6 +1837,7 @@ def task(
     on_failure: Optional[list[StateHookCallable]] = None,
     retry_condition_fn: Optional[Callable[[Task[P, Any], TaskRun, State], bool]] = None,
     viz_return_value: Any = None,
+    asset_deps: Optional[list[Union[str, Asset]]] = None,
 ):
     """
     Decorator to designate a function as a task in a Prefect workflow.

@@ -1830,6 +1898,7 @@ def task(
             should end as failed. Defaults to `None`, indicating the task should always continue
             to its retry policy.
         viz_return_value: An optional value to return when the task dependency tree is visualized.
+        asset_deps: An optional list of upstream assets that this task depends on.

     Returns:
         A callable `Task` object which, when called, will submit the task for execution.

@@ -1906,6 +1975,7 @@ def task(
             on_failure=on_failure,
             retry_condition_fn=retry_condition_fn,
             viz_return_value=viz_return_value,
+            asset_deps=asset_deps,
         )
     else:
         return cast(

@@ -1935,5 +2005,32 @@ def task(
                 on_failure=on_failure,
                 retry_condition_fn=retry_condition_fn,
                 viz_return_value=viz_return_value,
+                asset_deps=asset_deps,
             ),
         )
+
+
+class MaterializingTask(Task[P, R]):
+    """
+    A task that materializes Assets.
+
+    Args:
+        assets: List of Assets that this task materializes (can be str or Asset)
+        materialized_by: An optional tool that materialized the asset e.g. "dbt" or "spark"
+        **task_kwargs: All other Task arguments
+    """
+
+    def __init__(
+        self,
+        fn: Callable[P, R],
+        *,
+        assets: Sequence[Union[str, Asset]],
+        materialized_by: str | None = None,
+        **task_kwargs: Unpack[TaskOptions],
+    ):
+        super().__init__(fn=fn, **task_kwargs)
+
+        self.assets: list[Asset] = [
+            Asset(key=a) if isinstance(a, str) else a for a in assets
+        ]
+        self.materialized_by = materialized_by
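The `TaskOptions` TypedDict exists mainly to type `**kwargs` that are forwarded to `@task` (and to `MaterializingTask`). A sketch of that pattern; the helper name and asset key are illustrative:

```python
from typing_extensions import Unpack

from prefect import task
from prefect.tasks import TaskOptions


def etl_task(**options: Unpack[TaskOptions]):
    # Apply team defaults, then forward the typed options to @task.
    options.setdefault("retries", 1)
    return task(**options)


@etl_task(tags={"etl"}, asset_deps=["s3://example-bucket/raw/orders.csv"])
def load_orders():
    ...
```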
prefect/types/__init__.py
CHANGED
@@ -14,6 +14,7 @@ from .names import (
     BANNED_CHARACTERS,
     WITHOUT_BANNED_CHARACTERS,
     MAX_VARIABLE_NAME_LENGTH,
+    URILike,
 )
 from pydantic import (
     BeforeValidator,

@@ -219,4 +220,5 @@ __all__ = [
     "StatusCode",
     "StrictVariableValue",
     "TaskRetryDelaySeconds",
+    "URILike",
 ]
prefect/types/names.py
CHANGED
@@ -137,3 +137,26 @@ VariableName = Annotated[
         examples=["my_variable"],
     ),
 ]
+
+
+# URI validation
+URI_REGEX = re.compile(r"^[a-z0-9]+://")
+
+
+def validate_uri(value: str) -> str:
+    """Validate that a string is a valid URI with lowercase protocol."""
+    if not URI_REGEX.match(value):
+        raise ValueError(
+            "Key must be a valid URI, e.g. storage://bucket/folder/asset.csv"
+        )
+    return value
+
+
+URILike = Annotated[
+    str,
+    AfterValidator(validate_uri),
+    Field(
+        description="A URI-like string with a lowercase protocol",
+        examples=["s3://bucket/folder/data.csv", "postgres://dbtable"],
+    ),
+]
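Asset keys are validated against this `URILike` type. A quick sketch of what passes and fails, using pydantic's `TypeAdapter` (pydantic v2 is already a prefect dependency):

```python
from pydantic import TypeAdapter, ValidationError

from prefect.types import URILike

adapter = TypeAdapter(URILike)

print(adapter.validate_python("s3://bucket/folder/data.csv"))  # passes: lowercase scheme + "://"

try:
    adapter.validate_python("Not-A-URI")  # fails: no lowercase "<scheme>://" prefix
except ValidationError as exc:
    print(exc.errors()[0]["msg"])
```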
prefect/utilities/engine.py
CHANGED
@@ -80,7 +80,11 @@ async def collect_task_run_inputs(expr: Any, max_depth: int = -1) -> set[TaskRun…
             inputs.add(TaskRunResult(id=obj.task_run_id))
         elif isinstance(obj, State):
             if obj.state_details.task_run_id:
-                inputs.add(…
+                inputs.add(
+                    TaskRunResult(
+                        id=obj.state_details.task_run_id,
+                    )
+                )
         # Expressions inside quotes should not be traversed
         elif isinstance(obj, quote):
             raise StopVisiting

@@ -118,10 +122,18 @@ def collect_task_run_inputs_sync(

     def add_futures_and_states_to_inputs(obj: Any) -> None:
         if isinstance(obj, future_cls) and hasattr(obj, "task_run_id"):
-            inputs.add(…
+            inputs.add(
+                TaskRunResult(
+                    id=obj.task_run_id,
+                )
+            )
         elif isinstance(obj, State):
             if obj.state_details.task_run_id:
-                inputs.add(…
+                inputs.add(
+                    TaskRunResult(
+                        id=obj.state_details.task_run_id,
+                    )
+                )
         # Expressions inside quotes should not be traversed
         elif isinstance(obj, quote):
             raise StopVisiting
{prefect_client-3.4.5.dev4.dist-info → prefect_client-3.4.5.dev5.dist-info}/RECORD
CHANGED

@@ -1,7 +1,7 @@
 prefect/.prefectignore,sha256=awSprvKT0vI8a64mEOLrMxhxqcO-b0ERQeYpA2rNKVQ,390
 prefect/__init__.py,sha256=iCdcC5ZmeewikCdnPEP6YBAjPNV5dvfxpYCTpw30Hkw,3685
 prefect/__main__.py,sha256=WFjw3kaYJY6pOTA7WDOgqjsz8zUEUZHCcj3P5wyVa-g,66
-prefect/_build_info.py,sha256=…
+prefect/_build_info.py,sha256=7KSOuV8CcpDnGddMRTZieQOUtWaRj9WomC01JHoAT-U,185
 prefect/_result_records.py,sha256=S6QmsODkehGVSzbMm6ig022PYbI6gNKz671p_8kBYx4,7789
 prefect/_versioning.py,sha256=YqR5cxXrY4P6LM1Pmhd8iMo7v_G2KJpGNdsf4EvDFQ0,14132
 prefect/_waiters.py,sha256=Ia2ITaXdHzevtyWIgJoOg95lrEXQqNEOquHvw3T33UQ,9026

@@ -9,7 +9,7 @@ prefect/agent.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
 prefect/artifacts.py,sha256=dMBUOAWnUamzjb5HSqwB5-GR2Qb-Gxee26XG5NDCUuw,22720
 prefect/automations.py,sha256=ZzPxn2tINdlXTQo805V4rIlbXuNWxd7cdb3gTJxZIeY,12567
 prefect/cache_policies.py,sha256=jH1aDW6vItTcsEytuTCrNYyjbq87IQPwdOgF0yxiUts,12749
-prefect/context.py,sha256=…
+prefect/context.py,sha256=yNM-ATYOuYT80xbsK0NLEWJhv-dtScSepkNuUUZxkrM,31946
 prefect/engine.py,sha256=uB5JN4l045i5JTlRQNT1x7MwlSiGQ5Bop2Q6jHHOgxY,3699
 prefect/exceptions.py,sha256=wZLQQMRB_DyiYkeEdIC5OKwbba5A94Dlnics-lrWI7A,11581
 prefect/filesystems.py,sha256=v5YqGB4uXf9Ew2VuB9VCSkawvYMMVvEtZf7w1VmAmr8,18036

@@ -24,11 +24,11 @@ prefect/results.py,sha256=Amm3TQu8U_oakSn__tCogIJ5DsTj0w_kLzuENWsxK6A,36824
 prefect/schedules.py,sha256=dhq4OhImRvcmtxF7UH1m8RbwYdHT5RQsp_FrxVXfODE,7289
 prefect/serializers.py,sha256=lU9A1rGEfAfhr8nTl3rf-K7ED78QNShXOrmRBhgNk3Y,9566
 prefect/states.py,sha256=rh7l1bnIYpTXdlXt5nnpz66y9KLjBWAJrN9Eo5RwgQs,26023
-prefect/task_engine.py,sha256=…
+prefect/task_engine.py,sha256=fOaEgusqNX0kqjOqG46nLUJc2prqVHvjFmqum0DTrHA,64956
 prefect/task_runners.py,sha256=ptgE5wuXg_IVHM0j7d6l7ELAVg3SXSy4vggnoHRF8dA,17040
 prefect/task_runs.py,sha256=7LIzfo3fondCyEUpU05sYFN5IfpZigBDXrhG5yc-8t0,9039
 prefect/task_worker.py,sha256=RifZ3bOl6ppoYPiOAd4TQp2_GEw9eDQoW483rq1q52Q,20805
-prefect/tasks.py,sha256=…
+prefect/tasks.py,sha256=SKEETA99SOeVlZW4Cw0TOruPNAb4iFIk8xYqkwU5IeI,78282
 prefect/transactions.py,sha256=uIoPNudzJzH6NrMJhrgr5lyh6JxOJQqT1GvrXt69yNw,26068
 prefect/variables.py,sha256=dCK3vX7TbkqXZhnNT_v7rcGh3ISRqoR6pJVLpoll3Js,8342
 prefect/_experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -71,6 +71,9 @@ prefect/_internal/schemas/serializers.py,sha256=G_RGHfObjisUiRvd29p-zc6W4bwt5rE1
 prefect/_internal/schemas/validators.py,sha256=h5LL6WuXf4rMmLHsYFflmJBlwqi5c7y0tYibMJzJANM,16933
 prefect/_vendor/croniter/__init__.py,sha256=NUFzdbyPcTQhIOFtzmFM0nbClAvBbKh2mlnTBa6NfHU,523
 prefect/_vendor/croniter/croniter.py,sha256=eJ2HzStNAYV-vNiLOgDXl4sYWWHOsSA0dgwbkQoguhY,53009
+prefect/assets/__init__.py,sha256=-BAzycfydjD0eKRdpTiGXKxU66-yZX7CUh3Hot__PY4,203
+prefect/assets/core.py,sha256=STeN02bYU1DkcmLn1FfFyqrwfUqW2PGGdiAcj86vjP4,2050
+prefect/assets/materialize.py,sha256=GcHn1HEbCpExka0IOOz2b_2ZsJFROIo5y7DCP5GjpI8,1143
 prefect/blocks/__init__.py,sha256=D0hB72qMfgqnBB2EMZRxUxlX9yLfkab5zDChOwJZmkY,220
 prefect/blocks/abstract.py,sha256=mpOAWopSR_RrzdxeurBTXVSKisP8ne-k8LYos-tp7go,17021
 prefect/blocks/core.py,sha256=iP-g6guW9HFkt-sFpgH8WCyWhwnH5zIoUJuI2ykImG0,62894

@@ -140,7 +143,7 @@ prefect/deployments/__init__.py,sha256=_wb7NxDKhq11z9MjYsPckmT3o6MRhGLRgCV9TmvYt
 prefect/deployments/base.py,sha256=YY7g8MN6qzjNEjEA8wQXPxCrd47WnACIUeSRtI4nrEk,11849
 prefect/deployments/deployments.py,sha256=K3Rgnpjxo_T8I8LMwlq24OKqZiZBTE8-YnPg-YGUStM,171
 prefect/deployments/flow_runs.py,sha256=NYe-Bphsy6ENLqSSfywQuX5cRZt-uVgzqGmOsf3Sqw4,7643
-prefect/deployments/runner.py,sha256=…
+prefect/deployments/runner.py,sha256=nRXloAo5j56rbTmr-MU8oYOVoXodvEauLAH_07vtcHM,56734
 prefect/deployments/schedules.py,sha256=2eL1-w8qXtwKVkgfUK7cuamwpKK3X6tN1QYTDa_gWxU,2190
 prefect/deployments/steps/__init__.py,sha256=Dlz9VqMRyG1Gal8dj8vfGpPr0LyQhZdvcciozkK8WoY,206
 prefect/deployments/steps/core.py,sha256=ulSgBFSx1lhBt1fP-UxebrernkumBDlympR6IPffV1g,6900

@@ -188,9 +191,9 @@ prefect/logging/logging.yml,sha256=G5hFJ57Vawz40_w8tDdhqq00dp103OvVDVmWrSQeQcQ,3
 prefect/runner/__init__.py,sha256=pQBd9wVrUVUDUFJlgiweKSnbahoBZwqnd2O2jkhrULY,158
 prefect/runner/_observers.py,sha256=PpyXQL5bjp86AnDFEzcFPS5ayL6ExqcYgyuBMMQCO9Q,2183
 prefect/runner/runner.py,sha256=q_3l2awvZATTTgVW3MYiElWHRWw5_ZIliUN9Ltt9d9M,59591
-prefect/runner/server.py,sha256=…
+prefect/runner/server.py,sha256=5vMIJcgunjiDVzJEig09yOP8EbhcW6s-9zNUt101b44,11994
 prefect/runner/storage.py,sha256=n-65YoEf7KNVInnmMPeP5TVFJOa2zOS8w9en9MHi6uo,31328
-prefect/runner/submit.py,sha256=…
+prefect/runner/submit.py,sha256=b5n1M12DFQsxo6FazZnDbblRcIE7H3xrpecDMb4CjJY,9512
 prefect/runner/utils.py,sha256=19DbhyiV6nvSpTXmnWlt7qPNt1jrz1jscznYrRVGurw,3413
 prefect/runtime/__init__.py,sha256=JswiTlYRup2zXOYu8AqJ7czKtgcw9Kxo0tTbS6aWCqY,407
 prefect/runtime/deployment.py,sha256=0A_cUVpYiFk3ciJw2ixy95dk9xBJcjisyF69pakSCcQ,5091

@@ -211,7 +214,7 @@ prefect/server/api/concurrency_limits_v2.py,sha256=PGjG7W2Z65OojNTP0ezFu2z69plXo
 prefect/server/api/csrf_token.py,sha256=BwysSjQAhre7O0OY_LF3ZcIiO53FdMQroNT11Q6OcOM,1344
 prefect/server/api/dependencies.py,sha256=VujfcIGn41TGJxUunFHVabY5hE-6nY6uSHyhNFj8PdI,6634
 prefect/server/api/deployments.py,sha256=ppYA3b2csnw32-SbOXz5Dm_IsnmPKczNiSbqCzusFKI,39332
-prefect/server/api/events.py,sha256=…
+prefect/server/api/events.py,sha256=mUTv5ZNxiRsEOpzq8fpfCkLpPasjt-ROUAowA5eFbDE,9900
 prefect/server/api/flow_run_states.py,sha256=lIdxVE9CqLgtDCuH9bTaKkzHNL81FPrr11liPzvONrw,1661
 prefect/server/api/flow_runs.py,sha256=Lmb165fLbN4DioxjxgDYaAJ5Qxj771iRYaqn-hYq9KM,33744
 prefect/server/api/flows.py,sha256=Bz0ISh-9oY0W1X3mqA631_8678pQ6tuRGMpSgWAfxOc,7018

@@ -223,7 +226,7 @@ prefect/server/api/saved_searches.py,sha256=UjoqLLe245QVIs6q5Vk4vdODCOoYzciEEjhi
 prefect/server/api/server.py,sha256=xSi2km9KhhHPHSKEFHVntii0hRz2OINtB5zCUNajt6A,33356
 prefect/server/api/task_run_states.py,sha256=e63OPpxPudv_CIB5oKr8Z8rfQ-Osjm9Zq0iHe8obnMo,1647
 prefect/server/api/task_runs.py,sha256=86lXKGUJJSElhkVcxX-kbjctrNe98nUe3U0McDCfTMw,13904
-prefect/server/api/task_workers.py,sha256=…
+prefect/server/api/task_workers.py,sha256=bFHWifk7IwWF3iPu_3HwKu0vLRrxHg42SZU7vYWOw9g,1061
 prefect/server/api/templates.py,sha256=92bLFfcahZUp5PVNTZPjl8uJSDj4ZYRTVdmTzZXkERg,1027
 prefect/server/api/validation.py,sha256=HxSNyH8yb_tI-kOfjXESRjJp6WQK6hYWBJsaBxUvY34,14490
 prefect/server/api/variables.py,sha256=SJaKuqInfQIEdMlJOemptBDN43KLFhlf_u9QwupDu7A,6185

@@ -278,10 +281,10 @@ prefect/telemetry/logging.py,sha256=ktIVTXbdZ46v6fUhoHNidFrpvpNJR-Pj-hQ4V9b40W4,
 prefect/telemetry/processors.py,sha256=jw6j6LviOVxw3IBJe7cSjsxFk0zzY43jUmy6C9pcfCE,2272
 prefect/telemetry/run_telemetry.py,sha256=_FbjiPqPemu4xvZuI2YBPwXeRJ2BcKRJ6qgO4UMzKKE,8571
 prefect/telemetry/services.py,sha256=DxgNNDTeWNtHBtioX8cjua4IrCbTiJJdYecx-gugg-w,2358
-prefect/types/__init__.py,sha256=…
+prefect/types/__init__.py,sha256=iJzZLnK1qQuZUExF4_4xRLr0TMgb3uKfR5HQ1cjQ_0w,6066
 prefect/types/_datetime.py,sha256=ZE-4YK5XJuyxnp5pqldZwtIjkxCpxDGnCSfZiTl7-TU,7566
 prefect/types/entrypoint.py,sha256=2FF03-wLPgtnqR_bKJDB2BsXXINPdu8ptY9ZYEZnXg8,328
-prefect/types/names.py,sha256=…
+prefect/types/names.py,sha256=dGXNrP9nibQTm4hOBOpaQebKm3Avf3OGM5MH4M5BUKc,4013
 prefect/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 prefect/utilities/_ast.py,sha256=sgEPUWElih-3cp4PoAy1IOyPtu8E27lL0Dldf3ijnYY,4905
 prefect/utilities/_deprecated.py,sha256=b3pqRSoFANdVJAc8TJkygBcP-VjZtLJUxVIWC7kwspI,1303

@@ -295,7 +298,7 @@ prefect/utilities/compat.py,sha256=nnPA3lf2f4Y-l645tYFFNmj5NDPaYvjqa9pbGKZ3WKE,5
 prefect/utilities/context.py,sha256=23SDMgdt07SjmB1qShiykHfGgiv55NBzdbMXM3fE9CI,1447
 prefect/utilities/dispatch.py,sha256=u6GSGSO3_6vVoIqHVc849lsKkC-I1wUl6TX134GwRBo,6310
 prefect/utilities/dockerutils.py,sha256=6DLVyzE195IzeQSWERiK1t3bDMnYBLe0zXIpMQ4r0c0,21659
-prefect/utilities/engine.py,sha256=…
+prefect/utilities/engine.py,sha256=Ltu-Elpb6AjxPumjyqfl3yb2r61UMxO-l6zHXAZ5erM,29190
 prefect/utilities/filesystem.py,sha256=Pwesv71PGFhf3lPa1iFyMqZZprBjy9nEKCVxTkf_hXw,5710
 prefect/utilities/generics.py,sha256=o77e8a5iwmrisOf42wLp2WI9YvSw2xDW4vFdpdEwr3I,543
 prefect/utilities/hashing.py,sha256=7jRy26s46IJAFRmVnCnoK9ek9N4p_UfXxQQvu2tW6dM,2589

@@ -322,7 +325,7 @@ prefect/workers/cloud.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
 prefect/workers/process.py,sha256=Yi5D0U5AQ51wHT86GdwtImXSefe0gJf3LGq4r4z9zwM,11090
 prefect/workers/server.py,sha256=2pmVeJZiVbEK02SO6BEZaBIvHMsn6G8LzjW8BXyiTtk,1952
 prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
-prefect_client-3.4.5.…
-prefect_client-3.4.5.…
-prefect_client-3.4.5.…
-prefect_client-3.4.5.…
+prefect_client-3.4.5.dev5.dist-info/METADATA,sha256=9z_e5eh2YeZ0eSZy3INLBLkvYZSC9D-qbfrnJJKht2g,7472
+prefect_client-3.4.5.dev5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+prefect_client-3.4.5.dev5.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+prefect_client-3.4.5.dev5.dist-info/RECORD,,
{prefect_client-3.4.5.dev4.dist-info → prefect_client-3.4.5.dev5.dist-info}/WHEEL
RENAMED
File without changes

{prefect_client-3.4.5.dev4.dist-info → prefect_client-3.4.5.dev5.dist-info}/licenses/LICENSE
RENAMED
File without changes