prefect-client 3.4.4.dev1__py3-none-any.whl → 3.4.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_build_info.py +3 -3
- prefect/_internal/schemas/validators.py +15 -1
- prefect/assets/__init__.py +4 -0
- prefect/assets/core.py +75 -0
- prefect/assets/materialize.py +42 -0
- prefect/blocks/notifications.py +22 -3
- prefect/client/schemas/objects.py +3 -3
- prefect/context.py +239 -2
- prefect/deployments/runner.py +2 -2
- prefect/events/clients.py +2 -2
- prefect/runner/server.py +2 -2
- prefect/runner/submit.py +2 -2
- prefect/server/api/events.py +1 -1
- prefect/server/api/task_workers.py +1 -1
- prefect/server/api/work_queues.py +3 -3
- prefect/settings/models/tasks.py +5 -0
- prefect/task_engine.py +73 -25
- prefect/tasks.py +130 -30
- prefect/types/__init__.py +2 -0
- prefect/types/_datetime.py +2 -2
- prefect/types/names.py +23 -0
- prefect/utilities/callables.py +13 -11
- prefect/utilities/engine.py +15 -3
- {prefect_client-3.4.4.dev1.dist-info → prefect_client-3.4.5.dist-info}/METADATA +2 -2
- {prefect_client-3.4.4.dev1.dist-info → prefect_client-3.4.5.dist-info}/RECORD +27 -24
- {prefect_client-3.4.4.dev1.dist-info → prefect_client-3.4.5.dist-info}/WHEEL +0 -0
- {prefect_client-3.4.4.dev1.dist-info → prefect_client-3.4.5.dist-info}/licenses/LICENSE +0 -0
prefect/_build_info.py
CHANGED
@@ -1,5 +1,5 @@
 # Generated by versioningit
-__version__ = "3.4.
-__build_date__ = "2025-
-__git_commit__ = "
+__version__ = "3.4.5"
+__build_date__ = "2025-06-07 02:24:06.378793+00:00"
+__git_commit__ = "df37c8cf91c227486947f371a11d766819ee7163"
 __dirty__ = False
prefect/_internal/schemas/validators.py
CHANGED
@@ -402,6 +402,18 @@ def validate_compressionlib(value: str) -> str:
 
 
 # TODO: if we use this elsewhere we can change the error message to be more generic
+@overload
+def list_length_50_or_less(v: int) -> int: ...
+
+
+@overload
+def list_length_50_or_less(v: float) -> float: ...
+
+
+@overload
+def list_length_50_or_less(v: list[int]) -> list[int]: ...
+
+
 @overload
 def list_length_50_or_less(v: list[float]) -> list[float]: ...
 
@@ -410,7 +422,9 @@ def list_length_50_or_less(v: list[float]) -> list[float]: ...
 def list_length_50_or_less(v: None) -> None: ...
 
 
-def list_length_50_or_less(
+def list_length_50_or_less(
+    v: Optional[int | float | list[int] | list[float]],
+) -> Optional[int | float | list[int] | list[float]]:
     if isinstance(v, list) and (len(v) > 50):
         raise ValueError("Can not configure more than 50 retry delays per task.")
     return v
prefect/assets/core.py
ADDED
@@ -0,0 +1,75 @@
+from __future__ import annotations
+
+from typing import Any, ClassVar, Optional
+
+from pydantic import ConfigDict, Field
+
+from prefect._internal.schemas.bases import PrefectBaseModel
+from prefect.types import URILike
+
+
+class AssetProperties(PrefectBaseModel):
+    """
+    Metadata properties to configure on an Asset
+    """
+
+    model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+    name: Optional[str] = Field(
+        default=None, description="Human readable name of the Asset."
+    )
+    url: Optional[str] = Field(
+        default=None, description="Visitable url to view the Asset."
+    )
+    description: Optional[str] = Field(
+        default=None, description="Description of the Asset."
+    )
+    owners: Optional[list[str]] = Field(
+        default=None, description="Owners of the Asset."
+    )
+
+
+class Asset(PrefectBaseModel):
+    """
+    Assets are objects that represent materialized data,
+    providing a way to track lineage and dependencies.
+    """
+
+    model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+    key: URILike
+    properties: Optional[AssetProperties] = Field(
+        default=None,
+        description="Properties of the asset. "
+        "Setting this will overwrite properties of a known asset.",
+    )
+
+    def __repr__(self) -> str:
+        return f"Asset(key={self.key!r})"
+
+    def __hash__(self) -> int:
+        return hash(self.key)
+
+    def add_metadata(self, metadata: dict[str, Any]) -> None:
+        from prefect.context import AssetContext
+
+        asset_ctx = AssetContext.get()
+        if not asset_ctx:
+            raise RuntimeError(
+                "Unable add Asset metadata when not inside of an AssetContext"
+            )
+
+        asset_ctx.add_asset_metadata(self.key, metadata)
+
+
+def add_asset_metadata(asset: str | Asset, metadata: dict[str, Any]) -> None:
+    from prefect.context import AssetContext
+
+    asset_ctx = AssetContext.get()
+    if not asset_ctx:
+        raise RuntimeError(
+            "Unable to call `add_asset_metadata` when not inside of an AssetContext"
+        )
+
+    asset_key = asset if isinstance(asset, str) else asset.key
+    asset_ctx.add_asset_metadata(asset_key, metadata)
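For orientation, here is a minimal sketch of constructing the new Asset model. The key and property values are hypothetical, and the import assumes prefect.assets re-exports these names via the new __init__.py (not shown in full in this diff).

from prefect.assets import Asset, AssetProperties  # assumes re-export from prefect/assets/__init__.py

orders = Asset(
    key="s3://my-bucket/processed/orders.parquet",  # hypothetical URI-like key
    properties=AssetProperties(
        name="Processed orders",
        description="Daily cleaned orders table",
        owners=["data-eng"],
    ),
)

# Asset is frozen and hashes on its key, so instances can be collected in sets.
print(repr(orders))  # Asset(key='s3://my-bucket/processed/orders.parquet')

Note that Asset.add_metadata and the module-level add_asset_metadata raise RuntimeError unless an AssetContext is active, i.e. when called inside a materializing task run.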
prefect/assets/materialize.py
ADDED
@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable, TypeVar, Union
+
+from typing_extensions import ParamSpec, Unpack
+
+from .core import Asset
+
+T = TypeVar("T")
+P = ParamSpec("P")
+R = TypeVar("R")
+
+if TYPE_CHECKING:
+    from prefect.tasks import MaterializingTask, TaskOptions
+
+
+def materialize(
+    *assets: Union[str, Asset],
+    by: str | None = None,
+    **task_kwargs: Unpack[TaskOptions],
+) -> Callable[[Callable[P, R]], MaterializingTask[P, R]]:
+    """
+    Decorator for materializing assets.
+
+    Args:
+        *assets: Assets to materialize
+        by: An optional tool that is ultimately responsible for materializing the asset e.g. "dbt" or "spark"
+        **task_kwargs: Additional task configuration
+    """
+    if not assets:
+        raise TypeError(
+            "materialize requires at least one asset argument, e.g. `@materialize(asset)`"
+        )
+
+    from prefect.tasks import MaterializingTask
+
+    def decorator(fn: Callable[P, R]) -> MaterializingTask[P, R]:
+        return MaterializingTask(
+            fn=fn, assets=assets, materialized_by=by, **task_kwargs
+        )
+
+    return decorator
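A hedged usage sketch of the decorator follows; the asset key and flow body are hypothetical, and the imports again assume prefect.assets re-exports materialize and add_asset_metadata.

from prefect import flow
from prefect.assets import Asset, add_asset_metadata, materialize

orders = Asset(key="s3://my-bucket/processed/orders.parquet")  # hypothetical key

@materialize(orders, by="dbt")
def build_orders_table():
    # ... produce the parquet file ...
    # Metadata may only be attached to assets that were passed to @materialize:
    add_asset_metadata(orders, {"row_count": 10_000})

@flow
def pipeline():
    build_orders_table()

Because `by` is keyword-only and **task_kwargs is forwarded to MaterializingTask, ordinary task options (such as retries) can be passed through the decorator as well.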
prefect/blocks/notifications.py
CHANGED
@@ -927,12 +927,31 @@ class SendgridEmail(AbstractAppriseNotificationBlock):
             NotifySendGrid,  # pyright: ignore[reportUnknownVariableType] incomplete type hints in apprise
         )
 
-
-
+        self._NotifySendGrid = NotifySendGrid  # Cache the working import
+        url = self._build_sendgrid_url()
+        self._start_apprise_client(url)
+
+    def _build_sendgrid_url(self) -> SecretStr:
+        """Build the SendGrid URL with current to_emails."""
+        return SecretStr(
+            self._NotifySendGrid(
                 apikey=self.api_key.get_secret_value(),
                 from_email=self.sender_email,
                 targets=self.to_emails,
            ).url()  # pyright: ignore[reportUnknownMemberType, reportUnknownArgumentType] incomplete type hints in apprise
        )
 
-
+    @sync_compatible
+    async def notify(
+        self,
+        body: str,
+        subject: str | None = None,
+    ):
+        # Update apprise client with current to_emails before sending
+        if hasattr(self, "_apprise_client") and self._apprise_client:
+            self._apprise_client.clear()
+            self._apprise_client.add(
+                servers=self._build_sendgrid_url().get_secret_value()
+            )
+
+        await super().notify(body, subject)
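The practical effect is that recipient changes made after block initialization are now reflected at send time, since notify() rebuilds the SendGrid URL from the current to_emails. A sketch, assuming an already-saved block (the block name and addresses are hypothetical):

from prefect.blocks.notifications import SendgridEmail

block = SendgridEmail.load("my-sendgrid")  # hypothetical saved block
block.to_emails = ["oncall@example.com"]   # changed after initialization
block.notify("Flow run failed", subject="Prefect alert")  # URL rebuilt before sending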
prefect/client/schemas/objects.py
CHANGED
@@ -698,7 +698,7 @@ class TaskRunPolicy(PrefectBaseModel)
         deprecated=True,
     )
     retries: Optional[int] = Field(default=None, description="The number of retries.")
-    retry_delay: Union[None, int, list[int]] = Field(
+    retry_delay: Union[None, int, float, list[int], list[float]] = Field(
         default=None,
         description="A delay time or list of delay times between retries, in seconds.",
     )
@@ -728,8 +728,8 @@ class TaskRunPolicy(PrefectBaseModel)
    @field_validator("retry_delay")
    @classmethod
    def validate_configured_retry_delays(
-        cls, v: Optional[list[float]]
-    ) -> Optional[list[float]]:
+        cls, v: Optional[int | float | list[int] | list[float]]
+    ) -> Optional[int | float | list[int] | list[float]]:
        return list_length_50_or_less(v)
 
    @field_validator("retry_jitter_factor")
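Since TaskRunPolicy.retry_delay now accepts floats and lists of floats, fractional retry backoffs configured on a task round-trip through the API schema. A small illustration (the task itself is hypothetical):

from prefect import task

@task(retries=3, retry_delay_seconds=[0.5, 1.5, 2.5])  # fractional delays now validate
def fetch_data():
    ...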
prefect/context.py
CHANGED
@@ -7,13 +7,23 @@ For more user-accessible information about the current run, see [`prefect.runtim
 """
 
 import asyncio
+import json
 import os
 import sys
 import warnings
 from collections.abc import AsyncGenerator, Generator, Mapping
 from contextlib import ExitStack, asynccontextmanager, contextmanager
 from contextvars import ContextVar, Token
-from typing import
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Optional,
+    TypeVar,
+    Union,
+)
+from uuid import UUID
 
 from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
 from typing_extensions import Self
@@ -21,6 +31,7 @@ from typing_extensions import Self
 import prefect.settings
 import prefect.types._datetime
 from prefect._internal.compatibility.migration import getattr_migration
+from prefect.assets import Asset
 from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
 from prefect.client.schemas import FlowRun, TaskRun
 from prefect.events.worker import EventsWorker
@@ -48,9 +59,15 @@ if TYPE_CHECKING:
     from prefect.tasks import Task
 
 
-def serialize_context(
+def serialize_context(
+    asset_ctx_kwargs: Union[dict[str, Any], None] = None,
+) -> dict[str, Any]:
     """
     Serialize the current context for use in a remote execution environment.
+
+    Optionally provide asset_ctx_kwargs to create new AssetContext, that will be used
+    in the remote execution environment. This is useful for TaskRunners, who rely on creating the
+    task run in the remote environment.
     """
     flow_run_context = EngineContext.get()
     task_run_context = TaskRunContext.get()
@@ -62,6 +79,11 @@ def serialize_context() -> dict[str, Any]:
         "task_run_context": task_run_context.serialize() if task_run_context else {},
         "tags_context": tags_context.serialize() if tags_context else {},
         "settings_context": settings_context.serialize() if settings_context else {},
+        "asset_context": AssetContext.from_task_and_inputs(
+            **asset_ctx_kwargs
+        ).serialize()
+        if asset_ctx_kwargs
+        else {},
     }
 
 
@@ -112,6 +134,9 @@ def hydrated_context(
         # Set up tags context
         if tags_context := serialized_context.get("tags_context"):
             stack.enter_context(tags(*tags_context["current_tags"]))
+        # Set up asset context
+        if asset_context := serialized_context.get("asset_context"):
+            stack.enter_context(AssetContext(**asset_context))
         yield
 
 
@@ -373,6 +398,10 @@ class EngineContext(RunContext):
     # Holds the ID of the object returned by the task run and task run state
     task_run_results: dict[int, State] = Field(default_factory=dict)
 
+    # Tracking information needed to track asset linage between
+    # tasks and materialization
+    task_run_assets: dict[UUID, set[Asset]] = Field(default_factory=dict)
+
     # Events worker to emit events
     events: Optional[EventsWorker] = None
 
@@ -443,6 +472,214 @@ class TaskRunContext(RunContext):
     )
 
 
+class AssetContext(ContextModel):
+    """
+    The asset context for a materializing task run. Contains all asset-related information needed
+    for asset event emission and downstream asset dependency propagation.
+
+    Attributes:
+        direct_asset_dependencies: Assets that this task directly depends on (from task.asset_deps)
+        downstream_assets: Assets that this task will create/materialize (from MaterializingTask.assets)
+        upstream_assets: Assets from upstream task dependencies
+        materialized_by: Tool that materialized the assets (from MaterializingTask.materialized_by)
+        task_run_id: ID of the associated task run
+        materialization_metadata: Metadata for materialized assets
+    """
+
+    direct_asset_dependencies: set[Asset] = Field(default_factory=set)
+    downstream_assets: set[Asset] = Field(default_factory=set)
+    upstream_assets: set[Asset] = Field(default_factory=set)
+    materialized_by: Optional[str] = None
+    task_run_id: Optional[UUID] = None
+    materialization_metadata: dict[str, dict[str, Any]] = Field(default_factory=dict)
+
+    __var__: ClassVar[ContextVar[Self]] = ContextVar("asset_context")
+
+    @classmethod
+    def from_task_and_inputs(
+        cls,
+        task: "Task[Any, Any]",
+        task_run_id: UUID,
+        task_inputs: Optional[dict[str, set[Any]]] = None,
+    ) -> "AssetContext":
+        """
+        Create an AssetContext from a task and its resolved inputs.
+
+        Args:
+            task: The task instance
+            task_run_id: The task run ID
+            task_inputs: The resolved task inputs (TaskRunResult objects)
+
+        Returns:
+            Configured AssetContext
+        """
+        from prefect.client.schemas import TaskRunResult
+        from prefect.tasks import MaterializingTask
+
+        upstream_assets: set[Asset] = set()
+
+        # Get upstream assets from engine context instead of TaskRunResult.assets
+        flow_ctx = FlowRunContext.get()
+        if task_inputs and flow_ctx:
+            for inputs in task_inputs.values():
+                for task_input in inputs:
+                    if isinstance(task_input, TaskRunResult):
+                        # Look up assets in the engine context
+                        task_assets = flow_ctx.task_run_assets.get(task_input.id)
+                        if task_assets:
+                            upstream_assets.update(task_assets)
+
+        ctx = cls(
+            direct_asset_dependencies=set(task.asset_deps)
+            if task.asset_deps
+            else set(),
+            downstream_assets=set(task.assets)
+            if isinstance(task, MaterializingTask) and task.assets
+            else set(),
+            upstream_assets=upstream_assets,
+            materialized_by=task.materialized_by
+            if isinstance(task, MaterializingTask)
+            else None,
+            task_run_id=task_run_id,
+        )
+        ctx.update_tracked_assets()
+
+        return ctx
+
+    def add_asset_metadata(self, asset_key: str, metadata: dict[str, Any]) -> None:
+        """
+        Add metadata for a materialized asset.
+
+        Args:
+            asset_key: The asset key
+            metadata: Metadata dictionary to add
+
+        Raises:
+            ValueError: If asset_key is not in downstream_assets
+        """
+        downstream_keys = {asset.key for asset in self.downstream_assets}
+        if asset_key not in downstream_keys:
+            raise ValueError(
+                "Can only add metadata to assets that are arguments to @materialize"
+            )
+
+        existing = self.materialization_metadata.get(asset_key, {})
+        self.materialization_metadata[asset_key] = existing | metadata
+
+    @staticmethod
+    def asset_as_resource(asset: Asset) -> dict[str, str]:
+        """Convert Asset to event resource format."""
+        resource = {"prefect.resource.id": asset.key}
+
+        if asset.properties:
+            properties_dict = asset.properties.model_dump(exclude_unset=True)
+
+            if "name" in properties_dict:
+                resource["prefect.resource.name"] = properties_dict["name"]
+
+            if "description" in properties_dict:
+                resource["prefect.asset.description"] = properties_dict["description"]
+
+            if "url" in properties_dict:
+                resource["prefect.asset.url"] = properties_dict["url"]
+
+            if "owners" in properties_dict:
+                resource["prefect.asset.owners"] = json.dumps(properties_dict["owners"])
+
+        return resource
+
+    @staticmethod
+    def asset_as_related(asset: Asset) -> dict[str, str]:
+        """Convert Asset to event related format."""
+        return {
+            "prefect.resource.id": asset.key,
+            "prefect.resource.role": "asset",
+        }
+
+    @staticmethod
+    def related_materialized_by(by: str) -> dict[str, str]:
+        """Create a related resource for the tool that performed the materialization"""
+        return {
+            "prefect.resource.id": by,
+            "prefect.resource.role": "asset-materialized-by",
+        }
+
+    def emit_events(self, state: State) -> None:
+        """
+        Emit asset events
+        """
+
+        from prefect.events import emit_event
+
+        if state.name == "Cached":
+            return
+        elif state.is_failed():
+            event_status = "failed"
+        elif state.is_completed():
+            event_status = "succeeded"
+        else:
+            return
+
+        # If we have no downstream assets, this not a materialization
+        if not self.downstream_assets:
+            return
+
+        # Emit reference events for all upstream assets (direct + inherited)
+        all_upstream_assets = self.upstream_assets | self.direct_asset_dependencies
+        for asset in all_upstream_assets:
+            emit_event(
+                event="prefect.asset.referenced",
+                resource=self.asset_as_resource(asset),
+                related=[],
+            )
+
+        # Emit materialization events for downstream assets
+        upstream_related = [self.asset_as_related(a) for a in all_upstream_assets]
+
+        if self.materialized_by:
+            upstream_related.append(self.related_materialized_by(self.materialized_by))
+
+        for asset in self.downstream_assets:
+            emit_event(
+                event=f"prefect.asset.materialization.{event_status}",
+                resource=self.asset_as_resource(asset),
+                related=upstream_related,
+                payload=self.materialization_metadata.get(asset.key),
+            )
+
+    def update_tracked_assets(self) -> None:
+        """
+        Update the flow run context with assets that should be propagated downstream.
+        """
+        if not (flow_run_context := FlowRunContext.get()):
+            return
+
+        if not self.task_run_id:
+            return
+
+        if self.downstream_assets:
+            # MaterializingTask: propagate the downstream assets (what we create)
+            assets_for_downstream = set(self.downstream_assets)
+        else:
+            # Regular task: propagate upstream assets + direct dependencies
+            assets_for_downstream = set(
+                self.upstream_assets | self.direct_asset_dependencies
+            )
+
+        flow_run_context.task_run_assets[self.task_run_id] = assets_for_downstream
+
+    def serialize(self: Self, include_secrets: bool = True) -> dict[str, Any]:
+        """Serialize the AssetContext for distributed execution."""
+        return self.model_dump(
+            # use json serialization so fields that are
+            # sets of pydantic models are serialized
+            mode="json",
+            exclude_unset=True,
+            serialize_as_any=True,
+            context={"include_secrets": include_secrets},
+        )
+
+
 class TagsContext(ContextModel):
     """
     The context for `prefect.tags` management.
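To make the event shape concrete, here is a sketch of the resource dictionary AssetContext.asset_as_resource builds for a materialization event. The asset values are hypothetical, and the prefect.assets import assumes the package re-exports these names.

from prefect.assets import Asset, AssetProperties
from prefect.context import AssetContext

asset = Asset(
    key="s3://my-bucket/processed/orders.parquet",  # hypothetical key
    properties=AssetProperties(name="Processed orders", owners=["data-eng"]),
)

# Static helper on the new AssetContext; no active context is needed to call it.
print(AssetContext.asset_as_resource(asset))
# {'prefect.resource.id': 's3://my-bucket/processed/orders.parquet',
#  'prefect.resource.name': 'Processed orders',
#  'prefect.asset.owners': '["data-eng"]'}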
prefect/deployments/runner.py
CHANGED
@@ -1222,14 +1222,14 @@ async def deploy(
             " or specify a remote storage location for the flow with `.from_source`."
             " If you are attempting to deploy a flow to a local process work pool,"
             " consider using `flow.serve` instead. See the documentation for more"
-            " information: https://docs.prefect.io/latest/
+            " information: https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes"
         )
     elif work_pool.type == "process" and not ignore_warnings:
         console.print(
             "Looks like you're deploying to a process work pool. If you're creating a"
             " deployment for local development, calling `.serve` on your flow is a great"
             " way to get started. See the documentation for more information:"
-            " https://docs.prefect.io/latest/
+            " https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes "
             " Set `ignore_warnings=True` to suppress this message.",
             style="yellow",
         )
prefect/events/clients.py
CHANGED
@@ -628,7 +628,7 @@ class PrefectEventSubscriber:
         try:
             await self._reconnect()
         finally:
-            EVENT_WEBSOCKET_CONNECTIONS.labels(self.client_name, "out", "initial")
+            EVENT_WEBSOCKET_CONNECTIONS.labels(self.client_name, "out", "initial").inc()
         return self
 
     async def _reconnect(self) -> None:
@@ -709,7 +709,7 @@ class PrefectEventSubscriber:
             finally:
                 EVENT_WEBSOCKET_CONNECTIONS.labels(
                     self.client_name, "out", "reconnect"
-                )
+                ).inc()
         assert self._websocket
 
         while True:
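The added .inc() calls matter because Counter.labels() in prometheus_client only selects a labeled child; nothing is counted until .inc() is invoked. A standalone illustration with a hypothetical counter mirroring the label scheme:

from prometheus_client import Counter

connections = Counter(
    "event_websocket_connections", "Websocket connections", ["client", "direction", "phase"]
)

connections.labels("my-client", "out", "initial")        # selects the child; count stays 0
connections.labels("my-client", "out", "initial").inc()  # actually increments the labeled counter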
prefect/runner/server.py
CHANGED
@@ -257,7 +257,7 @@ def _build_generic_endpoint_for_flows(
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 async def build_server(runner: "Runner") -> FastAPI:
     """
@@ -306,7 +306,7 @@ async def build_server(runner: "Runner") -> FastAPI:
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 def start_webserver(runner: "Runner", log_level: str | None = None) -> None:
     """
prefect/runner/submit.py
CHANGED
@@ -124,7 +124,7 @@ def submit_to_runner(
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 @sync_compatible
 async def submit_to_runner(
@@ -196,7 +196,7 @@ async def submit_to_runner(
 @deprecated_callable(
     start_date=datetime(2025, 4, 1),
     end_date=datetime(2025, 10, 1),
-    help="Use background tasks (https://docs.prefect.io/v3/
+    help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
 )
 @sync_compatible
 async def wait_for_submitted_runs(
prefect/server/api/events.py
CHANGED
@@ -51,7 +51,7 @@ async def create_events(
     """
     Record a batch of Events.
 
-    For more information, see https://docs.prefect.io/v3/
+    For more information, see https://docs.prefect.io/v3/concepts/events.
     """
     if ephemeral_request:
         await EventsPipeline().process_events(events)
prefect/server/api/task_workers.py
CHANGED
@@ -23,7 +23,7 @@ async def read_task_workers(
     """
     Read active task workers. Optionally filter by task keys.
 
-    For more information, see https://docs.prefect.io/v3/
+    For more information, see https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks.
     """
 
     if task_worker_filter and task_worker_filter.task_keys:
prefect/server/api/work_queues.py
CHANGED
@@ -5,7 +5,6 @@ Routes for interacting with work queue objects.
 from typing import List, Optional
 from uuid import UUID
 
-import sqlalchemy as sa
 from fastapi import (
     BackgroundTasks,
     Body,
@@ -15,6 +14,7 @@ from fastapi import (
     Path,
     status,
 )
+from sqlalchemy.exc import IntegrityError
 
 import prefect.server.api.dependencies as dependencies
 import prefect.server.models as models
@@ -54,7 +54,7 @@ async def create_work_queue(
         model = await models.work_queues.create_work_queue(
             session=session, work_queue=work_queue
         )
-    except
+    except IntegrityError:
         raise HTTPException(
             status_code=status.HTTP_409_CONFLICT,
             detail="A work queue with this name already exists.",
@@ -184,7 +184,7 @@ async def read_work_queue_runs(
 async def read_work_queues(
     limit: int = dependencies.LimitBody(),
     offset: int = Body(0, ge=0),
-    work_queues: schemas.filters.WorkQueueFilter = None,
+    work_queues: Optional[schemas.filters.WorkQueueFilter] = None,
     db: PrefectDBInterface = Depends(provide_database_interface),
 ) -> List[schemas.responses.WorkQueueResponse]:
     """
prefect/settings/models/tasks.py
CHANGED
@@ -63,6 +63,11 @@ class TasksSettings(PrefectBaseSettings):
         description="If `True`, sets the default cache policy on all tasks to `NO_CACHE`.",
     )
 
+    disable_caching: bool = Field(
+        default=False,
+        description="If `True`, disables caching on all tasks regardless of cache policy.",
+    )
+
     default_retries: int = Field(
         default=0,
         ge=0,
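The new disable_caching field sits alongside the other TasksSettings fields, so it should be controllable the same way as its siblings. Assuming the usual PREFECT_TASKS_* environment-variable prefix (e.g. PREFECT_TASKS_DEFAULT_NO_CACHE for the field above it), disabling caching globally would look like:

import os

# Assumed variable name based on the PREFECT_TASKS_* convention; set it before
# Prefect resolves its settings so the value is picked up.
os.environ["PREFECT_TASKS_DISABLE_CACHING"] = "true"

The same value could also be set in a profile (e.g. `prefect config set PREFECT_TASKS_DISABLE_CACHING=true`), again assuming the conventional setting name.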