prefect-client 3.1.11__py3-none-any.whl → 3.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_experimental/sla/__init__.py +0 -0
- prefect/_experimental/sla/client.py +66 -0
- prefect/_experimental/sla/objects.py +53 -0
- prefect/_version.py +3 -3
- prefect/automations.py +236 -30
- prefect/blocks/__init__.py +3 -3
- prefect/blocks/abstract.py +53 -30
- prefect/blocks/core.py +181 -82
- prefect/blocks/notifications.py +133 -73
- prefect/blocks/redis.py +13 -9
- prefect/blocks/system.py +24 -11
- prefect/blocks/webhook.py +7 -5
- prefect/cache_policies.py +3 -2
- prefect/client/orchestration/__init__.py +103 -2006
- prefect/client/orchestration/_automations/__init__.py +0 -0
- prefect/client/orchestration/_automations/client.py +329 -0
- prefect/client/orchestration/_blocks_documents/__init__.py +0 -0
- prefect/client/orchestration/_blocks_documents/client.py +334 -0
- prefect/client/orchestration/_blocks_schemas/__init__.py +0 -0
- prefect/client/orchestration/_blocks_schemas/client.py +200 -0
- prefect/client/orchestration/_blocks_types/__init__.py +0 -0
- prefect/client/orchestration/_blocks_types/client.py +380 -0
- prefect/client/orchestration/_deployments/__init__.py +0 -0
- prefect/client/orchestration/_deployments/client.py +1128 -0
- prefect/client/orchestration/_flow_runs/__init__.py +0 -0
- prefect/client/orchestration/_flow_runs/client.py +903 -0
- prefect/client/orchestration/_flows/__init__.py +0 -0
- prefect/client/orchestration/_flows/client.py +343 -0
- prefect/client/orchestration/_logs/client.py +16 -14
- prefect/client/schemas/__init__.py +68 -28
- prefect/client/schemas/objects.py +5 -5
- prefect/context.py +15 -1
- prefect/deployments/base.py +6 -0
- prefect/deployments/runner.py +42 -1
- prefect/engine.py +17 -4
- prefect/filesystems.py +6 -2
- prefect/flow_engine.py +47 -38
- prefect/flows.py +10 -1
- prefect/logging/logging.yml +1 -1
- prefect/runner/runner.py +4 -2
- prefect/settings/models/cloud.py +5 -0
- prefect/settings/models/experiments.py +0 -5
- prefect/states.py +57 -38
- prefect/task_runners.py +56 -55
- prefect/task_worker.py +2 -2
- prefect/tasks.py +6 -4
- prefect/telemetry/bootstrap.py +10 -9
- prefect/telemetry/services.py +4 -0
- prefect/utilities/templating.py +25 -1
- prefect/workers/base.py +6 -3
- prefect/workers/process.py +1 -1
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/METADATA +2 -2
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/RECORD +56 -39
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/LICENSE +0 -0
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/WHEEL +0 -0
- {prefect_client-3.1.11.dist-info → prefect_client-3.1.12.dist-info}/top_level.txt +0 -0
prefect/states.py
CHANGED
@@ -6,7 +6,7 @@ import uuid
 import warnings
 from collections import Counter
 from types import GeneratorType, TracebackType
-from typing import Any, Dict, Iterable, Optional, Type
+from typing import TYPE_CHECKING, Any, Dict, Iterable, Optional, Type
 
 import anyio
 import httpx
@@ -28,16 +28,16 @@ from prefect.exceptions import (
     UnfinishedRun,
 )
 from prefect.logging.loggers import get_logger, get_run_logger
-from prefect.results import (
-    R,
-    ResultRecord,
-    ResultRecordMetadata,
-    ResultStore,
-)
 from prefect.utilities.annotations import BaseAnnotation
 from prefect.utilities.asyncutils import in_async_main_thread, sync_compatible
 from prefect.utilities.collections import ensure_iterable
 
+if TYPE_CHECKING:
+    from prefect.results import (
+        R,
+        ResultStore,
+    )
+
 logger = get_logger("states")
 
 
@@ -49,11 +49,11 @@ logger = get_logger("states")
     help="Please ensure you are awaiting the call to `result()` when calling in an async context.",
 )
 def get_state_result(
-    state: State[R],
+    state: "State[R]",
     raise_on_failure: bool = True,
     fetch: bool = True,
     retry_result_failure: bool = True,
-) -> R:
+) -> "R":
     """
     Get the result from a state.
 
@@ -86,13 +86,18 @@ RESULT_READ_RETRY_DELAY = 0.25
 
 
 async def _get_state_result_data_with_retries(
-    state: State[R], retry_result_failure: bool = True
-) -> R:
+    state: "State[R]", retry_result_failure: bool = True
+) -> "R":
     # Results may be written asynchronously, possibly after their corresponding
     # state has been written and events have been emitted, so we should give some
     # grace here about missing results. The exception below could come in the form
     # of a missing file, a short read, or other types of errors depending on the
     # result storage backend.
+    from prefect.results import (
+        ResultRecord,
+        ResultRecordMetadata,
+    )
+
     if retry_result_failure is False:
         max_attempts = 1
     else:
@@ -120,11 +125,16 @@ async def _get_state_result_data_with_retries(
 
 @sync_compatible
 async def _get_state_result(
-    state: State[R], raise_on_failure: bool, retry_result_failure: bool = True
-) -> R:
+    state: "State[R]", raise_on_failure: bool, retry_result_failure: bool = True
+) -> "R":
     """
     Internal implementation for `get_state_result` without async backwards compatibility
     """
+    from prefect.results import (
+        ResultRecord,
+        ResultRecordMetadata,
+    )
+
     if state.is_paused():
         # Paused states are not truly terminal and do not have results associated with them
         raise PausedRun("Run is paused, its result is not available.", state=state)
@@ -181,7 +191,7 @@ def format_exception(exc: BaseException, tb: TracebackType = None) -> str:
 
 async def exception_to_crashed_state(
     exc: BaseException,
-    result_store: Optional[ResultStore] = None,
+    result_store: Optional["ResultStore"] = None,
 ) -> State:
     """
     Takes an exception that occurs _outside_ of user code and converts it to a
@@ -233,7 +243,7 @@ async def exception_to_crashed_state(
 
 async def exception_to_failed_state(
     exc: Optional[BaseException] = None,
-    result_store: Optional[ResultStore] = None,
+    result_store: Optional["ResultStore"] = None,
     write_result: bool = False,
     **kwargs,
 ) -> State:
@@ -285,12 +295,12 @@ async def exception_to_failed_state(
 
 
 async def return_value_to_state(
-    retval: R,
-    result_store: ResultStore,
+    retval: "R",
+    result_store: "ResultStore",
     key: Optional[str] = None,
     expiration: Optional[datetime.datetime] = None,
     write_result: bool = False,
-) -> State[R]:
+) -> "State[R]":
     """
     Given a return value from a user's function, create a `State` the run should
     be placed in.
@@ -311,6 +321,11 @@ async def return_value_to_state(
     Callers should resolve all futures into states before passing return values to this
     function.
     """
+    from prefect.results import (
+        ResultRecord,
+        ResultRecordMetadata,
+    )
+
     try:
         local_logger = get_run_logger()
     except MissingContextError:
@@ -443,6 +458,10 @@ async def get_state_exception(state: State) -> BaseException:
         - `CrashedRun` if the state type is CRASHED.
         - `CancelledRun` if the state type is CANCELLED.
     """
+    from prefect.results import (
+        ResultRecord,
+        ResultRecordMetadata,
+    )
 
     if state.is_failed():
         wrapper = FailedRun
@@ -586,7 +605,7 @@ class StateGroup:
         return f"StateGroup<{self.counts_message()}>"
 
 
-def _traced(cls: Type[State[R]], **kwargs: Any) -> State[R]:
+def _traced(cls: Type["State[R]"], **kwargs: Any) -> "State[R]":
     state_details = StateDetails.model_validate(kwargs.pop("state_details", {}))
 
     carrier = {}
@@ -597,10 +616,10 @@ def _traced(cls: Type[State[R]], **kwargs: Any) -> State[R]:
 
 
 def Scheduled(
-    cls: Type[State[R]] = State,
+    cls: Type["State[R]"] = State,
    scheduled_time: Optional[datetime.datetime] = None,
     **kwargs: Any,
-) -> State[R]:
+) -> "State[R]":
     """Convenience function for creating `Scheduled` states.
 
     Returns:
@@ -616,7 +635,7 @@ def Scheduled(
     return _traced(cls, type=StateType.SCHEDULED, state_details=state_details, **kwargs)
 
 
-def Completed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Completed(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Completed` states.
 
     Returns:
@@ -626,7 +645,7 @@ def Completed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     return _traced(cls, type=StateType.COMPLETED, **kwargs)
 
 
-def Running(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Running(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Running` states.
 
     Returns:
@@ -635,7 +654,7 @@ def Running(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     return _traced(cls, type=StateType.RUNNING, **kwargs)
 
 
-def Failed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Failed(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Failed` states.
 
     Returns:
@@ -644,7 +663,7 @@ def Failed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     return _traced(cls, type=StateType.FAILED, **kwargs)
 
 
-def Crashed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Crashed(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Crashed` states.
 
     Returns:
@@ -653,7 +672,7 @@ def Crashed(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     return _traced(cls, type=StateType.CRASHED, **kwargs)
 
 
-def Cancelling(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Cancelling(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Cancelling` states.
 
     Returns:
@@ -662,7 +681,7 @@ def Cancelling(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     return _traced(cls, type=StateType.CANCELLING, **kwargs)
 
 
-def Cancelled(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Cancelled(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Cancelled` states.
 
     Returns:
@@ -671,7 +690,7 @@ def Cancelled(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
     return _traced(cls, type=StateType.CANCELLED, **kwargs)
 
 
-def Pending(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Pending(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Pending` states.
 
     Returns:
@@ -681,13 +700,13 @@ def Pending(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
 
 
 def Paused(
-    cls: Type[State[R]] = State,
+    cls: Type["State[R]"] = State,
     timeout_seconds: Optional[int] = None,
     pause_expiration_time: Optional[datetime.datetime] = None,
     reschedule: bool = False,
     pause_key: Optional[str] = None,
     **kwargs: Any,
-) -> State[R]:
+) -> "State[R]":
     """Convenience function for creating `Paused` states.
 
     Returns:
@@ -717,7 +736,7 @@ def Paused(
 
 
 def Suspended(
-    cls: Type[State[R]] = State,
+    cls: Type["State[R]"] = State,
     timeout_seconds: Optional[int] = None,
     pause_expiration_time: Optional[datetime.datetime] = None,
     pause_key: Optional[str] = None,
@@ -740,10 +759,10 @@ def Suspended(
 
 
 def AwaitingRetry(
-    cls: Type[State[R]] = State,
+    cls: Type["State[R]"] = State,
     scheduled_time: Optional[datetime.datetime] = None,
     **kwargs: Any,
-) -> State[R]:
+) -> "State[R]":
     """Convenience function for creating `AwaitingRetry` states.
 
     Returns:
@@ -755,10 +774,10 @@ def AwaitingRetry(
 
 
 def AwaitingConcurrencySlot(
-    cls: Type[State[R]] = State,
+    cls: Type["State[R]"] = State,
     scheduled_time: Optional[datetime.datetime] = None,
     **kwargs: Any,
-) -> State[R]:
+) -> "State[R]":
     """Convenience function for creating `AwaitingConcurrencySlot` states.
 
     Returns:
@@ -769,7 +788,7 @@ def AwaitingConcurrencySlot(
     )
 
 
-def Retrying(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
+def Retrying(cls: Type["State[R]"] = State, **kwargs: Any) -> "State[R]":
     """Convenience function for creating `Retrying` states.
 
    Returns:
@@ -779,10 +798,10 @@ def Retrying(cls: Type[State[R]] = State, **kwargs: Any) -> State[R]:
 
 
 def Late(
-    cls: Type[State[R]] = State,
+    cls: Type["State[R]"] = State,
     scheduled_time: Optional[datetime.datetime] = None,
     **kwargs: Any,
-) -> State[R]:
+) -> "State[R]":
     """Convenience function for creating `Late` states.
 
     Returns:
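The states.py changes above defer all `prefect.results` imports: names used only in annotations move under `TYPE_CHECKING` (with the annotations quoted), and names needed at runtime are imported inside the functions that use them. Below is a minimal, hedged sketch of that deferred-import pattern; `decimal` merely stands in for an expensive-to-import module and this is not Prefect code:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by static type checkers; never executed at runtime, so it
    # cannot create an import cycle or add to module import time.
    from decimal import Decimal  # stand-in for an expensive-to-import type


def double(value: "Decimal") -> "Decimal":
    # Runtime code still needs the real object, so the import is deferred
    # into the function body and only paid when the function is called.
    from decimal import Decimal

    return Decimal(value) * 2


if __name__ == "__main__":
    print(double("2.5"))  # Decimal('5.0')
```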
prefect/task_runners.py
CHANGED
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import abc
 import asyncio
 import sys
@@ -14,7 +16,6 @@ from typing import (
     Iterable,
     List,
     Optional,
-    Set,
     overload,
 )
 
@@ -44,7 +45,7 @@ if TYPE_CHECKING:
 P = ParamSpec("P")
 T = TypeVar("T")
 R = TypeVar("R")
-F = TypeVar("F", bound=PrefectFuture, default=PrefectConcurrentFuture)
+F = TypeVar("F", bound=PrefectFuture[Any], default=PrefectConcurrentFuture[Any])
 
 
 class TaskRunner(abc.ABC, Generic[F]):
@@ -76,10 +77,10 @@ class TaskRunner(abc.ABC, Generic[F]):
     @abc.abstractmethod
     def submit(
         self,
-        task: "Task",
-        parameters:
-        wait_for:
-        dependencies:
+        task: "Task[P, R]",
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
     ) -> F:
         """
         Submit a task to the task run engine.
@@ -98,7 +99,7 @@ class TaskRunner(abc.ABC, Generic[F]):
     def map(
         self,
         task: "Task[P, R]",
-        parameters:
+        parameters: dict[str, Any],
         wait_for: Optional[Iterable[PrefectFuture[R]]] = None,
     ) -> PrefectFutureList[F]:
         """
@@ -138,9 +139,9 @@ class TaskRunner(abc.ABC, Generic[F]):
         # Ensure that any parameters in kwargs are expanded before this check
         parameters = explode_variadic_parameter(task.fn, parameters)
 
-        iterable_parameters = {}
-        static_parameters = {}
-        annotated_parameters = {}
+        iterable_parameters: dict[str, Any] = {}
+        static_parameters: dict[str, Any] = {}
+        annotated_parameters: dict[str, Any] = {}
         for key, val in parameters.items():
             if isinstance(val, (allow_failure, quote)):
                 # Unwrap annotated parameters to determine if they are iterable
@@ -172,9 +173,9 @@ class TaskRunner(abc.ABC, Generic[F]):
 
         map_length = list(lengths)[0]
 
-        futures: List[PrefectFuture] = []
+        futures: List[PrefectFuture[Any]] = []
         for i in range(map_length):
-            call_parameters = {
+            call_parameters: dict[str, Any] = {
                 key: value[i] for key, value in iterable_parameters.items()
             }
             call_parameters.update(
@@ -212,12 +213,12 @@ class TaskRunner(abc.ABC, Generic[F]):
         self._started = True
         return self
 
-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any):
         self.logger.debug("Stopping task runner")
         self._started = False
 
 
-class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
+class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[Any]]):
     def __init__(self, max_workers: Optional[int] = None):
         super().__init__()
         self._executor: Optional[ThreadPoolExecutor] = None
@@ -235,9 +236,9 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
     def submit(
         self,
         task: "Task[P, Coroutine[Any, Any, R]]",
-        parameters:
-        wait_for:
-        dependencies:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
     ) -> PrefectConcurrentFuture[R]:
         ...
 
@@ -245,19 +246,19 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
     def submit(
         self,
         task: "Task[Any, R]",
-        parameters:
-        wait_for:
-        dependencies:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
     ) -> PrefectConcurrentFuture[R]:
         ...
 
     def submit(
         self,
-        task: "Task",
-        parameters:
-        wait_for:
-        dependencies:
-    ):
+        task: "Task[P, R]",
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
+    ) -> PrefectConcurrentFuture[R]:
         """
         Submit a task to the task run engine running in a separate thread.
 
@@ -289,7 +290,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
         else:
             self.logger.debug(f"Submitting task {task.name} to thread pool executor...")
 
-        submit_kwargs = dict(
+        submit_kwargs: dict[str, Any] = dict(
             task=task,
             task_run_id=task_run_id,
             parameters=parameters,
@@ -322,8 +323,8 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
     def map(
         self,
         task: "Task[P, Coroutine[Any, Any, R]]",
-        parameters:
-        wait_for:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
     ) -> PrefectFutureList[PrefectConcurrentFuture[R]]:
         ...
 
@@ -331,17 +332,17 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
     def map(
         self,
         task: "Task[Any, R]",
-        parameters:
-        wait_for:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
     ) -> PrefectFutureList[PrefectConcurrentFuture[R]]:
         ...
 
     def map(
         self,
-        task: "Task",
-        parameters:
-        wait_for:
-    ):
+        task: "Task[P, R]",
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+    ) -> PrefectFutureList[PrefectConcurrentFuture[R]]:
         return super().map(task, parameters, wait_for)
 
     def cancel_all(self):
@@ -358,7 +359,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
         self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
         return self
 
-    def __exit__(self, exc_type, exc_value, traceback):
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any):
         self.cancel_all()
         if self._executor is not None:
             self._executor.shutdown(cancel_futures=True)
@@ -375,7 +376,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
 ConcurrentTaskRunner = ThreadPoolTaskRunner
 
 
-class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture]):
+class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture[R]]):
     def __init__(self):
         super().__init__()
 
@@ -386,9 +387,9 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture]):
     def submit(
         self,
         task: "Task[P, Coroutine[Any, Any, R]]",
-        parameters:
-        wait_for:
-        dependencies:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
     ) -> PrefectDistributedFuture[R]:
         ...
 
@@ -396,19 +397,19 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture]):
     def submit(
         self,
         task: "Task[Any, R]",
-        parameters:
-        wait_for:
-        dependencies:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
     ) -> PrefectDistributedFuture[R]:
         ...
 
     def submit(
         self,
-        task: "Task",
-        parameters:
-        wait_for:
-        dependencies:
-    ):
+        task: "Task[P, R]",
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+        dependencies: dict[str, set[TaskRunInput]] | None = None,
+    ) -> PrefectDistributedFuture[R]:
         """
         Submit a task to the task run engine running in a separate thread.
 
@@ -443,8 +444,8 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture]):
     def map(
         self,
         task: "Task[P, Coroutine[Any, Any, R]]",
-        parameters:
-        wait_for:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
    ) -> PrefectFutureList[PrefectDistributedFuture[R]]:
         ...
 
@@ -452,15 +453,15 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture]):
     def map(
         self,
         task: "Task[Any, R]",
-        parameters:
-        wait_for:
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
     ) -> PrefectFutureList[PrefectDistributedFuture[R]]:
         ...
 
     def map(
         self,
-        task: "Task",
-        parameters:
-        wait_for:
-    ):
+        task: "Task[P, R]",
+        parameters: dict[str, Any],
+        wait_for: Iterable[PrefectFuture[Any]] | None = None,
+    ) -> PrefectFutureList[PrefectDistributedFuture[R]]:
         return super().map(task, parameters, wait_for)
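The signature rewrites above are enabled by the new `from __future__ import annotations` at the top of the module: with lazy annotation evaluation, the file can use `X | None` unions and built-in generics such as `dict[str, Any]` in signatures without evaluating them at import time. A small self-contained sketch of the same idea, with illustrative names that are not Prefect's:

```python
from __future__ import annotations  # annotations are stored as strings, not evaluated

from typing import Any, Iterable


class MiniRunner:
    """Illustrative stand-in for a task runner; not Prefect's implementation."""

    def submit(
        self,
        parameters: dict[str, Any],
        wait_for: Iterable[str] | None = None,  # PEP 604 union, fine under lazy annotations
    ) -> str:
        waiting = 0 if wait_for is None else len(list(wait_for))
        return f"{len(parameters)} parameter(s), waiting on {waiting} future(s)"


if __name__ == "__main__":
    print(MiniRunner().submit({"x": 1, "y": 2}, wait_for=["other-task"]))
```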
prefect/task_worker.py
CHANGED
@@ -21,7 +21,7 @@ from websockets.exceptions import InvalidStatusCode
 
 from prefect import Task
 from prefect._internal.concurrency.api import create_call, from_sync
-from prefect.cache_policies import DEFAULT,
+from prefect.cache_policies import DEFAULT, NO_CACHE
 from prefect.client.orchestration import get_client
 from prefect.client.schemas.objects import TaskRun
 from prefect.client.subscriptions import Subscription
@@ -93,7 +93,7 @@ class TaskWorker:
             if not isinstance(t, Task):
                 continue
 
-            if t.cache_policy in [None,
+            if t.cache_policy in [None, NO_CACHE, NotSet]:
                 self.tasks.append(
                     t.with_options(persist_result=True, cache_policy=DEFAULT)
                 )
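With this change, a served task whose cache policy is `None`, the new `NO_CACHE`, or unset gets re-wrapped by the worker with `persist_result=True` and the `DEFAULT` cache policy so its results can be retrieved by callers. A hedged sketch of what that looks like from the task side, assuming prefect 3.1.12 is installed; `with_options` is the same call the worker uses above:

```python
from prefect import task
from prefect.cache_policies import DEFAULT, NO_CACHE


@task(cache_policy=NO_CACHE)
def add(x: int, y: int) -> int:
    return x + y


# Roughly what TaskWorker does for tasks without an effective cache policy:
# re-register them with result persistence and the DEFAULT policy.
served_add = add.with_options(persist_result=True, cache_policy=DEFAULT)
print(served_add.persist_result)  # True
```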
prefect/tasks.py
CHANGED
@@ -30,7 +30,7 @@ from uuid import UUID, uuid4
 from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypeIs
 
 import prefect.states
-from prefect.cache_policies import DEFAULT,
+from prefect.cache_policies import DEFAULT, NO_CACHE, CachePolicy
 from prefect.client.orchestration import get_client
 from prefect.client.schemas import TaskRun
 from prefect.client.schemas.objects import (
@@ -441,7 +441,9 @@ class Task(Generic[P, R]):
         if persist_result is None:
             if any(
                 [
-                    cache_policy
+                    cache_policy
+                    and cache_policy != NO_CACHE
+                    and cache_policy != NotSet,
                     cache_key_fn is not None,
                     result_storage_key is not None,
                     result_storage is not None,
@@ -451,8 +453,8 @@ class Task(Generic[P, R]):
                 persist_result = True
 
         if persist_result is False:
-            self.cache_policy = None if cache_policy is None else
-            if cache_policy and cache_policy is not NotSet and cache_policy !=
+            self.cache_policy = None if cache_policy is None else NO_CACHE
+            if cache_policy and cache_policy is not NotSet and cache_policy != NO_CACHE:
                 logger.warning(
                     "Ignoring `cache_policy` because `persist_result` is False"
                 )
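The tasks.py change makes the `persist_result=False` path explicit: an explicit cache policy other than `NO_CACHE` is ignored with a warning and the task's effective policy becomes `NO_CACHE` (or stays `None` when none was given), since caching requires persisted results. A short sketch of that behavior, assuming prefect 3.1.12 and its standard `INPUTS` cache policy:

```python
from prefect import task
from prefect.cache_policies import INPUTS


# persist_result=False means there is nothing to cache, so the supplied
# cache policy is ignored (a warning is logged) per the change above.
@task(persist_result=False, cache_policy=INPUTS)
def double(x: int) -> int:
    return x * 2


print(double.cache_policy)  # effectively NO_CACHE rather than INPUTS
```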
prefect/telemetry/bootstrap.py
CHANGED
@@ -2,6 +2,9 @@ from typing import TYPE_CHECKING, Union
 
 import prefect.settings
 from prefect.client.base import ServerType, determine_server_type
+from prefect.logging.loggers import get_logger
+
+logger = get_logger(__name__)
 
 if TYPE_CHECKING:
     from opentelemetry.sdk._logs import LoggerProvider
@@ -16,30 +19,28 @@ def setup_telemetry() -> (
     ]
 ):
     settings = prefect.settings.get_current_settings()
-    if not settings.experiments.telemetry_enabled:
-        return None, None, None
 
     server_type = determine_server_type()
     if server_type != ServerType.CLOUD:
         return None, None, None
 
+    if not settings.cloud.enable_orchestration_telemetry:
+        return None, None, None
+
     if not settings.api.key:
-
+        logger.warning(
             "A Prefect Cloud API key is required to enable telemetry. Please set "
             "the `PREFECT_API_KEY` environment variable or authenticate with "
             "Prefect Cloud via the `prefect cloud login` command."
         )
+        return None, None, None
 
     assert settings.api.url
 
     # This import is here to defer importing of the `opentelemetry` packages.
     try:
         from .instrumentation import setup_exporters
-    except ImportError
-
-            "Unable to import OpenTelemetry instrumentation libraries. Please "
-            "ensure you have installed the `otel` extra when installing Prefect: "
-            "`pip install 'prefect[otel]'`"
-        ) from exc
+    except ImportError:
+        return None, None, None
 
     return setup_exporters(settings.api.url, settings.api.key.get_secret_value())
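After this change `setup_telemetry` is a chain of guard clauses: a non-Cloud server, telemetry disabled via `settings.cloud.enable_orchestration_telemetry`, a missing API key (now a logged warning rather than an exception), or a missing `otel` extra each return `(None, None, None)` instead of raising. A generic sketch of that guard-clause shape with placeholder inputs, not Prefect's actual settings objects:

```python
from typing import Optional, Tuple

Providers = Tuple[Optional[str], Optional[str], Optional[str]]


def setup_telemetry_sketch(
    is_cloud: bool, telemetry_enabled: bool, api_key: Optional[str]
) -> Providers:
    # Each failed precondition degrades gracefully instead of raising.
    if not is_cloud:
        return None, None, None
    if not telemetry_enabled:
        return None, None, None
    if not api_key:
        print("warning: an API key is required to enable telemetry")
        return None, None, None
    try:
        import importlib

        importlib.import_module("json")  # stand-in for the optional dependency
    except ImportError:
        return None, None, None
    return "trace-provider", "meter-provider", "logger-provider"  # placeholders


print(setup_telemetry_sketch(is_cloud=True, telemetry_enabled=True, api_key=None))
```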
prefect/telemetry/services.py
CHANGED
@@ -53,6 +53,8 @@ class QueueingSpanExporter(BaseQueueingExporter[ReadableSpan], SpanExporter):
 
     def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
         for item in spans:
+            if self._stopped:
+                break
             self.send(item)
         return SpanExportResult.SUCCESS
 
@@ -65,4 +67,6 @@ class QueueingLogExporter(BaseQueueingExporter[LogData], LogExporter):
 
     def export(self, batch: Sequence[LogData]) -> None:
         for item in batch:
+            if self._stopped:
+                break
             self.send(item)
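Both exporters now check a `_stopped` flag on every iteration, so a shutdown requested mid-batch stops further items from being enqueued. A minimal stand-alone sketch of that pattern; the names are illustrative and not the Prefect classes:

```python
from typing import Iterable, List


class StoppableExporter:
    """Illustrative exporter that stops draining its batch once shut down."""

    def __init__(self) -> None:
        self._stopped = False
        self.sent: List[str] = []

    def send(self, item: str) -> None:
        self.sent.append(item)

    def stop(self) -> None:
        self._stopped = True

    def export(self, batch: Iterable[str]) -> None:
        for item in batch:
            # Same guard as the change above: bail out as soon as shutdown begins.
            if self._stopped:
                break
            self.send(item)


exporter = StoppableExporter()
exporter.stop()
exporter.export(["span-a", "span-b"])
print(exporter.sent)  # [] -- nothing exported after stop()
```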