prefect-client 3.2.12__py3-none-any.whl → 3.2.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prefect/_build_info.py +3 -3
- prefect/client/orchestration/_flow_runs/client.py +34 -4
- prefect/client/schemas/actions.py +12 -0
- prefect/client/schemas/objects.py +18 -0
- prefect/deployments/runner.py +1 -9
- prefect/docker/docker_image.py +2 -1
- prefect/flow_engine.py +11 -5
- prefect/flow_runs.py +1 -1
- prefect/flows.py +3 -4
- prefect/logging/loggers.py +5 -4
- prefect/results.py +9 -3
- prefect/runner/__init__.py +2 -0
- prefect/runner/runner.py +1 -1
- prefect/runner/server.py +12 -7
- prefect/runner/storage.py +37 -37
- prefect/runner/submit.py +36 -25
- prefect/runner/utils.py +9 -5
- prefect/server/api/collections_data/views/aggregate-worker-metadata.json +4 -4
- prefect/server/api/flow_runs.py +21 -0
- prefect/server/api/server.py +17 -0
- prefect/server/api/task_runs.py +52 -1
- prefect/settings/models/tasks.py +5 -0
- prefect/task_engine.py +27 -0
- prefect/tasks.py +8 -2
- prefect/telemetry/run_telemetry.py +11 -3
- prefect/transactions.py +5 -0
- prefect/utilities/engine.py +2 -2
- prefect/utilities/importtools.py +6 -9
- prefect/workers/base.py +189 -104
- prefect/workers/process.py +7 -6
- {prefect_client-3.2.12.dist-info → prefect_client-3.2.14.dist-info}/METADATA +1 -1
- {prefect_client-3.2.12.dist-info → prefect_client-3.2.14.dist-info}/RECORD +34 -34
- {prefect_client-3.2.12.dist-info → prefect_client-3.2.14.dist-info}/WHEEL +0 -0
- {prefect_client-3.2.12.dist-info → prefect_client-3.2.14.dist-info}/licenses/LICENSE +0 -0
prefect/runner/submit.py
CHANGED
@@ -3,15 +3,19 @@ from __future__ import annotations
 import asyncio
 import inspect
 import uuid
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Union, overload

 import anyio
 import httpx
 from typing_extensions import Literal, TypeAlias

 from prefect.client.orchestration import get_client
-from prefect.client.schemas.filters import
-
+from prefect.client.schemas.filters import (
+    FlowRunFilter,
+    FlowRunFilterParentFlowRunId,
+    TaskRunFilter,
+)
+from prefect.client.schemas.objects import Constant, FlowRun, Parameter, TaskRunResult
 from prefect.context import FlowRunContext
 from prefect.flows import Flow
 from prefect.logging import get_logger
@@ -60,18 +64,20 @@ async def _submit_flow_to_runner(

     parent_flow_run_context = FlowRunContext.get()

-    task_inputs = {
-        k: await collect_task_run_inputs(v) for k, v in parameters.items()
+    task_inputs: dict[str, list[TaskRunResult | Parameter | Constant]] = {
+        k: list(await collect_task_run_inputs(v)) for k, v in parameters.items()
     }
     parameters = await resolve_inputs(parameters)
     dummy_task = Task(name=flow.name, fn=flow.fn, version=flow.version)
     parent_task_run = await client.create_task_run(
         task=dummy_task,
         flow_run_id=(
-            parent_flow_run_context.flow_run.id
+            parent_flow_run_context.flow_run.id
+            if parent_flow_run_context and parent_flow_run_context.flow_run
+            else None
         ),
         dynamic_key=(
-            dynamic_key_for_task_run(parent_flow_run_context, dummy_task)
+            str(dynamic_key_for_task_run(parent_flow_run_context, dummy_task))
             if parent_flow_run_context
             else str(uuid.uuid4())
         ),
@@ -79,14 +85,15 @@ async def _submit_flow_to_runner(
         state=Pending(),
     )

-
+    httpx_client = getattr(client, "_client")
+    response = await httpx_client.post(
         (
             f"http://{PREFECT_RUNNER_SERVER_HOST.value()}"
             f":{PREFECT_RUNNER_SERVER_PORT.value()}"
             "/flow/run"
         ),
         json={
-            "entrypoint": flow
+            "entrypoint": getattr(flow, "_entrypoint"),
             "parameters": flow.serialize_parameters(parameters),
             "parent_task_run_id": str(parent_task_run.id),
         },
@@ -98,15 +105,15 @@ async def _submit_flow_to_runner(

 @overload
 def submit_to_runner(
-    prefect_callable:
-    parameters:
+    prefect_callable: Flow[Any, Any] | Task[Any, Any],
+    parameters: dict[str, Any],
     retry_failed_submissions: bool = True,
 ) -> FlowRun: ...


 @overload
 def submit_to_runner(
-    prefect_callable:
+    prefect_callable: Flow[Any, Any] | Task[Any, Any],
     parameters: list[dict[str, Any]],
     retry_failed_submissions: bool = True,
 ) -> list[FlowRun]: ...
@@ -114,10 +121,10 @@ def submit_to_runner(

 @sync_compatible
 async def submit_to_runner(
-    prefect_callable:
-    parameters:
+    prefect_callable: Flow[Any, Any],
+    parameters: dict[str, Any] | list[dict[str, Any]] | None = None,
     retry_failed_submissions: bool = True,
-) ->
+) -> FlowRun | list[FlowRun]:
     """
     Submit a callable in the background via the runner webserver one or more times.

@@ -127,22 +134,22 @@ async def submit_to_runner(
             each dictionary represents a discrete invocation of the callable
         retry_failed_submissions: Whether to retry failed submissions to the runner webserver.
     """
-    if not isinstance(prefect_callable,
+    if not isinstance(prefect_callable, Flow):  # pyright: ignore[reportUnnecessaryIsInstance]
         raise TypeError(
             "The `submit_to_runner` utility only supports submitting flows and tasks."
         )

     parameters = parameters or {}
-    if isinstance(parameters,
+    if isinstance(parameters, list):
         return_single = False
-    elif isinstance(parameters, dict):
+    elif isinstance(parameters, dict):  # pyright: ignore[reportUnnecessaryIsInstance]
         parameters = [parameters]
         return_single = True
     else:
         raise TypeError("Parameters must be a dictionary or a list of dictionaries.")

-    submitted_runs = []
-    unsubmitted_parameters = []
+    submitted_runs: list[FlowRun] = []
+    unsubmitted_parameters: list[dict[str, Any]] = []

     for p in parameters:
         try:
@@ -181,9 +188,9 @@ async def submit_to_runner(

 @sync_compatible
 async def wait_for_submitted_runs(
-    flow_run_filter:
-    task_run_filter:
-    timeout:
+    flow_run_filter: FlowRunFilter | None = None,
+    task_run_filter: TaskRunFilter | None = None,
+    timeout: float | None = None,
     poll_interval: float = 3.0,
 ):
     """
@@ -197,7 +204,9 @@ async def wait_for_submitted_runs(
         poll_interval: How long to wait between polling each run's state (seconds).
     """

-    parent_flow_run_id =
+    parent_flow_run_id = (
+        ctx.flow_run.id if ((ctx := FlowRunContext.get()) and ctx.flow_run) else None
+    )

     if task_run_filter:
         raise NotImplementedError("Waiting for task runs is not yet supported.")
@@ -223,7 +232,9 @@ async def wait_for_submitted_runs(
             if parent_flow_run_id is not None:
                 subflow_runs = await client.read_flow_runs(
                     flow_run_filter=FlowRunFilter(
-                        parent_flow_run_id=
+                        parent_flow_run_id=FlowRunFilterParentFlowRunId(
+                            any_=[parent_flow_run_id]
+                        )
                     )
                 )

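The overloads above make the return shape follow the `parameters` argument. A minimal usage sketch (the flow and its parameters are hypothetical, and a runner webserver serving this flow must already be running):

```python
from prefect import flow
from prefect.runner.submit import submit_to_runner, wait_for_submitted_runs


@flow
def process(item: int) -> int:
    return item * 2


# A single parameter dict returns one FlowRun...
run = submit_to_runner(process, {"item": 1})

# ...while a list of dicts returns a list of FlowRuns.
runs = submit_to_runner(process, [{"item": i} for i in range(5)])

# Block until everything submitted above reaches a terminal state.
wait_for_submitted_runs()
```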
prefect/runner/utils.py
CHANGED
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
 from copy import deepcopy
-from typing import Any
+from typing import Any, Hashable

 from fastapi import FastAPI
 from fastapi.openapi.utils import get_openapi
@@ -8,7 +10,7 @@ from prefect import __version__ as PREFECT_VERSION


 def inject_schemas_into_openapi(
-    webserver: FastAPI, schemas_to_inject: dict[
+    webserver: FastAPI, schemas_to_inject: dict[Hashable, Any]
 ) -> dict[str, Any]:
     """
     Augments the webserver's OpenAPI schema with additional schemas from deployments / flows / tasks.
@@ -29,7 +31,7 @@ def inject_schemas_into_openapi(


 def merge_definitions(
-    injected_schemas: dict[
+    injected_schemas: dict[Hashable, Any], openapi_schema: dict[str, Any]
 ) -> dict[str, Any]:
     """
     Integrates definitions from injected schemas into the OpenAPI components.
@@ -51,7 +53,9 @@ def merge_definitions(
     return openapi_schema_copy


-def update_refs_in_schema(
+def update_refs_in_schema(
+    schema_item: dict[str, Any] | list[Any], new_ref: str
+) -> None:
     """
     Recursively replaces `$ref` with a new reference base in a schema item.

@@ -64,7 +68,7 @@ def update_refs_in_schema(schema_item: Any, new_ref: str) -> None:
         schema_item["$ref"] = schema_item["$ref"].replace("#/definitions/", new_ref)
         for value in schema_item.values():
             update_refs_in_schema(value, new_ref)
-    elif isinstance(schema_item, list):
+    elif isinstance(schema_item, list):  # pyright: ignore[reportUnnecessaryIsInstance]
         for item in schema_item:
             update_refs_in_schema(item, new_ref)

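The ref-rewriting logic itself is unchanged; a small sketch of what it does:

```python
from prefect.runner.utils import update_refs_in_schema

schema = {
    "properties": {
        "config": {"$ref": "#/definitions/Config"},
        "items": [{"$ref": "#/definitions/Item"}],
    }
}

# Rewrites every "#/definitions/" prefix in place, recursing through dicts and lists.
update_refs_in_schema(schema, "#/components/schemas/")
assert schema["properties"]["config"]["$ref"] == "#/components/schemas/Config"
```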
prefect/server/api/collections_data/views/aggregate-worker-metadata.json
CHANGED
@@ -559,7 +559,7 @@
       "description": "To use any private container registry with a username and password, choose DockerRegistry. To use a private Azure Container Registry with a managed identity, choose ACRManagedIdentity.",
       "anyOf": [
         {
-          "$ref": "#/definitions/
+          "$ref": "#/definitions/DockerRegistryCredentials"
         },
         {
           "$ref": "#/definitions/ACRManagedIdentity"
@@ -637,8 +637,8 @@
     "subscription_id"
   ],
   "definitions": {
-    "
-      "title": "
+    "DockerRegistryCredentials": {
+      "title": "DockerRegistryCredentials",
       "description": "Connects to a Docker registry.\n\nRequires a Docker Engine to be connectable.",
       "type": "object",
       "properties": {
@@ -671,7 +671,7 @@
         "password",
         "registry_url"
       ],
-      "block_type_slug": "docker-registry",
+      "block_type_slug": "docker-registry-credentials",
       "secret_fields": [
         "password"
       ],
prefect/server/api/flow_runs.py
CHANGED
@@ -29,6 +29,7 @@ import prefect.server.schemas as schemas
 from prefect.logging import get_logger
 from prefect.server.api.run_history import run_history
 from prefect.server.api.validation import validate_job_variables_for_deployment_flow_run
+from prefect.server.api.workers import WorkerLookups
 from prefect.server.database import PrefectDBInterface, provide_database_interface
 from prefect.server.exceptions import FlowRunGraphTooLarge
 from prefect.server.models.flow_runs import (
@@ -68,6 +69,7 @@ async def create_flow_run(
         orchestration_dependencies.provide_flow_orchestration_parameters
     ),
     api_version: str = Depends(dependencies.provide_request_api_version),
+    worker_lookups: WorkerLookups = Depends(WorkerLookups),
 ) -> schemas.responses.FlowRunResponse:
     """
     Create a flow run. If a flow run with the same flow_id and
@@ -91,6 +93,25 @@ async def create_flow_run(
     right_now = now("UTC")

     async with db.session_context(begin_transaction=True) as session:
+        if flow_run.work_pool_name:
+            if flow_run.work_queue_name:
+                work_queue_id = await worker_lookups._get_work_queue_id_from_name(
+                    session=session,
+                    work_pool_name=flow_run.work_pool_name,
+                    work_queue_name=flow_run.work_queue_name,
+                )
+            else:
+                work_queue_id = (
+                    await worker_lookups._get_default_work_queue_id_from_work_pool_name(
+                        session=session,
+                        work_pool_name=flow_run.work_pool_name,
+                    )
+                )
+        else:
+            work_queue_id = None
+
+        flow_run_object.work_queue_id = work_queue_id
+
         model = await models.flow_runs.create_flow_run(
             session=session,
             flow_run=flow_run_object,
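With this change the REST API can route a newly created flow run to a work pool directly. A hedged sketch against a local server (the flow id is a placeholder; field names follow the handler above):

```python
import httpx

resp = httpx.post(
    "http://127.0.0.1:4200/api/flow_runs/",
    json={
        "flow_id": "00000000-0000-0000-0000-000000000000",  # placeholder flow id
        "work_pool_name": "my-pool",  # resolved to the pool's default queue
        # "work_queue_name": "high-priority",  # or name a specific queue in the pool
    },
)
resp.raise_for_status()
print(resp.json()["work_queue_id"])  # now populated by the server
```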
prefect/server/api/server.py
CHANGED
@@ -294,12 +294,16 @@ async def prefect_object_not_found_exception_handler(
     )


+API_APP_CACHE: dict[tuple[str, str | None], FastAPI] = {}
+
+
 def create_api_app(
     dependencies: list[Any] | None = None,
     health_check_path: str = "/health",
     version_check_path: str = "/version",
     fast_api_app_kwargs: dict[str, Any] | None = None,
     final: bool = False,
+    ignore_cache: bool = False,
 ) -> FastAPI:
     """
     Create a FastAPI app that includes the Prefect REST API
@@ -310,10 +314,20 @@ def create_api_app(
         fast_api_app_kwargs: kwargs to pass to the FastAPI constructor
         final: whether this will be the last instance of the Prefect server to be
             created in this process, so that additional optimizations may be applied
+        ignore_cache: if set, a new app will be created even if the settings and fast_api_app_kwargs match
+            an existing app in the cache

     Returns:
         a FastAPI app that serves the Prefect REST API
     """
+    cache_key = (
+        prefect.settings.get_current_settings().hash_key(),
+        hash_objects(fast_api_app_kwargs) if fast_api_app_kwargs else None,
+    )
+
+    if cache_key in API_APP_CACHE and not ignore_cache:
+        return API_APP_CACHE[cache_key]
+
     fast_api_app_kwargs = fast_api_app_kwargs or {}
     api_app = FastAPI(title=API_TITLE, **fast_api_app_kwargs)
     api_app.add_middleware(GZipMiddleware)
@@ -389,6 +403,8 @@ def create_api_app(
         )
         return await call_next(request)

+    API_APP_CACHE[cache_key] = api_app
+
     return api_app


@@ -655,6 +671,7 @@ def create_app(
             }
         },
         final=final,
+        ignore_cache=ignore_cache,
     )
     ui_app = create_ui_app(ephemeral)

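A sketch of the resulting caching behavior, assuming settings are identical across calls:

```python
from prefect.server.api.server import create_api_app

app_a = create_api_app()
app_b = create_api_app()
assert app_a is app_b  # second call is served from API_APP_CACHE

fresh = create_api_app(ignore_cache=True)  # forces a rebuild
assert fresh is not app_a
```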
prefect/server/api/task_runs.py
CHANGED
@@ -16,6 +16,7 @@ from fastapi import (
     WebSocket,
     status,
 )
+from fastapi.responses import ORJSONResponse
 from starlette.websockets import WebSocketDisconnect

 import prefect.server.api.dependencies as dependencies
@@ -27,7 +28,10 @@ from prefect.server.database import PrefectDBInterface, provide_database_interfa
 from prefect.server.orchestration import dependencies as orchestration_dependencies
 from prefect.server.orchestration.core_policy import CoreTaskPolicy
 from prefect.server.orchestration.policies import TaskRunOrchestrationPolicy
-from prefect.server.schemas.responses import
+from prefect.server.schemas.responses import (
+    OrchestrationResult,
+    TaskRunPaginationResponse,
+)
 from prefect.server.task_queue import MultiQueue, TaskQueue
 from prefect.server.utilities import subscriptions
 from prefect.server.utilities.server import PrefectRouter
@@ -214,6 +218,53 @@ async def read_task_runs(
     )


+@router.post("/paginate", response_class=ORJSONResponse)
+async def paginate_task_runs(
+    sort: schemas.sorting.TaskRunSort = Body(schemas.sorting.TaskRunSort.ID_DESC),
+    limit: int = dependencies.LimitBody(),
+    page: int = Body(1, ge=1),
+    flows: Optional[schemas.filters.FlowFilter] = None,
+    flow_runs: Optional[schemas.filters.FlowRunFilter] = None,
+    task_runs: Optional[schemas.filters.TaskRunFilter] = None,
+    deployments: Optional[schemas.filters.DeploymentFilter] = None,
+    db: PrefectDBInterface = Depends(provide_database_interface),
+) -> TaskRunPaginationResponse:
+    """
+    Pagination query for task runs.
+    """
+    offset = (page - 1) * limit
+
+    async with db.session_context() as session:
+        runs = await models.task_runs.read_task_runs(
+            session=session,
+            flow_filter=flows,
+            flow_run_filter=flow_runs,
+            task_run_filter=task_runs,
+            deployment_filter=deployments,
+            offset=offset,
+            limit=limit,
+            sort=sort,
+        )
+
+        total_count = await models.task_runs.count_task_runs(
+            session=session,
+            flow_filter=flows,
+            flow_run_filter=flow_runs,
+            task_run_filter=task_runs,
+            deployment_filter=deployments,
+        )
+
+    return TaskRunPaginationResponse.model_validate(
+        dict(
+            results=runs,
+            count=total_count,
+            limit=limit,
+            pages=(total_count + limit - 1) // limit,
+            page=page,
+        )
+    )
+
+
 @router.delete("/{id}", status_code=status.HTTP_204_NO_CONTENT)
 async def delete_task_run(
     task_run_id: UUID = Path(..., description="The task run id", alias="id"),
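A hedged sketch of calling the new pagination endpoint against a local server (the sort value is the enum member's name):

```python
import httpx

resp = httpx.post(
    "http://127.0.0.1:4200/api/task_runs/paginate",
    json={"page": 1, "limit": 50, "sort": "ID_DESC"},
)
resp.raise_for_status()
body = resp.json()
# Response mirrors TaskRunPaginationResponse: results, count, limit, pages, page.
print(body["count"], body["pages"], len(body["results"]))
```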
prefect/settings/models/tasks.py
CHANGED
@@ -57,6 +57,11 @@ class TasksSettings(PrefectBaseSettings):
         description="If `True`, enables a refresh of cached results: re-executing the task will refresh the cached results.",
     )

+    default_no_cache: bool = Field(
+        default=False,
+        description="If `True`, sets the default cache policy on all tasks to `NO_CACHE`.",
+    )
+
     default_retries: int = Field(
         default=0,
         ge=0,
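Like other Prefect settings, this should be controllable from the environment; the variable name below is assumed from the standard PREFECT_&lt;section&gt;_&lt;field&gt; convention:

```python
import os

# Assumed env var name; set before Prefect settings are loaded.
os.environ["PREFECT_TASKS_DEFAULT_NO_CACHE"] = "true"

from prefect.settings import get_current_settings

assert get_current_settings().tasks.default_no_cache is True
```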
prefect/task_engine.py
CHANGED
@@ -24,6 +24,7 @@ from typing import (
     Type,
     TypeVar,
     Union,
+    overload,
 )
 from uuid import UUID

@@ -1527,6 +1528,32 @@ async def run_generator_task_async(
         await engine.result()


+@overload
+def run_task(
+    task: "Task[P, R]",
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[dict[str, Any]] = None,
+    wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
+    return_type: Literal["state"] = "state",
+    dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+    context: Optional[dict[str, Any]] = None,
+) -> State[R]: ...
+
+
+@overload
+def run_task(
+    task: "Task[P, R]",
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[dict[str, Any]] = None,
+    wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
+    return_type: Literal["result"] = "result",
+    dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+    context: Optional[dict[str, Any]] = None,
+) -> R: ...
+
+
 def run_task(
     task: "Task[P, Union[R, Coroutine[Any, Any, R]]]",
     task_run_id: Optional[UUID] = None,
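A sketch of what the new overloads buy at type-check time:

```python
from prefect import task
from prefect.task_engine import run_task


@task
def add(x: int, y: int) -> int:
    return x + y


# Checkers now narrow the return type on the `return_type` literal:
state = run_task(add, parameters={"x": 1, "y": 2}, return_type="state")   # State[int]
value = run_task(add, parameters={"x": 1, "y": 2}, return_type="result")  # int
```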
prefect/tasks.py
CHANGED
@@ -422,6 +422,11 @@ class Task(Generic[P, R]):

         self.task_key: str = _generate_task_key(self.fn)

+        # determine cache and result configuration
+        settings = get_current_settings()
+        if settings.tasks.default_no_cache and cache_policy is NotSet:
+            cache_policy = NO_CACHE
+
         if cache_policy is not NotSet and cache_key_fn is not None:
             logger.warning(
                 f"Both `cache_policy` and `cache_key_fn` are set on task {self}. `cache_key_fn` will be used."
@@ -458,7 +463,7 @@ class Task(Generic[P, R]):
             )
         elif cache_policy is NotSet and result_storage_key is None:
             self.cache_policy = DEFAULT
-        elif result_storage_key:
+        elif cache_policy != NO_CACHE and result_storage_key:
             # TODO: handle this situation with double storage
             self.cache_policy = None
         else:
@@ -468,7 +473,6 @@ class Task(Generic[P, R]):
         # TODO: We can instantiate a `TaskRunPolicy` and add Pydantic bound checks to
         # validate that the user passes positive numbers here

-        settings = get_current_settings()
         self.retries: int = (
             retries if retries is not None else settings.tasks.default_retries
         )
@@ -1426,6 +1430,8 @@ class Task(Generic[P, R]):
         else:
             return futures

+    # Background task methods
+
     def apply_async(
         self,
         args: Optional[tuple[Any, ...]] = None,
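A sketch of the behavior change: an explicit NO_CACHE policy should now survive alongside a result_storage_key instead of being silently replaced:

```python
from prefect import task
from prefect.cache_policies import NO_CACHE


@task(cache_policy=NO_CACHE, result_storage_key="my-key")
def compute() -> int:
    return 42


# With this change the explicit policy is kept:
print(compute.cache_policy)  # expected: NO_CACHE
```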
prefect/telemetry/run_telemetry.py
CHANGED
@@ -16,6 +16,7 @@ from opentelemetry.trace import (
 from typing_extensions import TypeAlias

 import prefect
+import prefect.settings
 from prefect.client.orchestration import PrefectClient, SyncPrefectClient
 from prefect.client.schemas import FlowRun, TaskRun
 from prefect.client.schemas.objects import State
@@ -50,15 +51,19 @@ class RunTelemetry:
         default_factory=lambda: get_tracer("prefect", prefect.__version__)
     )
     span: Span | None = None
+    _enabled: bool = field(
+        default_factory=lambda: prefect.settings.get_current_settings().cloud.enable_orchestration_telemetry
+    )

     async def async_start_span(
         self,
         run: FlowOrTaskRun,
         client: PrefectClient,
         parameters: dict[str, Any] | None = None,
-    ) -> Span:
+    ) -> Span | None:
+        if not self._enabled:
+            return None
         traceparent, span = self._start_span(run, parameters)
-
         if self._run_type(run) == "flow" and traceparent:
             # Only explicitly update labels if the run is a flow as task runs
             # are updated via events.
@@ -73,7 +78,10 @@ class RunTelemetry:
         run: FlowOrTaskRun,
         client: SyncPrefectClient,
         parameters: dict[str, Any] | None = None,
-    ) -> Span:
+    ) -> Span | None:
+        if not self._enabled:
+            return None
+
         traceparent, span = self._start_span(run, parameters)

         if self._run_type(run) == "flow" and traceparent:
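Since _enabled is read from the current settings, spans can now be switched off globally. A sketch; the environment variable name is assumed from the standard settings convention:

```python
import os

# Assumed env var name for cloud.enable_orchestration_telemetry.
os.environ["PREFECT_CLOUD_ENABLE_ORCHESTRATION_TELEMETRY"] = "false"

from prefect.telemetry.run_telemetry import RunTelemetry

telemetry = RunTelemetry()
print(telemetry._enabled)  # expected: False; start_span(...) would return None
```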
prefect/transactions.py
CHANGED
@@ -23,6 +23,7 @@ from prefect.exceptions import (
     MissingContextError,
     SerializationError,
 )
+from prefect.filesystems import NullFileSystem
 from prefect.logging.loggers import LoggingAdapter, get_logger, get_run_logger
 from prefect.results import (
     ResultRecord,
@@ -453,6 +454,10 @@ def transaction(
     if key and not store:
         store = get_result_store()

+    # Avoid inheriting a NullFileSystem for metadata_storage from a flow's result store
+    if store and isinstance(store.metadata_storage, NullFileSystem):
+        store = store.model_copy(update={"metadata_storage": None})
+
     try:
         _logger: Union[logging.Logger, LoggingAdapter] = logger or get_run_logger()
     except MissingContextError:
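A sketch of the scenario this targets, assuming a flow that does not persist results hands the transaction a store whose metadata_storage is a NullFileSystem:

```python
from prefect import flow
from prefect.transactions import transaction


@flow(persist_result=False)
def my_flow() -> None:
    # Previously the transaction could inherit a NullFileSystem for metadata
    # storage and fail to read back its own record; now it falls back to the
    # default metadata storage.
    with transaction(key="my-transaction-key") as txn:
        txn.stage({"answer": 42})


my_flow()
```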
prefect/utilities/engine.py
CHANGED
@@ -63,7 +63,7 @@ engine_logger: Logger = get_logger("engine")
 T = TypeVar("T")


-async def collect_task_run_inputs(expr: Any, max_depth: int = -1) -> set[
+async def collect_task_run_inputs(expr: Any, max_depth: int = -1) -> set[TaskRunResult]:
     """
     This function recurses through an expression to generate a set of any discernible
     task run inputs it finds in the data structure. It produces a set of all inputs
@@ -76,7 +76,7 @@ async def collect_task_run_inputs(expr: Any, max_depth: int = -1) -> set[TaskRun
     """
     # TODO: This function needs to be updated to detect parameters and constants

-    inputs: set[
+    inputs: set[TaskRunResult] = set()

     def add_futures_and_states_to_inputs(obj: Any) -> None:
         if isinstance(obj, PrefectFuture):
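The change here is annotation-only; behavior is unchanged. For reference:

```python
import asyncio

from prefect.utilities.engine import collect_task_run_inputs

# Plain data contains no futures or states, so the collected set is empty.
inputs = asyncio.run(collect_task_run_inputs({"nested": [1, 2, 3]}))
assert inputs == set()
```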
prefect/utilities/importtools.py
CHANGED
@@ -93,8 +93,11 @@ def load_script_as_module(path: str) -> ModuleType:
     parent_path = str(Path(path).resolve().parent)
     working_directory = os.getcwd()

-
-
+    module_name = os.path.splitext(Path(path).name)[0]
+
+    # fall back in case of filenames with the same names as modules
+    if module_name in sys.modules:
+        module_name = f"__prefect_loader_{id(path)}__"

     spec = importlib.util.spec_from_file_location(
         module_name,
@@ -112,15 +115,9 @@ def load_script_as_module(path: str) -> ModuleType:
         with _get_sys_path_lock():
             sys.path.insert(0, working_directory)
             sys.path.insert(0, parent_path)
-
-                spec.loader.exec_module(module)
-            finally:
-                sys.path.remove(parent_path)
-                sys.path.remove(working_directory)
+            spec.loader.exec_module(module)
     except Exception as exc:
         raise ScriptError(user_exc=exc, path=path) from exc
-    finally:
-        sys.modules.pop(module_name)

     return module

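A sketch of the collision fix: a script whose filename shadows an already-imported module no longer replaces that entry in sys.modules:

```python
import math  # ensure "math" is already in sys.modules
import sys
from pathlib import Path
from tempfile import TemporaryDirectory

from prefect.utilities.importtools import load_script_as_module

with TemporaryDirectory() as tmp:
    script = Path(tmp) / "math.py"  # filename shadows the stdlib module
    script.write_text("value = 42\n")

    module = load_script_as_module(str(script))  # loaded under a fallback name
    print(module.value)  # 42
    print(math.sqrt(4))  # 2.0 -- the real stdlib module is untouched
```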