prefect-client 3.0.0rc18__py3-none-any.whl → 3.0.0rc20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- prefect/__init__.py +0 -3
- prefect/_internal/concurrency/services.py +14 -0
- prefect/_internal/schemas/bases.py +1 -0
- prefect/blocks/core.py +41 -30
- prefect/blocks/system.py +48 -12
- prefect/client/cloud.py +56 -7
- prefect/client/collections.py +1 -1
- prefect/client/orchestration.py +111 -8
- prefect/client/schemas/objects.py +40 -2
- prefect/concurrency/asyncio.py +8 -2
- prefect/concurrency/services.py +16 -6
- prefect/concurrency/sync.py +4 -1
- prefect/concurrency/v1/__init__.py +0 -0
- prefect/concurrency/v1/asyncio.py +143 -0
- prefect/concurrency/v1/context.py +27 -0
- prefect/concurrency/v1/events.py +61 -0
- prefect/concurrency/v1/services.py +116 -0
- prefect/concurrency/v1/sync.py +92 -0
- prefect/context.py +2 -2
- prefect/deployments/flow_runs.py +0 -7
- prefect/deployments/runner.py +11 -0
- prefect/events/clients.py +41 -0
- prefect/events/related.py +72 -73
- prefect/events/utilities.py +2 -0
- prefect/events/worker.py +12 -3
- prefect/exceptions.py +6 -0
- prefect/flow_engine.py +5 -0
- prefect/flows.py +9 -2
- prefect/logging/handlers.py +4 -1
- prefect/main.py +8 -6
- prefect/records/base.py +74 -18
- prefect/records/filesystem.py +207 -0
- prefect/records/memory.py +16 -3
- prefect/records/result_store.py +19 -14
- prefect/results.py +232 -169
- prefect/runner/runner.py +7 -4
- prefect/settings.py +14 -15
- prefect/states.py +73 -18
- prefect/task_engine.py +127 -221
- prefect/task_worker.py +7 -39
- prefect/tasks.py +0 -7
- prefect/transactions.py +89 -27
- prefect/utilities/annotations.py +4 -3
- prefect/utilities/asyncutils.py +4 -4
- prefect/utilities/callables.py +1 -3
- prefect/utilities/dispatch.py +16 -11
- prefect/utilities/engine.py +1 -4
- prefect/utilities/schema_tools/hydration.py +13 -0
- prefect/workers/base.py +78 -18
- {prefect_client-3.0.0rc18.dist-info → prefect_client-3.0.0rc20.dist-info}/METADATA +3 -4
- {prefect_client-3.0.0rc18.dist-info → prefect_client-3.0.0rc20.dist-info}/RECORD +54 -48
- prefect/manifests.py +0 -21
- {prefect_client-3.0.0rc18.dist-info → prefect_client-3.0.0rc20.dist-info}/LICENSE +0 -0
- {prefect_client-3.0.0rc18.dist-info → prefect_client-3.0.0rc20.dist-info}/WHEEL +0 -0
- {prefect_client-3.0.0rc18.dist-info → prefect_client-3.0.0rc20.dist-info}/top_level.txt +0 -0
prefect/transactions.py
CHANGED
@@ -2,6 +2,7 @@ import copy
 import logging
 from contextlib import contextmanager
 from contextvars import ContextVar, Token
+from functools import partial
 from typing import (
     Any,
     Callable,
@@ -17,10 +18,9 @@ from pydantic import Field, PrivateAttr
 from typing_extensions import Self

 from prefect.context import ContextModel, FlowRunContext, TaskRunContext
-from prefect.exceptions import MissingContextError
-from prefect.logging.loggers import
+from prefect.exceptions import MissingContextError, SerializationError
+from prefect.logging.loggers import get_logger, get_run_logger
 from prefect.records import RecordStore
-from prefect.records.result_store import ResultFactoryStore
 from prefect.results import (
     BaseResult,
     ResultFactory,
@@ -60,13 +60,16 @@ class Transaction(ContextModel):
     key: Optional[str] = None
     children: List["Transaction"] = Field(default_factory=list)
     commit_mode: Optional[CommitMode] = None
+    isolation_level: Optional[IsolationLevel] = IsolationLevel.READ_COMMITTED
     state: TransactionState = TransactionState.PENDING
     on_commit_hooks: List[Callable[["Transaction"], None]] = Field(default_factory=list)
     on_rollback_hooks: List[Callable[["Transaction"], None]] = Field(
         default_factory=list
     )
     overwrite: bool = False
-    logger: Union[logging.Logger, logging.LoggerAdapter
+    logger: Union[logging.Logger, logging.LoggerAdapter] = Field(
+        default_factory=partial(get_logger, "transactions")
+    )
     _stored_values: Dict[str, Any] = PrivateAttr(default_factory=dict)
     _staged_value: Any = None
     __var__: ContextVar = ContextVar("transaction")
@@ -101,16 +104,27 @@ class Transaction(ContextModel):
             raise RuntimeError(
                 "Context already entered. Context enter calls cannot be nested."
             )
-
+        parent = get_transaction()
+        if parent:
+            self._stored_values = copy.deepcopy(parent._stored_values)
+        # set default commit behavior; either inherit from parent or set a default of eager
         if self.commit_mode is None:
-
+            self.commit_mode = parent.commit_mode if parent else CommitMode.LAZY
+        # set default isolation level; either inherit from parent or set a default of read committed
+        if self.isolation_level is None:
+            self.isolation_level = (
+                parent.isolation_level if parent else IsolationLevel.READ_COMMITTED
+            )

-
-
-
-
-
-
+        assert self.isolation_level is not None, "Isolation level was not set correctly"
+        if (
+            self.store
+            and self.key
+            and not self.store.supports_isolation_level(self.isolation_level)
+        ):
+            raise ValueError(
+                f"Isolation level {self.isolation_level.name} is not supported by record store type {self.store.__class__.__name__}"
+            )

         # this needs to go before begin, which could set the state to committed
         self.state = TransactionState.ACTIVE
@@ -148,8 +162,13 @@ class Transaction(ContextModel):
             self.reset()

     def begin(self):
-
-
+        if (
+            self.store
+            and self.key
+            and self.isolation_level == IsolationLevel.SERIALIZABLE
+        ):
+            self.logger.debug(f"Acquiring lock for transaction {self.key!r}")
+            self.store.acquire_lock(self.key)
         if (
             not self.overwrite
             and self.store
@@ -158,11 +177,12 @@ class Transaction(ContextModel):
         ):
             self.state = TransactionState.COMMITTED

-    def read(self) -> BaseResult:
+    def read(self) -> Optional[BaseResult]:
         if self.store and self.key:
-
-
-
+            record = self.store.read(key=self.key)
+            if record is not None:
+                return record.result
+        return None

     def reset(self) -> None:
         parent = self.get_parent()
@@ -192,6 +212,14 @@ class Transaction(ContextModel):

     def commit(self) -> bool:
         if self.state in [TransactionState.ROLLED_BACK, TransactionState.COMMITTED]:
+            if (
+                self.store
+                and self.key
+                and self.isolation_level == IsolationLevel.SERIALIZABLE
+            ):
+                self.logger.debug(f"Releasing lock for transaction {self.key!r}")
+                self.store.release_lock(self.key)
+
             return False

         try:
@@ -202,9 +230,24 @@ class Transaction(ContextModel):
                 self.run_hook(hook, "commit")

             if self.store and self.key:
-                self.store.write(key=self.key,
+                self.store.write(key=self.key, result=self._staged_value)
             self.state = TransactionState.COMMITTED
+            if (
+                self.store
+                and self.key
+                and self.isolation_level == IsolationLevel.SERIALIZABLE
+            ):
+                self.logger.debug(f"Releasing lock for transaction {self.key!r}")
+                self.store.release_lock(self.key)
             return True
+        except SerializationError as exc:
+            if self.logger:
+                self.logger.warning(
+                    f"Encountered an error while serializing result for transaction {self.key!r}: {exc}"
+                    " Code execution will continue, but the transaction will not be committed.",
+                )
+            self.rollback()
+            return False
         except Exception:
             if self.logger:
                 self.logger.exception(
@@ -216,19 +259,25 @@ class Transaction(ContextModel):

     def run_hook(self, hook, hook_type: str) -> None:
         hook_name = _get_hook_name(hook)
-
+        # Undocumented way to disable logging for a hook. Subject to change.
+        should_log = getattr(hook, "log_on_run", True)
+
+        if should_log:
+            self.logger.info(f"Running {hook_type} hook {hook_name!r}")

         try:
             hook(self)
         except Exception as exc:
-
-
-
+            if should_log:
+                self.logger.error(
+                    f"An error was encountered while running {hook_type} hook {hook_name!r}",
+                )
             raise exc
         else:
-
-
-
+            if should_log:
+                self.logger.info(
+                    f"{hook_type.capitalize()} hook {hook_name!r} finished running successfully"
+                )

     def stage(
         self,
@@ -269,6 +318,14 @@ class Transaction(ContextModel):
                 exc_info=True,
             )
             return False
+        finally:
+            if (
+                self.store
+                and self.key
+                and self.isolation_level == IsolationLevel.SERIALIZABLE
+            ):
+                self.logger.debug(f"Releasing lock for transaction {self.key!r}")
+                self.store.release_lock(self.key)

     @classmethod
     def get_active(cls: Type[Self]) -> Optional[Self]:
@@ -284,8 +341,9 @@ def transaction(
     key: Optional[str] = None,
     store: Optional[RecordStore] = None,
     commit_mode: Optional[CommitMode] = None,
+    isolation_level: Optional[IsolationLevel] = None,
     overwrite: bool = False,
-    logger:
+    logger: Union[logging.Logger, logging.LoggerAdapter, None] = None,
 ) -> Generator[Transaction, None, None]:
     """
     A context manager for opening and managing a transaction.
@@ -309,6 +367,7 @@ def transaction(
             flow_run_context, "result_factory", None
         )

+        new_factory: ResultFactory
        if existing_factory and existing_factory.storage_block_id:
            new_factory = existing_factory.model_copy(
                update={
@@ -332,6 +391,8 @@ def transaction(
                    result_storage=default_storage,
                )
            )
+        from prefect.records.result_store import ResultFactoryStore
+
        store = ResultFactoryStore(
            result_factory=new_factory,
        )
@@ -345,6 +406,7 @@ def transaction(
        key=key,
        store=store,
        commit_mode=commit_mode,
+        isolation_level=isolation_level,
        overwrite=overwrite,
        logger=logger,
    ) as txn:
prefect/utilities/annotations.py
CHANGED
@@ -21,8 +21,8 @@ class BaseAnnotation(
     def rewrap(self, value: T) -> "BaseAnnotation[T]":
         return type(self)(value)

-    def __eq__(self, other:
-        if
+    def __eq__(self, other: "BaseAnnotation[T]") -> bool:
+        if type(self) is not type(other):
             return False
         return self.unwrap() == other.unwrap()

@@ -90,10 +90,11 @@ class quote(BaseAnnotation[T]):
 class Quote(quote):
     def __init__(self, expr):
         warnings.warn(
-            DeprecationWarning,
             "Use of `Quote` is deprecated. Use `quote` instead.",
+            DeprecationWarning,
             stacklevel=2,
         )
+        super().__init__(expr)


 class NotSet:
prefect/utilities/asyncutils.py
CHANGED
@@ -314,20 +314,20 @@ def in_async_main_thread() -> bool:

 @overload
 def sync_compatible(
-    async_fn: Callable[..., Coroutine[Any, Any, R]],
+    async_fn: Callable[..., Coroutine[Any, Any, R]],
 ) -> Callable[..., R]:
     ...


 @overload
 def sync_compatible(
-    async_fn: Callable[..., Coroutine[Any, Any, R]],
+    async_fn: Callable[..., Coroutine[Any, Any, R]],
 ) -> Callable[..., Coroutine[Any, Any, R]]:
     ...


 def sync_compatible(
-    async_fn: Callable[..., Coroutine[Any, Any, R]],
+    async_fn: Callable[..., Coroutine[Any, Any, R]],
 ) -> Callable[..., Union[R, Coroutine[Any, Any, R]]]:
     """
     Converts an async function into a dual async and sync function.
@@ -403,7 +403,7 @@ def sync_compatible(


 @asynccontextmanager
-async def asyncnullcontext(value=None):
+async def asyncnullcontext(value=None, *args, **kwargs):
     yield value

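
The asyncnullcontext change above broadens its signature to accept and ignore arbitrary arguments, so it can be swapped in wherever an async context manager that takes arguments is optional (this is how prefect/workers/base.py uses it below). A small self-contained sketch of that pattern; fake_limiter and its arguments are illustrative, not Prefect APIs:

import asyncio
from contextlib import asynccontextmanager

from prefect.utilities.asyncutils import asyncnullcontext


@asynccontextmanager
async def fake_limiter(name, occupy=1):
    # Illustrative stand-in for a limiter-style async context manager.
    print(f"acquired {occupy} slot(s) on {name!r}")
    yield
    print(f"released {name!r}")


async def main(use_limit: bool) -> None:
    # asyncnullcontext now tolerates the same positional/keyword arguments as a
    # real limiter, so the call site needs no special case for the no-limit path.
    ctx = fake_limiter if use_limit else asyncnullcontext
    async with ctx("my-limit", occupy=2):
        print("doing work")


if __name__ == "__main__":
    asyncio.run(main(use_limit=False))
    asyncio.run(main(use_limit=True))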
prefect/utilities/callables.py
CHANGED
@@ -12,9 +12,7 @@ from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple

 import cloudpickle
 import pydantic
-from griffe
-from griffe.docstrings.dataclasses import DocstringSectionKind
-from griffe.docstrings.parsers import Parser, parse
+from griffe import Docstring, DocstringSectionKind, Parser, parse
 from typing_extensions import Literal

 from prefect._internal.pydantic.v1_schema import has_v1_type_as_param
prefect/utilities/dispatch.py
CHANGED
@@ -162,17 +162,22 @@ def register_type(cls: T) -> T:
     key = get_dispatch_key(cls)
     existing_value = registry.get(key)
     if existing_value is not None and id(existing_value) != id(cls):
-
-
-
-
-
-
-
-
-
-
-
+        try:
+            # Get line numbers for debugging
+            file = inspect.getsourcefile(cls)
+            line_number = inspect.getsourcelines(cls)[1]
+            existing_file = inspect.getsourcefile(existing_value)
+            existing_line_number = inspect.getsourcelines(existing_value)[1]
+            warnings.warn(
+                f"Type {cls.__name__!r} at {file}:{line_number} has key {key!r} that "
+                f"matches existing registered type {existing_value.__name__!r} from "
+                f"{existing_file}:{existing_line_number}. The existing type will be "
+                "overridden."
+            )
+        except OSError:
+            # If we can't get the source, another actor is loading this class via eval
+            # and we shouldn't update the registry
+            return cls

     # Add to the registry
     registry[key] = cls
prefect/utilities/engine.py
CHANGED
@@ -51,7 +51,6 @@ from prefect.logging.loggers import (
 )
 from prefect.results import BaseResult
 from prefect.settings import (
-    PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION,
     PREFECT_LOGGING_LOG_PRINTS,
 )
 from prefect.states import (
@@ -806,9 +805,7 @@ def emit_task_run_state_change_event(
                 else ""
             ),
             "prefect.state-type": str(validated_state.type.value),
-            "prefect.orchestration": "client"
-            if PREFECT_EXPERIMENTAL_ENABLE_CLIENT_SIDE_TASK_ORCHESTRATION
-            else "server",
+            "prefect.orchestration": "client",
         },
         follows=follows,
     )
prefect/utilities/schema_tools/hydration.py
CHANGED
@@ -202,6 +202,11 @@ def json_handler(obj: dict, ctx: HydrationContext):
         dehydrated_json = _hydrate(obj["value"], ctx)
     else:
         dehydrated_json = obj["value"]
+
+    # If the result is a Placeholder, we should return it as is
+    if isinstance(dehydrated_json, Placeholder):
+        return dehydrated_json
+
     try:
         return json.loads(dehydrated_json)
     except (json.decoder.JSONDecodeError, TypeError) as e:
@@ -224,6 +229,10 @@ def jinja_handler(obj: dict, ctx: HydrationContext):
     else:
         dehydrated_jinja = obj["template"]

+    # If the result is a Placeholder, we should return it as is
+    if isinstance(dehydrated_jinja, Placeholder):
+        return dehydrated_jinja
+
     try:
         validate_user_template(dehydrated_jinja)
     except (jinja2.exceptions.TemplateSyntaxError, TemplateSecurityError) as exc:
@@ -245,6 +254,10 @@ def workspace_variable_handler(obj: dict, ctx: HydrationContext):
     else:
         dehydrated_variable = obj["variable_name"]

+    # If the result is a Placeholder, we should return it as is
+    if isinstance(dehydrated_variable, Placeholder):
+        return dehydrated_variable
+
     if not ctx.render_workspace_variables:
         return WorkspaceVariable(variable_name=obj["variable_name"])

prefect/workers/base.py
CHANGED
@@ -19,6 +19,11 @@ from prefect.client.orchestration import PrefectClient, get_client
 from prefect.client.schemas.actions import WorkPoolCreate, WorkPoolUpdate
 from prefect.client.schemas.objects import StateType, WorkPool
 from prefect.client.utilities import inject_client
+from prefect.concurrency.asyncio import (
+    AcquireConcurrencySlotTimeoutError,
+    ConcurrencySlotAcquisitionError,
+    concurrency,
+)
 from prefect.events import Event, RelatedResource, emit_event
 from prefect.events.related import object_as_related_resource, tags_as_related_resources
 from prefect.exceptions import (
@@ -35,7 +40,13 @@ from prefect.settings import (
     PREFECT_WORKER_QUERY_SECONDS,
     get_current_settings,
 )
-from prefect.states import
+from prefect.states import (
+    AwaitingConcurrencySlot,
+    Crashed,
+    Pending,
+    exception_to_failed_state,
+)
+from prefect.utilities.asyncutils import asyncnullcontext
 from prefect.utilities.dispatch import get_registry_for_type, register_base_type
 from prefect.utilities.engine import propose_state
 from prefect.utilities.services import critical_service_loop
@@ -654,6 +665,7 @@ class BaseWorker(abc.ABC):
             work_pool = await self._client.read_work_pool(
                 work_pool_name=self._work_pool_name
             )
+
         except ObjectNotFound:
             if self._create_pool_if_not_found:
                 wp = WorkPoolCreate(
@@ -747,11 +759,10 @@ class BaseWorker(abc.ABC):
         for execution by the worker.
         """
         submittable_flow_runs = [entry.flow_run for entry in flow_run_response]
-
+
         for flow_run in submittable_flow_runs:
             if flow_run.id in self._submitting_flow_run_ids:
                 continue
-
             try:
                 if self._limiter:
                     self._limiter.acquire_on_behalf_of_nowait(flow_run.id)
@@ -796,8 +807,6 @@ class BaseWorker(abc.ABC):
                 " Please use an agent to execute this flow run."
             )

-    #
-
     async def _submit_run(self, flow_run: "FlowRun") -> None:
         """
         Submits a given flow run for execution by the worker.
@@ -837,28 +846,59 @@ class BaseWorker(abc.ABC):
                     "not be cancellable."
                 )

-
+            run_logger.info(f"Completed submission of flow run '{flow_run.id}'")

-
-
-
-
+        else:
+            # If the run is not ready to submit, release the concurrency slot
+            if self._limiter:
+                self._limiter.release_on_behalf_of(flow_run.id)

-
+        self._submitting_flow_run_ids.remove(flow_run.id)

     async def _submit_run_and_capture_errors(
         self, flow_run: "FlowRun", task_status: Optional[anyio.abc.TaskStatus] = None
     ) -> Union[BaseWorkerResult, Exception]:
         run_logger = self.get_flow_run_logger(flow_run)
+        deployment = None
+
+        if flow_run.deployment_id:
+            deployment = await self._client.read_deployment(flow_run.deployment_id)
+        if deployment and deployment.concurrency_limit:
+            limit_name = f"deployment:{deployment.id}"
+            concurrency_limit = deployment.concurrency_limit
+            concurrency_ctx = concurrency
+        else:
+            limit_name = None
+            concurrency_limit = None
+            concurrency_ctx = asyncnullcontext

         try:
-
-
-
-
-
-
+            async with concurrency_ctx(
+                limit_name, occupy=concurrency_limit, max_retries=0
+            ):
+                configuration = await self._get_configuration(flow_run, deployment)
+                submitted_event = self._emit_flow_run_submitted_event(configuration)
+                result = await self.run(
+                    flow_run=flow_run,
+                    task_status=task_status,
+                    configuration=configuration,
+                )
+        except (
+            AcquireConcurrencySlotTimeoutError,
+            ConcurrencySlotAcquisitionError,
+        ) as exc:
+            self._logger.info(
+                (
+                    "Deployment %s has reached its concurrency limit when submitting flow run %s"
+                ),
+                flow_run.deployment_id,
+                flow_run.name,
             )
+            await self._propose_scheduled_state(flow_run)
+
+            if not task_status._future.done():
+                task_status.started(exc)
+            return exc
         except Exception as exc:
             if not task_status._future.done():
                 # This flow run was being submitted and did not start successfully
@@ -924,8 +964,13 @@ class BaseWorker(abc.ABC):
     async def _get_configuration(
         self,
         flow_run: "FlowRun",
+        deployment: Optional["DeploymentResponse"] = None,
     ) -> BaseJobConfiguration:
-        deployment =
+        deployment = (
+            deployment
+            if deployment
+            else await self._client.read_deployment(flow_run.deployment_id)
+        )
         flow = await self._client.read_flow(flow_run.flow_id)

         deployment_vars = deployment.job_variables or {}
@@ -979,6 +1024,21 @@ class BaseWorker(abc.ABC):

         return True

+    async def _propose_scheduled_state(self, flow_run: "FlowRun") -> None:
+        run_logger = self.get_flow_run_logger(flow_run)
+        try:
+            state = await propose_state(
+                self._client,
+                AwaitingConcurrencySlot(),
+                flow_run_id=flow_run.id,
+            )
+            self._logger.info(f"Flow run {flow_run.id} now has state {state.name}")
+        except Abort:
+            # Flow run already marked as failed
+            pass
+        except Exception:
+            run_logger.exception(f"Failed to update state of flow run '{flow_run.id}'")
+
     async def _propose_failed_state(self, flow_run: "FlowRun", exc: Exception) -> None:
         run_logger = self.get_flow_run_logger(flow_run)
         try:
{prefect_client-3.0.0rc18.dist-info → prefect_client-3.0.0rc20.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: prefect-client
-Version: 3.0.0rc18
+Version: 3.0.0rc20
 Summary: Workflow orchestration and management.
 Home-page: https://www.prefect.io
 Author: Prefect Technologies, Inc.
@@ -34,10 +34,9 @@ Requires-Dist: exceptiongroup>=1.0.0
 Requires-Dist: fastapi<1.0.0,>=0.111.0
 Requires-Dist: fsspec>=2022.5.0
 Requires-Dist: graphviz>=0.20.1
-Requires-Dist: griffe<0.
+Requires-Dist: griffe<2.0.0,>=0.49.0
 Requires-Dist: httpcore<2.0.0,>=1.0.5
 Requires-Dist: httpx[http2]!=0.23.2,>=0.23
-Requires-Dist: importlib-resources<6.2.0,>=6.1.3
 Requires-Dist: jsonpatch<2.0,>=1.32
 Requires-Dist: jsonschema<5.0.0,>=4.0.0
 Requires-Dist: orjson<4.0,>=3.7
@@ -60,7 +59,7 @@ Requires-Dist: toml>=0.10.0
 Requires-Dist: typing-extensions<5.0.0,>=4.5.0
 Requires-Dist: ujson<6.0.0,>=5.8.0
 Requires-Dist: uvicorn!=0.29.0,>=0.14.0
-Requires-Dist: websockets<
+Requires-Dist: websockets<14.0,>=10.4
 Requires-Dist: importlib-metadata>=4.4; python_version < "3.10"
 Provides-Extra: notifications
 Requires-Dist: apprise<2.0.0,>=1.1.0; extra == "notifications"