prefect-client 3.2.14__py3-none-any.whl → 3.2.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prefect/_build_info.py CHANGED
@@ -1,5 +1,5 @@
  # Generated by versioningit
- __version__ = "3.2.14"
- __build_date__ = "2025-03-21 15:55:26.682402+00:00"
- __git_commit__ = "775b957d124ecd8cc3f0005e45019f1f9e855973"
+ __version__ = "3.2.15"
+ __build_date__ = "2025-03-28 15:27:36.030311+00:00"
+ __git_commit__ = "0f5a3081a2658f78eeeaa22440043cf80b19cc35"
  __dirty__ = False
prefect/_experimental/bundles.py CHANGED
@@ -173,13 +173,16 @@ def execute_bundle_in_subprocess(
      return process


- def convert_step_to_command(step: dict[str, Any], key: str) -> list[str]:
+ def convert_step_to_command(
+     step: dict[str, Any], key: str, quiet: bool = False
+ ) -> list[str]:
      """
      Converts a bundle upload or execution step to a command.

      Args:
          step: The step to convert.
          key: The key to use for the remote file when downloading or uploading.
+         quiet: Whether to suppress `uv` output from the command.

      Returns:
          A list of strings representing the command to run the step.
@@ -187,6 +190,9 @@ def convert_step_to_command(step: dict[str, Any], key: str) -> list[str]:
      # Start with uv run
      command = ["uv", "run"]

+     if quiet:
+         command.append("--quiet")
+
      step_keys = list(step.keys())

      if len(step_keys) != 1:
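A quick sketch of the effect of the new quiet flag, inferred from the hunks above (the step mapping below is a hypothetical placeholder, not a real Prefect step name): the generated command still starts with ["uv", "run"], and "--quiet" is appended immediately after it when quiet=True.

    step = {"example_package.example_step_function": {"bucket": "my-bucket"}}
    convert_step_to_command(step, key="bundle-key")              # ["uv", "run", ...]
    convert_step_to_command(step, key="bundle-key", quiet=True)  # ["uv", "run", "--quiet", ...]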
prefect/_internal/concurrency/services.py CHANGED
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import abc
  import asyncio
  import concurrent.futures
@@ -375,6 +377,14 @@ class BatchedQueueService(QueueService[T]):
      _max_batch_size: int
      _min_interval: Optional[float] = None

+     @property
+     def min_interval(self) -> float | None:
+         return self.__class__._min_interval
+
+     @property
+     def max_batch_size(self) -> int:
+         return self.__class__._max_batch_size
+
      async def _main_loop(self):
          done = False

@@ -383,8 +393,8 @@ class BatchedQueueService(QueueService[T]):
              batch_size = 0

              # Pull items from the queue until we reach the batch size
-             deadline = get_deadline(self._min_interval)
-             while batch_size < self._max_batch_size:
+             deadline = get_deadline(self.min_interval)
+             while batch_size < self.max_batch_size:
                  try:
                      item = await self._queue_get_thread.submit(
                          create_call(self._queue.get, timeout=get_timeout(deadline))
@@ -401,7 +411,7 @@
                          self,
                          item,
                          batch_size,
-                         self._max_batch_size,
+                         self.max_batch_size,
                      )
                  except queue.Empty:
                      # Process the batch after `min_interval` even if it is smaller than
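The new public properties make the batch settings overridable by subclasses without reaching into the private class attributes. A minimal sketch (illustrative subclass, not part of this diff):

    class PrintBatcher(BatchedQueueService[str]):
        _max_batch_size = 3

        @property
        def min_interval(self) -> float | None:
            # Flush a partial batch after two seconds of inactivity.
            return 2.0

        async def _handle_batch(self, items: list[str]) -> None:
            print(items)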
prefect/cache_policies.py CHANGED
@@ -2,7 +2,15 @@ import inspect
  from copy import deepcopy
  from dataclasses import dataclass, field
  from pathlib import Path
- from typing import TYPE_CHECKING, Any, Callable, Dict, Literal, Optional, Union
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     Callable,
+     Dict,
+     Literal,
+     Optional,
+     Union,
+ )

  from typing_extensions import Self

@@ -15,6 +23,24 @@ if TYPE_CHECKING:
      from prefect.locking.protocol import LockManager
      from prefect.transactions import IsolationLevel

+ STABLE_TRANSFORMS: dict[type, Callable[[Any], Any]] = {}
+
+
+ def _register_stable_transforms() -> None:
+     """
+     Some inputs do not reliably produce deterministic byte strings when serialized via
+     `cloudpickle`. This utility registers stabilizing transformations of such types
+     so that cache keys that utilize them are deterministic across invocations.
+     """
+     try:
+         import pandas as pd
+
+         STABLE_TRANSFORMS[pd.DataFrame] = lambda df: [
+             df[col] for col in sorted(df.columns)
+         ]
+     except (ImportError, ModuleNotFoundError):
+         pass
+

  @dataclass
  class CachePolicy:
@@ -341,7 +367,8 @@ class Inputs(CachePolicy):

          for key, val in inputs.items():
              if key not in exclude:
-                 hashed_inputs[key] = val
+                 transformer = STABLE_TRANSFORMS.get(type(val))  # type: ignore[reportUnknownMemberType]
+                 hashed_inputs[key] = transformer(val) if transformer else val

          try:
              return hash_objects(hashed_inputs, raise_on_failure=True)
@@ -362,6 +389,8 @@
          return Inputs(exclude=self.exclude + [other])


+ _register_stable_transforms()
+
  INPUTS = Inputs()
  NONE = _None()
  NO_CACHE = _None()
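With the stabilizing transform registered, INPUTS cache keys computed over pandas DataFrames become deterministic across runs. A minimal sketch (the task body is illustrative):

    import pandas as pd
    from prefect import task
    from prefect.cache_policies import INPUTS

    @task(cache_policy=INPUTS)
    def total(df: pd.DataFrame) -> float:
        # An equal DataFrame now yields the same cache key on every invocation,
        # because it is hashed column by column in sorted column order.
        return float(df.sum().sum())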
prefect/client/schemas/actions.py CHANGED
@@ -39,6 +39,7 @@ from prefect.types import (
      DateTime,
      KeyValueLabelsField,
      Name,
+     NameOrEmpty,
      NonEmptyishName,
      NonNegativeFloat,
      NonNegativeInteger,
@@ -212,7 +213,7 @@ class DeploymentCreate(ActionBaseModel):
      ) -> Union[str, list[str]]:
          return convert_to_strings(values)

-     name: str = Field(..., description="The name of the deployment.")
+     name: NameOrEmpty = Field(..., description="The name of the deployment.")
      flow_id: UUID = Field(..., description="The ID of the flow to deploy.")
      paused: Optional[bool] = Field(default=None)
      schedules: list[DeploymentScheduleCreate] = Field(
prefect/flows.py CHANGED
@@ -196,7 +196,7 @@ class Flow(Generic[P, R]):
      # exactly in the @flow decorator
      def __init__(
          self,
-         fn: Callable[P, R],
+         fn: Callable[P, R] | "classmethod[Any, P, R]" | "staticmethod[P, R]",
          name: Optional[str] = None,
          version: Optional[str] = None,
          flow_run_name: Optional[Union[Callable[[], str], str]] = None,
@@ -270,6 +270,13 @@ class Flow(Generic[P, R]):
                  " my_flow():\n\tpass"
              )

+         if isinstance(fn, classmethod):
+             fn = cast(Callable[P, R], fn.__func__)
+
+         if isinstance(fn, staticmethod):
+             fn = cast(Callable[P, R], fn.__func__)
+             setattr(fn, "__prefect_static__", True)
+
          if not callable(fn):
              raise TypeError("'fn' must be callable")

@@ -396,16 +403,28 @@ class Flow(Generic[P, R]):
      def ismethod(self) -> bool:
          return hasattr(self.fn, "__prefect_self__")

+     @property
+     def isclassmethod(self) -> bool:
+         return hasattr(self.fn, "__prefect_cls__")
+
+     @property
+     def isstaticmethod(self) -> bool:
+         return getattr(self.fn, "__prefect_static__", False)
+
      def __get__(self, instance: Any, owner: Any) -> "Flow[P, R]":
          """
-         Implement the descriptor protocol so that the flow can be used as an instance method.
+         Implement the descriptor protocol so that the flow can be used as an instance or class method.
          When an instance method is loaded, this method is called with the "self" instance as
          an argument. We return a copy of the flow with that instance bound to the flow's function.
          """
+         if self.isstaticmethod:
+             return self

-         # if no instance is provided, it's being accessed on the class
+         # wrapped function is a classmethod
          if instance is None:
-             return self
+             bound_flow = copy(self)
+             setattr(bound_flow.fn, "__prefect_cls__", owner)
+             return bound_flow

          # if the flow is being accessed on an instance, bind the instance to the __prefect_self__ attribute
          # of the flow's function. This will allow it to be automatically added to the flow's parameters
@@ -636,6 +655,10 @@ class Flow(Generic[P, R]):
              # do not serialize the bound self object
              if self.ismethod and value is getattr(self.fn, "__prefect_self__", None):
                  continue
+             if self.isclassmethod and value is getattr(
+                 self.fn, "__prefect_cls__", None
+             ):
+                 continue
              if isinstance(value, (PrefectFuture, State)):
                  # Don't call jsonable_encoder() on a PrefectFuture or State to
                  # avoid triggering a __getitem__ call
@@ -1895,11 +1918,6 @@ class FlowDecorator:
          >>> pass
          """
          if __fn:
-             if isinstance(__fn, (classmethod, staticmethod)):
-                 method_decorator = type(__fn).__name__
-                 raise TypeError(
-                     f"@{method_decorator} should be applied on top of @flow"
-                 )
              return Flow(
                  fn=__fn,
                  name=name,
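A minimal sketch of the pattern these hunks enable, assuming @flow is stacked outside @classmethod/@staticmethod (which is what the unwrapping in __init__ handles); this previously raised TypeError at class definition time:

    from prefect import flow

    class Pipeline:
        @flow
        @classmethod
        def for_class(cls) -> str:
            # cls is supplied via the __prefect_cls__ attribute set in __get__
            return cls.__name__

        @flow
        @staticmethod
        def ping() -> str:
            return "pong"

    Pipeline.for_class()
    Pipeline.ping()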
prefect/locking/memory.py CHANGED
@@ -193,23 +193,31 @@ class MemoryLockManager(LockManager):
      )

      def wait_for_lock(self, key: str, timeout: Optional[float] = None) -> bool:
-         if lock := self._locks.get(key, {}).get("lock"):
+         lock_info: _LockInfo | None = self._locks.get(key)
+         if lock_info is None:
+             return True
+         if lock_info["lock"].locked():
              if timeout is not None:
-                 lock_acquired = lock.acquire(timeout=timeout)
+                 lock_acquired = lock_info["lock"].acquire(timeout=timeout)
              else:
-                 lock_acquired = lock.acquire()
+                 lock_acquired = lock_info["lock"].acquire()
              if lock_acquired:
-                 lock.release()
+                 lock_info["lock"].release()
              return lock_acquired
          return True

      async def await_for_lock(self, key: str, timeout: Optional[float] = None) -> bool:
-         if lock := self._locks.get(key, {}).get("lock"):
+         lock_info: _LockInfo | None = self._locks.get(key, None)
+         if lock_info is None:
+             return True
+         if lock_info["lock"].locked():
              if timeout is not None:
-                 lock_acquired = await asyncio.to_thread(lock.acquire, timeout=timeout)
+                 lock_acquired = await asyncio.to_thread(
+                     lock_info["lock"].acquire, timeout=timeout
+                 )
              else:
-                 lock_acquired = await asyncio.to_thread(lock.acquire)
+                 lock_acquired = await asyncio.to_thread(lock_info["lock"].acquire)
              if lock_acquired:
-                 lock.release()
+                 lock_info["lock"].release()
              return lock_acquired
          return True
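A small illustration of the fixed behavior shown above: when no lock record exists for a key, both waiters now return True immediately instead of trying to acquire a missing lock.

    from prefect.locking.memory import MemoryLockManager

    manager = MemoryLockManager()
    # No record has been created for this key, so there is nothing to wait on.
    assert manager.wait_for_lock("results/my-key") is True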
prefect/logging/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from .loggers import disable_run_logger, get_logger, get_run_logger, LogEavesdropper

- __all__ = ["get_logger", "get_run_logger", "LogEavesdropper"]
+ __all__ = ["get_logger", "get_run_logger", "LogEavesdropper", "disable_run_logger"]
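disable_run_logger is now part of the package's declared public surface. A usage sketch, assuming the helper is used in its usual role as a context manager when calling task or flow functions directly in tests:

    from prefect.logging import disable_run_logger, get_run_logger

    def my_task_fn() -> None:
        get_run_logger().info("hello")

    with disable_run_logger():
        my_task_fn()  # no run context required; run-logger output is suppressed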
prefect/logging/configuration.py CHANGED
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import logging
  import logging.config
  import os
@@ -6,7 +8,7 @@ import string
  import warnings
  from functools import partial
  from pathlib import Path
- from typing import Any, Callable, Dict, Optional
+ from typing import Any, Callable

  import yaml

@@ -21,7 +23,7 @@ from prefect.utilities.collections import dict_to_flatdict, flatdict_to_dict
  DEFAULT_LOGGING_SETTINGS_PATH = Path(__file__).parent / "logging.yml"

  # Stores the configuration used to setup logging in this Python process
- PROCESS_LOGGING_CONFIG: Optional[Dict[str, Any]] = None
+ PROCESS_LOGGING_CONFIG: dict[str, Any] = {}

  # Regex call to replace non-alphanumeric characters to '_' to create a valid env var
  to_envvar: Callable[[str], str] = partial(re.sub, re.compile(r"[^0-9a-zA-Z]+"), "_")
@@ -60,7 +62,7 @@ def load_logging_config(path: Path) -> dict[str, Any]:
      return flatdict_to_dict(flat_config)


- def setup_logging(incremental: Optional[bool] = None) -> dict[str, Any]:
+ def setup_logging(incremental: bool | None = None) -> dict[str, Any]:
      """
      Sets up logging.

@@ -100,6 +102,6 @@ def setup_logging(incremental: Optional[bool] = None) -> dict[str, Any]:
          for handler in extra_config.handlers:
              logger.addHandler(handler)

-     PROCESS_LOGGING_CONFIG = config
+     PROCESS_LOGGING_CONFIG.update(config)

      return config
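A short illustration of why the module-level dict is now mutated in place rather than rebound: code that imported the name directly keeps observing the configuration recorded by setup_logging (the assertion is illustrative):

    import prefect.logging.configuration as cfg
    from prefect.logging.configuration import PROCESS_LOGGING_CONFIG

    cfg.setup_logging()
    # With .update() the module never rebinds the name, so both references
    # point at the same (now populated) dict.
    assert PROCESS_LOGGING_CONFIG is cfg.PROCESS_LOGGING_CONFIG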
prefect/logging/formatters.py CHANGED
@@ -78,7 +78,7 @@ class PrefectFormatter(logging.Formatter):
          self,
          format: str | None = None,
          datefmt: str | None = None,
-         style: str = "%",
+         style: Literal["%", "{", "$"] = "%",
          validate: bool = True,
          *,
          defaults: dict[str, Any] | None = None,
@@ -93,8 +93,8 @@
          # See https://github.com/python/cpython/blob/c8c6113398ee9a7867fe9b08bc539cceb61e2aaa/Lib/logging/__init__.py#L546
          # for implementation details

-         init_kwargs = {}
-         style_kwargs = {}
+         init_kwargs: dict[str, Any] = {}
+         style_kwargs: dict[str, Any] = {}

          # defaults added in 3.10
          if sys.version_info >= (3, 10):
prefect/logging/handlers.py CHANGED
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ import inspect
  import json
  import logging
  import sys
@@ -44,10 +45,13 @@ else:
  else:
      StreamHandler = logging.StreamHandler

+ if TYPE_CHECKING:
+     from prefect.client.schemas.objects import FlowRun, TaskRun
+

  class APILogWorker(BatchedQueueService[Dict[str, Any]]):
      @property
-     def _max_batch_size(self) -> int:
+     def max_batch_size(self) -> int:
          return max(
              PREFECT_LOGGING_TO_API_BATCH_SIZE.value()
              - PREFECT_LOGGING_TO_API_MAX_LOG_SIZE.value(),
@@ -55,7 +59,7 @@ class APILogWorker(BatchedQueueService[Dict[str, Any]]):
          )

      @property
-     def _min_interval(self) -> float | None:
+     def min_interval(self) -> float | None:
          return PREFECT_LOGGING_TO_API_BATCH_INTERVAL.value()

      async def _handle_batch(self, items: list[dict[str, Any]]):
@@ -77,7 +81,7 @@ class APILogWorker(BatchedQueueService[Dict[str, Any]]):
              yield

      @classmethod
-     def instance(cls: Type[Self]) -> Self:
+     def instance(cls: Type[Self], *args: Any) -> Self:
          settings = (
              PREFECT_LOGGING_TO_API_BATCH_SIZE.value(),
              PREFECT_API_URL.value(),
@@ -85,7 +89,7 @@ class APILogWorker(BatchedQueueService[Dict[str, Any]]):
          )

          # Ensure a unique worker is retrieved per relevant logging settings
-         return super().instance(*settings)
+         return super().instance(*settings, *args)

      def _get_size(self, item: Dict[str, Any]) -> int:
          return item.pop("__payload_size__", None) or len(json.dumps(item).encode())
@@ -99,8 +103,7 @@ class APILogHandler(logging.Handler):
      the background.
      """

-     @classmethod
-     def flush(cls) -> None:
+     def flush(self) -> None:
          """
          Tell the `APILogWorker` to send any currently enqueued logs and block until
          completion.
@@ -119,22 +122,23 @@
              # Not ideal, but this method is called by the stdlib and cannot return a
              # coroutine so we just schedule the drain in a new thread and continue
              from_sync.call_soon_in_new_thread(create_call(APILogWorker.drain_all))
-             return None
          else:
              # We set a timeout of 5s because we don't want to block forever if the worker
              # is stuck. This can occur when the handler is being shutdown and the
              # `logging._lock` is held but the worker is attempting to emit logs resulting
              # in a deadlock.
-             return APILogWorker.drain_all(timeout=5)
+             APILogWorker.drain_all(timeout=5)

      @classmethod
-     async def aflush(cls) -> bool:
+     async def aflush(cls) -> None:
          """
          Tell the `APILogWorker` to send any currently enqueued logs and block until
          completion.
          """

-         return await APILogWorker.drain_all()
+         result = APILogWorker.drain_all()
+         if inspect.isawaitable(result):
+             await result

      def emit(self, record: logging.LogRecord) -> None:
          """
@@ -202,11 +206,15 @@ class APILogHandler(logging.Handler):
                  " flow run contexts unless the flow run id is manually provided."
              ) from None

-         if hasattr(context, "flow_run"):
-             flow_run_id = context.flow_run.id
-         elif hasattr(context, "task_run"):
-             flow_run_id = context.task_run.flow_run_id
-             task_run_id = task_run_id or context.task_run.id
+         if flow_run := getattr(context, "flow_run", None):
+             if TYPE_CHECKING:
+                 assert isinstance(flow_run, FlowRun)
+             flow_run_id = flow_run.id
+         elif task_run := getattr(context, "task_run", None):
+             if TYPE_CHECKING:
+                 assert isinstance(task_run, TaskRun)
+             flow_run_id = task_run.flow_run_id
+             task_run_id = task_run_id or task_run.id
          else:
              raise ValueError(
                  "Encountered malformed run context. Does not contain flow or task "
@@ -216,15 +224,14 @@ class APILogHandler(logging.Handler):
          # Parsing to a `LogCreate` object here gives us nice parsing error messages
          # from the standard lib `handleError` method if something goes wrong and
          # prevents malformed logs from entering the queue
-         try:
-             is_uuid_like = isinstance(flow_run_id, uuid.UUID) or (
-                 isinstance(flow_run_id, str) and uuid.UUID(flow_run_id)
-             )
-         except ValueError:
-             is_uuid_like = False
+         if isinstance(flow_run_id, str):
+             try:
+                 flow_run_id = uuid.UUID(flow_run_id)
+             except ValueError:
+                 flow_run_id = None

          log = LogCreate(
-             flow_run_id=flow_run_id if is_uuid_like else None,
+             flow_run_id=flow_run_id,
              task_run_id=task_run_id,
              worker_id=worker_id,
              name=record.name,
@@ -306,15 +313,19 @@ class PrefectConsoleHandler(StreamHandler):
          styled_console = PREFECT_LOGGING_COLORS.value()
          markup_console = PREFECT_LOGGING_MARKUP.value()
          if styled_console:
-             highlighter = highlighter()
+             highlighter_instance = highlighter()
              theme = Theme(styles, inherit=False)
          else:
-             highlighter = NullHighlighter()
+             highlighter_instance = NullHighlighter()
              theme = Theme(inherit=False)

-         self.level = level
+         if isinstance(level, str):
+             self.level: int = logging.getLevelNamesMapping()[level]
+         else:
+             self.level: int = level
+
          self.console: Console = Console(
-             highlighter=highlighter,
+             highlighter=highlighter_instance,
              theme=theme,
              file=self.stream,
              markup=markup_console,
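Because flush() is now an instance method, an APILogHandler attached to a logger can be flushed directly; aflush() remains a classmethod. A small sketch:

    import logging
    from prefect.logging.handlers import APILogHandler

    for handler in logging.getLogger("prefect").handlers:
        if isinstance(handler, APILogHandler):
            handler.flush()  # blocks until enqueued logs are sent (5s timeout outside async contexts)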
prefect/task_engine.py CHANGED
@@ -755,11 +755,14 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
              if self._telemetry.span
              else nullcontext()
          ):
-             self.begin_run()
-             try:
-                 yield
-             finally:
-                 self.call_hooks()
+             # Acquire a concurrency slot for each tag, but only if a limit
+             # matching the tag already exists.
+             with concurrency(list(self.task_run.tags), self.task_run.id):
+                 self.begin_run()
+                 try:
+                     yield
+                 finally:
+                     self.call_hooks()

      @contextmanager
      def transaction_context(self) -> Generator[Transaction, None, None]:
@@ -820,13 +823,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
              if transaction.is_committed():
                  result = transaction.read()
              else:
-                 if self.task_run.tags:
-                     # Acquire a concurrency slot for each tag, but only if a limit
-                     # matching the tag already exists.
-                     with concurrency(list(self.task_run.tags), self.task_run.id):
-                         result = call_with_parameters(self.task.fn, parameters)
-                 else:
-                     result = call_with_parameters(self.task.fn, parameters)
+                 result = call_with_parameters(self.task.fn, parameters)
              self.handle_success(result, transaction=transaction)
              return result

@@ -1288,11 +1285,14 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
              if self._telemetry.span
              else nullcontext()
          ):
-             await self.begin_run()
-             try:
-                 yield
-             finally:
-                 await self.call_hooks()
+             # Acquire a concurrency slot for each tag, but only if a limit
+             # matching the tag already exists.
+             async with aconcurrency(list(self.task_run.tags), self.task_run.id):
+                 await self.begin_run()
+                 try:
+                     yield
+                 finally:
+                     await self.call_hooks()

      @asynccontextmanager
      async def transaction_context(self) -> AsyncGenerator[Transaction, None]:
@@ -1352,13 +1352,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
              if transaction.is_committed():
                  result = transaction.read()
              else:
-                 if self.task_run and self.task_run.tags:
-                     # Acquire a concurrency slot for each tag, but only if a limit
-                     # matching the tag already exists.
-                     async with aconcurrency(list(self.task_run.tags), self.task_run.id):
-                         result = await call_with_parameters(self.task.fn, parameters)
-                 else:
-                     result = await call_with_parameters(self.task.fn, parameters)
+                 result = await call_with_parameters(self.task.fn, parameters)
              await self.handle_success(result, transaction=transaction)
              return result

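The net effect of these hunks: tag-based concurrency slots are now acquired around the whole run context (state transitions and hooks included) rather than only around the user function. A sketch of a task that would be throttled by an existing limit on the db tag (creating that limit is assumed to have been done separately):

    from prefect import flow, task

    @task(tags=["db"])
    def write_row(i: int) -> None:
        ...

    @flow
    def ingest() -> None:
        for i in range(10):
            write_row(i)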
prefect/tasks.py CHANGED
@@ -1,8 +1,10 @@
  """
  Module containing the base workflow task class and decorator - for most use cases, using the [`@task` decorator][prefect.tasks.task] is preferred.
  """
+
  # This file requires type-checking with pyright because mypy does not yet support PEP612
  # See https://github.com/python/mypy/issues/8645
+ from __future__ import annotations

  import asyncio
  import datetime
@@ -314,7 +316,7 @@ class Task(Generic[P, R]):
      # exactly in the @task decorator
      def __init__(
          self,
-         fn: Callable[P, R],
+         fn: Callable[P, R] | "classmethod[Any, P, R]" | "staticmethod[P, R]",
          name: Optional[str] = None,
          description: Optional[str] = None,
          tags: Optional[Iterable[str]] = None,
@@ -378,6 +380,13 @@ class Task(Generic[P, R]):
                  " my_task():\n\tpass"
              )

+         if isinstance(fn, classmethod):
+             fn = cast(Callable[P, R], fn.__func__)
+
+         if isinstance(fn, staticmethod):
+             fn = cast(Callable[P, R], fn.__func__)
+             setattr(fn, "__prefect_static__", True)
+
          if not callable(fn):
              raise TypeError("'fn' must be callable")

@@ -536,6 +545,14 @@ class Task(Generic[P, R]):
      def ismethod(self) -> bool:
          return hasattr(self.fn, "__prefect_self__")

+     @property
+     def isclassmethod(self) -> bool:
+         return hasattr(self.fn, "__prefect_cls__")
+
+     @property
+     def isstaticmethod(self) -> bool:
+         return getattr(self.fn, "__prefect_static__", False)
+
      def __get__(self, instance: Any, owner: Any) -> "Task[P, R]":
          """
          Implement the descriptor protocol so that the task can be used as an instance method.
@@ -543,10 +560,15 @@ class Task(Generic[P, R]):
          an argument. We return a copy of the task with that instance bound to the task's function.
          """

-         # if no instance is provided, it's being accessed on the class
-         if instance is None:
+         if self.isstaticmethod:
              return self

+         # wrapped function is a classmethod
+         if not instance:
+             bound_task = copy(self)
+             setattr(bound_task.fn, "__prefect_cls__", owner)
+             return bound_task
+
          # if the task is being accessed on an instance, bind the instance to the __prefect_self__ attribute
          # of the task's function. This will allow it to be automatically added to the task's parameters
          else:
@@ -1851,9 +1873,6 @@ def task(
      """

      if __fn:
-         if isinstance(__fn, (classmethod, staticmethod)):
-             method_decorator = type(__fn).__name__
-             raise TypeError(f"@{method_decorator} should be applied on top of @task")
          return Task(
              fn=__fn,
              name=name,
prefect/utilities/callables.py CHANGED
@@ -63,6 +63,8 @@ def get_call_parameters(
      """
      if hasattr(fn, "__prefect_self__"):
          call_args = (getattr(fn, "__prefect_self__"), *call_args)
+     if hasattr(fn, "__prefect_cls__"):
+         call_args = (getattr(fn, "__prefect_cls__"), *call_args)

      try:
          bound_signature = inspect.signature(fn).bind(*call_args, **call_kwargs)
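A sketch of the binding this adds: when a wrapped function carries __prefect_cls__ (set by Flow.__get__/Task.__get__ above), the owning class is prepended to the positional arguments before signature binding (the Owner class and report function are illustrative):

    from prefect.utilities.callables import get_call_parameters

    class Owner:
        pass

    def report(cls: type, n: int) -> None:
        ...

    setattr(report, "__prefect_cls__", Owner)
    assert get_call_parameters(report, (3,), {}) == {"cls": Owner, "n": 3}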
prefect/workers/base.py CHANGED
@@ -1250,12 +1250,13 @@ class BaseWorker(abc.ABC, Generic[C, V, R]):
      ) -> None:
          state_updates = state_updates or {}
          state_updates.setdefault("name", "Cancelled")
-         state_updates.setdefault("type", StateType.CANCELLED)

          if flow_run.state:
+             state_updates.setdefault("type", StateType.CANCELLED)
              state = flow_run.state.model_copy(update=state_updates)
          else:
              # Unexpectedly when flow run does not have a state, create a new one
+             # does not need to explicitly set the type
              state = Cancelled(**state_updates)

          await self.client.set_flow_run_state(flow_run.id, state, force=True)
prefect_client-3.2.14.dist-info/METADATA → prefect_client-3.2.15.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: prefect-client
- Version: 3.2.14
+ Version: 3.2.15
  Summary: Workflow orchestration and management.
  Project-URL: Changelog, https://github.com/PrefectHQ/prefect/releases
  Project-URL: Documentation, https://docs.prefect.io
@@ -48,13 +48,14 @@ Requires-Dist: pydantic-settings>2.2.1
  Requires-Dist: python-dateutil<3.0.0,>=2.8.2
  Requires-Dist: python-slugify<9.0,>=5.0
  Requires-Dist: python-socks[asyncio]<3.0,>=2.5.3
+ Requires-Dist: pytz<2026,>=2021.1
  Requires-Dist: pyyaml<7.0.0,>=5.4.1
  Requires-Dist: rfc3339-validator<0.2.0,>=0.1.4
  Requires-Dist: rich<14.0,>=11.0
  Requires-Dist: ruamel-yaml>=0.17.0
  Requires-Dist: sniffio<2.0.0,>=1.3.0
  Requires-Dist: toml>=0.10.0
- Requires-Dist: typing-extensions<5.0.0,>=4.5.0
+ Requires-Dist: typing-extensions<5.0.0,>=4.10.0
  Requires-Dist: ujson<6.0.0,>=5.8.0
  Requires-Dist: uvicorn!=0.29.0,>=0.14.0
  Requires-Dist: websockets<16.0,>=13.0
prefect_client-3.2.14.dist-info/RECORD → prefect_client-3.2.15.dist-info/RECORD CHANGED
@@ -1,20 +1,20 @@
  prefect/.prefectignore,sha256=awSprvKT0vI8a64mEOLrMxhxqcO-b0ERQeYpA2rNKVQ,390
  prefect/__init__.py,sha256=iCdcC5ZmeewikCdnPEP6YBAjPNV5dvfxpYCTpw30Hkw,3685
  prefect/__main__.py,sha256=WFjw3kaYJY6pOTA7WDOgqjsz8zUEUZHCcj3P5wyVa-g,66
- prefect/_build_info.py,sha256=CVmFj0Mmllfz-aWbB-Deuyvgq5sfaVi0SeP84j9cUT4,181
+ prefect/_build_info.py,sha256=H_r3iAipByX7h6dLUxmFezOTIdNBuHZnAm7TJVM4w_I,181
  prefect/_result_records.py,sha256=S6QmsODkehGVSzbMm6ig022PYbI6gNKz671p_8kBYx4,7789
  prefect/_waiters.py,sha256=Ia2ITaXdHzevtyWIgJoOg95lrEXQqNEOquHvw3T33UQ,9026
  prefect/agent.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/artifacts.py,sha256=dMBUOAWnUamzjb5HSqwB5-GR2Qb-Gxee26XG5NDCUuw,22720
  prefect/automations.py,sha256=ZzPxn2tINdlXTQo805V4rIlbXuNWxd7cdb3gTJxZIeY,12567
- prefect/cache_policies.py,sha256=cF_6eqg34x7XgaCIw6S8Vr-Eq0wIr4Y6t3FOuXaPBrY,11912
+ prefect/cache_policies.py,sha256=Kwdei4JjitNfx42OepKpDNxwPtEwRgUUAn_soxsnNzI,12699
  prefect/context.py,sha256=iJe4pkFqX6lz8ax1Mde_YqVmBVWmzeBe0ca2_nT6KPQ,23673
  prefect/engine.py,sha256=uB5JN4l045i5JTlRQNT1x7MwlSiGQ5Bop2Q6jHHOgxY,3699
  prefect/exceptions.py,sha256=-nih8qqdxRm6CX-4yrqwePVh8Mcpvla_V6N_KbdJsIU,11593
  prefect/filesystems.py,sha256=v5YqGB4uXf9Ew2VuB9VCSkawvYMMVvEtZf7w1VmAmr8,18036
  prefect/flow_engine.py,sha256=rjzpFrLswQ7sMX1-ap4SDYvWUawzSD0nnL6jVLa0ku0,59389
  prefect/flow_runs.py,sha256=dbHcXsOq1UsNM7vyJV9gboCTylmdUwQ_-W4NQt4R4ds,17267
- prefect/flows.py,sha256=dsrV-qNP_2eUsdG409XbtNtfMkTIzcAnBbXrD9OBUgQ,108950
+ prefect/flows.py,sha256=wDVMQ67YSgzJR_jwSBjmLQ2zAtIKP7xN_R1UG7an-yk,109495
  prefect/futures.py,sha256=ADd8ceFqX7A8Kw8aXaqvbYRG03uU82OEY30xrP5vrwY,23599
  prefect/main.py,sha256=hFeTTrr01qWKcRwZVEHVipyHEybS0VLTscFV6zG6GtY,2306
  prefect/plugins.py,sha256=FPRLR2mWVBMuOnlzeiTD9krlHONZH2rtYLD753JQDNQ,2516
@@ -23,15 +23,15 @@ prefect/results.py,sha256=_y-pBxl5Retn39pG5RE2d5lAb2ycJHhWPAoQWAzr9yQ,36633
  prefect/schedules.py,sha256=9ufG4jhIA_R7vS9uXqnnZEgB7Ts922KMhNacWcveVgA,7291
  prefect/serializers.py,sha256=QI0oEal_BO4HQaWSjr6ReSwT55Hn4sbSOXxGgQI1-y0,9249
  prefect/states.py,sha256=tTZrN-IZKvmFcN8FR_4L-X-ZrmXi6z-cPXl6KdOy-XI,26920
- prefect/task_engine.py,sha256=nbiaDyTNN89zP_ibJbe_Zrw2kLGZ4QqoeN221iIc5y0,61526
+ prefect/task_engine.py,sha256=Ct9ndJ2IVpXZhrQ5eSs4iT786m2WjSbKmgi5On_jlCQ,61276
  prefect/task_runners.py,sha256=Ce_ngocfq_X-NA5zhPj13IdVmzZ5h6gXlmfxYWs2AXA,15828
  prefect/task_runs.py,sha256=7LIzfo3fondCyEUpU05sYFN5IfpZigBDXrhG5yc-8t0,9039
  prefect/task_worker.py,sha256=mihWOZ3IpZCupqBboB_T1XhLm-0ApwwptTgUH-I3nKo,17794
- prefect/tasks.py,sha256=wtDcBmmsAsmVCJy4SVQqcRdLQUtJjgMRyB_YhkddMcY,74268
+ prefect/tasks.py,sha256=mdvWJe5xRpt2YTijWkMMfNjCcHrYv8C6l5J_3PBPu9k,74742
  prefect/transactions.py,sha256=BYvxr4ZSFmYDCODPhH8DO1_51inH35oJ75ZZOd_GI_w,16341
  prefect/variables.py,sha256=dCK3vX7TbkqXZhnNT_v7rcGh3ISRqoR6pJVLpoll3Js,8342
  prefect/_experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- prefect/_experimental/bundles.py,sha256=cxfUFdvBjd42imrimPG2MF3zJoyi1vGeYOVSiyjLC8Y,6245
+ prefect/_experimental/bundles.py,sha256=EIZc6hsjQS8V2CH1RbsxOiFeR11l1IgvhiXTSO3uDyM,6386
  prefect/_experimental/lineage.py,sha256=8LssReoq7eLtQScUCu-7FCtrWoRZstXKRdpO0PxgbKg,9958
  prefect/_experimental/sla/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/_experimental/sla/client.py,sha256=XTkYHFZiBy_O7RgUyGEdl9MxaHP-6fEAKBk3ksNQobU,3611
@@ -52,7 +52,7 @@ prefect/_internal/concurrency/cancellation.py,sha256=stCN22-S0f_kZPk50hCEEYzH35f
  prefect/_internal/concurrency/event_loop.py,sha256=N6SyBV0vaSF5HD4_JM8zL7oBGd2nMuEKkeSPnBZdHw4,2136
  prefect/_internal/concurrency/inspection.py,sha256=wUWVbHi4G-BxuuYFWhTNmo5yc1C651lQrp5OMiHPU1E,3545
  prefect/_internal/concurrency/primitives.py,sha256=Wuht4GwJCgts_uAZFUt9c-InPssnXcelRQc1dGdOplk,2672
- prefect/_internal/concurrency/services.py,sha256=LmB6QwqJoB6B8curZ93zxWdKYNwuyiyBKLdACWFfb7E,15940
+ prefect/_internal/concurrency/services.py,sha256=w2J5Q5Pep19Ignx-TLEw27wf3fS26HVw-eeR4xMeTxQ,16174
  prefect/_internal/concurrency/threads.py,sha256=9sIDBdVFmvY4qqdkz3p1eqs4se7Ua2lJ-CPnhTSPRs4,9288
  prefect/_internal/concurrency/waiters.py,sha256=mhXpQk8swcUAxBk7f7kGn1fqy44XcFyneog_zEYecr0,9442
  prefect/_internal/pydantic/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
@@ -112,7 +112,7 @@ prefect/client/orchestration/_variables/client.py,sha256=wKBbZBLGgs5feDCil-xxKt3
  prefect/client/orchestration/_work_pools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/client/orchestration/_work_pools/client.py,sha256=s1DfUQQBgB2sLiVVPhLNTlkueUDE6uFsh4mAzcSA1OE,19881
  prefect/client/schemas/__init__.py,sha256=InZcDzdeWA2oaV0TlyvoMcyLcbi_aaqU1U9D6Gx-eoU,2747
- prefect/client/schemas/actions.py,sha256=8kGnzpKGa7M9z5D10zoj9KfN42dZTqmmfVu0ELZjnRo,33007
+ prefect/client/schemas/actions.py,sha256=I8LGTyDBrV4eXI_VSq8nQz5jUuOUAn1A0Q5JsPgCa2A,33032
  prefect/client/schemas/filters.py,sha256=zaiDkalrIpKjd38V4aP1GHlqD24KTPCZiKtPyX69ZWE,36607
  prefect/client/schemas/objects.py,sha256=7175pnyPFmGJeqvGrDDuSk7Jitl9h4uGLdDejkzdTy8,57688
  prefect/client/schemas/responses.py,sha256=iTXTiUhdRL7PxNyJXMZ4ngT7C8SepT_z7g_pnUnVlzo,15629
@@ -173,13 +173,13 @@ prefect/input/actions.py,sha256=BDx26b6ZYCTr0kbWBp73Or7UXnLIv1lnm0jow6Simxw,3871
  prefect/input/run_input.py,sha256=GoM4LR3oqAFLf2sPCR1yITY9tNSZT8kAd4gaC-v-a-c,22703
  prefect/locking/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/locking/filesystem.py,sha256=O67Miiz466fQUu3UmHer9dkWpVL1f8GEX8Lv2lDj0Y8,8113
- prefect/locking/memory.py,sha256=mFUgV750ywEL7aVQuxFjg9gxbjVU4esBQn7bGQYzeMY,7548
+ prefect/locking/memory.py,sha256=Q8NqSeksdQb-AZfql_SXeTd4SRFZ3rWMBwA5shTVEZM,7860
  prefect/locking/protocol.py,sha256=RsfvlaHTTEJ0YvYWSqFGoZuT2w4FPPxyQlHqjoyNGuE,4240
- prefect/logging/__init__.py,sha256=zx9f5_dWrR4DbcTOFBpNGOPoCZ1QcPFudr7zxb2XRpA,148
- prefect/logging/configuration.py,sha256=XyHlC31XT00v3hsDg1KKKmKweb8E5fszVKE7BRQ-ebM,3292
+ prefect/logging/__init__.py,sha256=DpRZzZeWeiDHFlMDEQdknRzbxpL0ObFh5IqqS9iaZwQ,170
+ prefect/logging/configuration.py,sha256=ZBAOgwE34VSZFSiP4gBEd0S9m645_mEs3dFhiwPj58o,3303
  prefect/logging/filters.py,sha256=NnRYubh9dMmWcCAjuW32cIVQ37rLxdn8ci26wTtQMyU,1136
- prefect/logging/formatters.py,sha256=BkPykVyOFKdnhDj_1vhhOoWiHiiBeRnWXPcaRIWK3aI,4125
- prefect/logging/handlers.py,sha256=pIeS6gvuVnuh3lZ-kIC4ijRMSbVPkHo-rYeLMj5P8NA,12240
+ prefect/logging/formatters.py,sha256=A-SDQbykcvBnPAAJEwbH8ZU6MwFKy035_AOABPPaaQk,4176
+ prefect/logging/handlers.py,sha256=_8AiVLSQksVQnINssF8EfwCl2ov-vyMTyPP2_i3Qmbc,12642
  prefect/logging/highlighters.py,sha256=BCf_LNhFInIfGPqwuu8YVrGa4wVxNc4YXo2pYgftpg4,1811
  prefect/logging/loggers.py,sha256=rwFJv0i3dhdKr25XX-xUkQy4Vv4dy18bTy366jrC0OQ,12741
  prefect/logging/logging.yml,sha256=tT7gTyC4NmngFSqFkCdHaw7R0GPNPDDsTCGZQByiJAQ,3169
@@ -285,7 +285,7 @@ prefect/utilities/_engine.py,sha256=9GW4X1lyAbmPwCuXXIubVJ7Z0DMT3dykkEUtp9tm5hI,
  prefect/utilities/_git.py,sha256=bPYWQdr9xvH0BqxR1ll1RkaSb3x0vhwylhYD5EilkKU,863
  prefect/utilities/annotations.py,sha256=0Elqgq6LR7pQqezNqT5wb6U_0e2pDO_zx6VseVL6kL8,4396
  prefect/utilities/asyncutils.py,sha256=xcfeNym2j3WH4gKXznON2hI1PpUTcwr_BGc16IQS3C4,19789
- prefect/utilities/callables.py,sha256=pQ60sVaQ8jMhVHj3t9mQ_NunTLFb2jK9-34gsb5JUMA,25729
+ prefect/utilities/callables.py,sha256=YiNdLzsOQfUsTa6bIU3TIMAuZWRiZVevYUQX2cSv5gY,25833
  prefect/utilities/collections.py,sha256=yMZyRD9j6m3Fd3wm4-HR2r3o7B02AC_MDQZUWsX3s18,23513
  prefect/utilities/compat.py,sha256=nnPA3lf2f4Y-l645tYFFNmj5NDPaYvjqa9pbGKZ3WKE,582
  prefect/utilities/context.py,sha256=23SDMgdt07SjmB1qShiykHfGgiv55NBzdbMXM3fE9CI,1447
@@ -312,13 +312,13 @@ prefect/utilities/schema_tools/__init__.py,sha256=At3rMHd2g_Em2P3_dFQlFgqR_EpBwr
  prefect/utilities/schema_tools/hydration.py,sha256=NkRhWkNfxxFmVGhNDfmxdK_xeKaEhs3a42q83Sg9cT4,9436
  prefect/utilities/schema_tools/validation.py,sha256=Wix26IVR-ZJ32-6MX2pHhrwm3reB-Q4iB6_phn85OKE,10743
  prefect/workers/__init__.py,sha256=EaM1F0RZ-XIJaGeTKLsXDnfOPHzVWk5bk0_c4BVS44M,64
- prefect/workers/base.py,sha256=XTMuLTKf-cAA1omH374Peyk0be1G9mquYIjpx-j0W6g,53169
+ prefect/workers/base.py,sha256=rj9n4RrTV3LpbQ7JTDy4cGiuRxGed4AefVLengEdBgU,53228
  prefect/workers/block.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/workers/cloud.py,sha256=dPvG1jDGD5HSH7aM2utwtk6RaJ9qg13XjkA0lAIgQmY,287
  prefect/workers/process.py,sha256=uxOwcqA2Ps-V-W6WeSdKCQMINrCxBEVx1K1Un8pb7vs,8973
  prefect/workers/server.py,sha256=SEuyScZ5nGm2OotdtbHjpvqJlTRVWCh29ND7FeL_fZA,1974
  prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
- prefect_client-3.2.14.dist-info/METADATA,sha256=qDbKPaNFkOv43dwCV7Uh2egkTsJM54dBUfaFjFDBbds,7193
- prefect_client-3.2.14.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- prefect_client-3.2.14.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
- prefect_client-3.2.14.dist-info/RECORD,,
+ prefect_client-3.2.15.dist-info/METADATA,sha256=T6Q95u70QVvOSQ9fgUARBajY3rg8QAlxQYslOeyn41I,7228
+ prefect_client-3.2.15.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ prefect_client-3.2.15.dist-info/licenses/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+ prefect_client-3.2.15.dist-info/RECORD,,