pydocket 0.5.1-py3-none-any.whl → 0.6.0-py3-none-any.whl

This diff compares the published contents of two versions of the package as they appear in their public registries. It is provided for informational purposes only.

This release has been flagged as potentially problematic.

docket/__init__.py CHANGED

@@ -13,11 +13,13 @@ from .dependencies import (
     CurrentDocket,
     CurrentExecution,
     CurrentWorker,
+    Depends,
     ExponentialRetry,
     Perpetual,
     Retry,
     TaskKey,
     TaskLogger,
+    Timeout,
 )
 from .docket import Docket
 from .execution import Execution

@@ -36,5 +38,7 @@ __all__ = [
     "ExponentialRetry",
     "Logged",
     "Perpetual",
+    "Timeout",
+    "Depends",
     "__version__",
 ]
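
For orientation, the two new exports are the Timeout and Depends dependencies introduced in docket/dependencies.py below:

    from docket import Depends, Timeout  # new public names in 0.6.0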
docket/annotations.py CHANGED

@@ -28,3 +28,19 @@ class Annotation(abc.ABC):

 class Logged(Annotation):
     """Instructs docket to include arguments to this parameter in the log."""
+
+    length_only: bool = False
+
+    def __init__(self, length_only: bool = False) -> None:
+        self.length_only = length_only
+
+    def format(self, argument: Any) -> str:
+        if self.length_only:
+            if isinstance(argument, (dict, set)):
+                return f"{{len {len(argument)}}}"
+            elif isinstance(argument, tuple):
+                return f"(len {len(argument)})"
+            elif hasattr(argument, "__len__"):
+                return f"[len {len(argument)}]"
+
+        return repr(argument)
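
Read straight off the hunk above: with length_only=True a collection's contents are redacted but its size is kept, with the bracket style hinting at the container type; values without __len__ still fall back to repr(). Since Logged is already exported from the package root, the resulting strings can be sketched directly:

    from docket import Logged

    logged = Logged(length_only=True)

    assert logged.format({"a": 1, "b": 2}) == "{len 2}"  # dicts and sets
    assert logged.format((1, 2, 3)) == "(len 3)"         # tuples
    assert logged.format([1, 2, 3, 4]) == "[len 4]"      # anything else with __len__
    assert logged.format(42) == "42"                     # no __len__: plain repr()
    assert Logged().format("hi") == "'hi'"               # default: full repr()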
docket/dependencies.py CHANGED

@@ -1,35 +1,60 @@
 import abc
-import inspect
 import logging
+import time
+from contextlib import AsyncExitStack, asynccontextmanager
+from contextvars import ContextVar
 from datetime import timedelta
-from typing import Any, Awaitable, Callable, Counter, TypeVar, cast
+from types import TracebackType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncContextManager,
+    AsyncGenerator,
+    Awaitable,
+    Callable,
+    Counter,
+    Generic,
+    TypeVar,
+    cast,
+)

 from .docket import Docket
-from .execution import Execution
-from .worker import Worker
+from .execution import Execution, TaskFunction, get_signature
+
+if TYPE_CHECKING:  # pragma: no cover
+    from .worker import Worker


 class Dependency(abc.ABC):
     single: bool = False

+    docket: ContextVar[Docket] = ContextVar("docket")
+    worker: ContextVar["Worker"] = ContextVar("worker")
+    execution: ContextVar[Execution] = ContextVar("execution")
+
     @abc.abstractmethod
-    def __call__(
-        self, docket: Docket, worker: Worker, execution: Execution
-    ) -> Any: ...  # pragma: no cover
+    async def __aenter__(self) -> Any: ...  # pragma: no cover
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ) -> bool: ...  # pragma: no cover


 class _CurrentWorker(Dependency):
-    def __call__(self, docket: Docket, worker: Worker, execution: Execution) -> Worker:
-        return worker
+    async def __aenter__(self) -> "Worker":
+        return self.worker.get()


-def CurrentWorker() -> Worker:
-    return cast(Worker, _CurrentWorker())
+def CurrentWorker() -> "Worker":
+    return cast("Worker", _CurrentWorker())


 class _CurrentDocket(Dependency):
-    def __call__(self, docket: Docket, worker: Worker, execution: Execution) -> Docket:
-        return docket
+    async def __aenter__(self) -> Docket:
+        return self.docket.get()


 def CurrentDocket() -> Docket:

@@ -37,10 +62,8 @@ def CurrentDocket() -> Docket:


 class _CurrentExecution(Dependency):
-    def __call__(
-        self, docket: Docket, worker: Worker, execution: Execution
-    ) -> Execution:
-        return execution
+    async def __aenter__(self) -> Execution:
+        return self.execution.get()


 def CurrentExecution() -> Execution:

@@ -48,8 +71,8 @@ def CurrentExecution() -> Execution:


 class _TaskKey(Dependency):
-    def __call__(self, docket: Docket, worker: Worker, execution: Execution) -> str:
-        return execution.key
+    async def __aenter__(self) -> str:
+        return self.execution.get().key


 def TaskKey() -> str:

@@ -57,15 +80,14 @@ def TaskKey() -> str:


 class _TaskLogger(Dependency):
-    def __call__(
-        self, docket: Docket, worker: Worker, execution: Execution
-    ) -> logging.LoggerAdapter[logging.Logger]:
+    async def __aenter__(self) -> logging.LoggerAdapter[logging.Logger]:
+        execution = self.execution.get()
         logger = logging.getLogger(f"docket.task.{execution.function.__name__}")
         return logging.LoggerAdapter(
             logger,
             {
-                **docket.labels(),
-                **worker.labels(),
+                **self.docket.get().labels(),
+                **self.worker.get().labels(),
                 **execution.specific_labels(),
             },
         )

@@ -85,7 +107,8 @@ class Retry(Dependency):
         self.delay = delay
         self.attempt = 1

-    def __call__(self, docket: Docket, worker: Worker, execution: Execution) -> "Retry":
+    async def __aenter__(self) -> "Retry":
+        execution = self.execution.get()
         retry = Retry(attempts=self.attempts, delay=self.delay)
         retry.attempt = execution.attempt
         return retry

@@ -104,9 +127,9 @@ class ExponentialRetry(Retry):
         self.minimum_delay = minimum_delay
         self.maximum_delay = maximum_delay

-    def __call__(
-        self, docket: Docket, worker: Worker, execution: Execution
-    ) -> "ExponentialRetry":
+    async def __aenter__(self) -> "ExponentialRetry":
+        execution = self.execution.get()
+
         retry = ExponentialRetry(
             attempts=self.attempts,
             minimum_delay=self.minimum_delay,

@@ -155,9 +178,8 @@ class Perpetual(Dependency):
         self.automatic = automatic
         self.cancelled = False

-    def __call__(
-        self, docket: Docket, worker: Worker, execution: Execution
-    ) -> "Perpetual":
+    async def __aenter__(self) -> "Perpetual":
+        execution = self.execution.get()
         perpetual = Perpetual(every=self.every)
         perpetual.args = execution.args
         perpetual.kwargs = execution.kwargs

@@ -171,27 +193,121 @@ class Perpetual(Dependency):
         self.kwargs = kwargs


+class Timeout(Dependency):
+    single = True
+
+    base: timedelta
+
+    _deadline: float
+
+    def __init__(self, base: timedelta) -> None:
+        self.base = base
+
+    async def __aenter__(self) -> "Timeout":
+        timeout = Timeout(base=self.base)
+        timeout.start()
+        return timeout
+
+    def start(self) -> None:
+        self._deadline = time.monotonic() + self.base.total_seconds()
+
+    def expired(self) -> bool:
+        return time.monotonic() >= self._deadline
+
+    def remaining(self) -> timedelta:
+        return timedelta(seconds=self._deadline - time.monotonic())
+
+    def extend(self, by: timedelta | None = None) -> None:
+        if by is None:
+            by = self.base
+        self._deadline += by.total_seconds()
+
+
+R = TypeVar("R")
+
+DependencyFunction = Callable[..., Awaitable[R] | AsyncContextManager[R]]
+
+
+_parameter_cache: dict[
+    TaskFunction | DependencyFunction[Any],
+    dict[str, Dependency],
+] = {}
+
+
 def get_dependency_parameters(
-    function: Callable[..., Awaitable[Any]],
+    function: TaskFunction | DependencyFunction[Any],
 ) -> dict[str, Dependency]:
-    dependencies: dict[str, Any] = {}
+    if function in _parameter_cache:
+        return _parameter_cache[function]
+
+    dependencies: dict[str, Dependency] = {}

-    signature = inspect.signature(function)
+    signature = get_signature(function)

-    for param_name, param in signature.parameters.items():
+    for parameter, param in signature.parameters.items():
         if not isinstance(param.default, Dependency):
             continue

-        dependencies[param_name] = param.default
+        dependencies[parameter] = param.default

+    _parameter_cache[function] = dependencies
     return dependencies


+class _Depends(Dependency, Generic[R]):
+    dependency: DependencyFunction[R]
+
+    cache: ContextVar[dict[DependencyFunction[Any], Any]] = ContextVar("cache")
+    stack: ContextVar[AsyncExitStack] = ContextVar("stack")
+
+    def __init__(
+        self, dependency: Callable[[], Awaitable[R] | AsyncContextManager[R]]
+    ) -> None:
+        self.dependency = dependency
+
+    async def _resolve_parameters(
+        self,
+        function: TaskFunction | DependencyFunction[Any],
+    ) -> dict[str, Any]:
+        stack = self.stack.get()
+
+        arguments: dict[str, Any] = {}
+        parameters = get_dependency_parameters(function)
+
+        for parameter, dependency in parameters.items():
+            arguments[parameter] = await stack.enter_async_context(dependency)
+
+        return arguments
+
+    async def __aenter__(self) -> R:
+        cache = self.cache.get()
+
+        if self.dependency in cache:
+            return cache[self.dependency]
+
+        stack = self.stack.get()
+        arguments = await self._resolve_parameters(self.dependency)
+
+        value = self.dependency(**arguments)
+
+        if isinstance(value, AsyncContextManager):
+            value = await stack.enter_async_context(value)
+        else:
+            value = await value
+
+        cache[self.dependency] = value
+        return value
+
+
+def Depends(dependency: DependencyFunction[R]) -> R:
+    return cast(R, _Depends(dependency))
+
+
 D = TypeVar("D", bound=Dependency)


 def get_single_dependency_parameter_of_type(
-    function: Callable[..., Awaitable[Any]], dependency_type: type[D]
+    function: TaskFunction, dependency_type: type[D]
 ) -> D | None:
     assert dependency_type.single, "Dependency must be single"
     for _, dependency in get_dependency_parameters(function).items():
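
Taken together, the hunk above rewrites dependencies as async context managers resolved from context variables, adds Timeout (a deadline on a monotonic clock that the task can extend), and introduces a FastAPI-style Depends with per-execution caching that accepts either an awaitable or an async context manager. A minimal usage sketch; the names connection, session, and process are illustrative, not part of the package:

    from contextlib import asynccontextmanager
    from datetime import timedelta
    from typing import Any, AsyncGenerator

    from docket import Depends, Timeout

    @asynccontextmanager
    async def connection() -> AsyncGenerator[dict[str, Any], None]:
        # Entered on the shared AsyncExitStack; exits after the task finishes.
        conn = {"open": True}
        try:
            yield conn
        finally:
            conn["open"] = False

    async def session(conn: dict[str, Any] = Depends(connection)) -> str:
        # A plain-awaitable dependency that itself depends on connection().
        return "session-1"

    async def process(
        conn: dict[str, Any] = Depends(connection),  # cached: same object session() saw
        sess: str = Depends(session),
        timeout: Timeout = Timeout(timedelta(seconds=30)),
    ) -> None:
        if not timeout.expired():
            timeout.extend()  # push the deadline out by another base interval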
@@ -210,7 +326,7 @@ def get_single_dependency_of_type(
     return None


-def validate_dependencies(function: Callable[..., Awaitable[Any]]) -> None:
+def validate_dependencies(function: TaskFunction) -> None:
     parameters = get_dependency_parameters(function)

     counts = Counter(type(dependency) for dependency in parameters.values())

@@ -220,3 +336,31 @@ def validate_dependencies(function: TaskFunction) -> None:
         raise ValueError(
             f"Only one {dependency_type.__name__} dependency is allowed per task"
         )
+
+
+@asynccontextmanager
+async def resolved_dependencies(
+    worker: "Worker", execution: Execution
+) -> AsyncGenerator[dict[str, Any], None]:
+    # Set context variables once at the beginning
+    Dependency.docket.set(worker.docket)
+    Dependency.worker.set(worker)
+    Dependency.execution.set(execution)
+
+    _Depends.cache.set({})
+
+    async with AsyncExitStack() as stack:
+        _Depends.stack.set(stack)
+
+        arguments: dict[str, Any] = {}
+
+        parameters = get_dependency_parameters(execution.function)
+        for parameter, dependency in parameters.items():
+            kwargs = execution.kwargs
+            if parameter in kwargs:
+                arguments[parameter] = kwargs[parameter]
+                continue
+
+            arguments[parameter] = await stack.enter_async_context(dependency)
+
+        yield arguments
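
The new resolved_dependencies context manager is the single entry point for materializing a task's arguments: it seeds the context variables, opens one AsyncExitStack for every dependency of the execution, and lets explicitly passed kwargs shadow a declared dependency. worker.py is not part of this diff, so the call site below is an assumption about how it is consumed:

    from typing import TYPE_CHECKING

    from docket.dependencies import resolved_dependencies
    from docket.execution import Execution

    if TYPE_CHECKING:
        from docket.worker import Worker

    async def run_task(worker: "Worker", execution: Execution) -> None:
        # Hypothetical call site: dependency values win over the raw kwargs,
        # which resolved_dependencies has already reconciled above.
        async with resolved_dependencies(worker, execution) as dependency_kwargs:
            await execution.function(
                *execution.args, **{**execution.kwargs, **dependency_kwargs}
            )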
docket/docket.py CHANGED

@@ -28,6 +28,7 @@ from uuid import uuid4
 import redis.exceptions
 from opentelemetry import propagate, trace
 from redis.asyncio import ConnectionPool, Redis
+from redis.asyncio.client import Pipeline

 from .execution import (
     Execution,

@@ -37,6 +38,7 @@ from .execution import (
     Strike,
     StrikeInstruction,
     StrikeList,
+    TaskFunction,
 )
 from .instrumentation import (
     REDIS_DISRUPTIONS,

@@ -56,7 +58,7 @@ tracer: trace.Tracer = trace.get_tracer(__name__)
 P = ParamSpec("P")
 R = TypeVar("R")

-TaskCollection = Iterable[Callable[..., Awaitable[Any]]]
+TaskCollection = Iterable[TaskFunction]

 RedisStreamID = bytes
 RedisMessageID = bytes

@@ -90,7 +92,7 @@ class RunningExecution(Execution):
         worker: str,
         started: datetime,
     ) -> None:
-        self.function: Callable[..., Awaitable[Any]] = execution.function
+        self.function: TaskFunction = execution.function
         self.args: tuple[Any, ...] = execution.args
         self.kwargs: dict[str, Any] = execution.kwargs
         self.when: datetime = execution.when

@@ -110,7 +112,7 @@ class DocketSnapshot:


 class Docket:
-    tasks: dict[str, Callable[..., Awaitable[Any]]]
+    tasks: dict[str, TaskFunction]
     strike_list: StrikeList

     _monitor_strikes_task: asyncio.Task[None]

@@ -196,7 +198,7 @@ class Docket:
         finally:
             await asyncio.shield(r.__aexit__(None, None, None))

-    def register(self, function: Callable[..., Awaitable[Any]]) -> None:
+    def register(self, function: TaskFunction) -> None:
         from .dependencies import validate_dependencies

         validate_dependencies(function)

@@ -256,9 +258,14 @@ class Docket:

         async def scheduler(*args: P.args, **kwargs: P.kwargs) -> Execution:
             execution = Execution(function, args, kwargs, when, key, attempt=1)
-            await self.schedule(execution)
+
+            async with self.redis() as redis:
+                async with redis.pipeline() as pipeline:
+                    await self._schedule(redis, pipeline, execution, replace=False)
+                    await pipeline.execute()

             TASKS_ADDED.add(1, {**self.labels(), **execution.general_labels()})
+            TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})

             return execution

@@ -291,15 +298,48 @@ class Docket:

         async def scheduler(*args: P.args, **kwargs: P.kwargs) -> Execution:
             execution = Execution(function, args, kwargs, when, key, attempt=1)
-            await self.cancel(key)
-            await self.schedule(execution)
+
+            async with self.redis() as redis:
+                async with redis.pipeline() as pipeline:
+                    await self._schedule(redis, pipeline, execution, replace=True)
+                    await pipeline.execute()

             TASKS_REPLACED.add(1, {**self.labels(), **execution.general_labels()})
+            TASKS_CANCELLED.add(1, {**self.labels(), **execution.general_labels()})
+            TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})

             return execution

         return scheduler

+    async def schedule(self, execution: Execution) -> None:
+        with tracer.start_as_current_span(
+            "docket.schedule",
+            attributes={
+                **self.labels(),
+                **execution.specific_labels(),
+                "code.function.name": execution.function.__name__,
+            },
+        ):
+            async with self.redis() as redis:
+                async with redis.pipeline() as pipeline:
+                    await self._schedule(redis, pipeline, execution, replace=False)
+                    await pipeline.execute()
+
+        TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
+
+    async def cancel(self, key: str) -> None:
+        with tracer.start_as_current_span(
+            "docket.cancel",
+            attributes={**self.labels(), "docket.key": key},
+        ):
+            async with self.redis() as redis:
+                async with redis.pipeline() as pipeline:
+                    await self._cancel(pipeline, key)
+                    await pipeline.execute()
+
+        TASKS_CANCELLED.add(1, self.labels())
+
     @property
     def queue_key(self) -> str:
         return f"{self.name}:queue"

@@ -314,7 +354,13 @@ class Docket:
     def parked_task_key(self, key: str) -> str:
         return f"{self.name}:{key}"

-    async def schedule(self, execution: Execution) -> None:
+    async def _schedule(
+        self,
+        redis: Redis,
+        pipeline: Pipeline,
+        execution: Execution,
+        replace: bool = False,
+    ) -> None:
         if self.strike_list.is_stricken(execution):
             logger.warning(
                 "%r is stricken, skipping schedule of %r",

@@ -334,53 +380,35 @@ class Docket:
         message: dict[bytes, bytes] = execution.as_message()
         propagate.inject(message, setter=message_setter)

-        with tracer.start_as_current_span(
-            "docket.schedule",
-            attributes={
-                **self.labels(),
-                **execution.specific_labels(),
-                "code.function.name": execution.function.__name__,
-            },
-        ):
-            key = execution.key
-            when = execution.when
+        key = execution.key
+        when = execution.when
+        known_task_key = self.known_task_key(key)

-            async with self.redis() as redis:
+        async with redis.lock(f"{known_task_key}:lock", timeout=10):
+            if replace:
+                await self._cancel(pipeline, key)
+            else:
                 # if the task is already in the queue or stream, retain it
-                if await redis.exists(self.known_task_key(key)):
+                if await redis.exists(known_task_key):
                     logger.debug(
-                        "Task %r is already in the queue or stream, skipping schedule",
+                        "Task %r is already in the queue or stream, not scheduling",
                         key,
                         extra=self.labels(),
                     )
                     return

-                async with redis.pipeline() as pipe:
-                    pipe.set(self.known_task_key(key), when.timestamp())
-
-                    if when <= datetime.now(timezone.utc):
-                        pipe.xadd(self.stream_key, message)  # type: ignore[arg-type]
-                    else:
-                        pipe.hset(self.parked_task_key(key), mapping=message)  # type: ignore[arg-type]
-                        pipe.zadd(self.queue_key, {key: when.timestamp()})
+            pipeline.set(known_task_key, when.timestamp())

-                    await pipe.execute()
-
-            TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
-
-    async def cancel(self, key: str) -> None:
-        with tracer.start_as_current_span(
-            "docket.cancel",
-            attributes={**self.labels(), "docket.key": key},
-        ):
-            async with self.redis() as redis:
-                async with redis.pipeline() as pipe:
-                    pipe.delete(self.known_task_key(key))
-                    pipe.delete(self.parked_task_key(key))
-                    pipe.zrem(self.queue_key, key)
-                    await pipe.execute()
+            if when <= datetime.now(timezone.utc):
+                pipeline.xadd(self.stream_key, message)  # type: ignore[arg-type]
+            else:
+                pipeline.hset(self.parked_task_key(key), mapping=message)  # type: ignore[arg-type]
+                pipeline.zadd(self.queue_key, {key: when.timestamp()})

-        TASKS_CANCELLED.add(1, self.labels())
+    async def _cancel(self, pipeline: Pipeline, key: str) -> None:
+        pipeline.delete(self.known_task_key(key))
+        pipeline.delete(self.parked_task_key(key))
+        pipeline.zrem(self.queue_key, key)

     @property
     def strike_key(self) -> str:
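
The net effect in docket.py: schedule() and cancel() remain the public, traced entry points, while the new private _schedule()/_cancel() only queue commands on a caller-supplied Pipeline. replace() can therefore cancel and reschedule in one Redis round trip, and _schedule() takes a short per-key lock so a concurrent add() cannot observe the window between the delete and the re-add. A sketch of the composition, wrapped in a hypothetical helper (replace_atomically is not part of the package):

    from docket import Docket, Execution

    async def replace_atomically(docket: Docket, execution: Execution) -> None:
        # Same shape as the new replace() scheduler above: the DELETE/ZREM for
        # the old task and the SET plus XADD-or-HSET/ZADD for the new one are
        # queued on a single pipeline and sent in one round trip.
        async with docket.redis() as redis:
            async with redis.pipeline() as pipeline:
                await docket._schedule(redis, pipeline, execution, replace=True)
                await pipeline.execute()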
docket/execution.py CHANGED

@@ -7,23 +7,40 @@ from typing import Any, Awaitable, Callable, Hashable, Literal, Mapping, Self, c

 import cloudpickle  # type: ignore[import]

+from opentelemetry import propagate
+import opentelemetry.context

 from .annotations import Logged
+from docket.instrumentation import message_getter

 logger: logging.Logger = logging.getLogger(__name__)

+TaskFunction = Callable[..., Awaitable[Any]]
 Message = dict[bytes, bytes]


+_signature_cache: dict[Callable[..., Any], inspect.Signature] = {}
+
+
+def get_signature(function: Callable[..., Any]) -> inspect.Signature:
+    if function in _signature_cache:
+        return _signature_cache[function]
+
+    signature = inspect.signature(function)
+    _signature_cache[function] = signature
+    return signature
+
+
 class Execution:
     def __init__(
         self,
-        function: Callable[..., Awaitable[Any]],
+        function: TaskFunction,
         args: tuple[Any, ...],
         kwargs: dict[str, Any],
         when: datetime,
         key: str,
         attempt: int,
+        trace_context: opentelemetry.context.Context | None = None,
     ) -> None:
         self.function = function
         self.args = args

@@ -31,6 +48,7 @@ class Execution:
         self.when = when
         self.key = key
         self.attempt = attempt
+        self.trace_context = trace_context

     def as_message(self) -> Message:
         return {

@@ -43,9 +61,7 @@ class Execution:
         }

     @classmethod
-    def from_message(
-        cls, function: Callable[..., Awaitable[Any]], message: Message
-    ) -> Self:
+    def from_message(cls, function: TaskFunction, message: Message) -> Self:
         return cls(
             function=function,
             args=cloudpickle.loads(message[b"args"]),

@@ -53,6 +69,7 @@ class Execution:
             when=datetime.fromisoformat(message[b"when"].decode()),
             key=message[b"key"].decode(),
             attempt=int(message[b"attempt"].decode()),
+            trace_context=propagate.extract(message, getter=message_getter),
         )

     def general_labels(self) -> Mapping[str, str]:

@@ -68,7 +85,7 @@ class Execution:

     def call_repr(self) -> str:
         arguments: list[str] = []
-        signature = inspect.signature(self.function)
+        signature = get_signature(self.function)
         function_name = self.function.__name__

         logged_parameters = Logged.annotated_parameters(signature)

@@ -77,14 +94,14 @@ class Execution:

         for i, argument in enumerate(self.args[: len(parameter_names)]):
             parameter_name = parameter_names[i]
-            if parameter_name in logged_parameters:
-                arguments.append(repr(argument))
+            if logged := logged_parameters.get(parameter_name):
+                arguments.append(logged.format(argument))
             else:
                 arguments.append("...")

         for parameter_name, argument in self.kwargs.items():
-            if parameter_name in logged_parameters:
-                arguments.append(f"{parameter_name}={repr(argument)}")
+            if logged := logged_parameters.get(parameter_name):
+                arguments.append(f"{parameter_name}={logged.format(argument)}")
             else:
                 arguments.append(f"{parameter_name}=...")

@@ -217,10 +234,10 @@ class StrikeList:
         if function_name in self.task_strikes and not task_strikes:
             return True

-        sig = inspect.signature(execution.function)
+        signature = get_signature(execution.function)

         try:
-            bound_args = sig.bind(*execution.args, **execution.kwargs)
+            bound_args = signature.bind(*execution.args, **execution.kwargs)
             bound_args.apply_defaults()
         except TypeError:
             # If we can't make sense of the arguments, just assume the task is fine

@@ -265,6 +282,8 @@ class StrikeList:
             case "between":  # pragma: no branch
                 lower, upper = strike_value
                 return lower <= value <= upper
+            case _:  # pragma: no cover
+                raise ValueError(f"Unknown operator: {operator}")
         except (ValueError, TypeError):
             # If we can't make the comparison due to incompatible types, just log the
             # error and assume the task is not stricken
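
Two threads run through this file: Execution now round-trips the producer's OpenTelemetry context (injected into the message in docket.py, recovered here via propagate.extract), and signature inspection is memoized in get_signature. A quick sketch of the memoization; my_task is illustrative:

    import inspect

    from docket.execution import get_signature

    async def my_task(x: int, y: str = "hi") -> None:
        ...

    # The first call pays for inspect.signature(); later calls return the
    # cached, identical Signature object from _signature_cache.
    assert get_signature(my_task) is get_signature(my_task)
    assert list(get_signature(my_task).parameters) == ["x", "y"]

    # By contrast, inspect.signature() builds a fresh Signature on every call.
    assert inspect.signature(my_task) is not inspect.signature(my_task)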