pydocket-0.6.0-py3-none-any.whl → pydocket-0.6.1-py3-none-any.whl

This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their public registry.

Potentially problematic release.

docket/annotations.py CHANGED
@@ -4,8 +4,14 @@ from typing import Any, Iterable, Mapping, Self
 
 
 class Annotation(abc.ABC):
+    _cache: dict[tuple[type[Self], inspect.Signature], Mapping[str, Self]] = {}
+
     @classmethod
     def annotated_parameters(cls, signature: inspect.Signature) -> Mapping[str, Self]:
+        key = (cls, signature)
+        if key in cls._cache:
+            return cls._cache[key]
+
         annotated: dict[str, Self] = {}
 
         for param_name, param in signature.parameters.items():
@@ -23,6 +29,7 @@ class Annotation(abc.ABC):
             elif isinstance(arg_type, type) and issubclass(arg_type, cls):
                 annotated[param_name] = arg_type()
 
+        cls._cache[key] = annotated
         return annotated
 
 
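The new _cache memoizes annotated_parameters() per (annotation class, signature) pair. This works because inspect.Signature is hashable and compares by value, so a given task signature is only walked once per annotation class. A minimal standalone sketch of the same pattern (the cache and functions here are illustrative, not docket's own):

import inspect
from typing import Mapping

_cache: dict[inspect.Signature, Mapping[str, inspect.Parameter]] = {}

def parameters_by_name(signature: inspect.Signature) -> Mapping[str, inspect.Parameter]:
    # inspect.Signature is hashable and compares by value, so two equal
    # signatures share one cache entry even when they are distinct objects
    if signature in _cache:
        return _cache[signature]
    result = dict(signature.parameters)
    _cache[signature] = result
    return result

def greet(name: str, excited: bool = False) -> str: ...

first = parameters_by_name(inspect.signature(greet))
second = parameters_by_name(inspect.signature(greet))
assert first is second  # the second call is served from the cache
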
docket/execution.py CHANGED
@@ -7,7 +7,7 @@ from typing import Any, Awaitable, Callable, Hashable, Literal, Mapping, Self, c
 
 import cloudpickle  # type: ignore[import]
 
-from opentelemetry import propagate
+from opentelemetry import trace, propagate
 import opentelemetry.context
 
 from .annotations import Logged
@@ -85,11 +85,10 @@ class Execution:
 
     def call_repr(self) -> str:
         arguments: list[str] = []
-        signature = get_signature(self.function)
         function_name = self.function.__name__
 
+        signature = get_signature(self.function)
         logged_parameters = Logged.annotated_parameters(signature)
-
         parameter_names = list(signature.parameters.keys())
 
         for i, argument in enumerate(self.args[: len(parameter_names)]):
@@ -107,6 +106,11 @@ class Execution:
 
         return f"{function_name}({', '.join(arguments)}){{{self.key}}}"
 
+    def incoming_span_links(self) -> list[trace.Link]:
+        initiating_span = trace.get_current_span(self.trace_context)
+        initiating_context = initiating_span.get_span_context()
+        return [trace.Link(initiating_context)] if initiating_context.is_valid else []
+
 
 class Operator(enum.StrEnum):
     EQUAL = "=="
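The new incoming_span_links() turns the trace context that traveled with the scheduled task into an OpenTelemetry span link, so the worker's execution span references the scheduling span instead of being parented under it. A rough sketch of that pattern using only the opentelemetry-api package (the span names and carrier here are illustrative, not docket's):

from opentelemetry import propagate, trace

tracer = trace.get_tracer("sketch")

carrier: dict[str, str] = {}
with tracer.start_as_current_span("schedule-task"):
    propagate.inject(carrier)  # what the sender would store alongside the task

# later, in the worker
context = propagate.extract(carrier)
initiating = trace.get_current_span(context).get_span_context()
links = [trace.Link(initiating)] if initiating.is_valid else []
with tracer.start_as_current_span("execute-task", links=links):
    pass  # linked to, rather than a child of, the scheduling span
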
docket/worker.py CHANGED
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 import sys
+import time
 from datetime import datetime, timedelta, timezone
 from types import TracebackType
 from typing import (
@@ -228,8 +229,10 @@ class Worker:
         active_tasks: dict[asyncio.Task[None], RedisMessageID] = {}
         available_slots = self.concurrency
 
+        log_context = self._log_context()
+
         async def check_for_work() -> bool:
-            logger.debug("Checking for work", extra=self._log_context())
+            logger.debug("Checking for work", extra=log_context)
             async with redis.pipeline() as pipeline:
                 pipeline.xlen(self.docket.stream_key)
                 pipeline.zcard(self.docket.queue_key)
@@ -239,7 +242,7 @@ class Worker:
             return stream_len > 0 or queue_len > 0
 
         async def get_redeliveries(redis: Redis) -> RedisReadGroupResponse:
-            logger.debug("Getting redeliveries", extra=self._log_context())
+            logger.debug("Getting redeliveries", extra=log_context)
             _, redeliveries, *_ = await redis.xautoclaim(
                 name=self.docket.stream_key,
                 groupname=self.docket.worker_group_name,
@@ -251,7 +254,7 @@ class Worker:
             return [(b"__redelivery__", redeliveries)]
 
         async def get_new_deliveries(redis: Redis) -> RedisReadGroupResponse:
-            logger.debug("Getting new deliveries", extra=self._log_context())
+            logger.debug("Getting new deliveries", extra=log_context)
             return await redis.xreadgroup(
                 groupname=self.docket.worker_group_name,
                 consumername=self.name,
@@ -261,21 +264,18 @@ class Worker:
             )
 
         def start_task(message_id: RedisMessageID, message: RedisMessage) -> bool:
-            if not message:  # pragma: no cover
-                return False
-
             function_name = message[b"function"].decode()
             if not (function := self.docket.tasks.get(function_name)):
                 logger.warning(
                     "Task function %r not found",
                     function_name,
-                    extra=self._log_context(),
+                    extra=log_context,
                 )
                 return False
 
             execution = Execution.from_message(function, message)
 
-            task = asyncio.create_task(self._execute(execution))
+            task = asyncio.create_task(self._execute(execution), name=execution.key)
             active_tasks[task] = message_id
 
             nonlocal available_slots
@@ -283,8 +283,15 @@ class Worker:
 
             return True
 
+        async def process_completed_tasks() -> None:
+            completed_tasks = {task for task in active_tasks if task.done()}
+            for task in completed_tasks:
+                message_id = active_tasks.pop(task)
+                await task
+                await ack_message(redis, message_id)
+
         async def ack_message(redis: Redis, message_id: RedisMessageID) -> None:
-            logger.debug("Acknowledging message", extra=self._log_context())
+            logger.debug("Acknowledging message", extra=log_context)
             async with redis.pipeline() as pipeline:
                 pipeline.xack(
                     self.docket.stream_key,
@@ -297,13 +304,6 @@ class Worker:
                 )
                 await pipeline.execute()
 
-        async def process_completed_tasks() -> None:
-            completed_tasks = {task for task in active_tasks if task.done()}
-            for task in completed_tasks:
-                message_id = active_tasks.pop(task)
-                await task
-                await ack_message(redis, message_id)
-
         has_work: bool = True
 
         try:
@@ -319,6 +319,9 @@ class Worker:
                 for source in [get_redeliveries, get_new_deliveries]:
                     for _, messages in await source(redis):
                         for message_id, message in messages:
+                            if not message:  # pragma: no cover
+                                continue
+
                             if not start_task(message_id, message):
                                 await self._delete_known_task(redis, message)
                                 await ack_message(redis, message_id)
@@ -334,7 +337,7 @@ class Worker:
             logger.info(
                 "Shutdown requested, finishing %d active tasks...",
                 len(active_tasks),
-                extra=self._log_context(),
+                extra=log_context,
             )
         finally:
             if active_tasks:
@@ -401,9 +404,11 @@ class Worker:
 
         total_work: int = sys.maxsize
 
+        log_context = self._log_context()
+
         while not worker_stopping.is_set() or total_work:
             try:
-                logger.debug("Scheduling due tasks", extra=self._log_context())
+                logger.debug("Scheduling due tasks", extra=log_context)
                 total_work, due_work = await stream_due_tasks(
                     keys=[self.docket.queue_key, self.docket.stream_key],
                     args=[datetime.now(timezone.utc).timestamp(), self.docket.name],
@@ -416,18 +421,18 @@ class Worker:
                     total_work,
                     self.docket.queue_key,
                     self.docket.stream_key,
-                    extra=self._log_context(),
+                    extra=log_context,
                 )
             except Exception:  # pragma: no cover
                 logger.exception(
                     "Error in scheduler loop",
                     exc_info=True,
-                    extra=self._log_context(),
+                    extra=log_context,
                 )
             finally:
                 await asyncio.sleep(self.scheduling_resolution.total_seconds())
 
-        logger.debug("Scheduler loop finished", extra=self._log_context())
+        logger.debug("Scheduler loop finished", extra=log_context)
 
     async def _schedule_all_automatic_perpetual_tasks(self) -> None:
         async with self.docket.redis() as redis:
@@ -469,38 +474,30 @@ class Worker:
         log_context = {**self._log_context(), **execution.specific_labels()}
         counter_labels = {**self.labels(), **execution.general_labels()}
 
-        arrow = "↬" if execution.attempt > 1 else "↪"
         call = execution.call_repr()
 
         if self.docket.strike_list.is_stricken(execution):
             async with self.docket.redis() as redis:
                 await self._delete_known_task(redis, execution)
 
-            arrow = "🗙"
-            logger.warning("%s %s", arrow, call, extra=log_context)
+            logger.warning("🗙 %s", call, extra=log_context)
             TASKS_STRICKEN.add(1, counter_labels | {"docket.where": "worker"})
             return
 
         if execution.key in self._execution_counts:
             self._execution_counts[execution.key] += 1
 
-        initiating_span = trace.get_current_span(execution.trace_context)
-        initiating_context = initiating_span.get_span_context()
-        links = [trace.Link(initiating_context)] if initiating_context.is_valid else []
-
-        start = datetime.now(timezone.utc)
-        punctuality = start - execution.when
-        log_context = {
-            **log_context,
-            "punctuality": punctuality.total_seconds(),
-        }
-        duration = timedelta(0)
+        start = time.time()
+        punctuality = start - execution.when.timestamp()
+        log_context = {**log_context, "punctuality": punctuality}
+        duration = 0.0
 
         TASKS_STARTED.add(1, counter_labels)
         TASKS_RUNNING.add(1, counter_labels)
-        TASK_PUNCTUALITY.record(punctuality.total_seconds(), counter_labels)
+        TASK_PUNCTUALITY.record(punctuality, counter_labels)
 
-        logger.info("%s [%s] %s", arrow, punctuality, call, extra=log_context)
+        arrow = "↬" if execution.attempt > 1 else "↪"
+        logger.info("%s [%s] %s", arrow, ms(punctuality), call, extra=log_context)
 
         with tracer.start_as_current_span(
             execution.function.__name__,
@@ -510,7 +507,7 @@ class Worker:
                 **execution.specific_labels(),
                 "code.function.name": execution.function.__name__,
             },
-            links=links,
+            links=execution.incoming_span_links(),
         ):
             async with resolved_dependencies(self, execution) as dependencies:
                 # Preemptively reschedule the perpetual task for the future, or clear
@@ -536,31 +533,35 @@ class Worker:
                     },
                 )
 
+                duration = log_context["duration"] = time.time() - start
                 TASKS_SUCCEEDED.add(1, counter_labels)
-                duration = datetime.now(timezone.utc) - start
-                log_context["duration"] = duration.total_seconds()
+
                 rescheduled = await self._perpetuate_if_requested(
-                    execution, dependencies, duration
+                    execution, dependencies, timedelta(seconds=duration)
                 )
+
                 arrow = "↫" if rescheduled else "↩"
-                logger.info("%s [%s] %s", arrow, duration, call, extra=log_context)
+                logger.info(
+                    "%s [%s] %s", arrow, ms(duration), call, extra=log_context
+                )
             except Exception:
+                duration = log_context["duration"] = time.time() - start
                 TASKS_FAILED.add(1, counter_labels)
-                duration = datetime.now(timezone.utc) - start
-                log_context["duration"] = duration.total_seconds()
+
                 retried = await self._retry_if_requested(execution, dependencies)
                 if not retried:
                     retried = await self._perpetuate_if_requested(
-                        execution, dependencies, duration
+                        execution, dependencies, timedelta(seconds=duration)
                     )
+
                 arrow = "↫" if retried else "↩"
                 logger.exception(
-                    "%s [%s] %s", arrow, duration, call, extra=log_context
+                    "%s [%s] %s", arrow, ms(duration), call, extra=log_context
                )
             finally:
                 TASKS_RUNNING.add(-1, counter_labels)
                 TASKS_COMPLETED.add(1, counter_labels)
-                TASK_DURATION.record(duration.total_seconds(), counter_labels)
+                TASK_DURATION.record(duration, counter_labels)
 
     async def _run_function_with_timeout(
         self,
@@ -603,15 +604,15 @@ class Worker:
         if not retry:
             return False
 
-        if retry.attempts is None or execution.attempt < retry.attempts:
-            execution.when = datetime.now(timezone.utc) + retry.delay
-            execution.attempt += 1
-            await self.docket.schedule(execution)
+        if retry.attempts is not None and execution.attempt >= retry.attempts:
+            return False
 
-            TASKS_RETRIED.add(1, {**self.labels(), **execution.specific_labels()})
-            return True
+        execution.when = datetime.now(timezone.utc) + retry.delay
+        execution.attempt += 1
+        await self.docket.schedule(execution)
 
-        return False
+        TASKS_RETRIED.add(1, {**self.labels(), **execution.specific_labels()})
+        return True
 
     async def _perpetuate_if_requested(
         self,
@@ -710,3 +711,10 @@ class Worker:
                 exc_info=True,
                 extra=self._log_context(),
             )
+
+
+def ms(seconds: float) -> str:
+    if seconds < 100:
+        return f"{seconds * 1000:6.0f}ms"
+    else:
+        return f"{seconds:6.0f}s "
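Task timing now uses time.time() floats instead of datetime arithmetic, and the new module-level ms() helper formats those durations for the log lines: values under 100 seconds render as whole milliseconds, longer ones as whole seconds, each right-aligned to six characters so the columns line up. For example:

ms(0.0123)  # '    12ms'
ms(5.0)     # '  5000ms'
ms(250.0)   # '   250s '  (the trailing space keeps alignment with the ms suffix)
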

pydocket-0.6.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.6.0
+Version: 0.6.1
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues

pydocket-0.6.1.dist-info/RECORD CHANGED
@@ -1,16 +1,16 @@
 docket/__init__.py,sha256=124XWbyQQHO1lhCoLQ-oheZnu4vNDHIaq4Whb7z3ogI,831
 docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
-docket/annotations.py,sha256=I00zB32BYWOQSNEjjCkc5n5DwTnT277I_BRYUJPS7w4,1474
+docket/annotations.py,sha256=6sCgQxsgOjBN6ithFdXulXq4CPNSdyFocwyJ1gK9v2Q,1688
 docket/cli.py,sha256=OWql6QFthSbvRCGkIg-ufo26F48z0eCmzRXJYOdyAEc,20309
 docket/dependencies.py,sha256=pkjseBZjdSpgW9g2H4cZ_RXIRZ2ZfdngBCXJGUcbmao,10052
 docket/docket.py,sha256=KJxgiyOskEHsRQOmfgLpJCYDNNleHI-vEKK3uBPL_K8,21420
-docket/execution.py,sha256=da1uYxSNAfz5FuNyCzX4I_PglHiMaf1oEv--K5TkjXc,13297
+docket/execution.py,sha256=f3LLt9bC7ExEZhgde5OBo1faKLYv-8ryfNLXSswo318,13579
 docket/instrumentation.py,sha256=bZlGA02JoJcY0J1WGm5_qXDfY0AXKr0ZLAYu67wkeKY,4611
 docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
-docket/worker.py,sha256=3sMcwGfSJ0Q4y5AuaqdgiGniDhJ21nM2PQmroJi_Q-A,26430
-pydocket-0.6.0.dist-info/METADATA,sha256=ktk1hqLmP_VSqYmdRtHFDPbEeRQD1J66ZAHEqaDXejk,13092
-pydocket-0.6.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.6.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.6.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.6.0.dist-info/RECORD,,
+docket/worker.py,sha256=NrzmfpjHjQaGS8CoTOiKM5Bn88tPh_q2hz9f4hFegSk,26280
+pydocket-0.6.1.dist-info/METADATA,sha256=mxI1OHWe9W9bAyi8QiH69eMSsSk1Dm2oDvh301BJFgo,13092
+pydocket-0.6.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.6.1.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.6.1.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.6.1.dist-info/RECORD,,
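
Each RECORD row has the form path,sha256=<digest>,size-in-bytes, where the digest is the file's SHA-256 hash encoded as URL-safe base64 with the trailing '=' padding stripped, per the wheel binary distribution format. A small sketch of recomputing one of the digests above, assuming the file is read from the unpacked 0.6.1 wheel:

import base64
import hashlib

def record_digest(payload: bytes) -> str:
    # URL-safe base64 of the SHA-256 digest, '=' padding removed,
    # matching the sha256= fields of a wheel's RECORD file
    raw = hashlib.sha256(payload).digest()
    return base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

with open("docket/annotations.py", "rb") as f:
    assert record_digest(f.read()) == "6sCgQxsgOjBN6ithFdXulXq4CPNSdyFocwyJ1gK9v2Q"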