pydocket 0.1.4-py3-none-any.whl → 0.2.0-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


docket/cli.py CHANGED
@@ -243,6 +243,13 @@ def worker(
             help="Exit after the current docket is finished",
         ),
     ] = False,
+    metrics_port: Annotated[
+        int | None,
+        typer.Option(
+            "--metrics-port",
+            help="The port to serve Prometheus metrics on",
+        ),
+    ] = None,
 ) -> None:
     asyncio.run(
         Worker.run(
@@ -254,6 +261,7 @@ def worker(
             reconnection_delay=reconnection_delay,
             minimum_check_interval=minimum_check_interval,
             until_finished=until_finished,
+            metrics_port=metrics_port,
             tasks=tasks,
         )
     )
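
The new flag follows typer's `Annotated`/`typer.Option` pattern: the parameter is typed `int | None`, so omitting `--metrics-port` leaves it `None` and no metrics server is started. A minimal, self-contained sketch of the same pattern (the app and command here are illustrative, not docket's CLI):

```python
# Illustrative sketch of the Annotated/typer.Option pattern used above.
from typing import Annotated, Optional

import typer

app = typer.Typer()


@app.command()
def worker(
    metrics_port: Annotated[
        Optional[int],
        typer.Option("--metrics-port", help="The port to serve Prometheus metrics on"),
    ] = None,
) -> None:
    # None means the flag was omitted, so a metrics server would not be started.
    typer.echo(f"metrics_port={metrics_port}")


if __name__ == "__main__":
    app()
```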
docket/dependencies.py CHANGED
@@ -61,15 +61,14 @@ class _TaskLogger(Dependency):
         self, docket: Docket, worker: Worker, execution: Execution
     ) -> logging.LoggerAdapter[logging.Logger]:
         logger = logging.getLogger(f"docket.task.{execution.function.__name__}")
-
-        extra = {
-            "execution.key": execution.key,
-            "execution.attempt": execution.attempt,
-            "worker.name": worker.name,
-            "docket.name": docket.name,
-        }
-
-        return logging.LoggerAdapter(logger, extra)
+        return logging.LoggerAdapter(
+            logger,
+            {
+                **docket.labels(),
+                **worker.labels(),
+                **execution.specific_labels(),
+            },
+        )


 def TaskLogger() -> logging.LoggerAdapter[logging.Logger]:
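
`_TaskLogger` now builds its `extra` mapping by merging the `labels()` of the docket, worker, and execution instead of hand-assembling keys. A standalone sketch of the mechanism, with illustrative label values:

```python
# Sketch: a LoggerAdapter attaches one merged label mapping to every record it emits.
import logging

logging.basicConfig(level=logging.INFO)

docket_labels = {"docket.name": "docket"}        # like Docket.labels()
worker_labels = {"docket.worker": "worker-1"}    # like Worker.labels()
execution_labels = {                             # like Execution.specific_labels()
    "docket.task": "send_email",
    "docket.key": "abc123",
}

adapter = logging.LoggerAdapter(
    logging.getLogger("docket.task.send_email"),
    {**docket_labels, **worker_labels, **execution_labels},
)
# Each key lands in the LogRecord's __dict__, where handlers and formatters can read it.
adapter.info("task starting")
```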
docket/docket.py CHANGED
@@ -13,6 +13,7 @@ from typing import (
     Collection,
     Hashable,
     Iterable,
+    Mapping,
     NoReturn,
     ParamSpec,
     Self,
@@ -26,7 +27,7 @@ from uuid import uuid4
 
 import redis.exceptions
 from opentelemetry import propagate, trace
-from redis.asyncio import Redis
+from redis.asyncio import ConnectionPool, Redis
 
 from .execution import (
     Execution,
@@ -112,11 +113,14 @@ class Docket:
     tasks: dict[str, Callable[..., Awaitable[Any]]]
     strike_list: StrikeList
 
+    _monitor_strikes_task: asyncio.Task[None]
+    _connection_pool: ConnectionPool
+
     def __init__(
         self,
         name: str = "docket",
         url: str = "redis://localhost:6379/0",
-        heartbeat_interval: timedelta = timedelta(seconds=1),
+        heartbeat_interval: timedelta = timedelta(seconds=2),
         missed_heartbeats: int = 5,
     ) -> None:
         """
@@ -144,6 +148,7 @@ class Docket:
         self.tasks = {fn.__name__: fn for fn in standard_tasks}
         self.strike_list = StrikeList()
 
+        self._connection_pool = ConnectionPool.from_url(self.url)  # type: ignore
         self._monitor_strikes_task = asyncio.create_task(self._monitor_strikes())
 
         # Ensure that the stream and worker group exist
@@ -176,23 +181,17 @@ class Docket:
         except asyncio.CancelledError:
             pass
 
+        await asyncio.shield(self._connection_pool.disconnect())
+        del self._connection_pool
+
     @asynccontextmanager
     async def redis(self) -> AsyncGenerator[Redis, None]:
-        redis: Redis | None = None
+        r = Redis(connection_pool=self._connection_pool)
+        await r.__aenter__()
         try:
-            redis = await Redis.from_url(
-                self.url,
-                single_connection_client=True,
-            )
-            await redis.__aenter__()
-            try:
-                yield redis
-            finally:
-                await asyncio.shield(redis.__aexit__(None, None, None))
+            yield r
         finally:
-            # redis 4.6.0 doesn't automatically disconnect and leaves connections open
-            if redis:
-                await asyncio.shield(redis.connection_pool.disconnect())
+            await asyncio.shield(r.__aexit__(None, None, None))
 
     def register(self, function: Callable[..., Awaitable[Any]]) -> None:
         from .dependencies import validate_dependencies
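
The connection handling changes shape here: instead of dialing a new single-connection client on every `redis()` call, the `Docket` owns one `ConnectionPool`, each call borrows a connection from it, and `asyncio.shield` guards the return path against cancellation. A reduced sketch of that pattern (URL and names illustrative):

```python
import asyncio
from contextlib import asynccontextmanager
from typing import AsyncGenerator

from redis.asyncio import ConnectionPool, Redis

pool = ConnectionPool.from_url("redis://localhost:6379/0")


@asynccontextmanager
async def borrowed() -> AsyncGenerator[Redis, None]:
    r = Redis(connection_pool=pool)
    await r.__aenter__()
    try:
        yield r
    finally:
        # shield: even if the caller is cancelled, the connection is returned
        await asyncio.shield(r.__aexit__(None, None, None))


async def main() -> None:
    async with borrowed() as r:
        await r.ping()
    await pool.disconnect()  # mirrors the cleanup in Docket's __aexit__


asyncio.run(main())
```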
@@ -214,6 +213,11 @@ class Docket:
         for function in collection:
             self.register(function)
 
+    def labels(self) -> Mapping[str, str]:
+        return {
+            "docket.name": self.name,
+        }
+
     @overload
     def add(
         self,
@@ -251,7 +255,7 @@
         execution = Execution(function, args, kwargs, when, key, attempt=1)
         await self.schedule(execution)
 
-        TASKS_ADDED.add(1, {"docket": self.name, "task": function.__name__})
+        TASKS_ADDED.add(1, {**self.labels(), **execution.general_labels()})
 
         return execution
 
@@ -287,7 +291,7 @@
         await self.cancel(key)
         await self.schedule(execution)
 
-        TASKS_REPLACED.add(1, {"docket": self.name, "task": function.__name__})
+        TASKS_REPLACED.add(1, {**self.labels(), **execution.general_labels()})
 
         return execution
 
@@ -314,9 +318,9 @@
             TASKS_STRICKEN.add(
                 1,
                 {
-                    "docket": self.name,
-                    "task": execution.function.__name__,
-                    "where": "docket",
+                    **self.labels(),
+                    **execution.specific_labels(),
+                    "docket.where": "docket",
                 },
             )
             return
@@ -327,10 +331,8 @@
         with tracer.start_as_current_span(
             "docket.schedule",
             attributes={
-                "docket.name": self.name,
-                "docket.execution.when": execution.when.isoformat(),
-                "docket.execution.key": execution.key,
-                "docket.execution.attempt": execution.attempt,
+                **self.labels(),
+                **execution.specific_labels(),
                 "code.function.name": execution.function.__name__,
             },
         ):
@@ -350,16 +352,14 @@
                 pipe.zadd(self.queue_key, {key: when.timestamp()})
                 await pipe.execute()
 
-        TASKS_SCHEDULED.add(
-            1, {"docket": self.name, "task": execution.function.__name__}
-        )
+        TASKS_SCHEDULED.add(1, {**self.labels(), **execution.general_labels()})
 
     async def cancel(self, key: str) -> None:
         with tracer.start_as_current_span(
             "docket.cancel",
             attributes={
-                "docket.name": self.name,
-                "docket.execution.key": key,
+                **self.labels(),
+                "docket.key": key,
             },
         ):
             async with self.redis() as redis:
@@ -368,7 +368,7 @@
                 pipe.zrem(self.queue_key, key)
                 await pipe.execute()
 
-        TASKS_CANCELLED.add(1, {"docket": self.name})
+        TASKS_CANCELLED.add(1, self.labels())
 
     @property
     def strike_key(self) -> str:
@@ -408,8 +408,8 @@
         with tracer.start_as_current_span(
             f"docket.{instruction.direction}",
             attributes={
-                "docket.name": self.name,
-                **instruction.as_span_attributes(),
+                **self.labels(),
+                **instruction.labels(),
             },
         ):
             async with self.redis() as redis:
@@ -441,18 +441,15 @@
                             else "Restoring"
                         ),
                         instruction.call_repr(),
-                        extra={"docket": self.name},
+                        extra=self.labels(),
                     )
 
-                    counter_labels = {"docket": self.name}
-                    if instruction.function:
-                        counter_labels["task"] = instruction.function
-                    if instruction.parameter:
-                        counter_labels["parameter"] = instruction.parameter
-
                     STRIKES_IN_EFFECT.add(
                         1 if instruction.direction == "strike" else -1,
                         {
+                            **self.labels(),
+                            **instruction.labels(),
+                        },
                     )
 
         except redis.exceptions.ConnectionError:  # pragma: no cover
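
Every span and counter call site above now composes its attributes from the same small `labels()` mappings rather than repeating literal dicts. A sketch of the resulting call shape (tracer name and label values illustrative; the default no-op tracer makes this runnable without an SDK configured):

```python
from opentelemetry import trace

tracer = trace.get_tracer("docket")

docket_labels = {"docket.name": "docket"}          # like Docket.labels()
execution_labels = {                               # like Execution.specific_labels()
    "docket.task": "send_email",
    "docket.key": "abc123",
    "docket.when": "2025-01-01T00:00:00+00:00",
    "docket.attempt": 1,
}

with tracer.start_as_current_span(
    "docket.schedule",
    attributes={**docket_labels, **execution_labels},
):
    pass  # the scheduling work happens here
```

One dict-unpacking expression per call site keeps the `docket.*` attribute names consistent across spans, counters, and log records.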
docket/execution.py CHANGED
@@ -3,7 +3,7 @@ import enum
 import inspect
 import logging
 from datetime import datetime
-from typing import Any, Awaitable, Callable, Hashable, Literal, Self, cast
+from typing import Any, Awaitable, Callable, Hashable, Literal, Mapping, Self, cast
 
 import cloudpickle  # type: ignore[import]
 
@@ -55,6 +55,17 @@ class Execution:
             attempt=int(message[b"attempt"].decode()),
         )
 
+    def general_labels(self) -> Mapping[str, str]:
+        return {"docket.task": self.function.__name__}
+
+    def specific_labels(self) -> Mapping[str, str | int]:
+        return {
+            "docket.task": self.function.__name__,
+            "docket.key": self.key,
+            "docket.when": self.when.isoformat(),
+            "docket.attempt": self.attempt,
+        }
+
     def call_repr(self) -> str:
         arguments: list[str] = []
         signature = inspect.signature(self.function)
@@ -131,17 +142,17 @@ class StrikeInstruction(abc.ABC):
         else:
             return Restore(function, parameter, operator, value)
 
-    def as_span_attributes(self) -> dict[str, str]:
-        span_attributes: dict[str, str] = {}
+    def labels(self) -> Mapping[str, str]:
+        labels: dict[str, str] = {}
         if self.function:
-            span_attributes["docket.function"] = self.function
+            labels["docket.task"] = self.function
 
         if self.parameter:
-            span_attributes["docket.parameter"] = self.parameter
-            span_attributes["docket.operator"] = self.operator
-            span_attributes["docket.value"] = repr(self.value)
+            labels["docket.parameter"] = self.parameter
+            labels["docket.operator"] = self.operator
+            labels["docket.value"] = repr(self.value)
 
-        return span_attributes
+        return labels
 
     def call_repr(self) -> str:
         return (
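
The two new methods appear to split labels by cardinality: `general_labels()` carries only the task name (bounded, safe as a metric label set), while `specific_labels()` adds the key, timestamp, and attempt (unbounded, suited to spans and logs). A sketch of the distinction with an illustrative stand-in class:

```python
from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Mapping


@dataclass
class ExampleExecution:  # illustrative stand-in for docket's Execution
    task: str
    key: str
    when: datetime
    attempt: int

    def general_labels(self) -> Mapping[str, str]:
        # bounded: one value per registered task function
        return {"docket.task": self.task}

    def specific_labels(self) -> Mapping[str, str | int]:
        # unbounded: unique per execution, so kept off metric label sets
        return {
            "docket.task": self.task,
            "docket.key": self.key,
            "docket.when": self.when.isoformat(),
            "docket.attempt": self.attempt,
        }


e = ExampleExecution("send_email", "abc123", datetime.now(timezone.utc), 1)
assert set(e.general_labels()) < set(e.specific_labels())
```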
docket/instrumentation.py CHANGED
@@ -1,5 +1,12 @@
+import threading
+from contextlib import contextmanager
+from typing import Generator, cast
+
 from opentelemetry import metrics
+from opentelemetry.exporter.prometheus import PrometheusMetricReader
+from opentelemetry.metrics import set_meter_provider
 from opentelemetry.propagators.textmap import Getter, Setter
+from opentelemetry.sdk.metrics import MeterProvider
 
 meter: metrics.Meter = metrics.get_meter("docket")
 
@@ -93,6 +100,17 @@ STRIKES_IN_EFFECT = meter.create_up_down_counter(
     unit="1",
 )
 
+QUEUE_DEPTH = meter.create_gauge(
+    "docket_queue_depth",
+    description="How many tasks are due to be executed now",
+    unit="1",
+)
+SCHEDULE_DEPTH = meter.create_gauge(
+    "docket_schedule_depth",
+    description="How many tasks are scheduled to be executed in the future",
+    unit="1",
+)
+
 Message = dict[bytes, bytes]
 
 
@@ -119,3 +137,38 @@ class MessageSetter(Setter[Message]):
 
 message_getter: MessageGetter = MessageGetter()
 message_setter: MessageSetter = MessageSetter()
+
+
+@contextmanager
+def metrics_server(
+    host: str = "0.0.0.0", port: int | None = None
+) -> Generator[None, None, None]:
+    if port is None:
+        yield
+        return
+
+    from wsgiref.types import WSGIApplication
+
+    from prometheus_client import REGISTRY
+    from prometheus_client.exposition import (
+        ThreadingWSGIServer,
+        _SilentHandler,  # type: ignore[member-access]
+        make_server,  # type: ignore[import]
+        make_wsgi_app,  # type: ignore[import]
+    )
+
+    set_meter_provider(MeterProvider(metric_readers=[PrometheusMetricReader()]))
+
+    server = make_server(
+        host,
+        port,
+        cast(WSGIApplication, make_wsgi_app(registry=REGISTRY)),
+        ThreadingWSGIServer,
+        handler_class=_SilentHandler,
+    )
+    with server:
+        t = threading.Thread(target=server.serve_forever)
+        t.daemon = True
+        t.start()
+
+        yield
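
`metrics_server` bridges OpenTelemetry metrics into the default `prometheus_client` registry via `PrometheusMetricReader`, then serves that registry from a daemonized WSGI thread for the lifetime of the context. A minimal sketch of the same wiring using `prometheus_client`'s stock HTTP server instead of the hand-rolled one above (port and metric name illustrative):

```python
from opentelemetry import metrics
from opentelemetry.exporter.prometheus import PrometheusMetricReader
from opentelemetry.sdk.metrics import MeterProvider
from prometheus_client import start_http_server

# Route OTel instruments into prometheus_client's default REGISTRY...
metrics.set_meter_provider(MeterProvider(metric_readers=[PrometheusMetricReader()]))
# ...and expose that registry over HTTP for scraping.
start_http_server(9090)

meter = metrics.get_meter("docket")
example = meter.create_counter("docket_example", unit="1")
example.add(1)
# `curl localhost:9090/metrics` would now include the counter.
```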
docket/tasks.py CHANGED
@@ -2,16 +2,21 @@ import asyncio
 import logging
 from datetime import datetime, timezone
 
-from .dependencies import CurrentDocket, CurrentExecution, CurrentWorker, Retry
+from .dependencies import (
+    CurrentDocket,
+    CurrentExecution,
+    CurrentWorker,
+    Retry,
+    TaskLogger,
+)
 from .docket import Docket, TaskCollection
 from .execution import Execution
 from .worker import Worker
 
-logger: logging.Logger = logging.getLogger(__name__)
-
 
 async def trace(
     message: str,
+    logger: logging.LoggerAdapter[logging.Logger] = TaskLogger(),
     docket: Docket = CurrentDocket(),
     worker: Worker = CurrentWorker(),
     execution: Execution = CurrentExecution(),
@@ -23,11 +28,6 @@
         docket.name,
         (datetime.now(timezone.utc) - execution.when),
         worker.name,
-        extra={
-            "docket.name": docket.name,
-            "worker.name": worker.name,
-            "execution.key": execution.key,
-        },
     )
 
 
@@ -45,7 +45,9 @@
     )
 
 
-async def sleep(seconds: float) -> None:
+async def sleep(
+    seconds: float, logger: logging.LoggerAdapter[logging.Logger] = TaskLogger()
+) -> None:
     logger.info("Sleeping for %s seconds", seconds)
     await asyncio.sleep(seconds)
 
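The built-in tasks now receive their logger the same way they receive the docket and worker: as a dependency declared in the signature's default value, which the worker resolves per execution so every record carries the merged labels. A sketch of a user task following the same pattern (the task itself is illustrative):

```python
import logging

from docket.dependencies import CurrentDocket, TaskLogger
from docket.docket import Docket


async def greet(
    name: str,
    logger: logging.LoggerAdapter[logging.Logger] = TaskLogger(),
    docket: Docket = CurrentDocket(),
) -> None:
    # When run by a worker, `logger` is bound to this execution's labels.
    logger.info("Hello %s from docket %s", name, docket.name)
```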
docket/worker.py CHANGED
@@ -7,6 +7,7 @@ from types import TracebackType
 from typing import (
     TYPE_CHECKING,
     Any,
+    Mapping,
     Protocol,
     Self,
     TypeVar,
@@ -27,7 +28,9 @@ from .docket import (
     RedisReadGroupResponse,
 )
 from .instrumentation import (
+    QUEUE_DEPTH,
     REDIS_DISRUPTIONS,
+    SCHEDULE_DEPTH,
     TASK_DURATION,
     TASK_PUNCTUALITY,
     TASKS_COMPLETED,
@@ -38,6 +41,7 @@ from .instrumentation import (
     TASKS_STRICKEN,
     TASKS_SUCCEEDED,
     message_getter,
+    metrics_server,
 )
 
 logger: logging.Logger = logging.getLogger(__name__)
@@ -94,11 +98,17 @@ class Worker:
                 pass
         del self._heartbeat_task
 
-    @property
-    def _log_context(self) -> dict[str, str]:
+    def labels(self) -> Mapping[str, str]:
+        return {
+            **self.docket.labels(),
+            "docket.worker": self.name,
+        }
+
+    def _log_context(self) -> Mapping[str, str]:
         return {
-            "queue_key": self.docket.queue_key,
-            "stream_key": self.docket.stream_key,
+            **self.labels(),
+            "docket.queue_key": self.docket.queue_key,
+            "docket.stream_key": self.docket.stream_key,
         }
 
     @classmethod
@@ -112,24 +122,26 @@
         reconnection_delay: timedelta = timedelta(seconds=5),
         minimum_check_interval: timedelta = timedelta(milliseconds=100),
         until_finished: bool = False,
+        metrics_port: int | None = None,
         tasks: list[str] = ["docket.tasks:standard_tasks"],
     ) -> None:
-        async with Docket(name=docket_name, url=url) as docket:
-            for task_path in tasks:
-                docket.register_collection(task_path)
-
-            async with Worker(
-                docket=docket,
-                name=name,
-                concurrency=concurrency,
-                redelivery_timeout=redelivery_timeout,
-                reconnection_delay=reconnection_delay,
-                minimum_check_interval=minimum_check_interval,
-            ) as worker:
-                if until_finished:
-                    await worker.run_until_finished()
-                else:
-                    await worker.run_forever()  # pragma: no cover
+        with metrics_server(port=metrics_port):
+            async with Docket(name=docket_name, url=url) as docket:
+                for task_path in tasks:
+                    docket.register_collection(task_path)
+
+                async with Worker(
+                    docket=docket,
+                    name=name,
+                    concurrency=concurrency,
+                    redelivery_timeout=redelivery_timeout,
+                    reconnection_delay=reconnection_delay,
+                    minimum_check_interval=minimum_check_interval,
+                ) as worker:
+                    if until_finished:
+                        await worker.run_until_finished()
+                    else:
+                        await worker.run_forever()  # pragma: no cover
 
     async def run_until_finished(self) -> None:
         """Run the worker until there are no more tasks to process."""
@@ -149,9 +161,7 @@
             try:
                 return await self._worker_loop(forever=forever)
             except redis.exceptions.ConnectionError:
-                REDIS_DISRUPTIONS.add(
-                    1, {"docket": self.docket.name, "worker": self.name}
-                )
+                REDIS_DISRUPTIONS.add(1, self.labels())
                 logger.warning(
                     "Error connecting to redis, retrying in %s...",
                     self.reconnection_delay,
@@ -263,7 +273,7 @@
                 future_work,
                 self.docket.queue_key,
                 self.docket.stream_key,
-                extra=self._log_context,
+                extra=self._log_context(),
             )
 
         redeliveries: RedisMessages
@@ -304,7 +314,7 @@
             logger.info(
                 "Shutdown requested, finishing %d active tasks...",
                 len(active_tasks),
-                extra=self._log_context,
+                extra=self._log_context(),
             )
         finally:
             if active_tasks:
@@ -312,28 +322,20 @@
                 await process_completed_tasks()
 
     async def _execute(self, message: RedisMessage) -> None:
+        log_context: dict[str, str | float] = self._log_context()
+
         function_name = message[b"function"].decode()
         function = self.docket.tasks.get(function_name)
         if function is None:
             logger.warning(
-                "Task function %r not found", function_name, extra=self._log_context
+                "Task function %r not found", function_name, extra=log_context
             )
             return
 
         execution = Execution.from_message(function, message)
-        name = execution.function.__name__
-        key = execution.key
 
-        log_context: dict[str, str | float] = {
-            **self._log_context,
-            "task": name,
-            "key": key,
-        }
-        counter_labels = {
-            "docket": self.docket.name,
-            "worker": self.name,
-            "task": name,
-        }
+        log_context |= execution.specific_labels()
+        counter_labels = {**self.labels(), **execution.general_labels()}
 
         arrow = "↬" if execution.attempt > 1 else "↪"
         call = execution.call_repr()
@@ -341,7 +343,7 @@
         if self.docket.strike_list.is_stricken(execution):
             arrow = "🗙"
             logger.warning("%s %s", arrow, call, extra=log_context)
-            TASKS_STRICKEN.add(1, counter_labels | {"where": "worker"})
+            TASKS_STRICKEN.add(1, counter_labels | {"docket.where": "worker"})
             return
 
         dependencies = self._get_dependencies(execution)
@@ -366,11 +368,8 @@
             execution.function.__name__,
             kind=trace.SpanKind.CONSUMER,
             attributes={
-                "docket.name": self.docket.name,
-                "docket.execution.when": execution.when.isoformat(),
-                "docket.execution.key": execution.key,
-                "docket.execution.attempt": execution.attempt,
-                "docket.execution.punctuality": punctuality.total_seconds(),
+                **self.labels(),
+                **execution.specific_labels(),
                 "code.function.name": execution.function.__name__,
             },
             links=links,
@@ -438,12 +437,7 @@
                 execution.attempt += 1
                 await self.docket.schedule(execution)
 
-                counter_labels = {
-                    "docket": self.docket.name,
-                    "worker": self.name,
-                    "task": execution.function.__name__,
-                }
-                TASKS_RETRIED.add(1, counter_labels)
+                TASKS_RETRIED.add(1, {**self.labels(), **execution.specific_labels()})
                 return True
 
         return False
@@ -487,12 +481,35 @@
                     )
 
                     await pipeline.execute()
+
+                async with r.pipeline() as pipeline:
+                    pipeline.xlen(self.docket.stream_key)
+                    pipeline.zcount(self.docket.queue_key, 0, now)
+                    pipeline.zcount(self.docket.queue_key, now, "+inf")
+
+                    (
+                        stream_depth,
+                        overdue_depth,
+                        schedule_depth,
+                    ) = await pipeline.execute()
+
+                    QUEUE_DEPTH.set(
+                        stream_depth + overdue_depth, self.docket.labels()
+                    )
+                    SCHEDULE_DEPTH.set(schedule_depth, self.docket.labels())
+
             except asyncio.CancelledError:  # pragma: no cover
                 return
             except redis.exceptions.ConnectionError:
-                REDIS_DISRUPTIONS.add(
-                    1, {"docket": self.docket.name, "worker": self.name}
+                REDIS_DISRUPTIONS.add(1, self.labels())
+                logger.exception(
+                    "Error sending worker heartbeat",
+                    exc_info=True,
+                    extra=self._log_context(),
                 )
-                logger.exception("Error sending worker heartbeat", exc_info=True)
             except Exception:
-                logger.exception("Error sending worker heartbeat", exc_info=True)
+                logger.exception(
+                    "Error sending worker heartbeat",
+                    exc_info=True,
+                    extra=self._log_context(),
                )
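
The heartbeat now measures backlog in the same pass: `XLEN` counts entries already delivered to the stream, and two `ZCOUNT`s split the scheduled set at `now` into overdue and future work; the gauges receive the sum and the future count respectively. A standalone sketch of the probe (key names illustrative; assumes redis-py 5.x for `aclose()`):

```python
import asyncio
import time

from redis.asyncio import Redis


async def depths(r: Redis, stream_key: str, queue_key: str) -> tuple[int, int]:
    now = time.time()
    async with r.pipeline() as pipe:
        pipe.xlen(stream_key)                # already delivered to the stream
        pipe.zcount(queue_key, 0, now)       # scheduled and now overdue
        pipe.zcount(queue_key, now, "+inf")  # scheduled for later
        stream_depth, overdue, future = await pipe.execute()
    # queue depth = runnable right now; schedule depth = still in the future
    return stream_depth + overdue, future


async def main() -> None:
    r = Redis.from_url("redis://localhost:6379/0")
    try:
        print(await depths(r, "docket:stream", "docket:queue"))
    finally:
        await r.aclose()


asyncio.run(main())
```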
{pydocket-0.1.4.dist-info → pydocket-0.2.0.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pydocket
-Version: 0.1.4
+Version: 0.2.0
 Summary: A distributed background task system for Python functions
 Project-URL: Homepage, https://github.com/chrisguidry/docket
 Project-URL: Bug Tracker, https://github.com/chrisguidry/docket/issues
pydocket-0.2.0.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+docket/__init__.py,sha256=GoJYpyuO6QFeBB8GNaxGGvMMuai55Eaw_8u-o1PM3hk,743
+docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
+docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
+docket/cli.py,sha256=EseF0Sj7IEgd9QDC-FSbHSffvF7DNsrmDGYGgZBdJc8,19413
+docket/dependencies.py,sha256=gIDwcBUhrLk7xGh0ZxdqpsnSeX-hZzGMNvUrVFfqbJI,4281
+docket/docket.py,sha256=zva6ofTm7i5hRwAaAnNtlgIqoMPaNLqCTs2PXGka_8s,19723
+docket/execution.py,sha256=ShP8MoLmxEslk2pAuhKi6KEEKbHdneyQukR9oQwXdjQ,11732
+docket/instrumentation.py,sha256=SUVhVFf8AX2HAfmi0HPTT_QvQezlGPJEKs_1YAmrCbA,4454
+docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+docket/tasks.py,sha256=RIlSM2omh-YDwVnCz6M5MtmK8T_m_s1w2OlRRxDUs6A,1437
+docket/worker.py,sha256=UZIPfAsIhsBsr2tBCgGGkLKU1mJs_nnP8-Retwl3218,19104
+pydocket-0.2.0.dist-info/METADATA,sha256=X8Yqvi_cqCqYaGu6ZGr4dMvxqcvy6otYvt-J2jwCHOs,13092
+pydocket-0.2.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pydocket-0.2.0.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
+pydocket-0.2.0.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
+pydocket-0.2.0.dist-info/RECORD,,
pydocket-0.1.4.dist-info/RECORD DELETED
@@ -1,16 +0,0 @@
-docket/__init__.py,sha256=GoJYpyuO6QFeBB8GNaxGGvMMuai55Eaw_8u-o1PM3hk,743
-docket/__main__.py,sha256=Vkuh7aJ-Bl7QVpVbbkUksAd_hn05FiLmWbc-8kbhZQ4,34
-docket/annotations.py,sha256=GZwOPtPXyeIhnsLh3TQMBnXrjtTtSmF4Ratv4vjPx8U,950
-docket/cli.py,sha256=N0vp1zO5Wau4nBDMJOU34hYn11HR3PaYY3Ybk1gS8XY,19188
-docket/dependencies.py,sha256=Vht3qKbik-HQ7jsAU5k-eig4_yuru56-ZewjBVVu4yM,4325
-docket/docket.py,sha256=TWeZ63NfN6Eq4lFzKoQTJz88ECZsH3-gqYszhQl-bXs,20124
-docket/execution.py,sha256=rHsQ60BbNREzcpUC_RvbGUctdLaprYp1x46sT6jTrdc,11416
-docket/instrumentation.py,sha256=USo8ptCFcwQj_YaUpJvsUHPb0QfQr50i9dF4tYgYde4,2992
-docket/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-docket/tasks.py,sha256=K1f_W1z4m9RVz1GJ1ymWY5ZaRmqHO1SebNBVENlkelU,1471
-docket/worker.py,sha256=8wnWxHj7ctHPxEGSRxPTsHksZ6OWRoG5dKpSkvIZP88,18479
-pydocket-0.1.4.dist-info/METADATA,sha256=y8PTR9Xwh8MeMr7ZhPJzUQGtQUjQXN3QRpYTvxtKfv0,13092
-pydocket-0.1.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pydocket-0.1.4.dist-info/entry_points.txt,sha256=4WOk1nUlBsUT5O3RyMci2ImuC5XFswuopElYcLHtD5k,47
-pydocket-0.1.4.dist-info/licenses/LICENSE,sha256=YuVWU_ZXO0K_k2FG8xWKe5RGxV24AhJKTvQmKfqXuyk,1087
-pydocket-0.1.4.dist-info/RECORD,,