tasq-client-python 0.1.12__py3-none-any.whl → 0.1.16__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
tasq_client/check_type.py CHANGED
@@ -12,6 +12,11 @@ class OptionalKey:
     key: str
 
 
+@dataclass
+class OptionalValue:
+    template: Any
+
+
 class CheckTypeException(Exception):
     """
     An error indicating that the type of an object does not match the expected
@@ -60,6 +65,9 @@ def check_type(template: Any, obj: Any):
             raise CheckTypeException(
                 f"expected type {template} to be float or int but got {type(obj)}"
             )
+    elif isinstance(template, OptionalValue):
+        if obj is not None:
+            _wrap_check("optional value", lambda: check_type(template.template, obj))
     else:
         if not isinstance(obj, template):
             raise CheckTypeException(f"expected type {template} but got {type(obj)}")
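For context, a minimal sketch (not part of the package) of how the new OptionalValue template interacts with check_type, based on the hunks above: OptionalValue(x) accepts either None or any value matching the wrapped template x, and check_type raises CheckTypeException on a mismatch.

    from tasq_client.check_type import CheckTypeException, OptionalValue, check_type

    template = OptionalValue(str)
    check_type(template, "task-id")  # passes: the value matches the wrapped str template
    check_type(template, None)       # passes: None is always allowed for OptionalValue

    try:
        check_type(template, 123)    # fails: 123 is neither None nor a str
    except CheckTypeException as exc:
        print(f"type check failed: {exc}")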
tasq_client/client.py CHANGED
@@ -1,16 +1,17 @@
-import multiprocessing
+import random
 import sys
 import time
 import urllib.parse
 from contextlib import contextmanager
 from dataclasses import dataclass
-from multiprocessing.context import BaseContext
+from queue import Empty, Queue
+from threading import Thread
 from typing import Any, Dict, List, Optional, Tuple
 
 import requests
 from requests.adapters import HTTPAdapter, Retry
 
-from .check_type import CheckTypeException, OptionalKey, check_type
+from .check_type import CheckTypeException, OptionalKey, OptionalValue, check_type
 
 
 @dataclass
@@ -34,6 +35,8 @@ class QueueCounts:
     # server is old enough to not support rate estimation.
     rate: Optional[float] = None
 
+    modtime: Optional[int] = None
+
 
 class TasqClient:
     """
@@ -48,9 +51,10 @@ class TasqClient:
     :param username: optional username for basic authentication.
     :param password: optional password for basic authentication.
     :param max_timeout: the maximum amount of time (in seconds) to wait
-                        between attempts to pop a task in pop_running_task().
-                        Lower values mean waiting less long in the case that
-                        a new task is pushed or all tasks are finished.
+                        between attempts to pop a task in pop_running_task(),
+                        or push a task in push_blocking().
+                        Lower values mean waiting less long to pop in the case
+                        that a new task is pushed or all tasks are finished.
     :param task_timeout: if specified, override the timeout on the server with
                          a custom timeout. This can be useful if we know we
                          will be sending frequent keepalives, but the server
@@ -80,15 +84,70 @@ class TasqClient:
         self.retry_server_errors = retry_server_errors
         self.session = requests.Session()
         self._configure_session()
-        self.mp_context = multiprocessing.get_context("spawn")
 
-    def push(self, contents: str) -> str:
-        """Push a task and get its resulting ID."""
-        return self._post_form("/task/push", dict(contents=contents), type_template=str)
+    def push(self, contents: str, limit: int = 0) -> Optional[str]:
+        """
+        Push a task and get its resulting ID.
+
+        If limit is specified, then the task will not be pushed if the queue is
+        full, in which case None is returned.
+        """
+        return self._post_form(
+            f"/task/push", dict(contents=contents, limit=limit), type_template=OptionalValue(str)
+        )
+
+    def push_batch(self, ids: List[str], limit: int = 0) -> Optional[List[str]]:
+        """
+        Push a batch of tasks and get their resulting IDs.
+
+        If limit is specified, then tasks will not be pushed if the queue does
+        not have room for all the tasks at once, in which case None is
+        returned.
+
+        If limit is negative, then (-limit + batch_size) is used as the limit.
+        This effectively limits the size of the queue before a push rather than
+        after the push, to prevent large batches from being less likely to be
+        pushed than larger batches.
+        """
+        if limit < 0:
+            limit = -limit + len(ids)
+        return self._post_json(
+            f"/task/push_batch?limit={limit}", ids, type_template=OptionalValue([str])
+        )
+
+    def push_blocking(
+        self, contents: List[str], limit: int, init_wait_time: float = 1.0
+    ) -> List[str]:
+        """
+        Push one or more tasks atomically and block until they are pushed.
+
+        If the queue cannot fit the batch, this will wait to retry with random
+        exponential backoff. Backoff is randomized to mitigate starvation.
+
+        See push_batch() for details on passing a negative limit to avoid
+        starvation of larger batches when pushing from multiple processes.
 
-    def push_batch(self, ids: List[str]) -> List[str]:
-        """Push a batch of tasks and get their resulting IDs."""
-        return self._post_json("/task/push_batch", ids, type_template=[str])
+        Unlike push_batch(), the ids returned by this method will never be
+        None, since all tasks must be pushed.
+        """
+        assert isinstance(
+            contents, (list, tuple)
+        ), f"expected a list of task contents, got object of type {type(contents)}"
+        assert (
+            init_wait_time <= self.max_timeout
+        ), f"wait time {init_wait_time=} should not be larger than {self.max_timeout=}"
+        assert limit < 0 or limit >= len(contents)
+
+        cur_wait = init_wait_time
+        while True:
+            ids = self.push_batch(contents, limit=limit)
+            if ids is not None:
+                return ids
+            timeout = cur_wait * random.random()
+            time.sleep(timeout)
+            # Use summation instead of doubling to prevent really rapid
+            # growth of cur_wait with low probability.
+            cur_wait = min(cur_wait + timeout, self.max_timeout)
 
     def pop(self) -> Tuple[Optional[Task], Optional[float]]:
         """
@@ -182,9 +241,7 @@ class TasqClient:
         while True:
             task, timeout = self.pop()
             if task is not None:
-                rt = RunningTask(
-                    self, id=task.id, contents=task.contents, mp_context=self.mp_context
-                )
+                rt = RunningTask(self, id=task.id, contents=task.contents)
                 try:
                     yield rt
                     rt.completed()
@@ -200,7 +257,7 @@ class TasqClient:
     def counts(self, rate_window: int = 0) -> QueueCounts:
         """Get the number of tasks in each state within the queue."""
         data = self._get(
-            f"/counts?window={rate_window}",
+            f"/counts?window={rate_window}&includeModtime=1",
             {
                 "pending": int,
                 "running": int,
@@ -211,6 +268,14 @@
         )
         return QueueCounts(**data)
 
+    def clear(self):
+        """Deletes the queue and all tasks in it."""
+        result = self._post_form("/task/clear", dict())
+        if result is True:
+            return True
+        else:
+            raise TasqMisbehavingServerError("failed to clear queue")
+
     def __getstate__(
         self,
     ):
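A short sketch of the queue-level helpers touched above (again with an assumed constructor): counts() now asks the server for the queue modification time, surfaced as QueueCounts.modtime, and clear() deletes the queue, returning True on success or raising TasqMisbehavingServerError otherwise.

    from tasq_client.client import TasqClient

    client = TasqClient("http://localhost:8080")  # constructor arguments assumed

    counts = client.counts(rate_window=60)
    print(counts.pending, counts.running, counts.rate, counts.modtime)

    client.clear()  # raises TasqMisbehavingServerError if the server does not confirm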
@@ -281,28 +346,28 @@ class RunningTask(Task):
     cancel() or completed() is called.
     """
 
-    def __init__(
-        self, client: TasqClient, *args, mp_context: Optional[BaseContext] = None, **kwargs
-    ):
+    def __init__(self, client: TasqClient, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.client = client
-        self._proc = (mp_context or multiprocessing).Process(
+        self._kill_queue = Queue()
+        self._thread = Thread(
             target=RunningTask._keepalive_worker,
             name="tasq-keepalive-worker",
             args=(
+                self._kill_queue,
                 client,
                 self.id,
             ),
             daemon=True,
         )
-        self._proc.start()
+        self._thread.start()
 
     def cancel(self):
-        if self._proc is None:
+        if self._thread is None:
             return
-        self._proc.kill()
-        self._proc.join()
-        self._proc = None
+        self._kill_queue.put(None)
+        self._thread.join()
+        self._thread = None
 
     def completed(self):
         self.cancel()
@@ -310,6 +375,7 @@ class RunningTask(Task):
 
     @staticmethod
     def _keepalive_worker(
+        kill_queue: Queue,
         client: TasqClient,
         task_id: str,
     ):
@@ -317,8 +383,19 @@ class RunningTask(Task):
             try:
                 client.keepalive(task_id)
             except Exception as exc:  # pylint: disable=broad-except
+                # Ignore the error if we killed the thread during the
+                # keepalive call.
+                try:
+                    kill_queue.get(block=False)
+                    return
+                except Empty:
+                    pass
                 print(f"exception in tasq keepalive worker: {exc}", file=sys.stderr)
-            time.sleep(client.keepalive_interval)
+            try:
+                kill_queue.get(timeout=client.keepalive_interval)
+                return
+            except Empty:
+                pass
 
 
 class TasqRemoteError(Exception):
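The rewritten RunningTask keeps tasks alive from a daemon thread instead of a spawned process, and uses a Queue both as a kill signal and as an interruptible sleep: Queue.get(timeout=...) returns early when cancel() puts a sentinel, and raises queue.Empty when the interval elapses. A standalone sketch of that pattern (names here are illustrative, not the package's):

    import sys
    import time
    from queue import Empty, Queue
    from threading import Thread

    def keepalive_loop(kill_queue: Queue, interval: float) -> None:
        """Run a periodic action until a sentinel arrives on kill_queue."""
        while True:
            print("keepalive tick", file=sys.stderr)  # stand-in for client.keepalive(task_id)
            try:
                # Sleep for `interval`, but wake immediately if cancelled.
                kill_queue.get(timeout=interval)
                return
            except Empty:
                pass

    kill_queue = Queue()
    thread = Thread(target=keepalive_loop, args=(kill_queue, 1.0), daemon=True)
    thread.start()
    time.sleep(3.5)
    kill_queue.put(None)  # what RunningTask.cancel() does
    thread.join()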
tasq_client_python-0.1.16.dist-info/METADATA ADDED
@@ -0,0 +1,11 @@
+Metadata-Version: 2.1
+Name: tasq-client-python
+Version: 0.1.16
+Summary: UNKNOWN
+Home-page: UNKNOWN
+License: UNKNOWN
+Platform: UNKNOWN
+Requires-Dist: requests
+
+UNKNOWN
+
tasq_client_python-0.1.16.dist-info/RECORD ADDED
@@ -0,0 +1,8 @@
+tasq_client/__init__.py,sha256=I0ik-_c0hcVKUgx7QsE3YnoCQyAVMFKKOzoLt-jNFtE,277
+tasq_client/check_type.py,sha256=t_jreI8rf6QWS9Jf105ZvUVbwFe-uL4rMg4kZk6e4cA,2795
+tasq_client/check_type_test.py,sha256=bvhVaO-Bu18aI3J4Kxnb0H27fzDCKkTHVBWhjJMFMis,1433
+tasq_client/client.py,sha256=1f-BBYaiILfziZfzW3J4HOGwAgRbU80pbmlPX2J9wQE,15162
+tasq_client_python-0.1.16.dist-info/METADATA,sha256=Y5auv-WWFODNhED09_NvXXNcVvyq6GDUYGvlzWf-Tx8,168
+tasq_client_python-0.1.16.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+tasq_client_python-0.1.16.dist-info/top_level.txt,sha256=JUs_FTRfs_ggMu8zusU5CSXgAl-JHhrjMXxuZay-B58,12
+tasq_client_python-0.1.16.dist-info/RECORD,,
tasq_client_python-0.1.16.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.41.1)
+Generator: bdist_wheel (0.37.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
tasq_client_python-0.1.12.dist-info/METADATA DELETED
@@ -1,5 +0,0 @@
-Metadata-Version: 2.1
-Name: tasq-client-python
-Version: 0.1.12
-Requires-Dist: requests
-
tasq_client_python-0.1.12.dist-info/RECORD DELETED
@@ -1,8 +0,0 @@
-tasq_client/__init__.py,sha256=I0ik-_c0hcVKUgx7QsE3YnoCQyAVMFKKOzoLt-jNFtE,277
-tasq_client/check_type.py,sha256=lEys9wV_8xMnDVbJptG5VcIAZ_Dbr85wQYYKtfaZqrk,2583
-tasq_client/check_type_test.py,sha256=bvhVaO-Bu18aI3J4Kxnb0H27fzDCKkTHVBWhjJMFMis,1433
-tasq_client/client.py,sha256=4sBHtMbAHbQQHvdkCMtxRbohDvPU95gF8-53F-Nvv3s,12245
-tasq_client_python-0.1.12.dist-info/METADATA,sha256=Rz_gV72U-eae23k_B-IhZSvG1BIDVjkzJQblYTu57Mw,88
-tasq_client_python-0.1.12.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92
-tasq_client_python-0.1.12.dist-info/top_level.txt,sha256=JUs_FTRfs_ggMu8zusU5CSXgAl-JHhrjMXxuZay-B58,12
-tasq_client_python-0.1.12.dist-info/RECORD,,