pybgworker 0.2.1-py3-none-any.whl → 0.2.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pybgworker/__init__.py CHANGED
@@ -2,5 +2,5 @@ from .task import task
  from .result import AsyncResult
 
  __all__ = ["task", "AsyncResult"]
- __version__ = "0.2.1"
+ __version__ = "0.2.2"
 
pybgworker/ratelimit.py CHANGED
@@ -4,23 +4,40 @@ import threading
 
  class RateLimiter:
      def __init__(self, rate_per_sec):
-         self.rate = rate_per_sec
+         # default/global rate
+         self.default_rate = rate_per_sec
          self.lock = threading.Lock()
          self.timestamps = []
 
-     def acquire(self):
+     def acquire(self, rate=None):
+         """
+         rate: optional per-task rate limit
+         """
+         limit = rate or self.default_rate
+
+         # No limit configured
+         if not limit or limit <= 0:
+             return
+
          with self.lock:
              now = time.time()
 
-             # remove old timestamps
+             # Remove timestamps older than 1 second
              self.timestamps = [
                  t for t in self.timestamps
                  if now - t < 1
              ]
 
-             if len(self.timestamps) >= self.rate:
+             # Wait if limit reached
+             if len(self.timestamps) >= limit:
                  sleep_time = 1 - (now - self.timestamps[0])
                  if sleep_time > 0:
                      time.sleep(sleep_time)
 
+                 now = time.time()
+                 self.timestamps = [
+                     t for t in self.timestamps
+                     if now - t < 1
+                 ]
+
              self.timestamps.append(time.time())
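The `acquire()` change above makes the limiter usable for per-task throttling: a caller can pass an explicit rate, or omit it to fall back on the limiter's default. A minimal usage sketch (import path follows the package layout in this diff; the numbers are illustrative, not library defaults):

```python
from pybgworker.ratelimit import RateLimiter

limiter = RateLimiter(rate_per_sec=10)   # global/default rate: 10 acquisitions per second

limiter.acquire()        # throttled against the default rate (sliding 1-second window)
limiter.acquire(rate=2)  # per-task override: at most 2 acquisitions per second
# If both the override and the default are None or <= 0, acquire() returns
# immediately without sleeping.
```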
pybgworker/sqlite_queue.py CHANGED
@@ -105,7 +105,9 @@ class SQLiteQueue(BaseQueue):
                  UPDATE tasks
                  SET status='success',
                      finished_at=?,
-                     updated_at=?
+                     updated_at=?,
+                     locked_by=NULL,
+                     locked_at=NULL
                  WHERE id=?
              """, (now().isoformat(), now().isoformat(), task_id))
              conn.commit()
@@ -119,7 +121,9 @@ class SQLiteQueue(BaseQueue):
                  SET status='failed',
                      last_error=?,
                      finished_at=?,
-                     updated_at=?
+                     updated_at=?,
+                     locked_by=NULL,
+                     locked_at=NULL
                  WHERE id=?
              """, (error, now().isoformat(), now().isoformat(), task_id))
              conn.commit()
@@ -134,7 +138,24 @@ class SQLiteQueue(BaseQueue):
                  SET status='retrying',
                      attempt=attempt+1,
                      run_at=?,
-                     updated_at=?
+                     updated_at=?,
+                     locked_by=NULL,
+                     locked_at=NULL
                  WHERE id=?
              """, (run_at.isoformat(), now().isoformat(), task_id))
              conn.commit()
+
+     # ---------------- cancel ----------------
+
+     def cancel(self, task_id):
+         with get_conn() as conn:
+             conn.execute("""
+                 UPDATE tasks
+                 SET status='cancelled',
+                     finished_at=?,
+                     updated_at=?,
+                     locked_by=NULL,
+                     locked_at=NULL
+                 WHERE id=?
+             """, (now().isoformat(), now().isoformat(), task_id))
+             conn.commit()
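The new `cancel()` method mirrors `ack()` and `fail()`: it stamps the task row and clears the lock columns so a cancelled task is never re-fetched. A minimal sketch of calling it directly, as the worker's shutdown handler does further down in this diff (the task id value is hypothetical):

```python
from pybgworker.sqlite_queue import SQLiteQueue

queue = SQLiteQueue()

task_id = "example-task-id"  # hypothetical id of a queued or running task
queue.cancel(task_id)
# The row is now status='cancelled' with finished_at/updated_at stamped and
# locked_by/locked_at cleared, so no worker will pick it up again.
```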
pybgworker/task.py CHANGED
@@ -10,17 +10,27 @@ queue = SQLiteQueue()
  backend = SQLiteBackend()
 
 
- def task(name=None, retries=0, retry_delay=0, retry_for=(Exception,)):
+ def task(
+     name=None,
+     retries=0,
+     retry_delay=0,
+     retry_for=(Exception,),
+     timeout=None,
+     rate_limit=None,
+ ):
      if name is None:
          raise ValueError("Task name is required to avoid __main__ issues")
 
      def decorator(func):
          task_name = name or f"{func.__module__}.{func.__name__}"
 
+         # Store task metadata
          TASK_REGISTRY[task_name] = {
              "func": func,
              "retry_delay": retry_delay,
              "retry_for": retry_for,
+             "timeout": timeout,
+             "rate_limit": rate_limit,
          }
 
          @wraps(func)
@@ -42,7 +52,7 @@ def task(name=None, retries=0, retry_delay=0, retry_for=(Exception,)):
              "attempt": 0,
              "max_retries": retries,
              "run_at": run_at.isoformat(),
-             "priority": priority,  # ⭐ NEW
+             "priority": priority,
              "locked_by": None,
              "locked_at": None,
              "last_error": None,
pybgworker/worker.py CHANGED
@@ -1,6 +1,8 @@
  import time
  import traceback
  import threading
+ import signal
+ import os
  from multiprocessing import Process, Queue as MPQueue
 
  from .logger import log
@@ -17,7 +19,42 @@ queue = SQLiteQueue()
  backend = SQLiteBackend()
  limiter = RateLimiter(RATE_LIMIT)
 
- TASK_TIMEOUT = 150  # seconds
+ TASK_TIMEOUT = 150  # default timeout
+
+
+ shutdown_requested = False
+ last_shutdown_signal = 0
+ current_task_id = None
+ current_process = None
+
+
+ def handle_shutdown(signum, frame):
+     global shutdown_requested, last_shutdown_signal
+     global current_task_id, current_process
+
+     now_ts = time.time()
+
+     # Ignore duplicate signals (Windows issue)
+     if now_ts - last_shutdown_signal < 1:
+         return
+
+     last_shutdown_signal = now_ts
+
+     # Second Ctrl+C → force exit
+     if shutdown_requested:
+         log("worker_force_exit", worker=WORKER_NAME)
+
+         if current_task_id:
+             queue.cancel(current_task_id)
+             log("task_cancelled", task_id=current_task_id)
+
+         if current_process and current_process.is_alive():
+             current_process.terminate()
+
+         os._exit(1)
+
+     shutdown_requested = True
+     log("worker_shutdown_requested", worker=WORKER_NAME)
 
 
  def heartbeat():
@@ -38,6 +75,9 @@ def heartbeat():
 
 
  def run_task(func, args, kwargs, result_queue):
+     # Child process ignores Ctrl+C
+     signal.signal(signal.SIGINT, signal.SIG_IGN)
+
      try:
          result = func(*args, **kwargs)
          result_queue.put(("success", result))
@@ -46,27 +86,34 @@ def run_task(func, args, kwargs, result_queue):
 
 
  def run_worker():
+     global shutdown_requested, current_task_id, current_process
+
+     signal.signal(signal.SIGINT, handle_shutdown)
+     signal.signal(signal.SIGTERM, handle_shutdown)
+
      log("worker_start", worker=WORKER_NAME)
 
      threading.Thread(target=heartbeat, daemon=True).start()
      threading.Thread(target=run_scheduler, daemon=True).start()
 
-     while True:
+     while not shutdown_requested:
          task = queue.fetch_next(WORKER_NAME)
 
          if not task:
+             if shutdown_requested:
+                 break
              time.sleep(POLL_INTERVAL)
              continue
 
-         # ⭐ rate limiting happens here
-         limiter.acquire()
-
          meta = TASK_REGISTRY.get(task["name"])
          if not meta:
              queue.fail(task["id"], "Task not registered")
              log("task_invalid", task_id=task["id"])
              continue
 
+         # -------- Rate limit per task --------
+         limiter.acquire(meta.get("rate_limit"))
+
          func = meta["func"]
          retry_delay = meta["retry_delay"]
 
@@ -74,49 +121,69 @@ def run_worker():
          kwargs = loads(task["kwargs"])
 
          start_time = now()
-         log("task_start", task_id=task["id"], worker=WORKER_NAME)
+         current_task_id = task["id"]
+
+         log("task_start", task_id=current_task_id, worker=WORKER_NAME)
 
          result_queue = MPQueue()
          process = Process(target=run_task, args=(func, args, kwargs, result_queue))
+         current_process = process
 
          process.start()
-         process.join(TASK_TIMEOUT)
+
+         # -------- Timeout per task --------
+         timeout = meta.get("timeout") or TASK_TIMEOUT
+
+         start_join = time.time()
+
+         while process.is_alive():
+             if time.time() - start_join > timeout:
+                 break
+             time.sleep(0.2)
 
          if process.is_alive():
              process.terminate()
 
-             info = backend.get_task(task["id"])
+             info = backend.get_task(current_task_id)
              if info["status"] == "cancelled":
-                 log("task_cancelled", task_id=task["id"])
+                 log("task_cancelled", task_id=current_task_id)
+                 current_task_id = None
+                 current_process = None
                  continue
 
-             queue.fail(task["id"], "Task timeout")
-             log("task_timeout", task_id=task["id"])
+             queue.fail(current_task_id, "Task timeout")
+             log("task_timeout", task_id=current_task_id)
+             log("task_failed", task_id=current_task_id)
+             current_task_id = None
+             current_process = None
              continue
 
          if result_queue.empty():
-             queue.fail(task["id"], "Task crashed without result")
-             log("task_crash", task_id=task["id"])
+             queue.fail(current_task_id, "Task crashed")
+             log("task_crash", task_id=current_task_id)
+             current_task_id = None
+             current_process = None
              continue
 
          status, payload = result_queue.get()
          duration = (now() - start_time).total_seconds()
 
          if status == "success":
-             backend.store_result(task["id"], payload)
-             queue.ack(task["id"])
-             log("task_success",
-                 task_id=task["id"],
+             backend.store_result(current_task_id, payload)
+             queue.ack(current_task_id)
+             log(
+                 "task_success",
+                 task_id=current_task_id,
                  duration=duration,
-                 worker=WORKER_NAME)
-
+                 worker=WORKER_NAME,
+             )
          else:
              if task["attempt"] < task["max_retries"]:
-                 queue.reschedule(task["id"], retry_delay)
-                 log("task_retry",
-                     task_id=task["id"],
-                     attempt=task["attempt"] + 1,
-                     max=task["max_retries"])
+                 queue.reschedule(current_task_id, retry_delay)
              else:
-                 queue.fail(task["id"], payload)
-                 log("task_failed", task_id=task["id"])
+                 queue.fail(current_task_id, payload)
+
+         current_task_id = None
+         current_process = None
+
+     log("worker_stopped", worker=WORKER_NAME)
pybgworker-0.2.1.dist-info/METADATA → pybgworker-0.2.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pybgworker
- Version: 0.2.1
+ Version: 0.2.2
  Summary: Lightweight production-ready background task worker with cron, rate limiting and JSON observability
  Author: Prabhat Verma
  License: MIT
@@ -25,33 +25,38 @@ Dynamic: license-file
 
  # PyBgWorker
 
- A lightweight, production-ready background task framework for Python.
+ A lightweight, production-ready background task library for Python.
 
- PyBgWorker provides a durable SQLite-backed task queue, cron scheduling,
- rate limiting, retries, and structured observability — all without external
- infrastructure.
+ PyBgWorker provides a durable SQLite-backed task queue, scheduling (cron and
+ countdown/ETA), rate limiting, retries, and structured observability without
+ external infrastructure.
 
  It is designed to be simple, reliable, and easy to deploy.
 
  ---
 
- ## Features
+ ## Features
 
  - Persistent SQLite task queue
  - Multi-worker safe execution
+ - Task scheduling: cron + countdown/ETA
  - Retry + failure handling
+ - Task cancellation support
  - Crash isolation via subprocess
- - Cron scheduler for recurring jobs
+ - Task priority execution
+ - Task status tracking
+ - Result storage and retrieval
+ - Worker statistics and monitoring
  - JSON structured logging
  - Task duration tracking
  - Rate limiting (overload protection)
  - Heartbeat monitoring
- - CLI inspect / retry / purge / cancel
+ - CLI tools: inspect, retry, failed, purge, cancel, stats
  - Production-safe worker loop
 
  ---
 
- ## 🚀 Installation
+ ## Installation
 
  ```bash
  pip install pybgworker
@@ -59,7 +64,7 @@ pip install pybgworker
 
  ---
 
- ## 🧠 Basic Usage
+ ## Basic Usage
 
  ### Define a task
 
@@ -79,7 +84,7 @@ add.delay(1, 2)
 
  ---
 
- ## Run worker
+ ## Run worker
 
  ```bash
  python -m pybgworker.cli run --app example
@@ -87,7 +92,7 @@ python -m pybgworker.cli run --app example
 
  ---
 
- ## Cron Scheduler
+ ## Cron Scheduler
 
  Run recurring tasks:
 
@@ -105,7 +110,7 @@ Cron runs automatically inside the worker.
 
  ---
 
- ## 📊 JSON Logging
+ ## JSON Logging
 
  All worker events are structured JSON:
 
@@ -123,7 +128,7 @@ This enables:
 
  ---
 
- ## 🚦 Rate Limiting
+ ## Rate Limiting
 
  Protect infrastructure from overload:
 
@@ -135,7 +140,7 @@ Ensures predictable execution under heavy load.
 
  ---
 
- ## 🔍 CLI Commands
+ ## CLI Commands
 
  Inspect queue:
 
@@ -163,7 +168,7 @@ python -m pybgworker.cli purge
 
  ---
 
- ## 🧪 Observability
+ ## Observability
 
  PyBgWorker logs:
 
@@ -181,7 +186,7 @@ All machine-readable.
 
  ---
 
- ## 🎯 Design Goals
+ ## Design Goals
 
  - zero external dependencies
  - SQLite durability
@@ -192,18 +197,23 @@ All machine-readable.
 
  ---
 
- ## 📌 Roadmap
+ ## Roadmap
 
- Future upgrades may include:
+ Planned but not yet included:
 
- - dashboard web UI
- - metrics endpoint
- - Redis backend
- - workflow pipelines
- - cluster coordination
+ - Single-worker concurrency (process pool)
+ - Retry backoff + jitter policies
+ - Dead-letter queue for exhausted retries
+ - Task/result TTL and automatic DB cleanup
+ - Multiple named queues + routing
+ - Pluggable backends (Redis first)
+ - Cluster coordination / leader election for scheduler
+ - Metrics endpoint and health checks
+ - Dashboard API + web UI
+ - Workflow pipelines / DAGs
 
  ---
 
- ## 📄 License
+ ## License
 
  MIT License
pybgworker-0.2.1.dist-info/RECORD → pybgworker-0.2.2.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- pybgworker/__init__.py,sha256=YyQGbGdbLNexZU3tpt-zw1ABbVoZIiAAtB0lwGV_eYg,119
+ pybgworker/__init__.py,sha256=A12A0QQNbJgaJDvjpcCVd-GbK8KVPmn104tp7YDuJxE,118
  pybgworker/backends.py,sha256=vOpcY9lXfKm2-ffnFHEWcDvLAukB5KLgSGP3VO8bjEw,1239
  pybgworker/cancel.py,sha256=jNfyKrhDf8gtL3uSgLaknTKuLpsx4umJVTKYGKXs39E,703
  pybgworker/cli.py,sha256=lniULLMozN7CH9ijQqVUU6ujt_rqc6eAU5GHuxmQA3M,1576
@@ -8,19 +8,19 @@ pybgworker/inspect.py,sha256=pMtSUItc1VVS3sdh3Mi931Dho_RP2uPcHFJsDQT7x6w,1148
  pybgworker/logger.py,sha256=JzSJLX6NB8VXcTfiqH7NjBmwfCZ0Q6IO_2DNCkxTxl8,298
  pybgworker/purge.py,sha256=hvJhL1jIhKamElNnLpv5PH_eroDcTQc2cPZ_0MlwTzo,321
  pybgworker/queue.py,sha256=YPQgRLouqm9xXx7Azm80J19_eZ_4pkLZD4zNtJHvHEE,458
- pybgworker/ratelimit.py,sha256=p30pQXY3V8M8iXFdDXsA2OnXvrGeddp65JD7QyvIMxA,686
+ pybgworker/ratelimit.py,sha256=OzJzkuIy6btBYGu_2R2U-BMwCeKzfmUV-aE57rBbEI8,1167
  pybgworker/result.py,sha256=uJzsVeA7Aj84f70vSDRxDQ68YxAI26zuXEQlgiNJ_0U,1767
  pybgworker/retry.py,sha256=OLyBqnPxjMptVJ7zVFD7Bm66kdWAJSJpbFZOnhLTir0,707
  pybgworker/scheduler.py,sha256=hJ4jK0IiEFfEjYQOVIn6XDNuWUcaDLipVK85wBtoni8,1648
- pybgworker/sqlite_queue.py,sha256=sclzIY4M9KZjM5MsVK9YMm1ezKAp0qGnwGsQitaJG-Q,4355
+ pybgworker/sqlite_queue.py,sha256=DE06DXY_o6kei9h0_TBs5FegF3Uqusm4Hn4SOwe7ohE,5062
  pybgworker/state.py,sha256=LmUxPXSKC2GvFobOSvoHzLpFNM1M7yqtbPKgJ2Ts6Qk,650
  pybgworker/stats.py,sha256=AYrPeZsd_IsU6GpmSEhwS_NmAFwIwwm3lasmgDvu920,751
- pybgworker/task.py,sha256=l6oLVzU2Om-l_2R2HXBimPUo9OwZT2BiDpgWak_joAc,1884
+ pybgworker/task.py,sha256=cZ-sqMKJznzMS6KLRptwACX17LjIgd0yDf4uaQB2i6E,2040
  pybgworker/utils.py,sha256=w7cX28o7K0_wFpRHegtDZ44LpJgBVv7wSdzDlrILGGQ,569
- pybgworker/worker.py,sha256=wHCT-9RwM25j4dgztFgLlLrdeNzPzk6qXPjiMqJ--94,3697
- pybgworker-0.2.1.dist-info/licenses/LICENSE,sha256=eZySKOWd_q-qQa3p01Llyvu5OWqpLnQV6UXUlIaVdHg,1091
- pybgworker-0.2.1.dist-info/METADATA,sha256=0dbrYgliRgv47dGNBU9K3_ndKb7N0TVBZ5KnF49g9Oo,3780
- pybgworker-0.2.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- pybgworker-0.2.1.dist-info/entry_points.txt,sha256=iJkiCne1tUtm8TlO3TnVHv1Axe81TxU3vaWK6Cusja4,51
- pybgworker-0.2.1.dist-info/top_level.txt,sha256=0vv_-19bFyP0DL0lqlcA-tvz6tISlkYl3Z3v860To-s,11
- pybgworker-0.2.1.dist-info/RECORD,,
+ pybgworker/worker.py,sha256=wHJzUZHOi8MX-8Dt7snGhpwiueWvI8El8rUbA5_d8hA,5550
+ pybgworker-0.2.2.dist-info/licenses/LICENSE,sha256=eZySKOWd_q-qQa3p01Llyvu5OWqpLnQV6UXUlIaVdHg,1091
+ pybgworker-0.2.2.dist-info/METADATA,sha256=ybMHaquGp-YRCwp0A70o5f59xIXdDpSOW3mokzysPMk,4188
+ pybgworker-0.2.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ pybgworker-0.2.2.dist-info/entry_points.txt,sha256=iJkiCne1tUtm8TlO3TnVHv1Axe81TxU3vaWK6Cusja4,51
+ pybgworker-0.2.2.dist-info/top_level.txt,sha256=0vv_-19bFyP0DL0lqlcA-tvz6tISlkYl3Z3v860To-s,11
+ pybgworker-0.2.2.dist-info/RECORD,,