dj-queue 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. dj_queue-0.1.0/LICENSE +21 -0
  2. dj_queue-0.1.0/PKG-INFO +613 -0
  3. dj_queue-0.1.0/README.md +587 -0
  4. dj_queue-0.1.0/dj_queue/__init__.py +0 -0
  5. dj_queue-0.1.0/dj_queue/admin.py +90 -0
  6. dj_queue-0.1.0/dj_queue/api.py +122 -0
  7. dj_queue-0.1.0/dj_queue/apps.py +6 -0
  8. dj_queue-0.1.0/dj_queue/backend.py +161 -0
  9. dj_queue-0.1.0/dj_queue/config.py +456 -0
  10. dj_queue-0.1.0/dj_queue/contrib/__init__.py +1 -0
  11. dj_queue-0.1.0/dj_queue/contrib/asgi.py +32 -0
  12. dj_queue-0.1.0/dj_queue/contrib/gunicorn.py +25 -0
  13. dj_queue-0.1.0/dj_queue/db.py +68 -0
  14. dj_queue-0.1.0/dj_queue/exceptions.py +26 -0
  15. dj_queue-0.1.0/dj_queue/hooks.py +86 -0
  16. dj_queue-0.1.0/dj_queue/log.py +27 -0
  17. dj_queue-0.1.0/dj_queue/management/__init__.py +1 -0
  18. dj_queue-0.1.0/dj_queue/management/commands/__init__.py +1 -0
  19. dj_queue-0.1.0/dj_queue/management/commands/dj_queue.py +39 -0
  20. dj_queue-0.1.0/dj_queue/management/commands/dj_queue_health.py +32 -0
  21. dj_queue-0.1.0/dj_queue/management/commands/dj_queue_prune.py +22 -0
  22. dj_queue-0.1.0/dj_queue/migrations/0001_initial.py +262 -0
  23. dj_queue-0.1.0/dj_queue/migrations/0002_pause_semaphore.py +52 -0
  24. dj_queue-0.1.0/dj_queue/migrations/0003_recurringtask_recurringexecution.py +73 -0
  25. dj_queue-0.1.0/dj_queue/migrations/__init__.py +0 -0
  26. dj_queue-0.1.0/dj_queue/models/__init__.py +24 -0
  27. dj_queue-0.1.0/dj_queue/models/jobs.py +328 -0
  28. dj_queue-0.1.0/dj_queue/models/recurring.py +51 -0
  29. dj_queue-0.1.0/dj_queue/models/runtime.py +55 -0
  30. dj_queue-0.1.0/dj_queue/operations/__init__.py +1 -0
  31. dj_queue-0.1.0/dj_queue/operations/cleanup.py +37 -0
  32. dj_queue-0.1.0/dj_queue/operations/concurrency.py +176 -0
  33. dj_queue-0.1.0/dj_queue/operations/jobs.py +637 -0
  34. dj_queue-0.1.0/dj_queue/operations/recurring.py +81 -0
  35. dj_queue-0.1.0/dj_queue/routers.py +26 -0
  36. dj_queue-0.1.0/dj_queue/runtime/__init__.py +1 -0
  37. dj_queue-0.1.0/dj_queue/runtime/base.py +198 -0
  38. dj_queue-0.1.0/dj_queue/runtime/dispatcher.py +78 -0
  39. dj_queue-0.1.0/dj_queue/runtime/errors.py +39 -0
  40. dj_queue-0.1.0/dj_queue/runtime/interruptible.py +46 -0
  41. dj_queue-0.1.0/dj_queue/runtime/notify.py +119 -0
  42. dj_queue-0.1.0/dj_queue/runtime/pidfile.py +39 -0
  43. dj_queue-0.1.0/dj_queue/runtime/pool.py +62 -0
  44. dj_queue-0.1.0/dj_queue/runtime/procline.py +11 -0
  45. dj_queue-0.1.0/dj_queue/runtime/scheduler.py +128 -0
  46. dj_queue-0.1.0/dj_queue/runtime/supervisor.py +460 -0
  47. dj_queue-0.1.0/dj_queue/runtime/worker.py +116 -0
  48. dj_queue-0.1.0/pyproject.toml +63 -0
dj_queue-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 coriocactus
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
6
+ this software and associated documentation files (the "Software"), to deal in
7
+ the Software without restriction, including without limitation the rights to
8
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
9
+ of the Software, and to permit persons to whom the Software is furnished to do
10
+ so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,613 @@
1
+ Metadata-Version: 2.4
2
+ Name: dj-queue
3
+ Version: 0.1.0
4
+ Summary: Database-backed task queue backend for Django's django.tasks framework
5
+ License-Expression: MIT
6
+ License-File: LICENSE
7
+ Classifier: Development Status :: 3 - Alpha
8
+ Classifier: Framework :: Django
9
+ Classifier: Framework :: Django :: 6.0
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: Programming Language :: Python :: 3
12
+ Classifier: Programming Language :: Python :: 3.12
13
+ Classifier: Programming Language :: Python :: 3.13
14
+ Classifier: Programming Language :: Python :: 3.14
15
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
16
+ Requires-Dist: croniter>=6.2.2
17
+ Requires-Dist: django>=6.0.0
18
+ Requires-Dist: pyyaml>=6.0.3
19
+ Requires-Dist: psycopg>=3.3.3 ; extra == 'postgres'
20
+ Requires-Python: >=3.12
21
+ Project-URL: Homepage, https://github.com/coriocactus/dj_queue
22
+ Project-URL: Repository, https://github.com/coriocactus/dj_queue
23
+ Project-URL: Issues, https://github.com/coriocactus/dj_queue/issues
24
+ Provides-Extra: postgres
25
+ Description-Content-Type: text/markdown
26
+
27
+ # dj_queue
28
+
29
+ `dj_queue` is a database-backed task queue backend for Django's `django.tasks`
30
+ framework.
31
+
32
+ It keeps the queue, live execution state, runtime metadata, and task results in
33
+ your database.
34
+
35
+ - no Redis, RabbitMQ, or separate result store
36
+ - PostgreSQL is the first-class production backend
37
+ - MySQL 8+, MariaDB 10.6+, and SQLite are supported
38
+ - immediate, scheduled, recurring, and concurrency-limited work
39
+ - fork and async runtime modes
40
+ - multi-database aware from day one
41
+
42
+ `dj_queue` is inspired by Rails' Solid Queue, but shaped to fit Django's task
43
+ backend API and long-running process model.
44
+
45
+ ## Why dj_queue
46
+
47
+ The database is the queue.
48
+
49
+ That gives `dj_queue` a narrow, explicit shape:
50
+
51
+ - application code uses Django's `@task` API
52
+ - `DjQueueBackend` stores jobs and results in Django-managed tables
53
+ - workers, dispatchers, and schedulers all share one operations layer
54
+ - PostgreSQL can use `LISTEN/NOTIFY` and `SKIP LOCKED` as optimizations
55
+ - polling remains the correctness path on every supported database
56
+
57
+ If your application already depends on the database being the durable system of
58
+ record, `dj_queue` lets background work follow the same model.
59
+
60
+ ## Installation
61
+
62
+ `dj_queue` requires Python 3.12+ and Django 6.0+.
63
+
64
+ Install the package:
65
+
66
+ ```bash
67
+ pip install dj-queue
68
+ ```
69
+
70
+ Backend-specific extras are available when you want `dj_queue` to install a
71
+ database adapter for you:
72
+
73
+ ```bash
74
+ pip install "dj-queue[postgres]"
75
+ ```
76
+
77
+ Notes:
78
+
79
+ - `postgres` installs `psycopg`, which Django's PostgreSQL backend and
80
+ `dj_queue`'s optional `LISTEN/NOTIFY` wakeups use
81
+ - for MySQL or MariaDB, install and configure a Django-compatible driver in
82
+ your application following Django's database docs
83
+
84
+ Add `dj_queue` to `INSTALLED_APPS`, register the router, and point Django's task
85
+ backend at `DjQueueBackend`:
86
+
87
+ ```python
88
+ # settings.py
89
+
90
+ INSTALLED_APPS = [
91
+ # ...
92
+ "dj_queue",
93
+ ]
94
+
95
+ DATABASE_ROUTERS = ["dj_queue.routers.DjQueueRouter"]
96
+
97
+ TASKS = {
98
+ "default": {
99
+ "BACKEND": "dj_queue.backend.DjQueueBackend",
100
+ "QUEUES": [],
101
+ "OPTIONS": {},
102
+ },
103
+ }
104
+ ```
105
+
106
+ Run migrations:
107
+
108
+ ```bash
109
+ python manage.py migrate
110
+ ```
111
+
112
+ ## Quick Start
113
+
114
+ Define a task with Django's `@task` decorator:
115
+
116
+ ```python
117
+ # myapp/tasks.py
118
+
119
+ from django.tasks import task
120
+
121
+
122
+ @task
123
+ def add(a, b):
124
+ return a + b
125
+ ```
126
+
127
+ Start the `dj_queue` runtime in one terminal:
128
+
129
+ ```bash
130
+ python manage.py dj_queue
131
+ ```
132
+
133
+ Then enqueue work from another terminal or from your application code:
134
+
135
+ ```python
136
+ from myapp.tasks import add
137
+
138
+ task_result = add.enqueue(3, 7)
139
+ print(task_result.id)
140
+ ```
141
+
142
+ Read the result back through Django's task backend API:
143
+
144
+ ```python
145
+ from myapp.tasks import add
146
+
147
+ fresh_result = add.get_backend().get_result(task_result.id)
148
+ print(fresh_result.status)
149
+ print(fresh_result.return_value)
150
+ ```
151
+
152
+ When the worker has executed the job, `fresh_result.return_value` will be `10`.
153
+
154
+ ## Data Contract
155
+
156
+ Job payloads and persisted return values are stored in JSON columns, so they
157
+ must be JSON round-trippable.
158
+
159
+ - enqueueing args or kwargs that cannot round-trip through JSON fails immediately
160
+ - returning a non-JSON-serializable value marks the job failed instead of
161
+ leaving it claimed forever
162
+
163
+ If you need to pass model instances, files, or custom objects, store them
164
+ elsewhere and pass identifiers or serialized data instead.
165
+
166
+ ## How dj_queue runs
167
+
168
+ `python manage.py dj_queue` starts a supervisor for one backend alias.
169
+
170
+ Job lifecycle:
171
+
172
+ `enqueue -> ready | scheduled | blocked -> claimed -> successful | failed`
173
+
174
+ The runtime has four moving parts:
175
+
176
+ - `supervisor`: boots and stops the runtime
177
+ - `workers`: claim ready jobs and execute them
178
+ - `dispatchers`: promote due scheduled jobs and run concurrency maintenance
179
+ - `scheduler`: enqueues recurring tasks and runs finished-job cleanup when configured
180
+
181
+ Useful command variants:
182
+
183
+ ```bash
184
+ python manage.py dj_queue
185
+ python manage.py dj_queue --mode async
186
+ python manage.py dj_queue --only-work
187
+ python manage.py dj_queue --only-dispatch
188
+ python manage.py dj_queue --skip-recurring
189
+ ```
190
+
191
+ Mode and topology notes:
192
+
193
+ - `fork` is the default standalone mode
194
+ - `async` runs supervised actors in threads inside one process
195
+ - `--only-work` starts workers without dispatchers or scheduler
196
+ - `--only-dispatch` starts dispatchers without workers or scheduler
197
+ - `--skip-recurring` starts without the scheduler
198
+
199
+ If you're familiar with Solid Queue, the same high-level tradeoff is described
200
+ in its [fork vs async mode](https://github.com/rails/solid_queue?tab=readme-ov-file#fork-vs-async-mode)
201
+ section.
202
+
203
+ ## Choose a setup
204
+
205
+ Once migrations are in place, start processing jobs with `python manage.py dj_queue`
206
+ on the machine that should do the work. With the default configuration, this
207
+ starts the supervisor, workers, and dispatcher for the default backend alias and
208
+ processes all queues.
209
+
210
+ For most deployments, start with a standalone `dj_queue` process. Reach for a
211
+ dedicated queue database before you reach for embedded mode.
212
+
213
+ - single database, standalone process: easiest way to start. Use the app
214
+ database and run `python manage.py dj_queue`
215
+ - dedicated queue database: recommended production default. Keep queue tables
216
+ and runtime traffic on `database_alias`. See [Multi-Database Setup](#multi-database-setup)
217
+ - embedded server mode: run `dj_queue` inside ASGI or Gunicorn when you want
218
+ queue execution colocated with the server process. See [Embedded Server Mode](#embedded-server-mode)
219
+
220
+ For small deployments, running `dj_queue` on the same machine as the web server
221
+ is often enough. When you need more capacity, multiple machines can point at
222
+ the same queue database. Full `python manage.py dj_queue` instances coordinate
223
+ through database locking, so workers and dispatchers share load safely and
224
+ recurring-task firing stays deduplicated across schedulers.
225
+
226
+ In practice, keep recurring settings identical on every full node and prefer one
227
+ full instance plus additional `python manage.py dj_queue --only-work` nodes.
228
+ Add `--only-dispatch` nodes only when you need more scheduled-job promotion or
229
+ concurrency-maintenance throughput.
230
+
231
+ ## Common Patterns
232
+
233
+ ### Scheduled jobs
234
+
235
+ Use `run_after` to keep work out of the ready queue until a future time:
236
+
237
+ ```python
238
+ from datetime import timedelta
239
+
240
+ from django.utils import timezone
241
+
242
+ from myapp.tasks import send_digest
243
+
244
+ future = timezone.now() + timedelta(hours=1)
245
+ send_digest.using(run_after=future).enqueue("daily")
246
+ ```
247
+
248
+ ### Priorities and named queues
249
+
250
+ Use `priority` and `queue_name` on the task call itself:
251
+
252
+ ```python
253
+ from myapp.tasks import deliver_email
254
+
255
+ deliver_email.using(queue_name="email", priority=10).enqueue("welcome")
256
+ deliver_email.using(queue_name="email", priority=-5).enqueue("digest")
257
+ ```
258
+
259
+ ### Bulk enqueue
260
+
261
+ Use `enqueue_all()` when you need one backend call to submit many jobs:
262
+
263
+ ```python
264
+ from myapp.tasks import process_item
265
+
266
+ results = process_item.get_backend().enqueue_all(
267
+ [(process_item, [item_id], {}) for item_id in range(5)]
268
+ )
269
+ ```
270
+
271
+ ## Ordering and transactions
272
+
273
+ Queue ordering rules:
274
+
275
+ - within one selected queue, higher numeric `priority` is claimed first
276
+ - across multiple queue selectors, selector order wins
277
+ - `"*"` matches all queues
278
+ - selectors ending in `*` match queue prefixes such as `email*`
279
+
280
+ For example, a worker configured with `queues: ["email", "default"]` will
281
+ prefer ready work from `email` before `default`, even if `default` contains
282
+ higher-priority rows.
283
+
284
+ `enqueue()` writes immediately. If a task depends on rows that are still inside
285
+ the current transaction, use `enqueue_on_commit()`:
286
+
287
+ ```python
288
+ from django.db import transaction
289
+
290
+ from dj_queue.api import enqueue_on_commit
291
+ from myapp.tasks import send_receipt
292
+
293
+ with transaction.atomic():
294
+ order = create_order()
295
+ enqueue_on_commit(send_receipt, order.id)
296
+ ```
297
+
298
+ ## Recurring Tasks
299
+
300
+ `dj_queue` supports both static recurring tasks from settings and dynamic
301
+ recurring tasks managed at runtime.
302
+
303
+ ### Static recurring tasks
304
+
305
+ Define recurring tasks in `TASKS[...]["OPTIONS"]["recurring"]`:
306
+
307
+ ```python
308
+ TASKS = {
309
+ "default": {
310
+ "BACKEND": "dj_queue.backend.DjQueueBackend",
311
+ "QUEUES": [],
312
+ "OPTIONS": {
313
+ "recurring": {
314
+ "nightly_cleanup": {
315
+ "task_path": "myapp.tasks.cleanup",
316
+ "schedule": "0 3 * * *",
317
+ "queue_name": "maintenance",
318
+ "priority": -5,
319
+ "description": "nightly cleanup",
320
+ },
321
+ },
322
+ },
323
+ },
324
+ }
325
+ ```
326
+
327
+ ### Dynamic recurring tasks
328
+
329
+ Create, update, and remove recurring tasks at runtime:
330
+
331
+ ```python
332
+ from dj_queue.api import schedule_recurring_task, unschedule_recurring_task
333
+
334
+ schedule_recurring_task(
335
+ key="tenant_42_report",
336
+ task_path="myapp.tasks.send_report",
337
+ schedule="0 * * * *",
338
+ queue_name="reports",
339
+ priority=5,
340
+ )
341
+
342
+ unschedule_recurring_task("tenant_42_report")
343
+ ```
344
+
345
+ Dynamic recurring tasks require
346
+ `TASKS[backend_alias]["OPTIONS"]["scheduler"]["dynamic_tasks_enabled"] = True`
347
+ or the equivalent `scheduler.dynamic_tasks_enabled: true` in the optional YAML
348
+ config.
349
+
350
+ The scheduler is part of the normal `dj_queue` runtime. You do not run a
351
+ separate recurring service.
352
+
353
+ ## Concurrency Controls
354
+
355
+ Tasks can opt into database-backed concurrency limits by defining concurrency
356
+ metadata on the wrapped function:
357
+
358
+ ```python
359
+ from django.tasks import task
360
+
361
+
362
+ @task
363
+ def sync_account(account_id, action):
364
+ return f"{account_id}:{action}"
365
+
366
+
367
+ sync_account.func.concurrency_key = "account:{account_id}"
368
+ sync_account.func.concurrency_limit = 1
369
+ sync_account.func.concurrency_duration = 60
370
+ sync_account.func.on_conflict = "block"
371
+ ```
372
+
373
+ With this configuration:
374
+
375
+ - the first matching job can run immediately
376
+ - later jobs for the same key can block until capacity is released
377
+ - `on_conflict = "discard"` turns the same pattern into singleton-style work
378
+
379
+ ## Queue Operations
380
+
381
+ `QueueInfo` exposes operational queue controls without bypassing the queue
382
+ tables:
383
+
384
+ ```python
385
+ from dj_queue.api import QueueInfo
386
+
387
+ orders = QueueInfo("orders")
388
+
389
+ print(orders.size)
390
+ print(orders.latency)
391
+ print(orders.paused)
392
+
393
+ orders.pause()
394
+ orders.resume()
395
+ orders.clear()
396
+ ```
397
+
398
+ Operational commands:
399
+
400
+ ```bash
401
+ python manage.py dj_queue_health
402
+ python manage.py dj_queue_health --max-age 120
403
+ python manage.py dj_queue_prune --older-than 86400
404
+ python manage.py dj_queue_prune --task-path myapp.tasks.cleanup
405
+ ```
406
+
407
+ If Django admin is installed, `dj_queue` also registers the main operational
408
+ models there, including jobs, failed executions, processes, recurring tasks,
409
+ pauses, and semaphores.
410
+
411
+ ## Failed jobs
412
+
413
+ When a task raises, `dj_queue` keeps the job and its failed execution row in the
414
+ queue database, including the exception class, message, and traceback.
415
+
416
+ You can retry or discard failed jobs through Django admin or the operations
417
+ layer:
418
+
419
+ ```python
420
+ from dj_queue.operations.jobs import discard_failed_job, retry_failed_job
421
+
422
+ retry_failed_job(job_id)
423
+ discard_failed_job(job_id)
424
+ ```
425
+
426
+ Failures stay inspectable until you act on them.
427
+
428
+ ## Multi-Database Setup
429
+
430
+ `dj_queue` can keep queue tables on a dedicated database alias.
431
+
432
+ Example configuration:
433
+
434
+ ```python
435
+ DATABASES = {
436
+ "default": {
437
+ "ENGINE": "django.db.backends.postgresql",
438
+ "NAME": "app",
439
+ },
440
+ "queue": {
441
+ "ENGINE": "django.db.backends.postgresql",
442
+ "NAME": "queue",
443
+ },
444
+ }
445
+
446
+ DATABASE_ROUTERS = ["dj_queue.routers.DjQueueRouter"]
447
+
448
+ TASKS = {
449
+ "default": {
450
+ "BACKEND": "dj_queue.backend.DjQueueBackend",
451
+ "QUEUES": [],
452
+ "OPTIONS": {
453
+ "database_alias": "queue",
454
+ },
455
+ },
456
+ }
457
+ ```
458
+
459
+ Run your normal application migrations on `default`, then migrate `dj_queue`
460
+ onto the queue database:
461
+
462
+ ```bash
463
+ python manage.py migrate
464
+ python manage.py migrate dj_queue --database queue
465
+ ```
466
+
467
+ With this setup, `dj_queue`'s ORM queries and raw SQL helpers stay on the queue
468
+ database.
469
+
470
+ ## Embedded Server Mode
471
+
472
+ `dj_queue` can run inside an existing server process via embedded async
473
+ supervision.
474
+
475
+ ### ASGI
476
+
477
+ Wrap your ASGI application with `DjQueueLifespan`:
478
+
479
+ ```python
480
+ from django.core.asgi import get_asgi_application
481
+
482
+ from dj_queue.contrib.asgi import DjQueueLifespan
483
+
484
+ django_application = get_asgi_application()
485
+ application = DjQueueLifespan(django_application)
486
+ ```
487
+
488
+ ### Gunicorn
489
+
490
+ Import the provided hooks in your Gunicorn config:
491
+
492
+ ```python
493
+ # gunicorn.conf.py
494
+
495
+ from dj_queue.contrib.gunicorn import post_fork, worker_exit
496
+ ```
497
+
498
+ Both embedded integrations use `AsyncSupervisor(standalone=False)` and leave
499
+ signal handling to the host server.
500
+
501
+ ## Configuration
502
+
503
+ The main configuration lives in `TASKS[backend_alias]["OPTIONS"]`.
504
+
505
+ Start with these options:
506
+
507
+ - `mode`: `"fork"` or `"async"`
508
+ - `workers`: queue selectors, thread counts, and process counts
509
+ - `dispatchers`: scheduled promotion and concurrency maintenance settings
510
+ - `scheduler`: dynamic recurring polling settings
511
+ - `database_alias`: database alias for queue tables and runtime activity
512
+ - `preserve_finished_jobs` and `clear_finished_jobs_after`: result retention and
513
+ cleanup
514
+
515
+ Additional operational tuning is available when needed, including
516
+ `use_skip_locked`, `listen_notify`, `silence_polling`,
517
+ `process_heartbeat_interval`, `process_alive_threshold`, `shutdown_timeout`, and
518
+ `on_thread_error`.
519
+
520
+ On PostgreSQL, `listen_notify` uses the same Django PostgreSQL driver
521
+ configuration as the main database connection. Install a compatible driver in
522
+ your project, or use `dj-queue[postgres]` to pull in `psycopg`.
523
+
524
+ Configuration precedence is explicit:
525
+
526
+ - CLI overrides
527
+ - environment variables
528
+ - YAML file pointed to by `DJ_QUEUE_CONFIG`
529
+ - Django `TASKS` settings
530
+
531
+ ### YAML file config
532
+
533
+ You can point `dj_queue` at a YAML file with either `--config` or
534
+ `DJ_QUEUE_CONFIG`:
535
+
536
+ ```bash
537
+ python manage.py dj_queue --config /etc/dj_queue.yml
538
+ DJ_QUEUE_CONFIG=/etc/dj_queue.yml python manage.py dj_queue
539
+ ```
540
+
541
+ The YAML file should contain a single mapping of backend option values. It uses
542
+ the same shape as `TASKS[backend_alias]["OPTIONS"]`, not the full Django
543
+ `TASKS` structure:
544
+
545
+ ```yaml
546
+ mode: async
547
+ database_alias: queue
548
+ preserve_finished_jobs: true
549
+ clear_finished_jobs_after: 86400
550
+ listen_notify: true
551
+ silence_polling: true
552
+
553
+ workers:
554
+ - queues: ["default", "email*"]
555
+ threads: 8
556
+ processes: 1
557
+ polling_interval: 0.1
558
+
559
+ dispatchers:
560
+ - batch_size: 500
561
+ polling_interval: 1
562
+ concurrency_maintenance: true
563
+ concurrency_maintenance_interval: 600
564
+
565
+ scheduler:
566
+ dynamic_tasks_enabled: true
567
+ polling_interval: 5
568
+
569
+ recurring:
570
+ nightly_cleanup:
571
+ task_path: myapp.tasks.cleanup
572
+ schedule: "0 3 * * *"
573
+ queue_name: maintenance
574
+ priority: -5
575
+ description: nightly cleanup
576
+ ```
577
+
578
+ This file is merged on top of `TASKS[backend_alias]["OPTIONS"]`, then any
579
+ environment-variable and CLI overrides win after that.
580
+
581
+ Environment overrides currently supported by `dj_queue` itself:
582
+
583
+ - `DJ_QUEUE_CONFIG`
584
+ - `DJ_QUEUE_MODE`
585
+ - `DJ_QUEUE_SKIP_RECURRING`
586
+
587
+ ## Database Support
588
+
589
+ | Backend | Support level | Notes |
590
+ |---|---|---|
591
+ | PostgreSQL | first-class | polling, `SKIP LOCKED`, and optional `LISTEN/NOTIFY` |
592
+ | MySQL 8+ | supported | polling plus `SKIP LOCKED` |
593
+ | MariaDB 10.6+ | supported | polling plus `SKIP LOCKED` |
594
+ | SQLite | supported with limits | polling only, serialized writes, no `SKIP LOCKED`, no `LISTEN/NOTIFY`; practical for development, CI, and smaller deployments |
595
+
596
+ Polling is the portability path everywhere. Backend-specific features improve
597
+ latency and throughput but are not correctness requirements.
598
+
599
+ ## Examples
600
+
601
+ The repository ships real runnable examples in `examples/`.
602
+
603
+ Recommended entry points:
604
+
605
+ - `examples/ex01_basic_enqueue.py`
606
+ - `examples/ex07_basic_enqueue_on_commit.py`
607
+ - `examples/ex08_basic_recurring.py`
608
+ - `examples/ex20_advanced_concurrency.py`
609
+ - `examples/ex21_advanced_queue_control.py`
610
+ - `examples/ex24_advanced_multi_db.py`
611
+ - `examples/ex25_advanced_asgi.py`
612
+
613
+ The examples index in `examples/README.md` lists the full progression.