@mantiq/queue 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +19 -0
- package/package.json +72 -0
- package/src/Job.ts +122 -0
- package/src/JobBatch.ts +188 -0
- package/src/JobChain.ts +119 -0
- package/src/JobRegistry.ts +37 -0
- package/src/PendingDispatch.ts +80 -0
- package/src/QueueManager.ts +76 -0
- package/src/QueueServiceProvider.ts +88 -0
- package/src/Worker.ts +282 -0
- package/src/commands/MakeJobCommand.ts +29 -0
- package/src/commands/QueueFailedCommand.ts +50 -0
- package/src/commands/QueueFlushCommand.ts +19 -0
- package/src/commands/QueueRetryCommand.ts +52 -0
- package/src/commands/QueueWorkCommand.ts +49 -0
- package/src/commands/ScheduleRunCommand.ts +58 -0
- package/src/contracts/JobContract.ts +64 -0
- package/src/contracts/QueueDriver.ts +70 -0
- package/src/drivers/KafkaDriver.ts +334 -0
- package/src/drivers/RedisDriver.ts +292 -0
- package/src/drivers/SQLiteDriver.ts +280 -0
- package/src/drivers/SqsDriver.ts +280 -0
- package/src/drivers/SyncDriver.ts +142 -0
- package/src/errors/QueueError.ts +22 -0
- package/src/events/QueueEvents.ts +36 -0
- package/src/helpers/queue.ts +76 -0
- package/src/index.ts +85 -0
- package/src/schedule/Schedule.ts +252 -0
- package/src/testing/QueueFake.ts +209 -0
|
@@ -0,0 +1,292 @@
|
|
|
1
|
+
import type { QueueDriver } from '../contracts/QueueDriver.ts'
|
|
2
|
+
import type {
|
|
3
|
+
QueuedJob,
|
|
4
|
+
FailedJob,
|
|
5
|
+
SerializedPayload,
|
|
6
|
+
BatchRecord,
|
|
7
|
+
} from '../contracts/JobContract.ts'
|
|
8
|
+
|
|
9
|
+
/** Configuration for the Redis queue driver. */
export interface RedisQueueConfig {
  /** Discriminator selecting this driver in the queue config union. */
  driver: 'redis'
  /** Redis server host; the constructor falls back to 127.0.0.1. */
  host?: string | undefined
  /** Redis server port; the constructor falls back to 6379. */
  port?: number | undefined
  /** Optional AUTH password passed through to ioredis. */
  password?: string | undefined
  /** Logical database index; the constructor falls back to 0. */
  db?: number | undefined
  /** Full connection URL; when set, host/port/password/db are ignored. */
  url?: string | undefined
  /** Key namespace prefix; the constructor falls back to 'mantiq_queue'. */
  prefix?: string | undefined
}
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Redis-backed queue driver using ioredis.
|
|
21
|
+
*
|
|
22
|
+
* Uses Redis lists for job queues, sorted sets for delayed jobs,
|
|
23
|
+
* and hashes for batch/failed job tracking.
|
|
24
|
+
*
|
|
25
|
+
* Requires `ioredis` as a peer dependency:
|
|
26
|
+
* bun add ioredis
|
|
27
|
+
*
|
|
28
|
+
* Key structure:
|
|
29
|
+
* - `{prefix}:{queue}` — List of ready jobs (JSON payloads)
|
|
30
|
+
* - `{prefix}:{queue}:delayed` — Sorted set (score = availableAt)
|
|
31
|
+
* - `{prefix}:failed` — List of failed jobs
|
|
32
|
+
* - `{prefix}:batch:{id}` — Hash for batch records
|
|
33
|
+
*/
|
|
34
|
+
export class RedisDriver implements QueueDriver {
|
|
35
|
+
private client: any
|
|
36
|
+
private readonly prefix: string
|
|
37
|
+
private nextFailedId = 1
|
|
38
|
+
|
|
39
|
+
constructor(config: RedisQueueConfig) {
|
|
40
|
+
this.prefix = config.prefix ?? 'mantiq_queue'
|
|
41
|
+
|
|
42
|
+
try {
|
|
43
|
+
const Redis = require('ioredis')
|
|
44
|
+
if (config.url) {
|
|
45
|
+
this.client = new Redis(config.url)
|
|
46
|
+
} else {
|
|
47
|
+
this.client = new Redis({
|
|
48
|
+
host: config.host ?? '127.0.0.1',
|
|
49
|
+
port: config.port ?? 6379,
|
|
50
|
+
password: config.password,
|
|
51
|
+
db: config.db ?? 0,
|
|
52
|
+
})
|
|
53
|
+
}
|
|
54
|
+
} catch {
|
|
55
|
+
throw new Error(
|
|
56
|
+
'ioredis is required for the Redis queue driver. Install it with: bun add ioredis',
|
|
57
|
+
)
|
|
58
|
+
}
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
// ── Core job operations ──────────────────────────────────────────
|
|
62
|
+
|
|
63
|
+
async push(payload: SerializedPayload, queue: string, delay = 0): Promise<string | number> {
|
|
64
|
+
const id = crypto.randomUUID()
|
|
65
|
+
const now = Math.floor(Date.now() / 1000)
|
|
66
|
+
|
|
67
|
+
const job: QueuedJob = {
|
|
68
|
+
id,
|
|
69
|
+
queue,
|
|
70
|
+
payload,
|
|
71
|
+
attempts: 0,
|
|
72
|
+
reservedAt: null,
|
|
73
|
+
availableAt: now + delay,
|
|
74
|
+
createdAt: now,
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
const serialized = JSON.stringify(job)
|
|
78
|
+
|
|
79
|
+
if (delay > 0) {
|
|
80
|
+
// Add to delayed sorted set (score = availableAt)
|
|
81
|
+
await this.client.zadd(this.key(queue, 'delayed'), now + delay, serialized)
|
|
82
|
+
} else {
|
|
83
|
+
// Add to ready list
|
|
84
|
+
await this.client.lpush(this.key(queue), serialized)
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
return id
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
async pop(queue: string): Promise<QueuedJob | null> {
|
|
91
|
+
// First, migrate any delayed jobs that are now available
|
|
92
|
+
await this.migrateDelayed(queue)
|
|
93
|
+
|
|
94
|
+
// Atomically pop from the right side of the list
|
|
95
|
+
const raw = await this.client.rpop(this.key(queue))
|
|
96
|
+
if (!raw) return null
|
|
97
|
+
|
|
98
|
+
const job: QueuedJob = JSON.parse(raw)
|
|
99
|
+
job.reservedAt = Math.floor(Date.now() / 1000)
|
|
100
|
+
job.attempts++
|
|
101
|
+
|
|
102
|
+
// Store in reserved set for tracking
|
|
103
|
+
await this.client.hset(
|
|
104
|
+
this.key(queue, 'reserved'),
|
|
105
|
+
String(job.id),
|
|
106
|
+
JSON.stringify(job),
|
|
107
|
+
)
|
|
108
|
+
|
|
109
|
+
return job
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
async delete(job: QueuedJob): Promise<void> {
|
|
113
|
+
await this.client.hdel(this.key(job.queue, 'reserved'), String(job.id))
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
async release(job: QueuedJob, delay: number): Promise<void> {
|
|
117
|
+
// Remove from reserved
|
|
118
|
+
await this.client.hdel(this.key(job.queue, 'reserved'), String(job.id))
|
|
119
|
+
|
|
120
|
+
// Re-queue
|
|
121
|
+
job.reservedAt = null
|
|
122
|
+
const now = Math.floor(Date.now() / 1000)
|
|
123
|
+
job.availableAt = now + delay
|
|
124
|
+
const serialized = JSON.stringify(job)
|
|
125
|
+
|
|
126
|
+
if (delay > 0) {
|
|
127
|
+
await this.client.zadd(this.key(job.queue, 'delayed'), now + delay, serialized)
|
|
128
|
+
} else {
|
|
129
|
+
await this.client.lpush(this.key(job.queue), serialized)
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
async size(queue: string): Promise<number> {
|
|
134
|
+
const [ready, delayed] = await Promise.all([
|
|
135
|
+
this.client.llen(this.key(queue)),
|
|
136
|
+
this.client.zcard(this.key(queue, 'delayed')),
|
|
137
|
+
])
|
|
138
|
+
return ready + delayed
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
async clear(queue: string): Promise<void> {
|
|
142
|
+
await Promise.all([
|
|
143
|
+
this.client.del(this.key(queue)),
|
|
144
|
+
this.client.del(this.key(queue, 'delayed')),
|
|
145
|
+
this.client.del(this.key(queue, 'reserved')),
|
|
146
|
+
])
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
// ── Failed jobs ──────────────────────────────────────────────────
|
|
150
|
+
|
|
151
|
+
async fail(job: QueuedJob, error: Error): Promise<void> {
|
|
152
|
+
await this.delete(job)
|
|
153
|
+
|
|
154
|
+
const failed: FailedJob = {
|
|
155
|
+
id: this.nextFailedId++,
|
|
156
|
+
queue: job.queue,
|
|
157
|
+
payload: job.payload,
|
|
158
|
+
exception: `${error.name}: ${error.message}\n${error.stack ?? ''}`,
|
|
159
|
+
failedAt: Math.floor(Date.now() / 1000),
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
await this.client.lpush(this.key('_failed'), JSON.stringify(failed))
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
async getFailedJobs(): Promise<FailedJob[]> {
|
|
166
|
+
const items = await this.client.lrange(this.key('_failed'), 0, -1)
|
|
167
|
+
return items.map((raw: string) => JSON.parse(raw))
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
async findFailedJob(id: string | number): Promise<FailedJob | null> {
|
|
171
|
+
const all = await this.getFailedJobs()
|
|
172
|
+
return all.find((j) => j.id === id) ?? null
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
async forgetFailedJob(id: string | number): Promise<boolean> {
|
|
176
|
+
const all = await this.getFailedJobs()
|
|
177
|
+
const idx = all.findIndex((j) => j.id === id)
|
|
178
|
+
if (idx === -1) return false
|
|
179
|
+
|
|
180
|
+
// Remove and rewrite (Redis doesn't support remove-by-value easily for complex objects)
|
|
181
|
+
all.splice(idx, 1)
|
|
182
|
+
await this.client.del(this.key('_failed'))
|
|
183
|
+
if (all.length > 0) {
|
|
184
|
+
await this.client.lpush(this.key('_failed'), ...all.map((j) => JSON.stringify(j)))
|
|
185
|
+
}
|
|
186
|
+
return true
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
async flushFailedJobs(): Promise<void> {
|
|
190
|
+
await this.client.del(this.key('_failed'))
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
// ── Batch support ────────────────────────────────────────────────
|
|
194
|
+
|
|
195
|
+
async createBatch(batch: BatchRecord): Promise<string> {
|
|
196
|
+
await this.client.set(this.key('batch', batch.id), JSON.stringify(batch))
|
|
197
|
+
return batch.id
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
async findBatch(id: string): Promise<BatchRecord | null> {
|
|
201
|
+
const raw = await this.client.get(this.key('batch', id))
|
|
202
|
+
return raw ? JSON.parse(raw) : null
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
async updateBatchProgress(id: string, processed: number, failed: number): Promise<BatchRecord | null> {
|
|
206
|
+
// Use a Lua script for atomic increment
|
|
207
|
+
const script = `
|
|
208
|
+
local key = KEYS[1]
|
|
209
|
+
local raw = redis.call('GET', key)
|
|
210
|
+
if not raw then return nil end
|
|
211
|
+
local batch = cjson.decode(raw)
|
|
212
|
+
batch.processedJobs = batch.processedJobs + tonumber(ARGV[1])
|
|
213
|
+
batch.failedJobs = batch.failedJobs + tonumber(ARGV[2])
|
|
214
|
+
local updated = cjson.encode(batch)
|
|
215
|
+
redis.call('SET', key, updated)
|
|
216
|
+
return updated
|
|
217
|
+
`
|
|
218
|
+
const result = await this.client.eval(script, 1, this.key('batch', id), processed, failed)
|
|
219
|
+
return result ? JSON.parse(result) : null
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
async markBatchFinished(id: string): Promise<void> {
|
|
223
|
+
const batch = await this.findBatch(id)
|
|
224
|
+
if (batch) {
|
|
225
|
+
batch.finishedAt = Math.floor(Date.now() / 1000)
|
|
226
|
+
await this.client.set(this.key('batch', id), JSON.stringify(batch))
|
|
227
|
+
}
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
async cancelBatch(id: string): Promise<void> {
|
|
231
|
+
const batch = await this.findBatch(id)
|
|
232
|
+
if (batch) {
|
|
233
|
+
batch.cancelledAt = Math.floor(Date.now() / 1000)
|
|
234
|
+
await this.client.set(this.key('batch', id), JSON.stringify(batch))
|
|
235
|
+
}
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
async pruneBatches(olderThanSeconds: number): Promise<void> {
|
|
239
|
+
// Scan for batch keys and remove old ones
|
|
240
|
+
const cutoff = Math.floor(Date.now() / 1000) - olderThanSeconds
|
|
241
|
+
const pattern = this.key('batch', '*')
|
|
242
|
+
let cursor = '0'
|
|
243
|
+
|
|
244
|
+
do {
|
|
245
|
+
const [nextCursor, keys] = await this.client.scan(cursor, 'MATCH', pattern, 'COUNT', 100)
|
|
246
|
+
cursor = nextCursor
|
|
247
|
+
for (const key of keys) {
|
|
248
|
+
const raw = await this.client.get(key)
|
|
249
|
+
if (raw) {
|
|
250
|
+
const batch = JSON.parse(raw)
|
|
251
|
+
if (batch.createdAt < cutoff) await this.client.del(key)
|
|
252
|
+
}
|
|
253
|
+
}
|
|
254
|
+
} while (cursor !== '0')
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
// ── Helpers ──────────────────────────────────────────────────────
|
|
258
|
+
|
|
259
|
+
/** Migrate delayed jobs that are now available to the ready list */
|
|
260
|
+
private async migrateDelayed(queue: string): Promise<void> {
|
|
261
|
+
const now = Math.floor(Date.now() / 1000)
|
|
262
|
+
const delayedKey = this.key(queue, 'delayed')
|
|
263
|
+
const readyKey = this.key(queue)
|
|
264
|
+
|
|
265
|
+
// Atomic: pop all jobs with score <= now and push them to the ready list
|
|
266
|
+
const script = `
|
|
267
|
+
local items = redis.call('ZRANGEBYSCORE', KEYS[1], '-inf', ARGV[1])
|
|
268
|
+
if #items > 0 then
|
|
269
|
+
for _, item in ipairs(items) do
|
|
270
|
+
redis.call('LPUSH', KEYS[2], item)
|
|
271
|
+
end
|
|
272
|
+
redis.call('ZREMRANGEBYSCORE', KEYS[1], '-inf', ARGV[1])
|
|
273
|
+
end
|
|
274
|
+
return #items
|
|
275
|
+
`
|
|
276
|
+
await this.client.eval(script, 2, delayedKey, readyKey, now)
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
private key(...parts: string[]): string {
|
|
280
|
+
return [this.prefix, ...parts].join(':')
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
/** Get the underlying ioredis client */
|
|
284
|
+
getClient(): any {
|
|
285
|
+
return this.client
|
|
286
|
+
}
|
|
287
|
+
|
|
288
|
+
/** Disconnect */
|
|
289
|
+
async disconnect(): Promise<void> {
|
|
290
|
+
await this.client.quit()
|
|
291
|
+
}
|
|
292
|
+
}
|
|
@@ -0,0 +1,280 @@
|
|
|
1
|
+
import { Database } from 'bun:sqlite'
|
|
2
|
+
import type { QueueDriver } from '../contracts/QueueDriver.ts'
|
|
3
|
+
import type {
|
|
4
|
+
QueuedJob,
|
|
5
|
+
FailedJob,
|
|
6
|
+
SerializedPayload,
|
|
7
|
+
BatchRecord,
|
|
8
|
+
} from '../contracts/JobContract.ts'
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* SQLite-backed queue driver using bun:sqlite.
|
|
12
|
+
* Uses BEGIN IMMEDIATE transactions for atomic pop operations.
|
|
13
|
+
* Auto-creates tables on first use.
|
|
14
|
+
*/
|
|
15
|
+
export class SQLiteDriver implements QueueDriver {
|
|
16
|
+
private db: Database | null = null
|
|
17
|
+
private initialized = false
|
|
18
|
+
|
|
19
|
+
constructor(private readonly dbPath: string) {}
|
|
20
|
+
|
|
21
|
+
private getDb(): Database {
|
|
22
|
+
if (!this.db) {
|
|
23
|
+
this.db = new Database(this.dbPath)
|
|
24
|
+
this.db.exec('PRAGMA journal_mode = WAL')
|
|
25
|
+
this.db.exec('PRAGMA busy_timeout = 5000')
|
|
26
|
+
}
|
|
27
|
+
if (!this.initialized) {
|
|
28
|
+
this.createTables()
|
|
29
|
+
this.initialized = true
|
|
30
|
+
}
|
|
31
|
+
return this.db
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
private createTables(): void {
|
|
35
|
+
const db = this.db!
|
|
36
|
+
|
|
37
|
+
db.exec(`
|
|
38
|
+
CREATE TABLE IF NOT EXISTS queue_jobs (
|
|
39
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
40
|
+
queue TEXT NOT NULL,
|
|
41
|
+
payload TEXT NOT NULL,
|
|
42
|
+
attempts INTEGER NOT NULL DEFAULT 0,
|
|
43
|
+
reserved_at INTEGER,
|
|
44
|
+
available_at INTEGER NOT NULL,
|
|
45
|
+
created_at INTEGER NOT NULL
|
|
46
|
+
)
|
|
47
|
+
`)
|
|
48
|
+
|
|
49
|
+
db.exec(`
|
|
50
|
+
CREATE INDEX IF NOT EXISTS idx_queue_jobs_queue_available
|
|
51
|
+
ON queue_jobs (queue, available_at)
|
|
52
|
+
`)
|
|
53
|
+
|
|
54
|
+
db.exec(`
|
|
55
|
+
CREATE TABLE IF NOT EXISTS queue_failed_jobs (
|
|
56
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
57
|
+
queue TEXT NOT NULL,
|
|
58
|
+
payload TEXT NOT NULL,
|
|
59
|
+
exception TEXT NOT NULL,
|
|
60
|
+
failed_at INTEGER NOT NULL
|
|
61
|
+
)
|
|
62
|
+
`)
|
|
63
|
+
|
|
64
|
+
db.exec(`
|
|
65
|
+
CREATE TABLE IF NOT EXISTS queue_batches (
|
|
66
|
+
id TEXT PRIMARY KEY,
|
|
67
|
+
name TEXT NOT NULL,
|
|
68
|
+
total_jobs INTEGER NOT NULL,
|
|
69
|
+
processed_jobs INTEGER NOT NULL DEFAULT 0,
|
|
70
|
+
failed_jobs INTEGER NOT NULL DEFAULT 0,
|
|
71
|
+
failed_job_ids TEXT NOT NULL DEFAULT '[]',
|
|
72
|
+
options TEXT NOT NULL,
|
|
73
|
+
cancelled_at INTEGER,
|
|
74
|
+
created_at INTEGER NOT NULL,
|
|
75
|
+
finished_at INTEGER
|
|
76
|
+
)
|
|
77
|
+
`)
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// ── Core job operations ──────────────────────────────────────────
|
|
81
|
+
|
|
82
|
+
async push(payload: SerializedPayload, queue: string, delay = 0): Promise<string | number> {
|
|
83
|
+
const db = this.getDb()
|
|
84
|
+
const now = Math.floor(Date.now() / 1000)
|
|
85
|
+
const stmt = db.prepare(
|
|
86
|
+
'INSERT INTO queue_jobs (queue, payload, attempts, reserved_at, available_at, created_at) VALUES (?, ?, 0, NULL, ?, ?)',
|
|
87
|
+
)
|
|
88
|
+
const result = stmt.run(queue, JSON.stringify(payload), now + delay, now)
|
|
89
|
+
return Number(result.lastInsertRowid)
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
async pop(queue: string): Promise<QueuedJob | null> {
|
|
93
|
+
const db = this.getDb()
|
|
94
|
+
const now = Math.floor(Date.now() / 1000)
|
|
95
|
+
|
|
96
|
+
// Use BEGIN IMMEDIATE for atomic claim
|
|
97
|
+
const row = db.transaction(() => {
|
|
98
|
+
const r = db.prepare(
|
|
99
|
+
'SELECT * FROM queue_jobs WHERE queue = ? AND reserved_at IS NULL AND available_at <= ? ORDER BY id ASC LIMIT 1',
|
|
100
|
+
).get(queue, now) as any
|
|
101
|
+
|
|
102
|
+
if (!r) return null
|
|
103
|
+
|
|
104
|
+
db.prepare(
|
|
105
|
+
'UPDATE queue_jobs SET reserved_at = ?, attempts = attempts + 1 WHERE id = ?',
|
|
106
|
+
).run(now, r.id)
|
|
107
|
+
|
|
108
|
+
return { ...r, reserved_at: now, attempts: r.attempts + 1 }
|
|
109
|
+
}).immediate()
|
|
110
|
+
|
|
111
|
+
if (!row) return null
|
|
112
|
+
|
|
113
|
+
return {
|
|
114
|
+
id: row.id,
|
|
115
|
+
queue: row.queue,
|
|
116
|
+
payload: JSON.parse(row.payload),
|
|
117
|
+
attempts: row.attempts,
|
|
118
|
+
reservedAt: row.reserved_at,
|
|
119
|
+
availableAt: row.available_at,
|
|
120
|
+
createdAt: row.created_at,
|
|
121
|
+
}
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
async delete(job: QueuedJob): Promise<void> {
|
|
125
|
+
this.getDb().prepare('DELETE FROM queue_jobs WHERE id = ?').run(job.id)
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
async release(job: QueuedJob, delay: number): Promise<void> {
|
|
129
|
+
const now = Math.floor(Date.now() / 1000)
|
|
130
|
+
this.getDb().prepare(
|
|
131
|
+
'UPDATE queue_jobs SET reserved_at = NULL, available_at = ? WHERE id = ?',
|
|
132
|
+
).run(now + delay, job.id)
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
async size(queue: string): Promise<number> {
|
|
136
|
+
const row = this.getDb().prepare(
|
|
137
|
+
'SELECT COUNT(*) as count FROM queue_jobs WHERE queue = ?',
|
|
138
|
+
).get(queue) as any
|
|
139
|
+
return row?.count ?? 0
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
async clear(queue: string): Promise<void> {
|
|
143
|
+
this.getDb().prepare('DELETE FROM queue_jobs WHERE queue = ?').run(queue)
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
// ── Failed jobs ──────────────────────────────────────────────────
|
|
147
|
+
|
|
148
|
+
async fail(job: QueuedJob, error: Error): Promise<void> {
|
|
149
|
+
const db = this.getDb()
|
|
150
|
+
const now = Math.floor(Date.now() / 1000)
|
|
151
|
+
const exception = `${error.name}: ${error.message}\n${error.stack ?? ''}`
|
|
152
|
+
|
|
153
|
+
db.transaction(() => {
|
|
154
|
+
db.prepare('DELETE FROM queue_jobs WHERE id = ?').run(job.id)
|
|
155
|
+
db.prepare(
|
|
156
|
+
'INSERT INTO queue_failed_jobs (queue, payload, exception, failed_at) VALUES (?, ?, ?, ?)',
|
|
157
|
+
).run(job.queue, JSON.stringify(job.payload), exception, now)
|
|
158
|
+
})()
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
async getFailedJobs(): Promise<FailedJob[]> {
|
|
162
|
+
const rows = this.getDb().prepare(
|
|
163
|
+
'SELECT * FROM queue_failed_jobs ORDER BY failed_at DESC',
|
|
164
|
+
).all() as any[]
|
|
165
|
+
return rows.map(this.mapFailedJob)
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
async findFailedJob(id: string | number): Promise<FailedJob | null> {
|
|
169
|
+
const row = this.getDb().prepare(
|
|
170
|
+
'SELECT * FROM queue_failed_jobs WHERE id = ?',
|
|
171
|
+
).get(id) as any
|
|
172
|
+
return row ? this.mapFailedJob(row) : null
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
async forgetFailedJob(id: string | number): Promise<boolean> {
|
|
176
|
+
const result = this.getDb().prepare(
|
|
177
|
+
'DELETE FROM queue_failed_jobs WHERE id = ?',
|
|
178
|
+
).run(id)
|
|
179
|
+
return result.changes > 0
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
async flushFailedJobs(): Promise<void> {
|
|
183
|
+
this.getDb().prepare('DELETE FROM queue_failed_jobs').run()
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
// ── Batch support ────────────────────────────────────────────────
|
|
187
|
+
|
|
188
|
+
async createBatch(batch: BatchRecord): Promise<string> {
|
|
189
|
+
this.getDb().prepare(`
|
|
190
|
+
INSERT INTO queue_batches (id, name, total_jobs, processed_jobs, failed_jobs, failed_job_ids, options, cancelled_at, created_at, finished_at)
|
|
191
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
192
|
+
`).run(
|
|
193
|
+
batch.id,
|
|
194
|
+
batch.name,
|
|
195
|
+
batch.totalJobs,
|
|
196
|
+
batch.processedJobs,
|
|
197
|
+
batch.failedJobs,
|
|
198
|
+
JSON.stringify(batch.failedJobIds),
|
|
199
|
+
JSON.stringify(batch.options),
|
|
200
|
+
batch.cancelledAt,
|
|
201
|
+
batch.createdAt,
|
|
202
|
+
batch.finishedAt,
|
|
203
|
+
)
|
|
204
|
+
return batch.id
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
async findBatch(id: string): Promise<BatchRecord | null> {
|
|
208
|
+
const row = this.getDb().prepare(
|
|
209
|
+
'SELECT * FROM queue_batches WHERE id = ?',
|
|
210
|
+
).get(id) as any
|
|
211
|
+
return row ? this.mapBatch(row) : null
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
async updateBatchProgress(id: string, processed: number, failed: number): Promise<BatchRecord | null> {
|
|
215
|
+
const db = this.getDb()
|
|
216
|
+
const row = db.transaction(() => {
|
|
217
|
+
db.prepare(
|
|
218
|
+
'UPDATE queue_batches SET processed_jobs = processed_jobs + ?, failed_jobs = failed_jobs + ? WHERE id = ?',
|
|
219
|
+
).run(processed, failed, id)
|
|
220
|
+
return db.prepare('SELECT * FROM queue_batches WHERE id = ?').get(id) as any
|
|
221
|
+
}).immediate()
|
|
222
|
+
|
|
223
|
+
return row ? this.mapBatch(row) : null
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
async markBatchFinished(id: string): Promise<void> {
|
|
227
|
+
const now = Math.floor(Date.now() / 1000)
|
|
228
|
+
this.getDb().prepare(
|
|
229
|
+
'UPDATE queue_batches SET finished_at = ? WHERE id = ?',
|
|
230
|
+
).run(now, id)
|
|
231
|
+
}
|
|
232
|
+
|
|
233
|
+
async cancelBatch(id: string): Promise<void> {
|
|
234
|
+
const now = Math.floor(Date.now() / 1000)
|
|
235
|
+
this.getDb().prepare(
|
|
236
|
+
'UPDATE queue_batches SET cancelled_at = ? WHERE id = ?',
|
|
237
|
+
).run(now, id)
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
async pruneBatches(olderThanSeconds: number): Promise<void> {
|
|
241
|
+
const cutoff = Math.floor(Date.now() / 1000) - olderThanSeconds
|
|
242
|
+
this.getDb().prepare(
|
|
243
|
+
'DELETE FROM queue_batches WHERE created_at < ?',
|
|
244
|
+
).run(cutoff)
|
|
245
|
+
}
|
|
246
|
+
|
|
247
|
+
// ── Helpers ──────────────────────────────────────────────────────
|
|
248
|
+
|
|
249
|
+
private mapFailedJob(row: any): FailedJob {
|
|
250
|
+
return {
|
|
251
|
+
id: row.id,
|
|
252
|
+
queue: row.queue,
|
|
253
|
+
payload: JSON.parse(row.payload),
|
|
254
|
+
exception: row.exception,
|
|
255
|
+
failedAt: row.failed_at,
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
private mapBatch(row: any): BatchRecord {
|
|
260
|
+
return {
|
|
261
|
+
id: row.id,
|
|
262
|
+
name: row.name,
|
|
263
|
+
totalJobs: row.total_jobs,
|
|
264
|
+
processedJobs: row.processed_jobs,
|
|
265
|
+
failedJobs: row.failed_jobs,
|
|
266
|
+
failedJobIds: JSON.parse(row.failed_job_ids),
|
|
267
|
+
options: JSON.parse(row.options),
|
|
268
|
+
cancelledAt: row.cancelled_at,
|
|
269
|
+
createdAt: row.created_at,
|
|
270
|
+
finishedAt: row.finished_at,
|
|
271
|
+
}
|
|
272
|
+
}
|
|
273
|
+
|
|
274
|
+
/** Close the database connection */
|
|
275
|
+
close(): void {
|
|
276
|
+
this.db?.close()
|
|
277
|
+
this.db = null
|
|
278
|
+
this.initialized = false
|
|
279
|
+
}
|
|
280
|
+
}
|