pglite-queue 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,789 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/index.ts
21
+ var index_exports = {};
22
+ __export(index_exports, {
23
+ Queue: () => Queue,
24
+ calculateBackoff: () => calculateBackoff,
25
+ parseCron: () => parseCron,
26
+ parseDelay: () => parseDelay
27
+ });
28
+ module.exports = __toCommonJS(index_exports);
29
+
30
+ // src/queue.ts
31
+ var import_pglite = require("@electric-sql/pglite");
32
+
33
+ // src/types.ts
34
+ function rowToJob(row) {
35
+ return {
36
+ id: row.id,
37
+ task: row.task,
38
+ data: row.data,
39
+ status: row.status,
40
+ priority: row.priority,
41
+ runAt: new Date(row.run_at),
42
+ startedAt: row.started_at ? new Date(row.started_at) : null,
43
+ completedAt: row.completed_at ? new Date(row.completed_at) : null,
44
+ failedAt: row.failed_at ? new Date(row.failed_at) : null,
45
+ attempts: row.attempts,
46
+ maxAttempts: row.max_attempts,
47
+ lastError: row.last_error,
48
+ progress: row.progress,
49
+ result: row.result,
50
+ cronExpr: row.cron_expr,
51
+ createdAt: new Date(row.created_at),
52
+ updatedAt: new Date(row.updated_at)
53
+ };
54
+ }
55
+
56
+ // src/events.ts
57
+ var import_node_events = require("events");
58
var TypedEmitter = class {
  // Thin wrapper over Node's EventEmitter whose subscription methods
  // return `this` so calls can be chained; `emit` forwards the boolean
  // "had listeners" result unchanged.
  emitter = new import_node_events.EventEmitter();
  /** Subscribe to an event; chainable. */
  on(event, listener) {
    this.emitter.on(event, listener);
    return this;
  }
  /** Unsubscribe a previously registered listener; chainable. */
  off(event, listener) {
    this.emitter.off(event, listener);
    return this;
  }
  /** Subscribe for a single delivery; chainable. */
  once(event, listener) {
    this.emitter.once(event, listener);
    return this;
  }
  /** Fire an event; returns true if at least one listener was called. */
  emit(event, ...args) {
    return this.emitter.emit(event, ...args);
  }
  /** Drop all listeners for `event` (or every event when omitted). */
  removeAllListeners(event) {
    this.emitter.removeAllListeners(event);
    return this;
  }
};
80
+
81
// src/backoff.ts
/**
 * Compute the retry delay in milliseconds for a 1-based attempt number.
 *
 * "fixed" returns baseDelay unchanged (no cap, no jitter). The default
 * "exponential" strategy grows by `factor` per attempt, adds up to 25%
 * random jitter, rounds, and caps the result at maxDelay.
 *
 * Defaults: exponential, baseDelay 1000ms, maxDelay 300000ms, factor 2.
 */
function calculateBackoff(attempt, options) {
  const type = options?.type ?? "exponential";
  const baseDelay = options?.baseDelay ?? 1e3;
  if (type === "fixed") return baseDelay;
  const maxDelay = options?.maxDelay ?? 3e5;
  const factor = options?.factor ?? 2;
  const raw = baseDelay * factor ** (attempt - 1);
  const withJitter = raw + raw * Math.random() * 0.25;
  return Math.min(Math.round(withJitter), maxDelay);
}
94
+
95
// src/cron.ts
// Allowed value range for each of the five cron fields, in field order.
var FIELDS = [
  { min: 0, max: 59 }, // minute
  { min: 0, max: 23 }, // hour
  { min: 1, max: 31 }, // day of month
  { min: 1, max: 12 }, // month
  { min: 0, max: 6 }   // day of week
];

/**
 * Expand one cron field ("*", "a", "a-b", "a-b/s", "*\/s", and comma lists
 * of those) into the set of matching integer values.
 *
 * Fixes over the original:
 * - range and step bounds are validated against the field's min/max, so an
 *   impossible spec like "50-99" for minutes is rejected up front instead
 *   of silently producing values that can never match (which previously
 *   made nextRun() scan 4 years and then throw a misleading error);
 * - the duplicate "a-b/s" branch was removed — it was unreachable because
 *   the step regex already matched that shape.
 *
 * @throws Error on any malformed or out-of-range part.
 */
function parseField(field, def) {
  const values = new Set();
  // Validate [start, end] against the field bounds, then add every
  // `step`-th value.
  const addRange = (start, end, step, spec) => {
    if (start < def.min || end > def.max || start > end) {
      throw new Error(`Invalid cron value "${spec}" for range ${def.min}-${def.max}`);
    }
    for (let i = start; i <= end; i += step) values.add(i);
  };
  for (const part of field.split(",")) {
    const trimmed = part.trim();
    if (trimmed === "*") {
      addRange(def.min, def.max, 1, trimmed);
      continue;
    }
    // Matches both "a-b/s" and "*/s".
    const stepMatch = trimmed.match(/^(?:(\d+)-(\d+)|\*)\/(\d+)$/);
    if (stepMatch) {
      const step = parseInt(stepMatch[3], 10);
      if (step === 0) throw new Error(`Invalid cron step: "${trimmed}"`);
      const start = stepMatch[1] !== void 0 ? parseInt(stepMatch[1], 10) : def.min;
      const end = stepMatch[2] !== void 0 ? parseInt(stepMatch[2], 10) : def.max;
      addRange(start, end, step, trimmed);
      continue;
    }
    const rangeMatch = trimmed.match(/^(\d+)-(\d+)$/);
    if (rangeMatch) {
      addRange(parseInt(rangeMatch[1], 10), parseInt(rangeMatch[2], 10), 1, trimmed);
      continue;
    }
    const num = parseInt(trimmed, 10);
    if (isNaN(num) || num < def.min || num > def.max) {
      throw new Error(`Invalid cron value "${trimmed}" for range ${def.min}-${def.max}`);
    }
    values.add(num);
  }
  return values;
}

/**
 * Parse a standard 5-field cron expression (minute hour dom month dow),
 * evaluated in UTC.
 *
 * NOTE: when both day-of-month and day-of-week are restricted, this
 * implementation requires BOTH to match (classic cron ORs them).
 *
 * @returns {{ nextRun: (from?: Date) => Date, matches: (date: Date) => boolean }}
 * @throws Error if the expression is malformed or contains out-of-range values.
 */
function parseCron(expr) {
  const parts = expr.trim().split(/\s+/);
  if (parts.length !== 5) {
    throw new Error(`Invalid cron expression: "${expr}". Expected 5 fields (minute hour dom month dow).`);
  }
  const minutes = parseField(parts[0], FIELDS[0]);
  const hours = parseField(parts[1], FIELDS[1]);
  const doms = parseField(parts[2], FIELDS[2]);
  const months = parseField(parts[3], FIELDS[3]);
  const dows = parseField(parts[4], FIELDS[4]);
  /** True when `date` (interpreted in UTC) satisfies every field. */
  function matches(date) {
    return minutes.has(date.getUTCMinutes()) && hours.has(date.getUTCHours()) && doms.has(date.getUTCDate()) && months.has(date.getUTCMonth() + 1) && dows.has(date.getUTCDay());
  }
  /**
   * Find the first matching minute strictly after `from` (default: now),
   * advancing at the coarsest non-matching granularity
   * (month -> day -> hour -> minute) and giving up after ~4 years.
   */
  function nextRun(from) {
    const d = from ? new Date(from.getTime()) : /* @__PURE__ */ new Date();
    d.setUTCSeconds(0, 0);
    d.setUTCMinutes(d.getUTCMinutes() + 1); // strictly after `from`
    const limit = d.getTime() + 4 * 365 * 24 * 60 * 60 * 1e3;
    while (d.getTime() < limit) {
      if (!months.has(d.getUTCMonth() + 1)) {
        d.setUTCMonth(d.getUTCMonth() + 1, 1);
        d.setUTCHours(0, 0, 0, 0);
        continue;
      }
      if (!doms.has(d.getUTCDate()) || !dows.has(d.getUTCDay())) {
        d.setUTCDate(d.getUTCDate() + 1);
        d.setUTCHours(0, 0, 0, 0);
        continue;
      }
      if (!hours.has(d.getUTCHours())) {
        d.setUTCHours(d.getUTCHours() + 1, 0, 0, 0);
        continue;
      }
      if (!minutes.has(d.getUTCMinutes())) {
        d.setUTCMinutes(d.getUTCMinutes() + 1, 0, 0);
        continue;
      }
      return d;
    }
    throw new Error(`No matching cron time found within 4 years for expression: "${expr}"`);
  }
  return { nextRun, matches };
}
192
+
193
// src/worker.ts
// Atomically claim the best-ranked runnable job: the subquery picks one
// pending/delayed row that is due, and FOR UPDATE SKIP LOCKED lets
// concurrent pollers claim distinct rows. Claiming marks the row active
// and counts the attempt.
var FETCH_SQL = `
UPDATE pglite_queue_jobs
SET status = 'active', started_at = NOW(), attempts = attempts + 1
WHERE id = (
SELECT id FROM pglite_queue_jobs
WHERE status IN ('pending', 'delayed')
AND run_at <= NOW()
ORDER BY priority ASC, run_at ASC, id ASC
LIMIT 1
FOR UPDATE SKIP LOCKED
)
RETURNING *;
`;
// Mark a job done, store its (JSON) result, and force progress to 100.
var COMPLETE_SQL = `
UPDATE pglite_queue_jobs
SET status = 'completed', completed_at = NOW(), result = $2, progress = 100
WHERE id = $1
RETURNING *;
`;
// Terminal failure: record the error message and failure time.
var FAIL_SQL = `
UPDATE pglite_queue_jobs
SET status = 'failed', failed_at = NOW(), last_error = $2
WHERE id = $1
RETURNING *;
`;
// Non-terminal failure: park the job as 'delayed' and push run_at out by
// $3 seconds of backoff.
var RETRY_SQL = `
UPDATE pglite_queue_jobs
SET status = 'delayed', failed_at = NOW(), last_error = $2, run_at = NOW() + make_interval(secs => $3::double precision)
WHERE id = $1
RETURNING *;
`;
// Persist a handler-reported progress percentage.
var PROGRESS_SQL = `
UPDATE pglite_queue_jobs SET progress = $2 WHERE id = $1;
`;
// Schedule the next occurrence of a recurring job; the partial unique
// index on cron_name makes this an upsert against the live occurrence.
var INSERT_CRON_NEXT_SQL = `
INSERT INTO pglite_queue_jobs (task, data, status, priority, run_at, max_attempts, cron_expr, cron_name)
VALUES ($1, $2, 'delayed', $3, $4, $5, $6, $7)
ON CONFLICT (cron_name) WHERE cron_name IS NOT NULL AND status NOT IN ('completed', 'failed')
DO UPDATE SET run_at = EXCLUDED.run_at, data = EXCLUDED.data;
`;
234
var Worker = class {
  db;                    // database connection (query(sql, params) interface)
  handlers;              // Map<task name, { handler, concurrency? }>
  emitter;               // shared TypedEmitter for lifecycle events
  concurrency;           // max jobs processed concurrently
  activeCount = 0;       // jobs currently in flight
  stopping = false;      // once set, no new work is claimed
  pollInProgress = false;
  pollQueued = false;    // a poll request arrived while one was running
  drainResolve = null;   // pending waitForDrain() resolver
  constructor(db, options) {
    this.db = db;
    this.handlers = options.handlers;
    this.emitter = options.emitter;
    this.concurrency = options.concurrency;
  }
  /**
   * Request a processing pass. Calls are coalesced: while a pass is
   * running, at most one follow-up pass is queued to run afterwards.
   */
  async poll() {
    if (this.stopping) return;
    if (this.pollInProgress) {
      this.pollQueued = true;
      return;
    }
    this.pollInProgress = true;
    try {
      await this.doPoll();
    } catch {
      // best-effort: per-job errors surface via 'failed'/'error' events
    } finally {
      this.pollInProgress = false;
      if (this.pollQueued && !this.stopping) {
        this.pollQueued = false;
        void this.poll();
      }
    }
  }
  /** Claim and dispatch jobs until the concurrency limit or queue is empty. */
  async doPoll() {
    while (this.activeCount < this.concurrency && !this.stopping) {
      const row = await this.fetchOne();
      if (!row) break;
      const entry = this.handlers.get(row.task);
      if (!entry) {
        // No handler registered for this task: undo the claim made by
        // FETCH_SQL (status/started_at/attempt count) and leave it pending.
        await this.db.query(
          `UPDATE pglite_queue_jobs SET status = 'pending', started_at = NULL, attempts = attempts - 1 WHERE id = $1`,
          [row.id]
        );
        // FIX: the original used `continue` here, which immediately
        // re-fetched this same row (it is still the best-ranked pending
        // job), spinning in a tight query loop until a handler appeared.
        // Break instead; the next poll tick will retry.
        break;
      }
      this.activeCount++;
      const job = rowToJob(row);
      this.emitter.emit("active", job);
      // Process in the background so the loop can keep claiming jobs up
      // to the concurrency limit.
      void this.processJob(row, entry).finally(() => {
        this.activeCount--;
        if (this.activeCount === 0 && this.drainResolve) {
          this.drainResolve();
          this.drainResolve = null;
        }
        if (!this.stopping) {
          void this.poll();
        }
      });
    }
    if (this.activeCount === 0) {
      this.emitter.emit("drained");
    }
  }
  /** Atomically claim the next runnable job row, or null if none is due. */
  async fetchOne() {
    const result = await this.db.query(FETCH_SQL);
    return result.rows[0] ?? null;
  }
  /** Run one claimed job through its handler and persist the outcome. */
  async processJob(row, entry) {
    const context = {
      id: row.id,
      task: row.task,
      data: row.data,
      attempts: row.attempts,
      maxAttempts: row.max_attempts,
      // Handlers may report progress (clamped to 0-100); no-op once stopping.
      progress: async (pct) => {
        if (this.stopping) return;
        const clamped = Math.max(0, Math.min(100, Math.round(pct)));
        try {
          await this.db.query(PROGRESS_SQL, [row.id, clamped]);
          this.emitter.emit("progress", rowToJob({ ...row, progress: clamped }), clamped);
        } catch {
          // progress updates are best-effort
        }
      }
    };
    try {
      const result = await entry.handler(context);
      if (this.stopping) return;
      try {
        const completedResult = await this.db.query(COMPLETE_SQL, [
          row.id,
          result !== void 0 ? JSON.stringify(result) : null
        ]);
        const completedJob = rowToJob(completedResult.rows[0]);
        this.emitter.emit("completed", completedJob);
        if (row.cron_expr) {
          await this.scheduleNextCron(row);
        }
      } catch {
        // bookkeeping failure; crash recovery reconciles 'active' rows on restart
      }
    } catch (err) {
      const error = err instanceof Error ? err : new Error(String(err));
      if (this.stopping) return;
      try {
        if (row.attempts < row.max_attempts) {
          // Retries remain: delay the job by an exponential backoff.
          const backoffMs = calculateBackoff(row.attempts);
          const backoffSecs = backoffMs / 1e3;
          const retryResult = await this.db.query(RETRY_SQL, [row.id, error.message, backoffSecs]);
          const retryJob = rowToJob(retryResult.rows[0]);
          this.emitter.emit("retrying", retryJob, row.attempts);
        } else {
          const failResult = await this.db.query(FAIL_SQL, [row.id, error.message]);
          const failedJob = rowToJob(failResult.rows[0]);
          this.emitter.emit("failed", failedJob, error);
        }
        if (row.cron_expr) {
          await this.scheduleNextCron(row);
        }
      } catch {
        // bookkeeping failure; crash recovery reconciles 'active' rows on restart
      }
    }
  }
  /** Upsert the next occurrence of a recurring (cron) job. */
  async scheduleNextCron(row) {
    try {
      const cron = parseCron(row.cron_expr);
      const nextRun = cron.nextRun();
      await this.db.query(INSERT_CRON_NEXT_SQL, [
        row.task,
        JSON.stringify(row.data),
        row.priority,
        nextRun.toISOString(),
        row.max_attempts,
        row.cron_expr,
        row.cron_name
      ]);
    } catch (err) {
      const error = err instanceof Error ? err : new Error(String(err));
      this.emitter.emit("error", error);
    }
  }
  /** Resolve once every in-flight job has finished. */
  waitForDrain() {
    if (this.activeCount === 0) return Promise.resolve();
    return new Promise((resolve) => {
      this.drainResolve = resolve;
    });
  }
  /** Stop claiming new work; in-flight jobs run to completion. */
  stop() {
    this.stopping = true;
  }
  get active() {
    return this.activeCount;
  }
  get isStopping() {
    return this.stopping;
  }
};
390
+
391
// src/schema.ts
// Idempotent DDL: jobs table, pickup/lookup indexes, a partial unique
// index guaranteeing one live row per cron_name, an updated_at trigger,
// and a NOTIFY trigger that wakes workers on insert.
var MIGRATION_SQL = `
CREATE TABLE IF NOT EXISTS pglite_queue_jobs (
id BIGSERIAL PRIMARY KEY,
task TEXT NOT NULL,
data JSONB NOT NULL DEFAULT '{}'::jsonb,
status TEXT NOT NULL DEFAULT 'pending',
priority INTEGER NOT NULL DEFAULT 0,
run_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
started_at TIMESTAMPTZ,
completed_at TIMESTAMPTZ,
failed_at TIMESTAMPTZ,
attempts INTEGER NOT NULL DEFAULT 0,
max_attempts INTEGER NOT NULL DEFAULT 1,
last_error TEXT,
progress INTEGER NOT NULL DEFAULT 0 CHECK (progress >= 0 AND progress <= 100),
result JSONB,
cron_expr TEXT,
cron_name TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_pglite_queue_pickup
ON pglite_queue_jobs (priority ASC, run_at ASC, id ASC)
WHERE status IN ('pending', 'delayed');

CREATE INDEX IF NOT EXISTS idx_pglite_queue_task
ON pglite_queue_jobs (task);

CREATE INDEX IF NOT EXISTS idx_pglite_queue_status
ON pglite_queue_jobs (status);

DO $$ BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_indexes
WHERE indexname = 'idx_pglite_queue_cron_name'
) THEN
CREATE UNIQUE INDEX idx_pglite_queue_cron_name
ON pglite_queue_jobs (cron_name)
WHERE cron_name IS NOT NULL AND status NOT IN ('completed', 'failed');
END IF;
END $$;

CREATE OR REPLACE FUNCTION pglite_queue_update_timestamp()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = NOW();
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DO $$ BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_trigger WHERE tgname = 'trg_pglite_queue_updated_at'
) THEN
CREATE TRIGGER trg_pglite_queue_updated_at
BEFORE UPDATE ON pglite_queue_jobs
FOR EACH ROW
EXECUTE FUNCTION pglite_queue_update_timestamp();
END IF;
END $$;

CREATE OR REPLACE FUNCTION pglite_queue_notify_new_job()
RETURNS TRIGGER AS $$
BEGIN
PERFORM pg_notify('pglite_queue_new_job', NEW.id::text);
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

DO $$ BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_trigger WHERE tgname = 'trg_pglite_queue_notify'
) THEN
CREATE TRIGGER trg_pglite_queue_notify
AFTER INSERT ON pglite_queue_jobs
FOR EACH ROW
EXECUTE FUNCTION pglite_queue_notify_new_job();
END IF;
END $$;
`;
// Crash recovery: rows stuck in 'active' from a dead process go back to
// 'pending' (or 'failed' if they are out of attempts).
var RECOVERY_SQL = `
UPDATE pglite_queue_jobs
SET
status = CASE
WHEN attempts >= max_attempts THEN 'failed'
ELSE 'pending'
END,
started_at = NULL,
last_error = COALESCE(last_error, 'Process crashed during execution')
WHERE status = 'active';
`;
/**
 * Apply the schema DDL above. Idempotent; safe to run on every startup.
 */
async function runMigrations(db) {
  await db.exec(MIGRATION_SQL);
}
/**
 * Reset (or fail) jobs left 'active' by a crashed process.
 * @returns the number of rows touched (0 when the driver reports none).
 */
async function recoverStalledJobs(db) {
  const { affectedRows } = await db.query(RECOVERY_SQL);
  return affectedRows ?? 0;
}
491
+
492
// src/delay.ts
// Milliseconds per supported duration suffix.
var UNITS = {
  s: 1e3,
  m: 6e4,
  h: 36e5,
  d: 864e5
};
/**
 * Normalize a delay to milliseconds. Numbers pass through unchanged;
 * strings must look like "<number><s|m|h|d>" (case-insensitive, optional
 * whitespace between the number and the unit), e.g. "5s", "1.5m".
 * @throws Error on any other string shape.
 */
function parseDelay(input) {
  if (typeof input === "number") return input;
  const match = input.trim().match(/^(\d+(?:\.\d+)?)\s*(s|m|h|d)$/i);
  if (!match) {
    throw new Error(`Invalid delay format: "${input}". Use a number (ms) or a string like "5s", "10m", "2h", "1d".`);
  }
  const [, amount, unit] = match;
  return Math.round(parseFloat(amount) * UNITS[unit.toLowerCase()]);
}
509
+
510
// src/queue.ts
// Enqueue a one-off job; run_at is NOW() plus $5 seconds of delay.
var ADD_JOB_SQL = `
INSERT INTO pglite_queue_jobs (task, data, status, priority, run_at, max_attempts)
VALUES ($1, $2, $3, $4, NOW() + make_interval(secs => $5::double precision), $6)
RETURNING *;
`;
// Register/refresh a recurring job; the partial unique index on cron_name
// turns this into an upsert against the single live occurrence.
var ADD_CRON_SQL = `
INSERT INTO pglite_queue_jobs (task, data, status, priority, run_at, max_attempts, cron_expr, cron_name)
VALUES ($1, $2, 'delayed', $3, $4, $5, $6, $7)
ON CONFLICT (cron_name) WHERE cron_name IS NOT NULL AND status NOT IN ('completed', 'failed')
DO UPDATE SET run_at = EXCLUDED.run_at, data = EXCLUDED.data, cron_expr = EXCLUDED.cron_expr
RETURNING *;
`;
// Single-row lookup / delete by primary key.
var GET_JOB_SQL = `SELECT * FROM pglite_queue_jobs WHERE id = $1;`;
var REMOVE_JOB_SQL = `DELETE FROM pglite_queue_jobs WHERE id = $1;`;
525
var Queue = class {
  db = null;            // lazily-created PGlite instance, or the injected one
  ownsDb;               // whether stop() should close the connection
  dataDir;              // storage location when we create our own database
  concurrency;          // max jobs processed at once
  pollInterval;         // ms between safety-net polls
  shutdownTimeout;      // max ms stop() waits for in-flight jobs
  handleSignals;        // install SIGINT/SIGTERM handlers on start()
  handlers = /* @__PURE__ */ new Map();
  emitter = new TypedEmitter();
  worker = null;
  pollTimer = null;
  unsubscribeNotify = null;
  started = false;
  signalHandlers = [];
  migrated = false;
  /**
   * @param options.db existing PGlite instance (caller keeps ownership)
   * @param options.dataDir storage dir for a queue-owned database
   *                        (default "memory://")
   * @param options.concurrency default 1
   * @param options.pollInterval default 5000ms
   * @param options.shutdownTimeout default 30000ms
   * @param options.handleSignals default false
   */
  constructor(options = {}) {
    if (options.db) {
      this.db = options.db;
      this.ownsDb = false;
      this.dataDir = "";
    } else {
      this.ownsDb = true;
      this.dataDir = options.dataDir ?? "memory://";
    }
    this.concurrency = options.concurrency ?? 1;
    this.pollInterval = options.pollInterval ?? 5e3;
    this.shutdownTimeout = options.shutdownTimeout ?? 3e4;
    this.handleSignals = options.handleSignals ?? false;
  }
  /**
   * Register a job handler for a task name.
   */
  define(task, handler, options) {
    this.handlers.set(task, {
      handler,
      concurrency: options?.concurrency
    });
  }
  /**
   * Add a job to the queue.
   * @param options.delay number (ms) or string ("5s", "10m", ...)
   * @param options.priority lower runs first (default 0)
   * @param options.retry extra attempts after the first (default 0)
   */
  async add(task, data, options) {
    const db = await this.ensureReady();
    const delaySecs = options?.delay ? parseDelay(options.delay) / 1e3 : 0;
    const status = delaySecs > 0 ? "delayed" : "pending";
    const priority = options?.priority ?? 0;
    const maxAttempts = (options?.retry ?? 0) + 1;
    const result = await db.query(ADD_JOB_SQL, [
      task,
      JSON.stringify(data),
      status,
      priority,
      delaySecs,
      maxAttempts
    ]);
    return rowToJob(result.rows[0]);
  }
  /**
   * Register a recurring cron job. The task name doubles as cron_name, so
   * re-registering updates the existing schedule instead of duplicating it.
   */
  async every(cronExpr, task, data, options) {
    const db = await this.ensureReady();
    const cron = parseCron(cronExpr);
    const nextRun = cron.nextRun();
    const priority = options?.priority ?? 0;
    const maxAttempts = (options?.retry ?? 0) + 1;
    const result = await db.query(ADD_CRON_SQL, [
      task,
      JSON.stringify(data ?? {}),
      priority,
      nextRun.toISOString(),
      maxAttempts,
      cronExpr,
      task
      // cron_name defaults to task name
    ]);
    return rowToJob(result.rows[0]);
  }
  /**
   * Start processing jobs: recover stalled rows, spin up the worker,
   * subscribe to NOTIFY wake-ups (best-effort), and arm the safety-net
   * poll timer.
   */
  async start() {
    if (this.started) return;
    this.started = true;
    const db = await this.ensureReady();
    const recovered = await recoverStalledJobs(db);
    if (recovered > 0) {
      this.emitter.emit("error", new Error(`Recovered ${recovered} stalled jobs from previous crash`));
    }
    this.worker = new Worker(db, {
      concurrency: this.concurrency,
      handlers: this.handlers,
      emitter: this.emitter
    });
    try {
      const unsub = await db.listen("pglite_queue_new_job", () => {
        if (this.worker && !this.worker.isStopping) {
          void this.worker.poll();
        }
      });
      this.unsubscribeNotify = () => unsub();
    } catch {
      // LISTEN unsupported or failed; the interval poll below still runs
    }
    this.pollTimer = setInterval(() => {
      if (this.worker && !this.worker.isStopping) {
        void this.worker.poll();
      }
    }, this.pollInterval);
    if (this.handleSignals) {
      for (const signal of ["SIGINT", "SIGTERM"]) {
        const handler = () => {
          void this.stop();
        };
        process.on(signal, handler);
        this.signalHandlers.push({ signal, handler });
      }
    }
    void this.worker.poll();
  }
  /**
   * Gracefully stop processing. Waits for active jobs to finish (bounded
   * by shutdownTimeout), removes signal handlers, and closes the database
   * if this queue created it.
   */
  async stop() {
    if (!this.started) return;
    this.started = false;
    if (this.pollTimer) {
      clearInterval(this.pollTimer);
      this.pollTimer = null;
    }
    if (this.unsubscribeNotify) {
      this.unsubscribeNotify();
      this.unsubscribeNotify = null;
    }
    if (this.worker) {
      this.worker.stop();
      if (this.worker.active > 0) {
        const timeout = new Promise(
          (resolve) => setTimeout(resolve, this.shutdownTimeout)
        );
        await Promise.race([this.worker.waitForDrain(), timeout]);
      }
    }
    for (const { signal, handler } of this.signalHandlers) {
      process.removeListener(signal, handler);
    }
    this.signalHandlers = [];
    if (this.ownsDb && this.db) {
      await this.db.close();
      this.db = null;
    }
  }
  /**
   * Get a job by ID, or null when it does not exist.
   */
  async getJob(id) {
    const db = await this.ensureReady();
    const result = await db.query(GET_JOB_SQL, [id]);
    return result.rows[0] ? rowToJob(result.rows[0]) : null;
  }
  /**
   * Get jobs matching a filter (status, task, limit, offset), newest first.
   * @throws if limit/offset are not non-negative integers.
   */
  async getJobs(filter) {
    const db = await this.ensureReady();
    const conditions = [];
    const params = [];
    let paramIdx = 1;
    if (filter?.status) {
      const statuses = Array.isArray(filter.status) ? filter.status : [filter.status];
      conditions.push(`status = ANY($${paramIdx})`);
      params.push(statuses);
      paramIdx++;
    }
    if (filter?.task) {
      conditions.push(`task = $${paramIdx}`);
      params.push(filter.task);
      paramIdx++;
    }
    const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
    // FIX: limit/offset used to be interpolated into the SQL untouched,
    // so a non-integer value could inject SQL or break the statement.
    // Validate both before interpolation.
    const limit = filter?.limit ? `LIMIT ${validateCount(filter.limit, "limit")}` : "";
    const offset = filter?.offset ? `OFFSET ${validateCount(filter.offset, "offset")}` : "";
    const sql = `SELECT * FROM pglite_queue_jobs ${where} ORDER BY created_at DESC ${limit} ${offset};`;
    const result = await db.query(sql, params);
    return result.rows.map((row) => rowToJob(row));
    // Non-negative integer guard for values interpolated into SQL.
    function validateCount(value, name) {
      const n = Number(value);
      if (!Number.isInteger(n) || n < 0) {
        throw new Error(`Invalid ${name}: ${String(value)}`);
      }
      return n;
    }
  }
  /**
   * Remove a job by ID.
   * @returns true if a row was deleted.
   */
  async removeJob(id) {
    const db = await this.ensureReady();
    const result = await db.query(REMOVE_JOB_SQL, [id]);
    return (result.affectedRows ?? 0) > 0;
  }
  /**
   * Remove all completed/failed jobs (or only one of the two statuses).
   * @returns number of rows deleted.
   */
  async clean(status) {
    const db = await this.ensureReady();
    const statuses = status ? [status] : ["completed", "failed"];
    const result = await db.query(
      `DELETE FROM pglite_queue_jobs WHERE status = ANY($1);`,
      [statuses]
    );
    return result.affectedRows ?? 0;
  }
  /**
   * Delete all jobs (for testing).
   */
  async obliterate() {
    const db = await this.ensureReady();
    await db.exec("DELETE FROM pglite_queue_jobs;");
  }
  /**
   * Get job counts by status.
   */
  async counts() {
    const db = await this.ensureReady();
    const result = await db.query(
      `SELECT status, COUNT(*)::text as count FROM pglite_queue_jobs GROUP BY status;`
    );
    const counts = {};
    for (const row of result.rows) {
      counts[row.status] = parseInt(row.count, 10);
    }
    return counts;
  }
  // --- Event delegation (chainable, forwards to the shared emitter) ---
  on(event, listener) {
    this.emitter.on(event, listener);
    return this;
  }
  off(event, listener) {
    this.emitter.off(event, listener);
    return this;
  }
  once(event, listener) {
    this.emitter.once(event, listener);
    return this;
  }
  // --- Internal ---
  // Lazily create the PGlite instance when this queue owns its database.
  async ensureDb() {
    if (!this.db) {
      this.db = new import_pglite.PGlite(this.dataDir);
      await this.db.waitReady;
    }
    return this.db;
  }
  // Ensure the connection exists and migrations have run exactly once.
  async ensureReady() {
    const db = await this.ensureDb();
    if (!this.migrated) {
      await runMigrations(db);
      this.migrated = true;
    }
    return db;
  }
};
782
+ // Annotate the CommonJS export names for ESM import in node:
783
+ 0 && (module.exports = {
784
+ Queue,
785
+ calculateBackoff,
786
+ parseCron,
787
+ parseDelay
788
+ });
789
+ //# sourceMappingURL=index.cjs.map