pglite-queue 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +226 -0
- package/dist/index.cjs +789 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +157 -0
- package/dist/index.d.ts +157 -0
- package/dist/index.js +759 -0
- package/dist/index.js.map +1 -0
- package/package.json +61 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,759 @@
|
|
|
1
|
+
// src/queue.ts
|
|
2
|
+
import { PGlite } from "@electric-sql/pglite";
|
|
3
|
+
|
|
4
|
+
// src/types.ts
|
|
5
|
+
// src/types.ts
/**
 * Convert a raw database row (snake_case columns) into a Job object
 * (camelCase fields, with timestamp columns hydrated into Date instances;
 * nullable timestamps stay null when unset).
 */
function rowToJob(row) {
  const toDate = (value) => (value ? new Date(value) : null);
  return {
    id: row.id,
    task: row.task,
    data: row.data,
    status: row.status,
    priority: row.priority,
    runAt: new Date(row.run_at),
    startedAt: toDate(row.started_at),
    completedAt: toDate(row.completed_at),
    failedAt: toDate(row.failed_at),
    attempts: row.attempts,
    maxAttempts: row.max_attempts,
    lastError: row.last_error,
    progress: row.progress,
    result: row.result,
    cronExpr: row.cron_expr,
    createdAt: new Date(row.created_at),
    updatedAt: new Date(row.updated_at)
  };
}
|
|
26
|
+
|
|
27
|
+
// src/events.ts
|
|
28
|
+
import { EventEmitter } from "events";
|
|
29
|
+
// src/events.ts
/**
 * Thin chainable wrapper around Node's EventEmitter.
 * Registration/removal methods return `this` so calls can be chained;
 * emit() returns EventEmitter's boolean (true iff any listener ran).
 */
var TypedEmitter = class {
  emitter = new EventEmitter();
  on(event, listener) {
    this.emitter.on(event, listener);
    return this;
  }
  off(event, listener) {
    this.emitter.off(event, listener);
    return this;
  }
  once(event, listener) {
    this.emitter.once(event, listener);
    return this;
  }
  emit(event, ...args) {
    return this.emitter.emit(event, ...args);
  }
  removeAllListeners(event) {
    this.emitter.removeAllListeners(event);
    return this;
  }
};
|
|
51
|
+
|
|
52
|
+
// src/backoff.ts
|
|
53
|
+
// src/backoff.ts
/**
 * Compute the retry delay in milliseconds for a given attempt number (1-based).
 * "fixed": always baseDelay. "exponential" (default): baseDelay * factor^(attempt-1)
 * plus up to 25% random jitter, capped at maxDelay (default 5 minutes).
 */
function calculateBackoff(attempt, options) {
  const type = options?.type ?? "exponential";
  const baseDelay = options?.baseDelay ?? 1000;
  const maxDelay = options?.maxDelay ?? 300000;
  const factor = options?.factor ?? 2;
  if (type === "fixed") return baseDelay;
  const base = baseDelay * factor ** (attempt - 1);
  const jittered = base + base * Math.random() * 0.25;
  return Math.min(Math.round(jittered), maxDelay);
}
|
|
65
|
+
|
|
66
|
+
// src/cron.ts
|
|
67
|
+
var FIELDS = [
|
|
68
|
+
{ min: 0, max: 59 },
|
|
69
|
+
// minute
|
|
70
|
+
{ min: 0, max: 23 },
|
|
71
|
+
// hour
|
|
72
|
+
{ min: 1, max: 31 },
|
|
73
|
+
// day of month
|
|
74
|
+
{ min: 1, max: 12 },
|
|
75
|
+
// month
|
|
76
|
+
{ min: 0, max: 6 }
|
|
77
|
+
// day of week
|
|
78
|
+
];
|
|
79
|
+
function parseField(field, def) {
|
|
80
|
+
const values = /* @__PURE__ */ new Set();
|
|
81
|
+
for (const part of field.split(",")) {
|
|
82
|
+
const trimmed = part.trim();
|
|
83
|
+
if (trimmed === "*") {
|
|
84
|
+
for (let i = def.min; i <= def.max; i++) values.add(i);
|
|
85
|
+
continue;
|
|
86
|
+
}
|
|
87
|
+
const stepMatch = trimmed.match(/^(?:(\d+)-(\d+)|\*)\/(\d+)$/);
|
|
88
|
+
if (stepMatch) {
|
|
89
|
+
const start = stepMatch[1] !== void 0 ? parseInt(stepMatch[1], 10) : def.min;
|
|
90
|
+
const end = stepMatch[2] !== void 0 ? parseInt(stepMatch[2], 10) : def.max;
|
|
91
|
+
const step = parseInt(stepMatch[3] || "1", 10);
|
|
92
|
+
if (step === 0) throw new Error(`Invalid cron step: "${trimmed}"`);
|
|
93
|
+
for (let i = start; i <= end; i += step) values.add(i);
|
|
94
|
+
continue;
|
|
95
|
+
}
|
|
96
|
+
const rangeStepMatch = trimmed.match(/^(\d+)-(\d+)\/(\d+)$/);
|
|
97
|
+
if (rangeStepMatch) {
|
|
98
|
+
const start = parseInt(rangeStepMatch[1], 10);
|
|
99
|
+
const end = parseInt(rangeStepMatch[2], 10);
|
|
100
|
+
const step = parseInt(rangeStepMatch[3], 10);
|
|
101
|
+
if (step === 0) throw new Error(`Invalid cron step: "${trimmed}"`);
|
|
102
|
+
for (let i = start; i <= end; i += step) values.add(i);
|
|
103
|
+
continue;
|
|
104
|
+
}
|
|
105
|
+
const rangeMatch = trimmed.match(/^(\d+)-(\d+)$/);
|
|
106
|
+
if (rangeMatch) {
|
|
107
|
+
const start = parseInt(rangeMatch[1], 10);
|
|
108
|
+
const end = parseInt(rangeMatch[2], 10);
|
|
109
|
+
for (let i = start; i <= end; i++) values.add(i);
|
|
110
|
+
continue;
|
|
111
|
+
}
|
|
112
|
+
const num = parseInt(trimmed, 10);
|
|
113
|
+
if (isNaN(num) || num < def.min || num > def.max) {
|
|
114
|
+
throw new Error(`Invalid cron value "${trimmed}" for range ${def.min}-${def.max}`);
|
|
115
|
+
}
|
|
116
|
+
values.add(num);
|
|
117
|
+
}
|
|
118
|
+
return values;
|
|
119
|
+
}
|
|
120
|
+
function parseCron(expr) {
|
|
121
|
+
const parts = expr.trim().split(/\s+/);
|
|
122
|
+
if (parts.length !== 5) {
|
|
123
|
+
throw new Error(`Invalid cron expression: "${expr}". Expected 5 fields (minute hour dom month dow).`);
|
|
124
|
+
}
|
|
125
|
+
const minutes = parseField(parts[0], FIELDS[0]);
|
|
126
|
+
const hours = parseField(parts[1], FIELDS[1]);
|
|
127
|
+
const doms = parseField(parts[2], FIELDS[2]);
|
|
128
|
+
const months = parseField(parts[3], FIELDS[3]);
|
|
129
|
+
const dows = parseField(parts[4], FIELDS[4]);
|
|
130
|
+
function matches(date) {
|
|
131
|
+
return minutes.has(date.getUTCMinutes()) && hours.has(date.getUTCHours()) && doms.has(date.getUTCDate()) && months.has(date.getUTCMonth() + 1) && dows.has(date.getUTCDay());
|
|
132
|
+
}
|
|
133
|
+
function nextRun(from) {
|
|
134
|
+
const d = from ? new Date(from.getTime()) : /* @__PURE__ */ new Date();
|
|
135
|
+
d.setUTCSeconds(0, 0);
|
|
136
|
+
d.setUTCMinutes(d.getUTCMinutes() + 1);
|
|
137
|
+
const limit = d.getTime() + 4 * 365 * 24 * 60 * 60 * 1e3;
|
|
138
|
+
while (d.getTime() < limit) {
|
|
139
|
+
if (!months.has(d.getUTCMonth() + 1)) {
|
|
140
|
+
d.setUTCMonth(d.getUTCMonth() + 1, 1);
|
|
141
|
+
d.setUTCHours(0, 0, 0, 0);
|
|
142
|
+
continue;
|
|
143
|
+
}
|
|
144
|
+
if (!doms.has(d.getUTCDate()) || !dows.has(d.getUTCDay())) {
|
|
145
|
+
d.setUTCDate(d.getUTCDate() + 1);
|
|
146
|
+
d.setUTCHours(0, 0, 0, 0);
|
|
147
|
+
continue;
|
|
148
|
+
}
|
|
149
|
+
if (!hours.has(d.getUTCHours())) {
|
|
150
|
+
d.setUTCHours(d.getUTCHours() + 1, 0, 0, 0);
|
|
151
|
+
continue;
|
|
152
|
+
}
|
|
153
|
+
if (!minutes.has(d.getUTCMinutes())) {
|
|
154
|
+
d.setUTCMinutes(d.getUTCMinutes() + 1, 0, 0);
|
|
155
|
+
continue;
|
|
156
|
+
}
|
|
157
|
+
return d;
|
|
158
|
+
}
|
|
159
|
+
throw new Error(`No matching cron time found within 4 years for expression: "${expr}"`);
|
|
160
|
+
}
|
|
161
|
+
return { nextRun, matches };
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
// src/worker.ts
|
|
165
|
+
// src/worker.ts
// Atomically claim one due job (pending/delayed with run_at <= now) and mark
// it active, bumping its attempt counter. FOR UPDATE SKIP LOCKED lets
// concurrent claimants skip rows another transaction already locked, so no
// job is handed to two workers.
var FETCH_SQL = `
UPDATE pglite_queue_jobs
SET status = 'active', started_at = NOW(), attempts = attempts + 1
WHERE id = (
SELECT id FROM pglite_queue_jobs
WHERE status IN ('pending', 'delayed')
AND run_at <= NOW()
ORDER BY priority ASC, run_at ASC, id ASC
LIMIT 1
FOR UPDATE SKIP LOCKED
)
RETURNING *;
`;
// Mark a job completed, storing its JSON result ($2) and forcing progress to 100.
var COMPLETE_SQL = `
UPDATE pglite_queue_jobs
SET status = 'completed', completed_at = NOW(), result = $2, progress = 100
WHERE id = $1
RETURNING *;
`;
// Mark a job permanently failed, recording the final error message ($2).
var FAIL_SQL = `
UPDATE pglite_queue_jobs
SET status = 'failed', failed_at = NOW(), last_error = $2
WHERE id = $1
RETURNING *;
`;
// Re-schedule a failed attempt: back to 'delayed' with run_at pushed out by
// $3 seconds of backoff; the error message ($2) is kept for observability.
var RETRY_SQL = `
UPDATE pglite_queue_jobs
SET status = 'delayed', failed_at = NOW(), last_error = $2, run_at = NOW() + make_interval(secs => $3::double precision)
WHERE id = $1
RETURNING *;
`;
// Persist a progress percentage (0-100) for a job mid-execution.
var PROGRESS_SQL = `
UPDATE pglite_queue_jobs SET progress = $2 WHERE id = $1;
`;
// Enqueue the next occurrence of a cron job. The partial unique index on
// cron_name (see MIGRATION_SQL) makes this an upsert, so at most one
// not-yet-finished row exists per cron name.
var INSERT_CRON_NEXT_SQL = `
INSERT INTO pglite_queue_jobs (task, data, status, priority, run_at, max_attempts, cron_expr, cron_name)
VALUES ($1, $2, 'delayed', $3, $4, $5, $6, $7)
ON CONFLICT (cron_name) WHERE cron_name IS NOT NULL AND status NOT IN ('completed', 'failed')
DO UPDATE SET run_at = EXCLUDED.run_at, data = EXCLUDED.data;
`;
|
|
205
|
+
/**
 * Worker drains the jobs table: it claims due rows via FETCH_SQL's
 * SKIP LOCKED update, runs the registered handler for each, and persists the
 * outcome (completed / retried with backoff / permanently failed). At most
 * `concurrency` handlers run at a time.
 */
var Worker = class {
  db;
  // Map of task name -> { handler, concurrency } registered via Queue.define().
  handlers;
  // Shared emitter used to surface lifecycle events to the owning Queue.
  emitter;
  concurrency;
  // Number of handler invocations currently in flight.
  activeCount = 0;
  stopping = false;
  // Re-entrancy guards: only one doPoll runs at a time; polls requested while
  // one is running are coalesced into a single follow-up poll.
  pollInProgress = false;
  pollQueued = false;
  // Resolver for waitForDrain(), fired when activeCount returns to 0.
  drainResolve = null;
  constructor(db, options) {
    this.db = db;
    this.handlers = options.handlers;
    this.emitter = options.emitter;
    this.concurrency = options.concurrency;
  }
  /**
   * Request a polling pass. Safe to call at any frequency: overlapping calls
   * are coalesced, and calls after stop() are no-ops.
   */
  async poll() {
    if (this.stopping) return;
    if (this.pollInProgress) {
      this.pollQueued = true;
      return;
    }
    this.pollInProgress = true;
    try {
      await this.doPoll();
    } catch {
      // Best-effort: a transient DB error must not kill the worker; the next
      // interval/NOTIFY tick polls again.
    } finally {
      this.pollInProgress = false;
      if (this.pollQueued && !this.stopping) {
        this.pollQueued = false;
        void this.poll();
      }
    }
  }
  // Claim jobs until the concurrency limit is reached or no due row remains.
  async doPoll() {
    while (this.activeCount < this.concurrency && !this.stopping) {
      const row = await this.fetchOne();
      if (!row) break;
      const entry = this.handlers.get(row.task);
      if (!entry) {
        // No handler registered for this task: undo the claim so a later
        // define() (or another process) can pick the job up.
        await this.db.query(
          `UPDATE pglite_queue_jobs SET status = 'pending', started_at = NULL, attempts = attempts - 1 WHERE id = $1`,
          [row.id]
        );
        // BUGFIX: this was `continue`, which immediately re-claimed the same
        // row (still the top-priority due job after the reset) and spun in a
        // hot loop forever. Breaking ends this pass; the job is retried on
        // the next poll tick.
        break;
      }
      this.activeCount++;
      const job = rowToJob(row);
      this.emitter.emit("active", job);
      // Fire-and-forget: the job runs concurrently with further claiming.
      void this.processJob(row, entry).finally(() => {
        this.activeCount--;
        if (this.activeCount === 0 && this.drainResolve) {
          this.drainResolve();
          this.drainResolve = null;
        }
        if (!this.stopping) {
          void this.poll();
        }
      });
    }
    if (this.activeCount === 0) {
      // Nothing in flight after this pass: signal listeners the queue drained.
      this.emitter.emit("drained");
    }
  }
  // Claim and return one due job row, or null when none is available.
  async fetchOne() {
    const result = await this.db.query(FETCH_SQL);
    return result.rows[0] ?? null;
  }
  /**
   * Run one claimed job through its handler and persist the outcome.
   * On success: mark completed and, for cron rows, enqueue the next run.
   * On error: retry with backoff while attempts remain, else mark failed.
   * All persistence steps are skipped once stop() has been requested.
   */
  async processJob(row, entry) {
    const context = {
      id: row.id,
      task: row.task,
      data: row.data,
      attempts: row.attempts,
      maxAttempts: row.max_attempts,
      // Handler-facing progress reporter: clamps to [0, 100] and persists.
      progress: async (pct) => {
        if (this.stopping) return;
        const clamped = Math.max(0, Math.min(100, Math.round(pct)));
        try {
          await this.db.query(PROGRESS_SQL, [row.id, clamped]);
          this.emitter.emit("progress", rowToJob({ ...row, progress: clamped }), clamped);
        } catch {
          // Progress updates are advisory; never fail the job over them.
        }
      }
    };
    try {
      const result = await entry.handler(context);
      if (this.stopping) return;
      try {
        const completedResult = await this.db.query(COMPLETE_SQL, [
          row.id,
          result !== void 0 ? JSON.stringify(result) : null
        ]);
        const completedJob = rowToJob(completedResult.rows[0]);
        this.emitter.emit("completed", completedJob);
        if (row.cron_expr) {
          await this.scheduleNextCron(row);
        }
      } catch {
        // Swallow persistence errors during completion; crash recovery at the
        // next start() will requeue/fail any row stuck in 'active'.
      }
    } catch (err) {
      const error = err instanceof Error ? err : new Error(String(err));
      if (this.stopping) return;
      try {
        if (row.attempts < row.max_attempts) {
          // Attempts remain: delay the job by an exponential backoff.
          const backoffMs = calculateBackoff(row.attempts);
          const backoffSecs = backoffMs / 1e3;
          const retryResult = await this.db.query(RETRY_SQL, [row.id, error.message, backoffSecs]);
          const retryJob = rowToJob(retryResult.rows[0]);
          this.emitter.emit("retrying", retryJob, row.attempts);
        } else {
          const failResult = await this.db.query(FAIL_SQL, [row.id, error.message]);
          const failedJob = rowToJob(failResult.rows[0]);
          this.emitter.emit("failed", failedJob, error);
        }
        if (row.cron_expr) {
          // A failing cron occurrence must not stop future occurrences.
          await this.scheduleNextCron(row);
        }
      } catch {
        // Same rationale as above: crash recovery handles stuck rows.
      }
    }
  }
  // Upsert the next occurrence of a recurring job based on its cron_expr.
  async scheduleNextCron(row) {
    try {
      const cron = parseCron(row.cron_expr);
      const nextRun = cron.nextRun();
      await this.db.query(INSERT_CRON_NEXT_SQL, [
        row.task,
        JSON.stringify(row.data),
        row.priority,
        nextRun.toISOString(),
        row.max_attempts,
        row.cron_expr,
        row.cron_name
      ]);
    } catch (err) {
      const error = err instanceof Error ? err : new Error(String(err));
      this.emitter.emit("error", error);
    }
  }
  /** Resolve once no handler is in flight (immediately if already idle). */
  waitForDrain() {
    if (this.activeCount === 0) return Promise.resolve();
    return new Promise((resolve) => {
      this.drainResolve = resolve;
    });
  }
  /** Stop claiming new jobs; in-flight handlers keep running. */
  stop() {
    this.stopping = true;
  }
  get active() {
    return this.activeCount;
  }
  get isStopping() {
    return this.stopping;
  }
};
|
|
361
|
+
|
|
362
|
+
// src/schema.ts
|
|
363
|
+
var MIGRATION_SQL = `
|
|
364
|
+
CREATE TABLE IF NOT EXISTS pglite_queue_jobs (
|
|
365
|
+
id BIGSERIAL PRIMARY KEY,
|
|
366
|
+
task TEXT NOT NULL,
|
|
367
|
+
data JSONB NOT NULL DEFAULT '{}'::jsonb,
|
|
368
|
+
status TEXT NOT NULL DEFAULT 'pending',
|
|
369
|
+
priority INTEGER NOT NULL DEFAULT 0,
|
|
370
|
+
run_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
|
371
|
+
started_at TIMESTAMPTZ,
|
|
372
|
+
completed_at TIMESTAMPTZ,
|
|
373
|
+
failed_at TIMESTAMPTZ,
|
|
374
|
+
attempts INTEGER NOT NULL DEFAULT 0,
|
|
375
|
+
max_attempts INTEGER NOT NULL DEFAULT 1,
|
|
376
|
+
last_error TEXT,
|
|
377
|
+
progress INTEGER NOT NULL DEFAULT 0 CHECK (progress >= 0 AND progress <= 100),
|
|
378
|
+
result JSONB,
|
|
379
|
+
cron_expr TEXT,
|
|
380
|
+
cron_name TEXT,
|
|
381
|
+
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
|
382
|
+
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
|
383
|
+
);
|
|
384
|
+
|
|
385
|
+
CREATE INDEX IF NOT EXISTS idx_pglite_queue_pickup
|
|
386
|
+
ON pglite_queue_jobs (priority ASC, run_at ASC, id ASC)
|
|
387
|
+
WHERE status IN ('pending', 'delayed');
|
|
388
|
+
|
|
389
|
+
CREATE INDEX IF NOT EXISTS idx_pglite_queue_task
|
|
390
|
+
ON pglite_queue_jobs (task);
|
|
391
|
+
|
|
392
|
+
CREATE INDEX IF NOT EXISTS idx_pglite_queue_status
|
|
393
|
+
ON pglite_queue_jobs (status);
|
|
394
|
+
|
|
395
|
+
DO $$ BEGIN
|
|
396
|
+
IF NOT EXISTS (
|
|
397
|
+
SELECT 1 FROM pg_indexes
|
|
398
|
+
WHERE indexname = 'idx_pglite_queue_cron_name'
|
|
399
|
+
) THEN
|
|
400
|
+
CREATE UNIQUE INDEX idx_pglite_queue_cron_name
|
|
401
|
+
ON pglite_queue_jobs (cron_name)
|
|
402
|
+
WHERE cron_name IS NOT NULL AND status NOT IN ('completed', 'failed');
|
|
403
|
+
END IF;
|
|
404
|
+
END $$;
|
|
405
|
+
|
|
406
|
+
CREATE OR REPLACE FUNCTION pglite_queue_update_timestamp()
|
|
407
|
+
RETURNS TRIGGER AS $$
|
|
408
|
+
BEGIN
|
|
409
|
+
NEW.updated_at = NOW();
|
|
410
|
+
RETURN NEW;
|
|
411
|
+
END;
|
|
412
|
+
$$ LANGUAGE plpgsql;
|
|
413
|
+
|
|
414
|
+
DO $$ BEGIN
|
|
415
|
+
IF NOT EXISTS (
|
|
416
|
+
SELECT 1 FROM pg_trigger WHERE tgname = 'trg_pglite_queue_updated_at'
|
|
417
|
+
) THEN
|
|
418
|
+
CREATE TRIGGER trg_pglite_queue_updated_at
|
|
419
|
+
BEFORE UPDATE ON pglite_queue_jobs
|
|
420
|
+
FOR EACH ROW
|
|
421
|
+
EXECUTE FUNCTION pglite_queue_update_timestamp();
|
|
422
|
+
END IF;
|
|
423
|
+
END $$;
|
|
424
|
+
|
|
425
|
+
CREATE OR REPLACE FUNCTION pglite_queue_notify_new_job()
|
|
426
|
+
RETURNS TRIGGER AS $$
|
|
427
|
+
BEGIN
|
|
428
|
+
PERFORM pg_notify('pglite_queue_new_job', NEW.id::text);
|
|
429
|
+
RETURN NEW;
|
|
430
|
+
END;
|
|
431
|
+
$$ LANGUAGE plpgsql;
|
|
432
|
+
|
|
433
|
+
DO $$ BEGIN
|
|
434
|
+
IF NOT EXISTS (
|
|
435
|
+
SELECT 1 FROM pg_trigger WHERE tgname = 'trg_pglite_queue_notify'
|
|
436
|
+
) THEN
|
|
437
|
+
CREATE TRIGGER trg_pglite_queue_notify
|
|
438
|
+
AFTER INSERT ON pglite_queue_jobs
|
|
439
|
+
FOR EACH ROW
|
|
440
|
+
EXECUTE FUNCTION pglite_queue_notify_new_job();
|
|
441
|
+
END IF;
|
|
442
|
+
END $$;
|
|
443
|
+
`;
|
|
444
|
+
// Crash recovery: any row still marked 'active' at startup was orphaned by a
// previous process. Requeue it as 'pending' — or fail it outright if its
// attempts are already exhausted — and record a generic error if none exists.
var RECOVERY_SQL = `
UPDATE pglite_queue_jobs
SET
status = CASE
WHEN attempts >= max_attempts THEN 'failed'
ELSE 'pending'
END,
started_at = NULL,
last_error = COALESCE(last_error, 'Process crashed during execution')
WHERE status = 'active';
`;
/** Apply the queue schema to the given database (idempotent). */
async function runMigrations(db) {
  await db.exec(MIGRATION_SQL);
}
/**
 * Requeue or fail orphaned 'active' jobs.
 * @returns the number of rows updated (0 when the driver reports none).
 */
async function recoverStalledJobs(db) {
  const { affectedRows } = await db.query(RECOVERY_SQL);
  return affectedRows ?? 0;
}
|
|
462
|
+
|
|
463
|
+
// src/delay.ts
|
|
464
|
+
// src/delay.ts
// Milliseconds per supported duration-suffix unit.
var UNITS = {
  s: 1000,
  m: 60000,
  h: 3600000,
  d: 86400000
};
/**
 * Normalize a delay to milliseconds. A number passes through unchanged
 * (interpreted as ms); a string must look like "<number><unit>" with unit
 * s/m/h/d (case-insensitive, optional whitespace before the unit).
 * @throws Error on any other string format.
 */
function parseDelay(input) {
  if (typeof input === "number") return input;
  const match = input.trim().match(/^(\d+(?:\.\d+)?)\s*(s|m|h|d)$/i);
  if (!match) {
    throw new Error(`Invalid delay format: "${input}". Use a number (ms) or a string like "5s", "10m", "2h", "1d".`);
  }
  const [, amount, unit] = match;
  return Math.round(parseFloat(amount) * UNITS[unit.toLowerCase()]);
}
|
|
480
|
+
|
|
481
|
+
// src/queue.ts
|
|
482
|
+
// src/queue.ts
// Insert a new one-shot job; run_at is offset from NOW() by $5 seconds
// (0 = runnable immediately).
var ADD_JOB_SQL = `
INSERT INTO pglite_queue_jobs (task, data, status, priority, run_at, max_attempts)
VALUES ($1, $2, $3, $4, NOW() + make_interval(secs => $5::double precision), $6)
RETURNING *;
`;
// Upsert a recurring job keyed by cron_name (via the partial unique index in
// MIGRATION_SQL): if a live row already exists for this cron, refresh its
// schedule, payload, and expression instead of inserting a duplicate.
var ADD_CRON_SQL = `
INSERT INTO pglite_queue_jobs (task, data, status, priority, run_at, max_attempts, cron_expr, cron_name)
VALUES ($1, $2, 'delayed', $3, $4, $5, $6, $7)
ON CONFLICT (cron_name) WHERE cron_name IS NOT NULL AND status NOT IN ('completed', 'failed')
DO UPDATE SET run_at = EXCLUDED.run_at, data = EXCLUDED.data, cron_expr = EXCLUDED.cron_expr
RETURNING *;
`;
// Fetch a single job by primary key.
var GET_JOB_SQL = `SELECT * FROM pglite_queue_jobs WHERE id = $1;`;
// Delete a single job by primary key.
var REMOVE_JOB_SQL = `DELETE FROM pglite_queue_jobs WHERE id = $1;`;
|
|
496
|
+
/**
 * Queue is the public API: it owns (or wraps) a PGlite database, registers
 * task handlers, enqueues one-shot and cron jobs, and drives a Worker that
 * processes them. The database is opened and migrated lazily on first use.
 */
var Queue = class {
  db = null;
  // True when this Queue created the PGlite instance (and must close it).
  ownsDb;
  dataDir;
  concurrency;
  pollInterval;
  shutdownTimeout;
  handleSignals;
  // task name -> { handler, concurrency } (consumed by Worker).
  handlers = /* @__PURE__ */ new Map();
  emitter = new TypedEmitter();
  worker = null;
  pollTimer = null;
  unsubscribeNotify = null;
  started = false;
  // Installed process signal handlers, remembered so stop() can remove them.
  signalHandlers = [];
  migrated = false;
  constructor(options = {}) {
    if (options.db) {
      // Caller supplied a database: use it but never close it ourselves.
      this.db = options.db;
      this.ownsDb = false;
      this.dataDir = "";
    } else {
      this.ownsDb = true;
      this.dataDir = options.dataDir ?? "memory://";
    }
    this.concurrency = options.concurrency ?? 1;
    this.pollInterval = options.pollInterval ?? 5e3;
    this.shutdownTimeout = options.shutdownTimeout ?? 3e4;
    this.handleSignals = options.handleSignals ?? false;
  }
  /**
   * Register a job handler for a task name. Re-defining a task replaces the
   * previous handler.
   */
  define(task, handler, options) {
    this.handlers.set(task, {
      handler,
      concurrency: options?.concurrency
    });
  }
  /**
   * Add a job to the queue. A positive `delay` (ms or "5s"-style string)
   * schedules it as 'delayed'; `retry` is the number of retries on top of
   * the first attempt. Returns the inserted job.
   */
  async add(task, data, options) {
    const db = await this.ensureReady();
    const delaySecs = options?.delay ? parseDelay(options.delay) / 1e3 : 0;
    const status = delaySecs > 0 ? "delayed" : "pending";
    const priority = options?.priority ?? 0;
    const maxAttempts = (options?.retry ?? 0) + 1;
    const result = await db.query(ADD_JOB_SQL, [
      task,
      JSON.stringify(data),
      status,
      priority,
      delaySecs,
      maxAttempts
    ]);
    return rowToJob(result.rows[0]);
  }
  /**
   * Register a recurring cron job. The expression is validated up front and
   * the first occurrence scheduled; upserting on cron_name means calling
   * this again for the same task updates the existing schedule.
   */
  async every(cronExpr, task, data, options) {
    const db = await this.ensureReady();
    const cron = parseCron(cronExpr);
    const nextRun = cron.nextRun();
    const priority = options?.priority ?? 0;
    const maxAttempts = (options?.retry ?? 0) + 1;
    const result = await db.query(ADD_CRON_SQL, [
      task,
      JSON.stringify(data ?? {}),
      priority,
      nextRun.toISOString(),
      maxAttempts,
      cronExpr,
      task
      // cron_name defaults to task name
    ]);
    return rowToJob(result.rows[0]);
  }
  /**
   * Start processing jobs: recover rows orphaned by a previous crash, create
   * the Worker, subscribe to the new-job NOTIFY channel (best-effort), start
   * the fallback polling interval, optionally install SIGINT/SIGTERM
   * handlers, and kick off an immediate poll. Idempotent while started.
   */
  async start() {
    if (this.started) return;
    this.started = true;
    const db = await this.ensureReady();
    const recovered = await recoverStalledJobs(db);
    if (recovered > 0) {
      // Surfaced on the 'error' channel so it is visible without a dedicated event.
      this.emitter.emit("error", new Error(`Recovered ${recovered} stalled jobs from previous crash`));
    }
    this.worker = new Worker(db, {
      concurrency: this.concurrency,
      handlers: this.handlers,
      emitter: this.emitter
    });
    try {
      const unsub = await db.listen("pglite_queue_new_job", () => {
        if (this.worker && !this.worker.isStopping) {
          void this.worker.poll();
        }
      });
      this.unsubscribeNotify = () => unsub();
    } catch {
      // LISTEN support is optional; the polling interval below still works.
    }
    this.pollTimer = setInterval(() => {
      if (this.worker && !this.worker.isStopping) {
        void this.worker.poll();
      }
    }, this.pollInterval);
    if (this.handleSignals) {
      for (const signal of ["SIGINT", "SIGTERM"]) {
        const handler = () => {
          void this.stop();
        };
        process.on(signal, handler);
        this.signalHandlers.push({ signal, handler });
      }
    }
    void this.worker.poll();
  }
  /**
   * Gracefully stop processing. Waits for active jobs to finish (bounded by
   * shutdownTimeout), removes signal handlers, and closes the database if
   * this Queue owns it.
   */
  async stop() {
    if (!this.started) return;
    this.started = false;
    if (this.pollTimer) {
      clearInterval(this.pollTimer);
      this.pollTimer = null;
    }
    if (this.unsubscribeNotify) {
      this.unsubscribeNotify();
      this.unsubscribeNotify = null;
    }
    if (this.worker) {
      this.worker.stop();
      if (this.worker.active > 0) {
        // Give in-flight handlers up to shutdownTimeout ms to drain.
        const timeout = new Promise(
          (resolve) => setTimeout(resolve, this.shutdownTimeout)
        );
        await Promise.race([this.worker.waitForDrain(), timeout]);
      }
    }
    for (const { signal, handler } of this.signalHandlers) {
      process.removeListener(signal, handler);
    }
    this.signalHandlers = [];
    if (this.ownsDb && this.db) {
      await this.db.close();
      this.db = null;
    }
  }
  /**
   * Get a job by ID, or null when no such row exists.
   */
  async getJob(id) {
    const db = await this.ensureReady();
    const result = await db.query(GET_JOB_SQL, [id]);
    return result.rows[0] ? rowToJob(result.rows[0]) : null;
  }
  /**
   * Get jobs matching a filter (status, task, limit, offset), newest first.
   * NOTE(review): limit/offset are interpolated directly into the SQL; safe
   * for numeric values but worth validating if ever fed untrusted input.
   */
  async getJobs(filter) {
    const db = await this.ensureReady();
    const conditions = [];
    const params = [];
    let paramIdx = 1;
    if (filter?.status) {
      const statuses = Array.isArray(filter.status) ? filter.status : [filter.status];
      conditions.push(`status = ANY($${paramIdx})`);
      params.push(statuses);
      paramIdx++;
    }
    if (filter?.task) {
      conditions.push(`task = $${paramIdx}`);
      params.push(filter.task);
      paramIdx++;
    }
    const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
    const limit = filter?.limit ? `LIMIT ${filter.limit}` : "";
    const offset = filter?.offset ? `OFFSET ${filter.offset}` : "";
    const sql = `SELECT * FROM pglite_queue_jobs ${where} ORDER BY created_at DESC ${limit} ${offset};`;
    const result = await db.query(sql, params);
    return result.rows.map((row) => rowToJob(row));
  }
  /**
   * Remove a job by ID. Returns true when a row was deleted.
   */
  async removeJob(id) {
    const db = await this.ensureReady();
    const result = await db.query(REMOVE_JOB_SQL, [id]);
    return (result.affectedRows ?? 0) > 0;
  }
  /**
   * Remove finished jobs: all 'completed' and 'failed' rows by default, or
   * only the given status. Returns the number of rows deleted.
   */
  async clean(status) {
    const db = await this.ensureReady();
    const statuses = status ? [status] : ["completed", "failed"];
    const result = await db.query(
      `DELETE FROM pglite_queue_jobs WHERE status = ANY($1);`,
      [statuses]
    );
    return result.affectedRows ?? 0;
  }
  /**
   * Delete all jobs regardless of status (for testing).
   */
  async obliterate() {
    const db = await this.ensureReady();
    await db.exec("DELETE FROM pglite_queue_jobs;");
  }
  /**
   * Get job counts by status, e.g. { pending: 3, completed: 10 }.
   */
  async counts() {
    const db = await this.ensureReady();
    const result = await db.query(
      `SELECT status, COUNT(*)::text as count FROM pglite_queue_jobs GROUP BY status;`
    );
    const counts = {};
    for (const row of result.rows) {
      counts[row.status] = parseInt(row.count, 10);
    }
    return counts;
  }
  // --- Event delegation (chainable, forwards to the internal emitter) ---
  on(event, listener) {
    this.emitter.on(event, listener);
    return this;
  }
  off(event, listener) {
    this.emitter.off(event, listener);
    return this;
  }
  once(event, listener) {
    this.emitter.once(event, listener);
    return this;
  }
  // --- Internal ---
  // Lazily open the owned PGlite instance on first use.
  async ensureDb() {
    if (!this.db) {
      this.db = new PGlite(this.dataDir);
      await this.db.waitReady;
    }
    return this.db;
  }
  // Open the database and run migrations exactly once per Queue instance.
  async ensureReady() {
    const db = await this.ensureDb();
    if (!this.migrated) {
      await runMigrations(db);
      this.migrated = true;
    }
    return db;
  }
};
|
|
753
|
+
export {
|
|
754
|
+
Queue,
|
|
755
|
+
calculateBackoff,
|
|
756
|
+
parseCron,
|
|
757
|
+
parseDelay
|
|
758
|
+
};
|
|
759
|
+
//# sourceMappingURL=index.js.map
|