@nicnocquee/dataqueue 1.21.0 → 1.24.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.cjs +6 -0
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +6 -0
- package/dist/cli.js.map +1 -1
- package/dist/index.cjs +550 -30
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +170 -3
- package/dist/index.d.ts +170 -3
- package/dist/index.js +545 -31
- package/dist/index.js.map +1 -1
- package/migrations/1765809419000_add_force_kill_on_timeout_to_job_queue.sql +6 -0
- package/package.json +1 -1
- package/src/cli.ts +6 -0
- package/src/db-util.test.ts +56 -1
- package/src/db-util.ts +77 -6
- package/src/handler-validation.test.ts +414 -0
- package/src/handler-validation.ts +168 -0
- package/src/index.test.ts +224 -0
- package/src/index.ts +33 -0
- package/src/processor.test.ts +55 -0
- package/src/processor.ts +261 -17
- package/src/queue.test.ts +522 -0
- package/src/queue.ts +286 -9
- package/src/types.ts +122 -1
package/dist/index.js
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
import { AsyncLocalStorage } from 'async_hooks';
|
|
2
|
+
import { Worker } from 'worker_threads';
|
|
2
3
|
import { Pool } from 'pg';
|
|
3
4
|
import { parse } from 'pg-connection-string';
|
|
5
|
+
import fs from 'fs';
|
|
4
6
|
|
|
5
7
|
// src/types.ts
|
|
6
8
|
var JobEventType = /* @__PURE__ */ ((JobEventType2) => {
|
|
@@ -10,6 +12,7 @@ var JobEventType = /* @__PURE__ */ ((JobEventType2) => {
|
|
|
10
12
|
JobEventType2["Failed"] = "failed";
|
|
11
13
|
JobEventType2["Cancelled"] = "cancelled";
|
|
12
14
|
JobEventType2["Retried"] = "retried";
|
|
15
|
+
JobEventType2["Edited"] = "edited";
|
|
13
16
|
return JobEventType2;
|
|
14
17
|
})(JobEventType || {});
|
|
15
18
|
var FailureReason = /* @__PURE__ */ ((FailureReason3) => {
|
|
@@ -53,6 +56,7 @@ var addJob = async (pool, {
|
|
|
53
56
|
priority = 0,
|
|
54
57
|
runAt = null,
|
|
55
58
|
timeoutMs = void 0,
|
|
59
|
+
forceKillOnTimeout = false,
|
|
56
60
|
tags = void 0
|
|
57
61
|
}) => {
|
|
58
62
|
const client = await pool.connect();
|
|
@@ -61,8 +65,8 @@ var addJob = async (pool, {
|
|
|
61
65
|
if (runAt) {
|
|
62
66
|
result = await client.query(
|
|
63
67
|
`INSERT INTO job_queue
|
|
64
|
-
(job_type, payload, max_attempts, priority, run_at, timeout_ms, tags)
|
|
65
|
-
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
|
68
|
+
(job_type, payload, max_attempts, priority, run_at, timeout_ms, force_kill_on_timeout, tags)
|
|
69
|
+
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
|
66
70
|
RETURNING id`,
|
|
67
71
|
[
|
|
68
72
|
jobType,
|
|
@@ -71,6 +75,7 @@ var addJob = async (pool, {
|
|
|
71
75
|
priority,
|
|
72
76
|
runAt,
|
|
73
77
|
timeoutMs ?? null,
|
|
78
|
+
forceKillOnTimeout ?? false,
|
|
74
79
|
tags ?? null
|
|
75
80
|
]
|
|
76
81
|
);
|
|
@@ -80,8 +85,8 @@ var addJob = async (pool, {
|
|
|
80
85
|
} else {
|
|
81
86
|
result = await client.query(
|
|
82
87
|
`INSERT INTO job_queue
|
|
83
|
-
(job_type, payload, max_attempts, priority, timeout_ms, tags)
|
|
84
|
-
VALUES ($1, $2, $3, $4, $5, $6)
|
|
88
|
+
(job_type, payload, max_attempts, priority, timeout_ms, force_kill_on_timeout, tags)
|
|
89
|
+
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
|
85
90
|
RETURNING id`,
|
|
86
91
|
[
|
|
87
92
|
jobType,
|
|
@@ -89,6 +94,7 @@ var addJob = async (pool, {
|
|
|
89
94
|
maxAttempts,
|
|
90
95
|
priority,
|
|
91
96
|
timeoutMs ?? null,
|
|
97
|
+
forceKillOnTimeout ?? false,
|
|
92
98
|
tags ?? null
|
|
93
99
|
]
|
|
94
100
|
);
|
|
@@ -113,7 +119,7 @@ var getJob = async (pool, id) => {
|
|
|
113
119
|
const client = await pool.connect();
|
|
114
120
|
try {
|
|
115
121
|
const result = await client.query(
|
|
116
|
-
`SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue WHERE id = $1`,
|
|
122
|
+
`SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", force_kill_on_timeout AS "forceKillOnTimeout", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason", tags FROM job_queue WHERE id = $1`,
|
|
117
123
|
[id]
|
|
118
124
|
);
|
|
119
125
|
if (result.rows.length === 0) {
|
|
@@ -126,6 +132,7 @@ var getJob = async (pool, id) => {
|
|
|
126
132
|
...job,
|
|
127
133
|
payload: job.payload,
|
|
128
134
|
timeoutMs: job.timeoutMs,
|
|
135
|
+
forceKillOnTimeout: job.forceKillOnTimeout,
|
|
129
136
|
failureReason: job.failureReason
|
|
130
137
|
};
|
|
131
138
|
} catch (error) {
|
|
@@ -139,7 +146,7 @@ var getJobsByStatus = async (pool, status, limit = 100, offset = 0) => {
|
|
|
139
146
|
const client = await pool.connect();
|
|
140
147
|
try {
|
|
141
148
|
const result = await client.query(
|
|
142
|
-
`SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue WHERE status = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3`,
|
|
149
|
+
`SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", force_kill_on_timeout AS "forceKillOnTimeout", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue WHERE status = $1 ORDER BY created_at DESC LIMIT $2 OFFSET $3`,
|
|
143
150
|
[status, limit, offset]
|
|
144
151
|
);
|
|
145
152
|
log(`Found ${result.rows.length} jobs by status ${status}`);
|
|
@@ -147,6 +154,7 @@ var getJobsByStatus = async (pool, status, limit = 100, offset = 0) => {
|
|
|
147
154
|
...job,
|
|
148
155
|
payload: job.payload,
|
|
149
156
|
timeoutMs: job.timeoutMs,
|
|
157
|
+
forceKillOnTimeout: job.forceKillOnTimeout,
|
|
150
158
|
failureReason: job.failureReason
|
|
151
159
|
}));
|
|
152
160
|
} catch (error) {
|
|
@@ -192,7 +200,7 @@ var getNextBatch = async (pool, workerId, batchSize = 10, jobType) => {
|
|
|
192
200
|
LIMIT $2
|
|
193
201
|
FOR UPDATE SKIP LOCKED
|
|
194
202
|
)
|
|
195
|
-
RETURNING id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason"
|
|
203
|
+
RETURNING id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", force_kill_on_timeout AS "forceKillOnTimeout", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason"
|
|
196
204
|
`,
|
|
197
205
|
params
|
|
198
206
|
);
|
|
@@ -204,7 +212,8 @@ var getNextBatch = async (pool, workerId, batchSize = 10, jobType) => {
|
|
|
204
212
|
return result.rows.map((job) => ({
|
|
205
213
|
...job,
|
|
206
214
|
payload: job.payload,
|
|
207
|
-
timeoutMs: job.timeoutMs
|
|
215
|
+
timeoutMs: job.timeoutMs,
|
|
216
|
+
forceKillOnTimeout: job.forceKillOnTimeout
|
|
208
217
|
}));
|
|
209
218
|
} catch (error) {
|
|
210
219
|
log(`Error getting next batch: ${error}`);
|
|
@@ -337,6 +346,198 @@ var cancelJob = async (pool, jobId) => {
|
|
|
337
346
|
client.release();
|
|
338
347
|
}
|
|
339
348
|
};
|
|
349
|
+
var editJob = async (pool, jobId, updates) => {
|
|
350
|
+
const client = await pool.connect();
|
|
351
|
+
try {
|
|
352
|
+
const updateFields = [];
|
|
353
|
+
const params = [];
|
|
354
|
+
let paramIdx = 1;
|
|
355
|
+
if (updates.payload !== void 0) {
|
|
356
|
+
updateFields.push(`payload = $${paramIdx++}`);
|
|
357
|
+
params.push(updates.payload);
|
|
358
|
+
}
|
|
359
|
+
if (updates.maxAttempts !== void 0) {
|
|
360
|
+
updateFields.push(`max_attempts = $${paramIdx++}`);
|
|
361
|
+
params.push(updates.maxAttempts);
|
|
362
|
+
}
|
|
363
|
+
if (updates.priority !== void 0) {
|
|
364
|
+
updateFields.push(`priority = $${paramIdx++}`);
|
|
365
|
+
params.push(updates.priority);
|
|
366
|
+
}
|
|
367
|
+
if (updates.runAt !== void 0) {
|
|
368
|
+
if (updates.runAt === null) {
|
|
369
|
+
updateFields.push(`run_at = NOW()`);
|
|
370
|
+
} else {
|
|
371
|
+
updateFields.push(`run_at = $${paramIdx++}`);
|
|
372
|
+
params.push(updates.runAt);
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
if (updates.timeoutMs !== void 0) {
|
|
376
|
+
updateFields.push(`timeout_ms = $${paramIdx++}`);
|
|
377
|
+
params.push(updates.timeoutMs ?? null);
|
|
378
|
+
}
|
|
379
|
+
if (updates.tags !== void 0) {
|
|
380
|
+
updateFields.push(`tags = $${paramIdx++}`);
|
|
381
|
+
params.push(updates.tags ?? null);
|
|
382
|
+
}
|
|
383
|
+
if (updateFields.length === 0) {
|
|
384
|
+
log(`No fields to update for job ${jobId}`);
|
|
385
|
+
return;
|
|
386
|
+
}
|
|
387
|
+
updateFields.push(`updated_at = NOW()`);
|
|
388
|
+
params.push(jobId);
|
|
389
|
+
const query = `
|
|
390
|
+
UPDATE job_queue
|
|
391
|
+
SET ${updateFields.join(", ")}
|
|
392
|
+
WHERE id = $${paramIdx} AND status = 'pending'
|
|
393
|
+
`;
|
|
394
|
+
await client.query(query, params);
|
|
395
|
+
const metadata = {};
|
|
396
|
+
if (updates.payload !== void 0) metadata.payload = updates.payload;
|
|
397
|
+
if (updates.maxAttempts !== void 0)
|
|
398
|
+
metadata.maxAttempts = updates.maxAttempts;
|
|
399
|
+
if (updates.priority !== void 0) metadata.priority = updates.priority;
|
|
400
|
+
if (updates.runAt !== void 0) metadata.runAt = updates.runAt;
|
|
401
|
+
if (updates.timeoutMs !== void 0) metadata.timeoutMs = updates.timeoutMs;
|
|
402
|
+
if (updates.tags !== void 0) metadata.tags = updates.tags;
|
|
403
|
+
await recordJobEvent(pool, jobId, "edited" /* Edited */, metadata);
|
|
404
|
+
log(`Edited job ${jobId}: ${JSON.stringify(metadata)}`);
|
|
405
|
+
} catch (error) {
|
|
406
|
+
log(`Error editing job ${jobId}: ${error}`);
|
|
407
|
+
throw error;
|
|
408
|
+
} finally {
|
|
409
|
+
client.release();
|
|
410
|
+
}
|
|
411
|
+
};
|
|
412
|
+
var editAllPendingJobs = async (pool, filters = void 0, updates) => {
|
|
413
|
+
const client = await pool.connect();
|
|
414
|
+
try {
|
|
415
|
+
const updateFields = [];
|
|
416
|
+
const params = [];
|
|
417
|
+
let paramIdx = 1;
|
|
418
|
+
if (updates.payload !== void 0) {
|
|
419
|
+
updateFields.push(`payload = $${paramIdx++}`);
|
|
420
|
+
params.push(updates.payload);
|
|
421
|
+
}
|
|
422
|
+
if (updates.maxAttempts !== void 0) {
|
|
423
|
+
updateFields.push(`max_attempts = $${paramIdx++}`);
|
|
424
|
+
params.push(updates.maxAttempts);
|
|
425
|
+
}
|
|
426
|
+
if (updates.priority !== void 0) {
|
|
427
|
+
updateFields.push(`priority = $${paramIdx++}`);
|
|
428
|
+
params.push(updates.priority);
|
|
429
|
+
}
|
|
430
|
+
if (updates.runAt !== void 0) {
|
|
431
|
+
if (updates.runAt === null) {
|
|
432
|
+
updateFields.push(`run_at = NOW()`);
|
|
433
|
+
} else {
|
|
434
|
+
updateFields.push(`run_at = $${paramIdx++}`);
|
|
435
|
+
params.push(updates.runAt);
|
|
436
|
+
}
|
|
437
|
+
}
|
|
438
|
+
if (updates.timeoutMs !== void 0) {
|
|
439
|
+
updateFields.push(`timeout_ms = $${paramIdx++}`);
|
|
440
|
+
params.push(updates.timeoutMs ?? null);
|
|
441
|
+
}
|
|
442
|
+
if (updates.tags !== void 0) {
|
|
443
|
+
updateFields.push(`tags = $${paramIdx++}`);
|
|
444
|
+
params.push(updates.tags ?? null);
|
|
445
|
+
}
|
|
446
|
+
if (updateFields.length === 0) {
|
|
447
|
+
log(`No fields to update for batch edit`);
|
|
448
|
+
return 0;
|
|
449
|
+
}
|
|
450
|
+
updateFields.push(`updated_at = NOW()`);
|
|
451
|
+
let query = `
|
|
452
|
+
UPDATE job_queue
|
|
453
|
+
SET ${updateFields.join(", ")}
|
|
454
|
+
WHERE status = 'pending'`;
|
|
455
|
+
if (filters) {
|
|
456
|
+
if (filters.jobType) {
|
|
457
|
+
query += ` AND job_type = $${paramIdx++}`;
|
|
458
|
+
params.push(filters.jobType);
|
|
459
|
+
}
|
|
460
|
+
if (filters.priority !== void 0) {
|
|
461
|
+
query += ` AND priority = $${paramIdx++}`;
|
|
462
|
+
params.push(filters.priority);
|
|
463
|
+
}
|
|
464
|
+
if (filters.runAt) {
|
|
465
|
+
if (filters.runAt instanceof Date) {
|
|
466
|
+
query += ` AND run_at = $${paramIdx++}`;
|
|
467
|
+
params.push(filters.runAt);
|
|
468
|
+
} else if (typeof filters.runAt === "object") {
|
|
469
|
+
const ops = filters.runAt;
|
|
470
|
+
if (ops.gt) {
|
|
471
|
+
query += ` AND run_at > $${paramIdx++}`;
|
|
472
|
+
params.push(ops.gt);
|
|
473
|
+
}
|
|
474
|
+
if (ops.gte) {
|
|
475
|
+
query += ` AND run_at >= $${paramIdx++}`;
|
|
476
|
+
params.push(ops.gte);
|
|
477
|
+
}
|
|
478
|
+
if (ops.lt) {
|
|
479
|
+
query += ` AND run_at < $${paramIdx++}`;
|
|
480
|
+
params.push(ops.lt);
|
|
481
|
+
}
|
|
482
|
+
if (ops.lte) {
|
|
483
|
+
query += ` AND run_at <= $${paramIdx++}`;
|
|
484
|
+
params.push(ops.lte);
|
|
485
|
+
}
|
|
486
|
+
if (ops.eq) {
|
|
487
|
+
query += ` AND run_at = $${paramIdx++}`;
|
|
488
|
+
params.push(ops.eq);
|
|
489
|
+
}
|
|
490
|
+
}
|
|
491
|
+
}
|
|
492
|
+
if (filters.tags && filters.tags.values && filters.tags.values.length > 0) {
|
|
493
|
+
const mode = filters.tags.mode || "all";
|
|
494
|
+
const tagValues = filters.tags.values;
|
|
495
|
+
switch (mode) {
|
|
496
|
+
case "exact":
|
|
497
|
+
query += ` AND tags = $${paramIdx++}`;
|
|
498
|
+
params.push(tagValues);
|
|
499
|
+
break;
|
|
500
|
+
case "all":
|
|
501
|
+
query += ` AND tags @> $${paramIdx++}`;
|
|
502
|
+
params.push(tagValues);
|
|
503
|
+
break;
|
|
504
|
+
case "any":
|
|
505
|
+
query += ` AND tags && $${paramIdx++}`;
|
|
506
|
+
params.push(tagValues);
|
|
507
|
+
break;
|
|
508
|
+
case "none":
|
|
509
|
+
query += ` AND NOT (tags && $${paramIdx++})`;
|
|
510
|
+
params.push(tagValues);
|
|
511
|
+
break;
|
|
512
|
+
default:
|
|
513
|
+
query += ` AND tags @> $${paramIdx++}`;
|
|
514
|
+
params.push(tagValues);
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
}
|
|
518
|
+
query += "\nRETURNING id";
|
|
519
|
+
const result = await client.query(query, params);
|
|
520
|
+
const editedCount = result.rowCount || 0;
|
|
521
|
+
const metadata = {};
|
|
522
|
+
if (updates.payload !== void 0) metadata.payload = updates.payload;
|
|
523
|
+
if (updates.maxAttempts !== void 0)
|
|
524
|
+
metadata.maxAttempts = updates.maxAttempts;
|
|
525
|
+
if (updates.priority !== void 0) metadata.priority = updates.priority;
|
|
526
|
+
if (updates.runAt !== void 0) metadata.runAt = updates.runAt;
|
|
527
|
+
if (updates.timeoutMs !== void 0) metadata.timeoutMs = updates.timeoutMs;
|
|
528
|
+
if (updates.tags !== void 0) metadata.tags = updates.tags;
|
|
529
|
+
for (const row of result.rows) {
|
|
530
|
+
await recordJobEvent(pool, row.id, "edited" /* Edited */, metadata);
|
|
531
|
+
}
|
|
532
|
+
log(`Edited ${editedCount} pending jobs: ${JSON.stringify(metadata)}`);
|
|
533
|
+
return editedCount;
|
|
534
|
+
} catch (error) {
|
|
535
|
+
log(`Error editing pending jobs: ${error}`);
|
|
536
|
+
throw error;
|
|
537
|
+
} finally {
|
|
538
|
+
client.release();
|
|
539
|
+
}
|
|
540
|
+
};
|
|
340
541
|
var cancelAllUpcomingJobs = async (pool, filters) => {
|
|
341
542
|
const client = await pool.connect();
|
|
342
543
|
try {
|
|
@@ -424,14 +625,15 @@ var getAllJobs = async (pool, limit = 100, offset = 0) => {
|
|
|
424
625
|
const client = await pool.connect();
|
|
425
626
|
try {
|
|
426
627
|
const result = await client.query(
|
|
427
|
-
`SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue ORDER BY created_at DESC LIMIT $1 OFFSET $2`,
|
|
628
|
+
`SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", force_kill_on_timeout AS "forceKillOnTimeout", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason" FROM job_queue ORDER BY created_at DESC LIMIT $1 OFFSET $2`,
|
|
428
629
|
[limit, offset]
|
|
429
630
|
);
|
|
430
631
|
log(`Found ${result.rows.length} jobs (all)`);
|
|
431
632
|
return result.rows.map((job) => ({
|
|
432
633
|
...job,
|
|
433
634
|
payload: job.payload,
|
|
434
|
-
timeoutMs: job.timeoutMs
|
|
635
|
+
timeoutMs: job.timeoutMs,
|
|
636
|
+
forceKillOnTimeout: job.forceKillOnTimeout
|
|
435
637
|
}));
|
|
436
638
|
} catch (error) {
|
|
437
639
|
log(`Error getting all jobs: ${error}`);
|
|
@@ -532,6 +734,7 @@ var getJobsByTags = async (pool, tags, mode = "all", limit = 100, offset = 0) =>
|
|
|
532
734
|
...job,
|
|
533
735
|
payload: job.payload,
|
|
534
736
|
timeoutMs: job.timeoutMs,
|
|
737
|
+
forceKillOnTimeout: job.forceKillOnTimeout,
|
|
535
738
|
failureReason: job.failureReason
|
|
536
739
|
}));
|
|
537
740
|
} catch (error) {
|
|
@@ -546,7 +749,7 @@ var getJobsByTags = async (pool, tags, mode = "all", limit = 100, offset = 0) =>
|
|
|
546
749
|
var getJobs = async (pool, filters, limit = 100, offset = 0) => {
|
|
547
750
|
const client = await pool.connect();
|
|
548
751
|
try {
|
|
549
|
-
let query = `SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason", tags FROM job_queue`;
|
|
752
|
+
let query = `SELECT id, job_type AS "jobType", payload, status, max_attempts AS "maxAttempts", attempts, priority, run_at AS "runAt", timeout_ms AS "timeoutMs", force_kill_on_timeout AS "forceKillOnTimeout", created_at AS "createdAt", updated_at AS "updatedAt", started_at AS "startedAt", completed_at AS "completedAt", last_failed_at AS "lastFailedAt", locked_at AS "lockedAt", locked_by AS "lockedBy", error_history AS "errorHistory", failure_reason AS "failureReason", next_attempt_at AS "nextAttemptAt", last_failed_at AS "lastFailedAt", last_retried_at AS "lastRetriedAt", last_cancelled_at AS "lastCancelledAt", pending_reason AS "pendingReason", tags FROM job_queue`;
|
|
550
753
|
const params = [];
|
|
551
754
|
let where = [];
|
|
552
755
|
let paramIdx = 1;
|
|
@@ -625,6 +828,7 @@ var getJobs = async (pool, filters, limit = 100, offset = 0) => {
|
|
|
625
828
|
...job,
|
|
626
829
|
payload: job.payload,
|
|
627
830
|
timeoutMs: job.timeoutMs,
|
|
831
|
+
forceKillOnTimeout: job.forceKillOnTimeout,
|
|
628
832
|
failureReason: job.failureReason
|
|
629
833
|
}));
|
|
630
834
|
} catch (error) {
|
|
@@ -634,8 +838,178 @@ var getJobs = async (pool, filters, limit = 100, offset = 0) => {
|
|
|
634
838
|
client.release();
|
|
635
839
|
}
|
|
636
840
|
};
|
|
637
|
-
|
|
638
|
-
|
|
841
|
+
function validateHandlerSerializable(handler, jobType) {
|
|
842
|
+
try {
|
|
843
|
+
const handlerString = handler.toString();
|
|
844
|
+
if (handlerString.includes("this.") && !handlerString.match(/\([^)]*this[^)]*\)/)) {
|
|
845
|
+
throw new Error(
|
|
846
|
+
`Handler for job type "${jobType}" uses 'this' context which cannot be serialized. Use a regular function or avoid 'this' references when forceKillOnTimeout is enabled.`
|
|
847
|
+
);
|
|
848
|
+
}
|
|
849
|
+
if (handlerString.includes("[native code]")) {
|
|
850
|
+
throw new Error(
|
|
851
|
+
`Handler for job type "${jobType}" contains native code which cannot be serialized. Ensure your handler is a plain function when forceKillOnTimeout is enabled.`
|
|
852
|
+
);
|
|
853
|
+
}
|
|
854
|
+
try {
|
|
855
|
+
new Function("return " + handlerString);
|
|
856
|
+
} catch (parseError) {
|
|
857
|
+
throw new Error(
|
|
858
|
+
`Handler for job type "${jobType}" cannot be serialized: ${parseError instanceof Error ? parseError.message : String(parseError)}. When using forceKillOnTimeout, handlers must be serializable functions without closures over external variables.`
|
|
859
|
+
);
|
|
860
|
+
}
|
|
861
|
+
} catch (error) {
|
|
862
|
+
if (error instanceof Error) {
|
|
863
|
+
throw error;
|
|
864
|
+
}
|
|
865
|
+
throw new Error(
|
|
866
|
+
`Failed to validate handler serialization for job type "${jobType}": ${String(error)}`
|
|
867
|
+
);
|
|
868
|
+
}
|
|
869
|
+
}
|
|
870
|
+
async function runHandlerInWorker(handler, payload, timeoutMs, jobType) {
|
|
871
|
+
validateHandlerSerializable(handler, jobType);
|
|
872
|
+
return new Promise((resolve, reject) => {
|
|
873
|
+
const workerCode = `
|
|
874
|
+
(function() {
|
|
875
|
+
const { parentPort, workerData } = require('worker_threads');
|
|
876
|
+
const { handlerCode, payload, timeoutMs } = workerData;
|
|
877
|
+
|
|
878
|
+
// Create an AbortController for the handler
|
|
879
|
+
const controller = new AbortController();
|
|
880
|
+
const signal = controller.signal;
|
|
881
|
+
|
|
882
|
+
// Set up timeout
|
|
883
|
+
const timeoutId = setTimeout(() => {
|
|
884
|
+
controller.abort();
|
|
885
|
+
parentPort.postMessage({ type: 'timeout' });
|
|
886
|
+
}, timeoutMs);
|
|
887
|
+
|
|
888
|
+
try {
|
|
889
|
+
// Execute the handler
|
|
890
|
+
// Note: This uses Function constructor which requires the handler to be serializable.
|
|
891
|
+
// The handler should be validated before reaching this point.
|
|
892
|
+
let handlerFn;
|
|
893
|
+
try {
|
|
894
|
+
// Wrap handlerCode in parentheses to ensure it's treated as an expression
|
|
895
|
+
// This handles both arrow functions and regular functions
|
|
896
|
+
const wrappedCode = handlerCode.trim().startsWith('async') || handlerCode.trim().startsWith('function')
|
|
897
|
+
? handlerCode
|
|
898
|
+
: '(' + handlerCode + ')';
|
|
899
|
+
handlerFn = new Function('return ' + wrappedCode)();
|
|
900
|
+
} catch (parseError) {
|
|
901
|
+
clearTimeout(timeoutId);
|
|
902
|
+
parentPort.postMessage({
|
|
903
|
+
type: 'error',
|
|
904
|
+
error: {
|
|
905
|
+
message: 'Handler cannot be deserialized in worker thread. ' +
|
|
906
|
+
'Ensure your handler is a standalone function without closures over external variables. ' +
|
|
907
|
+
'Original error: ' + (parseError instanceof Error ? parseError.message : String(parseError)),
|
|
908
|
+
stack: parseError instanceof Error ? parseError.stack : undefined,
|
|
909
|
+
name: 'SerializationError',
|
|
910
|
+
},
|
|
911
|
+
});
|
|
912
|
+
return;
|
|
913
|
+
}
|
|
914
|
+
|
|
915
|
+
// Ensure handlerFn is actually a function
|
|
916
|
+
if (typeof handlerFn !== 'function') {
|
|
917
|
+
clearTimeout(timeoutId);
|
|
918
|
+
parentPort.postMessage({
|
|
919
|
+
type: 'error',
|
|
920
|
+
error: {
|
|
921
|
+
message: 'Handler deserialization did not produce a function. ' +
|
|
922
|
+
'Ensure your handler is a valid function when forceKillOnTimeout is enabled.',
|
|
923
|
+
name: 'SerializationError',
|
|
924
|
+
},
|
|
925
|
+
});
|
|
926
|
+
return;
|
|
927
|
+
}
|
|
928
|
+
|
|
929
|
+
handlerFn(payload, signal)
|
|
930
|
+
.then(() => {
|
|
931
|
+
clearTimeout(timeoutId);
|
|
932
|
+
parentPort.postMessage({ type: 'success' });
|
|
933
|
+
})
|
|
934
|
+
.catch((error) => {
|
|
935
|
+
clearTimeout(timeoutId);
|
|
936
|
+
parentPort.postMessage({
|
|
937
|
+
type: 'error',
|
|
938
|
+
error: {
|
|
939
|
+
message: error.message,
|
|
940
|
+
stack: error.stack,
|
|
941
|
+
name: error.name,
|
|
942
|
+
},
|
|
943
|
+
});
|
|
944
|
+
});
|
|
945
|
+
} catch (error) {
|
|
946
|
+
clearTimeout(timeoutId);
|
|
947
|
+
parentPort.postMessage({
|
|
948
|
+
type: 'error',
|
|
949
|
+
error: {
|
|
950
|
+
message: error.message,
|
|
951
|
+
stack: error.stack,
|
|
952
|
+
name: error.name,
|
|
953
|
+
},
|
|
954
|
+
});
|
|
955
|
+
}
|
|
956
|
+
})();
|
|
957
|
+
`;
|
|
958
|
+
const worker = new Worker(workerCode, {
|
|
959
|
+
eval: true,
|
|
960
|
+
workerData: {
|
|
961
|
+
handlerCode: handler.toString(),
|
|
962
|
+
payload,
|
|
963
|
+
timeoutMs
|
|
964
|
+
}
|
|
965
|
+
});
|
|
966
|
+
let resolved = false;
|
|
967
|
+
worker.on("message", (message) => {
|
|
968
|
+
if (resolved) return;
|
|
969
|
+
resolved = true;
|
|
970
|
+
if (message.type === "success") {
|
|
971
|
+
resolve();
|
|
972
|
+
} else if (message.type === "timeout") {
|
|
973
|
+
const timeoutError = new Error(
|
|
974
|
+
`Job timed out after ${timeoutMs} ms and was forcefully terminated`
|
|
975
|
+
);
|
|
976
|
+
timeoutError.failureReason = "timeout" /* Timeout */;
|
|
977
|
+
reject(timeoutError);
|
|
978
|
+
} else if (message.type === "error") {
|
|
979
|
+
const error = new Error(message.error.message);
|
|
980
|
+
error.stack = message.error.stack;
|
|
981
|
+
error.name = message.error.name;
|
|
982
|
+
reject(error);
|
|
983
|
+
}
|
|
984
|
+
});
|
|
985
|
+
worker.on("error", (error) => {
|
|
986
|
+
if (resolved) return;
|
|
987
|
+
resolved = true;
|
|
988
|
+
reject(error);
|
|
989
|
+
});
|
|
990
|
+
worker.on("exit", (code) => {
|
|
991
|
+
if (resolved) return;
|
|
992
|
+
if (code !== 0) {
|
|
993
|
+
resolved = true;
|
|
994
|
+
reject(new Error(`Worker stopped with exit code ${code}`));
|
|
995
|
+
}
|
|
996
|
+
});
|
|
997
|
+
setTimeout(() => {
|
|
998
|
+
if (!resolved) {
|
|
999
|
+
resolved = true;
|
|
1000
|
+
worker.terminate().then(() => {
|
|
1001
|
+
const timeoutError = new Error(
|
|
1002
|
+
`Job timed out after ${timeoutMs} ms and was forcefully terminated`
|
|
1003
|
+
);
|
|
1004
|
+
timeoutError.failureReason = "timeout" /* Timeout */;
|
|
1005
|
+
reject(timeoutError);
|
|
1006
|
+
}).catch((err) => {
|
|
1007
|
+
reject(err);
|
|
1008
|
+
});
|
|
1009
|
+
}
|
|
1010
|
+
}, timeoutMs + 100);
|
|
1011
|
+
});
|
|
1012
|
+
}
|
|
639
1013
|
async function processJobWithHandlers(pool, job, jobHandlers) {
|
|
640
1014
|
const handler = jobHandlers[job.jobType];
|
|
641
1015
|
if (!handler) {
|
|
@@ -653,26 +1027,31 @@ async function processJobWithHandlers(pool, job, jobHandlers) {
|
|
|
653
1027
|
return;
|
|
654
1028
|
}
|
|
655
1029
|
const timeoutMs = job.timeoutMs ?? void 0;
|
|
1030
|
+
const forceKillOnTimeout = job.forceKillOnTimeout ?? false;
|
|
656
1031
|
let timeoutId;
|
|
657
1032
|
const controller = new AbortController();
|
|
658
1033
|
try {
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
await Promise.race([
|
|
662
|
-
jobPromise,
|
|
663
|
-
new Promise((_, reject) => {
|
|
664
|
-
timeoutId = setTimeout(() => {
|
|
665
|
-
controller.abort();
|
|
666
|
-
const timeoutError = new Error(
|
|
667
|
-
`Job timed out after ${timeoutMs} ms`
|
|
668
|
-
);
|
|
669
|
-
timeoutError.failureReason = "timeout" /* Timeout */;
|
|
670
|
-
reject(timeoutError);
|
|
671
|
-
}, timeoutMs);
|
|
672
|
-
})
|
|
673
|
-
]);
|
|
1034
|
+
if (forceKillOnTimeout && timeoutMs && timeoutMs > 0) {
|
|
1035
|
+
await runHandlerInWorker(handler, job.payload, timeoutMs, job.jobType);
|
|
674
1036
|
} else {
|
|
675
|
-
|
|
1037
|
+
const jobPromise = handler(job.payload, controller.signal);
|
|
1038
|
+
if (timeoutMs && timeoutMs > 0) {
|
|
1039
|
+
await Promise.race([
|
|
1040
|
+
jobPromise,
|
|
1041
|
+
new Promise((_, reject) => {
|
|
1042
|
+
timeoutId = setTimeout(() => {
|
|
1043
|
+
controller.abort();
|
|
1044
|
+
const timeoutError = new Error(
|
|
1045
|
+
`Job timed out after ${timeoutMs} ms`
|
|
1046
|
+
);
|
|
1047
|
+
timeoutError.failureReason = "timeout" /* Timeout */;
|
|
1048
|
+
reject(timeoutError);
|
|
1049
|
+
}, timeoutMs);
|
|
1050
|
+
})
|
|
1051
|
+
]);
|
|
1052
|
+
} else {
|
|
1053
|
+
await jobPromise;
|
|
1054
|
+
}
|
|
676
1055
|
}
|
|
677
1056
|
if (timeoutId) clearTimeout(timeoutId);
|
|
678
1057
|
await completeJob(pool, job.id);
|
|
@@ -805,12 +1184,27 @@ var createProcessor = (pool, handlers, options = {}) => {
|
|
|
805
1184
|
isRunning: () => running
|
|
806
1185
|
};
|
|
807
1186
|
};
|
|
1187
|
+
function loadPemOrFile(value) {
|
|
1188
|
+
if (!value) return void 0;
|
|
1189
|
+
if (value.startsWith("file://")) {
|
|
1190
|
+
const filePath = value.slice(7);
|
|
1191
|
+
return fs.readFileSync(filePath, "utf8");
|
|
1192
|
+
}
|
|
1193
|
+
return value;
|
|
1194
|
+
}
|
|
808
1195
|
var createPool = (config) => {
|
|
809
1196
|
let searchPath;
|
|
1197
|
+
let ssl = void 0;
|
|
1198
|
+
let customCA;
|
|
1199
|
+
let sslmode;
|
|
810
1200
|
if (config.connectionString) {
|
|
811
1201
|
try {
|
|
812
1202
|
const url = new URL(config.connectionString);
|
|
813
1203
|
searchPath = url.searchParams.get("search_path") || void 0;
|
|
1204
|
+
sslmode = url.searchParams.get("sslmode") || void 0;
|
|
1205
|
+
if (sslmode === "no-verify") {
|
|
1206
|
+
ssl = { rejectUnauthorized: false };
|
|
1207
|
+
}
|
|
814
1208
|
} catch (e) {
|
|
815
1209
|
const parsed = parse(config.connectionString);
|
|
816
1210
|
if (parsed.options) {
|
|
@@ -819,9 +1213,52 @@ var createPool = (config) => {
|
|
|
819
1213
|
searchPath = match[1];
|
|
820
1214
|
}
|
|
821
1215
|
}
|
|
1216
|
+
sslmode = typeof parsed.sslmode === "string" ? parsed.sslmode : void 0;
|
|
1217
|
+
if (sslmode === "no-verify") {
|
|
1218
|
+
ssl = { rejectUnauthorized: false };
|
|
1219
|
+
}
|
|
1220
|
+
}
|
|
1221
|
+
}
|
|
1222
|
+
if (config.ssl) {
|
|
1223
|
+
if (typeof config.ssl.ca === "string") {
|
|
1224
|
+
customCA = config.ssl.ca;
|
|
1225
|
+
} else if (typeof process.env.PGSSLROOTCERT === "string") {
|
|
1226
|
+
customCA = process.env.PGSSLROOTCERT;
|
|
1227
|
+
} else {
|
|
1228
|
+
customCA = void 0;
|
|
822
1229
|
}
|
|
1230
|
+
const caValue = typeof customCA === "string" ? loadPemOrFile(customCA) : void 0;
|
|
1231
|
+
ssl = {
|
|
1232
|
+
...ssl,
|
|
1233
|
+
...caValue ? { ca: caValue } : {},
|
|
1234
|
+
cert: loadPemOrFile(
|
|
1235
|
+
typeof config.ssl.cert === "string" ? config.ssl.cert : process.env.PGSSLCERT
|
|
1236
|
+
),
|
|
1237
|
+
key: loadPemOrFile(
|
|
1238
|
+
typeof config.ssl.key === "string" ? config.ssl.key : process.env.PGSSLKEY
|
|
1239
|
+
),
|
|
1240
|
+
rejectUnauthorized: config.ssl.rejectUnauthorized !== void 0 ? config.ssl.rejectUnauthorized : true
|
|
1241
|
+
};
|
|
823
1242
|
}
|
|
824
|
-
|
|
1243
|
+
if (sslmode && customCA) {
|
|
1244
|
+
const warning = `
|
|
1245
|
+
|
|
1246
|
+
\x1B[33m**************************************************
|
|
1247
|
+
\u26A0\uFE0F WARNING: SSL CONFIGURATION ISSUE
|
|
1248
|
+
**************************************************
|
|
1249
|
+
Both sslmode ('${sslmode}') is set in the connection string
|
|
1250
|
+
and a custom CA is provided (via config.ssl.ca or PGSSLROOTCERT).
|
|
1251
|
+
This combination may cause connection failures or unexpected behavior.
|
|
1252
|
+
|
|
1253
|
+
Recommended: Remove sslmode from the connection string when using a custom CA.
|
|
1254
|
+
**************************************************\x1B[0m
|
|
1255
|
+
`;
|
|
1256
|
+
console.warn(warning);
|
|
1257
|
+
}
|
|
1258
|
+
const pool = new Pool({
|
|
1259
|
+
...config,
|
|
1260
|
+
...ssl ? { ssl } : {}
|
|
1261
|
+
});
|
|
825
1262
|
if (searchPath) {
|
|
826
1263
|
pool.on("connect", (client) => {
|
|
827
1264
|
client.query(`SET search_path TO ${searchPath}`);
|
|
@@ -830,6 +1267,75 @@ var createPool = (config) => {
|
|
|
830
1267
|
return pool;
|
|
831
1268
|
};
|
|
832
1269
|
|
|
1270
|
+
// src/handler-validation.ts
|
|
1271
|
+
/**
 * Statically inspects a job handler's source text (via
 * Function.prototype.toString) to decide whether it can be serialized and
 * rebuilt inside a worker thread, as required when forceKillOnTimeout is
 * enabled.
 *
 * @param {Function} handler - The job handler to inspect.
 * @param {string} [jobType] - Optional job type name, used only to label
 *   the diagnostic messages.
 * @returns {{ isSerializable: boolean, error?: string }} `error` carries the
 *   failure reason, or a warning when `isSerializable` is true but closures
 *   over external variables are suspected.
 */
function validateHandlerSerializable2(handler, jobType) {
  const notSerializable = (error) => ({ isSerializable: false, error });
  try {
    const source = handler.toString();
    const label = jobType ? `job type "${jobType}"` : "handler";
    // 'this' references cannot survive re-parsing in a worker. The second
    // check tolerates 'this' appearing only inside a parenthesized span
    // (e.g. the parameter list).
    const usesThisContext = source.includes("this.") && !/\([^)]*this[^)]*\)/.test(source);
    if (usesThisContext) {
      return notSerializable(
        `Handler for ${label} uses 'this' context which cannot be serialized. Use a regular function or avoid 'this' references when forceKillOnTimeout is enabled.`
      );
    }
    // Built-ins and bound functions stringify to "[native code]" stubs and
    // cannot be reconstructed from source.
    if (source.includes("[native code]")) {
      return notSerializable(
        `Handler for ${label} contains native code which cannot be serialized. Ensure your handler is a plain function when forceKillOnTimeout is enabled.`
      );
    }
    // Round-trip check: the source must parse back into a function
    // expression, exactly the way a worker thread will reconstruct it.
    // (Deliberate use of new Function — parsing only, nothing is invoked.)
    try {
      new Function("return " + source);
    } catch (parseError) {
      const reason = parseError instanceof Error ? parseError.message : String(parseError);
      return notSerializable(
        `Handler for ${label} cannot be serialized: ${reason}. When using forceKillOnTimeout, handlers must be serializable functions without closures over external variables.`
      );
    }
    // Heuristic: a const/let binding immediately followed by an async arrow
    // suggests the handler closes over locals that won't exist after
    // re-parsing. Serializable, but worth a warning.
    if (/(?:const|let)\s+\w+\s*=\s*[^;]+;\s*async\s*\(/.test(source)) {
      return {
        isSerializable: true,
        error: `Warning: Handler for ${label} may have closures over external variables. Test thoroughly with forceKillOnTimeout enabled. If the handler fails to execute in a worker thread, ensure all dependencies are imported within the handler function.`
      };
    }
    return { isSerializable: true };
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    return notSerializable(
      `Failed to validate handler serialization${jobType ? ` for job type "${jobType}"` : ""}: ${reason}`
    );
  }
}
|
|
1311
|
+
/**
 * Runs a dynamic serialization smoke test on a job handler: after the static
 * checks in validateHandlerSerializable2 pass, the handler is re-parsed from
 * its source with `new Function` (mirroring how a worker thread reconstructs
 * it) and invoked with a dummy payload and abort signal for up to 100 ms.
 *
 * Fixes over the previous version:
 * - An async `ReferenceError` rejection (the classic symptom of a closure
 *   over a variable that no longer exists after re-parsing) was silently
 *   swallowed and reported as serializable; it is now reported as a failure.
 * - The 100 ms timeout timer is now cleared, so it no longer keeps the
 *   event loop alive after the race settles.
 *
 * @param {Function} handler - The job handler to test.
 * @param {string} [jobType] - Optional job type name used in diagnostics.
 * @returns {Promise<{ isSerializable: boolean, error?: string }>}
 */
async function testHandlerSerialization(handler, jobType) {
  // Cheap static checks first; bail out early on obvious problems.
  const basicValidation = validateHandlerSerializable2(handler, jobType);
  if (!basicValidation.isSerializable) {
    return basicValidation;
  }
  let timeoutId;
  try {
    // Rebuild the handler from its source text, exactly as a worker thread
    // would after receiving the serialized function.
    const handlerString = handler.toString();
    const handlerFn = new Function("return " + handlerString)();
    const testPromise = handlerFn({}, new AbortController().signal);
    const timeoutPromise = new Promise((_, reject) => {
      timeoutId = setTimeout(
        () => reject(new Error("Handler test timeout")),
        100
      );
    });
    try {
      await Promise.race([testPromise, timeoutPromise]);
    } catch (execError) {
      if (execError instanceof Error && execError.message === "Handler test timeout") {
        // Still running after 100 ms — it parsed and started executing,
        // which is all this smoke test needs to establish.
        return { isSerializable: true };
      }
      if (execError instanceof ReferenceError) {
        // An unresolved name at execution time almost always means the
        // handler closed over a variable that is gone after re-parsing —
        // the exact failure mode this test exists to catch.
        return {
          isSerializable: false,
          error: `Handler failed serialization test: ${execError.message}`
        };
      }
      // Other rejections (e.g. the dummy empty payload being rejected by
      // the handler's own validation) do not indicate a serialization
      // problem, so fall through to success.
    }
    return { isSerializable: true };
  } catch (error) {
    // Synchronous failures: source failed to parse/evaluate, or the
    // reconstructed function threw before returning a promise.
    return {
      isSerializable: false,
      error: `Handler failed serialization test: ${error instanceof Error ? error.message : String(error)}`
    };
  } finally {
    // Don't leave the 100 ms timer pending regardless of which branch won.
    if (timeoutId) clearTimeout(timeoutId);
  }
}
|
|
1338
|
+
|
|
833
1339
|
// src/index.ts
|
|
834
1340
|
var initJobQueue = (config) => {
|
|
835
1341
|
const { databaseConfig } = config;
|
|
@@ -863,6 +1369,14 @@ var initJobQueue = (config) => {
|
|
|
863
1369
|
(jobId) => cancelJob(pool, jobId),
|
|
864
1370
|
config.verbose ?? false
|
|
865
1371
|
),
|
|
1372
|
+
editJob: withLogContext(
|
|
1373
|
+
(jobId, updates) => editJob(pool, jobId, updates),
|
|
1374
|
+
config.verbose ?? false
|
|
1375
|
+
),
|
|
1376
|
+
editAllPendingJobs: withLogContext(
|
|
1377
|
+
(filters, updates) => editAllPendingJobs(pool, filters, updates),
|
|
1378
|
+
config.verbose ?? false
|
|
1379
|
+
),
|
|
866
1380
|
cancelAllUpcomingJobs: withLogContext(
|
|
867
1381
|
(filters) => cancelAllUpcomingJobs(pool, filters),
|
|
868
1382
|
config.verbose ?? false
|
|
@@ -891,6 +1405,6 @@ var withLogContext = (fn, verbose) => (...args) => {
|
|
|
891
1405
|
return fn(...args);
|
|
892
1406
|
};
|
|
893
1407
|
|
|
894
|
-
export { FailureReason, JobEventType, initJobQueue };
|
|
1408
|
+
export { FailureReason, JobEventType, initJobQueue, testHandlerSerialization, validateHandlerSerializable2 as validateHandlerSerializable };
|
|
895
1409
|
//# sourceMappingURL=index.js.map
|
|
896
1410
|
//# sourceMappingURL=index.js.map
|