bunqueue 1.9.7 → 1.9.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/application/backgroundTasks.d.ts +3 -6
- package/dist/application/backgroundTasks.d.ts.map +1 -1
- package/dist/application/backgroundTasks.js +9 -172
- package/dist/application/backgroundTasks.js.map +1 -1
- package/dist/application/cleanupTasks.d.ts +1 -1
- package/dist/application/cleanupTasks.d.ts.map +1 -1
- package/dist/application/cleanupTasks.js +57 -22
- package/dist/application/cleanupTasks.js.map +1 -1
- package/dist/application/clientTracking.d.ts +22 -0
- package/dist/application/clientTracking.d.ts.map +1 -0
- package/dist/application/clientTracking.js +122 -0
- package/dist/application/clientTracking.js.map +1 -0
- package/dist/application/contextFactory.d.ts +97 -0
- package/dist/application/contextFactory.d.ts.map +1 -0
- package/dist/application/contextFactory.js +169 -0
- package/dist/application/contextFactory.js.map +1 -0
- package/dist/application/dependencyProcessor.d.ts +11 -0
- package/dist/application/dependencyProcessor.d.ts.map +1 -0
- package/dist/application/dependencyProcessor.js +69 -0
- package/dist/application/dependencyProcessor.js.map +1 -0
- package/dist/application/dlqManager.d.ts +12 -0
- package/dist/application/dlqManager.d.ts.map +1 -1
- package/dist/application/dlqManager.js +36 -0
- package/dist/application/dlqManager.js.map +1 -1
- package/dist/application/lockManager.d.ts +2 -49
- package/dist/application/lockManager.d.ts.map +1 -1
- package/dist/application/lockManager.js +73 -262
- package/dist/application/lockManager.js.map +1 -1
- package/dist/application/lockOperations.d.ts +39 -0
- package/dist/application/lockOperations.d.ts.map +1 -0
- package/dist/application/lockOperations.js +101 -0
- package/dist/application/lockOperations.js.map +1 -0
- package/dist/application/operations/ack.d.ts +0 -5
- package/dist/application/operations/ack.d.ts.map +1 -1
- package/dist/application/operations/ack.js +30 -258
- package/dist/application/operations/ack.js.map +1 -1
- package/dist/application/operations/ackHelpers.d.ts +78 -0
- package/dist/application/operations/ackHelpers.d.ts.map +1 -0
- package/dist/application/operations/ackHelpers.js +162 -0
- package/dist/application/operations/ackHelpers.js.map +1 -0
- package/dist/application/operations/jobManagement.d.ts +2 -0
- package/dist/application/operations/jobManagement.d.ts.map +1 -1
- package/dist/application/operations/jobManagement.js +8 -0
- package/dist/application/operations/jobManagement.js.map +1 -1
- package/dist/application/operations/push.d.ts.map +1 -1
- package/dist/application/operations/push.js +8 -2
- package/dist/application/operations/push.js.map +1 -1
- package/dist/application/operations/queryOperations.d.ts +11 -0
- package/dist/application/operations/queryOperations.d.ts.map +1 -1
- package/dist/application/operations/queryOperations.js +32 -0
- package/dist/application/operations/queryOperations.js.map +1 -1
- package/dist/application/queueManager.d.ts +3 -11
- package/dist/application/queueManager.d.ts.map +1 -1
- package/dist/application/queueManager.js +90 -243
- package/dist/application/queueManager.js.map +1 -1
- package/dist/application/stallDetection.d.ts +11 -0
- package/dist/application/stallDetection.d.ts.map +1 -0
- package/dist/application/stallDetection.js +128 -0
- package/dist/application/stallDetection.js.map +1 -0
- package/dist/domain/queue/dependencyTracker.d.ts +74 -0
- package/dist/domain/queue/dependencyTracker.d.ts.map +1 -0
- package/dist/domain/queue/dependencyTracker.js +126 -0
- package/dist/domain/queue/dependencyTracker.js.map +1 -0
- package/dist/domain/queue/dlqShard.d.ts +59 -0
- package/dist/domain/queue/dlqShard.d.ts.map +1 -0
- package/dist/domain/queue/dlqShard.js +165 -0
- package/dist/domain/queue/dlqShard.js.map +1 -0
- package/dist/domain/queue/limiterManager.d.ts +44 -0
- package/dist/domain/queue/limiterManager.d.ts.map +1 -0
- package/dist/domain/queue/limiterManager.js +99 -0
- package/dist/domain/queue/limiterManager.js.map +1 -0
- package/dist/domain/queue/shard.d.ts +29 -122
- package/dist/domain/queue/shard.d.ts.map +1 -1
- package/dist/domain/queue/shard.js +152 -426
- package/dist/domain/queue/shard.js.map +1 -1
- package/dist/domain/queue/temporalManager.d.ts +81 -0
- package/dist/domain/queue/temporalManager.d.ts.map +1 -0
- package/dist/domain/queue/temporalManager.js +149 -0
- package/dist/domain/queue/temporalManager.js.map +1 -0
- package/dist/domain/queue/uniqueKeyManager.d.ts +32 -0
- package/dist/domain/queue/uniqueKeyManager.d.ts.map +1 -0
- package/dist/domain/queue/uniqueKeyManager.js +87 -0
- package/dist/domain/queue/uniqueKeyManager.js.map +1 -0
- package/dist/infrastructure/backup/s3Backup.d.ts +3 -40
- package/dist/infrastructure/backup/s3Backup.d.ts.map +1 -1
- package/dist/infrastructure/backup/s3Backup.js +10 -182
- package/dist/infrastructure/backup/s3Backup.js.map +1 -1
- package/dist/infrastructure/backup/s3BackupConfig.d.ts +67 -0
- package/dist/infrastructure/backup/s3BackupConfig.d.ts.map +1 -0
- package/dist/infrastructure/backup/s3BackupConfig.js +48 -0
- package/dist/infrastructure/backup/s3BackupConfig.js.map +1 -0
- package/dist/infrastructure/backup/s3BackupOperations.d.ts +23 -0
- package/dist/infrastructure/backup/s3BackupOperations.d.ts.map +1 -0
- package/dist/infrastructure/backup/s3BackupOperations.js +170 -0
- package/dist/infrastructure/backup/s3BackupOperations.js.map +1 -0
- package/dist/infrastructure/persistence/sqlite.d.ts +4 -13
- package/dist/infrastructure/persistence/sqlite.d.ts.map +1 -1
- package/dist/infrastructure/persistence/sqlite.js +23 -178
- package/dist/infrastructure/persistence/sqlite.js.map +1 -1
- package/dist/infrastructure/persistence/sqliteBatch.d.ts +38 -0
- package/dist/infrastructure/persistence/sqliteBatch.d.ts.map +1 -0
- package/dist/infrastructure/persistence/sqliteBatch.js +124 -0
- package/dist/infrastructure/persistence/sqliteBatch.js.map +1 -0
- package/dist/infrastructure/persistence/sqliteSerializer.d.ts +17 -0
- package/dist/infrastructure/persistence/sqliteSerializer.d.ts.map +1 -0
- package/dist/infrastructure/persistence/sqliteSerializer.js +81 -0
- package/dist/infrastructure/persistence/sqliteSerializer.js.map +1 -0
- package/dist/infrastructure/server/handler.d.ts.map +1 -1
- package/dist/infrastructure/server/handler.js +1 -186
- package/dist/infrastructure/server/handler.js.map +1 -1
- package/dist/infrastructure/server/handlerRoutes.d.ts +23 -0
- package/dist/infrastructure/server/handlerRoutes.d.ts.map +1 -0
- package/dist/infrastructure/server/handlerRoutes.js +190 -0
- package/dist/infrastructure/server/handlerRoutes.js.map +1 -0
- package/dist/infrastructure/server/http.d.ts +4 -25
- package/dist/infrastructure/server/http.d.ts.map +1 -1
- package/dist/infrastructure/server/http.js +43 -285
- package/dist/infrastructure/server/http.js.map +1 -1
- package/dist/infrastructure/server/httpEndpoints.d.ts +19 -0
- package/dist/infrastructure/server/httpEndpoints.d.ts.map +1 -0
- package/dist/infrastructure/server/httpEndpoints.js +151 -0
- package/dist/infrastructure/server/httpEndpoints.js.map +1 -0
- package/dist/infrastructure/server/sseHandler.d.ts +27 -0
- package/dist/infrastructure/server/sseHandler.d.ts.map +1 -0
- package/dist/infrastructure/server/sseHandler.js +77 -0
- package/dist/infrastructure/server/sseHandler.js.map +1 -0
- package/dist/infrastructure/server/wsHandler.d.ts +31 -0
- package/dist/infrastructure/server/wsHandler.d.ts.map +1 -0
- package/dist/infrastructure/server/wsHandler.js +63 -0
- package/dist/infrastructure/server/wsHandler.js.map +1 -0
- package/dist/mcp/index.js +3 -465
- package/dist/mcp/index.js.map +1 -1
- package/dist/mcp/mcpHandlers.d.ts +129 -0
- package/dist/mcp/mcpHandlers.d.ts.map +1 -0
- package/dist/mcp/mcpHandlers.js +204 -0
- package/dist/mcp/mcpHandlers.js.map +1 -0
- package/dist/mcp/mcpTools.d.ts +15 -0
- package/dist/mcp/mcpTools.d.ts.map +1 -0
- package/dist/mcp/mcpTools.js +277 -0
- package/dist/mcp/mcpTools.js.map +1 -0
- package/package.json +2 -2
- package/dist/cli/dashboard.d.ts +0 -32
- package/dist/cli/dashboard.d.ts.map +0 -1
- package/dist/cli/dashboard.js +0 -183
- package/dist/cli/dashboard.js.map +0 -1
package/dist/application/lockManager.js

@@ -2,217 +2,14 @@
  * Lock Manager - Job lock and client tracking
  * Handles BullMQ-style lock-based job ownership
  */
-import {
+import { isLockExpired } from '../domain/types/job';
 import { queueLog } from '../shared/logger';
 import { shardIndex, processingShardIndex } from '../shared/hash';
 import { withWriteLock } from '../shared/lock';
-//
-
-
-
- */
-export function createLock(jobId, owner, ctx, ttl = DEFAULT_LOCK_TTL) {
-    const loc = ctx.jobIndex.get(jobId);
-    if (loc?.type !== 'processing')
-        return null;
-    // Check if lock already exists (shouldn't happen, but defensive)
-    if (ctx.jobLocks.has(jobId)) {
-        queueLog.warn('Lock already exists for job', { jobId: String(jobId), owner });
-        return null;
-    }
-    const lock = createJobLock(jobId, owner, ttl);
-    ctx.jobLocks.set(jobId, lock);
-    return lock.token;
-}
-/**
- * Verify that a token is valid for a job.
- * @returns true if token matches the active lock
- */
-export function verifyLock(jobId, token, ctx) {
-    const lock = ctx.jobLocks.get(jobId);
-    if (!lock)
-        return false;
-    if (lock.token !== token)
-        return false;
-    if (isLockExpired(lock))
-        return false;
-    return true;
-}
-/**
- * Renew a lock with the given token.
- * @returns true if renewal succeeded, false if token invalid or lock expired
- */
-export function renewJobLock(jobId, token, ctx, newTtl) {
-    const lock = ctx.jobLocks.get(jobId);
-    if (!lock)
-        return false;
-    if (lock.token !== token)
-        return false;
-    if (isLockExpired(lock)) {
-        // Lock already expired, remove it
-        ctx.jobLocks.delete(jobId);
-        return false;
-    }
-    renewLock(lock, newTtl);
-    // Also update lastHeartbeat on the job (for legacy stall detection compatibility)
-    const loc = ctx.jobIndex.get(jobId);
-    if (loc?.type === 'processing') {
-        const job = ctx.processingShards[loc.shardIdx].get(jobId);
-        if (job)
-            job.lastHeartbeat = Date.now();
-    }
-    return true;
-}
-/**
- * Renew locks for multiple jobs (batch operation).
- * @returns Array of jobIds that were successfully renewed
- */
-export function renewJobLockBatch(items, ctx) {
-    const renewed = [];
-    for (const item of items) {
-        if (renewJobLock(item.id, item.token, ctx, item.ttl)) {
-            renewed.push(String(item.id));
-        }
-    }
-    return renewed;
-}
-/**
- * Release a lock when job is completed or failed.
- * Should be called by ACK/FAIL operations.
- */
-export function releaseLock(jobId, ctx, token) {
-    const lock = ctx.jobLocks.get(jobId);
-    if (!lock)
-        return true; // No lock to release
-    // If token provided, verify it matches
-    if (token && lock.token !== token) {
-        queueLog.warn('Token mismatch on lock release', {
-            jobId: String(jobId),
-            expected: lock.token.substring(0, 8),
-            got: token.substring(0, 8),
-        });
-        return false;
-    }
-    ctx.jobLocks.delete(jobId);
-    return true;
-}
-/**
- * Get lock info for a job (for debugging/monitoring).
- */
-export function getLockInfo(jobId, ctx) {
-    return ctx.jobLocks.get(jobId) ?? null;
-}
-// ============ Client-Job Tracking ============
-/**
- * Register a job as owned by a client (called on PULL).
- */
-export function registerClientJob(clientId, jobId, ctx) {
-    let jobs = ctx.clientJobs.get(clientId);
-    if (!jobs) {
-        jobs = new Set();
-        ctx.clientJobs.set(clientId, jobs);
-    }
-    jobs.add(jobId);
-}
-/**
- * Unregister a job from a client (called on ACK/FAIL).
- */
-export function unregisterClientJob(clientId, jobId, ctx) {
-    if (!clientId)
-        return;
-    const jobs = ctx.clientJobs.get(clientId);
-    if (jobs) {
-        jobs.delete(jobId);
-        if (jobs.size === 0) {
-            ctx.clientJobs.delete(clientId);
-        }
-    }
-}
-/**
- * Release all jobs owned by a client back to queue (called on TCP disconnect).
- * Returns the number of jobs released.
- *
- * Uses proper locking to prevent race conditions.
- */
-export async function releaseClientJobs(clientId, ctx) {
-    const jobs = ctx.clientJobs.get(clientId);
-    if (!jobs || jobs.size === 0) {
-        ctx.clientJobs.delete(clientId);
-        return 0;
-    }
-    // Phase 1: Collect jobs to release (read-only, no locks needed)
-    const jobsToRelease = [];
-    for (const jobId of jobs) {
-        const loc = ctx.jobIndex.get(jobId);
-        if (loc?.type !== 'processing')
-            continue;
-        const procIdx = loc.shardIdx;
-        const job = ctx.processingShards[procIdx].get(jobId);
-        if (!job)
-            continue;
-        jobsToRelease.push({
-            jobId,
-            procIdx,
-            queueShardIdx: shardIndex(job.queue),
-        });
-    }
-    if (jobsToRelease.length === 0) {
-        ctx.clientJobs.delete(clientId);
-        return 0;
-    }
-    // Phase 2: Group by processing shard for efficient locking
-    const byProcShard = new Map();
-    for (const item of jobsToRelease) {
-        let list = byProcShard.get(item.procIdx);
-        if (!list) {
-            list = [];
-            byProcShard.set(item.procIdx, list);
-        }
-        list.push(item);
-    }
-    let released = 0;
-    const now = Date.now();
-    // Phase 3: Process each processing shard with proper locking
-    for (const [procIdx, items] of byProcShard) {
-        await withWriteLock(ctx.processingLocks[procIdx], async () => {
-            for (const { jobId, queueShardIdx } of items) {
-                const job = ctx.processingShards[procIdx].get(jobId);
-                if (!job)
-                    continue;
-                // Acquire shard lock for queue modifications
-                await withWriteLock(ctx.shardLocks[queueShardIdx], () => {
-                    const shard = ctx.shards[queueShardIdx];
-                    // Remove from processing
-                    ctx.processingShards[procIdx].delete(jobId);
-                    // Release lock if exists
-                    ctx.jobLocks.delete(jobId);
-                    // Release concurrency
-                    shard.releaseConcurrency(job.queue);
-                    // Release group if active
-                    if (job.groupId) {
-                        shard.releaseGroup(job.queue, job.groupId);
-                    }
-                    // Reset job state for retry
-                    job.startedAt = null;
-                    job.lastHeartbeat = now;
-                    // Re-queue the job
-                    shard.getQueue(job.queue).push(job);
-                    const isDelayed = job.runAt > now;
-                    shard.incrementQueued(jobId, isDelayed, job.createdAt, job.queue, job.runAt);
-                    ctx.jobIndex.set(jobId, { type: 'queue', shardIdx: queueShardIdx, queueName: job.queue });
-                    released++;
-                });
-            }
-        });
-    }
-    // Clear client tracking
-    ctx.clientJobs.delete(clientId);
-    if (released > 0) {
-        queueLog.info('Released client jobs', { clientId: clientId.substring(0, 8), released });
-    }
-    return released;
-}
-// ============ Lock Expiration Check ============
+// Re-export lock operations
+export { createLock, verifyLock, renewJobLock, renewJobLockBatch, releaseLock, getLockInfo, } from './lockOperations';
+// Re-export client tracking
+export { registerClientJob, unregisterClientJob, releaseClientJobs } from './clientTracking';
 /**
  * Check and handle expired locks.
  * Jobs with expired locks are requeued for retry.
@@ -245,63 +42,77 @@ export async function checkExpiredLocks(ctx) {
     for (const [procIdx, items] of byProcShard) {
         await withWriteLock(ctx.processingLocks[procIdx], async () => {
             for (const { jobId, lock } of items) {
-                const job = ctx.processingShards[procIdx].get(jobId);
-                if (job) {
-                    const idx = shardIndex(job.queue);
-                    await withWriteLock(ctx.shardLocks[idx], () => {
-                        const shard = ctx.shards[idx];
-                        const queue = shard.getQueue(job.queue);
-                        // Remove from processing
-                        ctx.processingShards[procIdx].delete(jobId);
-                        // Increment attempts and reset state
-                        job.attempts++;
-                        job.startedAt = null;
-                        job.lastHeartbeat = now;
-                        job.stallCount++;
-                        // Check if max stalls exceeded
-                        const stallConfig = shard.getStallConfig(job.queue);
-                        if (stallConfig.maxStalls > 0 && job.stallCount >= stallConfig.maxStalls) {
-                            shard.addToDlq(job, "stalled" /* FailureReason.Stalled */, `Lock expired after ${lock.renewalCount} renewals`);
-                            ctx.jobIndex.set(jobId, { type: 'dlq', queueName: job.queue });
-                            queueLog.warn('Job moved to DLQ due to lock expiration', {
-                                jobId: String(jobId),
-                                queue: job.queue,
-                                owner: lock.owner,
-                                renewals: lock.renewalCount,
-                                stallCount: job.stallCount,
-                            });
-                            ctx.eventsManager.broadcast({
-                                eventType: "failed" /* EventType.Failed */,
-                                jobId,
-                                queue: job.queue,
-                                timestamp: now,
-                                error: 'Lock expired (max stalls reached)',
-                            });
-                        }
-                        else {
-                            queue.push(job);
-                            ctx.jobIndex.set(jobId, { type: 'queue', shardIdx: idx, queueName: job.queue });
-                            queueLog.info('Job requeued due to lock expiration', {
-                                jobId: String(jobId),
-                                queue: job.queue,
-                                owner: lock.owner,
-                                renewals: lock.renewalCount,
-                                attempt: job.attempts,
-                            });
-                            ctx.eventsManager.broadcast({
-                                eventType: "stalled" /* EventType.Stalled */,
-                                jobId,
-                                queue: job.queue,
-                                timestamp: now,
-                            });
-                        }
-                    });
-                }
-                // Remove the expired lock
-                ctx.jobLocks.delete(jobId);
+                await processExpiredLock(jobId, lock, procIdx, ctx, now);
             }
         });
     }
     queueLog.info('Processed expired locks', { count: expired.length });
 }
+/** Process a single expired lock */
+async function processExpiredLock(jobId, lock, procIdx, ctx, now) {
+    const job = ctx.processingShards[procIdx].get(jobId);
+    if (job) {
+        const idx = shardIndex(job.queue);
+        await withWriteLock(ctx.shardLocks[idx], () => {
+            const shard = ctx.shards[idx];
+            const queue = shard.getQueue(job.queue);
+            // Remove from processing
+            ctx.processingShards[procIdx].delete(jobId);
+            // Increment attempts and reset state
+            job.attempts++;
+            job.startedAt = null;
+            job.lastHeartbeat = now;
+            job.stallCount++;
+            // Check if max stalls exceeded
+            const stallConfig = shard.getStallConfig(job.queue);
+            if (stallConfig.maxStalls > 0 && job.stallCount >= stallConfig.maxStalls) {
+                handleMaxStallsExceeded({ jobId, job, lock, shard, ctx, now });
+            }
+            else {
+                requeueExpiredJob({ jobId, job, lock, queue, idx, ctx, now });
+            }
+        });
+    }
+    // Remove the expired lock
+    ctx.jobLocks.delete(jobId);
+}
+/** Move job to DLQ when max stalls exceeded */
+function handleMaxStallsExceeded(opts) {
+    const { jobId, job, lock, shard, ctx, now } = opts;
+    shard.addToDlq(job, "stalled" /* FailureReason.Stalled */, `Lock expired after ${lock.renewalCount} renewals`);
+    ctx.jobIndex.set(jobId, { type: 'dlq', queueName: job.queue });
+    queueLog.warn('Job moved to DLQ due to lock expiration', {
+        jobId: String(jobId),
+        queue: job.queue,
+        owner: lock.owner,
+        renewals: lock.renewalCount,
+        stallCount: job.stallCount,
+    });
+    ctx.eventsManager.broadcast({
+        eventType: "failed" /* EventType.Failed */,
+        jobId,
+        queue: job.queue,
+        timestamp: now,
+        error: 'Lock expired (max stalls reached)',
+    });
+}
+/** Requeue job for retry */
+function requeueExpiredJob(opts) {
+    const { jobId, job, lock, queue, idx, ctx, now } = opts;
+    queue.push(job);
+    ctx.jobIndex.set(jobId, { type: 'queue', shardIdx: idx, queueName: job.queue });
+    queueLog.info('Job requeued due to lock expiration', {
+        jobId: String(jobId),
+        queue: job.queue,
+        owner: lock.owner,
+        renewals: lock.renewalCount,
+        attempt: job.attempts,
+    });
+    ctx.eventsManager.broadcast({
+        eventType: "stalled" /* EventType.Stalled */,
+        jobId,
+        queue: job.queue,
+        timestamp: now,
+    });
+}
 //# sourceMappingURL=lockManager.js.map
package/dist/application/lockManager.js.map

@@ -1 +1 @@
-{"version":3,"file":"lockManager.js","sourceRoot":"","sources":["../../src/application/lockManager.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,EAAE,aAAa,EAAE,
+{"version":3,"file":"lockManager.js","sourceRoot":"","sources":["../../src/application/lockManager.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AAIpD,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAC5C,OAAO,EAAE,UAAU,EAAE,oBAAoB,EAAE,MAAM,gBAAgB,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAG/C,4BAA4B;AAC5B,OAAO,EACL,UAAU,EACV,UAAU,EACV,YAAY,EACZ,iBAAiB,EACjB,WAAW,EACX,WAAW,GACZ,MAAM,kBAAkB,CAAC;AAE1B,4BAA4B;AAC5B,OAAO,EAAE,iBAAiB,EAAE,mBAAmB,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AAE7F;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAC,GAAgB;IACtD,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAEvB,6CAA6C;IAC7C,MAAM,OAAO,GAA4D,EAAE,CAAC;IAE5E,KAAK,MAAM,CAAC,KAAK,EAAE,IAAI,CAAC,IAAI,GAAG,CAAC,QAAQ,EAAE,CAAC;QACzC,IAAI,aAAa,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YAC7B,MAAM,OAAO,GAAG,oBAAoB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YACpD,OAAO,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;QACzC,CAAC;IACH,CAAC;IAED,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAEjC,qCAAqC;IACrC,MAAM,WAAW,GAAG,IAAI,GAAG,EAA0B,CAAC;IACtD,KAAK,MAAM,IAAI,IAAI,OAAO,EAAE,CAAC;QAC3B,IAAI,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QACzC,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,GAAG,EAAE,CAAC;YACV,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACtC,CAAC;QACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAClB,CAAC;IAED,kDAAkD;IAClD,KAAK,MAAM,CAAC,OAAO,EAAE,KAAK,CAAC,IAAI,WAAW,EAAE,CAAC;QAC3C,MAAM,aAAa,CAAC,GAAG,CAAC,eAAe,CAAC,OAAO,CAAC,EAAE,KAAK,IAAI,EAAE;YAC3D,KAAK,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,IAAI,KAAK,EAAE,CAAC;gBACpC,MAAM,kBAAkB,CAAC,KAAK,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;YAC3D,CAAC;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED,QAAQ,CAAC,IAAI,CAAC,yBAAyB,EAAE,EAAE,KAAK,EAAE,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;AACtE,CAAC;AAED,oCAAoC;AACpC,KAAK,UAAU,kBAAkB,CAC/B,KAAY,EACZ,IAAa,EACb,OAAe,EACf,GAAgB,EAChB,GAAW;IAEX,MAAM,GAAG,GAAG,GAAG,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IAErD,IAAI,GAAG,EAAE,CAAC;QACR,MAAM,GAAG,GAAG,UAAU,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAElC,MAAM,aAAa,CAAC,GAAG,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE;YAC5C,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YAC9B,MAAM,KAAK,GAAG,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YAExC,yBAAyB;YACzB,GAAG,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAE5C,qCAAqC;YACrC,GAAG,CAAC,QAAQ,EAAE,CAAC;YACf,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC;YACrB,GAAG,CAAC,aAAa,GAAG,GAAG,CAAC;YACxB,GAAG,CAAC,UAAU,EAAE,CAAC;YAEjB,+BAA+B;YAC/B,MAAM,WAAW,GAAG,KAAK,CAAC,cAAc,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;YACpD,IAAI,WAAW,CAAC,SAAS,GAAG,CAAC,IAAI,GAAG,CAAC,UAAU,IAAI,WAAW,CAAC,SAAS,EAAE,CAAC;gBACzE,uBAAuB,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC;YACjE,CAAC;iBAAM,CAAC;gBACN,iBAAiB,CAAC,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC;YAChE,CAAC;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED,0BAA0B;IAC1B,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAC7B,CAAC;AAYD,+CAA+C;AAC/C,SAAS,uBAAuB,CAAC,IAAsB;IACrD,MAAM,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;IACnD,KAAK,CAAC,QAAQ,CAAC,GAAG,yCAAyB,sBAAsB,IAAI,CAAC,YAAY,WAAW,CAAC,CAAC;IAC/F,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,SAAS,EAAE,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC;IAE/D,QAAQ,CAAC,IAAI,CAAC,yCAAyC,EAAE;QACvD,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC;QACpB,KAAK,EAAE,GAAG,CAAC,KAAK;QAChB,KAAK,EAAE,IAAI,CAAC,KAAK;QACjB,QAAQ,EAAE,IAAI,CAAC,YAAY;QAC3B,UAAU,EAAE,GAAG,CAAC,UAAU;KAC3B,CAAC,CAAC;IAEH,GAAG,CAAC,aAAa,CAAC,SAAS,CAAC;QAC1B,SAAS,iCAAkB;QAC3B,KAAK;QACL,KAAK,EAAE,GAAG,CAAC,KAAK;QAChB,SAAS,EAAE,GAAG;QACd,KAAK,EAAE,mCAAmC;KAC3C,CAAC,CAAC;AACL,CAAC;AAaD,4BAA4B;AAC5B,SAAS,iBAAiB,CAAC,IAAoB;IAC7C,MAAM,EAAE,KAAK,EAAE,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC;IACxD,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAChB,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,EAAE,SAAS,EAAE,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC;IAEhF,QAAQ,CAAC,IAAI,CAAC,qCAAqC,EAAE;QACnD,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC;QACpB,KAAK,EAAE,GAAG,CAAC,KAAK;QAChB,KAAK,EAAE,IAAI,CAAC,KAAK;QACjB,QAAQ,EAAE,IAAI,CAAC,YAAY;QAC3B,OAAO,EAAE,GAAG,CAAC,QAAQ;KACtB,CAAC,CAAC;IAEH,GAAG,CAAC,aAAa,CAAC,SAAS,CAAC;QAC1B,SAAS,mCAAmB;QAC5B,KAAK;QACL,KAAK,EAAE,GAAG,CAAC,KAAK;QAChB,SAAS,EAAE,GAAG;KACf,CAAC,CAAC;AACL,CAAC"}
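The refactor above is behavior-preserving: lockManager.js now re-exports the lock and client-tracking operations from the new lockOperations and clientTracking modules, and checkExpiredLocks delegates to processExpiredLock, which splits into handleMaxStallsExceeded and requeueExpiredJob. The core decision is unchanged: every expiry bumps the job's stallCount, and the job only lands in the DLQ once a configured cutoff is hit. A minimal sketch of that decision, using simplified stand-in types rather than the package's real LockContext and shard structures:

```ts
// Simplified stand-ins for illustration; the real code mutates jobs inside
// sharded maps while holding per-shard write locks.
interface StalledJob {
  queue: string;
  attempts: number;
  stallCount: number;
  startedAt: number | null;
  lastHeartbeat: number;
}

function onLockExpired(
  job: StalledJob,
  maxStalls: number, // 0 disables the DLQ cutoff, as getStallConfig allows above
  requeue: (job: StalledJob) => void,
  moveToDlq: (job: StalledJob, reason: string) => void,
): void {
  // Mirror the state reset done under the shard write lock above.
  job.attempts++;
  job.stallCount++;
  job.startedAt = null;
  job.lastHeartbeat = Date.now();
  if (maxStalls > 0 && job.stallCount >= maxStalls) {
    moveToDlq(job, 'stalled'); // mirrors handleMaxStallsExceeded
  } else {
    requeue(job); // mirrors requeueExpiredJob
  }
}
```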
package/dist/application/lockOperations.d.ts

@@ -0,0 +1,39 @@
+/**
+ * Lock Operations - Job lock creation, verification, renewal
+ */
+import type { JobId, JobLock, LockToken } from '../domain/types/job';
+import type { LockContext } from './types';
+/**
+ * Create a lock for a job when it's pulled for processing.
+ * @returns The lock token, or null if job not in processing
+ */
+export declare function createLock(jobId: JobId, owner: string, ctx: LockContext, ttl?: number): LockToken | null;
+/**
+ * Verify that a token is valid for a job.
+ * @returns true if token matches the active lock
+ */
+export declare function verifyLock(jobId: JobId, token: string, ctx: LockContext): boolean;
+/**
+ * Renew a lock with the given token.
+ * @returns true if renewal succeeded, false if token invalid or lock expired
+ */
+export declare function renewJobLock(jobId: JobId, token: string, ctx: LockContext, newTtl?: number): boolean;
+/**
+ * Renew locks for multiple jobs (batch operation).
+ * @returns Array of jobIds that were successfully renewed
+ */
+export declare function renewJobLockBatch(items: Array<{
+    id: JobId;
+    token: string;
+    ttl?: number;
+}>, ctx: LockContext): string[];
+/**
+ * Release a lock when job is completed or failed.
+ * Should be called by ACK/FAIL operations.
+ */
+export declare function releaseLock(jobId: JobId, ctx: LockContext, token?: string): boolean;
+/**
+ * Get lock info for a job (for debugging/monitoring).
+ */
+export declare function getLockInfo(jobId: JobId, ctx: LockContext): JobLock | null;
+//# sourceMappingURL=lockOperations.d.ts.map
package/dist/application/lockOperations.d.ts.map

@@ -0,0 +1 @@
+{"version":3,"file":"lockOperations.d.ts","sourceRoot":"","sources":["../../src/application/lockOperations.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,qBAAqB,CAAC;AAGrE,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC;AAE3C;;;GAGG;AACH,wBAAgB,UAAU,CACxB,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,WAAW,EAChB,GAAG,GAAE,MAAyB,GAC7B,SAAS,GAAG,IAAI,CAalB;AAED;;;GAGG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,EAAE,WAAW,GAAG,OAAO,CAMjF;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM,EACb,GAAG,EAAE,WAAW,EAChB,MAAM,CAAC,EAAE,MAAM,GACd,OAAO,CAoBT;AAED;;;GAGG;AACH,wBAAgB,iBAAiB,CAC/B,KAAK,EAAE,KAAK,CAAC;IAAE,EAAE,EAAE,KAAK,CAAC;IAAC,KAAK,EAAE,MAAM,CAAC;IAAC,GAAG,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC,EACxD,GAAG,EAAE,WAAW,GACf,MAAM,EAAE,CAQV;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,KAAK,EAAE,KAAK,EAAE,GAAG,EAAE,WAAW,EAAE,KAAK,CAAC,EAAE,MAAM,GAAG,OAAO,CAgBnF;AAED;;GAEG;AACH,wBAAgB,WAAW,CAAC,KAAK,EAAE,KAAK,EAAE,GAAG,EAAE,WAAW,GAAG,OAAO,GAAG,IAAI,CAE1E"}
package/dist/application/lockOperations.js

@@ -0,0 +1,101 @@
+/**
+ * Lock Operations - Job lock creation, verification, renewal
+ */
+import { createJobLock, isLockExpired, renewLock, DEFAULT_LOCK_TTL } from '../domain/types/job';
+import { queueLog } from '../shared/logger';
+/**
+ * Create a lock for a job when it's pulled for processing.
+ * @returns The lock token, or null if job not in processing
+ */
+export function createLock(jobId, owner, ctx, ttl = DEFAULT_LOCK_TTL) {
+    const loc = ctx.jobIndex.get(jobId);
+    if (loc?.type !== 'processing')
+        return null;
+    // Check if lock already exists (shouldn't happen, but defensive)
+    if (ctx.jobLocks.has(jobId)) {
+        queueLog.warn('Lock already exists for job', { jobId: String(jobId), owner });
+        return null;
+    }
+    const lock = createJobLock(jobId, owner, ttl);
+    ctx.jobLocks.set(jobId, lock);
+    return lock.token;
+}
+/**
+ * Verify that a token is valid for a job.
+ * @returns true if token matches the active lock
+ */
+export function verifyLock(jobId, token, ctx) {
+    const lock = ctx.jobLocks.get(jobId);
+    if (!lock)
+        return false;
+    if (lock.token !== token)
+        return false;
+    if (isLockExpired(lock))
+        return false;
+    return true;
+}
+/**
+ * Renew a lock with the given token.
+ * @returns true if renewal succeeded, false if token invalid or lock expired
+ */
+export function renewJobLock(jobId, token, ctx, newTtl) {
+    const lock = ctx.jobLocks.get(jobId);
+    if (!lock)
+        return false;
+    if (lock.token !== token)
+        return false;
+    if (isLockExpired(lock)) {
+        // Lock already expired, remove it
+        ctx.jobLocks.delete(jobId);
+        return false;
+    }
+    renewLock(lock, newTtl);
+    // Also update lastHeartbeat on the job (for legacy stall detection compatibility)
+    const loc = ctx.jobIndex.get(jobId);
+    if (loc?.type === 'processing') {
+        const job = ctx.processingShards[loc.shardIdx].get(jobId);
+        if (job)
+            job.lastHeartbeat = Date.now();
+    }
+    return true;
+}
+/**
+ * Renew locks for multiple jobs (batch operation).
+ * @returns Array of jobIds that were successfully renewed
+ */
+export function renewJobLockBatch(items, ctx) {
+    const renewed = [];
+    for (const item of items) {
+        if (renewJobLock(item.id, item.token, ctx, item.ttl)) {
+            renewed.push(String(item.id));
+        }
+    }
+    return renewed;
+}
+/**
+ * Release a lock when job is completed or failed.
+ * Should be called by ACK/FAIL operations.
+ */
+export function releaseLock(jobId, ctx, token) {
+    const lock = ctx.jobLocks.get(jobId);
+    if (!lock)
+        return true; // No lock to release
+    // If token provided, verify it matches
+    if (token && lock.token !== token) {
+        queueLog.warn('Token mismatch on lock release', {
+            jobId: String(jobId),
+            expected: lock.token.substring(0, 8),
+            got: token.substring(0, 8),
+        });
+        return false;
+    }
+    ctx.jobLocks.delete(jobId);
+    return true;
+}
+/**
+ * Get lock info for a job (for debugging/monitoring).
+ */
+export function getLockInfo(jobId, ctx) {
+    return ctx.jobLocks.get(jobId) ?? null;
+}
+//# sourceMappingURL=lockOperations.js.map
package/dist/application/lockOperations.js.map

@@ -0,0 +1 @@
+{"version":3,"file":"lockOperations.js","sourceRoot":"","sources":["../../src/application/lockOperations.ts"],"names":[],"mappings":"AAAA;;GAEG;AAGH,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,SAAS,EAAE,gBAAgB,EAAE,MAAM,qBAAqB,CAAC;AAChG,OAAO,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAC;AAG5C;;;GAGG;AACH,MAAM,UAAU,UAAU,CACxB,KAAY,EACZ,KAAa,EACb,GAAgB,EAChB,MAAc,gBAAgB;IAE9B,MAAM,GAAG,GAAG,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IACpC,IAAI,GAAG,EAAE,IAAI,KAAK,YAAY;QAAE,OAAO,IAAI,CAAC;IAE5C,iEAAiE;IACjE,IAAI,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;QAC5B,QAAQ,CAAC,IAAI,CAAC,6BAA6B,EAAE,EAAE,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC;QAC9E,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,IAAI,GAAG,aAAa,CAAC,KAAK,EAAE,KAAK,EAAE,GAAG,CAAC,CAAC;IAC9C,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;IAC9B,OAAO,IAAI,CAAC,KAAK,CAAC;AACpB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,UAAU,CAAC,KAAY,EAAE,KAAa,EAAE,GAAgB;IACtE,MAAM,IAAI,GAAG,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IACrC,IAAI,CAAC,IAAI;QAAE,OAAO,KAAK,CAAC;IACxB,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACvC,IAAI,aAAa,CAAC,IAAI,CAAC;QAAE,OAAO,KAAK,CAAC;IACtC,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAC1B,KAAY,EACZ,KAAa,EACb,GAAgB,EAChB,MAAe;IAEf,MAAM,IAAI,GAAG,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IACrC,IAAI,CAAC,IAAI;QAAE,OAAO,KAAK,CAAC;IACxB,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK;QAAE,OAAO,KAAK,CAAC;IACvC,IAAI,aAAa,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,kCAAkC;QAClC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;QAC3B,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IAExB,kFAAkF;IAClF,MAAM,GAAG,GAAG,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IACpC,IAAI,GAAG,EAAE,IAAI,KAAK,YAAY,EAAE,CAAC;QAC/B,MAAM,GAAG,GAAG,GAAG,CAAC,gBAAgB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QAC1D,IAAI,GAAG;YAAE,GAAG,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAC1C,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAC/B,KAAwD,EACxD,GAAgB;IAEhB,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;QACzB,IAAI,YAAY,CAAC,IAAI,CAAC,EAAE,EAAE,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACrD,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC;QAChC,CAAC;IACH,CAAC;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,WAAW,CAAC,KAAY,EAAE,GAAgB,EAAE,KAAc;IACxE,MAAM,IAAI,GAAG,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;IACrC,IAAI,CAAC,IAAI;QAAE,OAAO,IAAI,CAAC,CAAC,qBAAqB;IAE7C,uCAAuC;IACvC,IAAI,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,KAAK,EAAE,CAAC;QAClC,QAAQ,CAAC,IAAI,CAAC,gCAAgC,EAAE;YAC9C,KAAK,EAAE,MAAM,CAAC,KAAK,CAAC;YACpB,QAAQ,EAAE,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC;YACpC,GAAG,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC;SAC3B,CAAC,CAAC;QACH,OAAO,KAAK,CAAC;IACf,CAAC;IAED,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IAC3B,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW,CAAC,KAAY,EAAE,GAAgB;IACxD,OAAO,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,IAAI,CAAC;AACzC,CAAC"}
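The new lockOperations.js carries the BullMQ-style lock lifecycle over unchanged: createLock on pull, token-checked renewal while a worker holds the job, token-checked release on ack or fail. A hedged usage sketch follows; the deep import paths and the ambient ctx are illustrative assumptions, since LockContext is internal wiring rather than a documented public API:

```ts
// Illustrative only: ctx is the queue's internal LockContext, which external
// code does not normally construct; the import paths below are assumptions.
import {
  createLock,
  renewJobLockBatch,
  releaseLock,
} from 'bunqueue/dist/application/lockOperations';
import type { JobId } from 'bunqueue/dist/domain/types/job';
import type { LockContext } from 'bunqueue/dist/application/types';

declare const ctx: LockContext;
declare const jobId: JobId;

const token = createLock(jobId, 'worker-1', ctx); // null if the job is not in processing
if (token) {
  // Renew well inside the TTL; entries whose token no longer matches are skipped.
  const heartbeat = setInterval(() => {
    const renewed = renewJobLockBatch([{ id: jobId, token }], ctx);
    if (renewed.length === 0) clearInterval(heartbeat); // lock lost or expired
  }, 10_000);
  try {
    // ... process the job ...
  } finally {
    clearInterval(heartbeat);
    releaseLock(jobId, ctx, token); // token-checked release on ack/fail
  }
}
```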
package/dist/application/operations/ack.d.ts

@@ -33,13 +33,9 @@ export interface AckContext {
         error?: string;
     }) => void;
     onJobCompleted: (jobId: JobId) => void;
-    /** Batch notify completions - more efficient than per-job calls */
     onJobsCompleted?: (jobIds: JobId[]) => void;
-    /** Fast check if broadcast is needed - avoids function call overhead */
     needsBroadcast?: () => boolean;
-    /** Check if any jobs are waiting for dependencies */
     hasPendingDeps?: () => boolean;
-    /** Callback to re-queue repeatable jobs */
     onRepeat?: (job: Job) => void;
 }
 /**
@@ -57,7 +53,6 @@ export declare function failJob(jobId: JobId, error: string | undefined, ctx: Ac
 export declare function ackJobBatch(jobIds: JobId[], ctx: AckContext): Promise<void>;
 /**
  * Acknowledge multiple jobs with individual results - optimized batch processing
- * Same as ackJobBatch but supports passing result data for each job
  */
 export declare function ackJobBatchWithResults(items: Array<{
     id: JobId;
package/dist/application/operations/ack.d.ts.map

@@ -1 +1 @@
-{"version":3,"file":"ack.d.ts","sourceRoot":"","sources":["../../../src/application/operations/ack.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,KAAK,GAAG,EAAE,KAAK,KAAK,EAA8B,MAAM,wBAAwB,CAAC;AAC1F,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AACtD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,yCAAyC,CAAC;AAC7E,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAGhD,OAAO,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;
+{"version":3,"file":"ack.d.ts","sourceRoot":"","sources":["../../../src/application/operations/ack.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,KAAK,GAAG,EAAE,KAAK,KAAK,EAA8B,MAAM,wBAAwB,CAAC;AAC1F,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AACvE,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,0BAA0B,CAAC;AACtD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,yCAAyC,CAAC;AAC7E,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAGhD,OAAO,KAAK,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAWzD,4BAA4B;AAC5B,MAAM,WAAW,UAAU;IACzB,OAAO,EAAE,aAAa,GAAG,IAAI,CAAC;IAC9B,MAAM,EAAE,KAAK,EAAE,CAAC;IAChB,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,gBAAgB,EAAE,GAAG,CAAC,KAAK,EAAE,GAAG,CAAC,EAAE,CAAC;IACpC,eAAe,EAAE,MAAM,EAAE,CAAC;IAC1B,aAAa,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC;IAC9B,UAAU,EAAE,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;IACpC,QAAQ,EAAE,GAAG,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;IAClC,cAAc,EAAE;QAAE,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC;IAClC,WAAW,EAAE;QAAE,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC;IAC/B,SAAS,EAAE,CAAC,KAAK,EAAE;QACjB,SAAS,EAAE,SAAS,CAAC;QACrB,KAAK,EAAE,MAAM,CAAC;QACd,KAAK,EAAE,KAAK,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;QAClB,IAAI,CAAC,EAAE,OAAO,CAAC;QACf,KAAK,CAAC,EAAE,MAAM,CAAC;KAChB,KAAK,IAAI,CAAC;IACX,cAAc,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAC;IACvC,eAAe,CAAC,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,KAAK,IAAI,CAAC;IAC5C,cAAc,CAAC,EAAE,MAAM,OAAO,CAAC;IAC/B,cAAc,CAAC,EAAE,MAAM,OAAO,CAAC;IAC/B,QAAQ,CAAC,EAAE,CAAC,GAAG,EAAE,GAAG,KAAK,IAAI,CAAC;CAC/B;AAED;;GAEG;AACH,wBAAsB,MAAM,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CAkD1F;AAED;;GAEG;AACH,wBAAsB,OAAO,CAC3B,KAAK,EAAE,KAAK,EACZ,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,GAAG,EAAE,UAAU,GACd,OAAO,CAAC,IAAI,CAAC,CAgDf;AAED;;;GAGG;AACH,wBAAsB,WAAW,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,IAAI,CAAC,CA0BjF;AAED;;GAEG;AACH,wBAAsB,sBAAsB,CAC1C,KAAK,EAAE,KAAK,CAAC;IAAE,EAAE,EAAE,KAAK,CAAC;IAAC,MAAM,EAAE,OAAO,CAAA;CAAE,CAAC,EAC5C,GAAG,EAAE,UAAU,GACd,OAAO,CAAC,IAAI,CAAC,CA0Bf"}
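For completeness, a hedged sketch of the batch acknowledgement entry points declared above. AckContext is internal wiring, the import paths are assumptions, and the per-item `result` field name is inferred rather than confirmed, since the hunk truncates the ackJobBatchWithResults signature:

```ts
// Sketch only: both calls acknowledge jobs in one pass instead of per-job.
import {
  ackJobBatch,
  ackJobBatchWithResults,
  type AckContext,
} from 'bunqueue/dist/application/operations/ack';
import type { JobId } from 'bunqueue/dist/domain/types/job';

declare const ctx: AckContext;
declare const ids: JobId[];

await ackJobBatch(ids, ctx); // acknowledge a batch with no result payloads
await ackJobBatchWithResults(
  ids.map((id) => ({ id, result: { ok: true } })), // `result` field is assumed
  ctx,
);
```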