bunqueue 1.9.7 → 1.9.9
This diff shows the changes between publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- package/dist/application/backgroundTasks.d.ts +3 -6
- package/dist/application/backgroundTasks.d.ts.map +1 -1
- package/dist/application/backgroundTasks.js +10 -179
- package/dist/application/backgroundTasks.js.map +1 -1
- package/dist/application/cleanupTasks.d.ts +1 -1
- package/dist/application/cleanupTasks.d.ts.map +1 -1
- package/dist/application/cleanupTasks.js +61 -22
- package/dist/application/cleanupTasks.js.map +1 -1
- package/dist/application/clientTracking.d.ts +22 -0
- package/dist/application/clientTracking.d.ts.map +1 -0
- package/dist/application/clientTracking.js +118 -0
- package/dist/application/clientTracking.js.map +1 -0
- package/dist/application/contextFactory.d.ts +97 -0
- package/dist/application/contextFactory.d.ts.map +1 -0
- package/dist/application/contextFactory.js +170 -0
- package/dist/application/contextFactory.js.map +1 -0
- package/dist/application/dependencyProcessor.d.ts +11 -0
- package/dist/application/dependencyProcessor.d.ts.map +1 -0
- package/dist/application/dependencyProcessor.js +69 -0
- package/dist/application/dependencyProcessor.js.map +1 -0
- package/dist/application/dlqManager.d.ts +12 -0
- package/dist/application/dlqManager.d.ts.map +1 -1
- package/dist/application/dlqManager.js +36 -0
- package/dist/application/dlqManager.js.map +1 -1
- package/dist/application/lockManager.d.ts +3 -49
- package/dist/application/lockManager.d.ts.map +1 -1
- package/dist/application/lockManager.js +101 -272
- package/dist/application/lockManager.js.map +1 -1
- package/dist/application/lockOperations.d.ts +39 -0
- package/dist/application/lockOperations.d.ts.map +1 -0
- package/dist/application/lockOperations.js +101 -0
- package/dist/application/lockOperations.js.map +1 -0
- package/dist/application/operations/ack.d.ts +1 -5
- package/dist/application/operations/ack.d.ts.map +1 -1
- package/dist/application/operations/ack.js +43 -259
- package/dist/application/operations/ack.js.map +1 -1
- package/dist/application/operations/ackHelpers.d.ts +79 -0
- package/dist/application/operations/ackHelpers.d.ts.map +1 -0
- package/dist/application/operations/ackHelpers.js +173 -0
- package/dist/application/operations/ackHelpers.js.map +1 -0
- package/dist/application/operations/jobManagement.d.ts +2 -0
- package/dist/application/operations/jobManagement.d.ts.map +1 -1
- package/dist/application/operations/jobManagement.js +8 -0
- package/dist/application/operations/jobManagement.js.map +1 -1
- package/dist/application/operations/push.d.ts.map +1 -1
- package/dist/application/operations/push.js +20 -6
- package/dist/application/operations/push.js.map +1 -1
- package/dist/application/operations/queryOperations.d.ts +11 -0
- package/dist/application/operations/queryOperations.d.ts.map +1 -1
- package/dist/application/operations/queryOperations.js +32 -0
- package/dist/application/operations/queryOperations.js.map +1 -1
- package/dist/application/queueManager.d.ts +3 -11
- package/dist/application/queueManager.d.ts.map +1 -1
- package/dist/application/queueManager.js +98 -244
- package/dist/application/queueManager.js.map +1 -1
- package/dist/application/stallDetection.d.ts +11 -0
- package/dist/application/stallDetection.d.ts.map +1 -0
- package/dist/application/stallDetection.js +128 -0
- package/dist/application/stallDetection.js.map +1 -0
- package/dist/application/types.js +1 -1
- package/dist/application/types.js.map +1 -1
- package/dist/cli/client.d.ts +3 -5
- package/dist/cli/client.d.ts.map +1 -1
- package/dist/cli/client.js +31 -27
- package/dist/cli/client.js.map +1 -1
- package/dist/cli/commands/core.js +3 -3
- package/dist/cli/commands/core.js.map +1 -1
- package/dist/cli/commands/job.js +14 -14
- package/dist/cli/commands/job.js.map +1 -1
- package/dist/cli/commands/server.d.ts.map +1 -1
- package/dist/cli/commands/server.js +5 -29
- package/dist/cli/commands/server.js.map +1 -1
- package/dist/cli/index.d.ts.map +1 -1
- package/dist/cli/index.js +1 -9
- package/dist/cli/index.js.map +1 -1
- package/dist/client/events.d.ts +0 -1
- package/dist/client/events.d.ts.map +1 -1
- package/dist/client/events.js +4 -7
- package/dist/client/events.js.map +1 -1
- package/dist/client/flow.d.ts +23 -1
- package/dist/client/flow.d.ts.map +1 -1
- package/dist/client/flow.js +166 -68
- package/dist/client/flow.js.map +1 -1
- package/dist/client/queue/queue.d.ts.map +1 -1
- package/dist/client/queue/queue.js +3 -1
- package/dist/client/queue/queue.js.map +1 -1
- package/dist/client/sandboxed/types.d.ts +1 -0
- package/dist/client/sandboxed/types.d.ts.map +1 -1
- package/dist/client/sandboxed/worker.d.ts +1 -0
- package/dist/client/sandboxed/worker.d.ts.map +1 -1
- package/dist/client/sandboxed/worker.js +31 -8
- package/dist/client/sandboxed/worker.js.map +1 -1
- package/dist/client/sandboxed/wrapper.d.ts.map +1 -1
- package/dist/client/sandboxed/wrapper.js +10 -1
- package/dist/client/sandboxed/wrapper.js.map +1 -1
- package/dist/client/tcp/client.d.ts +4 -1
- package/dist/client/tcp/client.d.ts.map +1 -1
- package/dist/client/tcp/client.js +26 -8
- package/dist/client/tcp/client.js.map +1 -1
- package/dist/client/tcp/connection.d.ts +6 -8
- package/dist/client/tcp/connection.d.ts.map +1 -1
- package/dist/client/tcp/connection.js +24 -22
- package/dist/client/tcp/connection.js.map +1 -1
- package/dist/client/tcp/index.d.ts +0 -1
- package/dist/client/tcp/index.d.ts.map +1 -1
- package/dist/client/tcp/index.js +0 -1
- package/dist/client/tcp/index.js.map +1 -1
- package/dist/client/tcp/types.d.ts +8 -13
- package/dist/client/tcp/types.d.ts.map +1 -1
- package/dist/client/tcp/types.js +0 -1
- package/dist/client/tcp/types.js.map +1 -1
- package/dist/client/tcpPool.d.ts.map +1 -1
- package/dist/client/tcpPool.js +0 -6
- package/dist/client/tcpPool.js.map +1 -1
- package/dist/client/worker/ackBatcher.d.ts +1 -1
- package/dist/client/worker/ackBatcher.d.ts.map +1 -1
- package/dist/client/worker/ackBatcher.js +20 -18
- package/dist/client/worker/ackBatcher.js.map +1 -1
- package/dist/client/worker/jobParser.d.ts.map +1 -1
- package/dist/client/worker/jobParser.js +8 -7
- package/dist/client/worker/jobParser.js.map +1 -1
- package/dist/client/worker/processor.d.ts.map +1 -1
- package/dist/client/worker/processor.js +10 -6
- package/dist/client/worker/processor.js.map +1 -1
- package/dist/domain/queue/dependencyTracker.d.ts +74 -0
- package/dist/domain/queue/dependencyTracker.d.ts.map +1 -0
- package/dist/domain/queue/dependencyTracker.js +126 -0
- package/dist/domain/queue/dependencyTracker.js.map +1 -0
- package/dist/domain/queue/dlqShard.d.ts +61 -0
- package/dist/domain/queue/dlqShard.d.ts.map +1 -0
- package/dist/domain/queue/dlqShard.js +175 -0
- package/dist/domain/queue/dlqShard.js.map +1 -0
- package/dist/domain/queue/limiterManager.d.ts +44 -0
- package/dist/domain/queue/limiterManager.d.ts.map +1 -0
- package/dist/domain/queue/limiterManager.js +99 -0
- package/dist/domain/queue/limiterManager.js.map +1 -0
- package/dist/domain/queue/shard.d.ts +33 -124
- package/dist/domain/queue/shard.d.ts.map +1 -1
- package/dist/domain/queue/shard.js +157 -427
- package/dist/domain/queue/shard.js.map +1 -1
- package/dist/domain/queue/temporalManager.d.ts +82 -0
- package/dist/domain/queue/temporalManager.d.ts.map +1 -0
- package/dist/domain/queue/temporalManager.js +150 -0
- package/dist/domain/queue/temporalManager.js.map +1 -0
- package/dist/domain/queue/uniqueKeyManager.d.ts +32 -0
- package/dist/domain/queue/uniqueKeyManager.d.ts.map +1 -0
- package/dist/domain/queue/uniqueKeyManager.js +87 -0
- package/dist/domain/queue/uniqueKeyManager.js.map +1 -0
- package/dist/domain/types/command.d.ts +6 -0
- package/dist/domain/types/command.d.ts.map +1 -1
- package/dist/infrastructure/backup/s3Backup.d.ts +3 -40
- package/dist/infrastructure/backup/s3Backup.d.ts.map +1 -1
- package/dist/infrastructure/backup/s3Backup.js +10 -182
- package/dist/infrastructure/backup/s3Backup.js.map +1 -1
- package/dist/infrastructure/backup/s3BackupConfig.d.ts +67 -0
- package/dist/infrastructure/backup/s3BackupConfig.d.ts.map +1 -0
- package/dist/infrastructure/backup/s3BackupConfig.js +48 -0
- package/dist/infrastructure/backup/s3BackupConfig.js.map +1 -0
- package/dist/infrastructure/backup/s3BackupOperations.d.ts +23 -0
- package/dist/infrastructure/backup/s3BackupOperations.d.ts.map +1 -0
- package/dist/infrastructure/backup/s3BackupOperations.js +170 -0
- package/dist/infrastructure/backup/s3BackupOperations.js.map +1 -0
- package/dist/infrastructure/persistence/sqlite.d.ts +6 -13
- package/dist/infrastructure/persistence/sqlite.d.ts.map +1 -1
- package/dist/infrastructure/persistence/sqlite.js +28 -179
- package/dist/infrastructure/persistence/sqlite.js.map +1 -1
- package/dist/infrastructure/persistence/sqliteBatch.d.ts +38 -0
- package/dist/infrastructure/persistence/sqliteBatch.d.ts.map +1 -0
- package/dist/infrastructure/persistence/sqliteBatch.js +124 -0
- package/dist/infrastructure/persistence/sqliteBatch.js.map +1 -0
- package/dist/infrastructure/persistence/sqliteSerializer.d.ts +17 -0
- package/dist/infrastructure/persistence/sqliteSerializer.d.ts.map +1 -0
- package/dist/infrastructure/persistence/sqliteSerializer.js +81 -0
- package/dist/infrastructure/persistence/sqliteSerializer.js.map +1 -0
- package/dist/infrastructure/persistence/statements.d.ts +1 -1
- package/dist/infrastructure/persistence/statements.d.ts.map +1 -1
- package/dist/infrastructure/persistence/statements.js +3 -2
- package/dist/infrastructure/persistence/statements.js.map +1 -1
- package/dist/infrastructure/scheduler/cronScheduler.d.ts +7 -0
- package/dist/infrastructure/scheduler/cronScheduler.d.ts.map +1 -1
- package/dist/infrastructure/scheduler/cronScheduler.js +23 -3
- package/dist/infrastructure/scheduler/cronScheduler.js.map +1 -1
- package/dist/infrastructure/server/handler.d.ts.map +1 -1
- package/dist/infrastructure/server/handler.js +1 -186
- package/dist/infrastructure/server/handler.js.map +1 -1
- package/dist/infrastructure/server/handlerRoutes.d.ts +23 -0
- package/dist/infrastructure/server/handlerRoutes.d.ts.map +1 -0
- package/dist/infrastructure/server/handlerRoutes.js +190 -0
- package/dist/infrastructure/server/handlerRoutes.js.map +1 -0
- package/dist/infrastructure/server/handlers/core.d.ts.map +1 -1
- package/dist/infrastructure/server/handlers/core.js +26 -19
- package/dist/infrastructure/server/handlers/core.js.map +1 -1
- package/dist/infrastructure/server/http.d.ts +4 -25
- package/dist/infrastructure/server/http.d.ts.map +1 -1
- package/dist/infrastructure/server/http.js +68 -285
- package/dist/infrastructure/server/http.js.map +1 -1
- package/dist/infrastructure/server/httpEndpoints.d.ts +19 -0
- package/dist/infrastructure/server/httpEndpoints.d.ts.map +1 -0
- package/dist/infrastructure/server/httpEndpoints.js +151 -0
- package/dist/infrastructure/server/httpEndpoints.js.map +1 -0
- package/dist/infrastructure/server/protocol.d.ts +15 -1
- package/dist/infrastructure/server/protocol.d.ts.map +1 -1
- package/dist/infrastructure/server/protocol.js +37 -3
- package/dist/infrastructure/server/protocol.js.map +1 -1
- package/dist/infrastructure/server/sseHandler.d.ts +27 -0
- package/dist/infrastructure/server/sseHandler.d.ts.map +1 -0
- package/dist/infrastructure/server/sseHandler.js +77 -0
- package/dist/infrastructure/server/sseHandler.js.map +1 -0
- package/dist/infrastructure/server/tcp.d.ts +8 -10
- package/dist/infrastructure/server/tcp.d.ts.map +1 -1
- package/dist/infrastructure/server/tcp.js +51 -42
- package/dist/infrastructure/server/tcp.js.map +1 -1
- package/dist/infrastructure/server/wsHandler.d.ts +31 -0
- package/dist/infrastructure/server/wsHandler.d.ts.map +1 -0
- package/dist/infrastructure/server/wsHandler.js +63 -0
- package/dist/infrastructure/server/wsHandler.js.map +1 -0
- package/dist/main.js +2 -4
- package/dist/main.js.map +1 -1
- package/dist/mcp/index.js +3 -465
- package/dist/mcp/index.js.map +1 -1
- package/dist/mcp/mcpHandlers.d.ts +129 -0
- package/dist/mcp/mcpHandlers.d.ts.map +1 -0
- package/dist/mcp/mcpHandlers.js +204 -0
- package/dist/mcp/mcpHandlers.js.map +1 -0
- package/dist/mcp/mcpTools.d.ts +15 -0
- package/dist/mcp/mcpTools.d.ts.map +1 -0
- package/dist/mcp/mcpTools.js +277 -0
- package/dist/mcp/mcpTools.js.map +1 -0
- package/dist/shared/lru.d.ts +23 -0
- package/dist/shared/lru.d.ts.map +1 -1
- package/dist/shared/lru.js +61 -3
- package/dist/shared/lru.js.map +1 -1
- package/dist/shared/skipList.d.ts +10 -2
- package/dist/shared/skipList.d.ts.map +1 -1
- package/dist/shared/skipList.js +22 -1
- package/dist/shared/skipList.js.map +1 -1
- package/package.json +2 -2
- package/dist/cli/dashboard.d.ts +0 -32
- package/dist/cli/dashboard.d.ts.map +0 -1
- package/dist/cli/dashboard.js +0 -183
- package/dist/cli/dashboard.js.map +0 -1
- package/dist/client/tcp/lineBuffer.d.ts +0 -17
- package/dist/client/tcp/lineBuffer.d.ts.map +0 -1
- package/dist/client/tcp/lineBuffer.js +0 -32
- package/dist/client/tcp/lineBuffer.js.map +0 -1
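
Most of the 1.9.7 → 1.9.9 change is a decomposition of large dist modules into focused ones: the application layer gains clientTracking, contextFactory, dependencyProcessor, lockOperations and stallDetection; the server gains handlerRoutes, httpEndpoints, sseHandler and wsHandler; s3Backup splits into s3BackupConfig and s3BackupOperations; sqlite gains sqliteBatch and sqliteSerializer; the MCP entry point moves its logic into mcpHandlers and mcpTools; domain/queue gains dependencyTracker, dlqShard, limiterManager, temporalManager and uniqueKeyManager; cli/dashboard and client/tcp/lineBuffer are removed. The hunks reproduced below cover the lockManager split and the AckContext changes. As the first hunk shows, the old module keeps its public surface by re-exporting from the new files; here is a minimal sketch of that pattern (the consumer line is hypothetical, the module paths follow the dist layout, and this is not the package's actual source):

```ts
// lockManager.ts (facade) - sketch of the re-export pattern used in the hunk below
export { createLock, verifyLock, renewJobLock, renewJobLockBatch, releaseLock, getLockInfo } from './lockOperations';
export { registerClientJob, unregisterClientJob, releaseClientJobs } from './clientTracking';

// A hypothetical consumer that imported from the old module path is unaffected:
// import { releaseLock } from './lockManager';
```
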
package/dist/application/lockManager.js
@@ -2,306 +2,135 @@
  * Lock Manager - Job lock and client tracking
  * Handles BullMQ-style lock-based job ownership
  */
-import {
+import { isLockExpired } from '../domain/types/job';
 import { queueLog } from '../shared/logger';
 import { shardIndex, processingShardIndex } from '../shared/hash';
 import { withWriteLock } from '../shared/lock';
-//
-
-
-
- */
-export function createLock(jobId, owner, ctx, ttl = DEFAULT_LOCK_TTL) {
-    const loc = ctx.jobIndex.get(jobId);
-    if (loc?.type !== 'processing')
-        return null;
-    // Check if lock already exists (shouldn't happen, but defensive)
-    if (ctx.jobLocks.has(jobId)) {
-        queueLog.warn('Lock already exists for job', { jobId: String(jobId), owner });
-        return null;
-    }
-    const lock = createJobLock(jobId, owner, ttl);
-    ctx.jobLocks.set(jobId, lock);
-    return lock.token;
-}
-/**
- * Verify that a token is valid for a job.
- * @returns true if token matches the active lock
- */
-export function verifyLock(jobId, token, ctx) {
-    const lock = ctx.jobLocks.get(jobId);
-    if (!lock)
-        return false;
-    if (lock.token !== token)
-        return false;
-    if (isLockExpired(lock))
-        return false;
-    return true;
-}
-/**
- * Renew a lock with the given token.
- * @returns true if renewal succeeded, false if token invalid or lock expired
- */
-export function renewJobLock(jobId, token, ctx, newTtl) {
-    const lock = ctx.jobLocks.get(jobId);
-    if (!lock)
-        return false;
-    if (lock.token !== token)
-        return false;
-    if (isLockExpired(lock)) {
-        // Lock already expired, remove it
-        ctx.jobLocks.delete(jobId);
-        return false;
-    }
-    renewLock(lock, newTtl);
-    // Also update lastHeartbeat on the job (for legacy stall detection compatibility)
-    const loc = ctx.jobIndex.get(jobId);
-    if (loc?.type === 'processing') {
-        const job = ctx.processingShards[loc.shardIdx].get(jobId);
-        if (job)
-            job.lastHeartbeat = Date.now();
-    }
-    return true;
-}
-/**
- * Renew locks for multiple jobs (batch operation).
- * @returns Array of jobIds that were successfully renewed
- */
-export function renewJobLockBatch(items, ctx) {
-    const renewed = [];
-    for (const item of items) {
-        if (renewJobLock(item.id, item.token, ctx, item.ttl)) {
-            renewed.push(String(item.id));
-        }
-    }
-    return renewed;
-}
-/**
- * Release a lock when job is completed or failed.
- * Should be called by ACK/FAIL operations.
- */
-export function releaseLock(jobId, ctx, token) {
-    const lock = ctx.jobLocks.get(jobId);
-    if (!lock)
-        return true; // No lock to release
-    // If token provided, verify it matches
-    if (token && lock.token !== token) {
-        queueLog.warn('Token mismatch on lock release', {
-            jobId: String(jobId),
-            expected: lock.token.substring(0, 8),
-            got: token.substring(0, 8),
-        });
-        return false;
-    }
-    ctx.jobLocks.delete(jobId);
-    return true;
-}
-/**
- * Get lock info for a job (for debugging/monitoring).
- */
-export function getLockInfo(jobId, ctx) {
-    return ctx.jobLocks.get(jobId) ?? null;
-}
-// ============ Client-Job Tracking ============
-/**
- * Register a job as owned by a client (called on PULL).
- */
-export function registerClientJob(clientId, jobId, ctx) {
-    let jobs = ctx.clientJobs.get(clientId);
-    if (!jobs) {
-        jobs = new Set();
-        ctx.clientJobs.set(clientId, jobs);
-    }
-    jobs.add(jobId);
-}
-/**
- * Unregister a job from a client (called on ACK/FAIL).
- */
-export function unregisterClientJob(clientId, jobId, ctx) {
-    if (!clientId)
-        return;
-    const jobs = ctx.clientJobs.get(clientId);
-    if (jobs) {
-        jobs.delete(jobId);
-        if (jobs.size === 0) {
-            ctx.clientJobs.delete(clientId);
-        }
-    }
-}
-/**
- * Release all jobs owned by a client back to queue (called on TCP disconnect).
- * Returns the number of jobs released.
- *
- * Uses proper locking to prevent race conditions.
- */
-export async function releaseClientJobs(clientId, ctx) {
-    const jobs = ctx.clientJobs.get(clientId);
-    if (!jobs || jobs.size === 0) {
-        ctx.clientJobs.delete(clientId);
-        return 0;
-    }
-    // Phase 1: Collect jobs to release (read-only, no locks needed)
-    const jobsToRelease = [];
-    for (const jobId of jobs) {
-        const loc = ctx.jobIndex.get(jobId);
-        if (loc?.type !== 'processing')
-            continue;
-        const procIdx = loc.shardIdx;
-        const job = ctx.processingShards[procIdx].get(jobId);
-        if (!job)
-            continue;
-        jobsToRelease.push({
-            jobId,
-            procIdx,
-            queueShardIdx: shardIndex(job.queue),
-        });
-    }
-    if (jobsToRelease.length === 0) {
-        ctx.clientJobs.delete(clientId);
-        return 0;
-    }
-    // Phase 2: Group by processing shard for efficient locking
-    const byProcShard = new Map();
-    for (const item of jobsToRelease) {
-        let list = byProcShard.get(item.procIdx);
-        if (!list) {
-            list = [];
-            byProcShard.set(item.procIdx, list);
-        }
-        list.push(item);
-    }
-    let released = 0;
-    const now = Date.now();
-    // Phase 3: Process each processing shard with proper locking
-    for (const [procIdx, items] of byProcShard) {
-        await withWriteLock(ctx.processingLocks[procIdx], async () => {
-            for (const { jobId, queueShardIdx } of items) {
-                const job = ctx.processingShards[procIdx].get(jobId);
-                if (!job)
-                    continue;
-                // Acquire shard lock for queue modifications
-                await withWriteLock(ctx.shardLocks[queueShardIdx], () => {
-                    const shard = ctx.shards[queueShardIdx];
-                    // Remove from processing
-                    ctx.processingShards[procIdx].delete(jobId);
-                    // Release lock if exists
-                    ctx.jobLocks.delete(jobId);
-                    // Release concurrency
-                    shard.releaseConcurrency(job.queue);
-                    // Release group if active
-                    if (job.groupId) {
-                        shard.releaseGroup(job.queue, job.groupId);
-                    }
-                    // Reset job state for retry
-                    job.startedAt = null;
-                    job.lastHeartbeat = now;
-                    // Re-queue the job
-                    shard.getQueue(job.queue).push(job);
-                    const isDelayed = job.runAt > now;
-                    shard.incrementQueued(jobId, isDelayed, job.createdAt, job.queue, job.runAt);
-                    ctx.jobIndex.set(jobId, { type: 'queue', shardIdx: queueShardIdx, queueName: job.queue });
-                    released++;
-                });
-            }
-        });
-    }
-    // Clear client tracking
-    ctx.clientJobs.delete(clientId);
-    if (released > 0) {
-        queueLog.info('Released client jobs', { clientId: clientId.substring(0, 8), released });
-    }
-    return released;
-}
-// ============ Lock Expiration Check ============
+// Re-export lock operations
+export { createLock, verifyLock, renewJobLock, renewJobLockBatch, releaseLock, getLockInfo, } from './lockOperations';
+// Re-export client tracking
+export { registerClientJob, unregisterClientJob, releaseClientJobs } from './clientTracking';
 /**
  * Check and handle expired locks.
  * Jobs with expired locks are requeued for retry.
  *
  * Uses proper locking to prevent race conditions.
+ * Lock hierarchy: shards[N] -> processingShards[N] (per CLAUDE.md)
  */
 export async function checkExpiredLocks(ctx) {
     const now = Date.now();
-    // Phase 1: Collect expired locks (read-only)
+    // Phase 1: Collect expired locks and look up their jobs (read-only from processing shards)
+    // We need to know the queue name to determine shard index
     const expired = [];
     for (const [jobId, lock] of ctx.jobLocks) {
         if (isLockExpired(lock, now)) {
             const procIdx = processingShardIndex(String(jobId));
-
+            const job = ctx.processingShards[procIdx].get(jobId);
+            if (job) {
+                const shardIdx = shardIndex(job.queue);
+                expired.push({ jobId, lock, procIdx, shardIdx, job });
+            }
+            else {
+                // Job not in processing - just clean up the orphan lock
+                ctx.jobLocks.delete(jobId);
+            }
         }
     }
     if (expired.length === 0)
         return;
-    // Phase 2: Group by processing shard
-
+    // Phase 2: Group by shard index first (primary), then by processing shard (secondary)
+    // This ensures we acquire locks in the correct hierarchy order
+    const byShard = new Map();
     for (const item of expired) {
-        let
+        let procMap = byShard.get(item.shardIdx);
+        if (!procMap) {
+            procMap = new Map();
+            byShard.set(item.shardIdx, procMap);
+        }
+        let list = procMap.get(item.procIdx);
         if (!list) {
            list = [];
-
+            procMap.set(item.procIdx, list);
         }
         list.push(item);
     }
-    // Phase 3: Process
-    for (const [
-        await withWriteLock(ctx.
-            for (const
-
-
-
-
-
-                const queue = shard.getQueue(job.queue);
-                // Remove from processing
-                ctx.processingShards[procIdx].delete(jobId);
-                // Increment attempts and reset state
-                job.attempts++;
-                job.startedAt = null;
-                job.lastHeartbeat = now;
-                job.stallCount++;
-                // Check if max stalls exceeded
-                const stallConfig = shard.getStallConfig(job.queue);
-                if (stallConfig.maxStalls > 0 && job.stallCount >= stallConfig.maxStalls) {
-                    shard.addToDlq(job, "stalled" /* FailureReason.Stalled */, `Lock expired after ${lock.renewalCount} renewals`);
-                    ctx.jobIndex.set(jobId, { type: 'dlq', queueName: job.queue });
-                    queueLog.warn('Job moved to DLQ due to lock expiration', {
-                        jobId: String(jobId),
-                        queue: job.queue,
-                        owner: lock.owner,
-                        renewals: lock.renewalCount,
-                        stallCount: job.stallCount,
-                    });
-                    ctx.eventsManager.broadcast({
-                        eventType: "failed" /* EventType.Failed */,
-                        jobId,
-                        queue: job.queue,
-                        timestamp: now,
-                        error: 'Lock expired (max stalls reached)',
-                    });
-                }
-                else {
-                    queue.push(job);
-                    ctx.jobIndex.set(jobId, { type: 'queue', shardIdx: idx, queueName: job.queue });
-                    queueLog.info('Job requeued due to lock expiration', {
-                        jobId: String(jobId),
-                        queue: job.queue,
-                        owner: lock.owner,
-                        renewals: lock.renewalCount,
-                        attempt: job.attempts,
-                    });
-                    ctx.eventsManager.broadcast({
-                        eventType: "stalled" /* EventType.Stalled */,
-                        jobId,
-                        queue: job.queue,
-                        timestamp: now,
-                    });
-                }
-            });
-        }
-        // Remove the expired lock
-        ctx.jobLocks.delete(jobId);
+    // Phase 3: Process with correct lock hierarchy: shardLock -> processingLock
+    for (const [shardIdx, procMap] of byShard) {
+        await withWriteLock(ctx.shardLocks[shardIdx], async () => {
+            for (const [procIdx, items] of procMap) {
+                await withWriteLock(ctx.processingLocks[procIdx], async () => {
+                    for (const { jobId, lock, job } of items) {
+                        processExpiredLockInner(jobId, lock, job, shardIdx, procIdx, ctx, now);
+                    }
+                });
            }
        });
    }
     queueLog.info('Processed expired locks', { count: expired.length });
 }
+/**
+ * Process a single expired lock (called with both locks already held)
+ * Lock hierarchy already satisfied: shardLock -> processingLock held by caller
+ */
+// eslint-disable-next-line max-params
+function processExpiredLockInner(jobId, lock, job, shardIdx, procIdx, ctx, now) {
+    const shard = ctx.shards[shardIdx];
+    const queue = shard.getQueue(job.queue);
+    // Remove from processing
+    ctx.processingShards[procIdx].delete(jobId);
+    // Increment attempts and reset state
+    job.attempts++;
+    job.startedAt = null;
+    job.lastHeartbeat = now;
+    job.stallCount++;
+    // Check if max stalls exceeded
+    const stallConfig = shard.getStallConfig(job.queue);
+    if (stallConfig.maxStalls > 0 && job.stallCount >= stallConfig.maxStalls) {
+        handleMaxStallsExceeded({ jobId, job, lock, shard, ctx, now });
+    }
+    else {
+        requeueExpiredJob({ jobId, job, lock, queue, idx: shardIdx, ctx, now });
+    }
+    // Remove the expired lock
+    ctx.jobLocks.delete(jobId);
+}
+/** Move job to DLQ when max stalls exceeded */
+function handleMaxStallsExceeded(opts) {
+    const { jobId, job, lock, shard, ctx, now } = opts;
+    shard.addToDlq(job, "stalled" /* FailureReason.Stalled */, `Lock expired after ${lock.renewalCount} renewals`);
+    ctx.jobIndex.set(jobId, { type: 'dlq', queueName: job.queue });
+    queueLog.warn('Job moved to DLQ due to lock expiration', {
+        jobId: String(jobId),
+        queue: job.queue,
+        owner: lock.owner,
+        renewals: lock.renewalCount,
+        stallCount: job.stallCount,
+    });
+    ctx.eventsManager.broadcast({
+        eventType: "failed" /* EventType.Failed */,
+        jobId,
+        queue: job.queue,
+        timestamp: now,
+        error: 'Lock expired (max stalls reached)',
+    });
+}
+/** Requeue job for retry */
+function requeueExpiredJob(opts) {
+    const { jobId, job, lock, queue, idx, ctx, now } = opts;
+    queue.push(job);
+    ctx.jobIndex.set(jobId, { type: 'queue', shardIdx: idx, queueName: job.queue });
+    queueLog.info('Job requeued due to lock expiration', {
+        jobId: String(jobId),
+        queue: job.queue,
+        owner: lock.owner,
+        renewals: lock.renewalCount,
+        attempt: job.attempts,
+    });
+    ctx.eventsManager.broadcast({
+        eventType: "stalled" /* EventType.Stalled */,
+        jobId,
+        queue: job.queue,
+        timestamp: now,
+    });
+}
 //# sourceMappingURL=lockManager.js.map
package/dist/application/lockManager.js.map
@@ -1 +1 @@ (regenerated source map; VLQ mappings not reproduced)
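
The rewritten checkExpiredLocks above groups expired locks by queue shard first and by processing shard second, so the two write locks are always acquired in the same order (shards[N] before processingShards[N]). A fixed acquisition order is what rules out the deadlock that two concurrent sweeps could cause by holding the locks in opposite orders. Below is a self-contained sketch of that ordering rule; SimpleLock is a stand-in for the package's withWriteLock helper in shared/lock, not its real implementation, and the job ids are illustrative.

```ts
// Stand-in async mutex: serializes callbacks by chaining them on a promise tail.
class SimpleLock {
  private tail: Promise<void> = Promise.resolve();
  with<T>(fn: () => Promise<T> | T): Promise<T> {
    const run = this.tail.then(fn);
    this.tail = run.then(() => undefined, () => undefined);
    return run;
  }
}

const shardLocks = [new SimpleLock(), new SimpleLock()];
const processingLocks = [new SimpleLock(), new SimpleLock()];

// Fixed acquisition order: queue-shard lock first, processing-shard lock second.
// Two concurrent sweeps can then never hold the two locks in opposite orders.
async function requeueExpired(shardIdx: number, procIdx: number, jobIds: string[]): Promise<void> {
  await shardLocks[shardIdx].with(() =>
    processingLocks[procIdx].with(() => {
      for (const id of jobIds) {
        console.log(`requeue ${id} (shard ${shardIdx}, processing shard ${procIdx})`);
      }
    }),
  );
}

void requeueExpired(0, 1, ['42', '43']);
```
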
package/dist/application/lockOperations.d.ts
@@ -0,0 +1,39 @@
+/**
+ * Lock Operations - Job lock creation, verification, renewal
+ */
+import type { JobId, JobLock, LockToken } from '../domain/types/job';
+import type { LockContext } from './types';
+/**
+ * Create a lock for a job when it's pulled for processing.
+ * @returns The lock token, or null if job not in processing
+ */
+export declare function createLock(jobId: JobId, owner: string, ctx: LockContext, ttl?: number): LockToken | null;
+/**
+ * Verify that a token is valid for a job.
+ * @returns true if token matches the active lock
+ */
+export declare function verifyLock(jobId: JobId, token: string, ctx: LockContext): boolean;
+/**
+ * Renew a lock with the given token.
+ * @returns true if renewal succeeded, false if token invalid or lock expired
+ */
+export declare function renewJobLock(jobId: JobId, token: string, ctx: LockContext, newTtl?: number): boolean;
+/**
+ * Renew locks for multiple jobs (batch operation).
+ * @returns Array of jobIds that were successfully renewed
+ */
+export declare function renewJobLockBatch(items: Array<{
+    id: JobId;
+    token: string;
+    ttl?: number;
+}>, ctx: LockContext): string[];
+/**
+ * Release a lock when job is completed or failed.
+ * Should be called by ACK/FAIL operations.
+ */
+export declare function releaseLock(jobId: JobId, ctx: LockContext, token?: string): boolean;
+/**
+ * Get lock info for a job (for debugging/monitoring).
+ */
+export declare function getLockInfo(jobId: JobId, ctx: LockContext): JobLock | null;
+//# sourceMappingURL=lockOperations.d.ts.map
package/dist/application/lockOperations.d.ts.map
@@ -0,0 +1 @@ (new source map; VLQ mappings not reproduced)
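
The declarations above describe the server-side lock lifecycle: createLock when a job is pulled, renewJobLock (or renewJobLockBatch) while the worker holds it, verifyLock and releaseLock when the job is acked or failed. A hedged sketch of that flow follows; LockContext is internal server state, so ctx and jobId are declared rather than constructed, and the surrounding control flow, owner name and TTL are illustrative only.

```ts
import { createLock, verifyLock, renewJobLock, releaseLock } from './lockOperations';
import type { LockContext } from './types';
import type { JobId } from '../domain/types/job';

declare const ctx: LockContext; // internal server state, assumed available
declare const jobId: JobId;     // id of a job currently in processing

const token = createLock(jobId, 'worker-1', ctx); // on PULL: returns LockToken | null
if (token !== null) {
  renewJobLock(jobId, token, ctx, 30_000);         // heartbeat: extend the TTL
  if (verifyLock(jobId, token, ctx)) {
    releaseLock(jobId, ctx, token);                // on ACK/FAIL: drop the lock
  }
}
```
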
package/dist/application/lockOperations.js
@@ -0,0 +1,101 @@
+/**
+ * Lock Operations - Job lock creation, verification, renewal
+ */
+import { createJobLock, isLockExpired, renewLock, DEFAULT_LOCK_TTL } from '../domain/types/job';
+import { queueLog } from '../shared/logger';
+/**
+ * Create a lock for a job when it's pulled for processing.
+ * @returns The lock token, or null if job not in processing
+ */
+export function createLock(jobId, owner, ctx, ttl = DEFAULT_LOCK_TTL) {
+    const loc = ctx.jobIndex.get(jobId);
+    if (loc?.type !== 'processing')
+        return null;
+    // Check if lock already exists (shouldn't happen, but defensive)
+    if (ctx.jobLocks.has(jobId)) {
+        queueLog.warn('Lock already exists for job', { jobId: String(jobId), owner });
+        return null;
+    }
+    const lock = createJobLock(jobId, owner, ttl);
+    ctx.jobLocks.set(jobId, lock);
+    return lock.token;
+}
+/**
+ * Verify that a token is valid for a job.
+ * @returns true if token matches the active lock
+ */
+export function verifyLock(jobId, token, ctx) {
+    const lock = ctx.jobLocks.get(jobId);
+    if (!lock)
+        return false;
+    if (lock.token !== token)
+        return false;
+    if (isLockExpired(lock))
+        return false;
+    return true;
+}
+/**
+ * Renew a lock with the given token.
+ * @returns true if renewal succeeded, false if token invalid or lock expired
+ */
+export function renewJobLock(jobId, token, ctx, newTtl) {
+    const lock = ctx.jobLocks.get(jobId);
+    if (!lock)
+        return false;
+    if (lock.token !== token)
+        return false;
+    if (isLockExpired(lock)) {
+        // Lock already expired, remove it
+        ctx.jobLocks.delete(jobId);
+        return false;
+    }
+    renewLock(lock, newTtl);
+    // Also update lastHeartbeat on the job (for legacy stall detection compatibility)
+    const loc = ctx.jobIndex.get(jobId);
+    if (loc?.type === 'processing') {
+        const job = ctx.processingShards[loc.shardIdx].get(jobId);
+        if (job)
+            job.lastHeartbeat = Date.now();
+    }
+    return true;
+}
+/**
+ * Renew locks for multiple jobs (batch operation).
+ * @returns Array of jobIds that were successfully renewed
+ */
+export function renewJobLockBatch(items, ctx) {
+    const renewed = [];
+    for (const item of items) {
+        if (renewJobLock(item.id, item.token, ctx, item.ttl)) {
+            renewed.push(String(item.id));
+        }
+    }
+    return renewed;
+}
+/**
+ * Release a lock when job is completed or failed.
+ * Should be called by ACK/FAIL operations.
+ */
+export function releaseLock(jobId, ctx, token) {
+    const lock = ctx.jobLocks.get(jobId);
+    if (!lock)
+        return true; // No lock to release
+    // If token provided, verify it matches
+    if (token && lock.token !== token) {
+        queueLog.warn('Token mismatch on lock release', {
+            jobId: String(jobId),
+            expected: lock.token.substring(0, 8),
+            got: token.substring(0, 8),
+        });
+        return false;
+    }
+    ctx.jobLocks.delete(jobId);
+    return true;
+}
+/**
+ * Get lock info for a job (for debugging/monitoring).
+ */
+export function getLockInfo(jobId, ctx) {
+    return ctx.jobLocks.get(jobId) ?? null;
+}
+//# sourceMappingURL=lockOperations.js.map
package/dist/application/lockOperations.js.map
@@ -0,0 +1 @@ (new source map; VLQ mappings not reproduced)
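
renewJobLockBatch, as implemented above, returns the string ids of the locks it actually renewed, so a caller can treat anything missing from the result as a lost lock. A hypothetical heartbeat sweep built on that contract follows; the helper name, the 30-second TTL and the shape of the held-jobs list are assumptions, only the renewJobLockBatch signature comes from the diff.

```ts
import { renewJobLockBatch } from './lockOperations';
import type { LockContext } from './types';
import type { JobId } from '../domain/types/job';

// Renew every held lock in one batch and report which ids survived.
function heartbeat(
  held: Array<{ id: JobId; token: string }>,
  ctx: LockContext,
): Set<string> {
  const renewed = new Set(renewJobLockBatch(held.map(h => ({ ...h, ttl: 30_000 })), ctx));
  // Ids missing from `renewed` lost their lock (expired or token mismatch) and may be
  // requeued by the server's expired-lock sweep.
  return renewed;
}
```
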
package/dist/application/operations/ack.d.ts
@@ -18,6 +18,7 @@ export interface AckContext {
     completedJobs: SetLike<JobId>;
     jobResults: MapLike<JobId, unknown>;
     jobIndex: Map<JobId, JobLocation>;
+    customIdMap?: MapLike<string, JobId>;
     totalCompleted: {
         value: bigint;
     };
@@ -33,13 +34,9 @@ export interface AckContext {
         error?: string;
     }) => void;
     onJobCompleted: (jobId: JobId) => void;
-    /** Batch notify completions - more efficient than per-job calls */
     onJobsCompleted?: (jobIds: JobId[]) => void;
-    /** Fast check if broadcast is needed - avoids function call overhead */
     needsBroadcast?: () => boolean;
-    /** Check if any jobs are waiting for dependencies */
     hasPendingDeps?: () => boolean;
-    /** Callback to re-queue repeatable jobs */
     onRepeat?: (job: Job) => void;
 }
 /**
@@ -57,7 +54,6 @@ export declare function failJob(jobId: JobId, error: string | undefined, ctx: Ac
 export declare function ackJobBatch(jobIds: JobId[], ctx: AckContext): Promise<void>;
 /**
  * Acknowledge multiple jobs with individual results - optimized batch processing
- * Same as ackJobBatch but supports passing result data for each job
  */
 export declare function ackJobBatchWithResults(items: Array<{
     id: JobId;
package/dist/application/operations/ack.d.ts.map
@@ -1 +1 @@ (regenerated source map; VLQ mappings not reproduced)
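
AckContext gains an optional customIdMap and loses several doc comments, while the batch entry points remain ackJobBatch and ackJobBatchWithResults. A sketch of how a caller might route completions through the two batch functions follows; the item type above is cut off after `id: JobId;`, so the `result` field name, the flush helper and the import paths are assumptions for illustration only.

```ts
import { ackJobBatch, ackJobBatchWithResults, type AckContext } from './ack';
import type { JobId } from '../../domain/types/job';

declare const ctx: AckContext; // internal server context, assumed available

// Split completions into those that carry a result payload and those that do not.
async function flushCompletions(done: Array<{ id: JobId; result?: unknown }>): Promise<void> {
  const withResults = done.filter(
    (d): d is { id: JobId; result: unknown } => d.result !== undefined,
  );
  const plain = done.filter(d => d.result === undefined).map(d => d.id);
  if (withResults.length > 0) await ackJobBatchWithResults(withResults, ctx); // ack and store results
  if (plain.length > 0) await ackJobBatch(plain, ctx);                        // ack without payloads
}
```
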