power-queues 2.0.3 → 2.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +671 -659
- package/dist/index.d.cts +83 -275
- package/dist/index.d.ts +83 -275
- package/dist/index.js +670 -668
- package/package.json +5 -2
package/dist/index.cjs
CHANGED
@@ -20,763 +20,775 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  // src/index.ts
  var index_exports = {};
  __export(index_exports, {
-
+ PowerQueues: () => PowerQueues
  });
  module.exports = __toCommonJS(index_exports);

- // src/
- var import_uuid = require("uuid");
- var import_full_utils = require("full-utils");
+ // src/PowerQueues.ts
  var import_power_redis = require("power-redis");
- var
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ var import_full_utils = require("full-utils");
+ var import_uuid = require("uuid");
+
+ // src/scripts.ts
+ var XAddBulk = `
+ local UNPACK = table and table.unpack or unpack
+
+ local stream = KEYS[1]
+ local maxlen = tonumber(ARGV[1])
+ local approxFlag = tonumber(ARGV[2]) == 1
+ local n = tonumber(ARGV[3])
+ local exactFlag = tonumber(ARGV[4]) == 1
+ local nomkstream = tonumber(ARGV[5]) == 1
+ local trimLimit = tonumber(ARGV[6])
+ local minidWindowMs = tonumber(ARGV[7]) or 0
+ local minidExact = tonumber(ARGV[8]) == 1
+ local idx = 9
+ local out = {}
+
+ local common_opts = {}
+ local co_len = 0
+
+ if nomkstream then
+ co_len = co_len + 1; common_opts[co_len] = 'NOMKSTREAM'
+ end
+
+ if minidWindowMs > 0 then
+ local tm = redis.call('TIME')
+ local now_ms = (tonumber(tm[1]) * 1000) + math.floor(tonumber(tm[2]) / 1000)
+ local cutoff_ms = now_ms - minidWindowMs
+ if cutoff_ms < 0 then cutoff_ms = 0 end
+ local cutoff_id = tostring(cutoff_ms) .. '-0'
+
+ co_len = co_len + 1; common_opts[co_len] = 'MINID'
+ co_len = co_len + 1; common_opts[co_len] = (minidExact and '=' or '~')
+ co_len = co_len + 1; common_opts[co_len] = cutoff_id
+ if trimLimit and trimLimit > 0 then
+ co_len = co_len + 1; common_opts[co_len] = 'LIMIT'
+ co_len = co_len + 1; common_opts[co_len] = trimLimit
+ end
+ elseif maxlen and maxlen > 0 then
+ co_len = co_len + 1; common_opts[co_len] = 'MAXLEN'
+ if exactFlag then
+ co_len = co_len + 1; common_opts[co_len] = '='
+ elseif approxFlag then
+ co_len = co_len + 1; common_opts[co_len] = '~'
+ end
+ co_len = co_len + 1; common_opts[co_len] = maxlen
+ if trimLimit and trimLimit > 0 then
+ co_len = co_len + 1; common_opts[co_len] = 'LIMIT'
+ co_len = co_len + 1; common_opts[co_len] = trimLimit
+ end
+ end
+
+ for e = 1, n do
+ local id = ARGV[idx]; idx = idx + 1
+ local num_pairs = tonumber(ARGV[idx]); idx = idx + 1
+
+ local a = {}
+ local a_len = 0
+
+ for i = 1, co_len do a_len = a_len + 1; a[a_len] = common_opts[i] end
+
+ a_len = a_len + 1; a[a_len] = id
+
+ for j = 1, (num_pairs * 2) do
+ a_len = a_len + 1; a[a_len] = ARGV[idx]; idx = idx + 1
+ end
+
+ local addedId = redis.call('XADD', stream, UNPACK(a))
+ out[#out+1] = addedId or ''
+ end
+
+ return out
+ `;
+ var Approve = `
+ local stream = KEYS[1]
+ local group = ARGV[1]
+ local delFlag = tonumber(ARGV[2]) == 1
+
+ local acked = 0
+ local nids = #ARGV - 2
+ if nids > 0 then
+ acked = tonumber(redis.call('XACK', stream, group, unpack(ARGV, 3))) or 0
+ if delFlag and nids > 0 then
+ local ok, deln = pcall(redis.call, 'XDEL', stream, unpack(ARGV, 3))
+ if not ok then
+ deln = 0
+ for i = 3, #ARGV do
+ deln = deln + (tonumber(redis.call('XDEL', stream, ARGV[i])) or 0)
  end
  end
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ end
+ end
+ return acked
+ `;
+ var IdempotencyAllow = `
+ local doneKey = KEYS[1]
+ local lockKey = KEYS[2]
+ local startKey = KEYS[3]
+
+ if redis.call('EXISTS', doneKey) == 1 then
+ return 1
+ end
+
+ local ttl = tonumber(ARGV[1]) or 0
+ if ttl <= 0 then return 0 end
+
+ local ok = redis.call('SET', lockKey, ARGV[2], 'NX', 'PX', ttl)
+ if ok then
+ if startKey and startKey ~= '' then
+ redis.call('SET', startKey, 1, 'PX', ttl)
+ end
+ return 2
+ else
+ return 0
+ end
+ `;
+ var IdempotencyStart = `
+ local lockKey = KEYS[1]
+ local startKey = KEYS[2]
+ if redis.call('GET', lockKey) == ARGV[1] then
+ local ttl = tonumber(ARGV[2]) or 0
+ if ttl > 0 then
+ redis.call('SET', startKey, 1, 'PX', ttl)
+ redis.call('PEXPIRE', lockKey, ttl)
+ else
+ redis.call('SET', startKey, 1)
+ end
+ return 1
+ end
+ return 0
+ `;
+ var IdempotencyDone = `
+ local doneKey = KEYS[1]
+ local lockKey = KEYS[2]
+ local startKey = KEYS[3]
+ redis.call('SET', doneKey, 1)
+ local ttlSec = tonumber(ARGV[1]) or 0
+ if ttlSec > 0 then redis.call('EXPIRE', doneKey, ttlSec) end
+ if redis.call('GET', lockKey) == ARGV[2] then
+ redis.call('DEL', lockKey)
+ if startKey then redis.call('DEL', startKey) end
+ end
+ return 1
+ `;
+ var IdempotencyFree = `
+ local lockKey = KEYS[1]
+ local startKey = KEYS[2]
+ if redis.call('GET', lockKey) == ARGV[1] then
+ redis.call('DEL', lockKey)
+ if startKey then redis.call('DEL', startKey) end
+ return 1
+ end
+ return 0
+ `;
+ var SelectStuck = `
+ local stream = KEYS[1]
+ local group = ARGV[1]
+ local consumer = ARGV[2]
+ local pendingIdleMs = tonumber(ARGV[3])
+ local count = tonumber(ARGV[4]) or 0
+ if count < 1 then count = 1 end
+
+ local timeBudgetMs = tonumber(ARGV[5]) or 15
+ local t0 = redis.call('TIME')
+ local start_ms = (tonumber(t0[1]) * 1000) + math.floor(tonumber(t0[2]) / 1000)
+
+ local results = {}
+ local collected = 0
+ local start_id = '0-0'
+ local iters = 0
+ local max_iters = math.max(16, math.ceil(count / 100))
+
+ local function time_exceeded()
+ local t1 = redis.call('TIME')
+ local now_ms = (tonumber(t1[1]) * 1000) + math.floor(tonumber(t1[2]) / 1000)
+ return (now_ms - start_ms) >= timeBudgetMs
+ end
+
+ while (collected < count) and (iters < max_iters) do
+ local to_claim = count - collected
+ if to_claim < 1 then break end
+
+ local claim = redis.call('XAUTOCLAIM', stream, group, consumer, pendingIdleMs, start_id, 'COUNT', to_claim)
+ iters = iters + 1
+
+ local bucket = nil
+ if claim then
+ bucket = claim[2]
+ end
+ if bucket and #bucket > 0 then
+ for i = 1, #bucket do
+ results[#results+1] = bucket[i]
  end
-
-
- }
- getRequeueScript() {
- return `
- -- KEYS: 1=processing, 2=processingVt, 3=ready
- -- ARGV: 1=now, 2=limit
- local processing = KEYS[1]
- local vt = KEYS[2]
- local ready = KEYS[3]
- local now = tonumber(ARGV[1])
- local limit = tonumber(ARGV[2])
+ collected = #results
+ end

-
-
-
-
-
-
+ local next_id = claim and claim[1] or start_id
+ if next_id == start_id then
+ local s, seq = string.match(start_id, '^(%d+)%-(%d+)$')
+ if s and seq then
+ start_id = s .. '-' .. tostring(tonumber(seq) + 1)
+ else
+ start_id = '0-1'
  end
-
-
-
- getPromoteScript() {
- return `
- -- KEYS: 1=delayed, 2=ready
- -- ARGV: 1=now, 2=limit
- local delayed = KEYS[1]
- local ready = KEYS[2]
- local now = tonumber(ARGV[1])
- local limit = tonumber(ARGV[2])
+ else
+ start_id = next_id
+ end

-
-
-
-
-
+ if time_exceeded() then
+ break
+ end
+ end
+
+ local left = count - collected
+ if left > 0 then
+ local xr = redis.call('XREADGROUP', 'GROUP', group, consumer, 'COUNT', left, 'STREAMS', stream, '>')
+ if xr and xr[1] and xr[1][2] then
+ local entries = xr[1][2]
+ for i = 1, #entries do
+ results[#results+1] = entries[i]
  end
-
-
+ end
+ end
+
+ return results
+ `;
+
+ // src/PowerQueues.ts
+ var PowerQueues = class extends import_power_redis.PowerRedis {
+ constructor() {
+ super(...arguments);
+ this.abort = new AbortController();
+ this.strictCheckingConnection = ["true", "on", "yes", "y", "1"].includes(String(process.env.REDIS_STRICT_CHECK_CONNECTION ?? "").trim().toLowerCase());
+ this.scripts = {};
+ this.addingBatchTasksCount = 800;
+ this.addingBatchKeysLimit = 1e4;
+ this.idemOn = true;
+ this.idemKey = "";
+ this.workerExecuteLockTimeoutMs = 18e4;
+ this.workerCacheTaskTimeoutMs = 60;
+ this.approveBatchTasksCount = 2e3;
+ this.removeOnExecuted = false;
+ this.executeBatchAtOnce = false;
+ this.executeJobStatus = false;
+ this.executeJobStatusTtlSec = 300;
+ this.consumerHost = "host";
+ this.stream = "stream";
+ this.group = "group";
+ this.workerBatchTasksCount = 200;
+ this.recoveryStuckTasksTimeoutMs = 6e4;
+ this.workerLoopIntervalMs = 5e3;
+ this.workerSelectionTimeoutMs = 80;
+ }
+ async onSelected(data) {
+ return data;
  }
- async
- if (!force && (this.reserveSha || this.reserveShaRpoplpush || !(0, import_full_utils.isFunc)(this.redis?.script))) {
- return;
- }
- this.reserveSha = void 0;
- this.reserveShaRpoplpush = void 0;
- try {
- this.reserveSha = await this.redis?.script("LOAD", this.getReserveScriptLMOVE());
- } catch {
- this.reserveShaRpoplpush = await this.redis?.script("LOAD", this.getReserveScriptRPOPLPUSH());
- }
+ async onExecute(id, payload, createdAt, job, key) {
  }
- async
- if (!force && this.requeueSha) {
- return;
- }
- const scriptFn = this.redis?.script;
- if (!scriptFn) {
- return;
- }
- this.requeueSha = await scriptFn("LOAD", this.getRequeueScript());
+ async onExecuted(data) {
  }
- async
- if (!force && this.promoteSha) {
- return;
- }
- const scriptFn = this.redis?.script;
- if (!scriptFn) {
- return;
- }
- this.promoteSha = await scriptFn("LOAD", this.getPromoteScript());
+ async onSuccess(id, payload, createdAt, job, key) {
  }
- async
-
-
-
-
-
-
-
-
-
-
-
+ async runQueue() {
+ await this.createGroup("0-0");
+ await this.consumerLoop();
+ }
+ async consumerLoop() {
+ const signal = this.signal();
+ while (!signal?.aborted) {
+ try {
+ const tasks = await this.select();
+ if (!Array.isArray(tasks) || !(tasks.length > 0)) {
+ await (0, import_full_utils.wait)(600);
+ continue;
+ }
+ const tasksP = await this.onSelected(tasks);
+ const ids = await this.execute(Array.isArray(tasksP) && tasksP.length > 0 ? tasksP : tasks);
+ if (Array.isArray(ids) && ids.length > 0) {
+ await this.approve(ids);
+ }
+ } catch (err) {
+ await (0, import_full_utils.wait)(600);
  }
- } catch {
  }
-
- }
-
-
-
-
-
-
+ }
+ async addTasks(queueName, data, opts = {}) {
+ if (!Array.isArray(data) || !(data.length > 0)) {
+ throw new Error("Tasks is not filled.");
+ }
+ if (typeof queueName !== "string" || !(queueName.length > 0)) {
+ throw new Error("Queue name is required.");
+ }
+ const batches = this.buildBatches(data);
+ const result = new Array(data.length);
+ const promises = [];
+ let cursor = 0;
+ for (const batch of batches) {
+ const start = cursor;
+ const end = start + batch.length;
+ cursor = end;
+ promises.push(async () => {
+ const partIds = await this.xaddBatch(queueName, ...this.payloadBatch(batch, opts));
+ for (let k = 0; k < partIds.length; k++) {
+ result[start + k] = partIds[k];
+ }
+ });
  }
-
-
-
-
-
- await ensure(true);
- const sha2 = shaGetter();
- if (!sha2) {
- throw new Error("EVALSHA NOSCRIPT and reload failed (no SHA)");
+ const runners = Array.from({ length: promises.length }, async () => {
+ while (promises.length) {
+ const promise = promises.shift();
+ if (promise) {
+ await promise();
  }
- return await evalshaFn(sha2, numKeys, ...keysAndArgs.map(String));
  }
-
-
+ });
+ await Promise.all(runners);
+ return result;
  }
- async
- const
-
-
-
-
-
-
-
-
-
-
-
+ async loadScripts(full = false) {
+ const scripts = full ? [
+ ["XAddBulk", XAddBulk],
+ ["Approve", Approve],
+ ["IdempotencyAllow", IdempotencyAllow],
+ ["IdempotencyStart", IdempotencyStart],
+ ["IdempotencyDone", IdempotencyDone],
+ ["IdempotencyFree", IdempotencyFree],
+ ["SelectStuck", SelectStuck]
+ ] : [
+ ["XAddBulk", XAddBulk]
+ ];
+ for (const [name, code] of scripts) {
+ await this.loadScript(this.saveScript(name, code));
+ }
+ }
+ async loadScript(code) {
+ for (let i = 0; i < 3; i++) {
+ try {
+ return await this.redis.script("LOAD", code);
+ } catch (e) {
+ if (i === 2) {
+ throw e;
+ }
+ await new Promise((r) => setTimeout(r, 10 + Math.floor(Math.random() * 40)));
  }
- } catch {
- }
- try {
- await this.redis.zadd(key, score, member);
- } catch {
  }
+ throw new Error("Load lua script failed.");
  }
-
-
-
- return;
+ saveScript(name, codeBody) {
+ if (typeof codeBody !== "string" || !(codeBody.length > 0)) {
+ throw new Error("Script body is empty.");
  }
-
-
- const t = setInterval(() => {
- this.extendVisibility(vtKey, raw, this.visibilityTimeoutSec).catch(() => {
- });
- }, periodMs);
- t.unref?.();
- this.heartbeatTimers.set(task.id, t);
- }
- stopHeartbeat(task) {
- const t = this.heartbeatTimers.get(task.id);
- if (t) {
- clearInterval(t);
- }
- this.heartbeatTimers.delete(task.id);
+ this.scripts[name] = { codeBody };
+ return codeBody;
  }
- async
- if (!this.
-
+ async runScript(name, keys, args, defaultCode) {
+ if (!this.scripts[name]) {
+ if (typeof defaultCode !== "string" || !(defaultCode.length > 0)) {
+ throw new Error(`Undefined script "${name}". Save it before executing.`);
+ }
+ this.saveScript(name, defaultCode);
  }
- if (!
-
+ if (!this.scripts[name].codeReady) {
+ this.scripts[name].codeReady = await this.loadScript(this.scripts[name].codeBody);
  }
-
-
+ try {
+ return await this.redis.evalsha(this.scripts[name].codeReady, keys.length, ...keys, ...args);
+ } catch (err) {
+ if (String(err?.message || "").includes("NOSCRIPT")) {
+ this.scripts[name].codeReady = await this.loadScript(this.scripts[name].codeBody);
+ return await this.redis.evalsha(this.scripts[name].codeReady, keys.length, ...keys, ...args);
+ }
+ throw err;
  }
-
-
-
-
-
-
+ }
+ async xaddBatch(queueName, ...batches) {
+ return await this.runScript("XAddBulk", [queueName], batches, XAddBulk);
+ }
+ payloadBatch(data, opts) {
+ const maxlen = Math.max(0, Math.floor(opts?.maxlen ?? 0));
+ const approx = opts?.exact ? 0 : opts?.approx !== false ? 1 : 0;
+ const exact = opts?.exact ? 1 : 0;
+ const nomkstream = opts?.nomkstream ? 1 : 0;
+ const trimLimit = Math.max(0, Math.floor(opts?.trimLimit ?? 0));
+ const minidWindowMs = Math.max(0, Math.floor(opts?.minidWindowMs ?? 0));
+ const minidExact = opts?.minidExact ? 1 : 0;
+ const argv = [
+ String(maxlen),
+ String(approx),
+ String(data.length),
+ String(exact),
+ String(nomkstream),
+ String(trimLimit),
+ String(minidWindowMs),
+ String(minidExact)
+ ];
+ for (const item of data) {
+ const entry = item;
+ const id = entry.id ?? "*";
+ let flat;
+ if ("flat" in entry && Array.isArray(entry.flat) && entry.flat.length > 0) {
+ flat = entry.flat;
+ if (flat.length % 2 !== 0) {
+ throw new Error('Property "flat" must contain an even number of realKeysLength (field/value pairs).');
  }
-
-
+ } else if ("payload" in entry && typeof entry.payload === "object" && Object.keys(entry.payload || {}).length > 0) {
+ flat = [];
+ for (const [k, v] of Object.entries(entry.payload)) {
+ flat.push(k, v);
  }
+ } else {
+ throw new Error('Task must have "payload" or "flat".');
  }
-
-
-
- const res = await tryEval();
- if ((0, import_full_utils.isArr)(res)) {
- return Array.from(res).map(String);
+ const pairs = flat.length / 2;
+ if (pairs <= 0) {
+ throw new Error('Task must have "payload" or "flat".');
  }
-
-
-
-
-
-
-
+ argv.push(String(id));
+ argv.push(String(pairs));
+ for (const token of flat) {
+ argv.push(!token ? "" : typeof token === "string" && token.length > 0 ? token : String(token));
+ }
+ }
+ return argv;
+ }
+ buildBatches(tasks) {
+ const job = (0, import_uuid.v4)();
+ const batches = [];
+ let batch = [], realKeysLength = 0;
+ for (let task of tasks) {
+ let entry = task;
+ if (this.idemOn) {
+ const createdAt = entry?.createdAt || Date.now();
+ let idemKey = entry?.idemKey || (0, import_uuid.v4)();
+ if (typeof entry.payload === "object") {
+ if (this.idemKey && typeof entry.payload[this.idemKey] === "string" && entry.payload[this.idemKey].length > 0) {
+ idemKey = entry.payload[this.idemKey];
  }
-
+ entry = {
+ ...entry,
+ payload: {
+ payload: JSON.stringify(entry.payload),
+ createdAt,
+ job,
+ idemKey
+ }
+ };
+ } else if (Array.isArray(entry.flat)) {
+ entry.flat.push("createdAt");
+ entry.flat.push(String(createdAt));
+ entry.flat.push("job");
+ entry.flat.push(job);
+ entry.flat.push("idemKey");
+ entry.flat.push(idemKey);
  }
  }
-
-
-
-
-
- break;
+ const reqKeysLength = this.keysLength(entry);
+ if (batch.length && (batch.length >= this.addingBatchTasksCount || realKeysLength + reqKeysLength > this.addingBatchKeysLimit)) {
+ batches.push(batch);
+ batch = [];
+ realKeysLength = 0;
  }
-
-
-
-
-
-
+ batch.push(entry);
+ realKeysLength += reqKeysLength;
+ }
+ if (batch.length) {
+ batches.push(batch);
+ }
+ return batches;
+ }
+ keysLength(task) {
+ return 2 + ("flat" in task && Array.isArray(task.flat) && task.flat.length ? task.flat.length : Object.keys(task).length * 2);
+ }
+ async success(id, payload, createdAt, job, key) {
+ if (this.executeJobStatus) {
+ await this.status(id, payload, createdAt, job, key);
+ }
+ await this.onSuccess(id, payload, createdAt, job, key);
+ }
+ async status(id, payload, createdAt, job, key) {
+ const prefix = `s:${this.stream}:`;
+ const { ready = 0, ok = 0 } = await this.getMany(prefix);
+ await this.setMany([{ key: `${prefix}ready`, value: ready + 1 }, { key: `${prefix}ok`, value: ok + 1 }], this.executeJobStatusTtlSec);
+ }
+ async execute(tasks) {
+ const result = [];
+ let contended = 0, promises = [];
+ for (const [id, payload, createdAt, job, idemKey] of tasks) {
+ if (this.executeBatchAtOnce) {
+ promises.push((async () => {
+ const r = await this.executeProcess(id, payload, createdAt, job, idemKey);
+ if (r.id) {
+ result.push(id);
+ } else if (r.contended) {
+ contended++;
+ }
+ })());
+ } else {
+ const r = await this.executeProcess(id, payload, createdAt, job, idemKey);
+ if (r.id) {
+ result.push(id);
+ } else if (r.contended) {
+ contended++;
+ }
  }
- await tx.exec();
- }
- return moved;
- }
- async ackProcessing(processing, processingVt, raw) {
- if (!this.checkConnection()) {
- throw new Error("Redis connection error.");
- }
- const tx = this.redis?.multi();
- tx.lrem(processing, 1, raw);
- tx.zrem(processingVt, raw);
- await tx.exec();
- return;
- }
- async requeueExpired(processing, processingVt, ready, nowTs, chunk = 1e3) {
- if (!this.checkConnection()) {
- throw new Error("Redis connection error.");
  }
- const now = (0, import_full_utils.isNumP)(nowTs) ? nowTs : this.nowSec();
  try {
-
- ()
- (force) => this.ensureRequeueScript(!!force),
- 3,
- [processing, processingVt, ready, String(now), String(chunk)]
- );
- return (0, import_full_utils.isNumP)(moved) ? moved : 0;
- } catch {
- const expired = await this.redis?.zrangebyscore(processingVt, 0, now, "LIMIT", 0, chunk);
- if (!(0, import_full_utils.isArrFilled)(expired)) {
- return 0;
+ if (this.executeBatchAtOnce && promises.length > 0) {
+ await Promise.all(promises);
  }
-
-
-
- tx.zrem(processingVt, raw);
- tx.rpush(ready, raw);
+ await this.onExecuted(tasks);
+ if ((!Array.isArray(result) || !(result.length > 0)) && contended > tasks.length >> 1) {
+ await this.waitAbortable(15 + Math.floor(Math.random() * 35) + Math.min(250, 15 * contended + Math.floor(Math.random() * 40)));
  }
-
- return expired.length;
+ } catch (err) {
  }
+ return result;
  }
- async
- if (
-
- }
-
-
-
-
-
- 2,
- [delayed, ready, String(now), String(chunk)]
- );
- return (0, import_full_utils.isNumP)(promoted) ? promoted : 0;
- } catch {
- const due = await this.redis?.zrangebyscore(delayed, 0, now, "LIMIT", 0, chunk);
- if (!(0, import_full_utils.isArrFilled)(due)) {
- return 0;
- }
- const tx = this.redis?.multi();
- for (const raw of due) {
- tx.zrem(delayed, raw);
- tx.rpush(ready, raw);
+ async executeProcess(id, payload, createdAt, job, key) {
+ if (key) {
+ return await this.idempotency(id, payload, createdAt, job, key);
+ } else {
+ try {
+ await this.onExecute(id, payload, createdAt, job, key);
+ await this.success(id, payload, createdAt, job, key);
+ return { id };
+ } catch (err) {
  }
- await tx.exec();
- return due.length;
  }
+ return {};
  }
- async
- if (!
-
+ async approve(ids) {
+ if (!Array.isArray(ids) || !(ids.length > 0)) {
+ return 0;
  }
- const
-
-
-
+ const approveBatchTasksCount = Math.max(500, Math.min(4e3, this.approveBatchTasksCount));
+ let total = 0, i = 0;
+ while (i < ids.length) {
+ const room = Math.min(approveBatchTasksCount, ids.length - i);
+ const part = ids.slice(i, i + room);
+ const approved = await this.runScript("Approve", [this.stream], [this.group, this.removeOnExecuted ? "1" : "0", ...part], Approve);
+ total += Number(approved || 0);
+ i += room;
  }
- return
- }
- async extendVisibility(processingVt, raw, visibilitySec) {
- const deadline = this.nowSec() + Math.max(1, visibilitySec);
- await this.zaddCompatXXCH(processingVt, deadline, raw);
+ return total;
  }
-
-
-
+ async idempotency(id, payload, createdAt, job, key) {
+ const keys = this.idempotencyKeys(key);
+ const allow = await this.idempotencyAllow(keys);
+ if (allow === 1) {
+ return { id };
+ } else if (allow === 0) {
+ let ttl = -2;
+ try {
+ ttl = await this.redis.pttl(keys.startKey);
+ } catch (err) {
+ }
+ await this.waitAbortable(ttl);
+ return { contended: true };
  }
-
-
- throw new Error(`Queue "${queueName}" already started.`);
+ if (!await this.idempotencyStart(keys)) {
+ return { contended: true };
  }
-
- this.runners.set(queueName, r);
- this.loop(queueName, r).catch(() => {
- r.running = false;
+ const heartbeat = this.heartbeat(keys) || (() => {
  });
-
-
-
-
-
-
+ try {
+ await this.onExecute(id, payload, createdAt, job, key);
+ await this.idempotencyDone(keys);
+ await this.success(id, payload, createdAt, job, key);
+ return { id };
+ } catch (err) {
+ try {
+ await this.idempotencyFree(keys);
+ } catch (err2) {
+ }
+ } finally {
+ heartbeat();
  }
  }
-
-
-
- }
-
-
- }
+ idempotencyKeys(key) {
+ const prefix = `q:${this.stream.replace(/[^\w:\-]/g, "_")}:`;
+ const keyP = key.replace(/[^\w:\-]/g, "_");
+ const doneKey = `${prefix}done:${keyP}`;
+ const lockKey = `${prefix}lock:${keyP}`;
+ const startKey = `${prefix}start:${keyP}`;
+ const token = `${this.consumer()}:${Date.now().toString(36)}:${Math.random().toString(36).slice(2)}`;
  return {
-
-
-
-
-
- currentAttempt: (0, import_full_utils.isNumPZ)(data.currentAttempt) ? data.currentAttempt : 0,
- chain: (0, import_full_utils.isObjFilled)(data.chain) && (0, import_full_utils.isArrFilled)(data.chain.queues) && (0, import_full_utils.isNumPZ)(data.chain.index) ? data.chain : {
- queues: [],
- index: 0
- },
- payload: (0, import_full_utils.isObjFilled)(data.payload) ? data.payload : {},
- progress: {
- createdAt: Date.now(),
- successAt: 0,
- errorAt: 0,
- failAt: 0,
- fatalAt: 0,
- retries: [],
- chain: [],
- ...(0, import_full_utils.isObjFilled)(data.progress) ? data.progress : {}
- },
- result: (0, import_full_utils.isObjFilled)(data.result) ? data.result : {}
+ prefix,
+ doneKey,
+ lockKey,
+ startKey,
+ token
  };
  }
- async
- const
-
- return await this.enqueue(ready, delayed, this.buildTask(data), (0, import_full_utils.isNumP)(delaySec) ? delaySec : 0);
- }
- async addTasks(data) {
- if (!this.checkConnection()) {
- throw new Error("Redis connection error.");
- }
- if (!(0, import_full_utils.isObjFilled)(data) || !(0, import_full_utils.isStrFilled)(data.queueName)) {
- throw new Error("Queue name is not valid.");
- }
- if (!(0, import_full_utils.isArrFilled)(data.payloads)) {
- return 0;
- }
- const queueName = String(data.queueName);
- const ready = this.readyKey(queueName);
- const delayed = this.delayedKey(queueName);
- const now = this.nowSec();
- const uniformDelay = (0, import_full_utils.isNumP)(data.delaySec) ? Math.max(0, Number(data.delaySec)) : void 0;
- const perItemDelays = (0, import_full_utils.isArr)(data.delaySec) ? data.delaySec.map((v) => Math.max(0, Number(v || 0))) : void 0;
- const batchSize = Math.max(1, Math.min(this.portionLength, 1e3));
- let idx = 0, total = 0;
- while (idx < data.payloads.length) {
- const end = Math.min(idx + batchSize, data.payloads.length);
- const tx = this.redis?.multi();
- for (let i = idx; i < end; i++) {
- const item = data.payloads[i];
- let partial;
- if ((0, import_full_utils.isObjFilled)(item) && Object.prototype.hasOwnProperty.call(item, "payload")) {
- partial = { ...item, queueName };
- } else {
- partial = { queueName, payload: item };
- }
- const task = this.buildTask(partial);
- const raw = this.toPayload(task);
- let d = 0;
- if ((0, import_full_utils.isNumP)(uniformDelay)) {
- d = uniformDelay;
- } else if ((0, import_full_utils.isArr)(perItemDelays)) {
- d = Number(perItemDelays[i] || 0);
- }
- if (d > 0) {
- tx.zadd(delayed, now + d, raw);
- } else {
- tx.rpush(ready, raw);
- }
- total++;
- }
- await tx.exec();
- idx = end;
- }
- return total;
- }
- async iteration(tasks) {
- const tasksProcessed = await this.beforeIterationExecution(tasks);
- const limit = Math.max(1, Number(this.concurrency) || 1);
- let i = 0;
- while (i < tasksProcessed.length) {
- const slice = tasksProcessed.slice(i, i + limit);
- await Promise.all(slice.map((task) => this.logic(task)));
- i += limit;
- }
- await this.afterIterationExecution(tasksProcessed, tasksProcessed.map((t) => t.result ?? {}));
- }
- async beforeIterationExecution(data) {
- return data;
- }
- async afterIterationExecution(data, results) {
- }
- async beforeExecution(task) {
- return task;
- }
- async afterExecution(task, result) {
- return result;
- }
- async execute(task) {
- return {};
- }
- async onRetry(task) {
- }
- async onError(err, task) {
- }
- async onFail(err, task) {
+ async idempotencyAllow(keys) {
+ const res = await this.runScript("IdempotencyAllow", [keys.doneKey, keys.lockKey, keys.startKey], [String(this.workerExecuteLockTimeoutMs), keys.token], IdempotencyAllow);
+ return Number(res || 0);
  }
- async
+ async idempotencyStart(keys) {
+ const res = await this.runScript("IdempotencyStart", [keys.lockKey, keys.startKey], [keys.token, String(this.workerExecuteLockTimeoutMs)], IdempotencyStart);
+ return Number(res || 0) === 1;
  }
- async
+ async idempotencyDone(keys) {
+ await this.runScript("IdempotencyDone", [keys.doneKey, keys.lockKey, keys.startKey], [String(this.workerCacheTaskTimeoutMs), keys.token], IdempotencyDone);
  }
- async
+ async idempotencyFree(keys) {
+ await this.runScript("IdempotencyFree", [keys.lockKey, keys.startKey], [keys.token], IdempotencyFree);
  }
- async
- }
- async logic(task) {
- let data = task;
+ async createGroup(from = "$") {
  try {
-
- const before = data?.result ?? {};
- const after = await this.execute(data);
- data.result = {
- ...(0, import_full_utils.isObjFilled)(before) ? before : {},
- ...(0, import_full_utils.isObjFilled)(after) ? after : {}
- };
- await this.success(data, data.result);
- return await this.afterExecution(data, data.result);
+ await this.redis.xgroup("CREATE", this.stream, this.group, from, "MKSTREAM");
  } catch (err) {
-
-
-
- await this.error(err2, data);
- }
- } finally {
- try {
- this.stopHeartbeat(data);
- await this.ack(data).catch(() => {
- });
- } catch {
+ const msg = String(err?.message || "");
+ if (!msg.includes("BUSYGROUP")) {
+ throw err;
  }
  }
- return {};
- }
- jitteredBackoffSec(attempt) {
- const base = Math.max(1, Number(this.retryBaseSec) || 1);
- const maxD = Math.max(base, Number(this.retryMaxSec) || 3600);
- const pow = Math.min(maxD, base * Math.pow(2, Math.max(0, attempt - 1)));
- const jitter = Math.floor(Math.random() * base);
- return Math.min(maxD, pow + jitter);
  }
- async
-
-
-
+ async select() {
+ let entries = await this.selectStuck();
+ if (!entries?.length) {
+ entries = await this.selectFresh();
  }
-
- try {
- if (task.currentAttempt < maxAttempts - 1) {
- const taskProcessed = { ...task, currentAttempt: task.currentAttempt + 1 };
- const delaySec = this.jitteredBackoffSec(taskProcessed.currentAttempt);
- await this.addTask(taskProcessed, delaySec);
- await this.onRetry(taskProcessed);
- return;
- }
- } catch (err) {
- await this.fail(err, task);
- return;
- }
- await this.fail(new Error("The attempt limit has been reached."), task);
+ return this.normalizeEntries(entries);
  }
- async
+ async selectStuck() {
  try {
- await this.
-
- }
-
-
- this.processingRaw.delete(t.id);
+ const res = await this.runScript("SelectStuck", [this.stream], [this.group, this.consumer(), String(this.recoveryStuckTasksTimeoutMs), String(this.workerBatchTasksCount), String(this.workerSelectionTimeoutMs)], SelectStuck);
+ return Array.isArray(res) ? res : [];
+ } catch (err) {
+ if (String(err?.message || "").includes("NOGROUP")) {
+ await this.createGroup();
  }
  }
+ return [];
  }
- async
+ async selectFresh() {
+ let entries = [];
  try {
- await this.
-
-
-
-
-
-
-
-
-
-
-
-
-
+ const res = await this.redis.xreadgroup(
+ "GROUP",
+ this.group,
+ this.consumer(),
+ "BLOCK",
+ Math.max(2, this.workerLoopIntervalMs | 0),
+ "COUNT",
+ this.workerBatchTasksCount,
+ "STREAMS",
+ this.stream,
+ ">"
+ );
+ if (!res?.[0]?.[1]?.length) {
+ return [];
  }
-
-
-
-
- }
-
-
- try {
- await this.addTask({
- ...task,
- queueName: [task.queueName, task.iterationId, "fail", "list"].join(":"),
- currentAttempt: 0,
- payload: {
- ...task.payload,
- errorMessage: String(err?.message ?? "")
- }
- });
- await this.onFail(err, task);
- } catch (err2) {
- try {
- await this.onFatal(err2, task);
- } catch {
+ entries = res?.[0]?.[1] ?? [];
+ if (!entries?.length) {
+ return [];
+ }
+ } catch (err) {
+ if (String(err?.message || "").includes("NOGROUP")) {
+ await this.createGroup();
  }
  }
-
- await this.status(task, "fail");
- } catch {
- }
+ return entries;
  }
- async
-
-
-
-
- successAt: Date.now()
+ async waitAbortable(ttl) {
+ return new Promise((resolve) => {
+ const signal = this.signal();
+ if (signal?.aborted) {
+ return resolve();
  }
-
-
-
- const currentIndex = taskProcessed.chain.index;
- const newIndex = currentIndex + 1;
- taskProcessed.progress.chain.push(Date.now());
- if (currentIndex === taskProcessed.chain.queues.length - 1) {
- await this.status(taskProcessed, "success");
- await this.onChainSuccess(taskProcessed, result);
- } else if (newIndex <= taskProcessed.chain.queues.length - 1) {
- const newQueueName = taskProcessed.chain.queues[newIndex];
- if ((0, import_full_utils.isStrFilled)(newQueueName)) {
- await this.addTask({
- ...taskProcessed,
- queueName: newQueueName,
- currentAttempt: 0,
- chain: {
- ...taskProcessed.chain,
- index: newIndex
- },
- result
- });
- } else {
- await this.fail(new Error("Next queue format error."), taskProcessed);
- }
+ const t = setTimeout(() => {
+ if (signal) {
+ signal.removeEventListener("abort", onAbort);
  }
-
-
-
-
-
-
- await this.status(taskProcessed, "fatal");
- } catch {
- }
- try {
- await this.onFatal(err, taskProcessed);
- } catch {
+ resolve();
+ }, ttl > 0 ? 25 + Math.floor(Math.random() * 50) : 5 + Math.floor(Math.random() * 15));
+ t.unref?.();
+ function onAbort() {
+ clearTimeout(t);
+ resolve();
  }
-
+ signal?.addEventListener("abort", onAbort, { once: true });
+ });
  }
-
- if (
-
- }
- const processedKey = this.toKeyString(task.queueName, task.iterationId, "processed");
- const categoryKey = this.toKeyString(task.queueName, task.iterationId, category);
- await this.redis?.incr(processedKey);
- await this.redis?.incr(categoryKey);
- await this.redis?.expire(processedKey, this.expireStatusSec);
- await this.redis?.expire(categoryKey, this.expireStatusSec);
- }
- async loop(queueName, runner) {
- if (!(0, import_full_utils.isStrFilled)(queueName)) {
- throw new Error(`Queue name is not valid: "${queueName}"; Type: "${typeof queueName}".`);
+ heartbeat(keys) {
+ if (this.workerExecuteLockTimeoutMs <= 0) {
+ return;
  }
-
- const
- const
-
-
-
- await (0, import_full_utils.wait)(this.iterationTimeout);
- continue;
+ let timer, alive = true, hbFails = 0;
+ const workerHeartbeatTimeoutMs = Math.max(1e3, Math.floor(Math.max(5e3, this.workerExecuteLockTimeoutMs | 0) / 4));
+ const stop = () => {
+ alive = false;
+ if (timer) {
+ clearTimeout(timer);
  }
-
-
-
+ };
+ const onAbort = () => stop();
+ const signal = this.signal();
+ signal?.addEventListener?.("abort", onAbort, { once: true });
+ const tick = async () => {
+ if (!alive) {
+ return;
  }
  try {
- await this.
+ const r = await this.heartbeat(keys);
+ hbFails = r ? 0 : hbFails + 1;
+ if (hbFails >= 3) {
+ throw new Error("Heartbeat lost.");
+ }
  } catch {
-
-
-
-
- if (!(0, import_full_utils.isArrFilled)(data)) {
- await (0, import_full_utils.wait)(this.iterationTimeout);
- continue;
+ hbFails++;
+ if (hbFails >= 6) {
+ stop();
+ return;
  }
- await this.iteration(data);
- } catch (err) {
- await this.iterationError(err, queueName, data);
- await (0, import_full_utils.wait)(this.iterationTimeout);
  }
-
+ timer = setTimeout(tick, workerHeartbeatTimeoutMs).unref?.();
+ };
+ timer = setTimeout(tick, workerHeartbeatTimeoutMs).unref?.();
+ return () => {
+ signal?.removeEventListener?.("abort", onAbort);
+ stop();
+ };
  }
-
-
- const processing = this.processingKey(queueName);
- const processingVt = this.processingVtKey(queueName);
- const raws = await this.reserveMany(ready, processing, processingVt, this.portionLength, this.visibilityTimeoutSec);
- if (!(0, import_full_utils.isArrFilled)(raws)) {
+ normalizeEntries(raw) {
+ if (!Array.isArray(raw)) {
  return [];
  }
-
-
- const
-
-
-
-
-
-
+ return Array.from(raw || []).map((e) => {
+ const id = Buffer.isBuffer(e?.[0]) ? e[0].toString() : e?.[0];
+ const kvRaw = e?.[1] ?? [];
+ const kv = Array.isArray(kvRaw) ? kvRaw.map((x) => Buffer.isBuffer(x) ? x.toString() : x) : [];
+ return [id, kv, 0, "", ""];
+ }).filter(([id, kv]) => typeof id === "string" && id.length > 0 && Array.isArray(kv) && (kv.length & 1) === 0).map(([id, kv]) => {
+ const values = this.values(kv);
+ const { idemKey = "", createdAt, job, ...data } = this.payload(values);
+ return [id, data, createdAt, job, idemKey];
+ });
+ }
+ values(value) {
+ const result = {};
+ for (let i = 0; i < value.length; i += 2) {
+ result[value[i]] = value[i + 1];
  }
- return
+ return result;
  }
-
+ payload(data) {
  try {
-
-
- return;
- }
- this.processingRaw.delete(task.id);
- await this.ackProcessing(this.processingKey(task.queueName), this.processingVtKey(task.queueName), raw);
- } catch {
+ return JSON.parse(data?.payload);
+ } catch (err) {
  }
+ return data;
+ }
+ signal() {
+ return this.abort.signal;
+ }
+ consumer() {
+ return `${String(this.consumerHost || "host")}:${process.pid}`;
+ }
  }
  };
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
-
+ PowerQueues
  });