@valentinkolb/sync 2.2.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +144 -193
- package/browser/ephemeral.js +472 -0
- package/browser/index.js +21 -0
- package/browser/job.js +14687 -0
- package/browser/mutex.js +165 -0
- package/browser/queue.js +342 -0
- package/browser/ratelimit.js +124 -0
- package/browser/registry.js +662 -0
- package/browser/retry.js +94 -0
- package/browser/scheduler.js +988 -0
- package/browser/store.js +61 -0
- package/browser/topic.js +359 -0
- package/index.js +1 -18531
- package/package.json +19 -4
- package/src/browser/ephemeral.d.ts +101 -0
- package/src/browser/index.d.ts +10 -0
- package/src/browser/internal/emitter.d.ts +11 -0
- package/src/browser/internal/event-log.d.ts +33 -0
- package/src/browser/internal/id.d.ts +9 -0
- package/src/browser/internal/sleep.d.ts +2 -0
- package/src/browser/job.d.ts +107 -0
- package/src/browser/mutex.d.ts +28 -0
- package/src/browser/queue.d.ts +67 -0
- package/src/browser/ratelimit.d.ts +24 -0
- package/src/browser/registry.d.ts +131 -0
- package/src/browser/retry.d.ts +19 -0
- package/src/browser/scheduler.d.ts +164 -0
- package/src/browser/store.d.ts +17 -0
- package/src/browser/topic.d.ts +65 -0
|
@@ -0,0 +1,988 @@
|
|
|
1
|
+
// src/browser/store.ts
// In-memory key/value store with optional per-key TTL expiry.
// Expired entries are dropped both eagerly (via a setTimeout per key)
// and lazily (checked on get()/keys()).
class MemoryStore {
  data = new Map;
  timers = new Map;
  /** Cancel and forget any pending expiry timer for `key`. */
  #dropTimer(key) {
    const pending = this.timers.get(key);
    if (pending === undefined) return;
    clearTimeout(pending);
    this.timers.delete(key);
  }
  /** Return the live value for `key`, or undefined if missing/expired. */
  get(key) {
    const entry = this.data.get(key);
    if (!entry) return undefined;
    const expired = entry.expiresAt !== null && Date.now() >= entry.expiresAt;
    if (expired) {
      this.del(key);
      return undefined;
    }
    return entry.value;
  }
  /** Store `value` under `key`; a positive `ttlMs` arms auto-expiry. */
  set(key, value, ttlMs) {
    this.#dropTimer(key);
    const hasTtl = ttlMs != null && ttlMs > 0;
    this.data.set(key, { value, expiresAt: hasTtl ? Date.now() + ttlMs : null });
    if (hasTtl) {
      this.timers.set(key, setTimeout(() => this.del(key), ttlMs));
    }
  }
  /** Remove `key` and its expiry timer, if any. */
  del(key) {
    this.data.delete(key);
    this.#dropTimer(key);
  }
  /** List live keys, optionally filtered by prefix; prunes expired entries. */
  keys(prefix) {
    const now = Date.now();
    const live = [];
    for (const [key, entry] of this.data) {
      if (entry.expiresAt !== null && now >= entry.expiresAt) {
        this.del(key);
      } else if (prefix === undefined || key.startsWith(prefix)) {
        live.push(key);
      }
    }
    return live;
  }
  /** Drop every entry and cancel every pending expiry timer. */
  clear() {
    for (const pending of this.timers.values()) {
      clearTimeout(pending);
    }
    this.timers.clear();
    this.data.clear();
  }
}
var createMemoryStore = () => new MemoryStore;
|
58
|
+
|
|
59
|
+
// src/browser/internal/sleep.ts
// Resolve after `ms` milliseconds.
var sleep = (ms) => new Promise((resolve) => {
  setTimeout(resolve, ms);
});
// Build the DOMException-style error used to signal cancellation.
var createAbortError = () => Object.assign(new Error("sleep aborted"), { name: "AbortError" });
/**
 * Sleep for `delayMs`, optionally cancellable via an AbortSignal.
 * Resolves immediately for non-positive delays; rejects with an
 * AbortError if the signal is already aborted or aborts mid-sleep.
 */
var sleepWithSignal = async (delayMs, signal) => {
  if (delayMs <= 0) return;
  if (!signal) {
    await sleep(delayMs);
    return;
  }
  if (signal.aborted) throw createAbortError();
  await new Promise((resolve, reject) => {
    const handleAbort = () => {
      clearTimeout(handle);
      signal.removeEventListener("abort", handleAbort);
      reject(createAbortError());
    };
    const handle = setTimeout(() => {
      // Normal completion: detach the abort listener so it cannot fire later.
      signal.removeEventListener("abort", handleAbort);
      resolve();
    }, delayMs);
    signal.addEventListener("abort", handleAbort, { once: true });
  });
};
|
|
88
|
+
|
|
89
|
+
// src/browser/internal/id.ts
// Produce `bytes` cryptographically-random bytes as a lowercase hex string.
var randomHex = (bytes) => {
  const raw = crypto.getRandomValues(new Uint8Array(bytes));
  let out = "";
  for (const byte of raw) {
    out += byte.toString(16).padStart(2, "0");
  }
  return out;
};
// Non-cryptographic djb2-style hash of a string, rendered as "hash:<hex>".
// Iterates UTF-16 code units by index (intentionally, to match charCodeAt).
var simpleHash = (str) => {
  let acc = 5381;
  for (let i = 0; i < str.length; i += 1) {
    acc = ((acc << 5) + acc + str.charCodeAt(i)) | 0;
  }
  return `hash:${(acc >>> 0).toString(16)}`;
};
|
|
102
|
+
|
|
103
|
+
// src/browser/mutex.ts
var DEFAULT_PREFIX = "sync:mutex";
var DEFAULT_RETRY_COUNT = 10;
var DEFAULT_RETRY_DELAY = 200;
var DEFAULT_TTL = 1e4;
var MAX_RESOURCE_LENGTH = 128;
// Over-long resource names are replaced by a stable hash so store keys stay bounded.
var normalizeResource = (resource) =>
  resource.length <= MAX_RESOURCE_LENGTH ? resource : simpleHash(resource);

// Thrown by withLockOrThrow when the lock cannot be acquired.
class LockError extends Error {
  resource;
  constructor(resource) {
    super(`Failed to acquire lock on resource: ${resource}`);
    this.name = "LockError";
    this.resource = resource;
  }
}

/**
 * Create a mutex handle backed by a key/value store (in-memory by default).
 * Locks are token-based: release/extend only act when the stored token still
 * matches the one issued at acquisition.
 */
var mutex = (config) => {
  const prefix = config.prefix ?? DEFAULT_PREFIX;
  const retryCount = config.retryCount ?? DEFAULT_RETRY_COUNT;
  const retryDelay = config.retryDelay ?? DEFAULT_RETRY_DELAY;
  const defaultTtl = config.defaultTtl ?? DEFAULT_TTL;
  const store = config.store ?? createMemoryStore();

  // Try to claim `resource`; resolves to a lock descriptor, or null after
  // retryCount + 1 failed attempts.
  const acquire = async (resource, ttl = defaultTtl) => {
    const key = `${prefix}:${config.id}:${normalizeResource(resource)}`;
    const token = randomHex(16);
    for (let attempt = 0; attempt <= retryCount; attempt += 1) {
      if (store.get(key) === undefined) {
        store.set(key, token, ttl);
        return { resource: key, value: token, ttl, expiration: Date.now() + ttl };
      }
      if (attempt < retryCount) {
        // Random extra delay de-synchronizes competing acquirers.
        await sleep(retryDelay + Math.random() * 100);
      }
    }
    return null;
  };

  // Delete the key only while we still own it (token check guards against
  // releasing a lock that expired and was re-acquired elsewhere).
  const release = async (lock) => {
    if (store.get(lock.resource) === lock.value) {
      store.del(lock.resource);
    }
  };

  // Renew the TTL if still owned; resolves to whether the extension took effect.
  const extend = async (lock, ttl = defaultTtl) => {
    if (store.get(lock.resource) !== lock.value) return false;
    store.set(lock.resource, lock.value, ttl);
    lock.ttl = ttl;
    lock.expiration = Date.now() + ttl;
    return true;
  };

  // Run `fn` under the lock; resolves to null when the lock was not acquired.
  const withLock = async (resource, fn, ttl) => {
    const lock = await acquire(resource, ttl);
    if (!lock) return null;
    try {
      return await fn(lock);
    } finally {
      await release(lock);
    }
  };

  // Like withLock, but raises LockError instead of resolving to null.
  const withLockOrThrow = async (resource, fn, ttl) => {
    const lock = await acquire(resource, ttl);
    if (!lock) throw new LockError(resource);
    try {
      return await fn(lock);
    } finally {
      await release(lock);
    }
  };

  return { id: config.id, acquire, release, withLock, withLockOrThrow, extend };
};
|
|
189
|
+
|
|
190
|
+
// src/browser/retry.ts
// Coerce any thrown value into an Error instance.
var asError = (error) => {
  if (error instanceof Error) return error;
  return new Error(String(error));
};
// Build the AbortError used when a retry loop is cancelled.
var createAbortError2 = () => Object.assign(new Error("retry aborted"), { name: "AbortError" });
// Extract an upper-cased string `code` property from an error-like value, or "".
var parseCode = (error) => {
  if (!error || typeof error !== "object") return "";
  const { code } = error;
  return typeof code === "string" ? code.toUpperCase() : "";
};
/**
 * Heuristic: does this error look like a transient transport/network failure?
 * Checks well-known errno-style codes first, then falls back to substring
 * matching on the lower-cased message.
 */
var isRetryableTransportError = (error) => {
  const code = parseCode(error);
  const transientCodes = [
    "ECONNRESET",
    "ETIMEDOUT",
    "ECONNREFUSED",
    "ENOTFOUND",
    "EPIPE",
    "EHOSTUNREACH",
    "ECONNABORTED"
  ];
  if (transientCodes.includes(code)) return true;
  const message = asError(error).message.toLowerCase();
  const hints = [
    "econnreset",
    "etimedout",
    "connection",
    "socket",
    "broken pipe",
    "network",
    "loading",
    "tryagain",
    "clusterdown"
  ];
  return hints.some((hint) => message.includes(hint));
};
|
|
211
|
+
// Baseline retry tuning: 8 attempts, 100ms → 2s exponential backoff
// (factor 2) with ±20% jitter, retrying only errors classified as
// transient transport failures by isRetryableTransportError.
var DEFAULT_RETRY_OPTIONS = {
  attempts: 8,
  minDelayMs: 100,
  maxDelayMs: 2000,
  factor: 2,
  jitter: 0.2,
  retryIf: isRetryableTransportError
};
|
|
219
|
+
// Backoff delay for the given 1-based attempt: exponential growth from
// minDelayMs, capped at maxDelayMs, with symmetric random jitter of
// ±(jitter * delay). Result is a non-negative integer of milliseconds.
var computeDelayMs = (attempt, opts) => {
  const exponent = Math.max(0, attempt - 1);
  const uncapped = opts.minDelayMs * opts.factor ** exponent;
  const delay = Math.min(opts.maxDelayMs, uncapped);
  const noise = (Math.random() * 2 - 1) * (delay * opts.jitter);
  return Math.max(0, Math.floor(delay + noise));
};
|
|
226
|
+
/**
 * Run `fn(attempt)` with exponential backoff until it succeeds, the error is
 * deemed non-retryable, the attempt budget is exhausted (the last error is
 * rethrown), or `opts.signal` aborts (AbortError). Options are clamped to
 * sane ranges and fall back to DEFAULT_RETRY_OPTIONS.
 */
var retry = async (fn, opts = {}) => {
  const resolved = {
    attempts: Math.max(1, opts.attempts ?? DEFAULT_RETRY_OPTIONS.attempts),
    minDelayMs: Math.max(0, opts.minDelayMs ?? DEFAULT_RETRY_OPTIONS.minDelayMs),
    factor: Math.max(1, opts.factor ?? DEFAULT_RETRY_OPTIONS.factor),
    jitter: Math.min(1, Math.max(0, opts.jitter ?? DEFAULT_RETRY_OPTIONS.jitter)),
    retryIf: opts.retryIf ?? DEFAULT_RETRY_OPTIONS.retryIf
  };
  // maxDelayMs can never sit below minDelayMs.
  resolved.maxDelayMs = Math.max(resolved.minDelayMs, opts.maxDelayMs ?? DEFAULT_RETRY_OPTIONS.maxDelayMs);
  for (let attempt = 1; attempt <= resolved.attempts; attempt += 1) {
    if (opts.signal?.aborted) throw createAbortError2();
    try {
      return await fn(attempt);
    } catch (error) {
      const outOfAttempts = attempt >= resolved.attempts;
      // On the final attempt the predicate is not consulted — just rethrow.
      if (outOfAttempts || !resolved.retryIf(error)) throw error;
      await sleepWithSignal(computeDelayMs(attempt, resolved), opts.signal);
    }
  }
  // attempts >= 1 guarantees the loop either returned or threw.
  throw new Error("unreachable retry state");
};
|
|
249
|
+
|
|
250
|
+
// src/internal/cron.ts
// Hard cap (~5 years of minutes) on the forward minute-by-minute scan
// performed by nextCronTimestamp before giving up.
var MAX_LOOKAHEAD_MINUTES = 5 * 366 * 24 * 60;
// Maps Intl "short" weekday names to cron day-of-week numbers (0 = Sunday).
var WEEKDAY_TO_NUM = {
  Sun: 0,
  Mon: 1,
  Tue: 2,
  Wed: 3,
  Thu: 4,
  Fri: 5,
  Sat: 6
};
|
|
261
|
+
// Per-timezone Intl.DateTimeFormat instances (construction is expensive).
var zonedFormatterCache = new Map;
// Parsed cron specs keyed by the raw expression string (see parseCron).
var parsedCronCache = new Map;
/**
 * Return a cached 24h en-US formatter for `tz` that emits numeric
 * minute/hour/day/month/year plus a short weekday name.
 * Throws (from the Intl constructor) when `tz` is not a valid timezone.
 */
var getZonedFormatter = (tz) => {
  const hit = zonedFormatterCache.get(tz);
  if (hit) return hit;
  const formatter = new Intl.DateTimeFormat("en-US", {
    timeZone: tz,
    hour12: false,
    hourCycle: "h23",
    minute: "2-digit",
    hour: "2-digit",
    day: "2-digit",
    month: "2-digit",
    year: "numeric",
    weekday: "short"
  });
  zonedFormatterCache.set(tz, formatter);
  return formatter;
};
|
|
281
|
+
// Parse `value` as an integer or throw a field-specific error.
var asInt = (value, fieldName) => {
  const parsed = Number(value);
  if (Number.isInteger(parsed)) return parsed;
  throw new Error(`invalid ${fieldName} value: ${value}`);
};
// Cron allows both 0 and 7 for Sunday; canonicalize to 0.
var normalizeDow = (value) => (value === 7 ? 0 : value);
/**
 * Expand one cron segment ("*", "N", "A-B", optionally "/step") into the
 * list of matching values within [min, max].
 * Day-of-week ranges may wrap (e.g. Fri-Mon) and are emitted as
 * start..max followed by min..end.
 */
var parseSegment = (segment, min, max, fieldName) => {
  const [base, stepRaw] = segment.split("/");
  const step = stepRaw === undefined ? 1 : asInt(stepRaw, `${fieldName} step`);
  if (step <= 0) throw new Error(`invalid ${fieldName} step: ${segment}`);
  const resolvedBase = base ?? "*";
  let start = min;
  let end = max;
  if (resolvedBase !== "*") {
    if (resolvedBase.includes("-")) {
      const [lo, hi] = resolvedBase.split("-");
      if (lo === undefined || hi === undefined) {
        throw new Error(`invalid ${fieldName} range: ${segment}`);
      }
      start = asInt(lo, fieldName);
      end = asInt(hi, fieldName);
    } else {
      start = asInt(resolvedBase, fieldName);
      end = start;
    }
  }
  if (fieldName === "day-of-week") {
    const rawStart = start;
    const rawEnd = end;
    start = normalizeDow(start);
    end = normalizeDow(end);
    if (rawStart !== rawEnd && start > end) {
      // Wrapped range such as Fri-Mon: walk to max, then wrap from min.
      const wrapped = [];
      for (let v = start; v <= max; v += step) wrapped.push(v);
      for (let v = min; v <= end; v += step) wrapped.push(v);
      return wrapped;
    }
  }
  if (start < min || start > max || end < min || end > max || start > end) {
    throw new Error(`out-of-range ${fieldName}: ${segment}`);
  }
  const count = Math.floor((end - start) / step) + 1;
  return Array.from({ length: count }, (_, i) => start + i * step);
};
|
|
335
|
+
/**
 * Parse one cron field into { any, values }: `any` is true for "*",
 * otherwise `values` is the set of expanded, comma-separated segment values
 * (day-of-week values canonicalized via normalizeDow).
 */
var parseField = (raw, min, max, fieldName) => {
  if (raw === "*") return { any: true, values: new Set };
  const values = new Set;
  for (const piece of raw.split(",")) {
    const trimmed = piece.trim();
    if (trimmed === "") throw new Error(`invalid ${fieldName} field: "${raw}"`);
    for (const v of parseSegment(trimmed, min, max, fieldName)) {
      values.add(fieldName === "day-of-week" ? normalizeDow(v) : v);
    }
  }
  if (values.size === 0) throw new Error(`invalid ${fieldName} field: "${raw}"`);
  return { any: false, values };
};
/**
 * Parse a 5-field cron expression into a spec object, with a small
 * insertion-order cache capped at 1000 entries.
 */
var parseCron = (raw) => {
  const hit = parsedCronCache.get(raw);
  if (hit) return hit;
  const fields = raw.trim().split(/\s+/);
  if (fields.length !== 5) {
    throw new Error(`cron must have 5 fields (minute hour day-of-month month day-of-week), got: "${raw}"`);
  }
  const [minuteRaw, hourRaw, dayOfMonthRaw, monthRaw, dayOfWeekRaw] = fields;
  const spec = {
    minute: parseField(minuteRaw, 0, 59, "minute"),
    hour: parseField(hourRaw, 0, 23, "hour"),
    dayOfMonth: parseField(dayOfMonthRaw, 1, 31, "day-of-month"),
    month: parseField(monthRaw, 1, 12, "month"),
    dayOfWeek: parseField(dayOfWeekRaw, 0, 7, "day-of-week")
  };
  parsedCronCache.set(raw, spec);
  if (parsedCronCache.size > 1000) {
    // Evict the oldest insertion to bound memory.
    const oldest = parsedCronCache.keys().next().value;
    if (oldest) parsedCronCache.delete(oldest);
  }
  return spec;
};
|
|
375
|
+
// Round a millisecond timestamp down to the start of its minute.
var floorToMinute = (timestampMs) => {
  const MINUTE_MS = 60000;
  return Math.floor(timestampMs / MINUTE_MS) * MINUTE_MS;
};
|
|
378
|
+
/**
 * Convert a UTC millisecond timestamp into wall-clock fields for timezone
 * `tz` using the cached Intl formatter: { minute, hour, dayOfMonth, month,
 * dayOfWeek } with dayOfWeek 0 = Sunday. Throws if any field cannot be
 * resolved from the formatter output.
 */
var toZonedParts = (timestampMs, tz) => {
  const fields = { minute: -1, hour: -1, dayOfMonth: -1, month: -1, dayOfWeek: -1 };
  for (const part of getZonedFormatter(tz).formatToParts(new Date(timestampMs))) {
    switch (part.type) {
      case "minute":
        fields.minute = Number(part.value);
        break;
      case "hour":
        fields.hour = Number(part.value);
        break;
      case "day":
        fields.dayOfMonth = Number(part.value);
        break;
      case "month":
        fields.month = Number(part.value);
        break;
      case "weekday":
        fields.dayOfWeek = WEEKDAY_TO_NUM[part.value] ?? -1;
        break;
    }
  }
  const incomplete =
    fields.minute < 0 || fields.hour < 0 || fields.dayOfMonth < 0 || fields.month < 0 || fields.dayOfWeek < 0;
  if (incomplete) {
    throw new Error(`failed to resolve zoned time parts for timezone "${tz}"`);
  }
  return fields;
};
|
|
402
|
+
// A field matches when it is a wildcard or explicitly contains the value.
var matchesField = (field, value) => field.any || field.values.has(value);
/**
 * Vixie-cron day semantics: when both day-of-month and day-of-week are
 * restricted, the day matches if EITHER does; when only one is restricted,
 * that one decides; when both are wildcards, every day matches.
 */
var matchesDay = (spec, dayOfMonth, dayOfWeek) => {
  const domAny = spec.dayOfMonth.any;
  const dowAny = spec.dayOfWeek.any;
  if (domAny && dowAny) return true;
  const domMatches = domAny || spec.dayOfMonth.values.has(dayOfMonth);
  const dowMatches = dowAny || spec.dayOfWeek.values.has(dayOfWeek);
  if (domAny) return dowMatches;
  if (dowAny) return domMatches;
  return domMatches || dowMatches;
};
|
|
416
|
+
// Does the given instant (interpreted in `tz`) satisfy the parsed cron spec?
var matchesCron = (spec, timestampMs, tz) => {
  const zoned = toZonedParts(timestampMs, tz);
  if (!matchesField(spec.minute, zoned.minute)) return false;
  if (!matchesField(spec.hour, zoned.hour)) return false;
  if (!matchesField(spec.month, zoned.month)) return false;
  return matchesDay(spec, zoned.dayOfMonth, zoned.dayOfWeek);
};
/**
 * First matching minute strictly after `afterTimestampMs`, found by a
 * minute-by-minute scan bounded by MAX_LOOKAHEAD_MINUTES (~5 years);
 * throws if no match is found within that window.
 */
var nextCronTimestamp = (cron, tz, afterTimestampMs) => {
  const spec = parseCron(cron);
  let candidate = floorToMinute(afterTimestampMs) + 60000;
  for (let scanned = 0; scanned < MAX_LOOKAHEAD_MINUTES; scanned += 1) {
    if (matchesCron(spec, candidate, tz)) return candidate;
    candidate += 60000;
  }
  throw new Error(`unable to find next cron execution within lookahead window for "${cron}" (${tz})`);
};
|
|
430
|
+
// Validate `tz` by constructing (or fetching) its formatter; an invalid
// IANA zone makes the Intl constructor throw.
var assertValidTimeZone = (tz) => {
  void getZonedFormatter(tz);
};
|
|
433
|
+
|
|
434
|
+
// src/browser/scheduler.ts
var DEFAULT_PREFIX2 = "sync:scheduler";
var DAY_MS = 24 * 60 * 60 * 1000;
// Leader election: lease duration and how often the holder renews it.
var DEFAULT_LEASE_MS = 5000;
var DEFAULT_HEARTBEAT_MS = 500;
// Dispatch loop cadence and per-tick throughput caps.
var DEFAULT_TICK_MS = 500;
var DEFAULT_BATCH_SIZE = 200;
var DEFAULT_MAX_SUBMITS_PER_TICK = 500;
// Retry/backoff tuning for job submission attempts.
var DEFAULT_SUBMIT_RETRIES = 3;
var DEFAULT_SUBMIT_BACKOFF_BASE_MS = 100;
var DEFAULT_SUBMIT_BACKOFF_MAX_MS = 2000;
// TTL for the per-slot `key` passed along with scheduled job submissions (90 days).
var DEFAULT_SCHEDULED_JOB_KEY_TTL_MS = 90 * DAY_MS;
// Cap on retained dispatch dead-letter entries (oldest trimmed beyond this).
var DEFAULT_DISPATCH_DLQ_MAX_ENTRIES = 5000;
// Missed-slot policy ("skip" dispatches nothing for missed slots) and
// the cap on catch-up runs per schedule.
var DEFAULT_MISFIRE = "skip";
var DEFAULT_MAX_CATCH_UP_RUNS = 100;
var DEFAULT_MAX_CONSECUTIVE_DISPATCH_FAILURES = 5;
var DEFAULT_STRICT_HANDLERS = true;
|
|
451
|
+
// Coerce any thrown value into an Error instance.
var asError2 = (error) => {
  if (error instanceof Error) return error;
  return new Error(String(error));
};
// Invoke the optional metric callback, swallowing anything it throws —
// observability must never break the scheduler loop.
var safeMetric = (onMetric, metric) => {
  if (!onMetric) return;
  try {
    onMetric(metric);
  } catch {
    // Intentionally ignored.
  }
};
|
|
459
|
+
// Project the public, read-only view of a schedule record (drops internal
// bookkeeping fields such as input/meta/failure counters).
var asInfo = ({ id, cron, tz, misfire, maxCatchUpRuns, jobId, nextRunAt, createdAt, updatedAt }) => ({
  id,
  cron,
  tz,
  misfire,
  maxCatchUpRuns,
  jobId,
  nextRunAt,
  createdAt,
  updatedAt
});
|
|
470
|
+
/**
 * Decide which slot timestamps to dispatch for a due schedule and where
 * nextRunAt should move. Returns null when the schedule is not yet due.
 * - "skip": dispatch nothing, jump past `nowMs`.
 * - "catch_up_one": dispatch only the oldest missed slot, jump past `nowMs`.
 * - otherwise: dispatch every missed slot up to maxCatchUpRuns (minimum 1),
 *   advancing slot-by-slot along the cron sequence.
 */
var computeDispatchPlan = (schedule, nowMs) => {
  if (schedule.nextRunAt > nowMs) return null;
  switch (schedule.misfire) {
    case "skip":
      return {
        slots: [],
        nextRunAt: nextCronTimestamp(schedule.cron, schedule.tz, nowMs)
      };
    case "catch_up_one":
      return {
        slots: [schedule.nextRunAt],
        nextRunAt: nextCronTimestamp(schedule.cron, schedule.tz, nowMs)
      };
    default: {
      const slots = [];
      const maxRuns = Math.max(1, schedule.maxCatchUpRuns);
      let cursor = schedule.nextRunAt;
      while (cursor <= nowMs && slots.length < maxRuns) {
        slots.push(cursor);
        cursor = nextCronTimestamp(schedule.cron, schedule.tz, cursor);
      }
      return { slots, nextRunAt: cursor };
    }
  }
};
|
|
497
|
+
// Module-level registries keyed by "<prefix>:<id>:..." so that multiple
// scheduler() instances created with the same prefix/id within one JS realm
// share a single leader-election store and a single schedule map.
var schedulerSharedStores = new Map;
var schedulerSharedSchedules = new Map;
|
|
499
|
+
var scheduler = (config) => {
|
|
500
|
+
const prefix = config.prefix ?? DEFAULT_PREFIX2;
|
|
501
|
+
const leaseMs = Math.max(500, config.leader?.leaseMs ?? DEFAULT_LEASE_MS);
|
|
502
|
+
const heartbeatMs = Math.max(100, config.leader?.heartbeatMs ?? DEFAULT_HEARTBEAT_MS);
|
|
503
|
+
const tickMs = Math.max(50, config.dispatch?.tickMs ?? DEFAULT_TICK_MS);
|
|
504
|
+
const batchSize = Math.max(1, config.dispatch?.batchSize ?? DEFAULT_BATCH_SIZE);
|
|
505
|
+
const maxSubmitsPerTick = Math.max(1, config.dispatch?.maxSubmitsPerTick ?? DEFAULT_MAX_SUBMITS_PER_TICK);
|
|
506
|
+
const submitRetries = Math.max(0, config.dispatch?.submitRetries ?? DEFAULT_SUBMIT_RETRIES);
|
|
507
|
+
const submitBackoffBaseMs = Math.max(10, config.dispatch?.submitBackoffBaseMs ?? DEFAULT_SUBMIT_BACKOFF_BASE_MS);
|
|
508
|
+
const submitBackoffMaxMs = Math.max(submitBackoffBaseMs, config.dispatch?.submitBackoffMaxMs ?? DEFAULT_SUBMIT_BACKOFF_MAX_MS);
|
|
509
|
+
const scheduledJobKeyTtlMs = Math.max(60000, config.dispatch?.scheduledJobKeyTtlMs ?? DEFAULT_SCHEDULED_JOB_KEY_TTL_MS);
|
|
510
|
+
const dlqMaxEntries = Math.max(1, config.dispatch?.dlqMaxEntries ?? DEFAULT_DISPATCH_DLQ_MAX_ENTRIES);
|
|
511
|
+
const maxConsecutiveDispatchFailures = Math.max(1, config.dispatch?.maxConsecutiveDispatchFailures ?? DEFAULT_MAX_CONSECUTIVE_DISPATCH_FAILURES);
|
|
512
|
+
const strictHandlers = config.strictHandlers ?? DEFAULT_STRICT_HANDLERS;
|
|
513
|
+
const sharedMutexStoreKey = `${prefix}:${config.id}:leader:store`;
|
|
514
|
+
if (!schedulerSharedStores.has(sharedMutexStoreKey)) {
|
|
515
|
+
schedulerSharedStores.set(sharedMutexStoreKey, createMemoryStore());
|
|
516
|
+
}
|
|
517
|
+
const sharedMutexStore = schedulerSharedStores.get(sharedMutexStoreKey);
|
|
518
|
+
const leaderMutex = mutex({
|
|
519
|
+
id: `${config.id}:leader`,
|
|
520
|
+
prefix: `${prefix}:leader`,
|
|
521
|
+
defaultTtl: leaseMs,
|
|
522
|
+
retryCount: 0,
|
|
523
|
+
store: sharedMutexStore
|
|
524
|
+
});
|
|
525
|
+
const sharedSchedulesKey = `${prefix}:${config.id}:schedules`;
|
|
526
|
+
if (!schedulerSharedSchedules.has(sharedSchedulesKey)) {
|
|
527
|
+
schedulerSharedSchedules.set(sharedSchedulesKey, new Map);
|
|
528
|
+
}
|
|
529
|
+
const schedules = schedulerSharedSchedules.get(sharedSchedulesKey);
|
|
530
|
+
const jobsById = new Map;
|
|
531
|
+
const scheduleToJobId = new Map;
|
|
532
|
+
const dispatchDlq = [];
|
|
533
|
+
const metricsState = {
|
|
534
|
+
isLeader: false,
|
|
535
|
+
leaderEpoch: 0,
|
|
536
|
+
leaderChanges: 0,
|
|
537
|
+
dispatchSubmitted: 0,
|
|
538
|
+
dispatchFailed: 0,
|
|
539
|
+
dispatchRetried: 0,
|
|
540
|
+
dispatchSkipped: 0,
|
|
541
|
+
dispatchDlq: 0,
|
|
542
|
+
triggerSubmitted: 0,
|
|
543
|
+
triggerFailed: 0,
|
|
544
|
+
triggerRejected: 0,
|
|
545
|
+
tickErrors: 0,
|
|
546
|
+
lastTickAt: null
|
|
547
|
+
};
|
|
548
|
+
let running = false;
|
|
549
|
+
let loopPromise = null;
|
|
550
|
+
let currentLeaderLock = null;
|
|
551
|
+
let currentLeaderEpoch = 0;
|
|
552
|
+
let lastHeartbeatAt = 0;
|
|
553
|
+
const setLeader = (next, reason) => {
|
|
554
|
+
if (metricsState.isLeader === next)
|
|
555
|
+
return;
|
|
556
|
+
metricsState.isLeader = next;
|
|
557
|
+
metricsState.leaderChanges += 1;
|
|
558
|
+
if (next) {
|
|
559
|
+
safeMetric(config.onMetric, { type: "leader_acquired", ts: Date.now() });
|
|
560
|
+
return;
|
|
561
|
+
}
|
|
562
|
+
safeMetric(config.onMetric, { type: "leader_lost", ts: Date.now(), reason: reason ?? "stop" });
|
|
563
|
+
};
|
|
564
|
+
const tryAcquireLeadership = async () => {
|
|
565
|
+
if (currentLeaderLock)
|
|
566
|
+
return;
|
|
567
|
+
const acquired = await leaderMutex.acquire("active", leaseMs);
|
|
568
|
+
if (!acquired)
|
|
569
|
+
return;
|
|
570
|
+
currentLeaderLock = acquired;
|
|
571
|
+
currentLeaderEpoch++;
|
|
572
|
+
metricsState.leaderEpoch = currentLeaderEpoch;
|
|
573
|
+
lastHeartbeatAt = Date.now();
|
|
574
|
+
setLeader(true);
|
|
575
|
+
};
|
|
576
|
+
const maintainLeadership = async () => {
|
|
577
|
+
if (!currentLeaderLock)
|
|
578
|
+
return;
|
|
579
|
+
const nowMs = Date.now();
|
|
580
|
+
if (nowMs - lastHeartbeatAt < heartbeatMs)
|
|
581
|
+
return;
|
|
582
|
+
const ok = await leaderMutex.extend(currentLeaderLock, leaseMs);
|
|
583
|
+
lastHeartbeatAt = nowMs;
|
|
584
|
+
if (!ok) {
|
|
585
|
+
currentLeaderLock = null;
|
|
586
|
+
setLeader(false, "extend_failed");
|
|
587
|
+
}
|
|
588
|
+
};
|
|
589
|
+
const relinquishLeadership = async () => {
|
|
590
|
+
if (!currentLeaderLock)
|
|
591
|
+
return;
|
|
592
|
+
try {
|
|
593
|
+
await leaderMutex.release(currentLeaderLock);
|
|
594
|
+
} catch {}
|
|
595
|
+
currentLeaderLock = null;
|
|
596
|
+
setLeader(false, "stop");
|
|
597
|
+
};
|
|
598
|
+
const ensureLeadership = async () => {
|
|
599
|
+
if (!currentLeaderLock)
|
|
600
|
+
return false;
|
|
601
|
+
const nowMs = Date.now();
|
|
602
|
+
if (nowMs - lastHeartbeatAt >= heartbeatMs) {
|
|
603
|
+
const ok = await leaderMutex.extend(currentLeaderLock, leaseMs);
|
|
604
|
+
lastHeartbeatAt = nowMs;
|
|
605
|
+
if (!ok) {
|
|
606
|
+
currentLeaderLock = null;
|
|
607
|
+
setLeader(false, "extend_failed");
|
|
608
|
+
return false;
|
|
609
|
+
}
|
|
610
|
+
}
|
|
611
|
+
return metricsState.isLeader;
|
|
612
|
+
};
|
|
613
|
+
const submitScheduledJob = async (cfg) => {
|
|
614
|
+
return await retry(async () => {
|
|
615
|
+
if (cfg.requireLeadership && !await ensureLeadership()) {
|
|
616
|
+
throw new Error("leadership lost during dispatch");
|
|
617
|
+
}
|
|
618
|
+
return await cfg.jobHandle.submit({
|
|
619
|
+
input: cfg.schedule.input,
|
|
620
|
+
key: cfg.key,
|
|
621
|
+
keyTtlMs: scheduledJobKeyTtlMs,
|
|
622
|
+
...cfg.at !== undefined ? { at: cfg.at } : {},
|
|
623
|
+
meta: cfg.meta
|
|
624
|
+
});
|
|
625
|
+
}, {
|
|
626
|
+
attempts: submitRetries + 1,
|
|
627
|
+
minDelayMs: submitBackoffBaseMs,
|
|
628
|
+
maxDelayMs: submitBackoffMaxMs,
|
|
629
|
+
factor: 2,
|
|
630
|
+
jitter: 0.25,
|
|
631
|
+
retryIf: (error) => {
|
|
632
|
+
const err = asError2(error);
|
|
633
|
+
if (err.name === "ZodError")
|
|
634
|
+
return false;
|
|
635
|
+
if (cfg.requireLeadership && err.message === "leadership lost during dispatch")
|
|
636
|
+
return false;
|
|
637
|
+
cfg.onRetry?.();
|
|
638
|
+
return true;
|
|
639
|
+
}
|
|
640
|
+
});
|
|
641
|
+
};
|
|
642
|
+
const pushDispatchDlq = (cfg) => {
|
|
643
|
+
dispatchDlq.unshift({ ...cfg, ts: Date.now() });
|
|
644
|
+
if (dispatchDlq.length > dlqMaxEntries) {
|
|
645
|
+
dispatchDlq.length = dlqMaxEntries;
|
|
646
|
+
}
|
|
647
|
+
metricsState.dispatchDlq += 1;
|
|
648
|
+
safeMetric(config.onMetric, {
|
|
649
|
+
type: "dispatch_dlq",
|
|
650
|
+
ts: Date.now(),
|
|
651
|
+
scheduleId: cfg.scheduleId,
|
|
652
|
+
slotTs: cfg.slotTs,
|
|
653
|
+
message: cfg.message
|
|
654
|
+
});
|
|
655
|
+
};
|
|
656
|
+
const recordDispatchFailure = (cfg) => {
|
|
657
|
+
const sameSlot = cfg.schedule.lastFailedSlotTs === cfg.failedSlotTs;
|
|
658
|
+
const failures = sameSlot ? cfg.schedule.consecutiveDispatchFailures + 1 : 1;
|
|
659
|
+
const shouldAdvance = cfg.deterministic || failures >= maxConsecutiveDispatchFailures;
|
|
660
|
+
cfg.schedule.consecutiveDispatchFailures = failures;
|
|
661
|
+
cfg.schedule.lastFailedSlotTs = cfg.failedSlotTs;
|
|
662
|
+
cfg.schedule.lastDispatchError = cfg.message;
|
|
663
|
+
cfg.schedule.updatedAt = Date.now();
|
|
664
|
+
if (shouldAdvance) {
|
|
665
|
+
cfg.schedule.nextRunAt = nextCronTimestamp(cfg.schedule.cron, cfg.schedule.tz, cfg.failedSlotTs);
|
|
666
|
+
cfg.schedule.consecutiveDispatchFailures = 0;
|
|
667
|
+
safeMetric(config.onMetric, {
|
|
668
|
+
type: "dispatch_advanced_after_failures",
|
|
669
|
+
ts: Date.now(),
|
|
670
|
+
scheduleId: cfg.schedule.id,
|
|
671
|
+
slotTs: cfg.failedSlotTs,
|
|
672
|
+
failures
|
|
673
|
+
});
|
|
674
|
+
}
|
|
675
|
+
};
|
|
676
|
+
const dispatchDue = async () => {
|
|
677
|
+
const nowMs = Date.now();
|
|
678
|
+
const dueSchedules = [];
|
|
679
|
+
for (const schedule of schedules.values()) {
|
|
680
|
+
if (schedule.nextRunAt <= nowMs) {
|
|
681
|
+
dueSchedules.push(schedule);
|
|
682
|
+
}
|
|
683
|
+
}
|
|
684
|
+
dueSchedules.sort((a, b) => a.nextRunAt - b.nextRunAt);
|
|
685
|
+
const batch = dueSchedules.slice(0, batchSize);
|
|
686
|
+
if (batch.length === 0)
|
|
687
|
+
return;
|
|
688
|
+
let submitsRemaining = maxSubmitsPerTick;
|
|
689
|
+
for (const schedule of batch) {
|
|
690
|
+
if (!await ensureLeadership())
|
|
691
|
+
break;
|
|
692
|
+
if (schedule.nextRunAt > nowMs)
|
|
693
|
+
continue;
|
|
694
|
+
const plan = computeDispatchPlan(schedule, nowMs);
|
|
695
|
+
if (!plan)
|
|
696
|
+
continue;
|
|
697
|
+
const jobHandle = jobsById.get(schedule.jobId);
|
|
698
|
+
if (!jobHandle && plan.slots.length > 0) {
|
|
699
|
+
metricsState.dispatchSkipped += 1;
|
|
700
|
+
safeMetric(config.onMetric, {
|
|
701
|
+
type: "dispatch_skipped",
|
|
702
|
+
ts: nowMs,
|
|
703
|
+
scheduleId: schedule.id,
|
|
704
|
+
reason: "missing_handler"
|
|
705
|
+
});
|
|
706
|
+
if (strictHandlers) {
|
|
707
|
+
await relinquishLeadership();
|
|
708
|
+
break;
|
|
709
|
+
}
|
|
710
|
+
continue;
|
|
711
|
+
}
|
|
712
|
+
let submitFailed = false;
|
|
713
|
+
let submittedAny = false;
|
|
714
|
+
let lastSubmittedSlotTs = null;
|
|
715
|
+
for (const slotTs of plan.slots) {
|
|
716
|
+
if (submitsRemaining <= 0)
|
|
717
|
+
break;
|
|
718
|
+
if (!await ensureLeadership()) {
|
|
719
|
+
submitFailed = true;
|
|
720
|
+
break;
|
|
721
|
+
}
|
|
722
|
+
try {
|
|
723
|
+
const jobId = await submitScheduledJob({
|
|
724
|
+
jobHandle,
|
|
725
|
+
schedule,
|
|
726
|
+
key: `${schedule.id}:${slotTs}`,
|
|
727
|
+
at: slotTs,
|
|
728
|
+
meta: {
|
|
729
|
+
...schedule.meta ?? {},
|
|
730
|
+
scheduleId: schedule.id,
|
|
731
|
+
scheduleSlotTs: slotTs,
|
|
732
|
+
schedulerId: config.id
|
|
733
|
+
},
|
|
734
|
+
requireLeadership: true,
|
|
735
|
+
onRetry: () => {
|
|
736
|
+
metricsState.dispatchRetried += 1;
|
|
737
|
+
}
|
|
738
|
+
});
|
|
739
|
+
metricsState.dispatchSubmitted += 1;
|
|
740
|
+
submitsRemaining -= 1;
|
|
741
|
+
submittedAny = true;
|
|
742
|
+
lastSubmittedSlotTs = slotTs;
|
|
743
|
+
safeMetric(config.onMetric, {
|
|
744
|
+
type: "dispatch_submitted",
|
|
745
|
+
ts: Date.now(),
|
|
746
|
+
scheduleId: schedule.id,
|
|
747
|
+
slotTs,
|
|
748
|
+
jobId
|
|
749
|
+
});
|
|
750
|
+
} catch (error) {
|
|
751
|
+
submitFailed = true;
|
|
752
|
+
metricsState.dispatchFailed += 1;
|
|
753
|
+
const err = asError2(error);
|
|
754
|
+
pushDispatchDlq({
|
|
755
|
+
scheduleId: schedule.id,
|
|
756
|
+
slotTs,
|
|
757
|
+
message: err.message
|
|
758
|
+
});
|
|
759
|
+
recordDispatchFailure({
|
|
760
|
+
schedule,
|
|
761
|
+
failedSlotTs: slotTs,
|
|
762
|
+
message: err.message,
|
|
763
|
+
deterministic: err.name === "ZodError"
|
|
764
|
+
});
|
|
765
|
+
safeMetric(config.onMetric, {
|
|
766
|
+
type: "dispatch_failed",
|
|
767
|
+
ts: Date.now(),
|
|
768
|
+
scheduleId: schedule.id,
|
|
769
|
+
message: err.message
|
|
770
|
+
});
|
|
771
|
+
break;
|
|
772
|
+
}
|
|
773
|
+
}
|
|
774
|
+
if (submitFailed)
|
|
775
|
+
continue;
|
|
776
|
+
if (submitsRemaining <= 0) {
|
|
777
|
+
if (submittedAny && lastSubmittedSlotTs !== null) {
|
|
778
|
+
schedule.nextRunAt = nextCronTimestamp(schedule.cron, schedule.tz, lastSubmittedSlotTs);
|
|
779
|
+
schedule.updatedAt = Date.now();
|
|
780
|
+
scheduleToJobId.set(schedule.id, schedule.jobId);
|
|
781
|
+
}
|
|
782
|
+
break;
|
|
783
|
+
}
|
|
784
|
+
schedule.nextRunAt = plan.nextRunAt;
|
|
785
|
+
schedule.updatedAt = Date.now();
|
|
786
|
+
scheduleToJobId.set(schedule.id, schedule.jobId);
|
|
787
|
+
}
|
|
788
|
+
};
|
|
789
|
+
// Main scheduler tick loop. Each iteration tries to acquire/maintain
// leadership, dispatches due schedules only while this instance holds the
// leader lock, and stamps lastTickAt. Tick failures are counted and reported
// via the metrics hook but never break the loop; it exits when `running`
// is cleared by stop().
const loop = async () => {
  for (;;) {
    if (!running) break;
    try {
      await tryAcquireLeadership();
      await maintainLeadership();
      if (currentLeaderLock) await dispatchDue();
      metricsState.lastTickAt = Date.now();
    } catch (err) {
      metricsState.tickErrors += 1;
      safeMetric(config.onMetric, {
        type: "tick_error",
        ts: Date.now(),
        message: asError2(err).message
      });
    }
    await sleep(tickMs);
  }
};
|
|
809
|
+
// Registers (or upserts) a cron schedule together with its job handler.
// When the cron expression and timezone are unchanged, the existing
// nextRunAt and dispatch-failure bookkeeping are carried over so an
// idempotent re-register does not reset the cadence. A handler that was
// previously mapped to this schedule is garbage-collected once no other
// schedule references it. Returns { created, updated } flags.
const register = async (cfg) => {
  const tz = cfg.tz ?? "UTC";
  assertValidTimeZone(tz);
  // Validate the configured input eagerly so a bad config fails at
  // registration time rather than at dispatch time.
  cfg.job.validateInput?.(cfg.input);
  const nowMs = Date.now();
  const existing = schedules.get(cfg.id);
  const stored = {
    id: cfg.id,
    cron: cfg.cron,
    tz,
    misfire: cfg.misfire ?? DEFAULT_MISFIRE,
    maxCatchUpRuns: Math.max(1, cfg.maxCatchUpRuns ?? DEFAULT_MAX_CATCH_UP_RUNS),
    jobId: cfg.job.id,
    input: cfg.input,
    meta: cfg.meta,
    createdAt: existing?.createdAt ?? nowMs,
    updatedAt: nowMs,
    nextRunAt: nextCronTimestamp(cfg.cron, tz, nowMs),
    consecutiveDispatchFailures: 0
  };
  const cadenceUnchanged = existing && existing.cron === cfg.cron && existing.tz === tz;
  if (cadenceUnchanged) {
    // Same cron + tz: keep the in-flight cadence and failure state.
    stored.nextRunAt = existing.nextRunAt;
    stored.consecutiveDispatchFailures = existing.consecutiveDispatchFailures;
    stored.lastFailedSlotTs = existing.lastFailedSlotTs;
    stored.lastDispatchError = existing.lastDispatchError;
  }
  const created = !existing;
  const updated = !created;
  schedules.set(cfg.id, stored);
  jobsById.set(cfg.job.id, cfg.job);
  const previousJobId = scheduleToJobId.get(cfg.id);
  scheduleToJobId.set(cfg.id, cfg.job.id);
  if (previousJobId && previousJobId !== cfg.job.id) {
    // The schedule switched handlers; drop the old handler unless some
    // other schedule still points at it.
    const stillUsed = [...scheduleToJobId.entries()].some(
      ([sid, jid]) => sid !== cfg.id && jid === previousJobId
    );
    if (!stillUsed) jobsById.delete(previousJobId);
  }
  safeMetric(config.onMetric, {
    type: "schedule_registered",
    ts: Date.now(),
    scheduleId: cfg.id,
    created
  });
  if (updated) {
    safeMetric(config.onMetric, {
      type: "schedule_updated",
      ts: Date.now(),
      scheduleId: cfg.id
    });
  }
  return { created, updated };
};
|
|
870
|
+
// Removes a schedule and, when no other schedule still references its job
// handler, drops the handler as well. Emits a schedule_unregistered metric
// regardless of whether the schedule actually existed.
const unregister = async (cfg) => {
  const jobId = scheduleToJobId.get(cfg.id);
  schedules.delete(cfg.id);
  scheduleToJobId.delete(cfg.id);
  safeMetric(config.onMetric, {
    type: "schedule_unregistered",
    ts: Date.now(),
    scheduleId: cfg.id
  });
  if (jobId) {
    // This schedule's mapping is already gone, so a simple membership check
    // over the remaining mappings suffices.
    const stillReferenced = [...scheduleToJobId.values()].includes(jobId);
    if (!stillReferenced) jobsById.delete(jobId);
  }
};
|
|
891
|
+
// Manually submits the job behind a registered schedule, outside its cron
// cadence. Manual triggers do not require leadership. Throws (after counting
// and emitting a trigger_rejected metric) when the schedule or its local
// handler is unknown; submission failures are counted, reported, and rethrown.
const triggerNow = async (cfg) => {
  // Shared bookkeeping for both rejection paths.
  const reject = (scheduleId, reason) => {
    metricsState.triggerRejected += 1;
    safeMetric(config.onMetric, {
      type: "trigger_rejected",
      ts: Date.now(),
      scheduleId,
      reason
    });
  };
  const schedule = schedules.get(cfg.id);
  if (!schedule) {
    reject(cfg.id, "missing_schedule");
    throw new Error(`scheduler trigger rejected: missing schedule ${cfg.id}`);
  }
  const jobHandle = jobsById.get(schedule.jobId);
  if (!jobHandle) {
    reject(schedule.id, "missing_handler");
    throw new Error(`scheduler trigger rejected: missing local handler for schedule ${schedule.id}`);
  }
  try {
    // Namespace the optional dedupe key so manual triggers never collide
    // with cron-slot submissions (which use `${schedule.id}:${slotTs}`).
    const key = cfg.key ? `manual:${schedule.id}:${cfg.key}` : undefined;
    const jobId = await submitScheduledJob({
      jobHandle,
      schedule,
      key,
      meta: {
        ...schedule.meta ?? {},
        scheduleId: schedule.id,
        schedulerId: config.id,
        scheduleTrigger: "manual",
        ...cfg.key ? { scheduleManualKey: cfg.key } : {}
      },
      requireLeadership: false
    });
    metricsState.triggerSubmitted += 1;
    safeMetric(config.onMetric, {
      type: "trigger_submitted",
      ts: Date.now(),
      scheduleId: schedule.id,
      jobId
    });
    return jobId;
  } catch (error) {
    metricsState.triggerFailed += 1;
    safeMetric(config.onMetric, {
      type: "trigger_failed",
      ts: Date.now(),
      scheduleId: schedule.id,
      message: asError2(error).message
    });
    throw error;
  }
};
|
|
948
|
+
// Looks up a single schedule by id and returns its public info view,
// or null when no such schedule is registered.
const get = async (cfg) => {
  const schedule = schedules.get(cfg.id);
  return schedule ? asInfo(schedule) : null;
};
|
|
954
|
+
// Returns the public info view of every registered schedule,
// sorted by schedule id for a stable listing order.
const listSchedules = async () => {
  const infos = [...schedules.values()].map((s) => asInfo(s));
  infos.sort((a, b) => a.id.localeCompare(b.id));
  return infos;
};
|
|
957
|
+
// Starts the scheduler tick loop; idempotent when already running.
// The loop promise is retained so stop() can await a clean shutdown.
const start = () => {
  if (!running) {
    running = true;
    loopPromise = loop();
  }
};
|
|
963
|
+
// Stops the scheduler: clears the running flag, steps down as leader,
// releases the leader lock, and waits for the in-flight tick loop to
// finish. Idempotent when not running.
const stop = async () => {
  if (!running) return;
  running = false;
  setLeader(false, "stop");
  await relinquishLeadership();
  const pending = loopPromise;
  if (pending) {
    await pending;
    loopPromise = null;
  }
};
|
|
974
|
+
// Public scheduler handle: lifecycle control (start/stop), schedule CRUD
// (register/unregister/get/list), manual triggering, and a snapshot copy
// of the internal metrics counters.
return {
  id: config.id,
  start,
  stop,
  register,
  unregister,
  triggerNow,
  get,
  list: listSchedules,
  // Shallow copy so callers cannot mutate the live counters.
  metrics: () => ({ ...metricsState })
};
|
|
985
|
+
};
|
|
986
|
+
export {
|
|
987
|
+
scheduler
|
|
988
|
+
};
|