@microfox/ai-worker 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/README.md +19 -2
- package/dist/chunk-72XGFZCE.mjs +163 -0
- package/dist/chunk-72XGFZCE.mjs.map +1 -0
- package/dist/chunk-7LQNS2SG.mjs +797 -0
- package/dist/chunk-7LQNS2SG.mjs.map +1 -0
- package/dist/chunk-AOXGONGI.mjs +351 -0
- package/dist/chunk-AOXGONGI.mjs.map +1 -0
- package/dist/client-BqSJQ9mZ.d.mts +183 -0
- package/dist/client-BqSJQ9mZ.d.ts +183 -0
- package/dist/client.d.mts +2 -64
- package/dist/client.d.ts +2 -64
- package/dist/client.js +88 -4
- package/dist/client.js.map +1 -1
- package/dist/client.mjs +11 -3
- package/dist/handler.d.mts +113 -14
- package/dist/handler.d.ts +113 -14
- package/dist/handler.js +823 -6
- package/dist/handler.js.map +1 -1
- package/dist/handler.mjs +10 -3
- package/dist/index.d.mts +3 -3
- package/dist/index.d.ts +3 -3
- package/dist/index.js +1059 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +167 -8
- package/dist/index.mjs.map +1 -1
- package/dist/queueJobStore.d.mts +53 -0
- package/dist/queueJobStore.d.ts +53 -0
- package/dist/queueJobStore.js +378 -0
- package/dist/queueJobStore.js.map +1 -0
- package/dist/queueJobStore.mjs +14 -0
- package/dist/queueJobStore.mjs.map +1 -0
- package/package.json +9 -2
- package/dist/chunk-FQCZSXDI.mjs +0 -83
- package/dist/chunk-FQCZSXDI.mjs.map +0 -1
- package/dist/chunk-WVR4JVWK.mjs +0 -285
- package/dist/chunk-WVR4JVWK.mjs.map +0 -1
package/dist/index.js
CHANGED
|
@@ -20,23 +20,31 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
20
20
|
// src/index.ts
|
|
21
21
|
var index_exports = {};
|
|
22
22
|
__export(index_exports, {
|
|
23
|
+
SQS_MAX_DELAY_SECONDS: () => SQS_MAX_DELAY_SECONDS,
|
|
23
24
|
clearWorkersConfigCache: () => clearWorkersConfigCache,
|
|
24
25
|
createLambdaEntrypoint: () => createLambdaEntrypoint,
|
|
25
26
|
createLambdaHandler: () => createLambdaHandler,
|
|
26
27
|
createWorker: () => createWorker,
|
|
28
|
+
createWorkerLogger: () => createWorkerLogger,
|
|
29
|
+
defineWorkerQueue: () => defineWorkerQueue,
|
|
27
30
|
dispatch: () => dispatch,
|
|
28
31
|
dispatchLocal: () => dispatchLocal,
|
|
32
|
+
dispatchQueue: () => dispatchQueue,
|
|
33
|
+
dispatchWorker: () => dispatchWorker,
|
|
34
|
+
getQueueStartUrl: () => getQueueStartUrl,
|
|
29
35
|
getWorkersConfig: () => getWorkersConfig,
|
|
30
|
-
|
|
36
|
+
getWorkersTriggerUrl: () => getWorkersTriggerUrl,
|
|
37
|
+
resolveQueueUrl: () => resolveQueueUrl,
|
|
38
|
+
wrapHandlerForQueue: () => wrapHandlerForQueue
|
|
31
39
|
});
|
|
32
40
|
module.exports = __toCommonJS(index_exports);
|
|
33
41
|
|
|
34
42
|
// src/client.ts
|
|
35
43
|
function getWorkersTriggerUrl() {
|
|
36
|
-
const raw = process.env.WORKER_BASE_URL || process.env.
|
|
44
|
+
const raw = process.env.WORKER_BASE_URL || process.env.WORKERS_TRIGGER_API_URL || process.env.WORKERS_CONFIG_API_URL;
|
|
37
45
|
if (!raw) {
|
|
38
46
|
throw new Error(
|
|
39
|
-
"WORKER_BASE_URL
|
|
47
|
+
"WORKER_BASE_URL is required for background workers. Set it server-side only."
|
|
40
48
|
);
|
|
41
49
|
}
|
|
42
50
|
const url = new URL(raw);
|
|
@@ -48,6 +56,23 @@ function getWorkersTriggerUrl() {
|
|
|
48
56
|
url.pathname = `${basePath}/workers/trigger`.replace(/\/+$/, "");
|
|
49
57
|
return url.toString();
|
|
50
58
|
}
|
|
59
|
+
function getQueueStartUrl(queueId) {
|
|
60
|
+
const raw = process.env.WORKER_BASE_URL || process.env.WORKERS_TRIGGER_API_URL || process.env.WORKERS_CONFIG_API_URL;
|
|
61
|
+
if (!raw) {
|
|
62
|
+
throw new Error(
|
|
63
|
+
"WORKER_BASE_URL is required for background workers. Set it server-side only."
|
|
64
|
+
);
|
|
65
|
+
}
|
|
66
|
+
const url = new URL(raw);
|
|
67
|
+
url.search = "";
|
|
68
|
+
url.hash = "";
|
|
69
|
+
const path = url.pathname || "";
|
|
70
|
+
url.pathname = path.replace(/\/?workers\/(trigger|config)\/?$/, "");
|
|
71
|
+
const basePath = url.pathname.replace(/\/+$/, "");
|
|
72
|
+
const safeSegment = encodeURIComponent(queueId);
|
|
73
|
+
url.pathname = `${basePath}/queues/${safeSegment}/start`.replace(/\/+$/, "");
|
|
74
|
+
return url.toString();
|
|
75
|
+
}
|
|
51
76
|
function serializeContext(ctx) {
|
|
52
77
|
const serialized = {};
|
|
53
78
|
if (ctx.requestId) {
|
|
@@ -105,9 +130,71 @@ async function dispatch(workerId, input, inputSchema, options, ctx) {
|
|
|
105
130
|
jobId
|
|
106
131
|
};
|
|
107
132
|
}
|
|
133
|
+
async function dispatchWorker(workerId, input, options = {}, ctx) {
|
|
134
|
+
const jobId = options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
|
135
|
+
const triggerUrl = getWorkersTriggerUrl();
|
|
136
|
+
const serializedContext = ctx ? serializeContext(ctx) : {};
|
|
137
|
+
const messageBody = {
|
|
138
|
+
workerId,
|
|
139
|
+
jobId,
|
|
140
|
+
input: input ?? {},
|
|
141
|
+
context: serializedContext,
|
|
142
|
+
webhookUrl: options.webhookUrl,
|
|
143
|
+
metadata: options.metadata || {},
|
|
144
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
145
|
+
};
|
|
146
|
+
const headers = { "Content-Type": "application/json" };
|
|
147
|
+
const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
|
|
148
|
+
if (triggerKey) headers["x-workers-trigger-key"] = triggerKey;
|
|
149
|
+
const response = await fetch(triggerUrl, {
|
|
150
|
+
method: "POST",
|
|
151
|
+
headers,
|
|
152
|
+
body: JSON.stringify({ workerId, body: messageBody })
|
|
153
|
+
});
|
|
154
|
+
if (!response.ok) {
|
|
155
|
+
const text = await response.text().catch(() => "");
|
|
156
|
+
throw new Error(
|
|
157
|
+
`Failed to trigger worker "${workerId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
|
|
158
|
+
);
|
|
159
|
+
}
|
|
160
|
+
const data = await response.json().catch(() => ({}));
|
|
161
|
+
const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;
|
|
162
|
+
return { messageId, status: "queued", jobId };
|
|
163
|
+
}
|
|
108
164
|
async function dispatchLocal(handler, input, ctx) {
|
|
109
165
|
return handler({ input, ctx: ctx || {} });
|
|
110
166
|
}
|
|
167
|
+
async function dispatchQueue(queueId, initialInput, options = {}, _ctx) {
|
|
168
|
+
const jobId = options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
|
169
|
+
const queueStartUrl = getQueueStartUrl(queueId);
|
|
170
|
+
const normalizedInput = initialInput !== null && typeof initialInput === "object" ? initialInput : { value: initialInput };
|
|
171
|
+
const headers = { "Content-Type": "application/json" };
|
|
172
|
+
const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
|
|
173
|
+
if (triggerKey) headers["x-workers-trigger-key"] = triggerKey;
|
|
174
|
+
const response = await fetch(queueStartUrl, {
|
|
175
|
+
method: "POST",
|
|
176
|
+
headers,
|
|
177
|
+
body: JSON.stringify({
|
|
178
|
+
input: normalizedInput,
|
|
179
|
+
initialInput: normalizedInput,
|
|
180
|
+
metadata: options.metadata ?? {},
|
|
181
|
+
jobId,
|
|
182
|
+
...options.webhookUrl ? { webhookUrl: options.webhookUrl } : {}
|
|
183
|
+
})
|
|
184
|
+
});
|
|
185
|
+
if (!response.ok) {
|
|
186
|
+
const text = await response.text().catch(() => "");
|
|
187
|
+
throw new Error(
|
|
188
|
+
`Failed to start queue "${queueId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
|
|
189
|
+
);
|
|
190
|
+
}
|
|
191
|
+
const data = await response.json().catch(() => ({}));
|
|
192
|
+
const messageId = data?.messageId ?? data?.jobId ?? `queue-${jobId}`;
|
|
193
|
+
return { queueId, messageId, status: "queued", jobId };
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
// src/handler.ts
|
|
197
|
+
var import_client_sqs = require("@aws-sdk/client-sqs");
|
|
111
198
|
|
|
112
199
|
// src/mongoJobStore.ts
|
|
113
200
|
var import_mongodb = require("mongodb");
|
|
@@ -134,6 +221,21 @@ async function getCollection() {
|
|
|
134
221
|
const client = await getClient();
|
|
135
222
|
return client.db(dbName).collection(collectionName);
|
|
136
223
|
}
|
|
224
|
+
async function getJobById(jobId) {
|
|
225
|
+
try {
|
|
226
|
+
const coll = await getCollection();
|
|
227
|
+
const doc = await coll.findOne({ _id: jobId });
|
|
228
|
+
if (!doc) return null;
|
|
229
|
+
const { _id, ...r } = doc;
|
|
230
|
+
return r;
|
|
231
|
+
} catch (e) {
|
|
232
|
+
console.error("[Worker] MongoDB getJobById failed:", {
|
|
233
|
+
jobId,
|
|
234
|
+
error: e?.message ?? String(e)
|
|
235
|
+
});
|
|
236
|
+
return null;
|
|
237
|
+
}
|
|
238
|
+
}
|
|
137
239
|
function createMongoJobStore(workerId, jobId, input, metadata) {
|
|
138
240
|
return {
|
|
139
241
|
update: async (update) => {
|
|
@@ -205,6 +307,36 @@ function createMongoJobStore(workerId, jobId, input, metadata) {
|
|
|
205
307
|
});
|
|
206
308
|
return null;
|
|
207
309
|
}
|
|
310
|
+
},
|
|
311
|
+
appendInternalJob: async (entry) => {
|
|
312
|
+
try {
|
|
313
|
+
const coll = await getCollection();
|
|
314
|
+
await coll.updateOne(
|
|
315
|
+
{ _id: jobId },
|
|
316
|
+
{ $push: { internalJobs: entry } }
|
|
317
|
+
);
|
|
318
|
+
} catch (e) {
|
|
319
|
+
console.error("[Worker] MongoDB job store appendInternalJob failed:", {
|
|
320
|
+
jobId,
|
|
321
|
+
workerId,
|
|
322
|
+
error: e?.message ?? String(e)
|
|
323
|
+
});
|
|
324
|
+
}
|
|
325
|
+
},
|
|
326
|
+
getJob: async (otherJobId) => {
|
|
327
|
+
try {
|
|
328
|
+
const coll = await getCollection();
|
|
329
|
+
const doc = await coll.findOne({ _id: otherJobId });
|
|
330
|
+
if (!doc) return null;
|
|
331
|
+
const { _id, ...r } = doc;
|
|
332
|
+
return r;
|
|
333
|
+
} catch (e) {
|
|
334
|
+
console.error("[Worker] MongoDB job store getJob failed:", {
|
|
335
|
+
otherJobId,
|
|
336
|
+
error: e?.message ?? String(e)
|
|
337
|
+
});
|
|
338
|
+
return null;
|
|
339
|
+
}
|
|
208
340
|
}
|
|
209
341
|
};
|
|
210
342
|
}
|
|
@@ -232,7 +364,696 @@ function isMongoJobStoreConfigured() {
|
|
|
232
364
|
return Boolean(uri?.trim());
|
|
233
365
|
}
|
|
234
366
|
|
|
367
|
+
// src/redisJobStore.ts
|
|
368
|
+
var import_redis = require("@upstash/redis");
|
|
369
|
+
var redisUrl = process.env.WORKER_UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_URL;
|
|
370
|
+
var redisToken = process.env.WORKER_UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_TOKEN;
|
|
371
|
+
var jobKeyPrefix = process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX || process.env.UPSTASH_REDIS_KEY_PREFIX || process.env.REDIS_WORKER_JOB_PREFIX || "worker:jobs:";
|
|
372
|
+
var defaultTtlSeconds = 60 * 60 * 24 * 7;
|
|
373
|
+
var jobTtlSeconds = typeof process.env.WORKER_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds : typeof process.env.REDIS_WORKER_JOB_TTL_SECONDS === "string" ? parseInt(process.env.REDIS_WORKER_JOB_TTL_SECONDS, 10) || defaultTtlSeconds : typeof process.env.WORKFLOW_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKFLOW_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds : defaultTtlSeconds;
|
|
374
|
+
var redisClient = null;
|
|
375
|
+
function getRedis() {
|
|
376
|
+
if (!redisUrl || !redisToken) {
|
|
377
|
+
throw new Error(
|
|
378
|
+
"Upstash Redis configuration missing. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN)."
|
|
379
|
+
);
|
|
380
|
+
}
|
|
381
|
+
if (!redisClient) {
|
|
382
|
+
redisClient = new import_redis.Redis({
|
|
383
|
+
url: redisUrl,
|
|
384
|
+
token: redisToken
|
|
385
|
+
});
|
|
386
|
+
}
|
|
387
|
+
return redisClient;
|
|
388
|
+
}
|
|
389
|
+
function jobKey(jobId) {
|
|
390
|
+
return `${jobKeyPrefix}${jobId}`;
|
|
391
|
+
}
|
|
392
|
+
function internalListKey(jobId) {
|
|
393
|
+
return `${jobKeyPrefix}${jobId}:internal`;
|
|
394
|
+
}
|
|
395
|
+
function isRedisJobStoreConfigured() {
|
|
396
|
+
return Boolean((redisUrl || "").trim() && (redisToken || "").trim());
|
|
397
|
+
}
|
|
398
|
+
async function loadJob(jobId) {
|
|
399
|
+
const redis = getRedis();
|
|
400
|
+
const key = jobKey(jobId);
|
|
401
|
+
const data = await redis.hgetall(key);
|
|
402
|
+
if (!data || Object.keys(data).length === 0) return null;
|
|
403
|
+
const parseJson = (val) => {
|
|
404
|
+
if (!val) return void 0;
|
|
405
|
+
try {
|
|
406
|
+
return JSON.parse(val);
|
|
407
|
+
} catch {
|
|
408
|
+
return void 0;
|
|
409
|
+
}
|
|
410
|
+
};
|
|
411
|
+
const listKey = internalListKey(jobId);
|
|
412
|
+
const listItems = await redis.lrange(listKey, 0, -1);
|
|
413
|
+
let internalJobs;
|
|
414
|
+
if (listItems && listItems.length > 0) {
|
|
415
|
+
internalJobs = listItems.map((s) => {
|
|
416
|
+
try {
|
|
417
|
+
return JSON.parse(s);
|
|
418
|
+
} catch {
|
|
419
|
+
return null;
|
|
420
|
+
}
|
|
421
|
+
}).filter(Boolean);
|
|
422
|
+
} else {
|
|
423
|
+
internalJobs = parseJson(data.internalJobs);
|
|
424
|
+
}
|
|
425
|
+
const record = {
|
|
426
|
+
jobId: data.jobId,
|
|
427
|
+
workerId: data.workerId,
|
|
428
|
+
status: data.status || "queued",
|
|
429
|
+
input: parseJson(data.input) ?? {},
|
|
430
|
+
output: parseJson(data.output),
|
|
431
|
+
error: parseJson(data.error),
|
|
432
|
+
metadata: parseJson(data.metadata) ?? {},
|
|
433
|
+
internalJobs,
|
|
434
|
+
createdAt: data.createdAt,
|
|
435
|
+
updatedAt: data.updatedAt,
|
|
436
|
+
completedAt: data.completedAt
|
|
437
|
+
};
|
|
438
|
+
return record;
|
|
439
|
+
}
|
|
440
|
+
function createRedisJobStore(workerId, jobId, input, metadata) {
|
|
441
|
+
return {
|
|
442
|
+
update: async (update) => {
|
|
443
|
+
const redis = getRedis();
|
|
444
|
+
const key = jobKey(jobId);
|
|
445
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
446
|
+
const existing = await loadJob(jobId);
|
|
447
|
+
const next = {};
|
|
448
|
+
const mergedMeta = { ...existing?.metadata ?? {} };
|
|
449
|
+
if (update.metadata) {
|
|
450
|
+
Object.assign(mergedMeta, update.metadata);
|
|
451
|
+
}
|
|
452
|
+
if (update.progress !== void 0 || update.progressMessage !== void 0) {
|
|
453
|
+
mergedMeta.progress = update.progress;
|
|
454
|
+
mergedMeta.progressMessage = update.progressMessage;
|
|
455
|
+
}
|
|
456
|
+
next.metadata = mergedMeta;
|
|
457
|
+
if (update.status !== void 0) {
|
|
458
|
+
next.status = update.error ? "failed" : update.status;
|
|
459
|
+
if ((update.status === "completed" || update.status === "failed") && !existing?.completedAt) {
|
|
460
|
+
next.completedAt = now;
|
|
461
|
+
}
|
|
462
|
+
}
|
|
463
|
+
if (update.output !== void 0) next.output = update.output;
|
|
464
|
+
if (update.error !== void 0) next.error = update.error;
|
|
465
|
+
const toSet = {};
|
|
466
|
+
if (next.status) toSet["status"] = String(next.status);
|
|
467
|
+
if (next.output !== void 0) toSet["output"] = JSON.stringify(next.output);
|
|
468
|
+
if (next.error !== void 0) toSet["error"] = JSON.stringify(next.error);
|
|
469
|
+
if (next.metadata !== void 0) toSet["metadata"] = JSON.stringify(next.metadata);
|
|
470
|
+
if (next.completedAt) {
|
|
471
|
+
toSet["completedAt"] = next.completedAt;
|
|
472
|
+
}
|
|
473
|
+
toSet["updatedAt"] = now;
|
|
474
|
+
await redis.hset(key, toSet);
|
|
475
|
+
if (jobTtlSeconds > 0) {
|
|
476
|
+
await redis.expire(key, jobTtlSeconds);
|
|
477
|
+
}
|
|
478
|
+
},
|
|
479
|
+
get: async () => {
|
|
480
|
+
return loadJob(jobId);
|
|
481
|
+
},
|
|
482
|
+
appendInternalJob: async (entry) => {
|
|
483
|
+
const redis = getRedis();
|
|
484
|
+
const listKey = internalListKey(jobId);
|
|
485
|
+
await redis.rpush(listKey, JSON.stringify(entry));
|
|
486
|
+
const mainKey = jobKey(jobId);
|
|
487
|
+
await redis.hset(mainKey, { updatedAt: (/* @__PURE__ */ new Date()).toISOString() });
|
|
488
|
+
if (jobTtlSeconds > 0) {
|
|
489
|
+
await redis.expire(listKey, jobTtlSeconds);
|
|
490
|
+
await redis.expire(mainKey, jobTtlSeconds);
|
|
491
|
+
}
|
|
492
|
+
},
|
|
493
|
+
getJob: async (otherJobId) => {
|
|
494
|
+
return loadJob(otherJobId);
|
|
495
|
+
}
|
|
496
|
+
};
|
|
497
|
+
}
|
|
498
|
+
async function upsertRedisJob(jobId, workerId, input, metadata) {
|
|
499
|
+
const redis = getRedis();
|
|
500
|
+
const key = jobKey(jobId);
|
|
501
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
502
|
+
const doc = {
|
|
503
|
+
jobId,
|
|
504
|
+
workerId,
|
|
505
|
+
status: "queued",
|
|
506
|
+
input,
|
|
507
|
+
metadata,
|
|
508
|
+
createdAt: now,
|
|
509
|
+
updatedAt: now
|
|
510
|
+
};
|
|
511
|
+
const toSet = {
|
|
512
|
+
jobId,
|
|
513
|
+
workerId,
|
|
514
|
+
status: doc.status,
|
|
515
|
+
input: JSON.stringify(doc.input ?? {}),
|
|
516
|
+
metadata: JSON.stringify(doc.metadata ?? {}),
|
|
517
|
+
createdAt: now,
|
|
518
|
+
updatedAt: now
|
|
519
|
+
};
|
|
520
|
+
await redis.hset(key, toSet);
|
|
521
|
+
if (jobTtlSeconds > 0) {
|
|
522
|
+
await redis.expire(key, jobTtlSeconds);
|
|
523
|
+
}
|
|
524
|
+
}
|
|
525
|
+
|
|
526
|
+
// src/queueJobStore.ts
|
|
527
|
+
var import_redis2 = require("@upstash/redis");
|
|
528
|
+
var import_mongodb2 = require("mongodb");
|
|
529
|
+
var mongoUri = process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
|
|
530
|
+
var mongoDbName = process.env.DATABASE_MONGODB_DB || process.env.MONGODB_DB || "mediamake";
|
|
531
|
+
var mongoQueueCollectionName = process.env.MONGODB_QUEUE_JOBS_COLLECTION || "queue_jobs";
|
|
532
|
+
var mongoClientPromise = null;
|
|
533
|
+
async function getMongoClient() {
|
|
534
|
+
if (!mongoUri) {
|
|
535
|
+
throw new Error(
|
|
536
|
+
"MongoDB URI required for queue job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
|
|
537
|
+
);
|
|
538
|
+
}
|
|
539
|
+
if (!mongoClientPromise) {
|
|
540
|
+
mongoClientPromise = new import_mongodb2.MongoClient(mongoUri, {
|
|
541
|
+
maxPoolSize: 10,
|
|
542
|
+
minPoolSize: 0,
|
|
543
|
+
serverSelectionTimeoutMS: 1e4
|
|
544
|
+
}).connect();
|
|
545
|
+
}
|
|
546
|
+
return mongoClientPromise;
|
|
547
|
+
}
|
|
548
|
+
async function getMongoQueueCollection() {
|
|
549
|
+
const client = await getMongoClient();
|
|
550
|
+
return client.db(mongoDbName).collection(mongoQueueCollectionName);
|
|
551
|
+
}
|
|
552
|
+
var redisUrl2 = process.env.WORKER_UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_URL;
|
|
553
|
+
var redisToken2 = process.env.WORKER_UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_TOKEN;
|
|
554
|
+
var queueKeyPrefix = process.env.WORKER_UPSTASH_REDIS_QUEUE_PREFIX || process.env.UPSTASH_REDIS_QUEUE_PREFIX || "worker:queue-jobs:";
|
|
555
|
+
var defaultTtlSeconds2 = 60 * 60 * 24 * 7;
|
|
556
|
+
var queueJobTtlSeconds = typeof process.env.WORKER_QUEUE_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKER_QUEUE_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds2 : typeof process.env.WORKER_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds2 : defaultTtlSeconds2;
|
|
557
|
+
var redisClient2 = null;
|
|
558
|
+
function getRedis2() {
|
|
559
|
+
if (!redisUrl2 || !redisToken2) {
|
|
560
|
+
throw new Error(
|
|
561
|
+
"Upstash Redis configuration missing for queue job store. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN)."
|
|
562
|
+
);
|
|
563
|
+
}
|
|
564
|
+
if (!redisClient2) {
|
|
565
|
+
redisClient2 = new import_redis2.Redis({
|
|
566
|
+
url: redisUrl2,
|
|
567
|
+
token: redisToken2
|
|
568
|
+
});
|
|
569
|
+
}
|
|
570
|
+
return redisClient2;
|
|
571
|
+
}
|
|
572
|
+
function queueKey(id) {
|
|
573
|
+
return `${queueKeyPrefix}${id}`;
|
|
574
|
+
}
|
|
575
|
+
function stepsFromHash(val) {
|
|
576
|
+
if (Array.isArray(val)) return val;
|
|
577
|
+
if (typeof val === "string") {
|
|
578
|
+
try {
|
|
579
|
+
const parsed = JSON.parse(val);
|
|
580
|
+
return Array.isArray(parsed) ? parsed : [];
|
|
581
|
+
} catch {
|
|
582
|
+
return [];
|
|
583
|
+
}
|
|
584
|
+
}
|
|
585
|
+
return [];
|
|
586
|
+
}
|
|
587
|
+
function metadataFromHash(val) {
|
|
588
|
+
if (val && typeof val === "object" && !Array.isArray(val)) return val;
|
|
589
|
+
if (typeof val === "string") {
|
|
590
|
+
try {
|
|
591
|
+
const parsed = JSON.parse(val);
|
|
592
|
+
return parsed && typeof parsed === "object" ? parsed : {};
|
|
593
|
+
} catch {
|
|
594
|
+
return {};
|
|
595
|
+
}
|
|
596
|
+
}
|
|
597
|
+
return {};
|
|
598
|
+
}
|
|
599
|
+
async function loadQueueJobRedis(queueJobId) {
|
|
600
|
+
const redis = getRedis2();
|
|
601
|
+
const key = queueKey(queueJobId);
|
|
602
|
+
const data = await redis.hgetall(key);
|
|
603
|
+
if (!data || typeof data !== "object" || Object.keys(data).length === 0) return null;
|
|
604
|
+
const d = data;
|
|
605
|
+
const record = {
|
|
606
|
+
id: d.id === void 0 ? queueJobId : String(d.id),
|
|
607
|
+
queueId: String(d.queueId ?? ""),
|
|
608
|
+
status: String(d.status ?? "running"),
|
|
609
|
+
steps: stepsFromHash(d.steps),
|
|
610
|
+
metadata: metadataFromHash(d.metadata),
|
|
611
|
+
createdAt: String(d.createdAt ?? (/* @__PURE__ */ new Date()).toISOString()),
|
|
612
|
+
updatedAt: String(d.updatedAt ?? (/* @__PURE__ */ new Date()).toISOString()),
|
|
613
|
+
completedAt: d.completedAt != null ? String(d.completedAt) : void 0
|
|
614
|
+
};
|
|
615
|
+
return record;
|
|
616
|
+
}
|
|
617
|
+
async function saveQueueJobRedis(record) {
|
|
618
|
+
const redis = getRedis2();
|
|
619
|
+
const key = queueKey(record.id);
|
|
620
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
621
|
+
const toSet = {
|
|
622
|
+
id: record.id,
|
|
623
|
+
queueId: record.queueId,
|
|
624
|
+
status: record.status,
|
|
625
|
+
steps: JSON.stringify(record.steps || []),
|
|
626
|
+
metadata: JSON.stringify(record.metadata || {}),
|
|
627
|
+
createdAt: record.createdAt || now,
|
|
628
|
+
updatedAt: record.updatedAt || now
|
|
629
|
+
};
|
|
630
|
+
if (record.completedAt) {
|
|
631
|
+
toSet.completedAt = record.completedAt;
|
|
632
|
+
}
|
|
633
|
+
await redis.hset(key, toSet);
|
|
634
|
+
if (queueJobTtlSeconds > 0) {
|
|
635
|
+
await redis.expire(key, queueJobTtlSeconds);
|
|
636
|
+
}
|
|
637
|
+
}
|
|
638
|
+
function getStoreType() {
|
|
639
|
+
const t = (process.env.WORKER_DATABASE_TYPE || "upstash-redis").toLowerCase();
|
|
640
|
+
return t === "mongodb" ? "mongodb" : "upstash-redis";
|
|
641
|
+
}
|
|
642
|
+
function preferMongo() {
|
|
643
|
+
return getStoreType() === "mongodb" && Boolean(mongoUri?.trim());
|
|
644
|
+
}
|
|
645
|
+
function preferRedis() {
|
|
646
|
+
return getStoreType() !== "mongodb" && Boolean((redisUrl2 || "").trim() && (redisToken2 || "").trim());
|
|
647
|
+
}
|
|
648
|
+
async function upsertInitialQueueJob(options) {
|
|
649
|
+
const { queueJobId, queueId, firstWorkerId, firstWorkerJobId, metadata } = options;
|
|
650
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
651
|
+
if (preferMongo()) {
|
|
652
|
+
const coll = await getMongoQueueCollection();
|
|
653
|
+
const existing = await coll.findOne({ _id: queueJobId });
|
|
654
|
+
if (existing) {
|
|
655
|
+
const steps = existing.steps ?? [];
|
|
656
|
+
if (steps.length === 0) {
|
|
657
|
+
steps.push({
|
|
658
|
+
workerId: firstWorkerId,
|
|
659
|
+
workerJobId: firstWorkerJobId,
|
|
660
|
+
status: "queued"
|
|
661
|
+
});
|
|
662
|
+
}
|
|
663
|
+
await coll.updateOne(
|
|
664
|
+
{ _id: queueJobId },
|
|
665
|
+
{
|
|
666
|
+
$set: {
|
|
667
|
+
steps,
|
|
668
|
+
updatedAt: now
|
|
669
|
+
}
|
|
670
|
+
}
|
|
671
|
+
);
|
|
672
|
+
} else {
|
|
673
|
+
const doc = {
|
|
674
|
+
_id: queueJobId,
|
|
675
|
+
id: queueJobId,
|
|
676
|
+
queueId,
|
|
677
|
+
status: "running",
|
|
678
|
+
steps: [
|
|
679
|
+
{
|
|
680
|
+
workerId: firstWorkerId,
|
|
681
|
+
workerJobId: firstWorkerJobId,
|
|
682
|
+
status: "queued"
|
|
683
|
+
}
|
|
684
|
+
],
|
|
685
|
+
metadata: metadata ?? {},
|
|
686
|
+
createdAt: now,
|
|
687
|
+
updatedAt: now
|
|
688
|
+
};
|
|
689
|
+
await coll.updateOne(
|
|
690
|
+
{ _id: queueJobId },
|
|
691
|
+
{ $set: doc },
|
|
692
|
+
{ upsert: true }
|
|
693
|
+
);
|
|
694
|
+
}
|
|
695
|
+
return;
|
|
696
|
+
}
|
|
697
|
+
if (preferRedis()) {
|
|
698
|
+
const existing = await loadQueueJobRedis(queueJobId);
|
|
699
|
+
if (existing) {
|
|
700
|
+
if (!existing.steps || existing.steps.length === 0) {
|
|
701
|
+
existing.steps = [
|
|
702
|
+
{
|
|
703
|
+
workerId: firstWorkerId,
|
|
704
|
+
workerJobId: firstWorkerJobId,
|
|
705
|
+
status: "queued"
|
|
706
|
+
}
|
|
707
|
+
];
|
|
708
|
+
}
|
|
709
|
+
existing.updatedAt = now;
|
|
710
|
+
await saveQueueJobRedis(existing);
|
|
711
|
+
} else {
|
|
712
|
+
const record = {
|
|
713
|
+
id: queueJobId,
|
|
714
|
+
queueId,
|
|
715
|
+
status: "running",
|
|
716
|
+
steps: [
|
|
717
|
+
{
|
|
718
|
+
workerId: firstWorkerId,
|
|
719
|
+
workerJobId: firstWorkerJobId,
|
|
720
|
+
status: "queued"
|
|
721
|
+
}
|
|
722
|
+
],
|
|
723
|
+
metadata: metadata ?? {},
|
|
724
|
+
createdAt: now,
|
|
725
|
+
updatedAt: now
|
|
726
|
+
};
|
|
727
|
+
await saveQueueJobRedis(record);
|
|
728
|
+
}
|
|
729
|
+
}
|
|
730
|
+
}
|
|
731
|
+
async function updateQueueJobStepInStore(options) {
|
|
732
|
+
const { queueJobId, stepIndex, status, input, output, error } = options;
|
|
733
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
734
|
+
if (preferMongo()) {
|
|
735
|
+
const coll = await getMongoQueueCollection();
|
|
736
|
+
const existing = await coll.findOne({ _id: queueJobId });
|
|
737
|
+
if (!existing) return;
|
|
738
|
+
const step = existing.steps[stepIndex];
|
|
739
|
+
if (!step) return;
|
|
740
|
+
const mergedStep = {
|
|
741
|
+
...step,
|
|
742
|
+
status,
|
|
743
|
+
...input !== void 0 && { input },
|
|
744
|
+
...output !== void 0 && { output },
|
|
745
|
+
...error !== void 0 && { error },
|
|
746
|
+
startedAt: step.startedAt ?? (status === "running" ? now : step.startedAt),
|
|
747
|
+
completedAt: step.completedAt ?? (status === "completed" || status === "failed" ? now : step.completedAt)
|
|
748
|
+
};
|
|
749
|
+
const setDoc = {
|
|
750
|
+
steps: existing.steps,
|
|
751
|
+
updatedAt: now
|
|
752
|
+
};
|
|
753
|
+
setDoc.steps[stepIndex] = mergedStep;
|
|
754
|
+
if (status === "failed") {
|
|
755
|
+
setDoc.status = "failed";
|
|
756
|
+
if (!existing.completedAt) setDoc.completedAt = now;
|
|
757
|
+
} else if (status === "completed" && stepIndex === existing.steps.length - 1) {
|
|
758
|
+
setDoc.status = "completed";
|
|
759
|
+
if (!existing.completedAt) setDoc.completedAt = now;
|
|
760
|
+
}
|
|
761
|
+
await coll.updateOne(
|
|
762
|
+
{ _id: queueJobId },
|
|
763
|
+
{
|
|
764
|
+
$set: setDoc
|
|
765
|
+
}
|
|
766
|
+
);
|
|
767
|
+
return;
|
|
768
|
+
}
|
|
769
|
+
if (preferRedis()) {
|
|
770
|
+
const existing = await loadQueueJobRedis(queueJobId);
|
|
771
|
+
if (!existing) {
|
|
772
|
+
return;
|
|
773
|
+
}
|
|
774
|
+
const steps = existing.steps || [];
|
|
775
|
+
const step = steps[stepIndex];
|
|
776
|
+
if (!step) {
|
|
777
|
+
return;
|
|
778
|
+
}
|
|
779
|
+
step.status = status;
|
|
780
|
+
if (input !== void 0) step.input = input;
|
|
781
|
+
if (output !== void 0) step.output = output;
|
|
782
|
+
if (error !== void 0) step.error = error;
|
|
783
|
+
if (status === "running") {
|
|
784
|
+
step.startedAt = step.startedAt ?? now;
|
|
785
|
+
}
|
|
786
|
+
if (status === "completed" || status === "failed") {
|
|
787
|
+
step.completedAt = step.completedAt ?? now;
|
|
788
|
+
}
|
|
789
|
+
existing.steps = steps;
|
|
790
|
+
existing.updatedAt = now;
|
|
791
|
+
if (status === "failed") {
|
|
792
|
+
existing.status = "failed";
|
|
793
|
+
existing.completedAt = existing.completedAt ?? now;
|
|
794
|
+
} else if (status === "completed" && stepIndex === steps.length - 1) {
|
|
795
|
+
existing.status = "completed";
|
|
796
|
+
existing.completedAt = existing.completedAt ?? now;
|
|
797
|
+
}
|
|
798
|
+
await saveQueueJobRedis(existing);
|
|
799
|
+
}
|
|
800
|
+
}
|
|
801
|
+
async function appendQueueJobStepInStore(options) {
|
|
802
|
+
const { queueJobId, workerId, workerJobId } = options;
|
|
803
|
+
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
804
|
+
if (preferMongo()) {
|
|
805
|
+
const coll = await getMongoQueueCollection();
|
|
806
|
+
await coll.updateOne(
|
|
807
|
+
{ _id: queueJobId },
|
|
808
|
+
{
|
|
809
|
+
$push: {
|
|
810
|
+
steps: {
|
|
811
|
+
workerId,
|
|
812
|
+
workerJobId,
|
|
813
|
+
status: "queued"
|
|
814
|
+
}
|
|
815
|
+
},
|
|
816
|
+
$set: { updatedAt: now }
|
|
817
|
+
}
|
|
818
|
+
);
|
|
819
|
+
return;
|
|
820
|
+
}
|
|
821
|
+
if (preferRedis()) {
|
|
822
|
+
const existing = await loadQueueJobRedis(queueJobId);
|
|
823
|
+
if (!existing) return;
|
|
824
|
+
const steps = existing.steps || [];
|
|
825
|
+
steps.push({
|
|
826
|
+
workerId,
|
|
827
|
+
workerJobId,
|
|
828
|
+
status: "queued"
|
|
829
|
+
});
|
|
830
|
+
existing.steps = steps;
|
|
831
|
+
existing.updatedAt = now;
|
|
832
|
+
await saveQueueJobRedis(existing);
|
|
833
|
+
}
|
|
834
|
+
}
|
|
835
|
+
|
|
235
836
|
// src/handler.ts
|
|
837
|
+
var SQS_MAX_DELAY_SECONDS = 900;
|
|
838
|
+
function createWorkerLogger(jobId, workerId) {
|
|
839
|
+
const prefix = (level) => `[${level}] [${workerId}] [${jobId}]`;
|
|
840
|
+
return {
|
|
841
|
+
info(msg, data) {
|
|
842
|
+
console.log(prefix("INFO"), msg, data !== void 0 ? JSON.stringify(data) : "");
|
|
843
|
+
},
|
|
844
|
+
warn(msg, data) {
|
|
845
|
+
console.warn(prefix("WARN"), msg, data !== void 0 ? JSON.stringify(data) : "");
|
|
846
|
+
},
|
|
847
|
+
error(msg, data) {
|
|
848
|
+
console.error(prefix("ERROR"), msg, data !== void 0 ? JSON.stringify(data) : "");
|
|
849
|
+
},
|
|
850
|
+
debug(msg, data) {
|
|
851
|
+
if (process.env.DEBUG || process.env.WORKER_DEBUG) {
|
|
852
|
+
console.debug(prefix("DEBUG"), msg, data !== void 0 ? JSON.stringify(data) : "");
|
|
853
|
+
}
|
|
854
|
+
}
|
|
855
|
+
};
|
|
856
|
+
}
|
|
857
|
+
var WORKER_QUEUE_KEY = "__workerQueue";
|
|
858
|
+
async function notifyQueueJobStep(queueJobId, action, params) {
|
|
859
|
+
try {
|
|
860
|
+
if (action === "append") {
|
|
861
|
+
if (!params.workerId || !params.workerJobId) return;
|
|
862
|
+
await appendQueueJobStepInStore({
|
|
863
|
+
queueJobId,
|
|
864
|
+
workerId: params.workerId,
|
|
865
|
+
workerJobId: params.workerJobId
|
|
866
|
+
});
|
|
867
|
+
if (process.env.DEBUG_WORKER_QUEUES === "1") {
|
|
868
|
+
console.log("[Worker] Queue job step appended", {
|
|
869
|
+
queueJobId,
|
|
870
|
+
workerId: params.workerId,
|
|
871
|
+
workerJobId: params.workerJobId
|
|
872
|
+
});
|
|
873
|
+
}
|
|
874
|
+
return;
|
|
875
|
+
}
|
|
876
|
+
if (params.stepIndex === void 0) return;
|
|
877
|
+
const status = action === "start" ? "running" : action === "complete" ? "completed" : action === "fail" ? "failed" : void 0;
|
|
878
|
+
if (!status) return;
|
|
879
|
+
await updateQueueJobStepInStore({
|
|
880
|
+
queueJobId,
|
|
881
|
+
stepIndex: params.stepIndex,
|
|
882
|
+
workerId: params.workerId || "",
|
|
883
|
+
workerJobId: params.workerJobId,
|
|
884
|
+
status,
|
|
885
|
+
input: params.input,
|
|
886
|
+
output: params.output,
|
|
887
|
+
error: params.error
|
|
888
|
+
});
|
|
889
|
+
console.log("[Worker] Queue job step updated", {
|
|
890
|
+
queueId: params.queueId ?? queueJobId,
|
|
891
|
+
queueJobId,
|
|
892
|
+
stepIndex: params.stepIndex,
|
|
893
|
+
workerId: params.workerId,
|
|
894
|
+
status
|
|
895
|
+
});
|
|
896
|
+
} catch (err) {
|
|
897
|
+
console.warn("[Worker] Queue job update error:", {
|
|
898
|
+
queueJobId,
|
|
899
|
+
action,
|
|
900
|
+
error: err?.message ?? String(err)
|
|
901
|
+
});
|
|
902
|
+
}
|
|
903
|
+
}
|
|
904
|
+
// Wraps a worker handler so that, when the incoming input carries queue-chain
// context under WORKER_QUEUE_KEY, the wrapper records step progress in the
// queue-job store and dispatches the next worker in the chain after the
// handler finishes. Inputs without queue context pass through untouched.
function wrapHandlerForQueue(handler, queueRuntime) {
  return async (params) => {
    const queueContext = params.input?.[WORKER_QUEUE_KEY];
    // Always run the wrapped handler first; chain bookkeeping follows.
    const output = await handler(params);
    if (!queueContext || typeof queueContext !== "object" || !queueContext.id) {
      // Not part of a queue chain: behave exactly like the raw handler.
      return output;
    }
    const { id: queueId, stepIndex, initialInput, queueJobId } = queueContext;
    const jobId = params.ctx?.jobId;
    const workerId = params.ctx?.workerId ?? "";
    const next = queueRuntime.getNextStep(queueId, stepIndex);
    // Pre-generate the child's job id so it can be both recorded via
    // "append" and pinned on the dispatch below.
    const childJobId = next ? `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}` : void 0;
    if (next && queueJobId) {
      // Register the upcoming step before marking this one complete.
      await notifyQueueJobStep(queueJobId, "append", {
        workerJobId: childJobId,
        workerId: next.workerId
      });
    }
    if (queueJobId && typeof stepIndex === "number") {
      await notifyQueueJobStep(queueJobId, "complete", {
        queueId,
        stepIndex,
        workerJobId: jobId,
        workerId,
        output
      });
    }
    if (!next) {
      // Last step of the chain: nothing left to dispatch.
      return output;
    }
    // By default the next step's input is this step's output; a queue-level
    // mapInputFromPrev hook may transform it.
    let nextInput = output;
    if (next.mapInputFromPrev && typeof queueRuntime.invokeMapInput === "function") {
      let previousOutputs = [];
      if (queueJobId && typeof queueRuntime.getQueueJob === "function") {
        try {
          const job = await queueRuntime.getQueueJob(queueJobId);
          if (job?.steps) {
            // Stored outputs of earlier steps plus this step's in-memory
            // output (which may not be persisted yet).
            const fromStore = job.steps.slice(0, stepIndex).map((s, i) => ({ stepIndex: i, workerId: s.workerId, output: s.output }));
            previousOutputs = fromStore.concat([
              { stepIndex, workerId: params.ctx?.workerId ?? "", output }
            ]);
          }
        } catch (e) {
          // Best-effort: mapping proceeds with an empty history on failure.
          if (process.env.AI_WORKER_QUEUES_DEBUG === "1") {
            console.warn("[Worker] getQueueJob failed, mapping without previousOutputs:", e?.message ?? e);
          }
        }
      }
      nextInput = await queueRuntime.invokeMapInput(
        queueId,
        stepIndex + 1,
        initialInput,
        previousOutputs
      );
    }
    // Non-object outputs are boxed as { value } so the queue context can be
    // attached alongside them.
    const nextInputWithQueue = {
      ...nextInput !== null && typeof nextInput === "object" ? nextInput : { value: nextInput },
      [WORKER_QUEUE_KEY]: {
        id: queueId,
        stepIndex: stepIndex + 1,
        initialInput,
        queueJobId
      }
    };
    const debug = process.env.AI_WORKER_QUEUES_DEBUG === "1";
    if (debug) {
      console.log("[Worker] Queue chain dispatching next:", {
        queueId,
        fromStep: stepIndex,
        nextWorkerId: next.workerId,
        delaySeconds: next.delaySeconds
      });
    }
    // Fire-and-forget dispatch of the next step (await: false), reusing the
    // pre-generated child job id so the store entry matches.
    await params.ctx.dispatchWorker(next.workerId, nextInputWithQueue, {
      await: false,
      delaySeconds: next.delaySeconds,
      jobId: childJobId
    });
    return output;
  };
}
|
|
985
|
+
// Defaults for the `await: true` child-job polling loop in dispatchWorker:
// check every 2 seconds, give up after 15 minutes.
var DEFAULT_POLL_INTERVAL_MS = 2e3;
var DEFAULT_POLL_TIMEOUT_MS = 15 * 60 * 1e3;
|
|
987
|
+
// Normalize a worker id for use inside an environment-variable name:
// dashes become underscores and the result is upper-cased.
function sanitizeWorkerIdForEnv(workerId) {
  return workerId.toUpperCase().replaceAll("-", "_");
}
// Resolve the dispatch queue URL for a worker from the environment
// (WORKER_QUEUE_URL_<SANITIZED_ID>); returns undefined when the variable is
// unset or blank after trimming.
function getQueueUrlForWorker(calleeWorkerId) {
  const envKey = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeWorkerId)}`;
  const trimmed = process.env[envKey]?.trim();
  return trimmed ? trimmed : void 0;
}
|
|
994
|
+
// Builds the `ctx.dispatchWorker` function for Lambda-hosted handlers. It
// enqueues a job for another worker on that worker's SQS queue and, when
// `options.await` is true, polls the shared job store until the child job
// reaches a terminal state.
// NOTE(review): parentJobId and parentWorkerId are not referenced in the
// body — presumably kept for signature symmetry with the local variant.
function createDispatchWorker(parentJobId, parentWorkerId, parentContext, jobStore) {
  return async (calleeWorkerId, input, options) => {
    // Caller may pin a job id (queue chains do); otherwise generate one.
    const childJobId = options?.jobId || `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
    const metadata = options?.metadata ?? {};
    // Only the request id is propagated into the child worker's context.
    const serializedContext = {};
    if (parentContext.requestId) serializedContext.requestId = parentContext.requestId;
    const messageBody = {
      workerId: calleeWorkerId,
      jobId: childJobId,
      input: input ?? {},
      context: serializedContext,
      webhookUrl: options?.webhookUrl,
      metadata,
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    };
    const queueUrl = getQueueUrlForWorker(calleeWorkerId);
    if (queueUrl) {
      const region = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || "us-east-1";
      const sqs = new import_client_sqs.SQSClient({ region });
      // DelaySeconds applies only to fire-and-forget dispatches, clamped to
      // [0, SQS_MAX_DELAY_SECONDS] (SQS caps message delay at 900s).
      const delaySeconds = options?.await !== true && options?.delaySeconds != null ? Math.min(SQS_MAX_DELAY_SECONDS, Math.max(0, Math.floor(options.delaySeconds))) : void 0;
      const sendResult = await sqs.send(
        new import_client_sqs.SendMessageCommand({
          QueueUrl: queueUrl,
          MessageBody: JSON.stringify(messageBody),
          ...delaySeconds !== void 0 && delaySeconds > 0 ? { DelaySeconds: delaySeconds } : {}
        })
      );
      const messageId = sendResult.MessageId ?? void 0;
      // Record the child job under the parent so UIs can show the link.
      if (jobStore?.appendInternalJob) {
        await jobStore.appendInternalJob({ jobId: childJobId, workerId: calleeWorkerId });
      }
      if (options?.await && jobStore?.getJob) {
        // Synchronous mode: poll the job store until completed/failed or the
        // timeout elapses.
        const pollIntervalMs = options.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;
        const pollTimeoutMs = options.pollTimeoutMs ?? DEFAULT_POLL_TIMEOUT_MS;
        const deadline = Date.now() + pollTimeoutMs;
        while (Date.now() < deadline) {
          const child = await jobStore.getJob(childJobId);
          if (!child) {
            // Job row may not exist yet; keep waiting.
            await new Promise((r) => setTimeout(r, pollIntervalMs));
            continue;
          }
          if (child.status === "completed") {
            return { jobId: childJobId, messageId, output: child.output };
          }
          if (child.status === "failed") {
            const err = child.error;
            throw new Error(
              err?.message ?? `Child worker ${calleeWorkerId} failed`
            );
          }
          await new Promise((r) => setTimeout(r, pollIntervalMs));
        }
        throw new Error(
          `Child worker ${calleeWorkerId} (${childJobId}) did not complete within ${pollTimeoutMs}ms`
        );
      }
      // Fire-and-forget: caller gets the ids for later correlation.
      return { jobId: childJobId, messageId };
    }
    // No queue URL configured for the target worker — dispatch is impossible.
    throw new Error(
      `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeWorkerId)} is not set. Configure queue URL for worker-to-worker dispatch, or run in local mode.`
    );
  };
}
|
|
236
1057
|
async function sendWebhook(webhookUrl, payload) {
|
|
237
1058
|
try {
|
|
238
1059
|
const response = await fetch(webhookUrl, {
|
|
@@ -272,24 +1093,63 @@ function createLambdaHandler(handler, outputSchema) {
|
|
|
272
1093
|
try {
|
|
273
1094
|
messageBody = JSON.parse(record.body);
|
|
274
1095
|
const { workerId, jobId, input, context, webhookUrl, metadata = {} } = messageBody;
|
|
1096
|
+
const raw = (process.env.WORKER_DATABASE_TYPE || "upstash-redis").toLowerCase();
|
|
1097
|
+
const jobStoreType = raw === "mongodb" ? "mongodb" : "upstash-redis";
|
|
1098
|
+
if (jobStoreType === "upstash-redis" && isRedisJobStoreConfigured()) {
|
|
1099
|
+
const existing = await loadJob(jobId);
|
|
1100
|
+
if (existing && (existing.status === "completed" || existing.status === "failed")) {
|
|
1101
|
+
console.log("[Worker] Skipping already terminal job (idempotent):", {
|
|
1102
|
+
jobId,
|
|
1103
|
+
workerId,
|
|
1104
|
+
status: existing.status
|
|
1105
|
+
});
|
|
1106
|
+
return;
|
|
1107
|
+
}
|
|
1108
|
+
} else if (jobStoreType === "mongodb" || isMongoJobStoreConfigured()) {
|
|
1109
|
+
const existing = await getJobById(jobId);
|
|
1110
|
+
if (existing && (existing.status === "completed" || existing.status === "failed")) {
|
|
1111
|
+
console.log("[Worker] Skipping already terminal job (idempotent):", {
|
|
1112
|
+
jobId,
|
|
1113
|
+
workerId,
|
|
1114
|
+
status: existing.status
|
|
1115
|
+
});
|
|
1116
|
+
return;
|
|
1117
|
+
}
|
|
1118
|
+
}
|
|
275
1119
|
let jobStore;
|
|
276
|
-
if (
|
|
1120
|
+
if (jobStoreType === "upstash-redis" && isRedisJobStoreConfigured()) {
|
|
1121
|
+
await upsertRedisJob(jobId, workerId, input, metadata);
|
|
1122
|
+
jobStore = createRedisJobStore(workerId, jobId, input, metadata);
|
|
1123
|
+
} else if (jobStoreType === "mongodb" || isMongoJobStoreConfigured()) {
|
|
277
1124
|
await upsertJob(jobId, workerId, input, metadata);
|
|
278
1125
|
jobStore = createMongoJobStore(workerId, jobId, input, metadata);
|
|
279
1126
|
}
|
|
280
|
-
const
|
|
1127
|
+
const baseContext = {
|
|
281
1128
|
jobId,
|
|
282
1129
|
workerId,
|
|
283
1130
|
requestId: context.requestId || lambdaContext.awsRequestId,
|
|
284
|
-
...jobStore ? { jobStore } : {},
|
|
285
1131
|
...context
|
|
286
1132
|
};
|
|
1133
|
+
const handlerContext = {
|
|
1134
|
+
...baseContext,
|
|
1135
|
+
...jobStore ? { jobStore } : {},
|
|
1136
|
+
logger: createWorkerLogger(jobId, workerId),
|
|
1137
|
+
dispatchWorker: createDispatchWorker(
|
|
1138
|
+
jobId,
|
|
1139
|
+
workerId,
|
|
1140
|
+
baseContext,
|
|
1141
|
+
jobStore
|
|
1142
|
+
)
|
|
1143
|
+
};
|
|
287
1144
|
if (jobStore) {
|
|
288
1145
|
try {
|
|
289
1146
|
await jobStore.update({ status: "running" });
|
|
1147
|
+
const queueCtxForLog = input?.__workerQueue ?? metadata?.__workerQueue;
|
|
290
1148
|
console.log("[Worker] Job status updated to running:", {
|
|
291
1149
|
jobId,
|
|
292
|
-
workerId
|
|
1150
|
+
workerId,
|
|
1151
|
+
...queueCtxForLog?.id && { queueId: queueCtxForLog.id },
|
|
1152
|
+
...queueCtxForLog?.queueJobId && { queueJobId: queueCtxForLog.queueJobId }
|
|
293
1153
|
});
|
|
294
1154
|
} catch (error) {
|
|
295
1155
|
console.warn("[Worker] Failed to update status to running:", {
|
|
@@ -299,6 +1159,33 @@ function createLambdaHandler(handler, outputSchema) {
|
|
|
299
1159
|
});
|
|
300
1160
|
}
|
|
301
1161
|
}
|
|
1162
|
+
const queueCtx = input?.__workerQueue ?? metadata?.__workerQueue;
|
|
1163
|
+
if (queueCtx?.queueJobId && typeof queueCtx.stepIndex === "number") {
|
|
1164
|
+
if (queueCtx.stepIndex === 0) {
|
|
1165
|
+
try {
|
|
1166
|
+
await upsertInitialQueueJob({
|
|
1167
|
+
queueJobId: queueCtx.queueJobId,
|
|
1168
|
+
queueId: queueCtx.id,
|
|
1169
|
+
firstWorkerId: workerId,
|
|
1170
|
+
firstWorkerJobId: jobId,
|
|
1171
|
+
metadata
|
|
1172
|
+
});
|
|
1173
|
+
} catch (e) {
|
|
1174
|
+
console.warn("[Worker] Failed to upsert initial queue job:", {
|
|
1175
|
+
queueJobId: queueCtx.queueJobId,
|
|
1176
|
+
queueId: queueCtx.id,
|
|
1177
|
+
error: e?.message ?? String(e)
|
|
1178
|
+
});
|
|
1179
|
+
}
|
|
1180
|
+
}
|
|
1181
|
+
await notifyQueueJobStep(queueCtx.queueJobId, "start", {
|
|
1182
|
+
queueId: queueCtx.id,
|
|
1183
|
+
stepIndex: queueCtx.stepIndex,
|
|
1184
|
+
workerJobId: jobId,
|
|
1185
|
+
workerId,
|
|
1186
|
+
input
|
|
1187
|
+
});
|
|
1188
|
+
}
|
|
302
1189
|
let output;
|
|
303
1190
|
try {
|
|
304
1191
|
output = await handler({
|
|
@@ -338,6 +1225,16 @@ function createLambdaHandler(handler, outputSchema) {
|
|
|
338
1225
|
});
|
|
339
1226
|
}
|
|
340
1227
|
}
|
|
1228
|
+
const queueCtxFail = input?.__workerQueue ?? metadata?.__workerQueue;
|
|
1229
|
+
if (queueCtxFail?.queueJobId && typeof queueCtxFail.stepIndex === "number") {
|
|
1230
|
+
await notifyQueueJobStep(queueCtxFail.queueJobId, "fail", {
|
|
1231
|
+
queueId: queueCtxFail.id,
|
|
1232
|
+
stepIndex: queueCtxFail.stepIndex,
|
|
1233
|
+
workerJobId: jobId,
|
|
1234
|
+
workerId,
|
|
1235
|
+
error: errorPayload.error
|
|
1236
|
+
});
|
|
1237
|
+
}
|
|
341
1238
|
if (webhookUrl) {
|
|
342
1239
|
await sendWebhook(webhookUrl, errorPayload);
|
|
343
1240
|
}
|
|
@@ -434,6 +1331,11 @@ function clearWorkersConfigCache() {
|
|
|
434
1331
|
cacheExpiry = 0;
|
|
435
1332
|
}
|
|
436
1333
|
|
|
1334
|
+
// src/queue.ts
|
|
1335
|
+
// Identity helper: returns the queue config unchanged. Exists so callers get
// a typed, self-documenting entry point for declaring worker queues.
function defineWorkerQueue(config) {
  return config;
}
|
|
1338
|
+
|
|
437
1339
|
// src/index.ts
|
|
438
1340
|
function createWorker(config) {
|
|
439
1341
|
const { id, inputSchema, outputSchema, handler } = config;
|
|
@@ -550,6 +1452,42 @@ function createWorker(config) {
|
|
|
550
1452
|
});
|
|
551
1453
|
return null;
|
|
552
1454
|
}
|
|
1455
|
+
},
|
|
1456
|
+
// Best-effort: records a dispatched child job against the current local job
// by dynamically importing the host app's job-store module. Tries the
// Next.js alias path first, then WORKER_JOB_STORE_MODULE_PATH if set.
// NOTE(review): the "@/..." alias resolves only when bundled by the host
// app; failures at each path are deliberately swallowed.
appendInternalJob: async (entry) => {
  try {
    const nextJsPath = "@/app/api/workflows/stores/jobStore";
    const explicitPath2 = process.env.WORKER_JOB_STORE_MODULE_PATH;
    for (const importPath of [nextJsPath, explicitPath2].filter(Boolean)) {
      try {
        const module2 = await import(importPath);
        if (typeof module2?.appendInternalJob === "function") {
          // First module exposing the function wins.
          await module2.appendInternalJob(localJobId, entry);
          return;
        }
      } catch {
      }
    }
  } catch (error) {
    console.warn("[Worker] Failed to appendInternalJob (direct DB):", { localJobId, error: error?.message || String(error) });
  }
},
|
|
1474
|
+
// Best-effort lookup of another job's record via the host app's job-store
// module (same resolution strategy as appendInternalJob above: Next.js alias
// path, then WORKER_JOB_STORE_MODULE_PATH). Returns null when no module
// resolves or the lookup fails.
getJob: async (otherJobId) => {
  try {
    const nextJsPath = "@/app/api/workflows/stores/jobStore";
    const explicitPath2 = process.env.WORKER_JOB_STORE_MODULE_PATH;
    for (const importPath of [nextJsPath, explicitPath2].filter(Boolean)) {
      try {
        const module2 = await import(importPath);
        if (typeof module2?.getJob === "function") {
          return await module2.getJob(otherJobId);
        }
      } catch {
      }
    }
  } catch (error) {
    console.warn("[Worker] Failed to getJob (direct DB):", { otherJobId, error: error?.message || String(error) });
  }
  // Fall-through: no resolvable store module, or it lacked getJob.
  return null;
}
|
|
554
1492
|
};
|
|
555
1493
|
}
|
|
@@ -625,6 +1563,101 @@ function createWorker(config) {
|
|
|
625
1563
|
};
|
|
626
1564
|
};
|
|
627
1565
|
const jobStore = createLocalJobStore(directJobStore, jobStoreUrl);
|
|
1566
|
+
// Local-mode polling defaults (mirror the Lambda-mode constants: 2s / 15min).
const DEFAULT_POLL_INTERVAL_MS2 = 2e3;
const DEFAULT_POLL_TIMEOUT_MS2 = 15 * 60 * 1e3;
// Local-mode counterpart of createDispatchWorker: instead of SQS, it POSTs
// the job envelope to the app's trigger HTTP endpoint. Delayed dispatch is
// emulated with an in-process setTimeout (fire-and-forget); `await: true`
// polls the provided job store for the child's terminal state.
const createLocalDispatchWorker = (parentJobId, parentWorkerId, parentContext, store) => {
  return async (calleeWorkerId, input2, options2) => {
    const childJobId = options2?.jobId || `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
    const metadata = options2?.metadata ?? {};
    // Only the request id is propagated to the child's context.
    const serializedContext = {};
    if (parentContext.requestId) serializedContext.requestId = parentContext.requestId;
    const messageBody = {
      workerId: calleeWorkerId,
      jobId: childJobId,
      input: input2 ?? {},
      context: serializedContext,
      webhookUrl: options2?.webhookUrl,
      metadata,
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    };
    let triggerUrl;
    try {
      triggerUrl = getWorkersTriggerUrl();
    } catch (e) {
      // Surface a clearer message than the raw resolver error.
      throw new Error(
        `Local dispatchWorker requires WORKER_BASE_URL (or similar) for worker "${calleeWorkerId}": ${e?.message ?? e}`
      );
    }
    const headers = { "Content-Type": "application/json" };
    const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
    if (triggerKey) headers["x-workers-trigger-key"] = triggerKey;
    if (options2?.await !== true && options2?.delaySeconds != null && options2.delaySeconds > 0) {
      // Delayed fire-and-forget: schedule the trigger and return at once.
      // The clamp reuses the SQS ceiling for parity with Lambda mode.
      // NOTE(review): the timer lives only in this process — a restart
      // before it fires drops the dispatch.
      const sec = Math.min(SQS_MAX_DELAY_SECONDS, Math.max(0, Math.floor(options2.delaySeconds)));
      const storeRef = store;
      setTimeout(() => {
        fetch(triggerUrl, {
          method: "POST",
          headers,
          body: JSON.stringify({ workerId: calleeWorkerId, body: messageBody })
        }).then(async (response2) => {
          if (!response2.ok) {
            const text = await response2.text().catch(() => "");
            console.error(
              `[Worker] Delayed trigger failed for "${calleeWorkerId}": ${response2.status} ${response2.statusText}${text ? ` - ${text}` : ""}`
            );
            return;
          }
          // Record the child job only after a successful trigger.
          if (storeRef?.appendInternalJob) {
            await storeRef.appendInternalJob({ jobId: childJobId, workerId: calleeWorkerId });
          }
        }).catch((err) => {
          console.error("[Worker] Delayed trigger error:", { calleeWorkerId, jobId: childJobId, error: err?.message ?? err });
        });
      }, sec * 1e3);
      // No transport message id exists for the local delayed path.
      return { jobId: childJobId, messageId: void 0 };
    }
    // Immediate dispatch: trigger synchronously and propagate failures.
    const response = await fetch(triggerUrl, {
      method: "POST",
      headers,
      body: JSON.stringify({ workerId: calleeWorkerId, body: messageBody })
    });
    if (!response.ok) {
      const text = await response.text().catch(() => "");
      throw new Error(
        `Failed to trigger worker "${calleeWorkerId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
      );
    }
    const data = await response.json().catch(() => ({}));
    // Synthesize a message id when the trigger endpoint doesn't supply one.
    const messageId = data?.messageId ? String(data.messageId) : `trigger-${childJobId}`;
    if (store?.appendInternalJob) {
      await store.appendInternalJob({ jobId: childJobId, workerId: calleeWorkerId });
    }
    if (options2?.await && store?.getJob) {
      // Synchronous mode: poll the job store until completed/failed or the
      // timeout elapses (same shape as the Lambda-mode loop).
      const pollIntervalMs = options2.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS2;
      const pollTimeoutMs = options2.pollTimeoutMs ?? DEFAULT_POLL_TIMEOUT_MS2;
      const deadline = Date.now() + pollTimeoutMs;
      while (Date.now() < deadline) {
        const child = await store.getJob(childJobId);
        if (!child) {
          // Job row may not exist yet; keep waiting.
          await new Promise((r) => setTimeout(r, pollIntervalMs));
          continue;
        }
        if (child.status === "completed") {
          return { jobId: childJobId, messageId, output: child.output };
        }
        if (child.status === "failed") {
          const err = child.error;
          throw new Error(err?.message ?? `Child worker ${calleeWorkerId} failed`);
        }
        await new Promise((r) => setTimeout(r, pollIntervalMs));
      }
      throw new Error(
        `Child worker ${calleeWorkerId} (${childJobId}) did not complete within ${pollTimeoutMs}ms`
      );
    }
    return { jobId: childJobId, messageId };
  };
};
|
|
628
1661
|
if (directJobStore?.setJob) {
|
|
629
1662
|
try {
|
|
630
1663
|
await directJobStore.setJob(localJobId, {
|
|
@@ -642,10 +1675,17 @@ function createWorker(config) {
|
|
|
642
1675
|
});
|
|
643
1676
|
}
|
|
644
1677
|
}
|
|
1678
|
+
const baseContext = { jobId: localJobId, workerId: id };
|
|
645
1679
|
const handlerContext = {
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
1680
|
+
...baseContext,
|
|
1681
|
+
...jobStore ? { jobStore } : {},
|
|
1682
|
+
logger: createWorkerLogger(localJobId, id),
|
|
1683
|
+
dispatchWorker: createLocalDispatchWorker(
|
|
1684
|
+
localJobId,
|
|
1685
|
+
id,
|
|
1686
|
+
baseContext,
|
|
1687
|
+
jobStore
|
|
1688
|
+
)
|
|
649
1689
|
};
|
|
650
1690
|
try {
|
|
651
1691
|
if (jobStore) {
|
|
@@ -722,13 +1762,21 @@ function createLambdaEntrypoint(agent) {
|
|
|
722
1762
|
}
|
|
723
1763
|
// Annotate the CommonJS export names for ESM import in node:
|
|
724
1764
|
0 && (module.exports = {
|
|
1765
|
+
SQS_MAX_DELAY_SECONDS,
|
|
725
1766
|
clearWorkersConfigCache,
|
|
726
1767
|
createLambdaEntrypoint,
|
|
727
1768
|
createLambdaHandler,
|
|
728
1769
|
createWorker,
|
|
1770
|
+
createWorkerLogger,
|
|
1771
|
+
defineWorkerQueue,
|
|
729
1772
|
dispatch,
|
|
730
1773
|
dispatchLocal,
|
|
1774
|
+
dispatchQueue,
|
|
1775
|
+
dispatchWorker,
|
|
1776
|
+
getQueueStartUrl,
|
|
731
1777
|
getWorkersConfig,
|
|
732
|
-
|
|
1778
|
+
getWorkersTriggerUrl,
|
|
1779
|
+
resolveQueueUrl,
|
|
1780
|
+
wrapHandlerForQueue
|
|
733
1781
|
});
|
|
734
1782
|
//# sourceMappingURL=index.js.map
|