@microfox/ai-worker 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/README.md +19 -2
- package/dist/chunk-72XGFZCE.mjs +163 -0
- package/dist/chunk-72XGFZCE.mjs.map +1 -0
- package/dist/chunk-7LQNS2SG.mjs +797 -0
- package/dist/chunk-7LQNS2SG.mjs.map +1 -0
- package/dist/chunk-AOXGONGI.mjs +351 -0
- package/dist/chunk-AOXGONGI.mjs.map +1 -0
- package/dist/client-BqSJQ9mZ.d.mts +183 -0
- package/dist/client-BqSJQ9mZ.d.ts +183 -0
- package/dist/client.d.mts +2 -64
- package/dist/client.d.ts +2 -64
- package/dist/client.js +88 -4
- package/dist/client.js.map +1 -1
- package/dist/client.mjs +11 -3
- package/dist/handler.d.mts +113 -14
- package/dist/handler.d.ts +113 -14
- package/dist/handler.js +823 -6
- package/dist/handler.js.map +1 -1
- package/dist/handler.mjs +10 -3
- package/dist/index.d.mts +3 -3
- package/dist/index.d.ts +3 -3
- package/dist/index.js +1059 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +167 -8
- package/dist/index.mjs.map +1 -1
- package/dist/queueJobStore.d.mts +53 -0
- package/dist/queueJobStore.d.ts +53 -0
- package/dist/queueJobStore.js +378 -0
- package/dist/queueJobStore.js.map +1 -0
- package/dist/queueJobStore.mjs +14 -0
- package/dist/queueJobStore.mjs.map +1 -0
- package/package.json +9 -2
- package/dist/chunk-FQCZSXDI.mjs +0 -83
- package/dist/chunk-FQCZSXDI.mjs.map +0 -1
- package/dist/chunk-WVR4JVWK.mjs +0 -285
- package/dist/chunk-WVR4JVWK.mjs.map +0 -1
|
@@ -0,0 +1,797 @@
|
|
|
1
|
+
import {
|
|
2
|
+
appendQueueJobStepInStore,
|
|
3
|
+
updateQueueJobStepInStore,
|
|
4
|
+
upsertInitialQueueJob
|
|
5
|
+
} from "./chunk-AOXGONGI.mjs";
|
|
6
|
+
|
|
7
|
+
// src/handler.ts
|
|
8
|
+
import { SQSClient, SendMessageCommand } from "@aws-sdk/client-sqs";
|
|
9
|
+
|
|
10
|
+
// src/mongoJobStore.ts
|
|
11
|
+
import { MongoClient } from "mongodb";
|
|
12
|
+
// Connection settings for the MongoDB-backed job store. Each value falls back
// through several env vars so deployments can use worker-specific overrides
// (MONGODB_WORKER_*) before the shared defaults.
var uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
var dbName = process.env.MONGODB_WORKER_DB || process.env.MONGODB_DB || "worker";
var collectionName = process.env.MONGODB_WORKER_JOBS_COLLECTION || "worker_jobs";
// Lazily-initialized promise for a shared MongoClient (see getClient()).
var clientPromise = null;
|
|
16
|
+
/**
 * Returns a shared promise for a connected MongoClient, creating it on first use.
 * Subsequent calls reuse the same in-flight/connected promise.
 * @throws {Error} when no MongoDB URI is configured.
 */
function getClient() {
  if (!uri) {
    throw new Error(
      "MongoDB URI required for job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
    );
  }
  if (clientPromise) {
    return clientPromise;
  }
  const options = {
    maxPoolSize: 10,
    minPoolSize: 0,
    serverSelectionTimeoutMS: 10000
  };
  clientPromise = new MongoClient(uri, options).connect();
  return clientPromise;
}
|
|
31
|
+
// Resolves the configured jobs collection from the shared MongoDB client.
async function getCollection() {
  const db = (await getClient()).db(dbName);
  return db.collection(collectionName);
}
|
|
35
|
+
// Fetches a job record by id, stripping Mongo's _id field. Returns null when
// the record is missing or the lookup fails (errors are logged, not thrown).
async function getJobById(jobId) {
  try {
    const found = await (await getCollection()).findOne({ _id: jobId });
    if (!found) {
      return null;
    }
    const { _id: _ignored, ...record } = found;
    return record;
  } catch (err) {
    console.error("[Worker] MongoDB getJobById failed:", {
      jobId,
      error: err?.message ?? String(err)
    });
    return null;
  }
}
|
|
50
|
+
/**
 * Creates a JobStore backed by MongoDB, scoped to a single job.
 * Every operation swallows store errors (logging them) so persistence
 * problems never crash the worker itself.
 *
 * @param workerId - id of the worker owning the job
 * @param jobId - job id; also used as the Mongo _id
 * @param input - original job input, persisted if the record must be created
 * @param metadata - initial metadata (not read here; metadata flows via update())
 */
function createMongoJobStore(workerId, jobId, input, metadata) {
  return {
    // Merges a partial update into the job document, creating it if missing.
    update: async (update) => {
      try {
        const coll = await getCollection();
        const now = (/* @__PURE__ */ new Date()).toISOString();
        const existing = await coll.findOne({ _id: jobId });
        // Metadata is merged, not replaced: existing keys survive unless overwritten.
        let metadataUpdate = { ...existing?.metadata ?? {} };
        if (update.metadata) {
          Object.assign(metadataUpdate, update.metadata);
        }
        // Progress fields live inside metadata; touching either writes both,
        // so an omitted companion field is reset to undefined.
        if (update.progress !== void 0 || update.progressMessage !== void 0) {
          metadataUpdate.progress = update.progress;
          metadataUpdate.progressMessage = update.progressMessage;
        }
        const set = {
          updatedAt: now,
          metadata: metadataUpdate
        };
        if (update.status !== void 0) {
          set.status = update.status;
          // Stamp completedAt only on the first transition into a terminal state.
          if (["completed", "failed"].includes(update.status) && !existing?.completedAt) {
            set.completedAt = now;
          }
        }
        if (update.output !== void 0) set.output = update.output;
        if (update.error !== void 0) set.error = update.error;
        if (existing) {
          await coll.updateOne({ _id: jobId }, { $set: set });
        } else {
          // No record yet: build a full document so required fields are present.
          const doc = {
            _id: jobId,
            jobId,
            workerId,
            status: update.status ?? "queued",
            input: input ?? {},
            output: update.output,
            error: update.error,
            metadata: metadataUpdate,
            createdAt: now,
            updatedAt: now,
            completedAt: set.completedAt
          };
          if (doc.status === "completed" || doc.status === "failed") {
            doc.completedAt = doc.completedAt ?? now;
          }
          await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });
        }
      } catch (e) {
        console.error("[Worker] MongoDB job store update failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
      }
    },
    // Returns this job's record (without Mongo's _id), or null if absent / on error.
    get: async () => {
      try {
        const coll = await getCollection();
        const doc = await coll.findOne({ _id: jobId });
        if (!doc) return null;
        const { _id, ...r } = doc;
        return r;
      } catch (e) {
        console.error("[Worker] MongoDB job store get failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
        return null;
      }
    },
    // Records a dispatched child job on this job's internalJobs array.
    appendInternalJob: async (entry) => {
      try {
        const coll = await getCollection();
        await coll.updateOne(
          { _id: jobId },
          { $push: { internalJobs: entry } }
        );
      } catch (e) {
        console.error("[Worker] MongoDB job store appendInternalJob failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
      }
    },
    // Looks up an arbitrary job by id (used when awaiting child jobs).
    getJob: async (otherJobId) => {
      try {
        const coll = await getCollection();
        const doc = await coll.findOne({ _id: otherJobId });
        if (!doc) return null;
        const { _id, ...r } = doc;
        return r;
      } catch (e) {
        console.error("[Worker] MongoDB job store getJob failed:", {
          otherJobId,
          error: e?.message ?? String(e)
        });
        return null;
      }
    }
  };
}
|
|
154
|
+
/**
 * Creates-or-refreshes the initial "queued" job record for a job id.
 *
 * Fix: the original put createdAt (and _id) inside $set, so an SQS redelivery
 * of an in-flight job clobbered the original creation timestamp, and $set on
 * the immutable _id risks a Mongo error. createdAt now lives in $setOnInsert
 * (written only when the document is first inserted) and _id is supplied by
 * the upsert filter.
 *
 * @param jobId - job id, used as the Mongo _id
 * @param workerId - worker owning the job
 * @param input - job input payload (defaults to {})
 * @param metadata - initial metadata (defaults to {})
 */
async function upsertJob(jobId, workerId, input, metadata) {
  const coll = await getCollection();
  const now = (/* @__PURE__ */ new Date()).toISOString();
  await coll.updateOne(
    { _id: jobId },
    {
      $set: {
        jobId,
        workerId,
        status: "queued",
        input: input ?? {},
        metadata: metadata ?? {},
        updatedAt: now
      },
      $setOnInsert: {
        // Preserved across redeliveries: only written on first insert.
        createdAt: now
      }
    },
    { upsert: true }
  );
}
|
|
174
|
+
// True when a MongoDB URI is configured (non-empty after trimming).
function isMongoJobStoreConfigured() {
  const value = uri ?? "";
  return value.trim().length > 0;
}
|
|
177
|
+
|
|
178
|
+
// src/redisJobStore.ts
|
|
179
|
+
import { Redis } from "@upstash/redis";
|
|
180
|
+
// Upstash Redis connection settings; worker-specific env vars take precedence
// over the shared Upstash defaults.
var redisUrl = process.env.WORKER_UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_URL;
var redisToken = process.env.WORKER_UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_TOKEN;
var jobKeyPrefix = process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX || process.env.UPSTASH_REDIS_KEY_PREFIX || process.env.REDIS_WORKER_JOB_PREFIX || "worker:jobs:";
// Job records expire after 7 days unless a TTL env var overrides it below.
var defaultTtlSeconds = 60 * 60 * 24 * 7;
// NOTE(review): `parseInt(...) || default` means a TTL of "0" (or any
// non-numeric value) silently falls back to the 7-day default, so the TTL
// cannot be disabled through these vars -- confirm that is intended.
var jobTtlSeconds = typeof process.env.WORKER_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds : typeof process.env.REDIS_WORKER_JOB_TTL_SECONDS === "string" ? parseInt(process.env.REDIS_WORKER_JOB_TTL_SECONDS, 10) || defaultTtlSeconds : typeof process.env.WORKFLOW_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKFLOW_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds : defaultTtlSeconds;
// Lazily-created shared Redis client (see getRedis()).
var redisClient = null;
|
|
186
|
+
/**
 * Returns the shared Upstash Redis client, creating it on first use.
 * @throws {Error} when the REST URL or token is not configured.
 */
function getRedis() {
  if (!redisUrl || !redisToken) {
    throw new Error(
      "Upstash Redis configuration missing. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN)."
    );
  }
  redisClient = redisClient ?? new Redis({ url: redisUrl, token: redisToken });
  return redisClient;
}
|
|
200
|
+
// Redis hash key for a job's main record.
function jobKey(jobId) {
  return `${jobKeyPrefix}${jobId}`;
}
|
|
203
|
+
// Redis list key holding a job's internal (child-dispatch) entries.
function internalListKey(jobId) {
  return `${jobKeyPrefix}${jobId}:internal`;
}
|
|
206
|
+
// True when both the Redis REST URL and token are configured (non-empty after trimming).
function isRedisJobStoreConfigured() {
  const url = (redisUrl ?? "").trim();
  const token = (redisToken ?? "").trim();
  return url.length > 0 && token.length > 0;
}
|
|
209
|
+
/**
 * Reads a job record from Redis, reassembling it from the job hash plus the
 * optional `<key>:internal` list of dispatched child jobs.
 * Returns null when the hash is missing or empty.
 */
async function loadJob(jobId) {
  const redis = getRedis();
  const key = jobKey(jobId);
  const data = await redis.hgetall(key);
  if (!data || Object.keys(data).length === 0) return null;
  // Complex fields are stored as JSON strings; malformed/empty values decode to undefined.
  const parseJson = (val) => {
    if (!val) return void 0;
    try {
      return JSON.parse(val);
    } catch {
      return void 0;
    }
  };
  const listKey = internalListKey(jobId);
  const listItems = await redis.lrange(listKey, 0, -1);
  let internalJobs;
  if (listItems && listItems.length > 0) {
    // Preferred shape: one JSON entry per list item; unparseable entries are dropped.
    internalJobs = listItems.map((s) => {
      try {
        return JSON.parse(s);
      } catch {
        return null;
      }
    }).filter(Boolean);
  } else {
    // Fallback: internalJobs serialized inline on the hash (presumably an
    // older representation -- confirm before removing).
    internalJobs = parseJson(data.internalJobs);
  }
  const record = {
    jobId: data.jobId,
    workerId: data.workerId,
    status: data.status || "queued",
    input: parseJson(data.input) ?? {},
    output: parseJson(data.output),
    error: parseJson(data.error),
    metadata: parseJson(data.metadata) ?? {},
    internalJobs,
    createdAt: data.createdAt,
    updatedAt: data.updatedAt,
    completedAt: data.completedAt
  };
  return record;
}
|
|
251
|
+
/**
 * Creates a JobStore backed by Upstash Redis, scoped to a single job.
 * Records are kept as a hash of JSON-encoded fields plus a side list of
 * internal (child) jobs; every write refreshes the configured TTL.
 *
 * @param workerId - id of the worker owning the job (unused in reads/writes here)
 * @param jobId - job id, embedded into the Redis keys
 * @param input - original job input (not written here; persisted by upsertRedisJob)
 * @param metadata - initial metadata (metadata flows via update())
 */
function createRedisJobStore(workerId, jobId, input, metadata) {
  return {
    // Merges a partial update into the job hash; only changed fields are written.
    update: async (update) => {
      const redis = getRedis();
      const key = jobKey(jobId);
      const now = (/* @__PURE__ */ new Date()).toISOString();
      const existing = await loadJob(jobId);
      const next = {};
      // Metadata is merged, not replaced: existing keys survive unless overwritten.
      const mergedMeta = { ...existing?.metadata ?? {} };
      if (update.metadata) {
        Object.assign(mergedMeta, update.metadata);
      }
      if (update.progress !== void 0 || update.progressMessage !== void 0) {
        mergedMeta.progress = update.progress;
        mergedMeta.progressMessage = update.progressMessage;
      }
      next.metadata = mergedMeta;
      if (update.status !== void 0) {
        // NOTE(review): the presence of an error forces status to "failed",
        // overriding the requested status; the Mongo store does not do this --
        // confirm the asymmetry is intended.
        next.status = update.error ? "failed" : update.status;
        // Stamp completedAt only on the first transition into a terminal state.
        if ((update.status === "completed" || update.status === "failed") && !existing?.completedAt) {
          next.completedAt = now;
        }
      }
      if (update.output !== void 0) next.output = update.output;
      if (update.error !== void 0) next.error = update.error;
      // Serialize only the fields being changed.
      const toSet = {};
      if (next.status) toSet["status"] = String(next.status);
      if (next.output !== void 0) toSet["output"] = JSON.stringify(next.output);
      if (next.error !== void 0) toSet["error"] = JSON.stringify(next.error);
      if (next.metadata !== void 0) toSet["metadata"] = JSON.stringify(next.metadata);
      if (next.completedAt) {
        toSet["completedAt"] = next.completedAt;
      }
      toSet["updatedAt"] = now;
      await redis.hset(key, toSet);
      if (jobTtlSeconds > 0) {
        await redis.expire(key, jobTtlSeconds);
      }
    },
    // Returns this job's record, or null when absent.
    get: async () => {
      return loadJob(jobId);
    },
    // Pushes a child-dispatch entry, bumps updatedAt, refreshes TTL on both keys.
    appendInternalJob: async (entry) => {
      const redis = getRedis();
      const listKey = internalListKey(jobId);
      await redis.rpush(listKey, JSON.stringify(entry));
      const mainKey = jobKey(jobId);
      await redis.hset(mainKey, { updatedAt: (/* @__PURE__ */ new Date()).toISOString() });
      if (jobTtlSeconds > 0) {
        await redis.expire(listKey, jobTtlSeconds);
        await redis.expire(mainKey, jobTtlSeconds);
      }
    },
    // Looks up an arbitrary job by id (used when awaiting child jobs).
    getJob: async (otherJobId) => {
      return loadJob(otherJobId);
    }
  };
}
|
|
309
|
+
/**
 * Writes the initial "queued" job hash for a job id and applies the
 * configured TTL. input/metadata are JSON-encoded (defaulting to {}).
 */
async function upsertRedisJob(jobId, workerId, input, metadata) {
  const redis = getRedis();
  const key = jobKey(jobId);
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const fields = {
    jobId,
    workerId,
    status: "queued",
    input: JSON.stringify(input ?? {}),
    metadata: JSON.stringify(metadata ?? {}),
    createdAt: now,
    updatedAt: now
  };
  await redis.hset(key, fields);
  if (jobTtlSeconds > 0) {
    await redis.expire(key, jobTtlSeconds);
  }
}
|
|
336
|
+
|
|
337
|
+
// src/handler.ts
// SQS caps per-message DelaySeconds at 15 minutes (900 seconds).
var SQS_MAX_DELAY_SECONDS = 900;
|
|
339
|
+
/**
 * Builds a console-backed logger whose lines are tagged with level, worker id
 * and job id. Structured data is JSON-stringified; debug output is emitted
 * only when DEBUG or WORKER_DEBUG is set.
 */
function createWorkerLogger(jobId, workerId) {
  const tag = (level) => `[${level}] [${workerId}] [${jobId}]`;
  const render = (data) => (data !== void 0 ? JSON.stringify(data) : "");
  return {
    info(msg, data) {
      console.log(tag("INFO"), msg, render(data));
    },
    warn(msg, data) {
      console.warn(tag("WARN"), msg, render(data));
    },
    error(msg, data) {
      console.error(tag("ERROR"), msg, render(data));
    },
    debug(msg, data) {
      const enabled = process.env.DEBUG || process.env.WORKER_DEBUG;
      if (!enabled) return;
      console.debug(tag("DEBUG"), msg, render(data));
    }
  };
}
|
|
358
|
+
// Reserved input key carrying queue-chain context between chained workers.
var WORKER_QUEUE_KEY = "__workerQueue";
|
|
359
|
+
/**
 * Best-effort sync of a queue job's step state into the queue-job store.
 * action "append" registers an upcoming step; "start"/"complete"/"fail"
 * update an existing step's status. Store failures are logged, never thrown,
 * so queue bookkeeping cannot break job execution.
 */
async function notifyQueueJobStep(queueJobId, action, params) {
  try {
    if (action === "append") {
      // Appending requires both identifiers; silently skip otherwise.
      if (!params.workerId || !params.workerJobId) return;
      await appendQueueJobStepInStore({
        queueJobId,
        workerId: params.workerId,
        workerJobId: params.workerJobId
      });
      if (process.env.DEBUG_WORKER_QUEUES === "1") {
        console.log("[Worker] Queue job step appended", {
          queueJobId,
          workerId: params.workerId,
          workerJobId: params.workerJobId
        });
      }
      return;
    }
    // Status updates require a concrete step index.
    if (params.stepIndex === void 0) return;
    const status = action === "start" ? "running" : action === "complete" ? "completed" : action === "fail" ? "failed" : void 0;
    if (!status) return;
    await updateQueueJobStepInStore({
      queueJobId,
      stepIndex: params.stepIndex,
      workerId: params.workerId || "",
      workerJobId: params.workerJobId,
      status,
      input: params.input,
      output: params.output,
      error: params.error
    });
    console.log("[Worker] Queue job step updated", {
      queueId: params.queueId ?? queueJobId,
      queueJobId,
      stepIndex: params.stepIndex,
      workerId: params.workerId,
      status
    });
  } catch (err) {
    console.warn("[Worker] Queue job update error:", {
      queueJobId,
      action,
      error: err?.message ?? String(err)
    });
  }
}
|
|
405
|
+
/**
 * Wraps a worker handler so that, when its input carries __workerQueue
 * context, finishing the handler advances the queue chain: the completed
 * step is recorded and the next step's worker is dispatched fire-and-forget.
 * Inputs without queue context pass straight through unchanged.
 */
function wrapHandlerForQueue(handler, queueRuntime) {
  return async (params) => {
    const queueContext = params.input?.[WORKER_QUEUE_KEY];
    const output = await handler(params);
    // Not part of a queue chain: behave like the raw handler.
    if (!queueContext || typeof queueContext !== "object" || !queueContext.id) {
      return output;
    }
    const { id: queueId, stepIndex, initialInput, queueJobId } = queueContext;
    const jobId = params.ctx?.jobId;
    const workerId = params.ctx?.workerId ?? "";
    const next = queueRuntime.getNextStep(queueId, stepIndex);
    // Pre-generate the child job id so the step can be registered before dispatch.
    const childJobId = next ? `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}` : void 0;
    if (next && queueJobId) {
      await notifyQueueJobStep(queueJobId, "append", {
        workerJobId: childJobId,
        workerId: next.workerId
      });
    }
    if (queueJobId && typeof stepIndex === "number") {
      await notifyQueueJobStep(queueJobId, "complete", {
        queueId,
        stepIndex,
        workerJobId: jobId,
        workerId,
        output
      });
    }
    if (!next) {
      return output;
    }
    // Default chaining passes this step's output as the next step's input,
    // unless the queue defines a mapInputFromPrev transform.
    let nextInput = output;
    if (next.mapInputFromPrev && typeof queueRuntime.invokeMapInput === "function") {
      let previousOutputs = [];
      if (queueJobId && typeof queueRuntime.getQueueJob === "function") {
        try {
          const job = await queueRuntime.getQueueJob(queueJobId);
          if (job?.steps) {
            // Earlier step outputs come from the store; the current step's
            // output is appended from memory (the store may not be updated yet).
            const fromStore = job.steps.slice(0, stepIndex).map((s, i) => ({ stepIndex: i, workerId: s.workerId, output: s.output }));
            previousOutputs = fromStore.concat([
              { stepIndex, workerId: params.ctx?.workerId ?? "", output }
            ]);
          }
        } catch (e) {
          if (process.env.AI_WORKER_QUEUES_DEBUG === "1") {
            console.warn("[Worker] getQueueJob failed, mapping without previousOutputs:", e?.message ?? e);
          }
        }
      }
      nextInput = await queueRuntime.invokeMapInput(
        queueId,
        stepIndex + 1,
        initialInput,
        previousOutputs
      );
    }
    // Non-object outputs are wrapped as { value } so the queue marker can ride along.
    const nextInputWithQueue = {
      ...nextInput !== null && typeof nextInput === "object" ? nextInput : { value: nextInput },
      [WORKER_QUEUE_KEY]: {
        id: queueId,
        stepIndex: stepIndex + 1,
        initialInput,
        queueJobId
      }
    };
    const debug = process.env.AI_WORKER_QUEUES_DEBUG === "1";
    if (debug) {
      console.log("[Worker] Queue chain dispatching next:", {
        queueId,
        fromStep: stepIndex,
        nextWorkerId: next.workerId,
        delaySeconds: next.delaySeconds
      });
    }
    await params.ctx.dispatchWorker(next.workerId, nextInputWithQueue, {
      await: false,
      delaySeconds: next.delaySeconds,
      jobId: childJobId
    });
    return output;
  };
}
|
|
486
|
+
// Defaults for await-mode child polling: check every 2s, give up after 15 min.
var DEFAULT_POLL_INTERVAL_MS = 2e3;
var DEFAULT_POLL_TIMEOUT_MS = 15 * 60 * 1e3;
|
|
488
|
+
// Maps a worker id to the env-var-friendly form used in WORKER_QUEUE_URL_*
// keys: dashes become underscores, letters are upper-cased.
function sanitizeWorkerIdForEnv(workerId) {
  return workerId.split("-").join("_").toUpperCase();
}
|
|
491
|
+
// Looks up the SQS queue URL for a worker from WORKER_QUEUE_URL_<SANITIZED_ID>.
// Returns undefined when the variable is unset or blank.
function getQueueUrlForWorker(calleeWorkerId) {
  const envKey = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeWorkerId)}`;
  const value = process.env[envKey]?.trim();
  return value ? value : void 0;
}
|
|
495
|
+
/**
 * Builds the ctx.dispatchWorker function handed to handlers: it enqueues a
 * dispatch message on the callee worker's SQS queue and, when options.await
 * is set and a job store is available, polls the store until the child job
 * reaches a terminal state.
 *
 * NOTE(review): parentJobId and parentWorkerId are not referenced in this
 * body -- presumably kept for interface stability; confirm before removing.
 */
function createDispatchWorker(parentJobId, parentWorkerId, parentContext, jobStore) {
  return async (calleeWorkerId, input, options) => {
    const childJobId = options?.jobId || `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
    const metadata = options?.metadata ?? {};
    // Only requestId from the parent context is propagated to the child.
    const serializedContext = {};
    if (parentContext.requestId) serializedContext.requestId = parentContext.requestId;
    const messageBody = {
      workerId: calleeWorkerId,
      jobId: childJobId,
      input: input ?? {},
      context: serializedContext,
      webhookUrl: options?.webhookUrl,
      metadata,
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    };
    const queueUrl = getQueueUrlForWorker(calleeWorkerId);
    if (queueUrl) {
      const region = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || "us-east-1";
      const sqs = new SQSClient({ region });
      // Delay applies only to fire-and-forget dispatches, clamped to SQS's 900s cap.
      const delaySeconds = options?.await !== true && options?.delaySeconds != null ? Math.min(SQS_MAX_DELAY_SECONDS, Math.max(0, Math.floor(options.delaySeconds))) : void 0;
      const sendResult = await sqs.send(
        new SendMessageCommand({
          QueueUrl: queueUrl,
          MessageBody: JSON.stringify(messageBody),
          ...delaySeconds !== void 0 && delaySeconds > 0 ? { DelaySeconds: delaySeconds } : {}
        })
      );
      const messageId = sendResult.MessageId ?? void 0;
      // Record the dispatch on the parent job's internal-jobs list, if possible.
      if (jobStore?.appendInternalJob) {
        await jobStore.appendInternalJob({ jobId: childJobId, workerId: calleeWorkerId });
      }
      if (options?.await && jobStore?.getJob) {
        // Await mode: poll the job store until the child reaches a terminal state.
        const pollIntervalMs = options.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;
        const pollTimeoutMs = options.pollTimeoutMs ?? DEFAULT_POLL_TIMEOUT_MS;
        const deadline = Date.now() + pollTimeoutMs;
        while (Date.now() < deadline) {
          const child = await jobStore.getJob(childJobId);
          if (!child) {
            // Record not visible yet; keep polling.
            await new Promise((r) => setTimeout(r, pollIntervalMs));
            continue;
          }
          if (child.status === "completed") {
            return { jobId: childJobId, messageId, output: child.output };
          }
          if (child.status === "failed") {
            const err = child.error;
            throw new Error(
              err?.message ?? `Child worker ${calleeWorkerId} failed`
            );
          }
          await new Promise((r) => setTimeout(r, pollIntervalMs));
        }
        throw new Error(
          `Child worker ${calleeWorkerId} (${childJobId}) did not complete within ${pollTimeoutMs}ms`
        );
      }
      return { jobId: childJobId, messageId };
    }
    throw new Error(
      `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeWorkerId)} is not set. Configure queue URL for worker-to-worker dispatch, or run in local mode.`
    );
  };
}
|
|
558
|
+
/**
 * POSTs a JSON payload to the given webhook URL. All failures (non-2xx
 * responses and network errors) are logged and swallowed so webhook delivery
 * can never break job processing.
 */
async function sendWebhook(webhookUrl, payload) {
  try {
    const response = await fetch(webhookUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "User-Agent": "ai-router-worker/1.0"
      },
      body: JSON.stringify(payload)
    });
    if (response.ok) {
      console.log("[Worker] Webhook callback successful:", {
        url: webhookUrl,
        status: response.status
      });
      return;
    }
    const errorText = await response.text().catch(() => "");
    console.error("[Worker] Webhook callback failed:", {
      url: webhookUrl,
      status: response.status,
      statusText: response.statusText,
      errorText
    });
  } catch (error) {
    console.error("[Worker] Webhook callback error:", {
      url: webhookUrl,
      error: error?.message || String(error),
      stack: error?.stack
    });
  }
}
|
|
590
|
+
/**
 * Wraps a worker handler as an AWS Lambda SQS batch handler.
 *
 * For each record: parse the dispatch message, skip jobs already in a
 * terminal state (idempotent redelivery), mark the job running in the
 * configured job store (Upstash Redis or MongoDB), run the handler, validate
 * output against outputSchema when given, persist the result, and fire
 * optional webhook / queue-step notifications.
 *
 * Fix: the original read `context.requestId` directly, which throws a
 * TypeError when a message (e.g. one hand-crafted by an external producer)
 * omits `context`; the context is now defaulted to {} before use.
 *
 * @param handler - async ({ input, ctx }) => output worker implementation
 * @param outputSchema - optional schema; its .parse() is applied to the output
 * @returns Lambda handler: (event, lambdaContext) => Promise<void>
 */
function createLambdaHandler(handler, outputSchema) {
  return async (event, lambdaContext) => {
    const promises = event.Records.map(async (record) => {
      let messageBody = null;
      try {
        messageBody = JSON.parse(record.body);
        const { workerId, jobId, input, context, webhookUrl, metadata = {} } = messageBody;
        // Guard against messages that omit context entirely.
        const callerContext = context ?? {};
        const raw = (process.env.WORKER_DATABASE_TYPE || "upstash-redis").toLowerCase();
        const jobStoreType = raw === "mongodb" ? "mongodb" : "upstash-redis";
        // Idempotency: SQS may redeliver; never re-run a job already terminal.
        if (jobStoreType === "upstash-redis" && isRedisJobStoreConfigured()) {
          const existing = await loadJob(jobId);
          if (existing && (existing.status === "completed" || existing.status === "failed")) {
            console.log("[Worker] Skipping already terminal job (idempotent):", {
              jobId,
              workerId,
              status: existing.status
            });
            return;
          }
        } else if (jobStoreType === "mongodb" || isMongoJobStoreConfigured()) {
          const existing = await getJobById(jobId);
          if (existing && (existing.status === "completed" || existing.status === "failed")) {
            console.log("[Worker] Skipping already terminal job (idempotent):", {
              jobId,
              workerId,
              status: existing.status
            });
            return;
          }
        }
        // Create/refresh the job record and build a store scoped to this job.
        // jobStore stays undefined when neither backend is configured.
        let jobStore;
        if (jobStoreType === "upstash-redis" && isRedisJobStoreConfigured()) {
          await upsertRedisJob(jobId, workerId, input, metadata);
          jobStore = createRedisJobStore(workerId, jobId, input, metadata);
        } else if (jobStoreType === "mongodb" || isMongoJobStoreConfigured()) {
          await upsertJob(jobId, workerId, input, metadata);
          jobStore = createMongoJobStore(workerId, jobId, input, metadata);
        }
        const baseContext = {
          jobId,
          workerId,
          requestId: callerContext.requestId || lambdaContext.awsRequestId,
          ...callerContext
        };
        const handlerContext = {
          ...baseContext,
          ...jobStore ? { jobStore } : {},
          logger: createWorkerLogger(jobId, workerId),
          dispatchWorker: createDispatchWorker(
            jobId,
            workerId,
            baseContext,
            jobStore
          )
        };
        if (jobStore) {
          // Best effort: a store failure must not block the handler.
          try {
            await jobStore.update({ status: "running" });
            const queueCtxForLog = input?.__workerQueue ?? metadata?.__workerQueue;
            console.log("[Worker] Job status updated to running:", {
              jobId,
              workerId,
              ...queueCtxForLog?.id && { queueId: queueCtxForLog.id },
              ...queueCtxForLog?.queueJobId && { queueJobId: queueCtxForLog.queueJobId }
            });
          } catch (error) {
            console.warn("[Worker] Failed to update status to running:", {
              jobId,
              workerId,
              error: error?.message || String(error)
            });
          }
        }
        // Queue chains: record the step as started (and create the queue job
        // record when this is the first step).
        const queueCtx = input?.__workerQueue ?? metadata?.__workerQueue;
        if (queueCtx?.queueJobId && typeof queueCtx.stepIndex === "number") {
          if (queueCtx.stepIndex === 0) {
            try {
              await upsertInitialQueueJob({
                queueJobId: queueCtx.queueJobId,
                queueId: queueCtx.id,
                firstWorkerId: workerId,
                firstWorkerJobId: jobId,
                metadata
              });
            } catch (e) {
              console.warn("[Worker] Failed to upsert initial queue job:", {
                queueJobId: queueCtx.queueJobId,
                queueId: queueCtx.id,
                error: e?.message ?? String(e)
              });
            }
          }
          await notifyQueueJobStep(queueCtx.queueJobId, "start", {
            queueId: queueCtx.id,
            stepIndex: queueCtx.stepIndex,
            workerJobId: jobId,
            workerId,
            input
          });
        }
        let output;
        try {
          output = await handler({
            input,
            ctx: handlerContext
          });
          if (outputSchema) {
            output = outputSchema.parse(output);
          }
        } catch (error) {
          const errorPayload = {
            jobId,
            workerId,
            status: "error",
            error: {
              message: error.message || "Unknown error",
              stack: error.stack,
              name: error.name || "Error"
            },
            metadata
          };
          if (jobStore) {
            try {
              await jobStore.update({
                status: "failed",
                error: errorPayload.error
              });
              console.log("[Worker] Job status updated to failed:", {
                jobId,
                workerId
              });
            } catch (updateError) {
              console.warn("[Worker] Failed to update job store on error:", {
                jobId,
                workerId,
                error: updateError?.message || String(updateError)
              });
            }
          }
          const queueCtxFail = input?.__workerQueue ?? metadata?.__workerQueue;
          if (queueCtxFail?.queueJobId && typeof queueCtxFail.stepIndex === "number") {
            await notifyQueueJobStep(queueCtxFail.queueJobId, "fail", {
              queueId: queueCtxFail.id,
              stepIndex: queueCtxFail.stepIndex,
              workerJobId: jobId,
              workerId,
              error: errorPayload.error
            });
          }
          if (webhookUrl) {
            await sendWebhook(webhookUrl, errorPayload);
          }
          // Rethrow so SQS retry / DLQ policies see the failure.
          throw error;
        }
        if (jobStore) {
          try {
            await jobStore.update({
              status: "completed",
              output
            });
            console.log("[Worker] Job status updated to completed:", {
              jobId,
              workerId
            });
          } catch (updateError) {
            console.warn("[Worker] Failed to update job store on success:", {
              jobId,
              workerId,
              error: updateError?.message || String(updateError)
            });
          }
        }
        console.log("[Worker] Job completed:", {
          jobId,
          workerId,
          output
        });
        const successPayload = {
          jobId,
          workerId,
          status: "success",
          output,
          metadata
        };
        if (webhookUrl) {
          await sendWebhook(webhookUrl, successPayload);
        }
      } catch (error) {
        console.error("[Worker] Error processing SQS record:", {
          jobId: messageBody?.jobId ?? "(parse failed)",
          workerId: messageBody?.workerId ?? "(parse failed)",
          error: error?.message || String(error),
          stack: error?.stack
        });
        throw error;
      }
    });
    // Any failed record rejects the whole batch; SQS will redeliver all
    // records, which the terminal-state skip above keeps idempotent.
    await Promise.all(promises);
  };
}
|
|
790
|
+
|
|
791
|
+
// Public surface of this chunk, re-exported by the package entry points.
export {
  SQS_MAX_DELAY_SECONDS,
  createWorkerLogger,
  wrapHandlerForQueue,
  createLambdaHandler
};
//# sourceMappingURL=chunk-7LQNS2SG.mjs.map
|