@microfox/ai-worker 1.0.1 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1053 @@
1
+ // src/handler.ts
2
+ import { SQSClient, SendMessageCommand } from "@aws-sdk/client-sqs";
3
+
4
+ // src/mongoJobStore.ts
5
+ import { MongoClient } from "mongodb";
6
// Connection settings for the MongoDB-backed worker job store.
// Worker-specific env vars take precedence over shared/app-wide fallbacks.
var uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
// Database name; defaults to "worker" when nothing is configured.
var dbName = process.env.MONGODB_WORKER_DB || process.env.MONGODB_DB || "worker";
// Collection holding one document per worker job.
var collectionName = process.env.MONGODB_WORKER_JOBS_COLLECTION || "worker_jobs";
// Lazily-initialized, module-wide MongoClient connection promise (see getClient).
var clientPromise = null;
10
/**
 * Returns the shared, lazily-created MongoClient connection promise.
 * Throws when no MongoDB URI is configured in the environment.
 * NOTE(review): error message lists only the fallback vars, not
 * MONGODB_WORKER_URI — kept as-is to preserve behavior.
 */
function getClient() {
  if (!uri) {
    throw new Error(
      "MongoDB URI required for job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
    );
  }
  if (clientPromise === null) {
    const client = new MongoClient(uri, {
      maxPoolSize: 10,
      minPoolSize: 0,
      serverSelectionTimeoutMS: 1e4
    });
    clientPromise = client.connect();
  }
  return clientPromise;
}
25
/**
 * Resolves the worker-jobs collection on the configured database.
 */
async function getCollection() {
  const mongo = await getClient();
  const db = mongo.db(dbName);
  return db.collection(collectionName);
}
29
/**
 * Fetches a job record by id, stripping Mongo's `_id` field.
 * Returns null when the job is missing or the lookup fails
 * (errors are logged, never thrown).
 */
async function getJobById(jobId) {
  try {
    const coll = await getCollection();
    const found = await coll.findOne({ _id: jobId });
    if (!found) return null;
    const { _id, ...record } = found;
    return record;
  } catch (err) {
    console.error("[Worker] MongoDB getJobById failed:", {
      jobId,
      error: err?.message ?? String(err)
    });
    return null;
  }
}
44
/**
 * Builds the MongoDB-backed job-store facade for a single job.
 * Every method is best-effort: storage errors are logged and swallowed
 * so bookkeeping failures never crash the worker run itself.
 * NOTE(review): `metadata` is accepted for parity with the Redis store
 * but is not read here — metadata is merged per-update instead.
 */
function createMongoJobStore(workerId, jobId, input, metadata) {
  return {
    // Apply a partial update to the job document, creating it if absent.
    update: async (update) => {
      try {
        const coll = await getCollection();
        const now = (/* @__PURE__ */ new Date()).toISOString();
        const existing = await coll.findOne({ _id: jobId });
        // Shallow-merge new metadata over whatever is already stored.
        let metadataUpdate = { ...existing?.metadata ?? {} };
        if (update.metadata) {
          Object.assign(metadataUpdate, update.metadata);
        }
        // Progress is tracked inside metadata; supplying either field
        // overwrites both (an omitted one becomes undefined).
        if (update.progress !== void 0 || update.progressMessage !== void 0) {
          metadataUpdate.progress = update.progress;
          metadataUpdate.progressMessage = update.progressMessage;
        }
        const set = {
          updatedAt: now,
          metadata: metadataUpdate
        };
        if (update.status !== void 0) {
          set.status = update.status;
          // Stamp completedAt once, on the first terminal transition.
          if (["completed", "failed"].includes(update.status) && !existing?.completedAt) {
            set.completedAt = now;
          }
        }
        if (update.output !== void 0) set.output = update.output;
        if (update.error !== void 0) set.error = update.error;
        if (existing) {
          await coll.updateOne({ _id: jobId }, { $set: set });
        } else {
          // First write for this job: materialize a full document.
          const doc = {
            _id: jobId,
            jobId,
            workerId,
            status: update.status ?? "queued",
            input: input ?? {},
            output: update.output,
            error: update.error,
            metadata: metadataUpdate,
            createdAt: now,
            updatedAt: now,
            completedAt: set.completedAt
          };
          if (doc.status === "completed" || doc.status === "failed") {
            doc.completedAt = doc.completedAt ?? now;
          }
          // Upsert so a concurrent insert of the same jobId cannot
          // cause a duplicate-key failure.
          await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });
        }
      } catch (e) {
        console.error("[Worker] MongoDB job store update failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
      }
    },
    // Fetch this job's record (without Mongo's _id); null on miss/error.
    get: async () => {
      try {
        const coll = await getCollection();
        const doc = await coll.findOne({ _id: jobId });
        if (!doc) return null;
        const { _id, ...r } = doc;
        return r;
      } catch (e) {
        console.error("[Worker] MongoDB job store get failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
        return null;
      }
    },
    // Record a dispatched child job on this job's internalJobs array.
    appendInternalJob: async (entry) => {
      try {
        const coll = await getCollection();
        await coll.updateOne(
          { _id: jobId },
          { $push: { internalJobs: entry } }
        );
      } catch (e) {
        console.error("[Worker] MongoDB job store appendInternalJob failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
      }
    },
    // Fetch an arbitrary job's record by id; null on miss/error.
    getJob: async (otherJobId) => {
      try {
        const coll = await getCollection();
        const doc = await coll.findOne({ _id: otherJobId });
        if (!doc) return null;
        const { _id, ...r } = doc;
        return r;
      } catch (e) {
        console.error("[Worker] MongoDB job store getJob failed:", {
          otherJobId,
          error: e?.message ?? String(e)
        });
        return null;
      }
    }
  };
}
148
/**
 * Ensures a job document exists in MongoDB before processing starts.
 *
 * Fixes over the previous version:
 * - `createdAt` is written via $setOnInsert, so a redelivered message
 *   no longer resets the job's original creation time (previously it
 *   was $set on every call).
 * - `_id` is supplied only by the upsert filter instead of $set,
 *   avoiding immutable-field `_id` update errors on existing docs.
 */
async function upsertJob(jobId, workerId, input, metadata) {
  const coll = await getCollection();
  const now = (/* @__PURE__ */ new Date()).toISOString();
  await coll.updateOne(
    { _id: jobId },
    {
      $set: {
        jobId,
        workerId,
        status: "queued",
        input: input ?? {},
        metadata: metadata ?? {},
        updatedAt: now
      },
      $setOnInsert: {
        createdAt: now
      }
    },
    { upsert: true }
  );
}
168
/** True when a non-blank MongoDB URI is configured. */
function isMongoJobStoreConfigured() {
  return !!(uri && uri.trim());
}
171
+
172
+ // src/redisJobStore.ts
173
+ import { Redis } from "@upstash/redis";
174
// Upstash Redis (REST) connection settings; worker-specific vars win.
var redisUrl = process.env.WORKER_UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_URL;
var redisToken = process.env.WORKER_UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_TOKEN;
// Key prefix for per-job hashes, e.g. "worker:jobs:<jobId>".
var jobKeyPrefix = process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX || process.env.UPSTASH_REDIS_KEY_PREFIX || process.env.REDIS_WORKER_JOB_PREFIX || "worker:jobs:";
// Job records expire after 7 days unless overridden below.
var defaultTtlSeconds = 60 * 60 * 24 * 7;
// TTL resolution order: WORKER_JOBS_TTL_SECONDS, then
// REDIS_WORKER_JOB_TTL_SECONDS, then WORKFLOW_JOBS_TTL_SECONDS.
// NOTE(review): `parseInt(...) || default` means an explicit "0"
// (or non-numeric value) falls back to the 7-day default.
var jobTtlSeconds = typeof process.env.WORKER_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds : typeof process.env.REDIS_WORKER_JOB_TTL_SECONDS === "string" ? parseInt(process.env.REDIS_WORKER_JOB_TTL_SECONDS, 10) || defaultTtlSeconds : typeof process.env.WORKFLOW_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKFLOW_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds : defaultTtlSeconds;
// Lazily-created Upstash Redis client (see getRedis).
var redisClient = null;
180
/**
 * Returns the shared Upstash Redis client, creating it on first use.
 * Throws when the REST URL/token pair is not configured.
 */
function getRedis() {
  if (!redisUrl || !redisToken) {
    throw new Error(
      "Upstash Redis configuration missing. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN)."
    );
  }
  redisClient = redisClient ?? new Redis({ url: redisUrl, token: redisToken });
  return redisClient;
}
194
/** Redis hash key for a job's main record. */
function jobKey(jobId) {
  return jobKeyPrefix + jobId;
}
197
/** Redis list key holding internal (child) job entries for a job. */
function internalListKey(jobId) {
  return jobKeyPrefix + jobId + ":internal";
}
200
/** True when both the Redis REST URL and token are configured (non-blank). */
function isRedisJobStoreConfigured() {
  const url = (redisUrl || "").trim();
  const token = (redisToken || "").trim();
  return url.length > 0 && token.length > 0;
}
203
/**
 * Loads a job record from Redis, or null when no hash exists.
 * Hash fields are JSON-encoded strings; `internalJobs` is read from
 * a dedicated list key first, falling back to the hash field.
 */
async function loadJob(jobId) {
  const redis = getRedis();
  const key = jobKey(jobId);
  const data = await redis.hgetall(key);
  if (!data || Object.keys(data).length === 0) return null;
  // Best-effort JSON parse: absent/malformed values become undefined.
  const parseJson = (val) => {
    if (!val) return void 0;
    try {
      return JSON.parse(val);
    } catch {
      return void 0;
    }
  };
  // Child-job entries live in a separate list so appends are cheap.
  const listKey = internalListKey(jobId);
  const listItems = await redis.lrange(listKey, 0, -1);
  let internalJobs;
  if (listItems && listItems.length > 0) {
    // Unparseable list entries are dropped rather than failing the load.
    internalJobs = listItems.map((s) => {
      try {
        return JSON.parse(s);
      } catch {
        return null;
      }
    }).filter(Boolean);
  } else {
    // Fallback: internalJobs serialized directly on the hash.
    internalJobs = parseJson(data.internalJobs);
  }
  const record = {
    jobId: data.jobId,
    workerId: data.workerId,
    // Missing status is treated as a freshly queued job.
    status: data.status || "queued",
    input: parseJson(data.input) ?? {},
    output: parseJson(data.output),
    error: parseJson(data.error),
    metadata: parseJson(data.metadata) ?? {},
    internalJobs,
    createdAt: data.createdAt,
    updatedAt: data.updatedAt,
    completedAt: data.completedAt
  };
  return record;
}
245
/**
 * Builds the Upstash-Redis-backed job-store facade for a single job.
 * The record lives in a hash (JSON-encoded fields); child-job entries
 * live in a companion list key. Unlike the Mongo store, errors here
 * propagate to the caller.
 * NOTE(review): `input` and `metadata` params are unused — initial
 * state is written by upsertRedisJob instead.
 */
function createRedisJobStore(workerId, jobId, input, metadata) {
  return {
    // Apply a partial update to the stored job hash.
    update: async (update) => {
      const redis = getRedis();
      const key = jobKey(jobId);
      const now = (/* @__PURE__ */ new Date()).toISOString();
      const existing = await loadJob(jobId);
      const next = {};
      // Shallow-merge new metadata over what is already stored.
      const mergedMeta = { ...existing?.metadata ?? {} };
      if (update.metadata) {
        Object.assign(mergedMeta, update.metadata);
      }
      // Progress lives inside metadata; supplying either field
      // overwrites both.
      if (update.progress !== void 0 || update.progressMessage !== void 0) {
        mergedMeta.progress = update.progress;
        mergedMeta.progressMessage = update.progressMessage;
      }
      next.metadata = mergedMeta;
      if (update.status !== void 0) {
        // An update that carries an error is always recorded as
        // "failed", regardless of the supplied status.
        next.status = update.error ? "failed" : update.status;
        // Stamp completedAt once, on the first terminal transition.
        if ((update.status === "completed" || update.status === "failed") && !existing?.completedAt) {
          next.completedAt = now;
        }
      }
      if (update.output !== void 0) next.output = update.output;
      if (update.error !== void 0) next.error = update.error;
      // Serialize for the hash: status/timestamps as strings, rest as JSON.
      const toSet = {};
      if (next.status) toSet["status"] = String(next.status);
      if (next.output !== void 0) toSet["output"] = JSON.stringify(next.output);
      if (next.error !== void 0) toSet["error"] = JSON.stringify(next.error);
      if (next.metadata !== void 0) toSet["metadata"] = JSON.stringify(next.metadata);
      if (next.completedAt) {
        toSet["completedAt"] = next.completedAt;
      }
      toSet["updatedAt"] = now;
      await redis.hset(key, toSet);
      // Refresh the TTL on every write so active jobs do not expire.
      if (jobTtlSeconds > 0) {
        await redis.expire(key, jobTtlSeconds);
      }
    },
    // Fetch this job's record; null when absent.
    get: async () => {
      return loadJob(jobId);
    },
    // Append a child-job entry to the companion list and bump updatedAt.
    appendInternalJob: async (entry) => {
      const redis = getRedis();
      const listKey = internalListKey(jobId);
      await redis.rpush(listKey, JSON.stringify(entry));
      const mainKey = jobKey(jobId);
      await redis.hset(mainKey, { updatedAt: (/* @__PURE__ */ new Date()).toISOString() });
      if (jobTtlSeconds > 0) {
        await redis.expire(listKey, jobTtlSeconds);
        await redis.expire(mainKey, jobTtlSeconds);
      }
    },
    // Fetch any job's record by id; null when absent.
    getJob: async (otherJobId) => {
      return loadJob(otherJobId);
    }
  };
}
303
/**
 * Writes the initial "queued" record for a job into Redis and applies
 * the configured TTL. Input and metadata are stored JSON-serialized.
 */
async function upsertRedisJob(jobId, workerId, input, metadata) {
  const redis = getRedis();
  const key = jobKey(jobId);
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const fields = {
    jobId,
    workerId,
    status: "queued",
    input: JSON.stringify(input ?? {}),
    metadata: JSON.stringify(metadata ?? {}),
    createdAt: now,
    updatedAt: now
  };
  await redis.hset(key, fields);
  if (jobTtlSeconds > 0) {
    await redis.expire(key, jobTtlSeconds);
  }
}
330
+
331
+ // src/queueJobStore.ts
332
+ import { Redis as UpstashRedis } from "@upstash/redis";
333
+ import { MongoClient as MongoClient2 } from "mongodb";
334
// MongoDB settings for queue-job records.
// NOTE(review): unlike the worker job store above there is no
// MONGODB_WORKER_URI override here, and the db defaults to "mediamake"
// rather than "worker" — confirm this asymmetry is intentional.
var mongoUri = process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
var mongoDbName = process.env.DATABASE_MONGODB_DB || process.env.MONGODB_DB || "mediamake";
var mongoQueueCollectionName = process.env.MONGODB_QUEUE_JOBS_COLLECTION || "queue_jobs";
// Lazily-created connection promise shared by all queue-store calls.
var mongoClientPromise = null;
338
/**
 * Returns the shared MongoClient connection promise for the queue
 * job store, creating it on first use. Throws when no URI is set.
 */
async function getMongoClient() {
  if (!mongoUri) {
    throw new Error(
      "MongoDB URI required for queue job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
    );
  }
  if (mongoClientPromise === null) {
    const client = new MongoClient2(mongoUri, {
      maxPoolSize: 10,
      minPoolSize: 0,
      serverSelectionTimeoutMS: 1e4
    });
    mongoClientPromise = client.connect();
  }
  return mongoClientPromise;
}
353
/** Resolves the queue-jobs collection on the configured database. */
async function getMongoQueueCollection() {
  const mongo = await getMongoClient();
  const db = mongo.db(mongoDbName);
  return db.collection(mongoQueueCollectionName);
}
357
// Upstash Redis settings for queue-job records (same credentials as the
// worker job store, separate key prefix).
var redisUrl2 = process.env.WORKER_UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_URL;
var redisToken2 = process.env.WORKER_UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_TOKEN;
var queueKeyPrefix = process.env.WORKER_UPSTASH_REDIS_QUEUE_PREFIX || process.env.UPSTASH_REDIS_QUEUE_PREFIX || "worker:queue-jobs:";
// Queue-job records expire after 7 days unless overridden below.
var defaultTtlSeconds2 = 60 * 60 * 24 * 7;
// TTL resolution: WORKER_QUEUE_JOBS_TTL_SECONDS, then WORKER_JOBS_TTL_SECONDS.
// NOTE(review): `parseInt(...) || default` treats "0" as unset.
var queueJobTtlSeconds = typeof process.env.WORKER_QUEUE_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKER_QUEUE_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds2 : typeof process.env.WORKER_JOBS_TTL_SECONDS === "string" ? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds2 : defaultTtlSeconds2;
// Lazily-created Upstash Redis client (see getRedis2).
var redisClient2 = null;
363
/**
 * Returns the shared Upstash Redis client for the queue job store,
 * creating it on first use. Throws when URL/token are not configured.
 */
function getRedis2() {
  if (!redisUrl2 || !redisToken2) {
    throw new Error(
      "Upstash Redis configuration missing for queue job store. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN)."
    );
  }
  redisClient2 = redisClient2 ?? new UpstashRedis({ url: redisUrl2, token: redisToken2 });
  return redisClient2;
}
377
/** Redis hash key for a queue-job record. */
function queueKey(id) {
  return queueKeyPrefix + id;
}
380
/**
 * Normalizes a hash field into an array of step entries.
 * Accepts a real array, a JSON-encoded array string, or anything else
 * (which yields an empty array).
 */
function stepsFromHash(val) {
  if (Array.isArray(val)) return val;
  if (typeof val !== "string") return [];
  let parsed;
  try {
    parsed = JSON.parse(val);
  } catch {
    return [];
  }
  return Array.isArray(parsed) ? parsed : [];
}
392
/**
 * Normalizes a hash field into a metadata object.
 * Accepts a non-array object, a JSON-encoded object string, or
 * anything else (which yields {}).
 */
function metadataFromHash(val) {
  if (val && typeof val === "object" && !Array.isArray(val)) return val;
  if (typeof val !== "string") return {};
  let parsed;
  try {
    parsed = JSON.parse(val);
  } catch {
    return {};
  }
  return parsed && typeof parsed === "object" ? parsed : {};
}
404
/**
 * Reads a queue-job record from Redis; null when the hash is empty or
 * missing. Absent fields get conservative defaults (status "running",
 * current timestamps).
 */
async function loadQueueJobRedis(queueJobId) {
  const redis = getRedis2();
  const data = await redis.hgetall(queueKey(queueJobId));
  if (!data || typeof data !== "object" || Object.keys(data).length === 0) return null;
  const d = data;
  return {
    id: d.id === void 0 ? queueJobId : String(d.id),
    queueId: String(d.queueId ?? ""),
    status: String(d.status ?? "running"),
    steps: stepsFromHash(d.steps),
    metadata: metadataFromHash(d.metadata),
    createdAt: String(d.createdAt ?? (/* @__PURE__ */ new Date()).toISOString()),
    updatedAt: String(d.updatedAt ?? (/* @__PURE__ */ new Date()).toISOString()),
    completedAt: d.completedAt != null ? String(d.completedAt) : void 0
  };
}
422
/**
 * Persists a queue-job record to Redis as a hash (steps and metadata
 * JSON-encoded) and refreshes the configured TTL.
 */
async function saveQueueJobRedis(record) {
  const redis = getRedis2();
  const key = queueKey(record.id);
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const fields = {
    id: record.id,
    queueId: record.queueId,
    status: record.status,
    steps: JSON.stringify(record.steps || []),
    metadata: JSON.stringify(record.metadata || {}),
    createdAt: record.createdAt || now,
    updatedAt: record.updatedAt || now
  };
  if (record.completedAt) {
    fields.completedAt = record.completedAt;
  }
  await redis.hset(key, fields);
  if (queueJobTtlSeconds > 0) {
    await redis.expire(key, queueJobTtlSeconds);
  }
}
443
/**
 * Resolves the configured job-store backend from WORKER_DATABASE_TYPE.
 * Anything other than "mongodb" (case-insensitive) means Upstash Redis.
 */
function getStoreType() {
  const configured = (process.env.WORKER_DATABASE_TYPE || "upstash-redis").toLowerCase();
  if (configured === "mongodb") return "mongodb";
  return "upstash-redis";
}
447
/** True when the configured backend is MongoDB and a URI is present. */
function preferMongo() {
  if (getStoreType() !== "mongodb") return false;
  return Boolean(mongoUri?.trim());
}
450
/** True when the backend is Redis and both URL and token are present. */
function preferRedis() {
  if (getStoreType() === "mongodb") return false;
  const url = (redisUrl2 || "").trim();
  const token = (redisToken2 || "").trim();
  return Boolean(url && token);
}
453
/**
 * Creates (or backfills) the queue-job record when a chain starts.
 * If a record already exists but has no steps, the first step is
 * seeded; otherwise existing steps are left untouched. Routed to the
 * configured backend; silently a no-op when neither is configured.
 */
async function upsertInitialQueueJob(options) {
  const { queueJobId, queueId, firstWorkerId, firstWorkerJobId, metadata } = options;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  if (preferMongo()) {
    const coll = await getMongoQueueCollection();
    const existing = await coll.findOne({ _id: queueJobId });
    if (existing) {
      const steps = existing.steps ?? [];
      // Seed the first step only when none were recorded yet.
      if (steps.length === 0) {
        steps.push({
          workerId: firstWorkerId,
          workerJobId: firstWorkerJobId,
          status: "queued"
        });
      }
      await coll.updateOne(
        { _id: queueJobId },
        {
          $set: {
            steps,
            updatedAt: now
          }
        }
      );
    } else {
      // Fresh record: chain starts "running" with one queued step.
      const doc = {
        _id: queueJobId,
        id: queueJobId,
        queueId,
        status: "running",
        steps: [
          {
            workerId: firstWorkerId,
            workerJobId: firstWorkerJobId,
            status: "queued"
          }
        ],
        metadata: metadata ?? {},
        createdAt: now,
        updatedAt: now
      };
      // Upsert so a concurrent writer cannot cause a duplicate insert.
      await coll.updateOne(
        { _id: queueJobId },
        { $set: doc },
        { upsert: true }
      );
    }
    return;
  }
  if (preferRedis()) {
    const existing = await loadQueueJobRedis(queueJobId);
    if (existing) {
      // Seed the first step only when none were recorded yet.
      if (!existing.steps || existing.steps.length === 0) {
        existing.steps = [
          {
            workerId: firstWorkerId,
            workerJobId: firstWorkerJobId,
            status: "queued"
          }
        ];
      }
      existing.updatedAt = now;
      await saveQueueJobRedis(existing);
    } else {
      const record = {
        id: queueJobId,
        queueId,
        status: "running",
        steps: [
          {
            workerId: firstWorkerId,
            workerJobId: firstWorkerJobId,
            status: "queued"
          }
        ],
        metadata: metadata ?? {},
        createdAt: now,
        updatedAt: now
      };
      await saveQueueJobRedis(record);
    }
  }
}
536
/**
 * Updates one step of a queue-job record (status, input/output/error,
 * started/completed timestamps) and rolls the overall job status up:
 * a failed step fails the whole job; completing the LAST step
 * completes it. Silently no-ops when the record or step is missing.
 */
async function updateQueueJobStepInStore(options) {
  const { queueJobId, stepIndex, status, input, output, error } = options;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  if (preferMongo()) {
    const coll = await getMongoQueueCollection();
    const existing = await coll.findOne({ _id: queueJobId });
    if (!existing) return;
    const step = existing.steps[stepIndex];
    if (!step) return;
    const mergedStep = {
      ...step,
      status,
      // Only overwrite fields that were actually supplied.
      ...input !== void 0 && { input },
      ...output !== void 0 && { output },
      ...error !== void 0 && { error },
      // Timestamps are write-once: keep existing values if already set.
      startedAt: step.startedAt ?? (status === "running" ? now : step.startedAt),
      completedAt: step.completedAt ?? (status === "completed" || status === "failed" ? now : step.completedAt)
    };
    const setDoc = {
      steps: existing.steps,
      updatedAt: now
    };
    // Splice the merged step into the (shared) steps array in place.
    setDoc.steps[stepIndex] = mergedStep;
    if (status === "failed") {
      setDoc.status = "failed";
      if (!existing.completedAt) setDoc.completedAt = now;
    } else if (status === "completed" && stepIndex === existing.steps.length - 1) {
      setDoc.status = "completed";
      if (!existing.completedAt) setDoc.completedAt = now;
    }
    await coll.updateOne(
      { _id: queueJobId },
      {
        $set: setDoc
      }
    );
    return;
  }
  if (preferRedis()) {
    const existing = await loadQueueJobRedis(queueJobId);
    if (!existing) {
      return;
    }
    const steps = existing.steps || [];
    const step = steps[stepIndex];
    if (!step) {
      return;
    }
    step.status = status;
    // Only overwrite fields that were actually supplied.
    if (input !== void 0) step.input = input;
    if (output !== void 0) step.output = output;
    if (error !== void 0) step.error = error;
    // Timestamps are write-once.
    if (status === "running") {
      step.startedAt = step.startedAt ?? now;
    }
    if (status === "completed" || status === "failed") {
      step.completedAt = step.completedAt ?? now;
    }
    existing.steps = steps;
    existing.updatedAt = now;
    if (status === "failed") {
      existing.status = "failed";
      existing.completedAt = existing.completedAt ?? now;
    } else if (status === "completed" && stepIndex === steps.length - 1) {
      existing.status = "completed";
      existing.completedAt = existing.completedAt ?? now;
    }
    await saveQueueJobRedis(existing);
  }
}
606
/**
 * Appends a new "queued" step (worker + job id) to a queue-job record
 * and bumps updatedAt. No-ops when the Redis record does not exist.
 */
async function appendQueueJobStepInStore(options) {
  const { queueJobId, workerId, workerJobId } = options;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const newStep = { workerId, workerJobId, status: "queued" };
  if (preferMongo()) {
    const coll = await getMongoQueueCollection();
    await coll.updateOne(
      { _id: queueJobId },
      { $push: { steps: newStep }, $set: { updatedAt: now } }
    );
    return;
  }
  if (!preferRedis()) return;
  const record = await loadQueueJobRedis(queueJobId);
  if (!record) return;
  record.steps = [...(record.steps || []), newStep];
  record.updatedAt = now;
  await saveQueueJobRedis(record);
}
640
+
641
// src/handler.ts
// SQS caps a message's DelaySeconds at 900 (15 minutes).
var SQS_MAX_DELAY_SECONDS = 900;
// Reserved key inside a job's input that carries queue-chain context.
var WORKER_QUEUE_KEY = "__workerQueue";
644
/**
 * Best-effort queue-job bookkeeping for one worker step.
 * action "append" adds a new queued step; "start"/"complete"/"fail"
 * transition the step at params.stepIndex. Missing required params
 * make this a no-op; store errors are logged, never thrown.
 */
async function notifyQueueJobStep(queueJobId, action, params) {
  try {
    if (action === "append") {
      if (!params.workerId || !params.workerJobId) return;
      await appendQueueJobStepInStore({
        queueJobId,
        workerId: params.workerId,
        workerJobId: params.workerJobId
      });
      if (process.env.DEBUG_WORKER_QUEUES === "1") {
        console.log("[Worker] Queue job step appended", {
          queueJobId,
          workerId: params.workerId,
          workerJobId: params.workerJobId
        });
      }
      return;
    }
    if (params.stepIndex === void 0) return;
    // Map the action verb onto the stored step status.
    const status = action === "start" ? "running" : action === "complete" ? "completed" : action === "fail" ? "failed" : void 0;
    if (!status) return;
    await updateQueueJobStepInStore({
      queueJobId,
      stepIndex: params.stepIndex,
      workerId: params.workerId || "",
      workerJobId: params.workerJobId,
      status,
      input: params.input,
      output: params.output,
      error: params.error
    });
    if (process.env.DEBUG_WORKER_QUEUES === "1") {
      console.log("[Worker] Queue job step updated", {
        queueJobId,
        action,
        stepIndex: params.stepIndex,
        status
      });
    }
  } catch (err) {
    console.warn("[Worker] Queue job update error:", {
      queueJobId,
      action,
      error: err?.message ?? String(err)
    });
  }
}
691
/**
 * Wraps a worker handler so that, after it finishes, the next step of
 * a queue chain (if any) is dispatched fire-and-forget via SQS.
 * Chain context rides inside the input under WORKER_QUEUE_KEY; the
 * handler's own output is always returned unchanged.
 */
function wrapHandlerForQueue(handler, queueRuntime) {
  return async (params) => {
    const queueContext = params.input?.[WORKER_QUEUE_KEY];
    // Run the real handler first; chaining only happens on success.
    const output = await handler(params);
    if (!queueContext || typeof queueContext !== "object" || !queueContext.id) {
      return output;
    }
    const { id: queueId, stepIndex, initialInput, queueJobId } = queueContext;
    const next = queueRuntime.getNextStep(queueId, stepIndex);
    if (!next) {
      return output;
    }
    // Pre-generate the child job id so it can be recorded before dispatch.
    const childJobId = `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
    if (queueJobId) {
      await notifyQueueJobStep(queueJobId, "append", {
        workerJobId: childJobId,
        workerId: next.workerId
      });
    }
    // By default the next step receives this step's output; an optional
    // mapInputFromPrev hook can transform it first.
    let nextInput = output;
    if (next.mapInputFromPrev && typeof queueRuntime.invokeMapInput === "function") {
      nextInput = await queueRuntime.invokeMapInput(queueId, stepIndex + 1, output, initialInput);
    }
    // Non-object outputs are wrapped as { value } so the queue context
    // can ride alongside.
    const nextInputWithQueue = {
      ...nextInput !== null && typeof nextInput === "object" ? nextInput : { value: nextInput },
      [WORKER_QUEUE_KEY]: {
        id: queueId,
        stepIndex: stepIndex + 1,
        initialInput,
        queueJobId
      }
    };
    const debug = process.env.AI_WORKER_QUEUES_DEBUG === "1";
    if (debug) {
      console.log("[Worker] Queue chain dispatching next:", {
        queueId,
        fromStep: stepIndex,
        nextWorkerId: next.workerId,
        delaySeconds: next.delaySeconds
      });
    }
    await params.ctx.dispatchWorker(next.workerId, nextInputWithQueue, {
      await: false,
      delaySeconds: next.delaySeconds,
      jobId: childJobId
    });
    return output;
  };
}
740
// Poll every 2s while awaiting a dispatched child job (see
// createDispatchWorker) ...
var DEFAULT_POLL_INTERVAL_MS = 2e3;
// ... and give up after 15 minutes.
var DEFAULT_POLL_TIMEOUT_MS = 15 * 60 * 1e3;
742
/** Maps a worker id to env-var form: dashes become underscores, uppercased. */
function sanitizeWorkerIdForEnv(workerId) {
  return workerId.split("-").join("_").toUpperCase();
}
745
/**
 * Looks up the SQS queue URL for a worker from the
 * WORKER_QUEUE_URL_<SANITIZED_ID> env var; undefined when unset/blank.
 */
function getQueueUrlForWorker(calleeWorkerId) {
  const envName = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeWorkerId)}`;
  const value = process.env[envName]?.trim();
  return value || void 0;
}
749
/**
 * Builds the ctx.dispatchWorker function handed to handlers.
 * Sends a job message to the callee worker's SQS queue (resolved from
 * WORKER_QUEUE_URL_<ID>); when options.await is set and a job store is
 * available, polls the store until the child reaches a terminal state.
 * Throws when no queue URL is configured for the callee.
 */
function createDispatchWorker(parentJobId, parentWorkerId, parentContext, jobStore) {
  return async (calleeWorkerId, input, options) => {
    // Caller may pin the child job id (queue chaining does); otherwise
    // a unique one is generated.
    const childJobId = options?.jobId || `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
    const metadata = options?.metadata ?? {};
    // Only requestId is propagated from the parent context.
    const serializedContext = {};
    if (parentContext.requestId) serializedContext.requestId = parentContext.requestId;
    const messageBody = {
      workerId: calleeWorkerId,
      jobId: childJobId,
      input: input ?? {},
      context: serializedContext,
      webhookUrl: options?.webhookUrl,
      metadata,
      timestamp: (/* @__PURE__ */ new Date()).toISOString()
    };
    const queueUrl = getQueueUrlForWorker(calleeWorkerId);
    if (queueUrl) {
      const region = process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION || "us-east-1";
      const sqs = new SQSClient({ region });
      // Delay applies only to fire-and-forget dispatches, clamped to
      // SQS's 900s maximum and floored at 0.
      const delaySeconds = options?.await !== true && options?.delaySeconds != null ? Math.min(SQS_MAX_DELAY_SECONDS, Math.max(0, Math.floor(options.delaySeconds))) : void 0;
      const sendResult = await sqs.send(
        new SendMessageCommand({
          QueueUrl: queueUrl,
          MessageBody: JSON.stringify(messageBody),
          ...delaySeconds !== void 0 && delaySeconds > 0 ? { DelaySeconds: delaySeconds } : {}
        })
      );
      const messageId = sendResult.MessageId ?? void 0;
      // Record the child on the parent's job record for observability.
      if (jobStore?.appendInternalJob) {
        await jobStore.appendInternalJob({ jobId: childJobId, workerId: calleeWorkerId });
      }
      if (options?.await && jobStore?.getJob) {
        const pollIntervalMs = options.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;
        const pollTimeoutMs = options.pollTimeoutMs ?? DEFAULT_POLL_TIMEOUT_MS;
        const deadline = Date.now() + pollTimeoutMs;
        // Poll the job store until the child completes, fails, or we
        // hit the deadline.
        while (Date.now() < deadline) {
          const child = await jobStore.getJob(childJobId);
          if (!child) {
            await new Promise((r) => setTimeout(r, pollIntervalMs));
            continue;
          }
          if (child.status === "completed") {
            return { jobId: childJobId, messageId, output: child.output };
          }
          if (child.status === "failed") {
            const err = child.error;
            throw new Error(
              err?.message ?? `Child worker ${calleeWorkerId} failed`
            );
          }
          await new Promise((r) => setTimeout(r, pollIntervalMs));
        }
        throw new Error(
          `Child worker ${calleeWorkerId} (${childJobId}) did not complete within ${pollTimeoutMs}ms`
        );
      }
      return { jobId: childJobId, messageId };
    }
    throw new Error(
      `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeWorkerId)} is not set. Configure queue URL for worker-to-worker dispatch, or run in local mode.`
    );
  };
}
812
/**
 * POSTs a JSON payload to a webhook URL. Never throws: HTTP and
 * network failures are logged and swallowed so webhook problems
 * cannot fail the job itself.
 */
async function sendWebhook(webhookUrl, payload) {
  try {
    const response = await fetch(webhookUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "User-Agent": "ai-router-worker/1.0"
      },
      body: JSON.stringify(payload)
    });
    if (response.ok) {
      console.log("[Worker] Webhook callback successful:", {
        url: webhookUrl,
        status: response.status
      });
      return;
    }
    const errorText = await response.text().catch(() => "");
    console.error("[Worker] Webhook callback failed:", {
      url: webhookUrl,
      status: response.status,
      statusText: response.statusText,
      errorText
    });
  } catch (error) {
    console.error("[Worker] Webhook callback error:", {
      url: webhookUrl,
      error: error?.message || String(error),
      stack: error?.stack
    });
  }
}
844
/**
 * Wraps a worker handler as an AWS Lambda SQS event handler.
 *
 * For each SQS record: parse the message, skip jobs already in a
 * terminal state (idempotent redelivery), create the configured job
 * store (Redis or Mongo), mark the job running, invoke the handler
 * (optionally validating output against outputSchema), then persist
 * completed/failed state, update any queue-chain step, and fire the
 * webhook if one was supplied. Handler errors are rethrown so SQS
 * retries/DLQs the message.
 *
 * Fix: `context` now defaults to {} in the destructuring (mirroring
 * `metadata = {}`), so a message enqueued without a context no longer
 * crashes at `context.requestId` before any error bookkeeping runs.
 */
function createLambdaHandler(handler, outputSchema) {
  return async (event, lambdaContext) => {
    const promises = event.Records.map(async (record) => {
      let messageBody = null;
      try {
        messageBody = JSON.parse(record.body);
        const { workerId, jobId, input, context = {}, webhookUrl, metadata = {} } = messageBody;
        // Select the job-store backend (defaults to Upstash Redis).
        const raw = (process.env.WORKER_DATABASE_TYPE || "upstash-redis").toLowerCase();
        const jobStoreType = raw === "mongodb" ? "mongodb" : "upstash-redis";
        // Idempotency guard: if a redelivered message refers to a job
        // that already finished, skip it instead of re-running.
        if (jobStoreType === "upstash-redis" && isRedisJobStoreConfigured()) {
          const existing = await loadJob(jobId);
          if (existing && (existing.status === "completed" || existing.status === "failed")) {
            console.log("[Worker] Skipping already terminal job (idempotent):", {
              jobId,
              workerId,
              status: existing.status
            });
            return;
          }
        } else if (jobStoreType === "mongodb" || isMongoJobStoreConfigured()) {
          const existing = await getJobById(jobId);
          if (existing && (existing.status === "completed" || existing.status === "failed")) {
            console.log("[Worker] Skipping already terminal job (idempotent):", {
              jobId,
              workerId,
              status: existing.status
            });
            return;
          }
        }
        // Create the job store and write the initial "queued" record.
        // jobStore stays undefined when neither backend is configured.
        let jobStore;
        if (jobStoreType === "upstash-redis" && isRedisJobStoreConfigured()) {
          await upsertRedisJob(jobId, workerId, input, metadata);
          jobStore = createRedisJobStore(workerId, jobId, input, metadata);
        } else if (jobStoreType === "mongodb" || isMongoJobStoreConfigured()) {
          await upsertJob(jobId, workerId, input, metadata);
          jobStore = createMongoJobStore(workerId, jobId, input, metadata);
        }
        const baseContext = {
          jobId,
          workerId,
          // Fall back to the Lambda invocation id when no requestId rode in.
          requestId: context.requestId || lambdaContext.awsRequestId,
          ...context
        };
        const handlerContext = {
          ...baseContext,
          ...jobStore ? { jobStore } : {},
          dispatchWorker: createDispatchWorker(
            jobId,
            workerId,
            baseContext,
            jobStore
          )
        };
        // Mark running; bookkeeping failures must not block the handler.
        if (jobStore) {
          try {
            await jobStore.update({ status: "running" });
            console.log("[Worker] Job status updated to running:", {
              jobId,
              workerId
            });
          } catch (error) {
            console.warn("[Worker] Failed to update status to running:", {
              jobId,
              workerId,
              error: error?.message || String(error)
            });
          }
        }
        // Queue-chain bookkeeping: seed the queue-job record on step 0
        // and mark this step as started.
        const queueCtx = input?.__workerQueue ?? metadata?.__workerQueue;
        if (queueCtx?.queueJobId && typeof queueCtx.stepIndex === "number") {
          if (queueCtx.stepIndex === 0) {
            try {
              await upsertInitialQueueJob({
                queueJobId: queueCtx.queueJobId,
                queueId: queueCtx.id,
                firstWorkerId: workerId,
                firstWorkerJobId: jobId,
                metadata
              });
            } catch (e) {
              console.warn("[Worker] Failed to upsert initial queue job:", {
                queueJobId: queueCtx.queueJobId,
                queueId: queueCtx.id,
                error: e?.message ?? String(e)
              });
            }
          }
          await notifyQueueJobStep(queueCtx.queueJobId, "start", {
            stepIndex: queueCtx.stepIndex,
            workerJobId: jobId,
            workerId,
            input
          });
        }
        let output;
        try {
          output = await handler({
            input,
            ctx: handlerContext
          });
          // Optional schema validation; a parse failure is treated the
          // same as a handler failure.
          if (outputSchema) {
            output = outputSchema.parse(output);
          }
        } catch (error) {
          const errorPayload = {
            jobId,
            workerId,
            status: "error",
            error: {
              message: error.message || "Unknown error",
              stack: error.stack,
              name: error.name || "Error"
            },
            metadata
          };
          if (jobStore) {
            try {
              await jobStore.update({
                status: "failed",
                error: errorPayload.error
              });
              console.log("[Worker] Job status updated to failed:", {
                jobId,
                workerId
              });
            } catch (updateError) {
              console.warn("[Worker] Failed to update job store on error:", {
                jobId,
                workerId,
                error: updateError?.message || String(updateError)
              });
            }
          }
          const queueCtxFail = input?.__workerQueue ?? metadata?.__workerQueue;
          if (queueCtxFail?.queueJobId && typeof queueCtxFail.stepIndex === "number") {
            await notifyQueueJobStep(queueCtxFail.queueJobId, "fail", {
              stepIndex: queueCtxFail.stepIndex,
              workerJobId: jobId,
              workerId,
              error: errorPayload.error
            });
          }
          if (webhookUrl) {
            await sendWebhook(webhookUrl, errorPayload);
          }
          // Rethrow so SQS redelivers / dead-letters the message.
          throw error;
        }
        if (jobStore) {
          try {
            await jobStore.update({
              status: "completed",
              output
            });
            console.log("[Worker] Job status updated to completed:", {
              jobId,
              workerId
            });
          } catch (updateError) {
            console.warn("[Worker] Failed to update job store on success:", {
              jobId,
              workerId,
              error: updateError?.message || String(updateError)
            });
          }
        }
        const queueCtxSuccess = input?.__workerQueue ?? metadata?.__workerQueue;
        if (queueCtxSuccess?.queueJobId && typeof queueCtxSuccess.stepIndex === "number") {
          await notifyQueueJobStep(queueCtxSuccess.queueJobId, "complete", {
            stepIndex: queueCtxSuccess.stepIndex,
            workerJobId: jobId,
            workerId,
            output
          });
        }
        console.log("[Worker] Job completed:", {
          jobId,
          workerId,
          output
        });
        const successPayload = {
          jobId,
          workerId,
          status: "success",
          output,
          metadata
        };
        if (webhookUrl) {
          await sendWebhook(webhookUrl, successPayload);
        }
      } catch (error) {
        console.error("[Worker] Error processing SQS record:", {
          jobId: messageBody?.jobId ?? "(parse failed)",
          workerId: messageBody?.workerId ?? "(parse failed)",
          error: error?.message || String(error),
          stack: error?.stack
        });
        throw error;
      }
    });
    await Promise.all(promises);
  };
}
1047
+
1048
+ export {
1049
+ SQS_MAX_DELAY_SECONDS,
1050
+ wrapHandlerForQueue,
1051
+ createLambdaHandler
1052
+ };
1053
+ //# sourceMappingURL=chunk-4WU5ZCHS.mjs.map