@microfox/ai-worker-cli 1.0.3 → 1.0.4

Diff of the published package contents between versions 1.0.3 and 1.0.4, as they appear in the public registry.
package/dist/index.cjs CHANGED
@@ -33,7 +33,10 @@ __export(index_exports, {
  aiWorkerCli: () => aiWorkerCli
  });
  module.exports = __toCommonJS(index_exports);
- var import_commander3 = require("commander");
+ var import_commander4 = require("commander");
+ var import_fs = require("fs");
+ var import_url2 = require("url");
+ var import_path = require("path");

  // src/commands/push.ts
  var import_commander = require("commander");
@@ -208,6 +211,15 @@ async function collectCalleeWorkerIds(workers, projectRoot) {
  function sanitizeWorkerIdForEnv(workerId) {
  return workerId.replace(/-/g, "_").toUpperCase();
  }
+ function toCamelCase(id) {
+ return id.split(/[^a-zA-Z0-9]+/).filter(Boolean).map(
+ (part, i) => i === 0 ? part.toLowerCase() : part.charAt(0).toUpperCase() + part.slice(1).toLowerCase()
+ ).join("");
+ }
+ function toPrefixedCamel(prefix, id) {
+ const camel = toCamelCase(id);
+ return prefix + (camel.charAt(0).toUpperCase() + camel.slice(1));
+ }
  function readJsonFile(filePath) {
  try {
  return JSON.parse(fs.readFileSync(filePath, "utf-8"));
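For illustration (not part of the published diff): tracing the new helpers with a hypothetical id shows how the camelCase Lambda function names used further down in this file are derived:

// hypothetical id; toCamelCase splits on non-alphanumerics, toPrefixedCamel capitalizes the first chunk
toCamelCase("my-video-worker");               // => "myVideoWorker"
toPrefixedCamel("worker", "my-video-worker"); // => "workerMyVideoWorker"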
@@ -361,7 +373,7 @@ async function scanQueues(aiPath = "app/ai") {
  const stepsMatch = content.match(/steps:\s*\[([\s\S]*?)\]/);
  if (stepsMatch) {
  const stepsStr = stepsMatch[1];
- const stepRegex = /\{\s*workerId:\s*['"]([^'"]+)['"](?:,\s*delaySeconds:\s*(\d+))?(?:,\s*mapInputFromPrev:\s*['"]([^'"]+)['"])?\s*\}/g;
+ const stepRegex = /\{\s*workerId:\s*['"]([^'"]+)['"](?:,\s*(?:\/\/[^\r\n]*\r?\n\s*)?delaySeconds:\s*(\d+))?(?:,\s*(?:\/\/[^\r\n]*\r?\n\s*)?mapInputFromPrev:\s*['"]([^'"]+)['"])?\s*,?\s*(?:\/\/[^\r\n]*\r?\n\s*)?(?=\s*\})\s*\},?/g;
  let m;
  while ((m = stepRegex.exec(stepsStr)) !== null) {
  steps.push({
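For illustration: the widened stepRegex now tolerates inline // comments and trailing commas inside a step object, which the old pattern rejected. A hypothetical step it can now extract:

// extracted as { workerId: 'fetch-data', delaySeconds: 30 }
steps: [
  {
    workerId: 'fetch-data',
    // wait before dispatching the next step
    delaySeconds: 30,
  },
]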
@@ -372,8 +384,9 @@ async function scanQueues(aiPath = "app/ai") {
  }
  }
  let schedule;
- const scheduleStrMatch = content.match(/schedule:\s*['"]([^'"]+)['"]/);
- const scheduleObjMatch = content.match(/schedule:\s*(\{[^}]+(?:\{[^}]*\}[^}]*)*\})/);
+ const contentWithoutLineComments = content.replace(/\/\/[^\n]*/g, "");
+ const scheduleStrMatch = contentWithoutLineComments.match(/schedule:\s*['"]([^'"]+)['"]/);
+ const scheduleObjMatch = contentWithoutLineComments.match(/schedule:\s*(\{[^}]+(?:\{[^}]*\}[^}]*)*\})/);
  if (scheduleStrMatch) {
  schedule = scheduleStrMatch[1];
  } else if (scheduleObjMatch) {
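For illustration: stripping // line comments before matching keeps a commented-out schedule (like the one in the scaffolded queue template later in this diff) from being picked up as real:

const content = "// schedule: 'cron(0 3 * * ? *)',";
content.replace(/\/\/[^\n]*/g, ""); // => "" - the old matcher would have captured the cron expression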
@@ -395,10 +408,32 @@ function generateQueueRegistry(queues, outputDir, projectRoot) {
  if (!fs.existsSync(generatedDir)) {
  fs.mkdirSync(generatedDir, { recursive: true });
  }
+ const relToRoot = path.relative(generatedDir, projectRoot).replace(/\\/g, "/");
+ const queueModulesLines = [];
+ const queueModulesEntries = [];
+ const queuesWithMapping = queues.filter(
+ (q) => q.steps?.some((s) => s.mapInputFromPrev)
+ );
+ for (let i = 0; i < queuesWithMapping.length; i++) {
+ const q = queuesWithMapping[i];
+ const relPath = (relToRoot + "/" + q.filePath.replace(/\\/g, "/")).replace(/\.ts$/, "");
+ const safeId = q.id.replace(/[^a-zA-Z0-9]/g, "");
+ queueModulesLines.push(`const queueModule_${safeId} = require('${relPath}');`);
+ queueModulesEntries.push(` '${q.id}': queueModule_${safeId},`);
+ }
+ const queueModulesBlock = queueModulesLines.length > 0 ? `
+ ${queueModulesLines.join("\n")}
+ const queueModules = {
+ ${queueModulesEntries.join("\n")}
+ };
+ ` : `
+ const queueModules = {};
+ `;
  const registryContent = `/**
  * Auto-generated queue registry. DO NOT EDIT.
  * Generated by @microfox/ai-worker-cli from .queue.ts files.
  */
+ ${queueModulesBlock}

  const QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};

@@ -415,8 +450,14 @@ export function getNextStep(queueId, stepIndex) {
  return step ? { workerId: step.workerId, delaySeconds: step.delaySeconds, mapInputFromPrev: step.mapInputFromPrev } : undefined;
  }

- export function invokeMapInput(_queueId, _stepIndex, prevOutput, _initialInput) {
- return prevOutput;
+ export function invokeMapInput(queueId, stepIndex, initialInput, previousOutputs) {
+ const queue = getQueueById(queueId);
+ const step = queue?.steps?.[stepIndex];
+ const fnName = step?.mapInputFromPrev;
+ if (!fnName) return previousOutputs.length ? previousOutputs[previousOutputs.length - 1].output : initialInput;
+ const mod = queueModules[queueId];
+ if (!mod || typeof mod[fnName] !== 'function') return previousOutputs.length ? previousOutputs[previousOutputs.length - 1].output : initialInput;
+ return mod[fnName](initialInput, previousOutputs);
  }
  `;
  const registryPath = path.join(generatedDir, "workerQueues.registry.js");
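For illustration: a mapping function a .queue.ts module might export (names hypothetical). The registry now require()s the queue module and calls the export named by a step's mapInputFromPrev as fn(initialInput, previousOutputs), falling back to the last step's output when the function is absent:

// referenced from a step as { workerId: 'second-worker', mapInputFromPrev: 'buildSecondStepInput' }
export function buildSecondStepInput(
  initialInput: Record<string, any>,
  previousOutputs: Array<{ output: any }> // each entry carries a prior step's output
) {
  const prev = previousOutputs[previousOutputs.length - 1]?.output;
  return { ...initialInput, upstream: prev };
}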
@@ -452,11 +493,13 @@ function mergeQueueCallees(calleeIds, queues, workers) {
  }
  async function generateHandlers(workers, outputDir, queues = []) {
  const handlersDir = path.join(outputDir, "handlers");
+ const workersSubdir = path.join(handlersDir, "workers");
  const workersInQueues = getWorkersInQueues(queues);
- if (fs.existsSync(handlersDir)) {
- fs.rmSync(handlersDir, { recursive: true, force: true });
+ if (fs.existsSync(workersSubdir)) {
+ fs.rmSync(workersSubdir, { recursive: true, force: true });
  }
  fs.mkdirSync(handlersDir, { recursive: true });
+ fs.mkdirSync(workersSubdir, { recursive: true });
  for (const worker of workers) {
  const handlerFile = path.join(handlersDir, worker.handlerPath.replace("handlers/", "") + ".js");
  const handlerDir = path.dirname(handlerFile);
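For illustration: generated handlers are now grouped by kind instead of being wiped wholesale; only the workers subtree is cleared on rebuild. The resulting layout (worker file placement is inferred from worker.handlerPath, so treat it as an assumption):

handlers/
  api/        docs.js, workers-trigger.js, workers-config.js
  queues/     <queue-id>.js
  workers/    per-worker bundles (removed and regenerated each build)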
@@ -482,6 +525,7 @@ async function generateHandlers(workers, outputDir, queues = []) {
  const registryImportPath = registryRelPath.startsWith(".") ? registryRelPath : "./" + registryRelPath;
  const handlerCreation = inQueue ? `
  import { createLambdaHandler, wrapHandlerForQueue } from '@microfox/ai-worker/handler';
+ import { getQueueJob } from '@microfox/ai-worker/queueJobStore';
  import * as queueRegistry from '${registryImportPath}';
  import * as workerModule from '${relativeImportPath}';

@@ -495,6 +539,7 @@ if (!workerAgent || typeof workerAgent.handler !== 'function') {
  const queueRuntime = {
  getNextStep: queueRegistry.getNextStep,
  invokeMapInput: queueRegistry.invokeMapInput,
+ getQueueJob,
  };
  const wrappedHandler = wrapHandlerForQueue(workerAgent.handler, queueRuntime);

@@ -502,10 +547,20 @@ const baseHandler = createLambdaHandler(wrappedHandler, workerAgent.outputSchema

  export const handler = async (event: any, context: any) => {
  const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;
+ let queueId, queueJobId;
  try {
+ const first = (event as any)?.Records?.[0];
+ if (first?.body) {
+ const body = typeof first.body === 'string' ? JSON.parse(first.body) : first.body;
+ const qc = body?.input?.__workerQueue ?? body?.metadata?.__workerQueue;
+ if (qc?.id) queueId = qc.id;
+ if (qc?.queueJobId) queueJobId = qc.queueJobId;
+ }
  console.log(WORKER_LOG_PREFIX, {
  workerId: workerAgent.id,
  inQueue: true,
+ ...(queueId && { queueId }),
+ ...(queueJobId && { queueJobId }),
  records,
  requestId: (context as any)?.awsRequestId,
  });
@@ -617,7 +672,8 @@ export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.wo
  console.log(import_chalk.default.green(`\u2713 Generated ${workers.length} bundled handlers`));
  }
  function generateDocsHandler(outputDir, serviceName, stage, region) {
- const handlerFile = path.join(outputDir, "handlers", "docs.js");
+ const apiDir = path.join(outputDir, "handlers", "api");
+ const handlerFile = path.join(apiDir, "docs.js");
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const handlerDir = path.dirname(handlerFile);
  if (!fs.existsSync(handlerDir)) {
@@ -833,7 +889,8 @@ export const handler = async (
  console.log(import_chalk.default.green(`\u2713 Generated docs.json handler`));
  }
  function generateTriggerHandler(outputDir, serviceName) {
- const handlerFile = path.join(outputDir, "handlers", "workers-trigger.js");
+ const apiDir = path.join(outputDir, "handlers", "api");
+ const handlerFile = path.join(apiDir, "workers-trigger.js");
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const handlerDir = path.dirname(handlerFile);
  if (!fs.existsSync(handlerDir)) {
@@ -958,9 +1015,10 @@ export const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayPr
  fs.unlinkSync(tempEntryFile);
  console.log(import_chalk.default.green(`\u2713 Generated /workers/trigger handler`));
  }
- function generateQueueStarterHandler(outputDir, queue, serviceName) {
- const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, "");
- const handlerFile = path.join(outputDir, "handlers", `queue-starter-${safeId}.js`);
+ function generateQueueHandler(outputDir, queue, serviceName) {
+ const queueFileId = queue.id.replace(/[^a-zA-Z0-9-]/g, "-").replace(/-+/g, "-");
+ const queuesDir = path.join(outputDir, "handlers", "queues");
+ const handlerFile = path.join(queuesDir, `${queueFileId}.js`);
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const handlerDir = path.dirname(handlerFile);
  if (!fs.existsSync(handlerDir)) {
@@ -969,39 +1027,99 @@ function generateQueueStarterHandler(outputDir, queue, serviceName) {
  const firstWorkerId = queue.steps[0]?.workerId;
  if (!firstWorkerId) return;
  const handlerContent = `/**
- * Auto-generated queue-starter for queue "${queue.id}"
+ * Auto-generated queue handler for queue "${queue.id}"
  * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
+ * Invoked by schedule (if configured) or HTTP POST /queues/${queue.id}/start (dispatch proxy).
  */

- import { ScheduledHandler } from 'aws-lambda';
  import { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';
+ import { upsertInitialQueueJob } from '@microfox/ai-worker/queueJobStore';

  const QUEUE_ID = ${JSON.stringify(queue.id)};
  const FIRST_WORKER_ID = ${JSON.stringify(firstWorkerId)};
  const SERVICE_NAME = ${JSON.stringify(serviceName)};

- export const handler: ScheduledHandler = async () => {
+ function isHttpEvent(event: any): event is { body?: string; requestContext?: any } {
+ return event && typeof event.requestContext === 'object' && (event.body !== undefined || event.httpMethod === 'POST');
+ }
+
+ export const handler = async (event: any) => {
  const stage = process.env.ENVIRONMENT || process.env.STAGE || 'prod';
  const region = process.env.AWS_REGION || 'us-east-1';
  const queueName = \`\${SERVICE_NAME}-\${FIRST_WORKER_ID}-\${stage}\`;

+ let jobId: string;
+ let initialInput: Record<string, any>;
+ let context: Record<string, any> = {};
+ let metadata: Record<string, any> = {};
+ let webhookUrl: string | undefined;
+
+ if (isHttpEvent(event)) {
+ const apiKey = process.env.WORKERS_TRIGGER_API_KEY;
+ if (apiKey) {
+ const provided = (event.headers && (event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'])) || '';
+ if (provided !== apiKey) {
+ return { statusCode: 401, headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ error: 'Unauthorized' }) };
+ }
+ }
+ let body: { input?: any; initialInput?: any; jobId?: string; metadata?: any; context?: any; webhookUrl?: string } = {};
+ if (event.body) {
+ try {
+ body = typeof event.body === 'string' ? JSON.parse(event.body) : event.body;
+ } catch (_) {}
+ }
+ jobId = (body.jobId && String(body.jobId).trim()) || 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);
+ const rawInput = body.input != null ? body.input : body.initialInput;
+ initialInput = rawInput != null && typeof rawInput === 'object' ? rawInput : {};
+ context = body.context && typeof body.context === 'object' ? body.context : {};
+ metadata = body.metadata && typeof body.metadata === 'object' ? body.metadata : {};
+ webhookUrl = typeof body.webhookUrl === 'string' ? body.webhookUrl : undefined;
+
+ const response = { statusCode: 200, headers: { 'Content-Type': 'application/json' }, body: '' };
+ try {
+ await upsertInitialQueueJob({ queueJobId: jobId, queueId: QUEUE_ID, firstWorkerId: FIRST_WORKER_ID, firstWorkerJobId: jobId, metadata });
+ await sendFirstMessage(region, queueName, jobId, initialInput, context, metadata, webhookUrl, 'http');
+ response.body = JSON.stringify({ queueId: QUEUE_ID, jobId, status: 'queued' });
+ } catch (err: any) {
+ response.statusCode = 500;
+ response.body = JSON.stringify({ error: err?.message || String(err) });
+ }
+ return response;
+ }
+
+ // Scheduled invocation
+ jobId = 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);
+ initialInput = {};
+ try {
+ await upsertInitialQueueJob({ queueJobId: jobId, queueId: QUEUE_ID, firstWorkerId: FIRST_WORKER_ID, firstWorkerJobId: jobId, metadata: {} });
+ } catch (_) {}
+ await sendFirstMessage(region, queueName, jobId, initialInput, context, metadata, webhookUrl, 'schedule');
+ };
+
+ async function sendFirstMessage(
+ region: string,
+ queueName: string,
+ jobId: string,
+ initialInput: Record<string, any>,
+ context: Record<string, any>,
+ metadata: Record<string, any>,
+ webhookUrl?: string,
+ trigger?: 'schedule' | 'http'
+ ) {
  const sqs = new SQSClient({ region });
  const { QueueUrl } = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
  if (!QueueUrl) {
  throw new Error('Queue URL not found: ' + queueName);
  }

- const jobId = 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);
- const initialInput = {};
+ const queueContext = { id: QUEUE_ID, stepIndex: 0, initialInput, queueJobId: jobId };
  const messageBody = {
  workerId: FIRST_WORKER_ID,
  jobId,
- input: {
- ...initialInput,
- __workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput },
- },
- context: {},
- metadata: { __workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput } },
+ input: { ...initialInput, __workerQueue: queueContext },
+ context,
+ metadata: { ...metadata, __workerQueue: queueContext },
+ ...(webhookUrl ? { webhookUrl } : {}),
  timestamp: new Date().toISOString(),
  };

@@ -1010,8 +1128,8 @@ export const handler: ScheduledHandler = async () => {
  MessageBody: JSON.stringify(messageBody),
  }));

- console.log('[queue-starter] Dispatched first worker for queue:', { queueId: QUEUE_ID, jobId, workerId: FIRST_WORKER_ID });
- };
+ console.log('[queue] Dispatched first worker', { queueId: QUEUE_ID, jobId, workerId: FIRST_WORKER_ID, trigger: trigger ?? 'unknown' });
+ }
  `;
  fs.writeFileSync(tempEntryFile, handlerContent);
  esbuild.buildSync({
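For illustration: starting a queue run through the new HTTP entry point (base URL and queue id hypothetical; the key header is only enforced when WORKERS_TRIGGER_API_KEY is set on the function):

const res = await fetch(
  'https://<api-id>.execute-api.us-east-1.amazonaws.com/prod/queues/my-pipeline/start',
  {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-workers-trigger-key': 'my-secret', // only checked if WORKERS_TRIGGER_API_KEY is set
    },
    // jobId, context, metadata, and webhookUrl are optional body fields
    body: JSON.stringify({ input: { topic: 'demo' } }),
  }
);
const out = await res.json(); // => { queueId: 'my-pipeline', jobId: 'job-...', status: 'queued' }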
@@ -1025,14 +1143,14 @@ export const handler: ScheduledHandler = async () => {
  logLevel: "error"
  });
  fs.unlinkSync(tempEntryFile);
- console.log(import_chalk.default.green(`\u2713 Generated queue-starter for ${queue.id}`));
+ console.log(import_chalk.default.green(`\u2713 Generated queue handler for ${queue.id}`));
  }
  function generateWorkersConfigHandler(outputDir, workers, serviceName, queues = []) {
- const handlerFile = path.join(outputDir, "handlers", "workers-config.js");
+ const apiDir = path.join(outputDir, "handlers", "api");
+ const handlerFile = path.join(apiDir, "workers-config.js");
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const handlerDir = path.dirname(handlerFile);
- if (fs.existsSync(handlerDir) && !fs.existsSync(handlerFile)) {
- } else if (!fs.existsSync(handlerDir)) {
+ if (!fs.existsSync(handlerDir)) {
  fs.mkdirSync(handlerDir, { recursive: true });
  }
  const handlerContent = `/**
@@ -1292,7 +1410,7 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName,
  }
  const functions = {};
  for (const worker of workers) {
- const functionName = `worker${worker.id.replace(/[^a-zA-Z0-9]/g, "")}`;
+ const functionName = toPrefixedCamel("worker", worker.id);
  const events = [
  {
  sqs: {
@@ -1332,7 +1450,7 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName,
  }
  }
  functions["getDocs"] = {
- handler: "handlers/docs.handler",
+ handler: "handlers/api/docs.handler",
  events: [
  {
  http: {
@@ -1344,7 +1462,7 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName,
  ]
  };
  functions["triggerWorker"] = {
- handler: "handlers/workers-trigger.handler",
+ handler: "handlers/api/workers-trigger.handler",
  events: [
  {
  http: {
@@ -1356,7 +1474,7 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName,
  ]
  };
  functions["workersConfig"] = {
- handler: "handlers/workers-config.handler",
+ handler: "handlers/api/workers-config.handler",
  events: [
  {
  http: {
@@ -1368,17 +1486,26 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName,
  ]
  };
  for (const queue of queues) {
+ const queueFileId = queue.id.replace(/[^a-zA-Z0-9-]/g, "-").replace(/-+/g, "-");
+ const fnName = toPrefixedCamel("queue", queue.id);
+ const events = [
+ {
+ http: {
+ path: `queues/${queueFileId}/start`,
+ method: "POST",
+ cors: true
+ }
+ }
+ ];
  if (queue.schedule) {
- const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, "");
- const fnName = `queueStarter${safeId}`;
- const scheduleEvents = processScheduleEvents(queue.schedule);
- functions[fnName] = {
- handler: `handlers/queue-starter-${safeId}.handler`,
- timeout: 60,
- memorySize: 128,
- events: scheduleEvents
- };
+ events.push(...processScheduleEvents(queue.schedule));
  }
+ functions[fnName] = {
+ handler: `handlers/queues/${queueFileId}.handler`,
+ timeout: 60,
+ memorySize: 128,
+ events
+ };
  }
  const safeEnvVars = {};
  const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "UPSTASH_", "WORKER_", "WORKERS_", "WORKFLOW_", "REMOTION_", "QUEUE_JOB_", "DEBUG_WORKER_QUEUES"];
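For illustration: a queue with id "my-pipeline" (hypothetical) now yields one serverless function that always exposes the HTTP start route, with schedule events appended only when queue.schedule is set:

functions['queueMyPipeline'] = {
  handler: 'handlers/queues/my-pipeline.handler',
  timeout: 60,
  memorySize: 128,
  events: [
    { http: { path: 'queues/my-pipeline/start', method: 'POST', cors: true } },
    // ...plus processScheduleEvents(queue.schedule) when a schedule is configured
  ],
};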
@@ -1537,7 +1664,7 @@ async function build2(args) {
  const jobStoreType = getJobStoreType();
  const filteredDeps = filterDepsForJobStore(runtimeDeps, jobStoreType);
  const dependencies = buildDependenciesMap(process.cwd(), filteredDeps);
- const packageJson = {
+ const packageJson2 = {
  name: "ai-router-workers",
  version: "1.0.0",
  description: "Auto-generated serverless workers",
@@ -1554,7 +1681,7 @@ async function build2(args) {
  };
  fs.writeFileSync(
  path.join(serverlessDir, "package.json"),
- JSON.stringify(packageJson, null, 2)
+ JSON.stringify(packageJson2, null, 2)
  );
  const envVars = loadEnvVars();
  const workerEntryFiles = workers.map((w) => w.filePath);
@@ -1616,7 +1743,8 @@ async function build2(args) {
  console.log(import_chalk.default.gray(` \u2713 ${worker.id}: found ${module2.exportedWorkerConfig.layers.length} layer(s)`));
  }
  } else {
- console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: exportedWorkerConfig not found in handler`));
+ worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+ console.log(import_chalk.default.gray(` \u2139 ${worker.id}: using default config (exportedWorkerConfig not in bundle)`));
  }
  } catch (importError) {
  console.log(import_chalk.default.gray(` \u2139 ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || "runtime error"}...)`));
@@ -1637,14 +1765,21 @@ async function build2(args) {
  }
  }
  } catch (fallbackError) {
- console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: fallback extraction also failed, using defaults`));
+ worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+ console.log(import_chalk.default.gray(` \u2139 ${worker.id}: using default config (fallback extraction failed)`));
  }
  }
  } else {
- console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: handler file not found: ${handlerFile}`));
+ worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+ console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: handler file not found: ${handlerFile}, using defaults`));
+ }
+ if (!worker.workerConfig) {
+ worker.workerConfig = { timeout: 300, memorySize: 512 };
+ console.log(import_chalk.default.gray(` \u2139 ${worker.id}: using default config`));
  }
  } catch (error) {
- console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: failed to extract config: ${error?.message || error}`));
+ worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+ console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: failed to extract config: ${error?.message || error}, using defaults`));
  }
  }
  extractSpinner.succeed("Extracted configs");
@@ -1652,9 +1787,7 @@ async function build2(args) {
  generateDocsHandler(serverlessDir, serviceName, stage, region);
  generateTriggerHandler(serverlessDir, serviceName);
  for (const queue of queues) {
- if (queue.schedule) {
- generateQueueStarterHandler(serverlessDir, queue, serviceName);
- }
+ generateQueueHandler(serverlessDir, queue, serviceName);
  }
  let calleeIds = await collectCalleeWorkerIds(workers, process.cwd());
  calleeIds = mergeQueueCallees(calleeIds, queues, workers);
@@ -1745,26 +1878,21 @@ var fs2 = __toESM(require("fs"), 1);
  var path2 = __toESM(require("path"), 1);
  var import_chalk2 = __toESM(require("chalk"), 1);
  var import_ora2 = __toESM(require("ora"), 1);
- var newCommand = new import_commander2.Command().name("new").description("Scaffold a new background worker file").argument("<id>", "Worker ID (used as the worker id and filename)").option("--dir <path>", "Directory for the worker file", "app/ai/workers").option("--schedule <expression>", 'Optional schedule expression (e.g. "cron(0 3 * * ? *)" or "rate(1 hour)")').option("--timeout <seconds>", "Lambda timeout in seconds", "300").option("--memory <mb>", "Lambda memory size in MB", "512").action((id, options) => {
- const spinner = (0, import_ora2.default)("Scaffolding worker...").start();
- try {
- const projectRoot = process.cwd();
- const dir = path2.resolve(projectRoot, options.dir || "app/ai/workers");
- if (!fs2.existsSync(dir)) {
- fs2.mkdirSync(dir, { recursive: true });
- }
- const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, "-");
- const filePath = path2.join(dir, `${fileSafeId}.worker.ts`);
- if (fs2.existsSync(filePath)) {
- spinner.fail(`File already exists: ${path2.relative(projectRoot, filePath)}`);
- process.exitCode = 1;
- return;
- }
- const timeout = Number(options.timeout || "300") || 300;
- const memorySize = Number(options.memory || "512") || 512;
- const scheduleLine = options.schedule ? ` schedule: '${options.schedule}',
+ var import_prompts = __toESM(require("prompts"), 1);
+ var WORKER_DIR_DEFAULT = "app/ai/workers";
+ var QUEUES_DIR_DEFAULT = "app/ai/queues";
+ function scaffoldWorker(projectRoot, id, options) {
+ const dir = path2.resolve(projectRoot, options.dir || WORKER_DIR_DEFAULT);
+ if (!fs2.existsSync(dir)) {
+ fs2.mkdirSync(dir, { recursive: true });
+ }
+ const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, "-");
+ const filePath = path2.join(dir, `${fileSafeId}.worker.ts`);
+ const timeout = Number(options.timeout || "300") || 300;
+ const memorySize = Number(options.memory || "512") || 512;
+ const scheduleLine = options.schedule ? ` schedule: '${options.schedule}',
  ` : "";
- const contents = `import { createWorker, type WorkerConfig } from '@microfox/ai-worker';
+ const contents = `import { createWorker, type WorkerConfig } from '@microfox/ai-worker';
  import { z } from 'zod';
  import type { WorkerHandlerParams } from '@microfox/ai-worker/handler';

@@ -1789,8 +1917,8 @@ export default createWorker<typeof InputSchema, Output>({
  inputSchema: InputSchema,
  outputSchema: OutputSchema,
  async handler({ input, ctx }: WorkerHandlerParams<Input, Output>) {
- const { jobId, workerId, jobStore, dispatchWorker } = ctx;
- console.log('[${id}] start', { jobId, workerId });
+ const { jobId, workerId, jobStore, dispatchWorker, logger } = ctx;
+ logger.info('start', { jobId, workerId });

  await jobStore?.update({ status: 'running' });

@@ -1802,23 +1930,2737 @@ export default createWorker<typeof InputSchema, Output>({
  },
  });
  `;
- fs2.writeFileSync(filePath, contents, "utf-8");
- spinner.succeed(
- `Created worker: ${import_chalk2.default.cyan(path2.relative(projectRoot, filePath))}
- Next: run ${import_chalk2.default.yellow("npx @microfox/ai-worker-cli@latest push")} to build & deploy your workers.`
+ fs2.writeFileSync(filePath, contents, "utf-8");
+ return path2.relative(projectRoot, filePath);
+ }
+ function scaffoldQueue(projectRoot, id, options) {
+ const dir = path2.resolve(projectRoot, options.dir || QUEUES_DIR_DEFAULT);
+ if (!fs2.existsSync(dir)) {
+ fs2.mkdirSync(dir, { recursive: true });
+ }
+ const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, "-");
+ const filePath = path2.join(dir, `${fileSafeId}.queue.ts`);
+ const contents = `import { defineWorkerQueue } from '@microfox/ai-worker/queue';
+
+ /**
+ * Worker queue: ${id}
+ * Steps run in sequence. Each step's output can be mapped to the next step's input.
+ */
+ export default defineWorkerQueue({
+ id: '${id}',
+ steps: [
+ { workerId: 'first-worker' },
+ // Add more steps: { workerId: 'second-worker' }, { workerId: 'third-worker', delaySeconds: 10 }
+ ],
+ // Optional: run on a schedule (CLI will generate a queue-starter Lambda)
+ // schedule: 'cron(0 3 * * ? *)',
+ });
+ `;
+ fs2.writeFileSync(filePath, contents, "utf-8");
+ return path2.relative(projectRoot, filePath);
+ }
+ var newCommand = new import_commander2.Command().name("new").description("Scaffold a new worker or queue (interactive: choose type, then enter id)").argument("[id]", "Worker or queue ID (optional; will prompt if omitted)").option("--type <worker|queue>", "Scaffold type (skips interactive prompt)").option("--dir <path>", "Directory for the output file (workers: app/ai/workers, queues: app/ai/queues)", "").option("--schedule <expression>", 'Optional schedule (workers only; e.g. "cron(0 3 * * ? *)")').option("--timeout <seconds>", "Lambda timeout in seconds (workers only)", "300").option("--memory <mb>", "Lambda memory in MB (workers only)", "512").action(
+ async (idArg, options) => {
+ const projectRoot = process.cwd();
+ let type;
+ let id;
+ if (options.type === "worker" || options.type === "queue") {
+ type = options.type;
+ id = (idArg ?? "").trim();
+ if (!id) {
+ const res = await (0, import_prompts.default)({
+ type: "text",
+ name: "id",
+ message: `Enter ${type} ID:`,
+ validate: (v) => v.trim() ? true : "ID is required"
+ });
+ if (typeof res.id !== "string") {
+ process.exitCode = 1;
+ return;
+ }
+ id = res.id.trim();
+ }
+ } else {
+ const typeRes = await (0, import_prompts.default)({
+ type: "select",
+ name: "type",
+ message: "What do you want to create?",
+ choices: [
+ { title: "Worker", value: "worker", description: "A single background worker (.worker.ts)" },
+ { title: "Queue", value: "queue", description: "A multi-step worker queue (.queue.ts)" }
+ ]
+ });
+ if (typeRes.type === void 0) {
+ process.exitCode = 1;
+ return;
+ }
+ type = typeRes.type;
+ id = (idArg ?? "").trim();
+ if (!id) {
+ const idRes = await (0, import_prompts.default)({
+ type: "text",
+ name: "id",
+ message: `Enter ${type} ID:`,
+ validate: (v) => v.trim() ? true : "ID is required"
+ });
+ if (typeof idRes.id !== "string") {
+ process.exitCode = 1;
+ return;
+ }
+ id = idRes.id.trim();
+ }
+ }
+ const spinner = (0, import_ora2.default)(`Scaffolding ${type}...`).start();
+ try {
+ const dirOpt = options.dir ? { dir: options.dir } : {};
+ if (type === "worker") {
+ const relativePath = scaffoldWorker(projectRoot, id, {
+ ...dirOpt,
+ schedule: options.schedule,
+ timeout: options.timeout,
+ memory: options.memory
+ });
+ spinner.succeed(
+ `Created worker: ${import_chalk2.default.cyan(relativePath)}
+ Next: run ${import_chalk2.default.yellow("npx ai-worker push")} to build & deploy.`
+ );
+ } else {
+ const relativePath = scaffoldQueue(projectRoot, id, dirOpt);
+ spinner.succeed(
+ `Created queue: ${import_chalk2.default.cyan(relativePath)}
+ Edit steps (workerId) to match your workers, then run ${import_chalk2.default.yellow("npx ai-worker push")} to build & deploy.`
+ );
+ }
+ } catch (error) {
+ const err = error;
+ spinner.fail(`Failed to scaffold ${type}`);
+ console.error(import_chalk2.default.red(err?.stack || err?.message || String(error)));
+ process.exitCode = 1;
+ }
+ }
+ );
+
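For illustration: with the reworked command, scaffolding can be driven non-interactively (ids hypothetical; "npx ai-worker push" is the invocation the command's own success message uses); omitting --type or the id falls back to the prompts above:

npx ai-worker new --type worker my-worker --timeout 600
npx ai-worker new --type queue my-pipeline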
+ // src/commands/boilerplate.ts
+ var import_commander3 = require("commander");
+ var fs3 = __toESM(require("fs"), 1);
+ var path3 = __toESM(require("path"), 1);
+ var import_chalk3 = __toESM(require("chalk"), 1);
+ var import_ora3 = __toESM(require("ora"), 1);
+ var TEMPLATES = {
+ "stores/jobStore.ts": `/**
+ * Job store for tracking worker job status and results.
+ *
+ * Always uses MongoDB. Workers run on AWS Lambda and update jobs via the API;
+ * in-memory storage is not shared across processes, so a persistent store is required.
+ *
+ * Configure via \`microfox.config.ts\` -> \`workflowSettings.jobStore\` or env:
+ * - WORKER_DATABASE_TYPE: 'mongodb' | 'upstash-redis' (default: upstash-redis)
+ * - DATABASE_MONGODB_URI or MONGODB_URI (required for mongodb)
+ * - DATABASE_MONGODB_DB or MONGODB_DB; MONGODB_WORKER_JOBS_COLLECTION (default: worker_jobs)
+ * - WORKER_UPSTASH_REDIS_* / WORKER_JOBS_TTL_SECONDS for Redis
+ *
+ * Job record structure:
+ * {
+ * jobId: string,
+ * workerId: string,
+ * status: 'queued' | 'running' | 'completed' | 'failed',
+ * input: any,
+ * output?: any,
+ * error?: { message: string, stack?: string },
+ * metadata?: Record<string, any>,
+ * createdAt: string,
+ * updatedAt: string,
+ * completedAt?: string
+ * }
+ */
+
+ export interface InternalJobEntry {
+ jobId: string;
+ workerId: string;
+ }
+
+ export interface JobRecord {
+ jobId: string;
+ workerId: string;
+ status: 'queued' | 'running' | 'completed' | 'failed';
+ input: any;
+ output?: any;
+ error?: {
+ message: string;
+ stack?: string;
+ };
+ metadata?: Record<string, any>;
+ internalJobs?: InternalJobEntry[];
+ createdAt: string;
+ updatedAt: string;
+ completedAt?: string;
+ }
+
+ // Storage adapter interface
+ interface JobStoreAdapter {
+ setJob(jobId: string, data: Partial<JobRecord>): Promise<void>;
+ getJob(jobId: string): Promise<JobRecord | null>;
+ updateJob(jobId: string, data: Partial<JobRecord>): Promise<void>;
+ appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void>;
+ listJobsByWorker(workerId: string): Promise<JobRecord[]>;
+ }
+
+ // Job store can use MongoDB or Upstash Redis (workers run on Lambda; no in-memory fallback).
+ function getStorageAdapter(): JobStoreAdapter {
+ try {
+ // Prefer workflowSettings.jobStore.type from microfox.config.ts; env fallback: WORKER_DATABASE_TYPE
+ let jobStoreType: string | undefined;
+ try {
+ const config = require('@/microfox.config').StudioConfig as {
+ workflowSettings?: { jobStore?: { type?: string } };
+ };
+ jobStoreType = config?.workflowSettings?.jobStore?.type;
+ } catch {
+ // Config missing or not resolvable; fall back to env
+ }
+ jobStoreType = jobStoreType || process.env.WORKER_DATABASE_TYPE || 'upstash-redis';
+ const normalized = jobStoreType.toLowerCase();
+
+ if (normalized === 'upstash-redis' || normalized === 'redis') {
+ const { redisJobStore } = require('./redisAdapter');
+ console.log('[JobStore] Ready (Upstash Redis)');
+ return redisJobStore;
+ }
+
+ const { mongoJobStore } = require('./mongoAdapter');
+ console.log('[JobStore] Ready (MongoDB)');
+ return mongoJobStore;
+ } catch (error: any) {
+ const msg = error?.message || String(error);
+ console.error('[JobStore] Job store adapter required (workers run on Lambda).', { error: msg });
+ throw new Error(
+ 'Job store requires a persistent backend. Set workflowSettings.jobStore.type or WORKER_DATABASE_TYPE to "mongodb" or "upstash-redis", and set the corresponding connection settings. ' +
+ \`Details: \${msg}\`
+ );
+ }
+ }
+
+ // Lazy-loaded storage adapter
+ let storageAdapter: JobStoreAdapter | null = null;
+ function getAdapter(): JobStoreAdapter {
+ if (!storageAdapter) {
+ storageAdapter = getStorageAdapter();
+ }
+ return storageAdapter;
+ }
+
+ /**
+ * Store a job record.
+ */
+ export async function setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+ try {
+ const adapter = getAdapter();
+ await adapter.setJob(jobId, data);
+ } catch (error: any) {
+ console.error('[JobStore] Error setting job:', {
+ jobId,
+ error: error?.message || String(error),
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+ });
+ throw error;
+ }
+ }
+
+ /**
+ * Get a job record.
+ */
+ export async function getJob(jobId: string): Promise<JobRecord | null> {
+ try {
+ const adapter = getAdapter();
+ return await adapter.getJob(jobId);
+ } catch (error: any) {
+ console.error('[JobStore] Error getting job:', {
+ jobId,
+ error: error?.message || String(error),
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+ });
+ throw error;
+ }
+ }
+
+ /**
+ * Update a job record.
+ */
+ export async function updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+ try {
+ const adapter = getAdapter();
+ await adapter.updateJob(jobId, data);
+ } catch (error: any) {
+ console.error('[JobStore] Error updating job:', {
+ jobId,
+ updates: Object.keys(data),
+ error: error?.message || String(error),
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+ });
+ throw error;
+ }
+ }
+
+ /**
+ * Append an internal (child) job to a parent job's internalJobs list.
+ * Used when a worker dispatches another worker (ctx.dispatchWorker).
+ */
+ export async function appendInternalJob(
+ parentJobId: string,
+ entry: InternalJobEntry
+ ): Promise<void> {
+ try {
+ const adapter = getAdapter();
+ await adapter.appendInternalJob(parentJobId, entry);
+ } catch (error: any) {
+ console.error('[JobStore] Error appending internal job:', {
+ parentJobId,
+ entry,
+ error: error?.message || String(error),
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+ });
+ throw error;
+ }
+ }
+
+ /**
+ * List jobs by worker ID.
+ */
+ export async function listJobsByWorker(workerId: string): Promise<JobRecord[]> {
+ try {
+ const adapter = getAdapter();
+ return await adapter.listJobsByWorker(workerId);
+ } catch (error: any) {
+ console.error('[JobStore] Error listing jobs by worker:', {
+ workerId,
+ error: error?.message || String(error),
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+ });
+ throw error;
+ }
+ }
+ `,
+ "stores/mongoAdapter.ts": `/**
+ * MongoDB adapter for job store.
+ *
+ * Provides persistent storage for worker job state using MongoDB.
+ *
+ * Configuration (from microfox.config.ts or env vars):
+ * - workflowSettings.jobStore.mongodb.uri or DATABASE_MONGODB_URI/MONGODB_URI: MongoDB connection string
+ * - workflowSettings.jobStore.mongodb.db or DATABASE_MONGODB_DB/MONGODB_DB: Database name (default: 'ai_router')
+ *
+ * Collection name: config -> workflowSettings.jobStore.mongodb.workerJobsCollection
+ * (default: 'worker_jobs'). Env: MONGODB_WORKER_JOBS_COLLECTION then DATABASE_MONGODB_WORKER_JOBS_COLLECTION.
+ */
+
+ import { MongoClient, type Db, type Collection } from 'mongodb';
+ import type { JobRecord, InternalJobEntry } from './jobStore';
+
+ declare global {
+ // eslint-disable-next-line no-var
+ var __workflowMongoClientPromise: Promise<MongoClient> | undefined;
+ }
+
+ function getMongoUri(): string {
+ // Try to get from config first, fallback to env vars
+ let uri: string | undefined;
+ try {
+ const config = require('@/microfox.config').StudioConfig as {
+ workflowSettings?: { jobStore?: { mongodb?: { uri?: string } } };
+ };
+ uri = config?.workflowSettings?.jobStore?.mongodb?.uri;
+ } catch (error) {
+ // Config not available, use env vars
+ }
+
+ uri = uri || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
+
+ if (!uri) {
+ throw new Error(
+ 'Missing MongoDB connection string. Set workflowSettings.jobStore.mongodb.uri in microfox.config.ts or environment variable DATABASE_MONGODB_URI or MONGODB_URI.'
+ );
+ }
+ return uri;
+ }
+
+ function getMongoDbName(): string {
+ // Try to get from config first, fallback to env vars
+ let dbName: string | undefined;
+ try {
+ const config = require('@/microfox.config').StudioConfig as {
+ workflowSettings?: { jobStore?: { mongodb?: { db?: string } } };
+ };
+ dbName = config?.workflowSettings?.jobStore?.mongodb?.db;
+ } catch (error) {
+ // Config not available, use env vars
+ }
+
+ return dbName || process.env.DATABASE_MONGODB_DB || process.env.MONGODB_DB || 'ai_router';
+ }
+
+ function getWorkerJobsCollection(): string {
+ let collection: string | undefined;
+ try {
+ const config = require('@/microfox.config').StudioConfig as {
+ workflowSettings?: { jobStore?: { mongodb?: { workerJobsCollection?: string } } };
+ };
+ collection = config?.workflowSettings?.jobStore?.mongodb?.workerJobsCollection;
+ } catch {
+ // Config not available
+ }
+ return (
+ collection ||
+ process.env.MONGODB_WORKER_JOBS_COLLECTION ||
+ process.env.DATABASE_MONGODB_WORKER_JOBS_COLLECTION ||
+ 'worker_jobs'
+ );
+ }
+
+ async function getMongoClient(): Promise<MongoClient> {
+ const uri = getMongoUri();
+
+ // Reuse a single client across hot reloads / lambda invocations when possible.
+ if (!globalThis.__workflowMongoClientPromise) {
+ const client = new MongoClient(uri, {
+ // Keep defaults conservative; works on both local dev and Lambda.
+ maxPoolSize: 10,
+ minPoolSize: 0,
+ serverSelectionTimeoutMS: 10_000,
+ });
+ globalThis.__workflowMongoClientPromise = client.connect();
+ }
+
+ return globalThis.__workflowMongoClientPromise;
+ }
+
+ async function getMongoDb(): Promise<Db> {
+ const client = await getMongoClient();
+ return client.db(getMongoDbName());
+ }
+
+ /** Export for queue job store (shared MongoDB connection). */
+ export async function getWorkflowDb(): Promise<Db> {
+ return getMongoDb();
+ }
+
+ async function getCollection(): Promise<Collection<JobRecord & { _id: string }>> {
+ const db = await getMongoDb();
+ return db.collection<JobRecord & { _id: string }>(getWorkerJobsCollection());
+ }
+
+ /**
+ * MongoDB storage adapter for job store.
+ */
+ export const mongoJobStore = {
+ async setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+ const now = new Date().toISOString();
+ const collection = await getCollection();
+
+ const existing = await collection.findOne({ _id: jobId });
+
+ const record: JobRecord = {
+ jobId,
+ workerId: data.workerId || existing?.workerId || '',
+ status: data.status || existing?.status || 'queued',
+ input: data.input !== undefined ? data.input : existing?.input || {},
+ output: data.output !== undefined ? data.output : existing?.output,
+ error: data.error !== undefined ? data.error : existing?.error,
+ metadata: { ...existing?.metadata, ...data.metadata },
+ createdAt: existing?.createdAt || now,
+ updatedAt: now,
+ completedAt: data.completedAt || existing?.completedAt,
+ };
+
+ // Set completedAt if status changed to completed/failed
+ if (data.status && ['completed', 'failed'].includes(data.status) && !record.completedAt) {
+ record.completedAt = now;
+ }
+
+ await collection.updateOne(
+ { _id: jobId },
+ {
+ $set: {
+ ...record,
+ _id: jobId,
+ },
+ },
+ { upsert: true }
+ );
+ },
+
+ async getJob(jobId: string): Promise<JobRecord | null> {
+ const collection = await getCollection();
+ const doc = await collection.findOne({ _id: jobId });
+
+ if (!doc) {
+ return null;
+ }
+
+ // Convert MongoDB document to JobRecord (remove _id, use jobId)
+ const { _id, ...record } = doc;
+ return record as JobRecord;
+ },
+
+ async updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+ const collection = await getCollection();
+ const existing = await collection.findOne({ _id: jobId });
+
+ if (!existing) {
+ throw new Error(\`Job \${jobId} not found\`);
+ }
+
+ const now = new Date().toISOString();
+ const update: any = {
+ $set: {
+ updatedAt: now,
+ },
+ };
+
+ if (data.status !== undefined) {
+ update.$set.status = data.status;
+ if (['completed', 'failed'].includes(data.status) && !existing.completedAt) {
+ update.$set.completedAt = now;
+ }
+ }
+ if (data.output !== undefined) {
+ update.$set.output = data.output;
+ }
+ if (data.error !== undefined) {
+ update.$set.error = data.error;
+ }
+ if (data.metadata !== undefined) {
+ update.$set.metadata = { ...existing.metadata, ...data.metadata };
+ }
+
+ await collection.updateOne({ _id: jobId }, update);
+ },
+
+ async appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void> {
+ const collection = await getCollection();
+ const now = new Date().toISOString();
+ await collection.updateOne(
+ { _id: parentJobId },
+ {
+ $push: { internalJobs: entry },
+ $set: { updatedAt: now },
+ }
+ );
+ },
+
+ async listJobsByWorker(workerId: string): Promise<JobRecord[]> {
+ const collection = await getCollection();
+ const docs = await collection
+ .find({ workerId })
+ .sort({ createdAt: -1 })
+ .toArray();
+
+ return docs.map((doc) => {
+ const { _id, ...record } = doc;
+ return record as JobRecord;
+ });
+ },
+ };
+ `,
+ "stores/redisAdapter.ts": `/**
+ * Upstash Redis adapter for workflow/worker job store.
+ *
+ * Uses a hash-per-job model with key-level TTL for fast lookups by jobId.
+ *
+ * Configuration (from microfox.config.ts or env vars):
+ * - workflowSettings.jobStore.redis; env: WORKER_UPSTASH_REDIS_REST_URL, WORKER_UPSTASH_REDIS_REST_TOKEN,
+ * WORKER_UPSTASH_REDIS_JOBS_PREFIX (default: worker:jobs:), WORKER_JOBS_TTL_SECONDS
+ */
+
+ import { Redis } from '@upstash/redis';
+ import type { JobRecord, InternalJobEntry } from './jobStore';
+
+ let redisClient: Redis | null = null;
+ let redisUrl: string | undefined;
+ let redisToken: string | undefined;
+ let jobKeyPrefix: string = 'worker:jobs:';
+ const defaultTtlSeconds = 60 * 60 * 24 * 7; // 7 days
+
+ function loadConfig() {
+ try {
+ // Prefer config from microfox.config.ts if present
+ const config = require('@/microfox.config').StudioConfig as {
+ workflowSettings?: {
+ jobStore?: {
+ redis?: {
+ url?: string;
+ token?: string;
+ keyPrefix?: string;
+ ttlSeconds?: number;
+ };
+ };
+ };
+ };
+ const redisCfg = config?.workflowSettings?.jobStore?.redis;
+ redisUrl = redisCfg?.url || redisUrl;
+ redisToken = redisCfg?.token || redisToken;
+ if (redisCfg?.keyPrefix) {
+ jobKeyPrefix = redisCfg.keyPrefix;
+ }
+ } catch {
+ // Config optional; fall back to env vars
+ }
+
+ redisUrl =
+ redisUrl ||
+ process.env.WORKER_UPSTASH_REDIS_REST_URL ||
+ process.env.UPSTASH_REDIS_REST_URL ||
+ process.env.UPSTASH_REDIS_URL;
+ redisToken =
+ redisToken ||
+ process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||
+ process.env.UPSTASH_REDIS_REST_TOKEN ||
+ process.env.UPSTASH_REDIS_TOKEN;
+ jobKeyPrefix =
+ jobKeyPrefix ||
+ process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX ||
+ process.env.UPSTASH_REDIS_KEY_PREFIX ||
+ 'worker:jobs:';
+ }
+
+ function getRedis(): Redis {
+ if (!redisClient) {
+ loadConfig();
+ if (!redisUrl || !redisToken) {
+ throw new Error(
+ 'Missing Upstash Redis configuration. Set workflowSettings.jobStore.redis in microfox.config.ts or WORKER_UPSTASH_REDIS_REST_URL / WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL / UPSTASH_REDIS_REST_TOKEN).'
+ );
+ }
+ redisClient = new Redis({
+ url: redisUrl,
+ token: redisToken,
+ });
+ }
+ return redisClient;
+ }
+
+ function jobKey(jobId: string): string {
+ return \`\${jobKeyPrefix}\${jobId}\`;
+ }
+
+ /** Separate LIST key for internal job refs; each RPUSH is atomic so no race when appending multiple. */
+ function internalListKey(jobId: string): string {
+ return \`\${jobKeyPrefix}\${jobId}:internal\`;
+ }
+
+ function workerIndexKey(workerId: string): string {
+ // Secondary index: worker -> set of jobIds
+ return \`\${jobKeyPrefix}by-worker:\${workerId}\`;
+ }
+
+ function getJobTtlSeconds(): number {
+ const raw =
+ process.env.WORKER_JOBS_TTL_SECONDS || process.env.WORKFLOW_JOBS_TTL_SECONDS;
+ if (!raw) return defaultTtlSeconds;
+ const n = parseInt(raw, 10);
+ return Number.isFinite(n) && n > 0 ? n : defaultTtlSeconds;
+ }
+
+ async function loadJob(jobId: string): Promise<JobRecord | null> {
+ const redis = getRedis();
+ const key = jobKey(jobId);
+ const data = await redis.hgetall<Record<string, string>>(key);
+ if (!data || Object.keys(data).length === 0) return null;
+
+ const parseJson = <T>(val?: string | null): T | undefined => {
+ if (!val) return undefined;
+ try {
+ return JSON.parse(val) as T;
+ } catch {
+ return undefined;
+ }
+ };
+
+ // Prefer atomic list key for internal jobs; fallback to hash field for old records
+ const listKey = internalListKey(jobId);
+ const listItems = (await redis.lrange(listKey, 0, -1)) ?? [];
+ let internalJobs: InternalJobEntry[] | undefined;
+ if (listItems.length > 0) {
+ internalJobs = listItems
+ .map((s) => {
+ try {
+ return JSON.parse(s) as InternalJobEntry;
+ } catch {
+ return null;
+ }
+ })
+ .filter((e): e is InternalJobEntry => e != null);
+ } else {
+ internalJobs = parseJson<InternalJobEntry[]>(data.internalJobs);
+ }
+
+ const record: JobRecord = {
+ jobId: data.jobId,
+ workerId: data.workerId,
+ status: (data.status as JobRecord['status']) || 'queued',
+ input: parseJson<any>(data.input) ?? {},
+ output: parseJson<any>(data.output),
+ error: parseJson<any>(data.error),
+ metadata: parseJson<Record<string, any>>(data.metadata) ?? {},
+ internalJobs,
+ createdAt: data.createdAt,
+ updatedAt: data.updatedAt,
+ completedAt: data.completedAt,
+ };
+
+ return record;
+ }
+
+ export const redisJobStore = {
+ async setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+ const redis = getRedis();
+ const key = jobKey(jobId);
+ const now = new Date().toISOString();
+
+ const existing = await loadJob(jobId);
+
+ const record: JobRecord = {
+ jobId,
+ workerId: data.workerId || existing?.workerId || '',
+ status: data.status || existing?.status || 'queued',
+ input: data.input !== undefined ? data.input : existing?.input || {},
+ output: data.output !== undefined ? data.output : existing?.output,
+ error: data.error !== undefined ? data.error : existing?.error,
+ metadata: { ...(existing?.metadata || {}), ...(data.metadata || {}) },
+ internalJobs: existing?.internalJobs,
+ createdAt: existing?.createdAt || now,
+ updatedAt: now,
+ completedAt: data.completedAt || existing?.completedAt,
+ };
+
+ if (data.status && ['completed', 'failed'].includes(data.status) && !record.completedAt) {
+ record.completedAt = now;
+ }
+
+ const toSet: Record<string, string> = {
+ jobId: record.jobId,
+ workerId: record.workerId,
+ status: record.status,
+ input: JSON.stringify(record.input ?? {}),
+ metadata: JSON.stringify(record.metadata ?? {}),
+ createdAt: record.createdAt,
+ updatedAt: record.updatedAt,
+ };
+ if (record.output !== undefined) {
+ toSet.output = JSON.stringify(record.output);
+ }
+ if (record.error !== undefined) {
+ toSet.error = JSON.stringify(record.error);
+ }
+ if (record.internalJobs) {
+ toSet.internalJobs = JSON.stringify(record.internalJobs);
+ }
+ if (record.completedAt) {
+ toSet.completedAt = record.completedAt;
+ }
+
+ await redis.hset(key, toSet);
+ const ttl = getJobTtlSeconds();
+ if (ttl > 0) {
+ await redis.expire(key, ttl);
+ }
+
+ // Maintain secondary index per worker
+ if (record.workerId) {
+ await redis.sadd(workerIndexKey(record.workerId), jobId);
+ }
+ },
+
+ async getJob(jobId: string): Promise<JobRecord | null> {
+ return loadJob(jobId);
+ },
+
+ async updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+ const redis = getRedis();
+ const key = jobKey(jobId);
+ const existing = await loadJob(jobId);
+ if (!existing) {
+ throw new Error(\`Job \${jobId} not found\`);
+ }
+
+ const now = new Date().toISOString();
+ const update: Partial<JobRecord> = {
+ updatedAt: now,
+ };
+
+ if (data.status !== undefined) {
+ update.status = data.status;
+ if (['completed', 'failed'].includes(data.status) && !existing.completedAt) {
+ update.completedAt = now;
+ }
+ }
+ if (data.output !== undefined) {
+ update.output = data.output;
+ }
+ if (data.error !== undefined) {
+ update.error = data.error;
+ }
+ if (data.metadata !== undefined) {
+ update.metadata = { ...(existing.metadata || {}), ...data.metadata };
+ }
+
+ const toSet: Record<string, string> = {
+ updatedAt: now,
+ };
+ if (update.status !== undefined) {
+ toSet.status = update.status;
+ }
+ if (update.output !== undefined) {
+ toSet.output = JSON.stringify(update.output);
+ }
+ if (update.error !== undefined) {
+ toSet.error = JSON.stringify(update.error);
+ }
+ if (update.metadata !== undefined) {
+ toSet.metadata = JSON.stringify(update.metadata);
+ }
+ if (update.completedAt) {
+ toSet.completedAt = update.completedAt;
+ }
+
+ await redis.hset(key, toSet);
+ const ttl = getJobTtlSeconds();
+ if (ttl > 0) {
+ await redis.expire(key, ttl);
+ }
+ },
+
+ async appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void> {
+ const redis = getRedis();
+ const listKey = internalListKey(parentJobId);
+ await redis.rpush(listKey, JSON.stringify(entry));
+ const mainKey = jobKey(parentJobId);
+ await redis.hset(mainKey, { updatedAt: new Date().toISOString() });
+ const ttl = getJobTtlSeconds();
+ if (ttl > 0) {
+ await redis.expire(listKey, ttl);
+ await redis.expire(mainKey, ttl);
+ }
+ },
+
+ async listJobsByWorker(workerId: string): Promise<JobRecord[]> {
+ const redis = getRedis();
+ const indexKey = workerIndexKey(workerId);
+ const jobIds = (await redis.smembers(indexKey)) ?? [];
+ const jobs: JobRecord[] = [];
+ for (const jobId of jobIds) {
+ const job = await loadJob(jobId);
+ if (job) {
+ jobs.push(job);
+ }
+ }
+ // Most recent first
+ jobs.sort((a, b) => b.createdAt.localeCompare(a.createdAt));
+ return jobs;
+ },
+ };
+ `,
2762
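// A minimal usage sketch, assuming the setJob/updateJob/getJob exports generated
// above; the worker id and input are hypothetical.
import { setJob, updateJob, getJob } from './stores/jobStore';

async function jobStoreExample(): Promise<void> {
  const jobId = 'job-' + Date.now();
  // Create the record in the 'queued' state, then mark it completed.
  await setJob(jobId, { jobId, workerId: 'demo-worker', status: 'queued', input: { n: 1 }, metadata: { source: 'example' } });
  await updateJob(jobId, { status: 'completed', output: { ok: true } });
  const job = await getJob(jobId); // job?.status === 'completed', completedAt now set
}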
+ "stores/queueJobStore.ts": `/**
2763
+ * Queue job store for tracking multi-step queue execution.
2764
+ *
2765
+ * Stores a single record per queue run with steps array containing:
2766
+ * - workerId, workerJobId (worker_job id), status, input, output, startedAt, completedAt, error
2767
+ *
2768
+ * Uses MongoDB or Upstash Redis (same backend as worker_jobs), based on WORKER_DATABASE_TYPE.
2769
+ * Collection/key prefix: queue_jobs / worker:queue-jobs:
2770
+ */
2771
+
2772
+ import type { Collection } from 'mongodb';
2773
+ import { Redis } from '@upstash/redis';
2774
+ import { getWorkflowDb } from './mongoAdapter';
2775
+
2776
+ export interface QueueJobStep {
2777
+ workerId: string;
2778
+ workerJobId: string;
2779
+ status: 'queued' | 'running' | 'completed' | 'failed';
2780
+ input?: unknown;
2781
+ output?: unknown;
2782
+ error?: { message: string };
2783
+ startedAt?: string;
2784
+ completedAt?: string;
2785
+ }
2786
+
2787
+ export interface QueueJobRecord {
2788
+ id: string;
2789
+ queueId: string;
2790
+ status: 'running' | 'completed' | 'failed' | 'partial';
2791
+ steps: QueueJobStep[];
2792
+ metadata?: Record<string, unknown>;
2793
+ createdAt: string;
2794
+ updatedAt: string;
2795
+ completedAt?: string;
2796
+ }
2797
+
2798
+ // === Backend selection ===
2799
+
2800
+ function getStoreType(): 'mongodb' | 'upstash-redis' {
2801
+ const t = (process.env.WORKER_DATABASE_TYPE || 'upstash-redis').toLowerCase();
2802
+ return t === 'mongodb' ? 'mongodb' : 'upstash-redis';
2803
+ }
2804
+
2805
+ function preferMongo(): boolean {
2806
+ return getStoreType() === 'mongodb';
2807
+ }
2808
+
2809
+ function preferRedis(): boolean {
2810
+ return getStoreType() !== 'mongodb';
2811
+ }
2812
+
2813
+ // === MongoDB backend ===
2814
+
2815
+ function getQueueJobsCollectionName(): string {
2816
+ return process.env.MONGODB_QUEUE_JOBS_COLLECTION || 'queue_jobs';
2817
+ }
2818
+
2819
+ async function getCollection(): Promise<Collection<QueueJobRecord & { _id: string }>> {
2820
+ const db = await getWorkflowDb();
2821
+ return db.collection<QueueJobRecord & { _id: string }>(getQueueJobsCollectionName());
2822
+ }
2823
+
2824
+ // === Redis backend ===
2825
+
2826
+ const redisUrl =
2827
+ process.env.WORKER_UPSTASH_REDIS_REST_URL ||
2828
+ process.env.UPSTASH_REDIS_REST_URL ||
2829
+ process.env.UPSTASH_REDIS_URL;
2830
+ const redisToken =
2831
+ process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||
2832
+ process.env.UPSTASH_REDIS_REST_TOKEN ||
2833
+ process.env.UPSTASH_REDIS_TOKEN;
2834
+ const queueKeyPrefix =
2835
+ process.env.WORKER_UPSTASH_REDIS_QUEUE_PREFIX ||
2836
+ process.env.UPSTASH_REDIS_QUEUE_PREFIX ||
2837
+ 'worker:queue-jobs:';
2838
+
2839
+ let redisClient: Redis | null = null;
2840
+
2841
+ function getRedis(): Redis {
2842
+ if (!redisUrl || !redisToken) {
2843
+ throw new Error(
2844
+ 'Upstash Redis configuration missing for queue job store. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN).'
2845
+ );
2846
+ }
2847
+ if (!redisClient) {
2848
+ redisClient = new Redis({
2849
+ url: redisUrl,
2850
+ token: redisToken,
2851
+ });
2852
+ }
2853
+ return redisClient;
2854
+ }
2855
+
2856
+ function queueKey(id: string): string {
2857
+ return \`\${queueKeyPrefix}\${id}\`;
2858
+ }
2859
+
2860
+ /** Hash values from Upstash hgetall may be auto-parsed (array/object) or raw strings. */
2861
+ function stepsFromHash(val: unknown): QueueJobStep[] {
2862
+ if (Array.isArray(val)) return val as QueueJobStep[];
2863
+ if (typeof val === 'string') {
2864
+ try {
2865
+ const parsed = JSON.parse(val) as QueueJobStep[];
2866
+ return Array.isArray(parsed) ? parsed : [];
2867
+ } catch {
2868
+ return [];
2869
+ }
2870
+ }
2871
+ return [];
2872
+ }
2873
+
2874
+ function metadataFromHash(val: unknown): Record<string, unknown> {
2875
+ if (val && typeof val === 'object' && !Array.isArray(val)) return val as Record<string, unknown>;
2876
+ if (typeof val === 'string') {
2877
+ try {
2878
+ const parsed = JSON.parse(val) as Record<string, unknown>;
2879
+ return parsed && typeof parsed === 'object' ? parsed : {};
2880
+ } catch {
2881
+ return {};
2882
+ }
2883
+ }
2884
+ return {};
2885
+ }
2886
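// The two input shapes the helpers above tolerate, since Upstash may return hash
// values either auto-parsed or as raw JSON strings (values hypothetical):
//   stepsFromHash('[{"workerId":"a","workerJobId":"j1","status":"queued"}]')  // raw string
//   stepsFromHash([{ workerId: 'a', workerJobId: 'j1', status: 'queued' }])   // auto-parsed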
+
2887
+ async function loadQueueJobRedis(queueJobId: string): Promise<QueueJobRecord | null> {
2888
+ const redis = getRedis();
2889
+ const key = queueKey(queueJobId);
2890
+ const data = await redis.hgetall(key);
2891
+ if (!data || typeof data !== 'object' || Object.keys(data).length === 0) return null;
2892
+ const record: QueueJobRecord = {
2893
+ id: (data as Record<string, unknown>).id === undefined ? queueJobId : String((data as Record<string, unknown>).id),
2894
+ queueId: String((data as Record<string, unknown>).queueId ?? ''),
2895
+ status: (String((data as Record<string, unknown>).status ?? 'running') as QueueJobRecord['status']),
2896
+ steps: stepsFromHash((data as Record<string, unknown>).steps),
2897
+ metadata: metadataFromHash((data as Record<string, unknown>).metadata),
2898
+ createdAt: String((data as Record<string, unknown>).createdAt ?? new Date().toISOString()),
2899
+ updatedAt: String((data as Record<string, unknown>).updatedAt ?? new Date().toISOString()),
2900
+ completedAt: (data as Record<string, unknown>).completedAt != null ? String((data as Record<string, unknown>).completedAt) : undefined,
2901
+ };
2902
+ return record;
2903
+ }
2904
+
2905
+ export async function createQueueJob(
2906
+ id: string,
2907
+ queueId: string,
2908
+ firstStep: { workerId: string; workerJobId: string },
2909
+ metadata?: Record<string, unknown>
2910
+ ): Promise<void> {
2911
+ const now = new Date().toISOString();
2912
+ const record: QueueJobRecord = {
2913
+ id,
2914
+ queueId,
2915
+ status: 'running',
2916
+ steps: [
2917
+ {
2918
+ workerId: firstStep.workerId,
2919
+ workerJobId: firstStep.workerJobId,
2920
+ status: 'queued',
2921
+ },
2922
+ ],
2923
+ metadata: metadata ?? {},
2924
+ createdAt: now,
2925
+ updatedAt: now,
2926
+ };
2927
+
2928
+ if (preferRedis()) {
2929
+ const redis = getRedis();
2930
+ const key = queueKey(id);
2931
+ const toSet: Record<string, string> = {
2932
+ id: record.id,
2933
+ queueId: record.queueId,
2934
+ status: record.status,
2935
+ steps: JSON.stringify(record.steps),
2936
+ metadata: JSON.stringify(record.metadata || {}),
2937
+ createdAt: record.createdAt,
2938
+ updatedAt: record.updatedAt,
2939
+ };
2940
+ await redis.hset(key, toSet);
2941
+ const ttlSeconds =
2942
+ typeof process.env.WORKER_QUEUE_JOBS_TTL_SECONDS === 'string'
2943
+ ? parseInt(process.env.WORKER_QUEUE_JOBS_TTL_SECONDS, 10) || 60 * 60 * 24 * 7
2944
+ : typeof process.env.WORKER_JOBS_TTL_SECONDS === 'string'
2945
+ ? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || 60 * 60 * 24 * 7
2946
+ : 60 * 60 * 24 * 7; // 7 days default
2947
+ if (ttlSeconds > 0) {
2948
+ await redis.expire(key, ttlSeconds);
2949
+ }
2950
+ return;
2951
+ }
2952
+
2953
+ const collection = await getCollection();
2954
+ await collection.updateOne(
2955
+ { _id: id },
2956
+ { $set: { ...record, _id: id } },
2957
+ { upsert: true }
2958
+ );
2959
+ }
2960
+
2961
+ export async function updateQueueStep(
2962
+ queueJobId: string,
2963
+ stepIndex: number,
2964
+ update: {
2965
+ status?: 'queued' | 'running' | 'completed' | 'failed';
2966
+ input?: unknown;
2967
+ output?: unknown;
2968
+ error?: { message: string };
2969
+ startedAt?: string;
2970
+ completedAt?: string;
2971
+ }
2972
+ ): Promise<void> {
2973
+ const collection = await getCollection();
2974
+ const now = new Date().toISOString();
2975
+ const setKey = \`steps.\${stepIndex}\`;
2976
+ const existing = await collection.findOne({ _id: queueJobId });
2977
+ if (!existing) {
2978
+ throw new Error(\`Queue job \${queueJobId} not found\`);
2979
+ }
2980
+ const step = existing.steps[stepIndex];
2981
+ if (!step) {
2982
+ throw new Error(\`Queue job \${queueJobId} has no step at index \${stepIndex}\`);
2983
+ }
2984
+ const mergedStep: QueueJobStep = {
2985
+ ...step,
2986
+ ...(update.status !== undefined && { status: update.status }),
2987
+ ...(update.input !== undefined && { input: update.input }),
2988
+ ...(update.output !== undefined && { output: update.output }),
2989
+ ...(update.error !== undefined && { error: update.error }),
2990
+ startedAt: update.startedAt ?? (update.status === 'running' ? now : step.startedAt),
2991
+ completedAt:
2992
+ update.completedAt ??
2993
+ (['completed', 'failed'].includes(update.status ?? '') ? now : step.completedAt),
2994
+ };
2995
+ const updateDoc: any = {
2996
+ $set: {
2997
+ [setKey]: mergedStep,
2998
+ updatedAt: now,
2999
+ },
3000
+ };
3001
+ if (update.status === 'failed') {
3002
+ updateDoc.$set.status = 'failed';
3003
+ if (!existing.completedAt) updateDoc.$set.completedAt = now;
3004
+ } else if (update.status === 'completed' && stepIndex === existing.steps.length - 1) {
3005
+ updateDoc.$set.status = 'completed';
3006
+ if (!existing.completedAt) updateDoc.$set.completedAt = now;
3007
+ }
3008
+ await collection.updateOne({ _id: queueJobId }, updateDoc);
3009
+ }
3010
+
3011
+ export async function appendQueueStep(
3012
+ queueJobId: string,
3013
+ step: { workerId: string; workerJobId: string }
3014
+ ): Promise<void> {
3015
+ const collection = await getCollection();
3016
+ const now = new Date().toISOString();
3017
+ await collection.updateOne(
3018
+ { _id: queueJobId },
3019
+ {
3020
+ $push: {
3021
+ steps: {
3022
+ workerId: step.workerId,
3023
+ workerJobId: step.workerJobId,
3024
+ status: 'queued',
3025
+ },
3026
+ },
3027
+ $set: { updatedAt: now },
3028
+ }
3029
+ );
3030
+ }
3031
+
3032
+ /**
3033
+ * Update queue job overall status (e.g. from webhook when queue run completes).
3034
+ */
3035
+ export async function updateQueueJob(
3036
+ queueJobId: string,
3037
+ update: { status?: QueueJobRecord['status']; completedAt?: string }
3038
+ ): Promise<void> {
3039
+ const now = new Date().toISOString();
3040
+ if (preferRedis()) {
3041
+ const redis = getRedis();
3042
+ const key = queueKey(queueJobId);
3043
+ const existing = await loadQueueJobRedis(queueJobId);
3044
+ if (!existing) throw new Error(\`Queue job \${queueJobId} not found\`);
3045
+ const toSet: Record<string, string> = {
3046
+ status: update.status ?? existing.status,
3047
+ updatedAt: now,
3048
+ };
3049
+ if (update.completedAt !== undefined) toSet.completedAt = update.completedAt;
3050
+ await redis.hset(key, toSet);
3051
+ return;
3052
+ }
3053
+ const collection = await getCollection();
3054
+ const setDoc: Record<string, string> = { updatedAt: now };
3055
+ if (update.status !== undefined) setDoc.status = update.status;
3056
+ if (update.completedAt !== undefined) setDoc.completedAt = update.completedAt;
3057
+ await collection.updateOne({ _id: queueJobId }, { $set: setDoc });
3058
+ }
3059
+
3060
+ export async function getQueueJob(queueJobId: string): Promise<QueueJobRecord | null> {
3061
+ if (preferRedis()) {
3062
+ return loadQueueJobRedis(queueJobId);
3063
+ }
3064
+ const collection = await getCollection();
3065
+ const doc = await collection.findOne({ _id: queueJobId });
3066
+ if (!doc) return null;
3067
+ const { _id, ...record } = doc;
3068
+ return { ...record, id: _id };
3069
+ }
3070
+
3071
+ export async function listQueueJobs(
3072
+ queueId?: string,
3073
+ limit = 50
3074
+ ): Promise<QueueJobRecord[]> {
3075
+ if (preferRedis()) {
3076
+ // Redis: scan for keys matching prefix, then load each
3077
+ // Note: This is less efficient than MongoDB queries, but acceptable for small datasets
3078
+ const redis = getRedis();
3079
+ const pattern = queueKey('*');
3080
+ const keys: string[] = [];
3081
+ let cursor: number = 0;
3082
+ do {
3083
+ const result = await redis.scan(cursor, { match: pattern, count: 100 });
3084
+ cursor = typeof result[0] === 'number' ? result[0] : parseInt(String(result[0]), 10);
3085
+ keys.push(...(result[1] || []));
3086
+ } while (cursor !== 0);
3087
+
3088
+ const jobs = await Promise.all(
3089
+ keys.map((key) => {
3090
+ const id = key.replace(queueKeyPrefix, '');
3091
+ return loadQueueJobRedis(id);
3092
+ })
3093
+ );
3094
+ const valid = jobs.filter((j): j is QueueJobRecord => j !== null);
3095
+ const filtered = queueId ? valid.filter((j) => j.queueId === queueId) : valid;
3096
+ return filtered
3097
+ .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime())
3098
+ .slice(0, limit);
3099
+ }
3100
+ const collection = await getCollection();
3101
+ const filter = queueId ? { queueId } : {};
3102
+ const docs = await collection
3103
+ .find(filter)
3104
+ .sort({ createdAt: -1 })
3105
+ .limit(limit)
3106
+ .toArray();
3107
+ return docs.map((doc) => {
3108
+ const { _id, ...record } = doc;
3109
+ return { ...record, id: _id };
3110
+ });
3111
+ }
3112
+ `,
3113
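// A minimal sketch of one queue run driven through the store above; the ids are
// hypothetical. Note that updateQueueStep/appendQueueStep in this template use the
// MongoDB backend regardless of WORKER_DATABASE_TYPE.
import { createQueueJob, updateQueueStep, appendQueueStep, getQueueJob } from './stores/queueJobStore';

async function queueJobExample(): Promise<void> {
  await createQueueJob('qjob-1', 'demo-queue', { workerId: 'step-a', workerJobId: 'job-a' });
  await updateQueueStep('qjob-1', 0, { status: 'running' });
  await updateQueueStep('qjob-1', 0, { status: 'completed', output: { ok: true } });
  await appendQueueStep('qjob-1', { workerId: 'step-b', workerJobId: 'job-b' });
  const record = await getQueueJob('qjob-1'); // record?.steps[0].status === 'completed'
}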
+ "registry/workers.ts": `/**
3114
+ * Worker registry system.
3115
+ *
3116
+ * Uses only the GET /workers/config API as the source of truth.
3117
+ * No directory scanning, no dynamic imports, no .worker.ts loading.
3118
+ *
3119
+ * - getWorker(workerId): returns a synthetic WorkerAgent that dispatches via POST /workers/trigger
3120
+ * - listWorkers(): returns worker IDs from the config API response
3121
+ * - getQueueRegistry(): returns QueueRegistry from config (for dispatchQueue)
3122
+ */
3123
+
3124
+ import type { WorkerAgent, WorkerQueueRegistry } from '@microfox/ai-worker';
3125
+
3126
+ /** Queue step config (matches WorkerQueueStep from @microfox/ai-worker). */
3127
+ export interface QueueStepConfig {
3128
+ workerId: string;
3129
+ delaySeconds?: number;
3130
+ mapInputFromPrev?: string;
3131
+ }
3132
+
3133
+ /** Queue config from workers/config API (matches WorkerQueueConfig structure). */
3134
+ export interface QueueConfig {
3135
+ id: string;
3136
+ steps: QueueStepConfig[];
3137
+ schedule?: string | { rate: string; enabled?: boolean; input?: Record<string, any> };
3138
+ }
3139
+
3140
+ export interface WorkersConfig {
3141
+ version?: string;
3142
+ stage?: string;
3143
+ region?: string;
3144
+ workers: Record<string, { queueUrl: string; region: string }>;
3145
+ queues?: QueueConfig[];
3146
+ }
3147
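// Illustrative payload shape (values hypothetical) returned by GET /workers/config
// and parsed into WorkersConfig above:
//   {
//     "workers": { "demo-worker": { "queueUrl": "https://sqs.us-east-1.amazonaws.com/123/demo", "region": "us-east-1" } },
//     "queues": [{ "id": "demo-queue", "steps": [{ "workerId": "demo-worker" }] }]
//   }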
+
3148
+ let configCache: WorkersConfig | null = null;
3149
+
3150
+ function getConfigBaseUrl(): string {
3151
+ const raw =
3152
+ process.env.WORKERS_CONFIG_API_URL ||
3153
+ process.env.WORKER_BASE_URL;
3154
+ if (!raw?.trim()) {
3155
+ throw new Error(
3156
+ 'WORKERS_CONFIG_API_URL or WORKER_BASE_URL is required for the worker registry. ' +
3157
+ 'Set it to the base URL of your workers service (e.g. https://xxx.execute-api.us-east-1.amazonaws.com/prod).'
3158
+ );
3159
+ }
3160
+ const base = raw.trim().replace(/\\/+$/, '');
3161
+ if (base.endsWith('/workers/config')) {
3162
+ return base.replace(/\\/workers\\/config\\/?$/, '');
3163
+ }
3164
+ return base;
3165
+ }
3166
+
3167
+ function getConfigUrl(): string {
3168
+ const base = getConfigBaseUrl();
3169
+ return \`\${base}/workers/config\`;
3170
+ }
3171
+
3172
+ function getTriggerUrl(): string {
3173
+ const base = getConfigBaseUrl();
3174
+ return \`\${base}/workers/trigger\`;
3175
+ }
3176
+
3177
+ /**
3178
+ * Fetch and cache workers config from GET /workers/config.
3179
+ */
3180
+ export async function fetchWorkersConfig(): Promise<WorkersConfig> {
3181
+ if (configCache) {
3182
+ return configCache;
3183
+ }
3184
+ const configUrl = getConfigUrl();
3185
+ const headers: Record<string, string> = { 'Content-Type': 'application/json' };
3186
+ const apiKey = process.env.WORKERS_CONFIG_API_KEY;
3187
+ if (apiKey) {
3188
+ headers['x-workers-config-key'] = apiKey;
3189
+ }
3190
+ const res = await fetch(configUrl, { method: 'GET', headers });
3191
+ if (!res.ok) {
3192
+ throw new Error(
3193
+ \`[WorkerRegistry] GET \${configUrl} failed: \${res.status} \${res.statusText}\`
3194
+ );
3195
+ }
3196
+ const data = (await res.json()) as WorkersConfig;
3197
+ if (!data?.workers || typeof data.workers !== 'object') {
3198
+ throw new Error(
3199
+ '[WorkerRegistry] Invalid config: expected { workers: { [id]: { queueUrl, region } } }'
3200
+ );
3201
+ }
3202
+ configCache = data;
3203
+ const workerIds = Object.keys(data.workers);
3204
+ const queueIds = data.queues?.map((q) => q.id) ?? [];
3205
+ console.log('[WorkerRegistry] Config loaded', { workers: workerIds.length, queues: queueIds });
3206
+ return data;
3207
+ }
3208
+
3209
+ /**
3210
+ * Build a synthetic WorkerAgent that dispatches via POST /workers/trigger.
3211
+ * Matches the trigger API contract used by @microfox/ai-worker.
3212
+ */
3213
+ function createSyntheticAgent(workerId: string): WorkerAgent<any, any> {
3214
+ return {
3215
+ id: workerId,
3216
+ dispatch: async (input: any, options: any) => {
3217
+ const jobId =
3218
+ options?.jobId ||
3219
+ \`job-\${Date.now()}-\${Math.random().toString(36).slice(2, 11)}\`;
3220
+ const webhookUrl = options?.webhookUrl;
3221
+ const metadata = options?.metadata ?? {};
3222
+ const triggerUrl = getTriggerUrl();
3223
+ const messageBody = {
3224
+ workerId,
3225
+ jobId,
3226
+ input: input ?? {},
3227
+ context: {},
3228
+ webhookUrl: webhookUrl ?? undefined,
3229
+ metadata,
3230
+ timestamp: new Date().toISOString(),
3231
+ };
3232
+ const headers: Record<string, string> = {
3233
+ 'Content-Type': 'application/json',
3234
+ };
3235
+ const key = process.env.WORKERS_TRIGGER_API_KEY;
3236
+ if (key) {
3237
+ headers['x-workers-trigger-key'] = key;
3238
+ }
3239
+ const response = await fetch(triggerUrl, {
3240
+ method: 'POST',
3241
+ headers,
3242
+ body: JSON.stringify({ workerId, body: messageBody }),
3243
+ });
3244
+ if (!response.ok) {
3245
+ const text = await response.text().catch(() => '');
3246
+ throw new Error(
3247
+ \`Failed to trigger worker "\${workerId}": \${response.status} \${response.statusText}\${text ? \` - \${text}\` : ''}\`
3248
+ );
3249
+ }
3250
+ const data = (await response.json().catch(() => ({}))) as any;
3251
+ const messageId = data?.messageId ? String(data.messageId) : \`trigger-\${jobId}\`;
3252
+ return { messageId, status: 'queued' as const, jobId };
3253
+ },
3254
+ } as WorkerAgent<any, any>;
3255
+ }
3256
+
3257
+ /**
3258
+ * List worker IDs from the config API.
3259
+ */
3260
+ export async function listWorkers(): Promise<string[]> {
3261
+ const config = await fetchWorkersConfig();
3262
+ return Object.keys(config.workers);
3263
+ }
3264
+
3265
+ /**
3266
+ * Get a worker by ID. Returns a synthetic WorkerAgent that dispatches via
3267
+ * POST /workers/trigger. Returns null if the worker is not in the config.
3268
+ */
3269
+ export async function getWorker(
3270
+ workerId: string
3271
+ ): Promise<WorkerAgent<any, any> | null> {
3272
+ const config = await fetchWorkersConfig();
3273
+ if (!(workerId in config.workers)) {
3274
+ return null;
3275
+ }
3276
+ return createSyntheticAgent(workerId);
3277
+ }
3278
+
3279
+ /** Webpack require.context \u2013 auto-discovers app/ai/queues/*.queue.ts (Next.js). */
3280
+ function getQueueModuleContext(): { keys(): string[]; (key: string): unknown } | null {
3281
+ try {
3282
+ if (typeof require === 'undefined') return null;
3283
+ const ctx = (require as unknown as { context: (dir: string, sub: boolean, re: RegExp) => { keys(): string[]; (k: string): unknown } }).context(
3284
+ '@/app/ai/queues',
3285
+ false,
3286
+ /\\.queue\\.ts$/
3287
+ );
3288
+ return ctx;
3289
+ } catch {
3290
+ return null;
3291
+ }
3292
+ }
3293
+
3294
+ /**
3295
+ * Auto-discover queue modules from app/ai/queues/*.queue.ts (no per-queue registration).
3296
+ * Uses require.context when available (Next.js/webpack).
3297
+ */
3298
+ function buildQueueModules(): Record<string, Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>> {
3299
+ const ctx = getQueueModuleContext();
3300
+ if (!ctx) return {};
3301
+ const out: Record<string, Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>> = {};
3302
+ for (const key of ctx.keys()) {
3303
+ const mod = ctx(key) as { default?: { id?: string }; [k: string]: unknown };
3304
+ const id = mod?.default?.id;
3305
+ if (id && typeof id === 'string') {
3306
+ out[id] = mod as Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>;
3307
+ }
3308
+ }
3309
+ return out;
3310
+ }
3311
+
3312
+ const queueModules = buildQueueModules();
3313
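// Illustrative shape of a hypothetical app/ai/queues/example.queue.ts module that
// buildQueueModules() above can consume: a default export carrying the queue id,
// plus named mapper functions referenced by mapInputFromPrev in the queue config:
//   export default { id: 'example-queue' };
//   export function mapSecondStepInput(initial: unknown, prev: Array<{ output: unknown }>) {
//     return { seed: initial, last: prev[prev.length - 1]?.output };
//   }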
+
3314
+ /**
3315
+ * Returns a registry compatible with dispatchQueue. Queue definitions come from
3316
+ * GET /workers/config; mapInputFromPrev is resolved from app/ai/queues/*.queue.ts
3317
+ * automatically (no manual registration per queue).
3318
+ */
3319
+ export async function getQueueRegistry(): Promise<WorkerQueueRegistry> {
3320
+ const config = await fetchWorkersConfig();
3321
+ const queues: QueueConfig[] = config.queues ?? [];
3322
+
3323
+ const registry = {
3324
+ getQueueById(queueId: string) {
3325
+ return queues.find((q) => q.id === queueId);
3326
+ },
3327
+ invokeMapInput(
3328
+ queueId: string,
3329
+ stepIndex: number,
3330
+ initialInput: unknown,
3331
+ previousOutputs: Array<{ stepIndex: number; workerId: string; output: unknown }>
3332
+ ): unknown {
3333
+ const queue = queues.find((q) => q.id === queueId);
3334
+ const step = queue?.steps?.[stepIndex];
3335
+ const fnName = step?.mapInputFromPrev;
3336
+ if (!fnName) {
3337
+ return previousOutputs.length > 0 ? previousOutputs[previousOutputs.length - 1].output : initialInput;
3338
+ }
3339
+ const mod = queueModules[queueId];
3340
+ if (!mod || typeof mod[fnName] !== 'function') {
3341
+ return previousOutputs.length > 0 ? previousOutputs[previousOutputs.length - 1].output : initialInput;
3342
+ }
3343
+ return mod[fnName](initialInput, previousOutputs);
3344
+ },
3345
+ };
3346
+ return registry as WorkerQueueRegistry;
3347
+ }
3348
+
3349
+ /**
3350
+ * Clear the in-memory config cache (e.g. for tests or refresh).
3351
+ */
3352
+ export function clearConfigCache(): void {
3353
+ configCache = null;
3354
+ }
3355
+ `,
3356
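// A minimal sketch, assuming the getWorker/listWorkers exports generated above;
// the fallback worker id is hypothetical.
import { getWorker, listWorkers } from './registry/workers';

async function registryExample(): Promise<void> {
  const ids = await listWorkers();
  const worker = await getWorker(ids[0] ?? 'demo-worker');
  if (worker) {
    const result = await worker.dispatch({ n: 1 }, { metadata: { source: 'example' } });
    console.log(result.jobId, result.status); // dispatch resolves with status 'queued'
  }
}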
+ "workers/[...slug]/route.ts": `import { NextRequest, NextResponse } from 'next/server';
3357
+
3358
+ /**
3359
+ * Worker execution endpoint.
3360
+ *
3361
+ * POST /api/workflows/workers/:workerId - Execute a worker
3362
+ * GET /api/workflows/workers/:workerId/:jobId - Get worker job status
3363
+ * POST /api/workflows/workers/:workerId/webhook - Webhook callback for completion notifications
3364
+ *
3365
+ * This endpoint allows workers to be called like workflows, enabling
3366
+ * them to be used in orchestration.
3367
+ *
3368
+ * Workers are resolved through the registry module (../registry/workers), which
3369
+ * uses the GET /workers/config API as its source of truth (no .worker.ts scanning).
3370
+ */
3371
+
3372
+ // Worker resolution is implemented in ../registry/workers
3373
+ // - Registry module: app/api/workflows/registry/workers.ts
3374
+ // - Source of truth: GET /workers/config (no filesystem scanning, no dynamic .worker.ts imports)
3375
+ // - getWorker(workerId) returns a synthetic agent that POSTs to /workers/trigger
3376
+ // - listWorkers() lists the worker IDs present in the config response
3377
+ // - getQueueRegistry() exposes queue definitions for dispatchQueue
3378
+ // - Config is cached in memory; clearConfigCache() resets it
3379
+ // - Exports: fetchWorkersConfig(), getWorker(), listWorkers(), getQueueRegistry(), clearConfigCache()
3380
+
3381
+ /**
3382
+ * Get a worker by ID.
3383
+ */
3384
+ async function getWorkerById(workerId: string): Promise<any | null> {
3385
+ const workersModule = await import('../../registry/workers') as { getWorker: (workerId: string) => Promise<any | null> };
3386
+ return await workersModule.getWorker(workerId);
3387
+ }
3388
+
3389
+ export async function POST(
3390
+ req: NextRequest,
3391
+ { params }: { params: Promise<{ slug: string[] }> }
3392
+ ) {
3393
+ let slug: string[] = [];
3394
+ try {
3395
+ const { slug: slugParam } = await params;
3396
+ slug = slugParam || [];
3397
+ const [workerId, action] = slug;
3398
+
3399
+ // Handle webhook endpoint
3400
+ if (action === 'webhook') {
3401
+ return handleWebhook(req, workerId);
3402
+ }
3403
+
3404
+ // Handle job store update endpoint (POST /api/workflows/workers/:workerId/update)
3405
+ if (action === 'update') {
3406
+ return handleJobUpdate(req, workerId);
3407
+ }
3408
+
3409
+ // Create job record (POST /api/workflows/workers/:workerId/job) \u2013 used before polling when trigger-only
3410
+ if (action === 'job') {
3411
+ return handleCreateJob(req, workerId);
3412
+ }
3413
+
3414
+ if (!workerId) {
3415
+ return NextResponse.json(
3416
+ { error: 'Worker ID is required' },
3417
+ { status: 400 }
3418
+ );
3419
+ }
3420
+
3421
+ let body;
3422
+ try {
3423
+ body = await req.json();
3424
+ } catch (parseError: any) {
3425
+ console.error('[Worker] Failed to parse request body:', {
3426
+ workerId,
3427
+ error: parseError?.message || String(parseError),
3428
+ });
3429
+ return NextResponse.json(
3430
+ { error: 'Invalid JSON in request body' },
3431
+ { status: 400 }
3432
+ );
3433
+ }
3434
+
3435
+ const { input, await: shouldAwait = false, jobId: providedJobId } = body;
3436
+
3437
+ console.log('[Worker] Dispatching worker:', {
3438
+ workerId,
3439
+ shouldAwait,
3440
+ hasInput: !!input,
3441
+ });
3442
+
3443
+ // Get the worker using registry system
3444
+ let worker;
3445
+ try {
3446
+ worker = await getWorkerById(workerId);
3447
+ } catch (getWorkerError: any) {
3448
+ console.error('[Worker] Error getting worker:', {
3449
+ workerId,
3450
+ error: getWorkerError?.message || String(getWorkerError),
3451
+ });
3452
+ return NextResponse.json(
3453
+ { error: \`Failed to get worker: \${getWorkerError?.message || String(getWorkerError)}\` },
3454
+ { status: 500 }
3455
+ );
3456
+ }
3457
+
3458
+ if (!worker) {
3459
+ console.warn('[Worker] Worker not found:', {
3460
+ workerId,
3461
+ });
3462
+ return NextResponse.json(
3463
+ { error: \`Worker "\${workerId}" not found. Make sure it is deployed and present in the workers config.\` },
3464
+ { status: 404 }
3465
+ );
3466
+ }
3467
+
3468
+ // Webhook optional. Job updates go through the configured job store; never pass jobStoreUrl.
3469
+ const webhookBase = process.env.WORKFLOW_WEBHOOK_BASE_URL;
3470
+ const webhookUrl =
3471
+ shouldAwait && typeof webhookBase === 'string' && webhookBase
3472
+ ? \`\${webhookBase.replace(/\\/+$/, '')}/api/workflows/workers/\${workerId}/webhook\`
3473
+ : undefined;
3474
+
3475
+ // Use a single jobId end-to-end (Next job store + SQS/Lambda job store).
3476
+ // If caller provides jobId, respect it; otherwise generate one.
3477
+ const jobId =
3478
+ (typeof providedJobId === 'string' && providedJobId.trim()
3479
+ ? providedJobId.trim()
3480
+ : \`job-\${Date.now()}-\${Math.random().toString(36).slice(2, 11)}\`);
3481
+
3482
+ // Store initial job record
3483
+ const { setJob } = await import('../../stores/jobStore');
3484
+ try {
3485
+ await setJob(jobId, {
3486
+ jobId,
3487
+ workerId,
3488
+ status: 'queued',
3489
+ input: input || {},
3490
+ metadata: { source: 'workflow-orchestration' },
3491
+ });
3492
+ console.log('[Worker] Initial job record created:', {
3493
+ jobId,
3494
+ workerId,
3495
+ });
3496
+ } catch (setJobError: any) {
3497
+ console.error('[Worker] Failed to create initial job record:', {
3498
+ jobId,
3499
+ workerId,
3500
+ error: setJobError?.message || String(setJobError),
3501
+ });
3502
+ // Continue even if job store fails - worker dispatch can still proceed
3503
+ }
3504
+
3505
+ // Dispatch the worker. Job updates go through the configured job store; webhook passed only if configured.
3506
+ let dispatchResult;
3507
+ try {
3508
+ dispatchResult = await worker.dispatch(input || {}, {
3509
+ mode: 'auto',
3510
+ jobId,
3511
+ ...(webhookUrl ? { webhookUrl } : {}),
3512
+ metadata: { source: 'workflow-orchestration' },
3513
+ });
3514
+ console.log('[Worker] Worker dispatched successfully:', {
3515
+ jobId: dispatchResult.jobId,
3516
+ workerId,
3517
+ messageId: dispatchResult.messageId,
3518
+ });
3519
+ } catch (dispatchError: any) {
3520
+ console.error('[Worker] Failed to dispatch worker:', {
3521
+ workerId,
3522
+ error: dispatchError?.message || String(dispatchError),
3523
+ stack: process.env.NODE_ENV === 'development' ? dispatchError?.stack : undefined,
3524
+ });
3525
+ throw new Error(\`Failed to dispatch worker: \${dispatchError?.message || String(dispatchError)}\`);
3526
+ }
3527
+
3528
+ const finalJobId = dispatchResult.jobId || jobId;
3529
+
3530
+ if (shouldAwait) {
3531
+ // For await mode, return job info and let the caller poll status;
3532
+ // the webhook handler will update the job when the worker completes.
3533
+ // Dispatch is fire-and-forget, so callers that need the result should
3534
+ // poll GET /api/workflows/workers/:workerId/:jobId (e.g. with setInterval).
3535
+ return NextResponse.json(
3536
+ {
3537
+ jobId: finalJobId,
3538
+ status: 'queued',
3539
+ message: 'Worker job queued. Use GET /api/workflows/workers/:workerId/:jobId to check status, or wait for webhook.',
3540
+ },
3541
+ { status: 200 }
3542
+ );
3543
+ }
3544
+
3545
+ return NextResponse.json(
3546
+ {
3547
+ jobId: finalJobId,
3548
+ status: dispatchResult.status || 'queued',
3549
+ },
3550
+ { status: 200 }
3551
+ );
3552
+ } catch (error: any) {
3553
+ console.error('[Worker] Error in POST handler:', {
3554
+ workerId: slug[0],
3555
+ error: error?.message || String(error),
3556
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
3557
+ });
3558
+ return NextResponse.json(
3559
+ {
3560
+ error: error?.message || String(error),
3561
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
3562
+ },
3563
+ { status: 500 }
3564
+ );
3565
+ }
3566
+ }
3567
+
3568
+ export async function GET(
3569
+ req: NextRequest,
3570
+ { params }: { params: Promise<{ slug: string[] }> }
3571
+ ) {
3572
+ let slug: string[] = [];
3573
+ try {
3574
+ const { slug: slugParam } = await params;
3575
+ slug = slugParam || [];
3576
+ const [workerId, jobId] = slug;
3577
+
3578
+ if (!workerId || !jobId) {
3579
+ return NextResponse.json(
3580
+ { error: 'Worker ID and job ID are required' },
3581
+ { status: 400 }
3582
+ );
3583
+ }
3584
+
3585
+ console.log('[Worker] Getting job status:', {
3586
+ jobId,
3587
+ workerId,
3588
+ });
3589
+
3590
+ // Get job status from job store
3591
+ const { getJob } = await import('../../stores/jobStore');
3592
+ let job;
3593
+ try {
3594
+ job = await getJob(jobId);
3595
+ } catch (getJobError: any) {
3596
+ console.error('[Worker] Error getting job from store:', {
3597
+ jobId,
3598
+ workerId,
3599
+ error: getJobError?.message || String(getJobError),
3600
+ });
3601
+ return NextResponse.json(
3602
+ { error: \`Failed to get job: \${getJobError?.message || String(getJobError)}\` },
3603
+ { status: 500 }
3604
+ );
3605
+ }
3606
+
3607
+ if (!job) {
3608
+ console.warn('[Worker] Job not found:', {
3609
+ jobId,
3610
+ workerId,
3611
+ });
3612
+ return NextResponse.json(
3613
+ { error: \`Job "\${jobId}" not found\` },
3614
+ { status: 404 }
3615
+ );
3616
+ }
3617
+
3618
+ console.log('[Worker] Job status retrieved:', {
3619
+ jobId,
3620
+ workerId,
3621
+ status: job.status,
3622
+ });
3623
+
3624
+ return NextResponse.json(
3625
+ {
3626
+ jobId: job.jobId,
3627
+ workerId: job.workerId,
3628
+ status: job.status,
3629
+ output: job.output,
3630
+ error: job.error,
3631
+ metadata: job.metadata,
3632
+ createdAt: job.createdAt,
3633
+ updatedAt: job.updatedAt,
3634
+ completedAt: job.completedAt,
3635
+ },
3636
+ { status: 200 }
3637
+ );
3638
+ } catch (error: any) {
3639
+ console.error('[Worker] Error in GET handler:', {
3640
+ workerId: slug[0],
3641
+ jobId: slug[1],
3642
+ error: error?.message || String(error),
3643
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
3644
+ });
3645
+ return NextResponse.json(
3646
+ {
3647
+ error: error?.message || String(error),
3648
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
3649
+ },
3650
+ { status: 500 }
3651
+ );
3652
+ }
3653
+ }
3654
+
3655
+ /**
3656
+ * Create job record before polling (trigger-only flow).
3657
+ * POST /api/workflows/workers/:workerId/job
3658
+ * Body: { jobId, input }
3659
+ */
3660
+ async function handleCreateJob(req: NextRequest, workerId: string) {
3661
+ try {
3662
+ if (!workerId) {
3663
+ return NextResponse.json({ error: 'Worker ID is required' }, { status: 400 });
3664
+ }
3665
+ const body = await req.json();
3666
+ const { jobId, input } = body;
3667
+ if (!jobId) {
3668
+ return NextResponse.json({ error: 'jobId is required in request body' }, { status: 400 });
3669
+ }
3670
+ const { setJob } = await import('../../stores/jobStore');
3671
+ await setJob(jobId, {
3672
+ jobId,
3673
+ workerId,
3674
+ status: 'queued',
3675
+ input: input ?? {},
3676
+ metadata: { source: 'workflow-orchestration' },
3677
+ });
3678
+ console.log('[Worker] Job created:', { jobId, workerId });
3679
+ return NextResponse.json({ message: 'Job created', jobId, workerId }, { status: 200 });
3680
+ } catch (error: any) {
3681
+ console.error('[Worker] Error creating job:', { workerId, error: error?.message || String(error) });
3682
+ return NextResponse.json(
3683
+ { error: error?.message || String(error) },
3684
+ { status: 500 }
3685
+ );
3686
+ }
3687
+ }
3688
+
3689
+ /**
3690
+ * Handle job store update from worker context.
3691
+ * POST /api/workflows/workers/:workerId/update
3692
+ */
3693
+ async function handleJobUpdate(req: NextRequest, workerId: string) {
3694
+ try {
3695
+ if (!workerId) {
3696
+ return NextResponse.json(
3697
+ { error: 'Worker ID is required' },
3698
+ { status: 400 }
3699
+ );
3700
+ }
3701
+
3702
+ const body = await req.json();
3703
+ const { jobId, status, metadata, output, error } = body;
3704
+
3705
+ if (!jobId) {
3706
+ return NextResponse.json(
3707
+ { error: 'jobId is required in request body' },
3708
+ { status: 400 }
3709
+ );
3710
+ }
3711
+
3712
+ const { updateJob, setJob, getJob } = await import('../../stores/jobStore');
3713
+ const existing = await getJob(jobId);
3714
+
3715
+ // Upsert: create job if missing (e.g. workflow triggered via /workers/trigger directly)
3716
+ if (!existing) {
3717
+ await setJob(jobId, {
3718
+ jobId,
3719
+ workerId,
3720
+ status: status ?? 'queued',
3721
+ input: {},
3722
+ metadata: metadata ?? {},
3723
+ output,
3724
+ error,
3725
+ });
3726
+ return NextResponse.json(
3727
+ { message: 'Job created and updated successfully', jobId, workerId },
3728
+ { status: 200 }
3729
+ );
3730
+ }
3731
+
3732
+ const updateData: any = {};
3733
+ if (status !== undefined) updateData.status = status;
3734
+ if (metadata !== undefined) updateData.metadata = { ...existing.metadata, ...metadata };
3735
+ if (output !== undefined) updateData.output = output;
3736
+ if (error !== undefined) updateData.error = error;
3737
+
3738
+ await updateJob(jobId, updateData);
3739
+
3740
+ console.log('[Worker] Job updated:', { jobId, workerId, updates: Object.keys(updateData) });
3741
+
3742
+ return NextResponse.json(
3743
+ { message: 'Job updated successfully', jobId, workerId },
3744
+ { status: 200 }
3745
+ );
3746
+ } catch (error: any) {
3747
+ console.error('[Worker] Error updating job:', {
3748
+ workerId,
3749
+ error: error?.message || String(error),
3750
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
3751
+ });
3752
+ return NextResponse.json(
3753
+ { error: error?.message || String(error) },
3754
+ { status: 500 }
3755
+ );
3756
+ }
3757
+ }
3758
+
3759
+ /**
3760
+ * Handle webhook callback for worker completion.
3761
+ * POST /api/workflows/workers/:workerId/webhook
3762
+ *
3763
+ * This endpoint receives completion notifications from workers.
3764
+ * It updates the job store with the final status before returning.
3765
+ * Webhook is only called if webhookUrl was provided during dispatch.
3766
+ */
3767
+ async function handleWebhook(req: NextRequest, workerId: string) {
3768
+ try {
3769
+ if (!workerId) {
3770
+ return NextResponse.json(
3771
+ { error: 'Worker ID is required' },
3772
+ { status: 400 }
3773
+ );
3774
+ }
3775
+
3776
+ const body = await req.json();
3777
+ const { jobId, status, output, error, metadata } = body;
3778
+
3779
+ if (!jobId) {
3780
+ return NextResponse.json(
3781
+ { error: 'jobId is required in webhook payload' },
3782
+ { status: 400 }
3783
+ );
3784
+ }
3785
+
3786
+ // Update job store with completion status (before any further processing)
3787
+ const { updateJob } = await import('../../stores/jobStore');
3788
+
3789
+ const jobStatus = status === 'success' ? 'completed' : 'failed';
3790
+
3791
+ try {
3792
+ // Update job with completion status
3793
+ await updateJob(jobId, {
3794
+ jobId,
3795
+ workerId,
3796
+ status: jobStatus,
3797
+ output,
3798
+ error,
3799
+ completedAt: new Date().toISOString(),
3800
+ metadata: metadata || {},
3801
+ });
3802
+
3803
+ console.log('[Worker] Webhook received and job updated:', {
3804
+ jobId,
3805
+ workerId,
3806
+ status: jobStatus,
3807
+ });
3808
+ } catch (updateError: any) {
3809
+ console.error('[Worker] Failed to update job store from webhook:', {
3810
+ jobId,
3811
+ workerId,
3812
+ error: updateError?.message || String(updateError),
3813
+ stack: process.env.NODE_ENV === 'development' ? updateError?.stack : undefined,
3814
+ });
3815
+ // Continue even if job store update fails - webhook was received
3816
+ }
3817
+
3818
+ return NextResponse.json(
3819
+ { message: 'Webhook received', jobId, workerId, status: jobStatus },
3820
+ { status: 200 }
3821
+ );
3822
+ } catch (error: any) {
3823
+ console.error('[Worker] Error handling webhook:', {
3824
+ workerId,
3825
+ error: error?.message || String(error),
3826
+ stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
3827
+ });
3828
+ return NextResponse.json(
3829
+ { error: error?.message || String(error) },
3830
+ { status: 500 }
3831
+ );
3832
+ }
3833
+ }
3834
+ `,
3835
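// A minimal client-side sketch against the route above; the worker id and input
// are hypothetical.
async function triggerWorkerExample(): Promise<void> {
  const res = await fetch('/api/workflows/workers/demo-worker', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ input: { n: 1 } }),
  });
  const { jobId } = (await res.json()) as { jobId: string };
  const statusRes = await fetch('/api/workflows/workers/demo-worker/' + jobId);
  const job = await statusRes.json(); // { jobId, workerId, status, output?, error?, ... }
}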
+ "queues/[...slug]/route.ts": `import { NextRequest, NextResponse } from 'next/server';
3836
+ import { dispatchQueue } from '@microfox/ai-worker';
3837
+ import { getQueueRegistry } from '../../registry/workers';
3838
+ import {
3839
+ getQueueJob,
3840
+ listQueueJobs,
3841
+ updateQueueJob,
3842
+ updateQueueStep,
3843
+ appendQueueStep,
3844
+ } from '../../stores/queueJobStore';
3845
+
3846
+ export const dynamic = 'force-dynamic';
3847
+
3848
+ const LOG = '[Queues]';
3849
+
3850
+ /**
3851
+ * Queue execution endpoint (mirrors workers route structure).
3852
+ *
3853
+ * POST /api/workflows/queues/:queueId - Trigger a queue (no registry import needed in app code)
3854
+ * GET /api/workflows/queues/:queueId/:jobId - Get queue job status
3855
+ * GET /api/workflows/queues - List queue jobs (query: queueId?, limit?)
3856
+ * POST /api/workflows/queues/:queueId/update - Update queue job step (for Lambda/callers)
3857
+ * POST /api/workflows/queues/:queueId/webhook - Webhook for queue completion
3858
+ *
3859
+ * Callers can trigger a queue with a simple POST; registry is resolved inside this route.
3860
+ */
3861
+ async function getRegistry() {
3862
+ return getQueueRegistry();
3863
+ }
3864
+
3865
+ export async function POST(
3866
+ req: NextRequest,
3867
+ { params }: { params: Promise<{ slug: string[] }> }
3868
+ ) {
3869
+ let slug: string[] = [];
3870
+ try {
3871
+ const { slug: slugParam } = await params;
3872
+ slug = slugParam ?? [];
3873
+ const [queueId, action] = slug;
3874
+
3875
+ if (action === 'update') {
3876
+ return handleQueueJobUpdate(req, queueId);
3877
+ }
3878
+ if (action === 'webhook') {
3879
+ return handleQueueWebhook(req, queueId);
3880
+ }
3881
+
3882
+ if (!queueId) {
3883
+ return NextResponse.json(
3884
+ { error: 'Queue ID is required. Use POST /api/workflows/queues/:queueId to trigger a queue.' },
3885
+ { status: 400 }
3886
+ );
3887
+ }
3888
+
3889
+ let body: { input?: unknown; metadata?: Record<string, unknown>; jobId?: string } = {};
3890
+ try {
3891
+ body = await req.json();
3892
+ } catch {
3893
+ body = {};
3894
+ }
3895
+ const { input = {}, metadata, jobId: providedJobId } = body;
3896
+
3897
+ const registry = await getRegistry();
3898
+ const queue = registry.getQueueById(queueId);
3899
+ if (!queue) {
3900
+ console.warn(\`\${LOG} Queue not found: \${queueId}\`);
3901
+ return NextResponse.json(
3902
+ { error: \`Queue "\${queueId}" not found. Ensure workers are deployed and config is available.\` },
3903
+ { status: 404 }
3904
+ );
3905
+ }
3906
+
3907
+ const result = await dispatchQueue(queueId, input as Record<string, unknown>, {
3908
+ registry,
3909
+ metadata: metadata ?? { source: 'queues-api' },
3910
+ ...(typeof providedJobId === 'string' && providedJobId.trim() ? { jobId: providedJobId.trim() } : {}),
3911
+ });
3912
+
3913
+ console.log(\`\${LOG} Queue triggered\`, {
3914
+ queueId: result.queueId,
3915
+ jobId: result.jobId,
3916
+ messageId: result.messageId,
3917
+ });
3918
+
3919
+ return NextResponse.json(
3920
+ {
3921
+ jobId: result.jobId,
3922
+ status: result.status,
3923
+ messageId: result.messageId,
3924
+ queueId: result.queueId,
3925
+ queueJobUrl: \`/api/workflows/queues/\${queueId}/\${result.jobId}\`,
3926
+ },
3927
+ { status: 200 }
3928
+ );
3929
+ } catch (error: unknown) {
3930
+ const err = error instanceof Error ? error : new Error(String(error));
3931
+ console.error(\`\${LOG} POST error:\`, err.message, err.stack);
3932
+ return NextResponse.json(
3933
+ { error: err.message },
3934
+ { status: 500 }
3935
+ );
3936
+ }
3937
+ }
3938
+
3939
+ export async function GET(
3940
+ req: NextRequest,
3941
+ { params }: { params: Promise<{ slug: string[] }> }
3942
+ ) {
3943
+ let slug: string[] = [];
3944
+ try {
3945
+ const { slug: slugParam } = await params;
3946
+ slug = slugParam ?? [];
3947
+ const [queueId, jobId] = slug;
3948
+
3949
+ // List: GET /api/workflows/queues or GET /api/workflows/queues?queueId=...&limit=...
3950
+ if (slug.length === 0 || (slug.length === 1 && !jobId)) {
3951
+ const { searchParams } = new URL(req.url);
3952
+ const filterQueueId = searchParams.get('queueId') ?? (slug[0] || undefined);
3953
+ const limit = Math.min(
3954
+ 100,
3955
+ Math.max(1, parseInt(searchParams.get('limit') ?? '50', 10) || 50)
3956
+ );
3957
+ const jobs = await listQueueJobs(filterQueueId, limit);
3958
+ return NextResponse.json({ jobs });
3959
+ }
3960
+
3961
+ // Get one: GET /api/workflows/queues/:queueId/:jobId
3962
+ if (!queueId || !jobId) {
3963
+ return NextResponse.json(
3964
+ { error: 'Queue ID and job ID are required for GET. Use GET /api/workflows/queues/:queueId/:jobId' },
3965
+ { status: 400 }
3966
+ );
3967
+ }
3968
+
3969
+ const job = await getQueueJob(jobId);
3970
+ if (!job) {
3971
+ return NextResponse.json({ error: 'Queue job not found' }, { status: 404 });
3972
+ }
3973
+ if (job.queueId !== queueId) {
3974
+ return NextResponse.json({ error: 'Queue job does not belong to this queue' }, { status: 400 });
3975
+ }
3976
+
3977
+ return NextResponse.json(job);
3978
+ } catch (error: unknown) {
3979
+ const err = error instanceof Error ? error : new Error(String(error));
3980
+ console.error(\`\${LOG} GET error:\`, err.message);
3981
+ return NextResponse.json(
3982
+ { error: err.message },
3983
+ { status: 500 }
3984
+ );
3985
+ }
3986
+ }
3987
+
3988
+ async function handleQueueJobUpdate(req: NextRequest, queueId: string) {
3989
+ if (!queueId) {
3990
+ return NextResponse.json({ error: 'Queue ID is required' }, { status: 400 });
3991
+ }
3992
+ const body = await req.json();
3993
+ const { queueJobId, jobId, action, stepIndex, workerJobId, workerId, output, error, input } = body;
3994
+ const id = queueJobId ?? jobId;
3995
+ if (!id) {
3996
+ return NextResponse.json(
3997
+ { error: 'queueJobId or jobId is required in request body' },
3998
+ { status: 400 }
3999
+ );
4000
+ }
4001
+
4002
+ if (action === 'append') {
4003
+ if (!workerId || !workerJobId) {
4004
+ return NextResponse.json(
4005
+ { error: 'append requires workerId and workerJobId' },
4006
+ { status: 400 }
4007
+ );
4008
+ }
4009
+ await appendQueueStep(id, { workerId, workerJobId });
4010
+ console.log(\`\${LOG} Step appended\`, { queueJobId: id, workerId, workerJobId });
4011
+ return NextResponse.json({ ok: true, action: 'append' });
4012
+ }
4013
+
4014
+ if (action === 'start') {
4015
+ if (typeof stepIndex !== 'number' || !workerJobId) {
4016
+ return NextResponse.json(
4017
+ { error: 'start requires stepIndex and workerJobId' },
4018
+ { status: 400 }
4019
+ );
4020
+ }
4021
+ await updateQueueStep(id, stepIndex, {
4022
+ status: 'running',
4023
+ startedAt: new Date().toISOString(),
4024
+ ...(input !== undefined && { input }),
4025
+ });
4026
+ console.log(\`\${LOG} Step started\`, { queueJobId: id, stepIndex, workerJobId });
4027
+ return NextResponse.json({ ok: true, action: 'start' });
4028
+ }
4029
+
4030
+ if (action === 'complete') {
4031
+ if (typeof stepIndex !== 'number' || !workerJobId) {
4032
+ return NextResponse.json(
4033
+ { error: 'complete requires stepIndex and workerJobId' },
4034
+ { status: 400 }
4035
+ );
4036
+ }
4037
+ await updateQueueStep(id, stepIndex, {
4038
+ status: 'completed',
4039
+ output,
4040
+ completedAt: new Date().toISOString(),
4041
+ });
4042
+ console.log(\`\${LOG} Step completed\`, { queueJobId: id, stepIndex, workerJobId });
4043
+ return NextResponse.json({ ok: true, action: 'complete' });
4044
+ }
4045
+
4046
+ if (action === 'fail') {
4047
+ if (typeof stepIndex !== 'number' || !workerJobId) {
4048
+ return NextResponse.json(
4049
+ { error: 'fail requires stepIndex and workerJobId' },
4050
+ { status: 400 }
4051
+ );
4052
+ }
4053
+ await updateQueueStep(id, stepIndex, {
4054
+ status: 'failed',
4055
+ error: error ?? { message: 'Unknown error' },
4056
+ completedAt: new Date().toISOString(),
4057
+ });
4058
+ console.log(\`\${LOG} Step failed\`, { queueJobId: id, stepIndex, workerJobId });
4059
+ return NextResponse.json({ ok: true, action: 'fail' });
4060
+ }
4061
+
4062
+ return NextResponse.json(
4063
+ { error: \`Unknown action: \${action}. Use start|complete|fail|append\` },
4064
+ { status: 400 }
4065
+ );
4066
+ }
4067
+
4068
+ /**
4069
+ * Handle webhook callback for queue completion.
4070
+ * POST /api/workflows/queues/:queueId/webhook
4071
+ *
4072
+ * When a webhook URL is provided at dispatch time, the worker/runtime calls this
4073
+ * instead of updating the job store directly. This handler updates the queue job
4074
+ * store with the final status, the same end state as the no-webhook path.
4075
+ */
4076
+ async function handleQueueWebhook(req: NextRequest, queueId: string) {
4077
+ try {
4078
+ if (!queueId) {
4079
+ return NextResponse.json({ error: 'Queue ID is required' }, { status: 400 });
4080
+ }
4081
+
4082
+ const body = await req.json();
4083
+ const { queueJobId, jobId, status, output, error, metadata } = body;
4084
+ const id = queueJobId ?? jobId;
4085
+ if (!id) {
4086
+ return NextResponse.json(
4087
+ { error: 'queueJobId or jobId is required in webhook payload' },
4088
+ { status: 400 }
4089
+ );
4090
+ }
4091
+
4092
+ const jobStatus = status === 'success' ? 'completed' : 'failed';
4093
+
4094
+ try {
4095
+ await updateQueueJob(id, {
4096
+ status: jobStatus,
4097
+ completedAt: new Date().toISOString(),
4098
+ });
4099
+ console.log(\`\${LOG} Webhook received and queue job updated:\`, {
4100
+ queueJobId: id,
4101
+ queueId,
4102
+ status: jobStatus,
4103
+ });
4104
+ } catch (updateError: unknown) {
4105
+ const err = updateError instanceof Error ? updateError : new Error(String(updateError));
4106
+ console.error(\`\${LOG} Failed to update queue job from webhook:\`, {
4107
+ queueJobId: id,
4108
+ queueId,
4109
+ error: err.message,
4110
+ });
4111
+ // Still return 200 so the caller does not retry; store update can be retried elsewhere if needed
4112
+ }
4113
+
4114
+ return NextResponse.json(
4115
+ { message: 'Webhook received', queueId, queueJobId: id, status: jobStatus },
4116
+ { status: 200 }
4117
+ );
4118
+ } catch (error: unknown) {
4119
+ const err = error instanceof Error ? error : new Error(String(error));
4120
+ console.error(\`\${LOG} Error handling queue webhook:\`, { queueId, error: err.message });
4121
+ return NextResponse.json(
4122
+ { error: err.message },
4123
+ { status: 500 }
4124
+ );
4125
+ }
4126
+ }
4127
+ `,
4128
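// A minimal client-side sketch against the queues route above; the queue id and
// input are hypothetical.
async function triggerQueueExample(): Promise<void> {
  const res = await fetch('/api/workflows/queues/demo-queue', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ input: { topic: 'hello' } }),
  });
  const { jobId } = (await res.json()) as { jobId: string };
  const job = await (await fetch('/api/workflows/queues/demo-queue/' + jobId)).json();
  // job.steps is an array of { workerId, workerJobId, status, ... }, one per queue step
}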
+ "../../../hooks/useWorkflowJob.ts": `'use client';
4129
+
4130
+ import { useCallback, useEffect, useRef, useState } from 'react';
4131
+
4132
+ export type WorkflowJobStatus =
4133
+ | 'idle'
4134
+ | 'queued'
4135
+ | 'running'
4136
+ | 'completed'
4137
+ | 'failed'
4138
+ | 'partial';
4139
+
4140
+ export interface WorkerJobResult {
4141
+ jobId: string;
4142
+ workerId: string;
4143
+ status: string;
4144
+ output?: unknown;
4145
+ error?: { message: string; stack?: string };
4146
+ metadata?: Record<string, unknown>;
4147
+ createdAt?: string;
4148
+ updatedAt?: string;
4149
+ completedAt?: string;
4150
+ }
4151
+
4152
+ export interface QueueJobStep {
4153
+ workerId: string;
4154
+ workerJobId: string;
4155
+ status: string;
4156
+ input?: unknown;
4157
+ output?: unknown;
4158
+ error?: { message: string };
4159
+ startedAt?: string;
4160
+ completedAt?: string;
4161
+ }
4162
+
4163
+ export interface QueueJobResult {
4164
+ id: string;
4165
+ queueId: string;
4166
+ status: string;
4167
+ steps: QueueJobStep[];
4168
+ metadata?: Record<string, unknown>;
4169
+ createdAt: string;
4170
+ updatedAt: string;
4171
+ completedAt?: string;
4172
+ }
4173
+
4174
+ export type WorkflowJobOutput = WorkerJobResult | QueueJobResult;
4175
+
4176
+ export interface UseWorkflowJobBaseOptions {
4177
+ /** Base URL for API calls (default: '' for relative, or set window.location.origin) */
4178
+ baseUrl?: string;
4179
+ /** Poll interval in ms (default: 2000) */
4180
+ pollIntervalMs?: number;
4181
+ /** Stop polling after this many ms (default: 300000 = 5 min) */
4182
+ pollTimeoutMs?: number;
4183
+ /** Start polling automatically after trigger (default: true) */
4184
+ autoPoll?: boolean;
4185
+ /** Called when job reaches completed (or queue: completed/partial) */
4186
+ onComplete?: (result: WorkflowJobOutput) => void;
4187
+ /** Called when job fails or trigger/poll errors */
4188
+ onError?: (error: Error) => void;
4189
+ /** If false, trigger is a no-op and auto-poll is skipped (default: true) */
4190
+ enabled?: boolean;
4191
+ }
4192
+
4193
+ export interface UseWorkflowJobWorkerOptions extends UseWorkflowJobBaseOptions {
4194
+ type: 'worker';
4195
+ workerId: string;
4196
+ }
4197
+
4198
+ export interface UseWorkflowJobQueueOptions extends UseWorkflowJobBaseOptions {
4199
+ type: 'queue';
4200
+ queueId: string;
4201
+ /** Optional metadata for queue trigger */
4202
+ metadata?: Record<string, unknown>;
4203
+ }
4204
+
4205
+ export type UseWorkflowJobOptions =
4206
+ | UseWorkflowJobWorkerOptions
4207
+ | UseWorkflowJobQueueOptions;
4208
+
4209
+ const TERMINAL_STATUSES = ['completed', 'failed', 'partial'];
4210
+
4211
+ function getBaseUrl(baseUrl?: string): string {
4212
+ if (baseUrl !== undefined && baseUrl !== '') return baseUrl;
4213
+ if (typeof window !== 'undefined') return window.location.origin;
4214
+ return '';
4215
+ }
4216
+
4217
+ export interface UseWorkflowJobReturn {
4218
+ /** Trigger the worker or queue. Pass input for the job. */
4219
+ trigger: (input?: Record<string, unknown>) => Promise<void>;
4220
+ /** Current job/queue job id (after trigger) */
4221
+ jobId: string | null;
4222
+ /** Current status: idle | queued | running | completed | failed | partial */
4223
+ status: WorkflowJobStatus;
4224
+ /** Last job output (worker or queue job object) */
4225
+ output: WorkflowJobOutput | null;
4226
+ /** Error from trigger or from job failure */
4227
+ error: Error | null;
4228
+ /** True while the trigger request is in flight */
4229
+ loading: boolean;
4230
+ /** True while polling for job status */
4231
+ polling: boolean;
4232
+ /** Reset state so you can trigger again */
4233
+ reset: () => void;
4234
+ }
4235
+
4236
+ export function useWorkflowJob(
4237
+ options: UseWorkflowJobWorkerOptions
4238
+ ): UseWorkflowJobReturn & { output: WorkerJobResult | null };
4239
+ export function useWorkflowJob(
4240
+ options: UseWorkflowJobQueueOptions
4241
+ ): UseWorkflowJobReturn & { output: QueueJobResult | null };
4242
+ export function useWorkflowJob(
4243
+ options: UseWorkflowJobOptions
4244
+ ): UseWorkflowJobReturn {
4245
+ const {
4246
+ baseUrl: baseUrlOpt,
4247
+ pollIntervalMs = 2000,
4248
+ pollTimeoutMs = 300_000,
4249
+ autoPoll = true,
4250
+ onComplete,
4251
+ onError,
4252
+ enabled = true,
4253
+ } = options;
4254
+
4255
+ const baseUrl = getBaseUrl(baseUrlOpt);
4256
+ const prefix = baseUrl ? baseUrl.replace(/\\/+$/, '') : '';
4257
+ const api = (path: string) => \`\${prefix}/api/workflows\${path}\`;
4258
+
4259
+ const [jobId, setJobId] = useState<string | null>(null);
4260
+ const [status, setStatus] = useState<WorkflowJobStatus>('idle');
4261
+ const [output, setOutput] = useState<WorkflowJobOutput | null>(null);
4262
+ const [error, setError] = useState<Error | null>(null);
4263
+ const [loading, setLoading] = useState(false);
4264
+ const [polling, setPolling] = useState(false);
4265
+
4266
+ const intervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
4267
+ const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
4268
+ const mountedRef = useRef(true);
4269
+
4270
+ const clearPolling = useCallback(() => {
4271
+ if (intervalRef.current) {
4272
+ clearInterval(intervalRef.current);
4273
+ intervalRef.current = null;
4274
+ }
4275
+ if (timeoutRef.current) {
4276
+ clearTimeout(timeoutRef.current);
4277
+ timeoutRef.current = null;
4278
+ }
4279
+ setPolling(false);
4280
+ }, []);
4281
+
4282
+ const reset = useCallback(() => {
4283
+ clearPolling();
4284
+ setJobId(null);
4285
+ setStatus('idle');
4286
+ setOutput(null);
4287
+ setError(null);
4288
+ setLoading(false);
4289
+ setPolling(false);
4290
+ }, [clearPolling]);
4291
+
4292
+ const trigger = useCallback(
4293
+ async (input?: Record<string, unknown>) => {
4294
+ if (!enabled) return;
4295
+
4296
+ setError(null);
4297
+ setOutput(null);
4298
+ setLoading(true);
4299
+
4300
+ try {
4301
+ if (options.type === 'worker') {
4302
+ const res = await fetch(api(\`/workers/\${options.workerId}\`), {
4303
+ method: 'POST',
4304
+ headers: { 'Content-Type': 'application/json' },
4305
+ body: JSON.stringify({ input: input ?? {}, await: false }),
4306
+ });
4307
+ const data = await res.json();
4308
+ if (!res.ok) throw new Error(data?.error ?? \`HTTP \${res.status}\`);
4309
+ const id = data.jobId ?? null;
4310
+ if (!id) throw new Error('No jobId in response');
4311
+ setJobId(id);
4312
+ setStatus('queued');
4313
+ setLoading(false);
4314
+
4315
+ if (autoPoll) {
4316
+ setPolling(true);
4317
+ const deadline = Date.now() + pollTimeoutMs;
4318
+ const poll = async () => {
4319
+ if (!mountedRef.current) return;
4320
+ try {
4321
+ const r = await fetch(
4322
+ api(\`/workers/\${options.workerId}/\${id}\`)
4323
+ );
4324
+ const job = await r.json();
4325
+ if (!r.ok) {
4326
+ if (Date.now() >= deadline) {
4327
+ clearPolling();
4328
+ const err = new Error('Poll timeout');
4329
+ setError(err);
4330
+ setStatus('failed');
4331
+ onError?.(err);
4332
+ }
4333
+ return;
4334
+ }
4335
+ setStatus((job.status as WorkflowJobStatus) ?? 'running');
4336
+ setOutput(job as WorkerJobResult);
4337
+ if (job.status === 'completed') {
4338
+ clearPolling();
4339
+ onComplete?.(job as WorkerJobResult);
4340
+ } else if (job.status === 'failed') {
4341
+ clearPolling();
4342
+ const err = new Error(
4343
+ job?.error?.message ?? 'Job failed'
4344
+ );
4345
+ setError(err);
4346
+ setStatus('failed');
4347
+ onError?.(err);
4348
+ } else if (Date.now() >= deadline) {
4349
+ clearPolling();
4350
+ const err = new Error('Poll timeout');
4351
+ setError(err);
4352
+ onError?.(err);
4353
+ }
4354
+ } catch (e) {
4355
+ if (mountedRef.current) {
4356
+ clearPolling();
4357
+ const err = e instanceof Error ? e : new Error(String(e));
4358
+ setError(err);
4359
+ setStatus('failed');
4360
+ onError?.(err);
4361
+ }
4362
+ }
4363
+ };
4364
+ await poll();
4365
+ intervalRef.current = setInterval(poll, pollIntervalMs);
4366
+ timeoutRef.current = setTimeout(() => {
4367
+ clearPolling();
4368
+ setError(new Error('Poll timeout'));
4369
+ setStatus('failed');
4370
+ }, pollTimeoutMs);
4371
+ }
+ } else {
+ const body: Record<string, unknown> = {
+ input: input ?? {},
+ };
+ if (options.metadata) body.metadata = options.metadata;
+ const res = await fetch(api(\`/queues/\${options.queueId}\`), {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(body),
+ });
+ const data = await res.json();
+ if (!res.ok) throw new Error(data?.error ?? \`HTTP \${res.status}\`);
+ const id = data.jobId ?? null;
+ if (!id) throw new Error('No jobId in response');
+ setJobId(id);
+ setStatus('queued');
+ setLoading(false);
+
+ if (autoPoll) {
+ setPolling(true);
+ const deadline = Date.now() + pollTimeoutMs;
+ const poll = async () => {
+ if (!mountedRef.current) return;
+ try {
+ const r = await fetch(
+ api(\`/queues/\${options.queueId}/\${id}\`)
+ );
+ const job = await r.json();
+ if (!r.ok) {
+ if (Date.now() >= deadline) {
+ clearPolling();
+ setError(new Error('Poll timeout'));
+ setStatus('failed');
+ }
+ return;
+ }
+ const st = (job.status as string) ?? 'running';
+ setStatus(st as WorkflowJobStatus);
+ setOutput(job as QueueJobResult);
+ if (TERMINAL_STATUSES.includes(st)) {
+ clearPolling();
+ onComplete?.(job as QueueJobResult);
+ if (st === 'failed') {
+ setError(new Error('Queue job failed'));
+ onError?.(new Error('Queue job failed'));
+ }
+ } else if (Date.now() >= deadline) {
+ clearPolling();
+ setError(new Error('Poll timeout'));
+ setStatus('failed');
+ }
+ } catch (e) {
+ if (mountedRef.current) {
+ clearPolling();
+ const err = e instanceof Error ? e : new Error(String(e));
+ setError(err);
+ setStatus('failed');
+ onError?.(err);
+ }
+ }
+ };
+ await poll();
+ intervalRef.current = setInterval(poll, pollIntervalMs);
+ timeoutRef.current = setTimeout(() => {
+ clearPolling();
+ setError(new Error('Poll timeout'));
+ setStatus('failed');
+ }, pollTimeoutMs);
+ }
+ }
+ } catch (e) {
+ const err = e instanceof Error ? e : new Error(String(e));
+ setError(err);
+ setStatus('failed');
+ setLoading(false);
+ onError?.(err);
+ }
+ },
+ [
+ enabled,
+ options,
+ api,
+ autoPoll,
+ pollIntervalMs,
+ pollTimeoutMs,
+ onComplete,
+ onError,
+ clearPolling,
+ ]
+ );
+
+ useEffect(() => {
+ mountedRef.current = true;
+ return () => {
+ mountedRef.current = false;
+ clearPolling();
+ };
+ }, [clearPolling]);
+
+ return {
+ trigger,
+ jobId,
+ status,
+ output,
+ error,
+ loading,
+ polling,
+ reset,
+ };
+ }
+ `
+ };
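
The template above emits a React hook that triggers a worker or queue over HTTP and then polls the job endpoint until a terminal status, a failure, or the poll deadline. As a reading aid, here is a minimal sketch of a client component consuming the generated file, assuming the hook is exported as useWorkflowJob (the name the next-steps text uses) and accepts the option names visible in its body; the exact signature is defined earlier in the template, and the component, worker id, and input below are hypothetical:

'use client';
import { useWorkflowJob } from '@/app/api/workflows/hooks/useWorkflowJob';

export function RunWorkerButton() {
  // 'daily-report' is a hypothetical worker id; autoPoll is an assumed
  // option name taken from the variables referenced in the template body.
  const { trigger, status, output, loading, polling, reset } = useWorkflowJob({
    type: 'worker',
    workerId: 'daily-report',
    autoPoll: true,
  });

  return (
    <div>
      <button disabled={loading || polling} onClick={() => trigger({ day: 'today' })}>
        {loading ? 'Starting…' : 'Run worker'}
      </button>
      <p>status: {status}</p>
      {status === 'completed' && <pre>{JSON.stringify(output, null, 2)}</pre>}
      {status === 'failed' && <button onClick={reset}>Reset</button>}
    </div>
  );
}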
+ var WORKFLOW_SETTINGS_SNIPPET = ` // Workflow + worker runtime configuration (job store, etc.)
+ workflowSettings: {
+ jobStore: {
+ // 'mongodb' | 'upstash-redis'
+ type:
+ (process.env.WORKER_DATABASE_TYPE as
+ | 'mongodb'
+ | 'upstash-redis') || 'upstash-redis',
+ mongodb: {
+ uri: process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI,
+ db:
+ process.env.DATABASE_MONGODB_DB ||
+ process.env.MONGODB_DB ||
+ 'ai_router',
+ workerJobsCollection:
+ process.env.MONGODB_WORKER_JOBS_COLLECTION || 'worker_jobs',
+ workflowStatusCollection:
+ process.env.MONGODB_WORKFLOW_STATUS_COLLECTION || 'workflow_status',
+ },
+ redis: {
+ url:
+ process.env.WORKER_UPSTASH_REDIS_REST_URL ||
+ process.env.UPSTASH_REDIS_REST_URL,
+ token:
+ process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||
+ process.env.UPSTASH_REDIS_REST_TOKEN,
+ keyPrefix:
+ process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX ||
+ 'worker:jobs:',
+ ttlSeconds:
+ Number(process.env.WORKER_JOBS_TTL_SECONDS ?? 60 * 60 * 24 * 7),
+ },
+ },
+ },`;
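
Every field in this snippet resolves through a WORKER_-prefixed environment variable before falling back to the generic one, so a worker deployment can point at a different store than the rest of the app; the store type itself defaults to 'upstash-redis'. A standalone sketch (illustrative only, not part of the package) that mirrors the Redis fallback chain:

// Env var names are the ones used in the snippet above; the helper itself
// is a reading aid, not the package's API.
function resolveRedisJobStore(env: Record<string, string | undefined>) {
  return {
    url: env.WORKER_UPSTASH_REDIS_REST_URL ?? env.UPSTASH_REDIS_REST_URL,
    token: env.WORKER_UPSTASH_REDIS_REST_TOKEN ?? env.UPSTASH_REDIS_REST_TOKEN,
    keyPrefix: env.WORKER_UPSTASH_REDIS_JOBS_PREFIX ?? 'worker:jobs:',
    // Same arithmetic as the snippet: a 7-day default TTL in seconds.
    ttlSeconds: Number(env.WORKER_JOBS_TTL_SECONDS ?? 60 * 60 * 24 * 7),
  };
}

// e.g. const store = resolveRedisJobStore(process.env);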
+ function writeFile(filePath, content, force) {
+ if (fs3.existsSync(filePath) && !force) {
+ return false;
+ }
+ const dir = path3.dirname(filePath);
+ if (!fs3.existsSync(dir)) {
+ fs3.mkdirSync(dir, { recursive: true });
+ }
+ fs3.writeFileSync(filePath, content, "utf-8");
+ return true;
+ }
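
writeFile is a guarded create: it refuses to touch an existing file unless force is set, creates parent directories as needed, and its boolean return feeds the created/skipped report in the command below. Expected behavior, with a hypothetical path:

writeFile('/tmp/demo/jobStore.ts', 'export {};', false); // true  - created
writeFile('/tmp/demo/jobStore.ts', 'changed', false);    // false - exists, left untouched
writeFile('/tmp/demo/jobStore.ts', 'changed', true);     // true  - overwritten with force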
+ function mergeMicrofoxConfig(configPath, force) {
+ if (!fs3.existsSync(configPath)) {
+ const content2 = `export const StudioConfig = {
+ appName: 'My App',
+ projectInfo: {
+ framework: 'next-js',
+ },
+ studioSettings: {
+ protection: {
+ enabled: false,
+ },
+ database: {
+ type: 'local',
+ },
+ },
+ ${WORKFLOW_SETTINGS_SNIPPET}
+ };
+ `;
+ fs3.writeFileSync(configPath, content2, "utf-8");
+ return true;
+ }
+ const content = fs3.readFileSync(configPath, "utf-8");
+ if (content.includes("workflowSettings")) {
+ if (!force) {
+ return false;
+ }
+ return false;
+ }
+ const lines = content.split("\n");
+ let insertIndex = -1;
+ let braceCount = 0;
+ let inStudioConfig = false;
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i];
+ if (line.includes("StudioConfig") && line.includes("=")) {
+ inStudioConfig = true;
+ }
+ if (inStudioConfig) {
+ const openBraces = (line.match(/{/g) || []).length;
+ const closeBraces = (line.match(/}/g) || []).length;
+ braceCount += openBraces - closeBraces;
+ if (braceCount === 0 && closeBraces > 0 && insertIndex === -1) {
+ insertIndex = i;
+ break;
+ }
+ }
+ }
+ if (insertIndex === -1) {
+ const lastBrace = content.lastIndexOf("}");
+ if (lastBrace !== -1) {
+ const before = content.slice(0, lastBrace);
+ const after = content.slice(lastBrace);
+ const newContent = before + ",\n" + WORKFLOW_SETTINGS_SNIPPET + "\n" + after;
+ fs3.writeFileSync(configPath, newContent, "utf-8");
+ return true;
+ }
+ return false;
+ }
+ const indent = lines[insertIndex].match(/^(\s*)/)?.[1] || " ";
+ const workflowLines = WORKFLOW_SETTINGS_SNIPPET.split("\n").map((l, idx) => {
+ if (idx === 0) return indent + l;
+ return indent + l;
+ });
+ lines.splice(insertIndex, 0, ...workflowLines);
+ fs3.writeFileSync(configPath, lines.join("\n"), "utf-8");
+ return true;
+ }
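
mergeMicrofoxConfig writes a complete starter config when the file is missing. Otherwise it brace-counts from the StudioConfig assignment line until the count returns to zero, and splices WORKFLOW_SETTINGS_SNIPPET in front of the line holding that closing brace (falling back to the last } in the file). Two behaviors worth noting: when the file already mentions workflowSettings, both branches return false, so even --force never rewrites an existing block; and the splice assumes the property before the insertion point already ends with a trailing comma. A hypothetical result for a minimal one-property config (snippet body abridged):

// After the merge, a file that ended with `};` is reshaped like this:
export const StudioConfig = {
  appName: 'My App',
  // Workflow + worker runtime configuration (job store, etc.)
  workflowSettings: {
    jobStore: {
      type: 'upstash-redis',
      // ...remaining fields exactly as in WORKFLOW_SETTINGS_SNIPPET above
    },
  },
};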
+ var boilerplateCommand = new import_commander3.Command().name("boilerplate").description("Create or update worker boilerplate files (job store, API routes, config)").option("--force", "Overwrite existing files", false).option("--app-dir <path>", "App directory path (default: app)", "app").option("--skip-config", "Skip microfox.config.ts updates", false).action((options) => {
+ const spinner = (0, import_ora3.default)("Creating boilerplate files...").start();
+ try {
+ const projectRoot = process.cwd();
+ const appDir = options.appDir || "app";
+ const apiDir = path3.join(appDir, "api", "workflows");
+ const force = options.force || false;
+ const skipConfig = options.skipConfig || false;
+ const filesCreated = [];
+ const filesSkipped = [];
+ for (const [relativePath, template] of Object.entries(TEMPLATES)) {
+ const filePath = path3.normalize(path3.join(projectRoot, apiDir, relativePath));
+ const written = writeFile(filePath, template, force);
+ if (written) {
+ filesCreated.push(path3.relative(projectRoot, filePath));
+ } else {
+ filesSkipped.push(path3.relative(projectRoot, filePath));
+ }
+ }
+ let configUpdated = false;
+ if (!skipConfig) {
+ const configPath = path3.join(projectRoot, "microfox.config.ts");
+ configUpdated = mergeMicrofoxConfig(configPath, force);
+ if (configUpdated) {
+ filesCreated.push("microfox.config.ts");
+ } else if (fs3.existsSync(configPath)) {
+ filesSkipped.push("microfox.config.ts");
+ }
+ }
+ spinner.succeed("Boilerplate files created");
+ if (filesCreated.length > 0) {
+ console.log(import_chalk3.default.green("\n\u2713 Created files:"));
+ filesCreated.forEach((f) => console.log(import_chalk3.default.gray(` - ${f}`)));
+ }
+ if (filesSkipped.length > 0) {
+ console.log(import_chalk3.default.yellow("\n\u26A0 Skipped existing files (use --force to overwrite):"));
+ filesSkipped.forEach((f) => console.log(import_chalk3.default.gray(` - ${f}`)));
+ }
+ console.log(
+ import_chalk3.default.blue(
+ `
+ \u{1F4DA} Next steps:
+ 1. Configure your job store in microfox.config.ts (workflowSettings.jobStore)
+ 2. Set environment variables (MONGODB_URI or UPSTASH_REDIS_*)
+ 3. Create your first worker: ${import_chalk3.default.yellow("npx ai-worker new <worker-id>")}
+ 4. Deploy workers: ${import_chalk3.default.yellow("npx ai-worker push")}
+ 5. Use ${import_chalk3.default.yellow("hooks/useWorkflowJob.ts")} in client components to trigger and poll workers/queues`
+ )
  );
  } catch (error) {
- spinner.fail("Failed to scaffold worker");
- console.error(import_chalk2.default.red(error?.stack || error?.message || String(error)));
+ spinner.fail("Failed to create boilerplate files");
+ console.error(import_chalk3.default.red(error?.stack || error?.message || String(error)));
  process.exitCode = 1;
  }
  });
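
Typical invocations, assuming the package registers the ai-worker bin that the next-steps text above references: npx ai-worker boilerplate scaffolds with defaults; npx ai-worker boilerplate --app-dir src/app --skip-config targets a src/-rooted Next.js app without touching microfox.config.ts; npx ai-worker boilerplate --force regenerates previously created files.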
 
  // src/index.ts
- var program = new import_commander3.Command();
- program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version("1.0.0");
+ var import_meta = {};
+ var __filename = (0, import_url2.fileURLToPath)(import_meta.url);
+ var __dirname = (0, import_path.dirname)(__filename);
+ var packageJsonPath = (0, import_path.join)(__dirname, "..", "package.json");
+ var packageJson = JSON.parse((0, import_fs.readFileSync)(packageJsonPath, "utf-8"));
+ var version = packageJson.version || "1.0.0";
+ var program = new import_commander4.Command();
+ program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version(version);
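
One caveat in this CJS output: the bundler shims import.meta as the empty object assigned to import_meta, so import_meta.url is undefined here and fileURLToPath(undefined) throws when dist/index.cjs is loaded, unless the bundle redefines import_meta.url elsewhere; an ESM build, where import.meta.url survives compilation, would be unaffected. The source was presumably the standard ESM pattern below (a reconstruction, not verified against the package):

import { readFileSync } from 'fs';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';

// Recreate __dirname for ESM, then read the CLI's own package.json so the
// --version flag always reports the published version.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageJson = JSON.parse(
  readFileSync(join(__dirname, '..', 'package.json'), 'utf-8')
);
const version: string = packageJson.version || '1.0.0';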
  program.addCommand(pushCommand);
  program.addCommand(newCommand);
+ program.addCommand(boilerplateCommand);
  program.parse(process.argv);
  var aiWorkerCli = program;
  // Annotate the CommonJS export names for ESM import in node: