@microfox/ai-worker-cli 1.0.1 → 1.0.3

This diff shows the published contents of package versions released to a supported public registry. It is provided for informational purposes only and reflects the changes between versions exactly as they appear in that registry.
package/dist/index.js CHANGED
@@ -1,7 +1,7 @@
  #!/usr/bin/env node

  // src/index.ts
- import { Command as Command2 } from "commander";
+ import { Command as Command3 } from "commander";

  // src/commands/push.ts
  import { Command } from "commander";
@@ -132,6 +132,50 @@ async function collectEnvUsageForWorkers(workerEntryFiles, projectRoot) {
  buildtimeKeys.delete("node");
  return { runtimeKeys, buildtimeKeys };
  }
+ async function collectCalleeWorkerIds(workers, projectRoot) {
+ void projectRoot;
+ const calleeIdsByWorker = /* @__PURE__ */ new Map();
+ const workerIds = new Set(workers.map((w) => w.id));
+ for (const worker of workers) {
+ const calleeIds = /* @__PURE__ */ new Set();
+ const visited = /* @__PURE__ */ new Set();
+ const queue = [worker.filePath];
+ while (queue.length > 0) {
+ const file = queue.pop();
+ const normalized = path.resolve(file);
+ if (visited.has(normalized)) continue;
+ visited.add(normalized);
+ if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;
+ const src = fs.readFileSync(normalized, "utf-8");
+ const re = /(?:ctx\.)?dispatchWorker\s*\(\s*['"]([^'"]+)['"]/g;
+ for (const match of src.matchAll(re)) {
+ if (match[1]) calleeIds.add(match[1]);
+ }
+ const specifiers = extractImportSpecifiers(src);
+ for (const spec of specifiers) {
+ if (!spec || !spec.startsWith(".")) continue;
+ const resolved = tryResolveLocalImport(normalized, spec);
+ if (resolved) queue.push(resolved);
+ }
+ }
+ if (calleeIds.size > 0) {
+ for (const calleeId of calleeIds) {
+ if (!workerIds.has(calleeId)) {
+ console.warn(
+ chalk.yellow(
+ `\u26A0\uFE0F Worker "${worker.id}" calls "${calleeId}" which is not in scanned workers (typo or other service?). Queue URL will not be auto-injected.`
+ )
+ );
+ }
+ }
+ calleeIdsByWorker.set(worker.id, calleeIds);
+ }
+ }
+ return calleeIdsByWorker;
+ }
+ function sanitizeWorkerIdForEnv(workerId) {
+ return workerId.replace(/-/g, "_").toUpperCase();
+ }
  function readJsonFile(filePath) {
  try {
  return JSON.parse(fs.readFileSync(filePath, "utf-8"));
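Note: collectCalleeWorkerIds walks each worker's local import graph and regex-scans the sources for dispatchWorker('...') call sites, so caller-to-callee edges are discovered statically, without executing user code. sanitizeWorkerIdForEnv defines the env-var naming that generateServerlessConfig later relies on. A minimal sketch of that mapping (the caller source is a hypothetical example):

    // Hypothetical caller source that the regex above would match:
    //   await ctx.dispatchWorker('resize-image', { url });
    //
    // The id found in the string literal is turned into the variable name
    // the caller reads at runtime:
    function sanitizeWorkerIdForEnv(workerId: string): string {
      return workerId.replace(/-/g, '_').toUpperCase();
    }

    sanitizeWorkerIdForEnv('resize-image'); // "RESIZE_IMAGE"
    // generateServerlessConfig later prefixes it: WORKER_QUEUE_URL_RESIZE_IMAGE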
@@ -184,6 +228,20 @@ async function collectRuntimeDependenciesForWorkers(workerEntryFiles, projectRoo
  deps.delete("@microfox/ai-worker");
  return deps;
  }
+ function getJobStoreType() {
+ const raw = process.env.WORKER_DATABASE_TYPE?.toLowerCase();
+ if (raw === "mongodb" || raw === "upstash-redis") return raw;
+ return "upstash-redis";
+ }
+ function filterDepsForJobStore(runtimeDeps, jobStoreType) {
+ const filtered = new Set(runtimeDeps);
+ filtered.delete("mongodb");
+ filtered.delete("@upstash/redis");
+ if (jobStoreType === "mongodb") filtered.add("mongodb");
+ else filtered.add("@upstash/redis");
+ if (runtimeDeps.has("mongodb")) filtered.add("mongodb");
+ return filtered;
+ }
  function buildDependenciesMap(projectRoot, deps) {
  const projectPkg = readJsonFile(path.join(projectRoot, "package.json")) || {};
  const projectDeps = projectPkg.dependencies || {};
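Note: the job-store backend is now chosen from WORKER_DATABASE_TYPE (defaulting to upstash-redis), and filterDepsForJobStore swaps the matching client package into the bundled dependency set. One subtlety visible above: a project that already depends on mongodb keeps it even when the Redis store is selected. A small standalone sketch of the selection:

    // Mirrors getJobStoreType above; any unrecognized value falls back to
    // the Upstash Redis job store.
    type JobStoreType = 'mongodb' | 'upstash-redis';

    function getJobStoreType(env: NodeJS.ProcessEnv = process.env): JobStoreType {
      const raw = env.WORKER_DATABASE_TYPE?.toLowerCase();
      return raw === 'mongodb' || raw === 'upstash-redis' ? raw : 'upstash-redis';
    }

    // WORKER_DATABASE_TYPE=mongodb -> the "mongodb" client is bundled
    // unset or anything else       -> "@upstash/redis" is bundled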
@@ -253,8 +311,116 @@ async function scanWorkers(aiPath = "app/ai") {
  }
  return workers;
  }
- async function generateHandlers(workers, outputDir) {
+ async function scanQueues(aiPath = "app/ai") {
+ const base = aiPath.replace(/\\/g, "/");
+ const pattern = `${base}/queues/**/*.queue.ts`;
+ const files = await glob(pattern);
+ const queues = [];
+ for (const filePath of files) {
+ try {
+ const content = fs.readFileSync(filePath, "utf-8");
+ const idMatch = content.match(/defineWorkerQueue\s*\(\s*\{[\s\S]*?id:\s*['"]([^'"]+)['"]/);
+ if (!idMatch) {
+ console.warn(chalk.yellow(`\u26A0\uFE0F Skipping ${filePath}: No queue id found in defineWorkerQueue`));
+ continue;
+ }
+ const queueId = idMatch[1];
+ const steps = [];
+ const stepsMatch = content.match(/steps:\s*\[([\s\S]*?)\]/);
+ if (stepsMatch) {
+ const stepsStr = stepsMatch[1];
+ const stepRegex = /\{\s*workerId:\s*['"]([^'"]+)['"](?:,\s*delaySeconds:\s*(\d+))?(?:,\s*mapInputFromPrev:\s*['"]([^'"]+)['"])?\s*\}/g;
+ let m;
+ while ((m = stepRegex.exec(stepsStr)) !== null) {
+ steps.push({
+ workerId: m[1],
+ delaySeconds: m[2] ? parseInt(m[2], 10) : void 0,
+ mapInputFromPrev: m[3]
+ });
+ }
+ }
+ let schedule;
+ const scheduleStrMatch = content.match(/schedule:\s*['"]([^'"]+)['"]/);
+ const scheduleObjMatch = content.match(/schedule:\s*(\{[^}]+(?:\{[^}]*\}[^}]*)*\})/);
+ if (scheduleStrMatch) {
+ schedule = scheduleStrMatch[1];
+ } else if (scheduleObjMatch) {
+ try {
+ schedule = new Function("return " + scheduleObjMatch[1])();
+ } catch {
+ schedule = void 0;
+ }
+ }
+ queues.push({ id: queueId, filePath, steps, schedule });
+ } catch (error) {
+ console.error(chalk.red(`\u274C Error processing ${filePath}:`), error);
+ }
+ }
+ return queues;
+ }
+ function generateQueueRegistry(queues, outputDir, projectRoot) {
+ const generatedDir = path.join(outputDir, "generated");
+ if (!fs.existsSync(generatedDir)) {
+ fs.mkdirSync(generatedDir, { recursive: true });
+ }
+ const registryContent = `/**
+ * Auto-generated queue registry. DO NOT EDIT.
+ * Generated by @microfox/ai-worker-cli from .queue.ts files.
+ */
+
+ const QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};
+
+ export function getQueueById(queueId) {
+ return QUEUES.find((q) => q.id === queueId);
+ }
+
+ export function getNextStep(queueId, stepIndex) {
+ const queue = getQueueById(queueId);
+ if (!queue || !queue.steps || stepIndex < 0 || stepIndex >= queue.steps.length - 1) {
+ return undefined;
+ }
+ const step = queue.steps[stepIndex + 1];
+ return step ? { workerId: step.workerId, delaySeconds: step.delaySeconds, mapInputFromPrev: step.mapInputFromPrev } : undefined;
+ }
+
+ export function invokeMapInput(_queueId, _stepIndex, prevOutput, _initialInput) {
+ return prevOutput;
+ }
+ `;
+ const registryPath = path.join(generatedDir, "workerQueues.registry.js");
+ fs.writeFileSync(registryPath, registryContent);
+ console.log(chalk.green(`\u2713 Generated queue registry: ${registryPath}`));
+ }
+ function getWorkersInQueues(queues) {
+ const set = /* @__PURE__ */ new Set();
+ for (const q of queues) {
+ for (const step of q.steps) {
+ set.add(step.workerId);
+ }
+ }
+ return set;
+ }
+ function mergeQueueCallees(calleeIds, queues, workers) {
+ const merged = new Map(calleeIds);
+ const workerIds = new Set(workers.map((w) => w.id));
+ for (const queue of queues) {
+ for (let i = 0; i < queue.steps.length - 1; i++) {
+ const fromWorkerId = queue.steps[i].workerId;
+ const toWorkerId = queue.steps[i + 1].workerId;
+ if (!workerIds.has(toWorkerId)) continue;
+ let callees = merged.get(fromWorkerId);
+ if (!callees) {
+ callees = /* @__PURE__ */ new Set();
+ merged.set(fromWorkerId, callees);
+ }
+ callees.add(toWorkerId);
+ }
+ }
+ return merged;
+ }
+ async function generateHandlers(workers, outputDir, queues = []) {
  const handlersDir = path.join(outputDir, "handlers");
+ const workersInQueues = getWorkersInQueues(queues);
  if (fs.existsSync(handlersDir)) {
  fs.rmSync(handlersDir, { recursive: true, force: true });
  }
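Note: scanQueues discovers app/ai/queues/**/*.queue.ts files and extracts id, steps, and schedule with regexes rather than by importing the module, so steps must be written as literal objects and only the workerId, delaySeconds, and mapInputFromPrev fields are recognized. A hypothetical queue file the scanner would parse (the defineWorkerQueue import path is assumed, not confirmed by this diff):

    // app/ai/queues/nightly-report.queue.ts (illustrative)
    import { defineWorkerQueue } from '@microfox/ai-worker'; // import path assumed

    export default defineWorkerQueue({
      id: 'nightly-report',
      // Literal step objects only; computed or spread steps would be
      // invisible to the regex-based scanner.
      steps: [
        { workerId: 'collect-metrics' },
        { workerId: 'render-report', delaySeconds: 30 },
        { workerId: 'send-email' },
      ],
      // Either a string expression or a literal object accepted by
      // processScheduleEvents.
      schedule: 'cron(0 3 * * ? *)',
    });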
@@ -279,18 +445,76 @@ async function generateHandlers(workers, outputDir) {
  const exportName = exportMatch ? exportMatch[2] : "worker";
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const workerRef = defaultExport ? "workerModule.default" : `workerModule.${exportName}`;
- const tempEntryContent = `
+ const inQueue = workersInQueues.has(worker.id);
+ const registryRelPath = path.relative(path.dirname(path.resolve(handlerFile)), path.join(outputDir, "generated", "workerQueues.registry")).split(path.sep).join("/");
+ const registryImportPath = registryRelPath.startsWith(".") ? registryRelPath : "./" + registryRelPath;
+ const handlerCreation = inQueue ? `
+ import { createLambdaHandler, wrapHandlerForQueue } from '@microfox/ai-worker/handler';
+ import * as queueRegistry from '${registryImportPath}';
+ import * as workerModule from '${relativeImportPath}';
+
+ const WORKER_LOG_PREFIX = '[WorkerEntrypoint]';
+
+ const workerAgent = ${workerRef};
+ if (!workerAgent || typeof workerAgent.handler !== 'function') {
+ throw new Error('Worker module must export a createWorker result (default or named) with .handler');
+ }
+
+ const queueRuntime = {
+ getNextStep: queueRegistry.getNextStep,
+ invokeMapInput: queueRegistry.invokeMapInput,
+ };
+ const wrappedHandler = wrapHandlerForQueue(workerAgent.handler, queueRuntime);
+
+ const baseHandler = createLambdaHandler(wrappedHandler, workerAgent.outputSchema);
+
+ export const handler = async (event: any, context: any) => {
+ const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;
+ try {
+ console.log(WORKER_LOG_PREFIX, {
+ workerId: workerAgent.id,
+ inQueue: true,
+ records,
+ requestId: (context as any)?.awsRequestId,
+ });
+ } catch {
+ // Best-effort logging only
+ }
+ return baseHandler(event, context);
+ };
+
+ export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
+ ` : `
  import { createLambdaHandler } from '@microfox/ai-worker/handler';
  import * as workerModule from '${relativeImportPath}';

+ const WORKER_LOG_PREFIX = '[WorkerEntrypoint]';
+
  const workerAgent = ${workerRef};
  if (!workerAgent || typeof workerAgent.handler !== 'function') {
  throw new Error('Worker module must export a createWorker result (default or named) with .handler');
  }

- export const handler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);
+ const baseHandler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);
+
+ export const handler = async (event: any, context: any) => {
+ const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;
+ try {
+ console.log(WORKER_LOG_PREFIX, {
+ workerId: workerAgent.id,
+ inQueue: false,
+ records,
+ requestId: (context as any)?.awsRequestId,
+ });
+ } catch {
+ // Best-effort logging only
+ }
+ return baseHandler(event, context);
+ };
+
  export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
  `;
+ const tempEntryContent = handlerCreation;
  fs.writeFileSync(tempEntryFile, tempEntryContent);
  try {
  const fixLazyCachePlugin = {
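Note: a worker that appears in any queue is now bundled behind wrapHandlerForQueue, fed by the generated registry's getNextStep/invokeMapInput; workers outside queues keep the plain createLambdaHandler path, and both variants gain an entry log line. The wrapper itself lives in @microfox/ai-worker/handler and is not shown in this diff; the sketch below only records the contract implied by the template above, not the package's actual implementation:

    // Inferred contract only -- the real wrapHandlerForQueue is not in this diff.
    interface QueueRuntime {
      getNextStep(queueId: string, stepIndex: number):
        | { workerId: string; delaySeconds?: number; mapInputFromPrev?: string }
        | undefined;
      invokeMapInput(
        queueId: string,
        stepIndex: number,
        prevOutput: unknown,
        initialInput: unknown,
      ): unknown;
    }

    type StepHandler = (params: { input: any; ctx: any }) => Promise<unknown>;

    // Conceptually: run the step, then consult the registry to decide whether
    // (and with what mapped input) to enqueue the next step.
    declare function wrapHandlerForQueue(handler: StepHandler, runtime: QueueRuntime): StepHandler;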
@@ -702,7 +926,76 @@ export const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayPr
  fs.unlinkSync(tempEntryFile);
  console.log(chalk.green(`\u2713 Generated /workers/trigger handler`));
  }
- function generateWorkersConfigHandler(outputDir, workers, serviceName) {
+ function generateQueueStarterHandler(outputDir, queue, serviceName) {
+ const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, "");
+ const handlerFile = path.join(outputDir, "handlers", `queue-starter-${safeId}.js`);
+ const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
+ const handlerDir = path.dirname(handlerFile);
+ if (!fs.existsSync(handlerDir)) {
+ fs.mkdirSync(handlerDir, { recursive: true });
+ }
+ const firstWorkerId = queue.steps[0]?.workerId;
+ if (!firstWorkerId) return;
+ const handlerContent = `/**
+ * Auto-generated queue-starter for queue "${queue.id}"
+ * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
+ */
+
+ import { ScheduledHandler } from 'aws-lambda';
+ import { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';
+
+ const QUEUE_ID = ${JSON.stringify(queue.id)};
+ const FIRST_WORKER_ID = ${JSON.stringify(firstWorkerId)};
+ const SERVICE_NAME = ${JSON.stringify(serviceName)};
+
+ export const handler: ScheduledHandler = async () => {
+ const stage = process.env.ENVIRONMENT || process.env.STAGE || 'prod';
+ const region = process.env.AWS_REGION || 'us-east-1';
+ const queueName = \`\${SERVICE_NAME}-\${FIRST_WORKER_ID}-\${stage}\`;
+
+ const sqs = new SQSClient({ region });
+ const { QueueUrl } = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
+ if (!QueueUrl) {
+ throw new Error('Queue URL not found: ' + queueName);
+ }
+
+ const jobId = 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);
+ const initialInput = {};
+ const messageBody = {
+ workerId: FIRST_WORKER_ID,
+ jobId,
+ input: {
+ ...initialInput,
+ __workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput },
+ },
+ context: {},
+ metadata: { __workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput } },
+ timestamp: new Date().toISOString(),
+ };
+
+ await sqs.send(new SendMessageCommand({
+ QueueUrl,
+ MessageBody: JSON.stringify(messageBody),
+ }));
+
+ console.log('[queue-starter] Dispatched first worker for queue:', { queueId: QUEUE_ID, jobId, workerId: FIRST_WORKER_ID });
+ };
+ `;
+ fs.writeFileSync(tempEntryFile, handlerContent);
+ esbuild.buildSync({
+ entryPoints: [tempEntryFile],
+ bundle: true,
+ platform: "node",
+ target: "node20",
+ outfile: handlerFile,
+ external: ["aws-sdk", "canvas", "@microfox/puppeteer-sls", "@sparticuz/chromium"],
+ packages: "bundle",
+ logLevel: "error"
+ });
+ fs.unlinkSync(tempEntryFile);
+ console.log(chalk.green(`\u2713 Generated queue-starter for ${queue.id}`));
+ }
+ function generateWorkersConfigHandler(outputDir, workers, serviceName, queues = []) {
  const handlerFile = path.join(outputDir, "handlers", "workers-config.js");
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const handlerDir = path.dirname(handlerFile);
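Note: each queue with a schedule gets a small starter Lambda that resolves the first step's SQS queue by its conventional name (<serviceName>-<workerId>-<stage>) and seeds the run with __workerQueue metadata at stepIndex 0. The message shape it sends, restated from the template above with illustrative values:

    // Shape of the SQS message body emitted by a generated queue-starter.
    const messageBody = {
      workerId: 'collect-metrics',            // first step of the queue
      jobId: 'job-1718000000000-ab12cd34e',   // 'job-' + Date.now() + random suffix
      input: {
        __workerQueue: { id: 'nightly-report', stepIndex: 0, initialInput: {} },
      },
      context: {},
      metadata: { __workerQueue: { id: 'nightly-report', stepIndex: 0, initialInput: {} } },
      timestamp: new Date().toISOString(),
    };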
@@ -718,8 +1011,9 @@ function generateWorkersConfigHandler(outputDir, workers, serviceName) {
  import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
  import { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';

- // Worker IDs embedded at build time so this endpoint doesn't depend on any generated files.
+ // Worker IDs and queue definitions embedded at build time.
  const WORKER_IDS: string[] = ${JSON.stringify(workers.map((w) => w.id), null, 2)};
+ const QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};
  const SERVICE_NAME = ${JSON.stringify(serviceName)};

  export const handler = async (
@@ -790,6 +1084,7 @@ export const handler = async (
  stage,
  region,
  workers,
+ queues: QUEUES,
  ...(debug ? { attemptedQueueNames, errors } : {}),
  }),
  };
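Note: the workers-config endpoint now returns the queue definitions alongside the worker list. An illustrative response body (the workers entries are schematic, since their shape is built outside this hunk; attemptedQueueNames and errors appear only when the debug flag is set):

    {
      "stage": "prod",
      "region": "us-east-1",
      "workers": [ ... ],
      "queues": [
        {
          "id": "nightly-report",
          "steps": [
            { "workerId": "collect-metrics" },
            { "workerId": "render-report", "delaySeconds": 30 }
          ],
          "schedule": "cron(0 3 * * ? *)"
        }
      ]
    }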
@@ -899,7 +1194,7 @@ function processScheduleEvents(scheduleConfig) {
  }
  return events;
  }
- function generateServerlessConfig(workers, stage, region, envVars, serviceName) {
+ function generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds = /* @__PURE__ */ new Map(), queues = []) {
  const resources = {
  Resources: {},
  Outputs: {}
@@ -988,6 +1283,21 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
  if (worker.workerConfig?.layers?.length) {
  functions[functionName].layers = worker.workerConfig.layers;
  }
+ const callees = calleeIds.get(worker.id);
+ if (callees && callees.size > 0) {
+ const env = {};
+ for (const calleeId of callees) {
+ const calleeWorker = workers.find((w) => w.id === calleeId);
+ if (calleeWorker) {
+ const queueLogicalId = `WorkerQueue${calleeWorker.id.replace(/[^a-zA-Z0-9]/g, "")}${stage}`;
+ const envKey = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeId)}`;
+ env[envKey] = { Ref: queueLogicalId };
+ }
+ }
+ if (Object.keys(env).length > 0) {
+ functions[functionName].environment = env;
+ }
+ }
  }
  functions["getDocs"] = {
  handler: "handlers/docs.handler",
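Note: for every caller-to-callee edge found by the scanner (or implied by consecutive queue steps), the caller's Lambda gets a WORKER_QUEUE_URL_* variable resolved through a CloudFormation Ref to the callee's queue. A sketch of the config fragment this produces for a hypothetical ingest worker that dispatches resize-image on stage prod (function name and handler path are illustrative):

    // Illustrative serverless config fragment.
    functions['ingest'] = {
      handler: 'handlers/ingest.handler',
      environment: {
        // 'resize-image' -> sanitizeWorkerIdForEnv -> 'RESIZE_IMAGE'
        // logical id: 'WorkerQueue' + 'resizeimage' + 'prod'
        WORKER_QUEUE_URL_RESIZE_IMAGE: { Ref: 'WorkerQueueresizeimageprod' },
      },
    };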
@@ -1025,8 +1335,21 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
  }
  ]
  };
+ for (const queue of queues) {
+ if (queue.schedule) {
+ const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, "");
+ const fnName = `queueStarter${safeId}`;
+ const scheduleEvents = processScheduleEvents(queue.schedule);
+ functions[fnName] = {
+ handler: `handlers/queue-starter-${safeId}.handler`,
+ timeout: 60,
+ memorySize: 128,
+ events: scheduleEvents
+ };
+ }
+ }
  const safeEnvVars = {};
- const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+ const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "UPSTASH_", "WORKER_", "WORKERS_", "WORKFLOW_", "REMOTION_", "QUEUE_JOB_", "DEBUG_WORKER_QUEUES"];
  for (const [key, value] of Object.entries(envVars)) {
  if (allowedPrefixes.some((prefix) => key.startsWith(prefix))) {
  safeEnvVars[key] = value;
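Note: the env allowlist grows from seven prefixes to twelve, so WORKER_, UPSTASH_, WORKFLOW_, QUEUE_JOB_, and the exact name DEBUG_WORKER_QUEUES now pass through to the deployed functions. Quick check of the filter's behavior with illustrative keys:

    const allowedPrefixes = [
      'OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'UPSTASH_',
      'WORKER_', 'WORKERS_', 'WORKFLOW_', 'REMOTION_', 'QUEUE_JOB_', 'DEBUG_WORKER_QUEUES',
    ];
    const passes = (key: string) => allowedPrefixes.some((p) => key.startsWith(p));

    passes('WORKER_DATABASE_TYPE');   // true  (new WORKER_ prefix)
    passes('UPSTASH_REDIS_REST_URL'); // true  (new UPSTASH_ prefix)
    passes('DEBUG_WORKER_QUEUES');    // true  (exact name listed as a "prefix")
    passes('MY_SECRET');              // false (not allowlisted)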
@@ -1051,11 +1374,14 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
  service: serviceName,
  package: {
  excludeDevDependencies: true,
+ individually: true,
+ // Handlers are fully bundled by esbuild (packages: 'bundle'); exclude node_modules to stay under Lambda 250 MB limit
  patterns: [
  "!venv/**",
  "!.idea/**",
  "!.vscode/**",
  "!src/**",
+ "!node_modules/**",
  "!node_modules/serverless-offline/**",
  "!node_modules/typescript/**",
  "!node_modules/@types/**",
@@ -1176,7 +1502,9 @@ async function build2(args) {
  workers.map((w) => w.filePath),
  process.cwd()
  );
- const dependencies = buildDependenciesMap(process.cwd(), runtimeDeps);
+ const jobStoreType = getJobStoreType();
+ const filteredDeps = filterDepsForJobStore(runtimeDeps, jobStoreType);
+ const dependencies = buildDependenciesMap(process.cwd(), filteredDeps);
  const packageJson = {
  name: "ai-router-workers",
  version: "1.0.0",
@@ -1235,8 +1563,13 @@ async function build2(args) {
  console.warn(chalk.yellow("\u26A0\uFE0F Failed to parse microfox.json, using default service name"));
  }
  }
+ const queues = await scanQueues(aiPath);
+ if (queues.length > 0) {
+ console.log(chalk.blue(`\u2139\uFE0F Found ${queues.length} queue(s): ${queues.map((q) => q.id).join(", ")}`));
+ generateQueueRegistry(queues, serverlessDir, process.cwd());
+ }
  ora("Generating handlers...").start().succeed("Generated handlers");
- await generateHandlers(workers, serverlessDir);
+ await generateHandlers(workers, serverlessDir, queues);
  const extractSpinner = ora("Extracting worker configs from bundled handlers...").start();
  for (const worker of workers) {
  try {
@@ -1283,17 +1616,24 @@ async function build2(args) {
  }
  }
  extractSpinner.succeed("Extracted configs");
- generateWorkersConfigHandler(serverlessDir, workers, serviceName);
+ generateWorkersConfigHandler(serverlessDir, workers, serviceName, queues);
  generateDocsHandler(serverlessDir, serviceName, stage, region);
  generateTriggerHandler(serverlessDir, serviceName);
- const config = generateServerlessConfig(workers, stage, region, envVars, serviceName);
+ for (const queue of queues) {
+ if (queue.schedule) {
+ generateQueueStarterHandler(serverlessDir, queue, serviceName);
+ }
+ }
+ let calleeIds = await collectCalleeWorkerIds(workers, process.cwd());
+ calleeIds = mergeQueueCallees(calleeIds, queues, workers);
+ const config = generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds, queues);
  const envStage = fs.existsSync(microfoxJsonPath) ? "prod" : stage;
  const safeEnvVars = {
  ENVIRONMENT: envStage,
  STAGE: envStage,
  NODE_ENV: envStage
  };
- const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+ const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "UPSTASH_", "WORKER_", "WORKERS_", "WORKFLOW_", "REMOTION_", "QUEUE_JOB_", "DEBUG_WORKER_QUEUES"];
  for (const [key, value] of Object.entries(envVars)) {
  if (key.startsWith("AWS_")) continue;
  if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {
@@ -1312,8 +1652,8 @@ async function build2(args) {
  async function deploy(args) {
  const stage = args.stage || process.env.STAGE || "prod";
  const region = args.region || process.env.AWS_REGION || "us-east-1";
- const skipDeploy = args["skip-deploy"] || false;
- const skipInstall = args["skip-install"] || false;
+ const skipDeploy = args.skipDeploy ?? args["skip-deploy"] ?? false;
+ const skipInstall = args.skipInstall ?? args["skip-install"] ?? false;
  if (skipDeploy) {
  console.log(chalk.yellow("\u23ED\uFE0F Skipping deployment (--skip-deploy flag)"));
  return;
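Note: Commander stores --skip-deploy on the camelCase property options.skipDeploy, so the old args["skip-deploy"] lookup could never match; the fix reads the camelCase key first and keeps the kebab-case lookup as a fallback. A quick demonstration:

    import { Command } from 'commander';

    const program = new Command()
      .option('--skip-deploy', 'skip the serverless deploy step')
      .parse(['node', 'cli', '--skip-deploy'], { from: 'node' });

    const opts = program.opts();
    opts.skipDeploy;      // true      -- what Commander actually sets
    opts['skip-deploy'];  // undefined -- why the old check was always false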
@@ -1367,10 +1707,86 @@ var pushCommand = new Command().name("push").description("Build and deploy backg
  await deploy(options);
  });

+ // src/commands/new.ts
+ import { Command as Command2 } from "commander";
+ import * as fs2 from "fs";
+ import * as path2 from "path";
+ import chalk2 from "chalk";
+ import ora2 from "ora";
+ var newCommand = new Command2().name("new").description("Scaffold a new background worker file").argument("<id>", "Worker ID (used as the worker id and filename)").option("--dir <path>", "Directory for the worker file", "app/ai/workers").option("--schedule <expression>", 'Optional schedule expression (e.g. "cron(0 3 * * ? *)" or "rate(1 hour)")').option("--timeout <seconds>", "Lambda timeout in seconds", "300").option("--memory <mb>", "Lambda memory size in MB", "512").action((id, options) => {
+ const spinner = ora2("Scaffolding worker...").start();
+ try {
+ const projectRoot = process.cwd();
+ const dir = path2.resolve(projectRoot, options.dir || "app/ai/workers");
+ if (!fs2.existsSync(dir)) {
+ fs2.mkdirSync(dir, { recursive: true });
+ }
+ const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, "-");
+ const filePath = path2.join(dir, `${fileSafeId}.worker.ts`);
+ if (fs2.existsSync(filePath)) {
+ spinner.fail(`File already exists: ${path2.relative(projectRoot, filePath)}`);
+ process.exitCode = 1;
+ return;
+ }
+ const timeout = Number(options.timeout || "300") || 300;
+ const memorySize = Number(options.memory || "512") || 512;
+ const scheduleLine = options.schedule ? ` schedule: '${options.schedule}',
+ ` : "";
+ const contents = `import { createWorker, type WorkerConfig } from '@microfox/ai-worker';
+ import { z } from 'zod';
+ import type { WorkerHandlerParams } from '@microfox/ai-worker/handler';
+
+ const InputSchema = z.object({
+ // TODO: define input fields
+ });
+
+ const OutputSchema = z.object({
+ // TODO: define output fields
+ });
+
+ type Input = z.infer<typeof InputSchema>;
+ type Output = z.infer<typeof OutputSchema>;
+
+ export const workerConfig: WorkerConfig = {
+ timeout: ${timeout},
+ memorySize: ${memorySize},
+ ${scheduleLine}};
+
+ export default createWorker<typeof InputSchema, Output>({
+ id: '${id}',
+ inputSchema: InputSchema,
+ outputSchema: OutputSchema,
+ async handler({ input, ctx }: WorkerHandlerParams<Input, Output>) {
+ const { jobId, workerId, jobStore, dispatchWorker } = ctx;
+ console.log('[${id}] start', { jobId, workerId });
+
+ await jobStore?.update({ status: 'running' });
+
+ // TODO: implement your business logic here
+ const result: Output = {} as any;
+
+ await jobStore?.update({ status: 'completed', output: result });
+ return result;
+ },
+ });
+ `;
+ fs2.writeFileSync(filePath, contents, "utf-8");
+ spinner.succeed(
+ `Created worker: ${chalk2.cyan(path2.relative(projectRoot, filePath))}
+ Next: run ${chalk2.yellow("npx @microfox/ai-worker-cli@latest push")} to build & deploy your workers.`
+ );
+ } catch (error) {
+ spinner.fail("Failed to scaffold worker");
+ console.error(chalk2.red(error?.stack || error?.message || String(error)));
+ process.exitCode = 1;
+ }
+ });
+
  // src/index.ts
- var program = new Command2();
- program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version("0.1.0");
+ var program = new Command3();
+ program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version("1.0.0");
  program.addCommand(pushCommand);
+ program.addCommand(newCommand);
  program.parse(process.argv);
  var aiWorkerCli = program;
  export {
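Note: this release also adds the new subcommand, which scaffolds an <id>.worker.ts file with zod input/output schemas and a workerConfig, and registers it on the program (the CLI's reported version moves from 0.1.0 to 1.0.0). Typical usage, following the defaults above (the worker id is illustrative):

    npx @microfox/ai-worker-cli@latest new resize-image --schedule "rate(1 hour)" --timeout 120
    # -> creates app/ai/workers/resize-image.worker.ts
    npx @microfox/ai-worker-cli@latest push
    # -> builds the handlers and deploys the workers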