@microfox/ai-worker-cli 1.0.1 → 1.0.3

package/dist/index.cjs CHANGED
@@ -33,7 +33,7 @@ __export(index_exports, {
  aiWorkerCli: () => aiWorkerCli
  });
  module.exports = __toCommonJS(index_exports);
- var import_commander2 = require("commander");
+ var import_commander3 = require("commander");
 
  // src/commands/push.ts
  var import_commander = require("commander");
@@ -164,6 +164,50 @@ async function collectEnvUsageForWorkers(workerEntryFiles, projectRoot) {
  buildtimeKeys.delete("node");
  return { runtimeKeys, buildtimeKeys };
  }
+ async function collectCalleeWorkerIds(workers, projectRoot) {
+ void projectRoot;
+ const calleeIdsByWorker = /* @__PURE__ */ new Map();
+ const workerIds = new Set(workers.map((w) => w.id));
+ for (const worker of workers) {
+ const calleeIds = /* @__PURE__ */ new Set();
+ const visited = /* @__PURE__ */ new Set();
+ const queue = [worker.filePath];
+ while (queue.length > 0) {
+ const file = queue.pop();
+ const normalized = path.resolve(file);
+ if (visited.has(normalized)) continue;
+ visited.add(normalized);
+ if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;
+ const src = fs.readFileSync(normalized, "utf-8");
+ const re = /(?:ctx\.)?dispatchWorker\s*\(\s*['"]([^'"]+)['"]/g;
+ for (const match of src.matchAll(re)) {
+ if (match[1]) calleeIds.add(match[1]);
+ }
+ const specifiers = extractImportSpecifiers(src);
+ for (const spec of specifiers) {
+ if (!spec || !spec.startsWith(".")) continue;
+ const resolved = tryResolveLocalImport(normalized, spec);
+ if (resolved) queue.push(resolved);
+ }
+ }
+ if (calleeIds.size > 0) {
+ for (const calleeId of calleeIds) {
+ if (!workerIds.has(calleeId)) {
+ console.warn(
+ import_chalk.default.yellow(
+ `\u26A0\uFE0F Worker "${worker.id}" calls "${calleeId}" which is not in scanned workers (typo or other service?). Queue URL will not be auto-injected.`
+ )
+ );
+ }
+ }
+ calleeIdsByWorker.set(worker.id, calleeIds);
+ }
+ }
+ return calleeIdsByWorker;
+ }
+ function sanitizeWorkerIdForEnv(workerId) {
+ return workerId.replace(/-/g, "_").toUpperCase();
+ }
  function readJsonFile(filePath) {
  try {
  return JSON.parse(fs.readFileSync(filePath, "utf-8"));
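Note on the new callee scan above: collectCalleeWorkerIds walks each worker's local import graph and regex-matches dispatchWorker calls so the deploy step knows which SQS queue URLs to inject. A minimal sketch of the matching and the env-key derivation (the worker id is hypothetical):

// In some worker source file; both forms match the regex above:
//   await ctx.dispatchWorker('resize-image', { key: 'photo.png' });
//   dispatchWorker("resize-image", input);
// The scan records 'resize-image' as a callee of the calling worker, and
sanitizeWorkerIdForEnv('resize-image'); // => 'RESIZE_IMAGE'
// which later yields the injected env var WORKER_QUEUE_URL_RESIZE_IMAGE
// (see the generateServerlessConfig hunk further down).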
@@ -216,6 +260,20 @@ async function collectRuntimeDependenciesForWorkers(workerEntryFiles, projectRoo
  deps.delete("@microfox/ai-worker");
  return deps;
  }
+ function getJobStoreType() {
+ const raw = process.env.WORKER_DATABASE_TYPE?.toLowerCase();
+ if (raw === "mongodb" || raw === "upstash-redis") return raw;
+ return "upstash-redis";
+ }
+ function filterDepsForJobStore(runtimeDeps, jobStoreType) {
+ const filtered = new Set(runtimeDeps);
+ filtered.delete("mongodb");
+ filtered.delete("@upstash/redis");
+ if (jobStoreType === "mongodb") filtered.add("mongodb");
+ else filtered.add("@upstash/redis");
+ if (runtimeDeps.has("mongodb")) filtered.add("mongodb");
+ return filtered;
+ }
  function buildDependenciesMap(projectRoot, deps) {
  const projectPkg = readJsonFile(path.join(projectRoot, "package.json")) || {};
  const projectDeps = projectPkg.dependencies || {};
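The two helpers above pin the deploy bundle to exactly one job-store driver, defaulting to Upstash Redis. A sketch of the resulting dependency sets, assuming a project whose workers otherwise only pull in zod:

const runtimeDeps = new Set(['zod']);
filterDepsForJobStore(runtimeDeps, getJobStoreType());
// WORKER_DATABASE_TYPE unset/other => Set { 'zod', '@upstash/redis' }
// WORKER_DATABASE_TYPE=mongodb     => Set { 'zod', 'mongodb' }
// If the project itself already depends on 'mongodb', it is kept either way.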
@@ -285,8 +343,116 @@ async function scanWorkers(aiPath = "app/ai") {
  }
  return workers;
  }
- async function generateHandlers(workers, outputDir) {
+ async function scanQueues(aiPath = "app/ai") {
+ const base = aiPath.replace(/\\/g, "/");
+ const pattern = `${base}/queues/**/*.queue.ts`;
+ const files = await (0, import_glob.glob)(pattern);
+ const queues = [];
+ for (const filePath of files) {
+ try {
+ const content = fs.readFileSync(filePath, "utf-8");
+ const idMatch = content.match(/defineWorkerQueue\s*\(\s*\{[\s\S]*?id:\s*['"]([^'"]+)['"]/);
+ if (!idMatch) {
+ console.warn(import_chalk.default.yellow(`\u26A0\uFE0F Skipping ${filePath}: No queue id found in defineWorkerQueue`));
+ continue;
+ }
+ const queueId = idMatch[1];
+ const steps = [];
+ const stepsMatch = content.match(/steps:\s*\[([\s\S]*?)\]/);
+ if (stepsMatch) {
+ const stepsStr = stepsMatch[1];
+ const stepRegex = /\{\s*workerId:\s*['"]([^'"]+)['"](?:,\s*delaySeconds:\s*(\d+))?(?:,\s*mapInputFromPrev:\s*['"]([^'"]+)['"])?\s*\}/g;
+ let m;
+ while ((m = stepRegex.exec(stepsStr)) !== null) {
+ steps.push({
+ workerId: m[1],
+ delaySeconds: m[2] ? parseInt(m[2], 10) : void 0,
+ mapInputFromPrev: m[3]
+ });
+ }
+ }
+ let schedule;
+ const scheduleStrMatch = content.match(/schedule:\s*['"]([^'"]+)['"]/);
+ const scheduleObjMatch = content.match(/schedule:\s*(\{[^}]+(?:\{[^}]*\}[^}]*)*\})/);
+ if (scheduleStrMatch) {
+ schedule = scheduleStrMatch[1];
+ } else if (scheduleObjMatch) {
+ try {
+ schedule = new Function("return " + scheduleObjMatch[1])();
+ } catch {
+ schedule = void 0;
+ }
+ }
+ queues.push({ id: queueId, filePath, steps, schedule });
+ } catch (error) {
+ console.error(import_chalk.default.red(`\u274C Error processing ${filePath}:`), error);
+ }
+ }
+ return queues;
+ }
+ function generateQueueRegistry(queues, outputDir, projectRoot) {
+ const generatedDir = path.join(outputDir, "generated");
+ if (!fs.existsSync(generatedDir)) {
+ fs.mkdirSync(generatedDir, { recursive: true });
+ }
+ const registryContent = `/**
+ * Auto-generated queue registry. DO NOT EDIT.
+ * Generated by @microfox/ai-worker-cli from .queue.ts files.
+ */
+
+ const QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};
+
+ export function getQueueById(queueId) {
+ return QUEUES.find((q) => q.id === queueId);
+ }
+
+ export function getNextStep(queueId, stepIndex) {
+ const queue = getQueueById(queueId);
+ if (!queue || !queue.steps || stepIndex < 0 || stepIndex >= queue.steps.length - 1) {
+ return undefined;
+ }
+ const step = queue.steps[stepIndex + 1];
+ return step ? { workerId: step.workerId, delaySeconds: step.delaySeconds, mapInputFromPrev: step.mapInputFromPrev } : undefined;
+ }
+
+ export function invokeMapInput(_queueId, _stepIndex, prevOutput, _initialInput) {
+ return prevOutput;
+ }
+ `;
+ const registryPath = path.join(generatedDir, "workerQueues.registry.js");
+ fs.writeFileSync(registryPath, registryContent);
+ console.log(import_chalk.default.green(`\u2713 Generated queue registry: ${registryPath}`));
+ }
+ function getWorkersInQueues(queues) {
+ const set = /* @__PURE__ */ new Set();
+ for (const q of queues) {
+ for (const step of q.steps) {
+ set.add(step.workerId);
+ }
+ }
+ return set;
+ }
+ function mergeQueueCallees(calleeIds, queues, workers) {
+ const merged = new Map(calleeIds);
+ const workerIds = new Set(workers.map((w) => w.id));
+ for (const queue of queues) {
+ for (let i = 0; i < queue.steps.length - 1; i++) {
+ const fromWorkerId = queue.steps[i].workerId;
+ const toWorkerId = queue.steps[i + 1].workerId;
+ if (!workerIds.has(toWorkerId)) continue;
+ let callees = merged.get(fromWorkerId);
+ if (!callees) {
+ callees = /* @__PURE__ */ new Set();
+ merged.set(fromWorkerId, callees);
+ }
+ callees.add(toWorkerId);
+ }
+ }
+ return merged;
+ }
+ async function generateHandlers(workers, outputDir, queues = []) {
  const handlersDir = path.join(outputDir, "handlers");
+ const workersInQueues = getWorkersInQueues(queues);
  if (fs.existsSync(handlersDir)) {
  fs.rmSync(handlersDir, { recursive: true, force: true });
  }
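scanQueues extracts ids, steps, and schedules with regexes rather than executing the queue file, so a *.queue.ts file only has to match the patterns above. A hypothetical file it would parse (the import path and all ids are illustrative, not confirmed API):

import { defineWorkerQueue } from '@microfox/ai-worker';

export default defineWorkerQueue({
  id: 'image-pipeline',
  steps: [
    { workerId: 'download-image' },
    { workerId: 'resize-image', delaySeconds: 30 },
  ],
  schedule: 'rate(1 day)', // object literals are also accepted, evaluated via new Function
});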
@@ -311,18 +477,76 @@ async function generateHandlers(workers, outputDir) {
  const exportName = exportMatch ? exportMatch[2] : "worker";
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const workerRef = defaultExport ? "workerModule.default" : `workerModule.${exportName}`;
- const tempEntryContent = `
+ const inQueue = workersInQueues.has(worker.id);
+ const registryRelPath = path.relative(path.dirname(path.resolve(handlerFile)), path.join(outputDir, "generated", "workerQueues.registry")).split(path.sep).join("/");
+ const registryImportPath = registryRelPath.startsWith(".") ? registryRelPath : "./" + registryRelPath;
+ const handlerCreation = inQueue ? `
+ import { createLambdaHandler, wrapHandlerForQueue } from '@microfox/ai-worker/handler';
+ import * as queueRegistry from '${registryImportPath}';
+ import * as workerModule from '${relativeImportPath}';
+
+ const WORKER_LOG_PREFIX = '[WorkerEntrypoint]';
+
+ const workerAgent = ${workerRef};
+ if (!workerAgent || typeof workerAgent.handler !== 'function') {
+ throw new Error('Worker module must export a createWorker result (default or named) with .handler');
+ }
+
+ const queueRuntime = {
+ getNextStep: queueRegistry.getNextStep,
+ invokeMapInput: queueRegistry.invokeMapInput,
+ };
+ const wrappedHandler = wrapHandlerForQueue(workerAgent.handler, queueRuntime);
+
+ const baseHandler = createLambdaHandler(wrappedHandler, workerAgent.outputSchema);
+
+ export const handler = async (event: any, context: any) => {
+ const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;
+ try {
+ console.log(WORKER_LOG_PREFIX, {
+ workerId: workerAgent.id,
+ inQueue: true,
+ records,
+ requestId: (context as any)?.awsRequestId,
+ });
+ } catch {
+ // Best-effort logging only
+ }
+ return baseHandler(event, context);
+ };
+
+ export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
+ ` : `
  import { createLambdaHandler } from '@microfox/ai-worker/handler';
  import * as workerModule from '${relativeImportPath}';
 
+ const WORKER_LOG_PREFIX = '[WorkerEntrypoint]';
+
  const workerAgent = ${workerRef};
  if (!workerAgent || typeof workerAgent.handler !== 'function') {
  throw new Error('Worker module must export a createWorker result (default or named) with .handler');
  }
 
- export const handler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);
+ const baseHandler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);
+
+ export const handler = async (event: any, context: any) => {
+ const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;
+ try {
+ console.log(WORKER_LOG_PREFIX, {
+ workerId: workerAgent.id,
+ inQueue: false,
+ records,
+ requestId: (context as any)?.awsRequestId,
+ });
+ } catch {
+ // Best-effort logging only
+ }
+ return baseHandler(event, context);
+ };
+
  export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
  `;
+ const tempEntryContent = handlerCreation;
  fs.writeFileSync(tempEntryFile, tempEntryContent);
  try {
  const fixLazyCachePlugin = {
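Both generated entrypoints now emit a single best-effort log line before delegating to the real handler; queue membership (workersInQueues) decides whether the queue-aware template with wrapHandlerForQueue is used. The log line has this shape (values illustrative):

// [WorkerEntrypoint] { workerId: 'resize-image', inQueue: true, records: 1, requestId: '3f1c...' }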
@@ -734,7 +958,76 @@ export const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayPr
  fs.unlinkSync(tempEntryFile);
  console.log(import_chalk.default.green(`\u2713 Generated /workers/trigger handler`));
  }
- function generateWorkersConfigHandler(outputDir, workers, serviceName) {
+ function generateQueueStarterHandler(outputDir, queue, serviceName) {
+ const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, "");
+ const handlerFile = path.join(outputDir, "handlers", `queue-starter-${safeId}.js`);
+ const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
+ const handlerDir = path.dirname(handlerFile);
+ if (!fs.existsSync(handlerDir)) {
+ fs.mkdirSync(handlerDir, { recursive: true });
+ }
+ const firstWorkerId = queue.steps[0]?.workerId;
+ if (!firstWorkerId) return;
+ const handlerContent = `/**
+ * Auto-generated queue-starter for queue "${queue.id}"
+ * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
+ */
+
+ import { ScheduledHandler } from 'aws-lambda';
+ import { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';
+
+ const QUEUE_ID = ${JSON.stringify(queue.id)};
+ const FIRST_WORKER_ID = ${JSON.stringify(firstWorkerId)};
+ const SERVICE_NAME = ${JSON.stringify(serviceName)};
+
+ export const handler: ScheduledHandler = async () => {
+ const stage = process.env.ENVIRONMENT || process.env.STAGE || 'prod';
+ const region = process.env.AWS_REGION || 'us-east-1';
+ const queueName = \`\${SERVICE_NAME}-\${FIRST_WORKER_ID}-\${stage}\`;
+
+ const sqs = new SQSClient({ region });
+ const { QueueUrl } = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
+ if (!QueueUrl) {
+ throw new Error('Queue URL not found: ' + queueName);
+ }
+
+ const jobId = 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);
+ const initialInput = {};
+ const messageBody = {
+ workerId: FIRST_WORKER_ID,
+ jobId,
+ input: {
+ ...initialInput,
+ __workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput },
+ },
+ context: {},
+ metadata: { __workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput } },
+ timestamp: new Date().toISOString(),
+ };
+
+ await sqs.send(new SendMessageCommand({
+ QueueUrl,
+ MessageBody: JSON.stringify(messageBody),
+ }));
+
+ console.log('[queue-starter] Dispatched first worker for queue:', { queueId: QUEUE_ID, jobId, workerId: FIRST_WORKER_ID });
+ };
+ `;
+ fs.writeFileSync(tempEntryFile, handlerContent);
+ esbuild.buildSync({
+ entryPoints: [tempEntryFile],
+ bundle: true,
+ platform: "node",
+ target: "node20",
+ outfile: handlerFile,
+ external: ["aws-sdk", "canvas", "@microfox/puppeteer-sls", "@sparticuz/chromium"],
+ packages: "bundle",
+ logLevel: "error"
+ });
+ fs.unlinkSync(tempEntryFile);
+ console.log(import_chalk.default.green(`\u2713 Generated queue-starter for ${queue.id}`));
+ }
+ function generateWorkersConfigHandler(outputDir, workers, serviceName, queues = []) {
  const handlerFile = path.join(outputDir, "handlers", "workers-config.js");
  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
  const handlerDir = path.dirname(handlerFile);
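The queue-starter resolves the first step's queue by its conventional name (`${SERVICE_NAME}-${FIRST_WORKER_ID}-${stage}`) and publishes one message to kick off the chain. The serialized body looks roughly like this (ids and timestamp illustrative, structure from handlerContent above):

{
  "workerId": "download-image",
  "jobId": "job-1700000000000-k3j9x2abc",
  "input": { "__workerQueue": { "id": "image-pipeline", "stepIndex": 0, "initialInput": {} } },
  "context": {},
  "metadata": { "__workerQueue": { "id": "image-pipeline", "stepIndex": 0, "initialInput": {} } },
  "timestamp": "2026-01-01T00:00:00.000Z"
}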
@@ -750,8 +1043,9 @@ function generateWorkersConfigHandler(outputDir, workers, serviceName) {
  import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
  import { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';
 
- // Worker IDs embedded at build time so this endpoint doesn't depend on any generated files.
+ // Worker IDs and queue definitions embedded at build time.
  const WORKER_IDS: string[] = ${JSON.stringify(workers.map((w) => w.id), null, 2)};
+ const QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};
  const SERVICE_NAME = ${JSON.stringify(serviceName)};
 
  export const handler = async (
@@ -822,6 +1116,7 @@ export const handler = async (
  stage,
  region,
  workers,
+ queues: QUEUES,
  ...(debug ? { attemptedQueueNames, errors } : {}),
  }),
  };
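With QUEUES embedded, the workers-config endpoint now exposes the queue definitions alongside the resolved worker info, roughly (field values illustrative, elided where not shown in this diff):

{
  "stage": "prod",
  "region": "us-east-1",
  "workers": [ ... ],
  "queues": [ { "id": "image-pipeline", "steps": [ { "workerId": "download-image" }, ... ] } ]
}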
@@ -931,7 +1226,7 @@ function processScheduleEvents(scheduleConfig) {
  }
  return events;
  }
- function generateServerlessConfig(workers, stage, region, envVars, serviceName) {
+ function generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds = /* @__PURE__ */ new Map(), queues = []) {
  const resources = {
  Resources: {},
  Outputs: {}
@@ -1020,6 +1315,21 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
  if (worker.workerConfig?.layers?.length) {
  functions[functionName].layers = worker.workerConfig.layers;
  }
+ const callees = calleeIds.get(worker.id);
+ if (callees && callees.size > 0) {
+ const env = {};
+ for (const calleeId of callees) {
+ const calleeWorker = workers.find((w) => w.id === calleeId);
+ if (calleeWorker) {
+ const queueLogicalId = `WorkerQueue${calleeWorker.id.replace(/[^a-zA-Z0-9]/g, "")}${stage}`;
+ const envKey = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeId)}`;
+ env[envKey] = { Ref: queueLogicalId };
+ }
+ }
+ if (Object.keys(env).length > 0) {
+ functions[functionName].environment = env;
+ }
+ }
  }
  functions["getDocs"] = {
  handler: "handlers/docs.handler",
@@ -1057,8 +1367,21 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
  }
  ]
  };
+ for (const queue of queues) {
+ if (queue.schedule) {
+ const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, "");
+ const fnName = `queueStarter${safeId}`;
+ const scheduleEvents = processScheduleEvents(queue.schedule);
+ functions[fnName] = {
+ handler: `handlers/queue-starter-${safeId}.handler`,
+ timeout: 60,
+ memorySize: 128,
+ events: scheduleEvents
+ };
+ }
+ }
  const safeEnvVars = {};
- const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+ const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "UPSTASH_", "WORKER_", "WORKERS_", "WORKFLOW_", "REMOTION_", "QUEUE_JOB_", "DEBUG_WORKER_QUEUES"];
  for (const [key, value] of Object.entries(envVars)) {
  if (allowedPrefixes.some((prefix) => key.startsWith(prefix))) {
  safeEnvVars[key] = value;
@@ -1083,11 +1406,14 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
  service: serviceName,
  package: {
  excludeDevDependencies: true,
+ individually: true,
+ // Handlers are fully bundled by esbuild (packages: 'bundle'); exclude node_modules to stay under Lambda 250 MB limit
  patterns: [
  "!venv/**",
  "!.idea/**",
  "!.vscode/**",
  "!src/**",
+ "!node_modules/**",
  "!node_modules/serverless-offline/**",
  "!node_modules/typescript/**",
  "!node_modules/@types/**",
@@ -1208,7 +1534,9 @@ async function build2(args) {
  workers.map((w) => w.filePath),
  process.cwd()
  );
- const dependencies = buildDependenciesMap(process.cwd(), runtimeDeps);
+ const jobStoreType = getJobStoreType();
+ const filteredDeps = filterDepsForJobStore(runtimeDeps, jobStoreType);
+ const dependencies = buildDependenciesMap(process.cwd(), filteredDeps);
  const packageJson = {
  name: "ai-router-workers",
  version: "1.0.0",
@@ -1267,8 +1595,13 @@ async function build2(args) {
  console.warn(import_chalk.default.yellow("\u26A0\uFE0F Failed to parse microfox.json, using default service name"));
  }
  }
+ const queues = await scanQueues(aiPath);
+ if (queues.length > 0) {
+ console.log(import_chalk.default.blue(`\u2139\uFE0F Found ${queues.length} queue(s): ${queues.map((q) => q.id).join(", ")}`));
+ generateQueueRegistry(queues, serverlessDir, process.cwd());
+ }
  (0, import_ora.default)("Generating handlers...").start().succeed("Generated handlers");
- await generateHandlers(workers, serverlessDir);
+ await generateHandlers(workers, serverlessDir, queues);
  const extractSpinner = (0, import_ora.default)("Extracting worker configs from bundled handlers...").start();
  for (const worker of workers) {
  try {
@@ -1315,17 +1648,24 @@ async function build2(args) {
  }
  }
  extractSpinner.succeed("Extracted configs");
- generateWorkersConfigHandler(serverlessDir, workers, serviceName);
+ generateWorkersConfigHandler(serverlessDir, workers, serviceName, queues);
  generateDocsHandler(serverlessDir, serviceName, stage, region);
  generateTriggerHandler(serverlessDir, serviceName);
- const config = generateServerlessConfig(workers, stage, region, envVars, serviceName);
+ for (const queue of queues) {
+ if (queue.schedule) {
+ generateQueueStarterHandler(serverlessDir, queue, serviceName);
+ }
+ }
+ let calleeIds = await collectCalleeWorkerIds(workers, process.cwd());
+ calleeIds = mergeQueueCallees(calleeIds, queues, workers);
+ const config = generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds, queues);
  const envStage = fs.existsSync(microfoxJsonPath) ? "prod" : stage;
  const safeEnvVars = {
  ENVIRONMENT: envStage,
  STAGE: envStage,
  NODE_ENV: envStage
  };
- const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+ const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "UPSTASH_", "WORKER_", "WORKERS_", "WORKFLOW_", "REMOTION_", "QUEUE_JOB_", "DEBUG_WORKER_QUEUES"];
  for (const [key, value] of Object.entries(envVars)) {
  if (key.startsWith("AWS_")) continue;
  if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {
@@ -1344,8 +1684,8 @@ async function build2(args) {
  async function deploy(args) {
  const stage = args.stage || process.env.STAGE || "prod";
  const region = args.region || process.env.AWS_REGION || "us-east-1";
- const skipDeploy = args["skip-deploy"] || false;
- const skipInstall = args["skip-install"] || false;
+ const skipDeploy = args.skipDeploy ?? args["skip-deploy"] ?? false;
+ const skipInstall = args.skipInstall ?? args["skip-install"] ?? false;
  if (skipDeploy) {
  console.log(import_chalk.default.yellow("\u23ED\uFE0F Skipping deployment (--skip-deploy flag)"));
  return;
@@ -1399,10 +1739,86 @@ var pushCommand = new import_commander.Command().name("push").description("Build
  await deploy(options);
  });
 
+ // src/commands/new.ts
+ var import_commander2 = require("commander");
+ var fs2 = __toESM(require("fs"), 1);
+ var path2 = __toESM(require("path"), 1);
+ var import_chalk2 = __toESM(require("chalk"), 1);
+ var import_ora2 = __toESM(require("ora"), 1);
+ var newCommand = new import_commander2.Command().name("new").description("Scaffold a new background worker file").argument("<id>", "Worker ID (used as the worker id and filename)").option("--dir <path>", "Directory for the worker file", "app/ai/workers").option("--schedule <expression>", 'Optional schedule expression (e.g. "cron(0 3 * * ? *)" or "rate(1 hour)")').option("--timeout <seconds>", "Lambda timeout in seconds", "300").option("--memory <mb>", "Lambda memory size in MB", "512").action((id, options) => {
+ const spinner = (0, import_ora2.default)("Scaffolding worker...").start();
+ try {
+ const projectRoot = process.cwd();
+ const dir = path2.resolve(projectRoot, options.dir || "app/ai/workers");
+ if (!fs2.existsSync(dir)) {
+ fs2.mkdirSync(dir, { recursive: true });
+ }
+ const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, "-");
+ const filePath = path2.join(dir, `${fileSafeId}.worker.ts`);
+ if (fs2.existsSync(filePath)) {
+ spinner.fail(`File already exists: ${path2.relative(projectRoot, filePath)}`);
+ process.exitCode = 1;
+ return;
+ }
+ const timeout = Number(options.timeout || "300") || 300;
+ const memorySize = Number(options.memory || "512") || 512;
+ const scheduleLine = options.schedule ? ` schedule: '${options.schedule}',
+ ` : "";
+ const contents = `import { createWorker, type WorkerConfig } from '@microfox/ai-worker';
+ import { z } from 'zod';
+ import type { WorkerHandlerParams } from '@microfox/ai-worker/handler';
+
+ const InputSchema = z.object({
+ // TODO: define input fields
+ });
+
+ const OutputSchema = z.object({
+ // TODO: define output fields
+ });
+
+ type Input = z.infer<typeof InputSchema>;
+ type Output = z.infer<typeof OutputSchema>;
+
+ export const workerConfig: WorkerConfig = {
+ timeout: ${timeout},
+ memorySize: ${memorySize},
+ ${scheduleLine}};
+
+ export default createWorker<typeof InputSchema, Output>({
+ id: '${id}',
+ inputSchema: InputSchema,
+ outputSchema: OutputSchema,
+ async handler({ input, ctx }: WorkerHandlerParams<Input, Output>) {
+ const { jobId, workerId, jobStore, dispatchWorker } = ctx;
+ console.log('[${id}] start', { jobId, workerId });
+
+ await jobStore?.update({ status: 'running' });
+
+ // TODO: implement your business logic here
+ const result: Output = {} as any;
+
+ await jobStore?.update({ status: 'completed', output: result });
+ return result;
+ },
+ });
+ `;
+ fs2.writeFileSync(filePath, contents, "utf-8");
+ spinner.succeed(
+ `Created worker: ${import_chalk2.default.cyan(path2.relative(projectRoot, filePath))}
+ Next: run ${import_chalk2.default.yellow("npx @microfox/ai-worker-cli@latest push")} to build & deploy your workers.`
+ );
+ } catch (error) {
+ spinner.fail("Failed to scaffold worker");
+ console.error(import_chalk2.default.red(error?.stack || error?.message || String(error)));
+ process.exitCode = 1;
+ }
+ });
+
  // src/index.ts
- var program = new import_commander2.Command();
- program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version("0.1.0");
+ var program = new import_commander3.Command();
+ program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version("1.0.0");
  program.addCommand(pushCommand);
+ program.addCommand(newCommand);
  program.parse(process.argv);
  var aiWorkerCli = program;
  // Annotate the CommonJS export names for ESM import in node:
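
Taken together, 1.0.3 adds a `new` subcommand alongside `push`. A usage sketch based on the options defined above (the worker id is illustrative):

npx @microfox/ai-worker-cli new send-email --schedule "rate(1 hour)" --timeout 300 --memory 512
# writes app/ai/workers/send-email.worker.ts; then deploy with:
npx @microfox/ai-worker-cli@latest push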