@microfox/ai-worker-cli 1.0.1

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/index.cjs ADDED
@@ -0,0 +1,1412 @@
1
+ #!/usr/bin/env node
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
+ // src/index.ts
31
+ var index_exports = {};
32
+ __export(index_exports, {
33
+ aiWorkerCli: () => aiWorkerCli
34
+ });
35
+ module.exports = __toCommonJS(index_exports);
36
+ var import_commander2 = require("commander");
37
+
38
+ // src/commands/push.ts
39
+ var import_commander = require("commander");
40
+ var esbuild = __toESM(require("esbuild"), 1);
41
+ var import_child_process = require("child_process");
42
+ var fs = __toESM(require("fs"), 1);
43
+ var path = __toESM(require("path"), 1);
44
+ var import_url = require("url");
45
+ var import_module = require("module");
46
+ var import_glob = require("glob");
47
+ var yaml = __toESM(require("js-yaml"), 1);
48
+ var import_chalk = __toESM(require("chalk"), 1);
49
+ var import_ora = __toESM(require("ora"), 1);
50
+ var NODE_BUILTINS = new Set(
51
+ import_module.builtinModules.map((m) => m.startsWith("node:") ? m.slice("node:".length) : m)
52
+ );
53
+ function isBuiltinModule(specifier) {
54
+ const s = specifier.startsWith("node:") ? specifier.slice("node:".length) : specifier;
55
+ return NODE_BUILTINS.has(s);
56
+ }
57
+ function getPackageNameFromSpecifier(specifier) {
58
+ if (specifier.startsWith("@")) {
59
+ const [scope, name] = specifier.split("/");
60
+ return name ? `${scope}/${name}` : specifier;
61
+ }
62
+ return specifier.split("/")[0];
63
+ }
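+ // Illustrative examples (not part of the published bundle) of the mapping above:
+ //   getPackageNameFromSpecifier("lodash/merge")        -> "lodash"
+ //   getPackageNameFromSpecifier("@aws-sdk/client-sqs") -> "@aws-sdk/client-sqs"
+ //   getPackageNameFromSpecifier("@scope/pkg/sub/path") -> "@scope/pkg"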
64
+ function tryResolveLocalImport(fromFile, specifier) {
65
+ const baseDir = path.dirname(fromFile);
66
+ const raw = path.resolve(baseDir, specifier);
67
+ const candidates = [
68
+ raw,
69
+ `${raw}.ts`,
70
+ `${raw}.tsx`,
71
+ `${raw}.js`,
72
+ `${raw}.mjs`,
73
+ `${raw}.cjs`
74
+ ];
75
+ for (const c of candidates) {
76
+ if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;
77
+ }
78
+ if (fs.existsSync(raw) && fs.statSync(raw).isDirectory()) {
79
+ const idxCandidates = [
80
+ path.join(raw, "index.ts"),
81
+ path.join(raw, "index.tsx"),
82
+ path.join(raw, "index.js"),
83
+ path.join(raw, "index.mjs"),
84
+ path.join(raw, "index.cjs")
85
+ ];
86
+ for (const c of idxCandidates) {
87
+ if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;
88
+ }
89
+ }
90
+ return null;
91
+ }
92
+ function extractImportSpecifiers(source) {
93
+ const specs = [];
94
+ const re1 = /(?:^|\n)\s*(?!import\s+type)(?:import|export)\s+[\s\S]*?\sfrom\s*['"]([^'"]+)['"]/g;
95
+ for (const match of source.matchAll(re1)) {
96
+ if (match[1]) specs.push(match[1]);
97
+ }
98
+ const re2 = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
99
+ for (const match of source.matchAll(re2)) {
100
+ if (match[1]) specs.push(match[1]);
101
+ }
102
+ const re3 = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
103
+ for (const match of source.matchAll(re3)) {
104
+ if (match[1]) specs.push(match[1]);
105
+ }
106
+ return specs;
107
+ }
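+ // Illustrative example (not part of the published bundle): for a source file containing
+ //   import fs from 'fs'; const m = await import('./job'); require('zod');
+ // extractImportSpecifiers returns ['fs', './job', 'zod']. Bare side-effect
+ // imports (import 'polyfill') have no `from` clause and are not captured.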
108
+ function extractEnvVarUsageFromSource(source) {
109
+ const runtimeKeys = /* @__PURE__ */ new Set();
110
+ const buildtimeKeys = /* @__PURE__ */ new Set();
111
+ const reProcessDot = /\bprocess\.env\??\.([A-Za-z_][A-Za-z0-9_]*)\b/g;
112
+ for (const match of source.matchAll(reProcessDot)) {
113
+ const key = match[1];
114
+ if (key) runtimeKeys.add(key);
115
+ }
116
+ const reProcessBracket = /\bprocess\.env\[\s*['"]([^'"]+)['"]\s*\]/g;
117
+ for (const match of source.matchAll(reProcessBracket)) {
118
+ const key = match[1];
119
+ if (key) runtimeKeys.add(key);
120
+ }
121
+ const reImportMetaDot = /\bimport\.meta\.env\.([A-Za-z_][A-Za-z0-9_]*)\b/g;
122
+ for (const match of source.matchAll(reImportMetaDot)) {
123
+ const key = match[1];
124
+ if (key) buildtimeKeys.add(key);
125
+ }
126
+ const reImportMetaBracket = /\bimport\.meta\.env\[\s*['"]([^'"]+)['"]\s*\]/g;
127
+ for (const match of source.matchAll(reImportMetaBracket)) {
128
+ const key = match[1];
129
+ if (key) buildtimeKeys.add(key);
130
+ }
131
+ return { runtimeKeys, buildtimeKeys };
132
+ }
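+ // Illustrative example (not part of the published bundle): for source containing
+ //   const k = process.env.OPENAI_API_KEY; const m = import.meta.env["MODE"];
+ // the result is { runtimeKeys: Set {'OPENAI_API_KEY'}, buildtimeKeys: Set {'MODE'} }.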
133
+ async function collectEnvUsageForWorkers(workerEntryFiles, projectRoot) {
134
+ void projectRoot;
135
+ const runtimeKeys = /* @__PURE__ */ new Set();
136
+ const buildtimeKeys = /* @__PURE__ */ new Set();
137
+ const visited = /* @__PURE__ */ new Set();
138
+ const queue = [...workerEntryFiles];
139
+ while (queue.length > 0) {
140
+ const file = queue.pop();
141
+ const normalized = path.resolve(file);
142
+ if (visited.has(normalized)) continue;
143
+ visited.add(normalized);
144
+ if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;
145
+ const src = fs.readFileSync(normalized, "utf-8");
146
+ const usage = extractEnvVarUsageFromSource(src);
147
+ usage.runtimeKeys.forEach((k) => runtimeKeys.add(k));
148
+ usage.buildtimeKeys.forEach((k) => buildtimeKeys.add(k));
149
+ const specifiers = extractImportSpecifiers(src);
150
+ for (const spec of specifiers) {
151
+ if (!spec) continue;
152
+ if (spec.startsWith(".")) {
153
+ const resolved = tryResolveLocalImport(normalized, spec);
154
+ if (resolved) queue.push(resolved);
155
+ continue;
156
+ }
157
+ if (spec.startsWith("/")) continue;
158
+ if (isBuiltinModule(spec)) continue;
159
+ }
160
+ }
161
+ runtimeKeys.delete("");
162
+ buildtimeKeys.delete("");
163
+ runtimeKeys.delete("node");
164
+ buildtimeKeys.delete("node");
165
+ return { runtimeKeys, buildtimeKeys };
166
+ }
167
+ function readJsonFile(filePath) {
168
+ try {
169
+ return JSON.parse(fs.readFileSync(filePath, "utf-8"));
170
+ } catch {
171
+ return null;
172
+ }
173
+ }
174
+ function findMonorepoRoot(startDir) {
175
+ let dir = path.resolve(startDir);
176
+ while (true) {
177
+ const pkgPath = path.join(dir, "package.json");
178
+ if (fs.existsSync(pkgPath)) {
179
+ const pkg = readJsonFile(pkgPath);
180
+ if (pkg?.workspaces) return dir;
181
+ }
182
+ const parent = path.dirname(dir);
183
+ if (parent === dir) return startDir;
184
+ dir = parent;
185
+ }
186
+ }
187
+ async function collectRuntimeDependenciesForWorkers(workerEntryFiles, projectRoot) {
188
+ const deps = /* @__PURE__ */ new Set(["@microfox/ai-worker", "@aws-sdk/client-sqs"]);
189
+ const visited = /* @__PURE__ */ new Set();
190
+ const queue = [...workerEntryFiles];
191
+ while (queue.length > 0) {
192
+ const file = queue.pop();
193
+ const normalized = path.resolve(file);
194
+ if (visited.has(normalized)) continue;
195
+ visited.add(normalized);
196
+ if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;
197
+ const src = fs.readFileSync(normalized, "utf-8");
198
+ const specifiers = extractImportSpecifiers(src);
199
+ for (const spec of specifiers) {
200
+ if (!spec) continue;
201
+ if (spec.startsWith(".")) {
202
+ const resolved = tryResolveLocalImport(normalized, spec);
203
+ if (resolved) queue.push(resolved);
204
+ continue;
205
+ }
206
+ if (spec.startsWith("/")) continue;
207
+ if (isBuiltinModule(spec)) continue;
208
+ deps.add(getPackageNameFromSpecifier(spec));
209
+ }
210
+ }
211
+ deps.delete("");
212
+ deps.delete("node");
213
+ deps.delete("serverless");
214
+ deps.delete("serverless-offline");
215
+ deps.delete("@aws-sdk/client-sqs");
216
+ deps.delete("@microfox/ai-worker");
217
+ return deps;
218
+ }
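+ // Illustrative example (not part of the published bundle): a worker that does
+ //   import { z } from 'zod'; import { helper } from './lib/helper';
+ // contributes 'zod' to the set, and './lib/helper' is resolved and walked
+ // transitively; Node built-ins and the pre-seeded worker/SQS packages are
+ // excluded from the returned set.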
219
+ function buildDependenciesMap(projectRoot, deps) {
220
+ const projectPkg = readJsonFile(path.join(projectRoot, "package.json")) || {};
221
+ const projectDeps = projectPkg.dependencies || {};
222
+ const projectDevDeps = projectPkg.devDependencies || {};
223
+ const repoRoot = findMonorepoRoot(projectRoot);
224
+ const workerPkg = readJsonFile(path.join(repoRoot, "packages", "ai-worker", "package.json")) || {};
225
+ const workerCliPkg = readJsonFile(
226
+ path.join(repoRoot, "packages", "ai-worker-cli", "package.json")
227
+ ) || {};
228
+ const workspaceDeps = {
229
+ ...workerPkg.dependencies || {},
230
+ ...workerPkg.devDependencies || {},
231
+ ...workerCliPkg.dependencies || {},
232
+ ...workerCliPkg.devDependencies || {}
233
+ };
234
+ const out = {};
235
+ for (const dep of Array.from(deps).sort()) {
236
+ const range = projectDeps[dep] || projectDevDeps[dep] || workspaceDeps[dep];
237
+ if (range) {
238
+ out[dep] = String(range);
239
+ }
240
+ }
241
+ return out;
242
+ }
243
+ function getServiceNameFromProjectId(projectId) {
244
+ const cleanedProjectId = projectId.replace(/-/g, "").slice(0, 15);
245
+ return `p-${cleanedProjectId}`;
246
+ }
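+ // Illustrative example (not part of the published bundle):
+ //   getServiceNameFromProjectId("my-project-123456789") -> "p-myproject123456"
+ // (dashes removed, truncated to 15 characters, prefixed with "p-").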
247
+ function validateEnvironment() {
248
+ try {
249
+ (0, import_child_process.execSync)("npm --version", { stdio: "ignore" });
250
+ } catch (error) {
251
+ console.error(import_chalk.default.red("\u274C npm is not installed or not in PATH."));
252
+ process.exit(1);
253
+ }
254
+ }
255
+ async function scanWorkers(aiPath = "app/ai") {
256
+ const pattern = path.join(aiPath, "**/*.worker.ts").replace(/\\/g, "/");
257
+ const files = await (0, import_glob.glob)(pattern);
258
+ const workers = [];
259
+ for (const filePath of files) {
260
+ try {
261
+ let workerConfig;
262
+ let workerId;
263
+ // Extract the worker ID from the createWorker({ id: ... }) call in the source.
264
+ const content = fs.readFileSync(filePath, "utf-8");
265
+ const idMatch = content.match(/createWorker\s*(?:<[^>]+>)?\s*\(\s*\{[\s\S]*?id:\s*['"]([^'"]+)['"]/);
266
+ if (!idMatch) {
267
+ console.warn(import_chalk.default.yellow(`\u26A0\uFE0F Skipping ${filePath}: No worker ID found`));
268
+ continue;
269
+ }
270
+ workerId = idMatch[1];
271
+ // workerConfig is resolved later, once the bundled handler can be inspected.
272
+ const relativePath = path.relative(aiPath, filePath);
273
+ const handlerDir = path.dirname(relativePath);
274
+ const handlerName = path.basename(relativePath, ".worker.ts");
275
+ const handlerPath = path.join("handlers", handlerDir, `${handlerName}`).replace(/\\/g, "/");
276
+ workers.push({
277
+ id: workerId,
278
+ filePath,
279
+ handlerPath,
280
+ workerConfig
281
+ });
282
+ } catch (error) {
283
+ console.error(import_chalk.default.red(`\u274C Error processing ${filePath}:`), error);
284
+ }
285
+ }
286
+ return workers;
287
+ }
288
+ async function generateHandlers(workers, outputDir) {
289
+ const handlersDir = path.join(outputDir, "handlers");
290
+ if (fs.existsSync(handlersDir)) {
291
+ fs.rmSync(handlersDir, { recursive: true, force: true });
292
+ }
293
+ fs.mkdirSync(handlersDir, { recursive: true });
294
+ for (const worker of workers) {
295
+ const handlerFile = path.join(handlersDir, worker.handlerPath.replace("handlers/", "") + ".js");
296
+ const handlerDir = path.dirname(handlerFile);
297
+ if (!fs.existsSync(handlerDir)) {
298
+ fs.mkdirSync(handlerDir, { recursive: true });
299
+ }
300
+ const handlerAbsPath = path.resolve(handlerFile);
301
+ const workerAbsPath = path.resolve(worker.filePath);
302
+ let relativeImportPath = path.relative(path.dirname(handlerAbsPath), workerAbsPath);
303
+ if (!relativeImportPath.startsWith(".")) {
304
+ relativeImportPath = "./" + relativeImportPath;
305
+ }
306
+ relativeImportPath = relativeImportPath.replace(/\.ts$/, "");
307
+ relativeImportPath = relativeImportPath.split(path.sep).join("/");
308
+ const fileContent = fs.readFileSync(worker.filePath, "utf-8");
309
+ const defaultExport = /export\s+default\s+createWorker/.test(fileContent);
310
+ const exportMatch = fileContent.match(/export\s+(const|let)\s+(\w+)\s*=\s*createWorker/);
311
+ const exportName = exportMatch ? exportMatch[2] : "worker";
312
+ const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
313
+ const workerRef = defaultExport ? "workerModule.default" : `workerModule.${exportName}`;
314
+ const tempEntryContent = `
315
+ import { createLambdaHandler } from '@microfox/ai-worker/handler';
316
+ import * as workerModule from '${relativeImportPath}';
317
+
318
+ const workerAgent = ${workerRef};
319
+ if (!workerAgent || typeof workerAgent.handler !== 'function') {
320
+ throw new Error('Worker module must export a createWorker result (default or named) with .handler');
321
+ }
322
+
323
+ export const handler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);
324
+ export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
325
+ `;
326
+ fs.writeFileSync(tempEntryFile, tempEntryContent);
327
+ try {
328
+ const fixLazyCachePlugin = {
329
+ name: "fix-lazy-cache",
330
+ setup(build3) {
331
+ build3.onEnd(async (result) => {
332
+ if (result.errors.length > 0) return;
333
+ let bundledCode = fs.readFileSync(handlerFile, "utf-8");
334
+ let modified = false;
335
+ const pattern = /(require\("kind-of",\s*"typeOf"\);\s*)require_for_own\(\);/g;
336
+ if (pattern.test(bundledCode)) {
337
+ bundledCode = bundledCode.replace(
338
+ pattern,
339
+ '$1require("for-own", "forOwn");'
340
+ );
341
+ modified = true;
342
+ }
343
+ if (bundledCode.includes("import_meta.url")) {
344
+ bundledCode = bundledCode.replace(
345
+ /import_meta\.url/g,
346
+ 'require("url").pathToFileURL(__filename).href'
347
+ );
348
+ modified = true;
349
+ }
350
+ const beforeCreateRequire = bundledCode;
351
+ bundledCode = bundledCode.replace(
352
+ /\bcreateRequire\s*\(\s*(?:undefined|void\s*0)\s*\)/g,
353
+ 'createRequire(require("url").pathToFileURL(__filename).href)'
354
+ );
355
+ if (bundledCode !== beforeCreateRequire) modified = true;
356
+ if (modified) {
357
+ fs.writeFileSync(handlerFile, bundledCode, "utf-8");
358
+ }
359
+ });
360
+ }
361
+ };
362
+ await esbuild.build({
363
+ entryPoints: [tempEntryFile],
364
+ bundle: true,
365
+ platform: "node",
366
+ target: "node20",
367
+ format: "cjs",
368
+ outfile: handlerFile,
369
+ // We exclude aws-sdk as it's included in Lambda runtime
370
+ // We exclude canvas because it's a binary dependency often problematic in bundling
371
+ external: [
372
+ "aws-sdk",
373
+ "canvas",
374
+ "@microfox/puppeteer-sls",
375
+ "@sparticuz/chromium"
376
+ ],
377
+ // Force lazy-cache to eagerly load modules during bundling
378
+ // This prevents runtime dynamic require() calls that fail in bundled code
379
+ define: {
380
+ "process.env.UNLAZY": '"true"'
381
+ },
382
+ // Force bundling of all packages to avoid runtime module resolution issues
383
+ // This ensures clone-deep, lazy-cache, and all transitive deps are bundled
384
+ packages: "bundle",
385
+ plugins: [fixLazyCachePlugin],
386
+ logLevel: "error"
387
+ });
388
+ fs.unlinkSync(tempEntryFile);
389
+ } catch (error) {
390
+ console.error(import_chalk.default.red(`Error bundling handler for ${worker.id}:`), error);
391
+ }
392
+ }
393
+ console.log(import_chalk.default.green(`\u2713 Generated ${workers.length} bundled handlers`));
394
+ }
395
+ function generateDocsHandler(outputDir, serviceName, stage, region) {
396
+ const handlerFile = path.join(outputDir, "handlers", "docs.js");
397
+ const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
398
+ const handlerDir = path.dirname(handlerFile);
399
+ if (!fs.existsSync(handlerDir)) {
400
+ fs.mkdirSync(handlerDir, { recursive: true });
401
+ }
402
+ const handlerContent = `/**
403
+ * Auto-generated docs handler for Microfox compatibility
404
+ * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
405
+ */
406
+
407
+ import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
408
+
409
+ export const handler = async (
410
+ event: APIGatewayProxyEvent
411
+ ): Promise<APIGatewayProxyResult> => {
412
+ // Return OpenAPI JSON for Microfox
413
+ const openapi = {
414
+ openapi: '3.0.3',
415
+ info: {
416
+ title: 'AI Worker Service',
417
+ version: '1.0.0',
418
+ description: 'Auto-generated OpenAPI for background workers service',
419
+ },
420
+ servers: [
421
+ {
422
+ url: 'https://{apiId}.execute-api.{region}.amazonaws.com/{stage}',
423
+ variables: {
424
+ apiId: { default: 'REPLACE_ME' },
425
+ region: { default: '${region}' },
426
+ stage: { default: '${stage}' },
427
+ },
428
+ },
429
+ ],
430
+ paths: {
431
+ '/docs.json': {
432
+ get: {
433
+ operationId: 'getDocs',
434
+ summary: 'Get OpenAPI schema',
435
+ responses: {
436
+ '200': {
437
+ description: 'OpenAPI JSON',
438
+ content: {
439
+ 'application/json': {
440
+ schema: { type: 'object' },
441
+ },
442
+ },
443
+ },
444
+ },
445
+ },
446
+ },
447
+ '/workers/config': {
448
+ get: {
449
+ operationId: 'getWorkersConfig',
450
+ summary: 'Get workers config (queue urls map)',
451
+ parameters: [
452
+ {
453
+ name: 'x-workers-config-key',
454
+ in: 'header',
455
+ required: false,
456
+ schema: { type: 'string' },
457
+ description: 'Optional API key header (if configured)',
458
+ },
459
+ ],
460
+ responses: {
461
+ '200': {
462
+ description: 'Workers config map',
463
+ content: {
464
+ 'application/json': {
465
+ schema: {
466
+ type: 'object',
467
+ properties: {
468
+ version: { type: 'string' },
469
+ stage: { type: 'string' },
470
+ region: { type: 'string' },
471
+ workers: { type: 'object' },
472
+ },
473
+ },
474
+ },
475
+ },
476
+ },
477
+ '401': {
478
+ description: 'Unauthorized',
479
+ content: {
480
+ 'application/json': {
481
+ schema: {
482
+ type: 'object',
483
+ properties: { error: { type: 'string' } },
484
+ },
485
+ },
486
+ },
487
+ },
488
+ },
489
+ },
490
+ },
491
+ '/workers/trigger': {
492
+ post: {
493
+ operationId: 'triggerWorker',
494
+ summary: 'Trigger a worker by sending a raw SQS message body',
495
+ parameters: [
496
+ {
497
+ name: 'workerId',
498
+ in: 'query',
499
+ required: false,
500
+ schema: { type: 'string' },
501
+ description: 'Worker ID (can also be provided in JSON body as workerId)',
502
+ },
503
+ {
504
+ name: 'x-workers-trigger-key',
505
+ in: 'header',
506
+ required: false,
507
+ schema: { type: 'string' },
508
+ description: 'Optional API key header (if configured)',
509
+ },
510
+ ],
511
+ requestBody: {
512
+ required: true,
513
+ content: {
514
+ 'application/json': {
515
+ schema: {
516
+ type: 'object',
517
+ properties: {
518
+ workerId: { type: 'string' },
519
+ // Prefer sending the exact SQS message body your worker expects
520
+ body: { type: 'object' },
521
+ messageBody: { type: 'string' },
522
+ },
523
+ additionalProperties: true,
524
+ },
525
+ },
526
+ },
527
+ },
528
+ responses: {
529
+ '200': {
530
+ description: 'Enqueued',
531
+ content: {
532
+ 'application/json': {
533
+ schema: {
534
+ type: 'object',
535
+ properties: {
536
+ ok: { type: 'boolean' },
537
+ workerId: { type: 'string' },
538
+ stage: { type: 'string' },
539
+ queueName: { type: 'string' },
540
+ queueUrl: { type: 'string' },
541
+ messageId: { type: 'string' },
542
+ },
543
+ },
544
+ },
545
+ },
546
+ },
547
+ '400': {
548
+ description: 'Bad request',
549
+ content: {
550
+ 'application/json': {
551
+ schema: {
552
+ type: 'object',
553
+ properties: { error: { type: 'string' } },
554
+ },
555
+ },
556
+ },
557
+ },
558
+ '401': {
559
+ description: 'Unauthorized',
560
+ content: {
561
+ 'application/json': {
562
+ schema: {
563
+ type: 'object',
564
+ properties: { error: { type: 'string' } },
565
+ },
566
+ },
567
+ },
568
+ },
569
+ },
570
+ },
571
+ },
572
+ },
573
+ 'x-service': {
574
+ serviceName: '${serviceName}',
575
+ stage: '${stage}',
576
+ region: '${region}',
577
+ },
578
+ };
579
+
580
+ return {
581
+ statusCode: 200,
582
+ headers: {
583
+ 'Content-Type': 'application/json',
584
+ 'Access-Control-Allow-Origin': '*',
585
+ },
586
+ body: JSON.stringify(openapi, null, 2),
587
+ };
588
+ };
589
+ `;
590
+ fs.writeFileSync(tempEntryFile, handlerContent);
591
+ esbuild.buildSync({
592
+ entryPoints: [tempEntryFile],
593
+ bundle: true,
594
+ platform: "node",
595
+ target: "node20",
596
+ outfile: handlerFile,
597
+ external: [
598
+ "aws-sdk",
599
+ "canvas",
600
+ "@microfox/puppeteer-sls",
601
+ "@sparticuz/chromium"
602
+ ],
603
+ define: {
604
+ "process.env.UNLAZY": '"true"'
605
+ },
606
+ packages: "bundle"
607
+ });
608
+ fs.unlinkSync(tempEntryFile);
609
+ console.log(import_chalk.default.green(`\u2713 Generated docs.json handler`));
610
+ }
611
+ function generateTriggerHandler(outputDir, serviceName) {
612
+ const handlerFile = path.join(outputDir, "handlers", "workers-trigger.js");
613
+ const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
614
+ const handlerDir = path.dirname(handlerFile);
615
+ if (!fs.existsSync(handlerDir)) {
616
+ fs.mkdirSync(handlerDir, { recursive: true });
617
+ }
618
+ const handlerContent = `/**
619
+ * Auto-generated worker trigger handler
620
+ * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
621
+ */
622
+
623
+ import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
624
+ import { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';
625
+
626
+ const SERVICE_NAME = ${JSON.stringify(serviceName)};
627
+
628
+ function jsonResponse(statusCode: number, body: any): APIGatewayProxyResult {
629
+ return {
630
+ statusCode,
631
+ headers: {
632
+ 'Content-Type': 'application/json',
633
+ 'Access-Control-Allow-Origin': '*',
634
+ },
635
+ body: JSON.stringify(body),
636
+ };
637
+ }
638
+
639
+ export const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {
640
+ // Optional API key
641
+ const apiKey = process.env.WORKERS_TRIGGER_API_KEY;
642
+ if (apiKey) {
643
+ const providedKey = event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'];
644
+ if (providedKey !== apiKey) {
645
+ return jsonResponse(401, { error: 'Unauthorized' });
646
+ }
647
+ }
648
+
649
+ const stage =
650
+ (event as any)?.requestContext?.stage ||
651
+ process.env.ENVIRONMENT ||
652
+ process.env.STAGE ||
653
+ 'prod';
654
+ const region = process.env.AWS_REGION || 'us-east-1';
655
+
656
+ const qsWorkerId = event.queryStringParameters?.workerId;
657
+
658
+ let parsedBody: any = undefined;
659
+ if (event.body) {
660
+ try {
661
+ parsedBody = JSON.parse(event.body);
662
+ } catch {
663
+ parsedBody = undefined;
664
+ }
665
+ }
666
+
667
+ const workerId = (parsedBody && parsedBody.workerId) || qsWorkerId;
668
+ if (!workerId || typeof workerId !== 'string') {
669
+ return jsonResponse(400, { error: 'workerId is required (query param workerId or JSON body workerId)' });
670
+ }
671
+
672
+ // Prefer JSON body fields, otherwise send raw event.body
673
+ let messageBody: string | undefined;
674
+ if (parsedBody && typeof parsedBody.messageBody === 'string') {
675
+ messageBody = parsedBody.messageBody;
676
+ } else if (parsedBody && parsedBody.body !== undefined) {
677
+ messageBody = typeof parsedBody.body === 'string' ? parsedBody.body : JSON.stringify(parsedBody.body);
678
+ } else if (event.body) {
679
+ messageBody = event.body;
680
+ }
681
+
682
+ if (!messageBody) {
683
+ return jsonResponse(400, { error: 'body/messageBody is required' });
684
+ }
685
+
686
+ const queueName = \`\${SERVICE_NAME}-\${workerId}-\${stage}\`;
687
+ const sqs = new SQSClient({ region });
688
+
689
+ let queueUrl: string;
690
+ try {
691
+ const urlRes = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
692
+ if (!urlRes.QueueUrl) {
693
+ return jsonResponse(404, { error: 'Queue URL not found', queueName });
694
+ }
695
+ queueUrl = String(urlRes.QueueUrl);
696
+ } catch (e: any) {
697
+ return jsonResponse(404, { error: 'Queue does not exist or not accessible', queueName, message: String(e?.message || e) });
698
+ }
699
+
700
+ try {
701
+ const sendRes = await sqs.send(new SendMessageCommand({ QueueUrl: queueUrl, MessageBody: messageBody }));
702
+ return jsonResponse(200, {
703
+ ok: true,
704
+ workerId,
705
+ stage,
706
+ queueName,
707
+ queueUrl,
708
+ messageId: sendRes.MessageId || null,
709
+ });
710
+ } catch (e: any) {
711
+ return jsonResponse(500, { error: 'Failed to send message', message: String(e?.message || e) });
712
+ }
713
+ };
714
+ `;
715
+ fs.writeFileSync(tempEntryFile, handlerContent);
716
+ esbuild.buildSync({
717
+ entryPoints: [tempEntryFile],
718
+ bundle: true,
719
+ platform: "node",
720
+ target: "node20",
721
+ outfile: handlerFile,
722
+ external: [
723
+ "aws-sdk",
724
+ "canvas",
725
+ "@microfox/puppeteer-sls",
726
+ "@sparticuz/chromium"
727
+ ],
728
+ define: {
729
+ "process.env.UNLAZY": '"true"'
730
+ },
731
+ packages: "bundle",
732
+ logLevel: "error"
733
+ });
734
+ fs.unlinkSync(tempEntryFile);
735
+ console.log(import_chalk.default.green(`\u2713 Generated /workers/trigger handler`));
736
+ }
737
+ function generateWorkersConfigHandler(outputDir, workers, serviceName) {
738
+ const handlerFile = path.join(outputDir, "handlers", "workers-config.js");
739
+ const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
740
+ const handlerDir = path.dirname(handlerFile);
741
+ // Ensure the handlers directory exists before the temp entry file is written.
742
+ if (!fs.existsSync(handlerDir)) {
743
+ fs.mkdirSync(handlerDir, { recursive: true });
744
+ }
745
+ const handlerContent = `/**
746
+ * Auto-generated workers-config Lambda handler
747
+ * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
748
+ */
749
+
750
+ import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
751
+ import { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';
752
+
753
+ // Worker IDs embedded at build time so this endpoint doesn't depend on any generated files.
754
+ const WORKER_IDS: string[] = ${JSON.stringify(workers.map((w) => w.id), null, 2)};
755
+ const SERVICE_NAME = ${JSON.stringify(serviceName)};
756
+
757
+ export const handler = async (
758
+ event: APIGatewayProxyEvent
759
+ ): Promise<APIGatewayProxyResult> => {
760
+ // ... same logic ...
761
+ // Check API key if configured
762
+ const apiKey = process.env.WORKERS_CONFIG_API_KEY;
763
+ if (apiKey) {
764
+ const providedKey = event.headers['x-workers-config-key'] || event.headers['X-Workers-Config-Key'];
765
+ if (providedKey !== apiKey) {
766
+ return {
767
+ statusCode: 401,
768
+ headers: { 'Content-Type': 'application/json' },
769
+ body: JSON.stringify({ error: 'Unauthorized' }),
770
+ };
771
+ }
772
+ }
773
+
774
+ // Stage resolution:
775
+ // - Prefer API Gateway stage (microfox tends to deploy APIs on "prod")
776
+ // - Fallback to ENVIRONMENT/STAGE env vars
777
+ // - Default to "prod" (safer for microfox) if nothing else is set
778
+ const stage =
779
+ (event as any)?.requestContext?.stage ||
780
+ process.env.ENVIRONMENT ||
781
+ process.env.STAGE ||
782
+ 'prod';
783
+ const region = process.env.AWS_REGION || 'us-east-1';
784
+
785
+ // Resolve queue URLs dynamically via SQS so we return actual URLs.
786
+ // NOTE: Node 20 Lambda runtime does NOT guarantee 'aws-sdk' v2 is available.
787
+ // We use AWS SDK v3 and bundle it into this handler.
788
+ const sqs = new SQSClient({ region });
789
+ const workers: Record<string, { queueUrl: string; region: string }> = {};
790
+ const attemptedQueueNames: string[] = [];
791
+ const errors: Array<{ workerId: string; queueName: string; message: string; name?: string }> = [];
792
+ const debug = event.queryStringParameters?.debug === '1' || event.queryStringParameters?.debug === 'true';
793
+
794
+ await Promise.all(
795
+ WORKER_IDS.map(async (workerId) => {
796
+ const queueName = \`\${SERVICE_NAME}-\${workerId}-\${stage}\`;
797
+ attemptedQueueNames.push(queueName);
798
+ try {
799
+ const result = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
800
+ if (result?.QueueUrl) {
801
+ workers[workerId] = { queueUrl: String(result.QueueUrl), region };
802
+ }
803
+ } catch (e) {
804
+ const err = e as any;
805
+ const message = String(err?.message || err || 'Unknown error');
806
+ const name = err?.name ? String(err.name) : undefined;
807
+ // Log so CloudWatch shows what's going on (nonexistent queue vs permission vs region).
808
+ console.error('[workers-config] getQueueUrl failed', { workerId, queueName, name, message });
809
+ errors.push({ workerId, queueName, name, message });
810
+ }
811
+ })
812
+ );
813
+
814
+ return {
815
+ statusCode: 200,
816
+ headers: {
817
+ 'Content-Type': 'application/json',
818
+ 'Access-Control-Allow-Origin': '*',
819
+ },
820
+ body: JSON.stringify({
821
+ version: '1.0.0',
822
+ stage,
823
+ region,
824
+ workers,
825
+ ...(debug ? { attemptedQueueNames, errors } : {}),
826
+ }),
827
+ };
828
+ };
829
+ `;
830
+ fs.writeFileSync(tempEntryFile, handlerContent);
831
+ esbuild.buildSync({
832
+ entryPoints: [tempEntryFile],
833
+ bundle: true,
834
+ platform: "node",
835
+ target: "node20",
836
+ outfile: handlerFile,
837
+ external: [
838
+ "aws-sdk",
839
+ "canvas",
840
+ "@microfox/puppeteer-sls",
841
+ "@sparticuz/chromium"
842
+ ],
843
+ define: {
844
+ "process.env.UNLAZY": '"true"'
845
+ },
846
+ packages: "bundle"
847
+ });
848
+ fs.unlinkSync(tempEntryFile);
849
+ console.log(import_chalk.default.green(`\u2713 Generated workers-config handler`));
850
+ }
851
+ function loadEnvVars(envPath = ".env") {
852
+ const env = {};
853
+ if (!fs.existsSync(envPath)) {
854
+ console.warn(import_chalk.default.yellow(`\u26A0\uFE0F .env file not found at ${envPath}`));
855
+ return env;
856
+ }
857
+ const content = fs.readFileSync(envPath, "utf-8");
858
+ const lines = content.split("\n");
859
+ for (const line of lines) {
860
+ const trimmed = line.trim();
861
+ if (!trimmed || trimmed.startsWith("#")) continue;
862
+ const match = trimmed.match(/^([^=]+)=(.*)$/);
863
+ if (match) {
864
+ const key = match[1].trim();
865
+ const value = match[2].trim().replace(/^["']|["']$/g, "");
866
+ env[key] = value;
867
+ }
868
+ }
869
+ return env;
870
+ }
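+ // Illustrative example (not part of the published bundle): an .env file with
+ //   # comment
+ //   OPENAI_API_KEY="sk-abc"
+ //   STAGE=dev
+ // parses to { OPENAI_API_KEY: "sk-abc", STAGE: "dev" }; blank lines and
+ // comments are skipped, and surrounding quotes are stripped from values.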
871
+ function processScheduleEvents(scheduleConfig) {
872
+ if (!scheduleConfig) {
873
+ return [];
874
+ }
875
+ const events = [];
876
+ const schedules = Array.isArray(scheduleConfig) ? scheduleConfig : [scheduleConfig];
877
+ for (const schedule of schedules) {
878
+ if (typeof schedule === "string") {
879
+ events.push({
880
+ schedule
881
+ });
882
+ continue;
883
+ }
884
+ if (typeof schedule === "object" && schedule !== null) {
885
+ const scheduleEvent = { schedule: {} };
886
+ if (schedule.rate) {
887
+ // rate may be a single schedule expression (string) or an array of
888
+ // expressions; the serverless "schedule" event accepts both shapes,
889
+ // so the value is forwarded unchanged either way.
890
+ // (Validation of the expression itself is left to AWS at deploy time.)
891
+ scheduleEvent.schedule.rate = schedule.rate;
892
+ } else {
893
+ continue;
894
+ }
895
+ if (schedule.enabled !== void 0) {
896
+ scheduleEvent.schedule.enabled = schedule.enabled;
897
+ }
898
+ if (schedule.input !== void 0) {
899
+ scheduleEvent.schedule.input = schedule.input;
900
+ }
901
+ if (schedule.inputPath !== void 0) {
902
+ scheduleEvent.schedule.inputPath = schedule.inputPath;
903
+ }
904
+ if (schedule.inputTransformer !== void 0) {
905
+ scheduleEvent.schedule.inputTransformer = schedule.inputTransformer;
906
+ }
907
+ if (schedule.name !== void 0) {
908
+ scheduleEvent.schedule.name = schedule.name;
909
+ }
910
+ if (schedule.description !== void 0) {
911
+ scheduleEvent.schedule.description = schedule.description;
912
+ }
913
+ if (schedule.method !== void 0) {
914
+ scheduleEvent.schedule.method = schedule.method;
915
+ }
916
+ if (schedule.timezone !== void 0) {
917
+ scheduleEvent.schedule.timezone = schedule.timezone;
918
+ }
919
+ if (Object.keys(scheduleEvent.schedule).length === 1 && scheduleEvent.schedule.rate) {
920
+ if (typeof scheduleEvent.schedule.rate === "string") {
921
+ events.push({
922
+ schedule: scheduleEvent.schedule.rate
923
+ });
924
+ } else {
925
+ events.push(scheduleEvent);
926
+ }
927
+ } else {
928
+ events.push(scheduleEvent);
929
+ }
930
+ }
931
+ }
932
+ return events;
933
+ }
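+ // Illustrative examples (not part of the published bundle):
+ //   processScheduleEvents('rate(5 minutes)')
+ //     -> [{ schedule: 'rate(5 minutes)' }]
+ //   processScheduleEvents({ rate: 'cron(0 12 * * ? *)', enabled: false })
+ //     -> [{ schedule: { rate: 'cron(0 12 * * ? *)', enabled: false } }]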
934
+ function generateServerlessConfig(workers, stage, region, envVars, serviceName) {
935
+ const resources = {
936
+ Resources: {},
937
+ Outputs: {}
938
+ };
939
+ const queueArns = [];
940
+ const providerEnvironment = {
941
+ STAGE: stage,
942
+ NODE_ENV: stage
943
+ };
944
+ const customConfig = {
945
+ stage: `\${env:ENVIRONMENT, '${stage}'}`,
946
+ "serverless-offline": {
947
+ httpPort: 4000,
948
+ lambdaPort: 4002,
949
+ useChildProcesses: true,
950
+ useWorkerThreads: true,
951
+ noCookieValidation: true,
952
+ allowCache: true,
953
+ hideStackTraces: false,
954
+ disableCookieValidation: true,
955
+ noTimeout: true,
956
+ environment: "${file(env.json)}"
957
+ }
958
+ };
959
+ for (const worker of workers) {
960
+ const queueName = `WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, "")}`;
961
+ const queueLogicalId = `${queueName}${stage}`;
962
+ const dlqLogicalId = `${queueName}DLQ${stage}`;
963
+ const sqsCfg = worker.workerConfig?.sqs;
964
+ const retention = typeof sqsCfg?.messageRetentionPeriod === "number" ? sqsCfg.messageRetentionPeriod : 1209600;
965
+ const dlqRetention = typeof sqsCfg?.deadLetterMessageRetentionPeriod === "number" ? sqsCfg.deadLetterMessageRetentionPeriod : retention;
966
+ const visibilityTimeout = typeof sqsCfg?.visibilityTimeout === "number" ? sqsCfg.visibilityTimeout : (worker.workerConfig?.timeout || 300) + 60;
967
+ const maxReceiveCountRaw = typeof sqsCfg?.maxReceiveCount === "number" ? sqsCfg.maxReceiveCount : 1;
968
+ const maxReceiveCount = Math.max(1, Math.floor(maxReceiveCountRaw));
969
+ resources.Resources[dlqLogicalId] = {
970
+ Type: "AWS::SQS::Queue",
971
+ Properties: {
972
+ QueueName: `\${self:service}-${worker.id}-dlq-\${opt:stage, env:ENVIRONMENT, '${stage}'}`,
973
+ MessageRetentionPeriod: dlqRetention
974
+ }
975
+ };
976
+ resources.Resources[queueLogicalId] = {
977
+ Type: "AWS::SQS::Queue",
978
+ Properties: {
979
+ // Use ${self:service} to avoid hardcoding service name
980
+ QueueName: `\${self:service}-${worker.id}-\${opt:stage, env:ENVIRONMENT, '${stage}'}`,
981
+ VisibilityTimeout: visibilityTimeout,
982
+ MessageRetentionPeriod: retention,
983
+ RedrivePolicy: {
984
+ deadLetterTargetArn: { "Fn::GetAtt": [dlqLogicalId, "Arn"] },
985
+ maxReceiveCount
986
+ }
987
+ }
988
+ };
989
+ resources.Outputs[`${queueLogicalId}Url`] = {
990
+ Description: `Queue URL for worker ${worker.id}`,
991
+ Value: { Ref: queueLogicalId },
992
+ Export: {
993
+ Name: `\${self:service}-${worker.id}-queue-url`
994
+ }
995
+ };
996
+ queueArns.push({ "Fn::GetAtt": [queueLogicalId, "Arn"] });
997
+ }
998
+ const functions = {};
999
+ for (const worker of workers) {
1000
+ const functionName = `worker${worker.id.replace(/[^a-zA-Z0-9]/g, "")}`;
1001
+ const events = [
1002
+ {
1003
+ sqs: {
1004
+ arn: { "Fn::GetAtt": [`WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, "")}${stage}`, "Arn"] },
1005
+ batchSize: 1
1006
+ }
1007
+ }
1008
+ ];
1009
+ if (worker.workerConfig?.schedule) {
1010
+ const scheduleEvents = processScheduleEvents(worker.workerConfig.schedule);
1011
+ events.push(...scheduleEvents);
1012
+ }
1013
+ functions[functionName] = {
1014
+ // IMPORTANT: Keep AWS handler string to exactly one dot: "<modulePath>.handler"
1015
+ handler: `${worker.handlerPath}.handler`,
1016
+ timeout: worker.workerConfig?.timeout || 300,
1017
+ memorySize: worker.workerConfig?.memorySize || 512,
1018
+ events
1019
+ };
1020
+ if (worker.workerConfig?.layers?.length) {
1021
+ functions[functionName].layers = worker.workerConfig.layers;
1022
+ }
1023
+ }
1024
+ functions["getDocs"] = {
1025
+ handler: "handlers/docs.handler",
1026
+ events: [
1027
+ {
1028
+ http: {
1029
+ path: "/docs.json",
1030
+ method: "GET",
1031
+ cors: true
1032
+ }
1033
+ }
1034
+ ]
1035
+ };
1036
+ functions["triggerWorker"] = {
1037
+ handler: "handlers/workers-trigger.handler",
1038
+ events: [
1039
+ {
1040
+ http: {
1041
+ path: "/workers/trigger",
1042
+ method: "POST",
1043
+ cors: true
1044
+ }
1045
+ }
1046
+ ]
1047
+ };
1048
+ functions["workersConfig"] = {
1049
+ handler: "handlers/workers-config.handler",
1050
+ events: [
1051
+ {
1052
+ http: {
1053
+ path: "workers/config",
1054
+ method: "GET",
1055
+ cors: true
1056
+ }
1057
+ }
1058
+ ]
1059
+ };
1060
+ const safeEnvVars = {};
1061
+ const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
1062
+ for (const [key, value] of Object.entries(envVars)) {
1063
+ if (allowedPrefixes.some((prefix) => key.startsWith(prefix))) {
1064
+ safeEnvVars[key] = value;
1065
+ }
1066
+ }
1067
+ resources.Outputs["ApiEndpoints"] = {
1068
+ Description: "API Endpoints",
1069
+ Value: {
1070
+ "Fn::Join": [
1071
+ "",
1072
+ [
1073
+ "API: https://",
1074
+ { "Ref": "ApiGatewayRestApi" },
1075
+ ".execute-api.",
1076
+ { "Ref": "AWS::Region" },
1077
+ `.amazonaws.com/\${env:ENVIRONMENT, '${stage}'}`
1078
+ ]
1079
+ ]
1080
+ }
1081
+ };
1082
+ return {
1083
+ service: serviceName,
1084
+ package: {
1085
+ excludeDevDependencies: true,
1086
+ patterns: [
1087
+ "!venv/**",
1088
+ "!.idea/**",
1089
+ "!.vscode/**",
1090
+ "!src/**",
1091
+ "!node_modules/serverless-offline/**",
1092
+ "!node_modules/typescript/**",
1093
+ "!node_modules/@types/**",
1094
+ "!node_modules/aws-sdk/**",
1095
+ "!node_modules/@aws-sdk/**"
1096
+ ]
1097
+ },
1098
+ custom: customConfig,
1099
+ provider: {
1100
+ name: "aws",
1101
+ runtime: "nodejs20.x",
1102
+ region,
1103
+ versionFunctions: false,
1104
+ // Use ENVIRONMENT from env.json to drive the actual deployed stage (Microfox defaults to prod).
1105
+ stage: `\${env:ENVIRONMENT, '${stage}'}`,
1106
+ environment: "${file(env.json)}",
1107
+ iam: {
1108
+ role: {
1109
+ statements: [
1110
+ {
1111
+ Effect: "Allow",
1112
+ Action: [
1113
+ "sqs:SendMessage",
1114
+ "sqs:ReceiveMessage",
1115
+ "sqs:DeleteMessage",
1116
+ "sqs:GetQueueAttributes"
1117
+ ],
1118
+ Resource: queueArns
1119
+ },
1120
+ {
1121
+ Effect: "Allow",
1122
+ Action: ["sqs:GetQueueUrl"],
1123
+ // GetQueueUrl is not resource-scoped for unknown queue ARNs, must be '*'
1124
+ Resource: "*"
1125
+ }
1126
+ ]
1127
+ }
1128
+ }
1129
+ },
1130
+ plugins: ["serverless-offline"],
1131
+ functions,
1132
+ resources
1133
+ };
1134
+ }
1135
+ async function generateWorkersMap(stage, region, outputDir) {
1136
+ const serverlessDir = path.join(outputDir, ".serverless");
1137
+ if (!fs.existsSync(serverlessDir)) {
1138
+ fs.mkdirSync(serverlessDir, { recursive: true });
1139
+ }
1140
+ const workers = await scanWorkers();
1141
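+ // Note: the default service name already ends in "-<stage>", so the stage
+ // legitimately appears twice in the CloudFormation stack name; a custom
+ // --service-name is not reflected here and falls back to the catch branch below.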
+ const stackName = `ai-router-workers-${stage}-${stage}`;
1142
+ let queueUrls = {};
1143
+ const spinner = (0, import_ora.default)("Fetching CloudFormation outputs...").start();
1144
+ try {
1145
+ const output = (0, import_child_process.execSync)(
1146
+ `aws cloudformation describe-stacks --stack-name ${stackName} --region ${region} --query "Stacks[0].Outputs" --output json`,
1147
+ { encoding: "utf-8", stdio: "pipe" }
1148
+ );
1149
+ const outputs = JSON.parse(output);
1150
+ const outputMap = {};
1151
+ for (const output2 of outputs) {
1152
+ const key = output2.OutputKey;
1153
+ if (key && key.endsWith("Url")) {
1154
+ const workerId = key.replace("WorkerQueue", "").replace("Url", "").toLowerCase();
1155
+ outputMap[key] = output2.OutputValue;
1156
+ }
1157
+ }
1158
+ for (const worker of workers) {
1159
+ const sanitizedId = worker.id.replace(/[^a-zA-Z0-9]/g, "");
1160
+ const queueKey = `WorkerQueue${sanitizedId}${stage}Url`;
1161
+ const matchingKey = Object.keys(outputMap).find((k) => k.toLowerCase() === queueKey.toLowerCase());
1162
+ if (matchingKey && outputMap[matchingKey]) {
1163
+ queueUrls[worker.id] = {
1164
+ queueUrl: outputMap[matchingKey],
1165
+ region
1166
+ };
1167
+ }
1168
+ }
1169
+ spinner.succeed("Fetched CloudFormation outputs");
1170
+ } catch (error) {
1171
+ spinner.warn("Could not fetch CloudFormation outputs. Using deterministic queue URLs.");
1172
+ for (const worker of workers) {
1173
+ queueUrls[worker.id] = {
1174
+ queueUrl: `https://sqs.${"${aws:region}"}.amazonaws.com/${"${aws:accountId}"}/${"${self:service}"}-${worker.id}-${stage}`,
1175
+ region
1176
+ };
1177
+ }
1178
+ }
1179
+ const mapContent = `/**
1180
+ * Auto-generated workers map
1181
+ * DO NOT EDIT - This file is generated by deploy-workers script
1182
+ */
1183
+
1184
+ export const workersMap = ${JSON.stringify(queueUrls, null, 2)} as const;
1185
+ `;
1186
+ const mapFile = path.join(serverlessDir, "workers-map.generated.ts");
1187
+ fs.writeFileSync(mapFile, mapContent);
1188
+ console.log(import_chalk.default.green(`\u2713 Generated workers map: ${mapFile}`));
1189
+ }
1190
+ async function build2(args) {
1191
+ const stage = args.stage || process.env.STAGE || "prod";
1192
+ const region = args.region || process.env.AWS_REGION || "us-east-1";
1193
+ const aiPath = args["ai-path"] || "app/ai";
1194
+ console.log(import_chalk.default.blue(`\u{1F4E6} Building workers (stage: ${stage}, region: ${region})...`));
1195
+ const spinner = (0, import_ora.default)("Scanning workers...").start();
1196
+ const workers = await scanWorkers(aiPath);
1197
+ if (workers.length === 0) {
1198
+ spinner.warn("No workers found.");
1199
+ return;
1200
+ }
1201
+ spinner.succeed(`Found ${workers.length} worker(s)`);
1202
+ workers.forEach((w) => console.log(import_chalk.default.gray(` - ${w.id} (${w.filePath})`)));
1203
+ const serverlessDir = path.join(process.cwd(), ".serverless-workers");
1204
+ if (!fs.existsSync(serverlessDir)) {
1205
+ fs.mkdirSync(serverlessDir, { recursive: true });
1206
+ }
1207
+ const runtimeDeps = await collectRuntimeDependenciesForWorkers(
1208
+ workers.map((w) => w.filePath),
1209
+ process.cwd()
1210
+ );
1211
+ const dependencies = buildDependenciesMap(process.cwd(), runtimeDeps);
1212
+ const packageJson = {
1213
+ name: "ai-router-workers",
1214
+ version: "1.0.0",
1215
+ description: "Auto-generated serverless workers",
1216
+ private: true,
1217
+ dependencies,
1218
+ scripts: {
1219
+ build: "echo 'Already compiled.'"
1220
+ },
1221
+ devDependencies: {
1222
+ serverless: "^3.38.0",
1223
+ "serverless-offline": "^13.3.3",
1224
+ "@aws-sdk/client-sqs": "^3.700.0"
1225
+ }
1226
+ };
1227
+ fs.writeFileSync(
1228
+ path.join(serverlessDir, "package.json"),
1229
+ JSON.stringify(packageJson, null, 2)
1230
+ );
1231
+ const envVars = loadEnvVars();
1232
+ const workerEntryFiles = workers.map((w) => w.filePath);
1233
+ const { runtimeKeys: runtimeEnvKeys, buildtimeKeys: buildtimeEnvKeys } = await collectEnvUsageForWorkers(workerEntryFiles, process.cwd());
1234
+ const referencedEnvKeys = /* @__PURE__ */ new Set([
1235
+ ...Array.from(runtimeEnvKeys),
1236
+ ...Array.from(buildtimeEnvKeys)
1237
+ ]);
1238
+ const runtimeList = Array.from(runtimeEnvKeys).sort();
1239
+ const buildtimeList = Array.from(buildtimeEnvKeys).sort();
1240
+ const missingFromDotEnv = Array.from(referencedEnvKeys).filter((k) => !(k in envVars)).sort();
1241
+ if (runtimeList.length || buildtimeList.length) {
1242
+ console.log(
1243
+ import_chalk.default.blue(
1244
+ `\u2139\uFE0F Detected env usage from worker code: runtime=${runtimeList.length}, buildtime=${buildtimeList.length}`
1245
+ )
1246
+ );
1247
+ if (missingFromDotEnv.length > 0) {
1248
+ console.log(
1249
+ import_chalk.default.yellow(
1250
+ `\u26A0\uFE0F These referenced envs were not found in .env (so they will NOT be written to env.json): ${missingFromDotEnv.slice(0, 25).join(", ")}${missingFromDotEnv.length > 25 ? " ..." : ""}`
1251
+ )
1252
+ );
1253
+ }
1254
+ }
1255
+ let serviceName = args["service-name"]?.trim() || `ai-router-workers-${stage}`;
1256
+ const microfoxJsonPath = path.join(process.cwd(), "microfox.json");
1257
+ if (fs.existsSync(microfoxJsonPath)) {
1258
+ try {
1259
+ const microfoxConfig = JSON.parse(fs.readFileSync(microfoxJsonPath, "utf-8"));
1260
+ if (microfoxConfig.projectId) {
1261
+ if (!args["service-name"]?.trim()) {
1262
+ serviceName = getServiceNameFromProjectId(microfoxConfig.projectId);
1263
+ }
1264
+ console.log(import_chalk.default.blue(`\u2139\uFE0F Using service name from microfox.json: ${serviceName}`));
1265
+ }
1266
+ } catch (error) {
1267
+ console.warn(import_chalk.default.yellow("\u26A0\uFE0F Failed to parse microfox.json, using default service name"));
1268
+ }
1269
+ }
1270
+ (0, import_ora.default)("Generating handlers...").start().succeed("Generated handlers");
1271
+ await generateHandlers(workers, serverlessDir);
1272
+ const extractSpinner = (0, import_ora.default)("Extracting worker configs from bundled handlers...").start();
1273
+ for (const worker of workers) {
1274
+ try {
1275
+ const handlerFile = path.join(serverlessDir, worker.handlerPath + ".js");
1276
+ if (fs.existsSync(handlerFile)) {
1277
+ const handlerUrl = (0, import_url.pathToFileURL)(path.resolve(handlerFile)).href;
1278
+ try {
1279
+ const module2 = await import(handlerUrl);
1280
+ if (module2.exportedWorkerConfig) {
1281
+ worker.workerConfig = module2.exportedWorkerConfig;
1282
+ if (module2.exportedWorkerConfig.layers?.length) {
1283
+ console.log(import_chalk.default.gray(` \u2713 ${worker.id}: found ${module2.exportedWorkerConfig.layers.length} layer(s)`));
1284
+ }
1285
+ } else {
1286
+ console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: exportedWorkerConfig not found in handler`));
1287
+ }
1288
+ } catch (importError) {
1289
+ console.log(import_chalk.default.gray(` \u2139 ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || "runtime error"}...)`));
1290
+ try {
1291
+ const sourceContent = fs.readFileSync(worker.filePath, "utf-8");
1292
+ const workerConfigMatch = sourceContent.match(/export\s+const\s+workerConfig[^=]*=\s*(\{[\s\S]*?\});/);
1293
+ if (workerConfigMatch) {
1294
+ let configStr = workerConfigMatch[1].replace(/\/\*[\s\S]*?\*\//g, "").replace(/(^|\s)\/\/[^\n]*/gm, "$1");
1295
+ const configObj = new Function("return " + configStr)();
1296
+ if (configObj && (configObj.layers || configObj.timeout || configObj.memorySize || configObj.schedule)) {
1297
+ worker.workerConfig = configObj;
1298
+ if (configObj.layers?.length) {
1299
+ console.log(import_chalk.default.gray(` \u2713 ${worker.id}: found ${configObj.layers.length} layer(s) from source file`));
1300
+ }
1301
+ if (configObj.schedule) {
1302
+ console.log(import_chalk.default.gray(` \u2713 ${worker.id}: found schedule configuration`));
1303
+ }
1304
+ }
1305
+ }
1306
+ } catch (fallbackError) {
1307
+ console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: fallback extraction also failed, using defaults`));
1308
+ }
1309
+ }
1310
+ } else {
1311
+ console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: handler file not found: ${handlerFile}`));
1312
+ }
1313
+ } catch (error) {
1314
+ console.warn(import_chalk.default.yellow(` \u26A0 ${worker.id}: failed to extract config: ${error?.message || error}`));
1315
+ }
1316
+ }
1317
+ extractSpinner.succeed("Extracted configs");
1318
+ generateWorkersConfigHandler(serverlessDir, workers, serviceName);
1319
+ generateDocsHandler(serverlessDir, serviceName, stage, region);
1320
+ generateTriggerHandler(serverlessDir, serviceName);
1321
+ const config = generateServerlessConfig(workers, stage, region, envVars, serviceName);
1322
+ const envStage = fs.existsSync(microfoxJsonPath) ? "prod" : stage;
1323
+ const safeEnvVars = {
1324
+ ENVIRONMENT: envStage,
1325
+ STAGE: envStage,
1326
+ NODE_ENV: envStage
1327
+ };
1328
+ const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
1329
+ for (const [key, value] of Object.entries(envVars)) {
1330
+ if (key.startsWith("AWS_")) continue;
1331
+ if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {
1332
+ safeEnvVars[key] = value;
1333
+ }
1334
+ }
1335
+ fs.writeFileSync(
1336
+ path.join(serverlessDir, "env.json"),
1337
+ JSON.stringify(safeEnvVars, null, 2)
1338
+ );
1339
+ const yamlContent = yaml.dump(config, { indent: 2 });
1340
+ const yamlPath = path.join(serverlessDir, "serverless.yml");
1341
+ fs.writeFileSync(yamlPath, yamlContent);
1342
+ console.log(import_chalk.default.green(`\u2713 Generated serverless.yml: ${yamlPath}`));
1343
+ }
1344
+ async function deploy(args) {
1345
+ const stage = args.stage || process.env.STAGE || "prod";
1346
+ const region = args.region || process.env.AWS_REGION || "us-east-1";
1347
+ const skipDeploy = args["skip-deploy"] || false;
1348
+ const skipInstall = args["skip-install"] || false;
1349
+ if (skipDeploy) {
1350
+ console.log(import_chalk.default.yellow("\u23ED\uFE0F Skipping deployment (--skip-deploy flag)"));
1351
+ return;
1352
+ }
1353
+ const serverlessDir = path.join(process.cwd(), ".serverless-workers");
1354
+ const yamlPath = path.join(serverlessDir, "serverless.yml");
1355
+ if (!fs.existsSync(yamlPath)) {
1356
+ console.error(import_chalk.default.red('\u274C serverless.yml not found. Run "build" first.'));
1357
+ process.exit(1);
1358
+ }
1359
+ console.log(import_chalk.default.blue(`\u{1F680} Deploying to AWS (stage: ${stage}, region: ${region})...`));
1360
+ validateEnvironment();
1361
+ try {
1362
+ if (!skipInstall && !fs.existsSync(path.join(serverlessDir, "node_modules"))) {
1363
+ console.log(import_chalk.default.blue("\u{1F4E6} Installing serverless dependencies..."));
1364
+ (0, import_child_process.execSync)("npm install", {
1365
+ cwd: serverlessDir,
1366
+ stdio: "inherit"
1367
+ });
1368
+ }
1369
+ const microfoxJsonPath = path.join(process.cwd(), "microfox.json");
1370
+ if (fs.existsSync(microfoxJsonPath)) {
1371
+ console.log(import_chalk.default.blue("\u2139\uFE0F Found microfox.json, deploying via Microfox Cloud..."));
1372
+ fs.copyFileSync(microfoxJsonPath, path.join(serverlessDir, "microfox.json"));
1373
+ const envVars = loadEnvVars();
1374
+ (0, import_child_process.execSync)("npx microfox@latest push", {
1375
+ cwd: serverlessDir,
1376
+ stdio: "inherit"
1377
+ });
1378
+ console.log(import_chalk.default.green("\u2713 Deployment triggered via Microfox!"));
1379
+ return;
1380
+ }
1381
+ (0, import_child_process.execSync)("npx serverless deploy", {
1382
+ cwd: serverlessDir,
1383
+ stdio: "inherit",
1384
+ env: {
1385
+ ...process.env,
1386
+ STAGE: stage,
1387
+ AWS_REGION: region
1388
+ }
1389
+ });
1390
+ console.log(import_chalk.default.green("\u2713 Deployment complete!"));
1391
+ } catch (error) {
1392
+ console.error(import_chalk.default.red("\u274C Deployment failed"));
1393
+ process.exit(1);
1394
+ }
1395
+ await generateWorkersMap(stage, region, serverlessDir);
1396
+ }
1397
+ var pushCommand = new import_commander.Command().name("push").description("Build and deploy background workers to AWS").option("-s, --stage <stage>", "Deployment stage", "prod").option("-r, --region <region>", "AWS region", "us-east-1").option("--ai-path <path>", "Path to AI directory containing workers", "app/ai").option("--service-name <name>", "Override serverless service name (defaults to ai-router-workers-<stage>)").option("--skip-deploy", "Skip deployment, only build", false).option("--skip-install", "Skip npm install in serverless directory", false).action(async (options) => {
1398
+ await build2(options);
1399
+ await deploy(options);
1400
+ });
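+ // Typical invocations (assuming the package bin maps to this entry point):
+ //   npx @microfox/ai-worker-cli push --stage prod --region us-east-1
+ //   npx @microfox/ai-worker-cli push --skip-deploy   (build only, no deploy)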
1401
+
1402
+ // src/index.ts
1403
+ var program = new import_commander2.Command();
1404
+ program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version("0.1.0");
1405
+ program.addCommand(pushCommand);
1406
+ program.parse(process.argv);
1407
+ var aiWorkerCli = program;
1408
+ // Annotate the CommonJS export names for ESM import in node:
1409
+ 0 && (module.exports = {
1410
+ aiWorkerCli
1411
+ });
1412
+ //# sourceMappingURL=index.cjs.map