@microfox/ai-worker-cli 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1379 @@
+ #!/usr/bin/env node
+
+ // src/index.ts
+ import { Command as Command2 } from "commander";
+
+ // src/commands/push.ts
+ import { Command } from "commander";
+ import * as esbuild from "esbuild";
+ import { execSync } from "child_process";
+ import * as fs from "fs";
+ import * as path from "path";
+ import { pathToFileURL } from "url";
+ import { builtinModules } from "module";
+ import { glob } from "glob";
+ import * as yaml from "js-yaml";
+ import chalk from "chalk";
+ import ora from "ora";
+ var NODE_BUILTINS = new Set(
+   builtinModules.map((m) => m.startsWith("node:") ? m.slice("node:".length) : m)
+ );
+ function isBuiltinModule(specifier) {
+   const s = specifier.startsWith("node:") ? specifier.slice("node:".length) : specifier;
+   return NODE_BUILTINS.has(s);
+ }
+ function getPackageNameFromSpecifier(specifier) {
+   if (specifier.startsWith("@")) {
+     const [scope, name] = specifier.split("/");
+     return name ? `${scope}/${name}` : specifier;
+   }
+   return specifier.split("/")[0];
+ }
+ function tryResolveLocalImport(fromFile, specifier) {
+   const baseDir = path.dirname(fromFile);
+   const raw = path.resolve(baseDir, specifier);
+   const candidates = [
+     raw,
+     `${raw}.ts`,
+     `${raw}.tsx`,
+     `${raw}.js`,
+     `${raw}.mjs`,
+     `${raw}.cjs`
+   ];
+   for (const c of candidates) {
+     if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;
+   }
+   if (fs.existsSync(raw) && fs.statSync(raw).isDirectory()) {
+     const idxCandidates = [
+       path.join(raw, "index.ts"),
+       path.join(raw, "index.tsx"),
+       path.join(raw, "index.js"),
+       path.join(raw, "index.mjs"),
+       path.join(raw, "index.cjs")
+     ];
+     for (const c of idxCandidates) {
+       if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;
+     }
+   }
+   return null;
+ }
+ function extractImportSpecifiers(source) {
+   const specs = [];
+   const re1 = /(?:^|\n)\s*(?!import\s+type)(?:import|export)\s+[\s\S]*?\sfrom\s*['"]([^'"]+)['"]/g;
+   for (const match of source.matchAll(re1)) {
+     if (match[1]) specs.push(match[1]);
+   }
+   const re2 = /import\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
+   for (const match of source.matchAll(re2)) {
+     if (match[1]) specs.push(match[1]);
+   }
+   const re3 = /require\s*\(\s*['"]([^'"]+)['"]\s*\)/g;
+   for (const match of source.matchAll(re3)) {
+     if (match[1]) specs.push(match[1]);
+   }
+   return specs;
+ }
+ function extractEnvVarUsageFromSource(source) {
+   const runtimeKeys = /* @__PURE__ */ new Set();
+   const buildtimeKeys = /* @__PURE__ */ new Set();
+   const reProcessDot = /\bprocess\.env\??\.([A-Za-z_][A-Za-z0-9_]*)\b/g;
+   for (const match of source.matchAll(reProcessDot)) {
+     const key = match[1];
+     if (key) runtimeKeys.add(key);
+   }
+   const reProcessBracket = /\bprocess\.env\[\s*['"]([^'"]+)['"]\s*\]/g;
+   for (const match of source.matchAll(reProcessBracket)) {
+     const key = match[1];
+     if (key) runtimeKeys.add(key);
+   }
+   const reImportMetaDot = /\bimport\.meta\.env\.([A-Za-z_][A-Za-z0-9_]*)\b/g;
+   for (const match of source.matchAll(reImportMetaDot)) {
+     const key = match[1];
+     if (key) buildtimeKeys.add(key);
+   }
+   const reImportMetaBracket = /\bimport\.meta\.env\[\s*['"]([^'"]+)['"]\s*\]/g;
+   for (const match of source.matchAll(reImportMetaBracket)) {
+     const key = match[1];
+     if (key) buildtimeKeys.add(key);
+   }
+   return { runtimeKeys, buildtimeKeys };
+ }
+ async function collectEnvUsageForWorkers(workerEntryFiles, projectRoot) {
+   void projectRoot;
+   const runtimeKeys = /* @__PURE__ */ new Set();
+   const buildtimeKeys = /* @__PURE__ */ new Set();
+   const visited = /* @__PURE__ */ new Set();
+   const queue = [...workerEntryFiles];
+   while (queue.length > 0) {
+     const file = queue.pop();
+     const normalized = path.resolve(file);
+     if (visited.has(normalized)) continue;
+     visited.add(normalized);
+     if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;
+     const src = fs.readFileSync(normalized, "utf-8");
+     const usage = extractEnvVarUsageFromSource(src);
+     usage.runtimeKeys.forEach((k) => runtimeKeys.add(k));
+     usage.buildtimeKeys.forEach((k) => buildtimeKeys.add(k));
+     const specifiers = extractImportSpecifiers(src);
+     for (const spec of specifiers) {
+       if (!spec) continue;
+       if (spec.startsWith(".")) {
+         const resolved = tryResolveLocalImport(normalized, spec);
+         if (resolved) queue.push(resolved);
+         continue;
+       }
+       if (spec.startsWith("/")) continue;
+       if (isBuiltinModule(spec)) continue;
+     }
+   }
+   runtimeKeys.delete("");
+   buildtimeKeys.delete("");
+   runtimeKeys.delete("node");
+   buildtimeKeys.delete("node");
+   return { runtimeKeys, buildtimeKeys };
+ }
+ function readJsonFile(filePath) {
+   try {
+     return JSON.parse(fs.readFileSync(filePath, "utf-8"));
+   } catch {
+     return null;
+   }
+ }
+ function findMonorepoRoot(startDir) {
+   let dir = path.resolve(startDir);
+   while (true) {
+     const pkgPath = path.join(dir, "package.json");
+     if (fs.existsSync(pkgPath)) {
+       const pkg = readJsonFile(pkgPath);
+       if (pkg?.workspaces) return dir;
+     }
+     const parent = path.dirname(dir);
+     if (parent === dir) return startDir;
+     dir = parent;
+   }
+ }
+ async function collectRuntimeDependenciesForWorkers(workerEntryFiles, projectRoot) {
+   const deps = /* @__PURE__ */ new Set(["@microfox/ai-worker", "@aws-sdk/client-sqs"]);
+   const visited = /* @__PURE__ */ new Set();
+   const queue = [...workerEntryFiles];
+   while (queue.length > 0) {
+     const file = queue.pop();
+     const normalized = path.resolve(file);
+     if (visited.has(normalized)) continue;
+     visited.add(normalized);
+     if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;
+     const src = fs.readFileSync(normalized, "utf-8");
+     const specifiers = extractImportSpecifiers(src);
+     for (const spec of specifiers) {
+       if (!spec) continue;
+       if (spec.startsWith(".")) {
+         const resolved = tryResolveLocalImport(normalized, spec);
+         if (resolved) queue.push(resolved);
+         continue;
+       }
+       if (spec.startsWith("/")) continue;
+       if (isBuiltinModule(spec)) continue;
+       deps.add(getPackageNameFromSpecifier(spec));
+     }
+   }
+   deps.delete("");
+   deps.delete("node");
+   deps.delete("serverless");
+   deps.delete("serverless-offline");
+   deps.delete("@aws-sdk/client-sqs");
+   deps.delete("@microfox/ai-worker");
+   return deps;
+ }
+ function buildDependenciesMap(projectRoot, deps) {
+   const projectPkg = readJsonFile(path.join(projectRoot, "package.json")) || {};
+   const projectDeps = projectPkg.dependencies || {};
+   const projectDevDeps = projectPkg.devDependencies || {};
+   const repoRoot = findMonorepoRoot(projectRoot);
+   const workerPkg = readJsonFile(path.join(repoRoot, "packages", "ai-worker", "package.json")) || {};
+   const workerCliPkg = readJsonFile(
+     path.join(repoRoot, "packages", "ai-worker-cli", "package.json")
+   ) || {};
+   const workspaceDeps = {
+     ...workerPkg.dependencies || {},
+     ...workerPkg.devDependencies || {},
+     ...workerCliPkg.dependencies || {},
+     ...workerCliPkg.devDependencies || {}
+   };
+   const out = {};
+   for (const dep of Array.from(deps).sort()) {
+     const range = projectDeps[dep] || projectDevDeps[dep] || workspaceDeps[dep];
+     if (range) {
+       out[dep] = String(range);
+     }
+   }
+   return out;
+ }
+ function getServiceNameFromProjectId(projectId) {
+   const cleanedProjectId = projectId.replace(/-/g, "").slice(0, 15);
+   return `p-${cleanedProjectId}`;
+ }
+ function validateEnvironment() {
+   try {
+     execSync("npm --version", { stdio: "ignore" });
+   } catch (error) {
+     console.error(chalk.red("\u274C npm is not installed or not in PATH."));
+     process.exit(1);
+   }
+ }
+ async function scanWorkers(aiPath = "app/ai") {
+   const pattern = path.join(aiPath, "**/*.worker.ts").replace(/\\/g, "/");
+   const files = await glob(pattern);
+   const workers = [];
+   for (const filePath of files) {
+     try {
+       let workerConfig;
+       let workerId;
+       if (!workerId) {
+         const content = fs.readFileSync(filePath, "utf-8");
+         const idMatch = content.match(/createWorker\s*(?:<[^>]+>)?\s*\(\s*\{[\s\S]*?id:\s*['"]([^'"]+)['"]/);
+         if (!idMatch) {
+           console.warn(chalk.yellow(`\u26A0\uFE0F Skipping ${filePath}: No worker ID found`));
+           continue;
+         }
+         workerId = idMatch[1];
+       }
+       const relativePath = path.relative(aiPath, filePath);
+       const handlerDir = path.dirname(relativePath);
+       const handlerName = path.basename(relativePath, ".worker.ts");
+       const handlerPath = path.join("handlers", handlerDir, `${handlerName}`).replace(/\\/g, "/");
+       workers.push({
+         id: workerId,
+         filePath,
+         handlerPath,
+         workerConfig
+       });
+     } catch (error) {
+       console.error(chalk.red(`\u274C Error processing ${filePath}:`), error);
+     }
+   }
+   return workers;
+ }
+ async function generateHandlers(workers, outputDir) {
+   const handlersDir = path.join(outputDir, "handlers");
+   if (fs.existsSync(handlersDir)) {
+     fs.rmSync(handlersDir, { recursive: true, force: true });
+   }
+   fs.mkdirSync(handlersDir, { recursive: true });
+   for (const worker of workers) {
+     const handlerFile = path.join(handlersDir, worker.handlerPath.replace("handlers/", "") + ".js");
+     const handlerDir = path.dirname(handlerFile);
+     if (!fs.existsSync(handlerDir)) {
+       fs.mkdirSync(handlerDir, { recursive: true });
+     }
+     const handlerAbsPath = path.resolve(handlerFile);
+     const workerAbsPath = path.resolve(worker.filePath);
+     let relativeImportPath = path.relative(path.dirname(handlerAbsPath), workerAbsPath);
+     if (!relativeImportPath.startsWith(".")) {
+       relativeImportPath = "./" + relativeImportPath;
+     }
+     relativeImportPath = relativeImportPath.replace(/\.ts$/, "");
+     relativeImportPath = relativeImportPath.split(path.sep).join("/");
+     const fileContent = fs.readFileSync(worker.filePath, "utf-8");
+     const defaultExport = /export\s+default\s+createWorker/.test(fileContent);
+     const exportMatch = fileContent.match(/export\s+(const|let)\s+(\w+)\s*=\s*createWorker/);
+     const exportName = exportMatch ? exportMatch[2] : "worker";
+     const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
+     const workerRef = defaultExport ? "workerModule.default" : `workerModule.${exportName}`;
+     const tempEntryContent = `
+ import { createLambdaHandler } from '@microfox/ai-worker/handler';
+ import * as workerModule from '${relativeImportPath}';
+
+ const workerAgent = ${workerRef};
+ if (!workerAgent || typeof workerAgent.handler !== 'function') {
+   throw new Error('Worker module must export a createWorker result (default or named) with .handler');
+ }
+
+ export const handler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);
+ export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
+ `;
+     fs.writeFileSync(tempEntryFile, tempEntryContent);
+     try {
+       const fixLazyCachePlugin = {
+         name: "fix-lazy-cache",
+         setup(build3) {
+           build3.onEnd(async (result) => {
+             if (result.errors.length > 0) return;
+             let bundledCode = fs.readFileSync(handlerFile, "utf-8");
+             let modified = false;
+             const pattern = /(require\("kind-of",\s*"typeOf"\);\s*)require_for_own\(\);/g;
+             if (pattern.test(bundledCode)) {
+               bundledCode = bundledCode.replace(
+                 pattern,
+                 '$1require("for-own", "forOwn");'
+               );
+               modified = true;
+             }
+             if (bundledCode.includes("import_meta.url")) {
+               bundledCode = bundledCode.replace(
+                 /import_meta\.url/g,
+                 'require("url").pathToFileURL(__filename).href'
+               );
+               modified = true;
+             }
+             const beforeCreateRequire = bundledCode;
+             bundledCode = bundledCode.replace(
+               /\bcreateRequire\s*\(\s*(?:undefined|void\s*0)\s*\)/g,
+               'createRequire(require("url").pathToFileURL(__filename).href)'
+             );
+             if (bundledCode !== beforeCreateRequire) modified = true;
+             if (modified) {
+               fs.writeFileSync(handlerFile, bundledCode, "utf-8");
+             }
+           });
+         }
+       };
+       await esbuild.build({
+         entryPoints: [tempEntryFile],
+         bundle: true,
+         platform: "node",
+         target: "node20",
+         format: "cjs",
+         outfile: handlerFile,
+         // We exclude aws-sdk as it's included in Lambda runtime
+         // We exclude canvas because it's a binary dependency often problematic in bundling
+         external: [
+           "aws-sdk",
+           "canvas",
+           "@microfox/puppeteer-sls",
+           "@sparticuz/chromium"
+         ],
+         // Force lazy-cache to eagerly load modules during bundling
+         // This prevents runtime dynamic require() calls that fail in bundled code
+         define: {
+           "process.env.UNLAZY": '"true"'
+         },
+         // Force bundling of all packages to avoid runtime module resolution issues
+         // This ensures clone-deep, lazy-cache, and all transitive deps are bundled
+         packages: "bundle",
+         plugins: [fixLazyCachePlugin],
+         logLevel: "error"
+       });
+       fs.unlinkSync(tempEntryFile);
+     } catch (error) {
+       console.error(chalk.red(`Error bundling handler for ${worker.id}:`), error);
+     }
+   }
+   console.log(chalk.green(`\u2713 Generated ${workers.length} bundled handlers`));
+ }
+ function generateDocsHandler(outputDir, serviceName, stage, region) {
+   const handlerFile = path.join(outputDir, "handlers", "docs.js");
+   const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
+   const handlerDir = path.dirname(handlerFile);
+   if (!fs.existsSync(handlerDir)) {
+     fs.mkdirSync(handlerDir, { recursive: true });
+   }
+   const handlerContent = `/**
+  * Auto-generated docs handler for Microfox compatibility
+  * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
+  */
+
+ import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
+
+ export const handler = async (
+   event: APIGatewayProxyEvent
+ ): Promise<APIGatewayProxyResult> => {
+   // Return OpenAPI JSON for Microfox
+   const openapi = {
+     openapi: '3.0.3',
+     info: {
+       title: 'AI Worker Service',
+       version: '1.0.0',
+       description: 'Auto-generated OpenAPI for background workers service',
+     },
+     servers: [
+       {
+         url: 'https://{apiId}.execute-api.{region}.amazonaws.com/{stage}',
+         variables: {
+           apiId: { default: 'REPLACE_ME' },
+           region: { default: '${region}' },
+           stage: { default: '${stage}' },
+         },
+       },
+     ],
+     paths: {
+       '/docs.json': {
+         get: {
+           operationId: 'getDocs',
+           summary: 'Get OpenAPI schema',
+           responses: {
+             '200': {
+               description: 'OpenAPI JSON',
+               content: {
+                 'application/json': {
+                   schema: { type: 'object' },
+                 },
+               },
+             },
+           },
+         },
+       },
+       '/workers/config': {
+         get: {
+           operationId: 'getWorkersConfig',
+           summary: 'Get workers config (queue urls map)',
+           parameters: [
+             {
+               name: 'x-workers-config-key',
+               in: 'header',
+               required: false,
+               schema: { type: 'string' },
+               description: 'Optional API key header (if configured)',
+             },
+           ],
+           responses: {
+             '200': {
+               description: 'Workers config map',
+               content: {
+                 'application/json': {
+                   schema: {
+                     type: 'object',
+                     properties: {
+                       version: { type: 'string' },
+                       stage: { type: 'string' },
+                       region: { type: 'string' },
+                       workers: { type: 'object' },
+                     },
+                   },
+                 },
+               },
+             },
+             '401': {
+               description: 'Unauthorized',
+               content: {
+                 'application/json': {
+                   schema: {
+                     type: 'object',
+                     properties: { error: { type: 'string' } },
+                   },
+                 },
+               },
+             },
+           },
+         },
+       },
+       '/workers/trigger': {
+         post: {
+           operationId: 'triggerWorker',
+           summary: 'Trigger a worker by sending a raw SQS message body',
+           parameters: [
+             {
+               name: 'workerId',
+               in: 'query',
+               required: false,
+               schema: { type: 'string' },
+               description: 'Worker ID (can also be provided in JSON body as workerId)',
+             },
+             {
+               name: 'x-workers-trigger-key',
+               in: 'header',
+               required: false,
+               schema: { type: 'string' },
+               description: 'Optional API key header (if configured)',
+             },
+           ],
+           requestBody: {
+             required: true,
+             content: {
+               'application/json': {
+                 schema: {
+                   type: 'object',
+                   properties: {
+                     workerId: { type: 'string' },
+                     // Prefer sending the exact SQS message body your worker expects
+                     body: { type: 'object' },
+                     messageBody: { type: 'string' },
+                   },
+                   additionalProperties: true,
+                 },
+               },
+             },
+           },
+           responses: {
+             '200': {
+               description: 'Enqueued',
+               content: {
+                 'application/json': {
+                   schema: {
+                     type: 'object',
+                     properties: {
+                       ok: { type: 'boolean' },
+                       workerId: { type: 'string' },
+                       stage: { type: 'string' },
+                       queueName: { type: 'string' },
+                       queueUrl: { type: 'string' },
+                       messageId: { type: 'string' },
+                     },
+                   },
+                 },
+               },
+             },
+             '400': {
+               description: 'Bad request',
+               content: {
+                 'application/json': {
+                   schema: {
+                     type: 'object',
+                     properties: { error: { type: 'string' } },
+                   },
+                 },
+               },
+             },
+             '401': {
+               description: 'Unauthorized',
+               content: {
+                 'application/json': {
+                   schema: {
+                     type: 'object',
+                     properties: { error: { type: 'string' } },
+                   },
+                 },
+               },
+             },
+           },
+         },
+       },
+     },
+     'x-service': {
+       serviceName: '${serviceName}',
+       stage: '${stage}',
+       region: '${region}',
+     },
+   };
+
+   return {
+     statusCode: 200,
+     headers: {
+       'Content-Type': 'application/json',
+       'Access-Control-Allow-Origin': '*',
+     },
+     body: JSON.stringify(openapi, null, 2),
+   };
+ };
+ `;
+   fs.writeFileSync(tempEntryFile, handlerContent);
+   esbuild.buildSync({
+     entryPoints: [tempEntryFile],
+     bundle: true,
+     platform: "node",
+     target: "node20",
+     outfile: handlerFile,
+     external: [
+       "aws-sdk",
+       "canvas",
+       "@microfox/puppeteer-sls",
+       "@sparticuz/chromium"
+     ],
+     define: {
+       "process.env.UNLAZY": '"true"'
+     },
+     packages: "bundle"
+   });
+   fs.unlinkSync(tempEntryFile);
+   console.log(chalk.green(`\u2713 Generated docs.json handler`));
+ }
+ function generateTriggerHandler(outputDir, serviceName) {
+   const handlerFile = path.join(outputDir, "handlers", "workers-trigger.js");
+   const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
+   const handlerDir = path.dirname(handlerFile);
+   if (!fs.existsSync(handlerDir)) {
+     fs.mkdirSync(handlerDir, { recursive: true });
+   }
+   const handlerContent = `/**
+  * Auto-generated worker trigger handler
+  * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
+  */
+
+ import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
+ import { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';
+
+ const SERVICE_NAME = ${JSON.stringify(serviceName)};
+
+ function jsonResponse(statusCode: number, body: any): APIGatewayProxyResult {
+   return {
+     statusCode,
+     headers: {
+       'Content-Type': 'application/json',
+       'Access-Control-Allow-Origin': '*',
+     },
+     body: JSON.stringify(body),
+   };
+ }
+
+ export const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {
+   // Optional API key
+   const apiKey = process.env.WORKERS_TRIGGER_API_KEY;
+   if (apiKey) {
+     const providedKey = event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'];
+     if (providedKey !== apiKey) {
+       return jsonResponse(401, { error: 'Unauthorized' });
+     }
+   }
+
+   const stage =
+     (event as any)?.requestContext?.stage ||
+     process.env.ENVIRONMENT ||
+     process.env.STAGE ||
+     'prod';
+   const region = process.env.AWS_REGION || 'us-east-1';
+
+   const qsWorkerId = event.queryStringParameters?.workerId;
+
+   let parsedBody: any = undefined;
+   if (event.body) {
+     try {
+       parsedBody = JSON.parse(event.body);
+     } catch {
+       parsedBody = undefined;
+     }
+   }
+
+   const workerId = (parsedBody && parsedBody.workerId) || qsWorkerId;
+   if (!workerId || typeof workerId !== 'string') {
+     return jsonResponse(400, { error: 'workerId is required (query param workerId or JSON body workerId)' });
+   }
+
+   // Prefer JSON body fields, otherwise send raw event.body
+   let messageBody: string | undefined;
+   if (parsedBody && typeof parsedBody.messageBody === 'string') {
+     messageBody = parsedBody.messageBody;
+   } else if (parsedBody && parsedBody.body !== undefined) {
+     messageBody = typeof parsedBody.body === 'string' ? parsedBody.body : JSON.stringify(parsedBody.body);
+   } else if (event.body) {
+     messageBody = event.body;
+   }
+
+   if (!messageBody) {
+     return jsonResponse(400, { error: 'body/messageBody is required' });
+   }
+
+   const queueName = \`\${SERVICE_NAME}-\${workerId}-\${stage}\`;
+   const sqs = new SQSClient({ region });
+
+   let queueUrl: string;
+   try {
+     const urlRes = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
+     if (!urlRes.QueueUrl) {
+       return jsonResponse(404, { error: 'Queue URL not found', queueName });
+     }
+     queueUrl = String(urlRes.QueueUrl);
+   } catch (e: any) {
+     return jsonResponse(404, { error: 'Queue does not exist or not accessible', queueName, message: String(e?.message || e) });
+   }
+
+   try {
+     const sendRes = await sqs.send(new SendMessageCommand({ QueueUrl: queueUrl, MessageBody: messageBody }));
+     return jsonResponse(200, {
+       ok: true,
+       workerId,
+       stage,
+       queueName,
+       queueUrl,
+       messageId: sendRes.MessageId || null,
+     });
+   } catch (e: any) {
+     return jsonResponse(500, { error: 'Failed to send message', message: String(e?.message || e) });
+   }
+ };
+ `;
+   fs.writeFileSync(tempEntryFile, handlerContent);
+   esbuild.buildSync({
+     entryPoints: [tempEntryFile],
+     bundle: true,
+     platform: "node",
+     target: "node20",
+     outfile: handlerFile,
+     external: [
+       "aws-sdk",
+       "canvas",
+       "@microfox/puppeteer-sls",
+       "@sparticuz/chromium"
+     ],
+     define: {
+       "process.env.UNLAZY": '"true"'
+     },
+     packages: "bundle",
+     logLevel: "error"
+   });
+   fs.unlinkSync(tempEntryFile);
+   console.log(chalk.green(`\u2713 Generated /workers/trigger handler`));
+ }
+ function generateWorkersConfigHandler(outputDir, workers, serviceName) {
+   const handlerFile = path.join(outputDir, "handlers", "workers-config.js");
+   const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
+   const handlerDir = path.dirname(handlerFile);
+   if (!fs.existsSync(handlerDir)) {
+     fs.mkdirSync(handlerDir, { recursive: true });
+   }
+   const handlerContent = `/**
+  * Auto-generated workers-config Lambda handler
+  * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
+  */
+
+ import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
+ import { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';
+
+ // Worker IDs embedded at build time so this endpoint doesn't depend on any generated files.
+ const WORKER_IDS: string[] = ${JSON.stringify(workers.map((w) => w.id), null, 2)};
+ const SERVICE_NAME = ${JSON.stringify(serviceName)};
+
+ export const handler = async (
+   event: APIGatewayProxyEvent
+ ): Promise<APIGatewayProxyResult> => {
+   // Check API key if configured
+   const apiKey = process.env.WORKERS_CONFIG_API_KEY;
+   if (apiKey) {
+     const providedKey = event.headers['x-workers-config-key'] || event.headers['X-Workers-Config-Key'];
+     if (providedKey !== apiKey) {
+       return {
+         statusCode: 401,
+         headers: { 'Content-Type': 'application/json' },
+         body: JSON.stringify({ error: 'Unauthorized' }),
+       };
+     }
+   }
+
+   // Stage resolution:
+   // - Prefer API Gateway stage (microfox tends to deploy APIs on "prod")
+   // - Fallback to ENVIRONMENT/STAGE env vars
+   // - Default to "prod" (safer for microfox) if nothing else is set
+   const stage =
+     (event as any)?.requestContext?.stage ||
+     process.env.ENVIRONMENT ||
+     process.env.STAGE ||
+     'prod';
+   const region = process.env.AWS_REGION || 'us-east-1';
+
+   // Resolve queue URLs dynamically via SQS so we return actual URLs.
+   // NOTE: Node 20 Lambda runtime does NOT guarantee 'aws-sdk' v2 is available.
+   // We use AWS SDK v3 and bundle it into this handler.
+   const sqs = new SQSClient({ region });
+   const workers: Record<string, { queueUrl: string; region: string }> = {};
+   const attemptedQueueNames: string[] = [];
+   const errors: Array<{ workerId: string; queueName: string; message: string; name?: string }> = [];
+   const debug = event.queryStringParameters?.debug === '1' || event.queryStringParameters?.debug === 'true';
+
+   await Promise.all(
+     WORKER_IDS.map(async (workerId) => {
+       const queueName = \`\${SERVICE_NAME}-\${workerId}-\${stage}\`;
+       attemptedQueueNames.push(queueName);
+       try {
+         const result = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
+         if (result?.QueueUrl) {
+           workers[workerId] = { queueUrl: String(result.QueueUrl), region };
+         }
+       } catch (e) {
+         const err = e as any;
+         const message = String(err?.message || err || 'Unknown error');
+         const name = err?.name ? String(err.name) : undefined;
+         // Log so CloudWatch shows what's going on (nonexistent queue vs permission vs region).
+         console.error('[workers-config] getQueueUrl failed', { workerId, queueName, name, message });
+         errors.push({ workerId, queueName, name, message });
+       }
+     })
+   );
+
+   return {
+     statusCode: 200,
+     headers: {
+       'Content-Type': 'application/json',
+       'Access-Control-Allow-Origin': '*',
+     },
+     body: JSON.stringify({
+       version: '1.0.0',
+       stage,
+       region,
+       workers,
+       ...(debug ? { attemptedQueueNames, errors } : {}),
+     }),
+   };
+ };
+ `;
+   fs.writeFileSync(tempEntryFile, handlerContent);
+   esbuild.buildSync({
+     entryPoints: [tempEntryFile],
+     bundle: true,
+     platform: "node",
+     target: "node20",
+     outfile: handlerFile,
+     external: [
+       "aws-sdk",
+       "canvas",
+       "@microfox/puppeteer-sls",
+       "@sparticuz/chromium"
+     ],
+     define: {
+       "process.env.UNLAZY": '"true"'
+     },
+     packages: "bundle"
+   });
+   fs.unlinkSync(tempEntryFile);
+   console.log(chalk.green(`\u2713 Generated workers-config handler`));
+ }
+ function loadEnvVars(envPath = ".env") {
+   const env = {};
+   if (!fs.existsSync(envPath)) {
+     console.warn(chalk.yellow(`\u26A0\uFE0F .env file not found at ${envPath}`));
+     return env;
+   }
+   const content = fs.readFileSync(envPath, "utf-8");
+   const lines = content.split("\n");
+   for (const line of lines) {
+     const trimmed = line.trim();
+     if (!trimmed || trimmed.startsWith("#")) continue;
+     const match = trimmed.match(/^([^=]+)=(.*)$/);
+     if (match) {
+       const key = match[1].trim();
+       const value = match[2].trim().replace(/^["']|["']$/g, "");
+       env[key] = value;
+     }
+   }
+   return env;
+ }
+ function processScheduleEvents(scheduleConfig) {
+   if (!scheduleConfig) {
+     return [];
+   }
+   const events = [];
+   const schedules = Array.isArray(scheduleConfig) ? scheduleConfig : [scheduleConfig];
+   for (const schedule of schedules) {
+     if (typeof schedule === "string") {
+       events.push({
+         schedule
+       });
+       continue;
+     }
+     if (typeof schedule === "object" && schedule !== null) {
+       const scheduleEvent = { schedule: {} };
+       if (schedule.rate) {
+         scheduleEvent.schedule.rate = schedule.rate;
+       } else {
+         continue;
+       }
+       if (schedule.enabled !== void 0) {
+         scheduleEvent.schedule.enabled = schedule.enabled;
+       }
+       if (schedule.input !== void 0) {
+         scheduleEvent.schedule.input = schedule.input;
+       }
+       if (schedule.inputPath !== void 0) {
+         scheduleEvent.schedule.inputPath = schedule.inputPath;
+       }
+       if (schedule.inputTransformer !== void 0) {
+         scheduleEvent.schedule.inputTransformer = schedule.inputTransformer;
+       }
+       if (schedule.name !== void 0) {
+         scheduleEvent.schedule.name = schedule.name;
+       }
+       if (schedule.description !== void 0) {
+         scheduleEvent.schedule.description = schedule.description;
+       }
+       if (schedule.method !== void 0) {
+         scheduleEvent.schedule.method = schedule.method;
+       }
+       if (schedule.timezone !== void 0) {
+         scheduleEvent.schedule.timezone = schedule.timezone;
+       }
+       if (Object.keys(scheduleEvent.schedule).length === 1 && scheduleEvent.schedule.rate) {
+         if (typeof scheduleEvent.schedule.rate === "string") {
+           events.push({
+             schedule: scheduleEvent.schedule.rate
+           });
+         } else {
+           events.push(scheduleEvent);
+         }
+       } else {
+         events.push(scheduleEvent);
+       }
+     }
+   }
+   return events;
+ }
+ function generateServerlessConfig(workers, stage, region, envVars, serviceName) {
+   const resources = {
+     Resources: {},
+     Outputs: {}
+   };
+   const queueArns = [];
+   const providerEnvironment = {
+     STAGE: stage,
+     NODE_ENV: stage
+   };
+   const customConfig = {
+     stage: `\${env:ENVIRONMENT, '${stage}'}`,
+     "serverless-offline": {
+       httpPort: 4000,
+       lambdaPort: 4002,
+       useChildProcesses: true,
+       useWorkerThreads: true,
+       noCookieValidation: true,
+       allowCache: true,
+       hideStackTraces: false,
+       disableCookieValidation: true,
+       noTimeout: true,
+       environment: "${file(env.json)}"
+     }
+   };
+   for (const worker of workers) {
+     const queueName = `WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, "")}`;
+     const queueLogicalId = `${queueName}${stage}`;
+     const dlqLogicalId = `${queueName}DLQ${stage}`;
+     const sqsCfg = worker.workerConfig?.sqs;
+     const retention = typeof sqsCfg?.messageRetentionPeriod === "number" ? sqsCfg.messageRetentionPeriod : 1209600;
+     const dlqRetention = typeof sqsCfg?.deadLetterMessageRetentionPeriod === "number" ? sqsCfg.deadLetterMessageRetentionPeriod : retention;
+     const visibilityTimeout = typeof sqsCfg?.visibilityTimeout === "number" ? sqsCfg.visibilityTimeout : (worker.workerConfig?.timeout || 300) + 60;
+     const maxReceiveCountRaw = typeof sqsCfg?.maxReceiveCount === "number" ? sqsCfg.maxReceiveCount : 1;
+     const maxReceiveCount = Math.max(1, Math.floor(maxReceiveCountRaw));
+     resources.Resources[dlqLogicalId] = {
+       Type: "AWS::SQS::Queue",
+       Properties: {
+         QueueName: `\${self:service}-${worker.id}-dlq-\${opt:stage, env:ENVIRONMENT, '${stage}'}`,
+         MessageRetentionPeriod: dlqRetention
+       }
+     };
+     resources.Resources[queueLogicalId] = {
+       Type: "AWS::SQS::Queue",
+       Properties: {
+         // Use ${self:service} to avoid hardcoding service name
+         QueueName: `\${self:service}-${worker.id}-\${opt:stage, env:ENVIRONMENT, '${stage}'}`,
+         VisibilityTimeout: visibilityTimeout,
+         MessageRetentionPeriod: retention,
+         RedrivePolicy: {
+           deadLetterTargetArn: { "Fn::GetAtt": [dlqLogicalId, "Arn"] },
+           maxReceiveCount
+         }
+       }
+     };
+     resources.Outputs[`${queueLogicalId}Url`] = {
+       Description: `Queue URL for worker ${worker.id}`,
+       Value: { Ref: queueLogicalId },
+       Export: {
+         Name: `\${self:service}-${worker.id}-queue-url`
+       }
+     };
+     queueArns.push({ "Fn::GetAtt": [queueLogicalId, "Arn"] });
+   }
+   const functions = {};
+   for (const worker of workers) {
+     const functionName = `worker${worker.id.replace(/[^a-zA-Z0-9]/g, "")}`;
+     const events = [
+       {
+         sqs: {
+           arn: { "Fn::GetAtt": [`WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, "")}${stage}`, "Arn"] },
+           batchSize: 1
+         }
+       }
+     ];
+     if (worker.workerConfig?.schedule) {
+       const scheduleEvents = processScheduleEvents(worker.workerConfig.schedule);
+       events.push(...scheduleEvents);
+     }
+     functions[functionName] = {
+       // IMPORTANT: Keep AWS handler string to exactly one dot: "<modulePath>.handler"
+       handler: `${worker.handlerPath}.handler`,
+       timeout: worker.workerConfig?.timeout || 300,
+       memorySize: worker.workerConfig?.memorySize || 512,
+       events
+     };
+     if (worker.workerConfig?.layers?.length) {
+       functions[functionName].layers = worker.workerConfig.layers;
+     }
+   }
+   functions["getDocs"] = {
+     handler: "handlers/docs.handler",
+     events: [
+       {
+         http: {
+           path: "/docs.json",
+           method: "GET",
+           cors: true
+         }
+       }
+     ]
+   };
+   functions["triggerWorker"] = {
+     handler: "handlers/workers-trigger.handler",
+     events: [
+       {
+         http: {
+           path: "/workers/trigger",
+           method: "POST",
+           cors: true
+         }
+       }
+     ]
+   };
+   functions["workersConfig"] = {
+     handler: "handlers/workers-config.handler",
+     events: [
+       {
+         http: {
+           path: "workers/config",
+           method: "GET",
+           cors: true
+         }
+       }
+     ]
+   };
+   const safeEnvVars = {};
+   const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+   for (const [key, value] of Object.entries(envVars)) {
+     if (allowedPrefixes.some((prefix) => key.startsWith(prefix))) {
+       safeEnvVars[key] = value;
+     }
+   }
+   resources.Outputs["ApiEndpoints"] = {
+     Description: "API Endpoints",
+     Value: {
+       "Fn::Join": [
+         "",
+         [
+           "API: https://",
+           { "Ref": "ApiGatewayRestApi" },
+           ".execute-api.",
+           { "Ref": "AWS::Region" },
+           `.amazonaws.com/\${env:ENVIRONMENT, '${stage}'}`
+         ]
+       ]
+     }
+   };
+   return {
+     service: serviceName,
+     package: {
+       excludeDevDependencies: true,
+       patterns: [
+         "!venv/**",
+         "!.idea/**",
+         "!.vscode/**",
+         "!src/**",
+         "!node_modules/serverless-offline/**",
+         "!node_modules/typescript/**",
+         "!node_modules/@types/**",
+         "!node_modules/aws-sdk/**",
+         "!node_modules/@aws-sdk/**"
+       ]
+     },
+     custom: customConfig,
+     provider: {
+       name: "aws",
+       runtime: "nodejs20.x",
+       region,
+       versionFunctions: false,
+       // Use ENVIRONMENT from env.json to drive the actual deployed stage (Microfox defaults to prod).
+       stage: `\${env:ENVIRONMENT, '${stage}'}`,
+       environment: "${file(env.json)}",
+       iam: {
+         role: {
+           statements: [
+             {
+               Effect: "Allow",
+               Action: [
+                 "sqs:SendMessage",
+                 "sqs:ReceiveMessage",
+                 "sqs:DeleteMessage",
+                 "sqs:GetQueueAttributes"
+               ],
+               Resource: queueArns
+             },
+             {
+               Effect: "Allow",
+               Action: ["sqs:GetQueueUrl"],
+               // GetQueueUrl is not resource-scoped for unknown queue ARNs, must be '*'
+               Resource: "*"
+             }
+           ]
+         }
+       }
+     },
+     plugins: ["serverless-offline"],
+     functions,
+     resources
+   };
+ }
+ async function generateWorkersMap(stage, region, outputDir) {
+   const serverlessDir = path.join(outputDir, ".serverless");
+   if (!fs.existsSync(serverlessDir)) {
+     fs.mkdirSync(serverlessDir, { recursive: true });
+   }
+   const workers = await scanWorkers();
+   const stackName = `ai-router-workers-${stage}-${stage}`;
+   let queueUrls = {};
+   const spinner = ora("Fetching CloudFormation outputs...").start();
+   try {
+     const output = execSync(
+       `aws cloudformation describe-stacks --stack-name ${stackName} --region ${region} --query "Stacks[0].Outputs" --output json`,
+       { encoding: "utf-8", stdio: "pipe" }
+     );
+     const outputs = JSON.parse(output);
+     const outputMap = {};
+     for (const output2 of outputs) {
+       const key = output2.OutputKey;
+       if (key && key.endsWith("Url")) {
+         const workerId = key.replace("WorkerQueue", "").replace("Url", "").toLowerCase();
+         outputMap[key] = output2.OutputValue;
+       }
+     }
+     for (const worker of workers) {
+       const sanitizedId = worker.id.replace(/[^a-zA-Z0-9]/g, "");
+       const queueKey = `WorkerQueue${sanitizedId}${stage}Url`;
+       const matchingKey = Object.keys(outputMap).find((k) => k.toLowerCase() === queueKey.toLowerCase());
+       if (matchingKey && outputMap[matchingKey]) {
+         queueUrls[worker.id] = {
+           queueUrl: outputMap[matchingKey],
+           region
+         };
+       }
+     }
+     spinner.succeed("Fetched CloudFormation outputs");
+   } catch (error) {
+     spinner.warn("Could not fetch CloudFormation outputs. Using deterministic queue URLs.");
+     for (const worker of workers) {
+       queueUrls[worker.id] = {
+         queueUrl: `https://sqs.${"${aws:region}"}.amazonaws.com/${"${aws:accountId}"}/${"${self:service}"}-${worker.id}-${stage}`,
+         region
+       };
+     }
+   }
+   const mapContent = `/**
+  * Auto-generated workers map
+  * DO NOT EDIT - This file is generated by deploy-workers script
+  */
+
+ export const workersMap = ${JSON.stringify(queueUrls, null, 2)} as const;
+ `;
+   const mapFile = path.join(serverlessDir, "workers-map.generated.ts");
+   fs.writeFileSync(mapFile, mapContent);
+   console.log(chalk.green(`\u2713 Generated workers map: ${mapFile}`));
+ }
+ async function build2(args) {
+   const stage = args.stage || process.env.STAGE || "prod";
+   const region = args.region || process.env.AWS_REGION || "us-east-1";
+   const aiPath = args["ai-path"] || "app/ai";
+   console.log(chalk.blue(`\u{1F4E6} Building workers (stage: ${stage}, region: ${region})...`));
+   const spinner = ora("Scanning workers...").start();
+   const workers = await scanWorkers(aiPath);
+   if (workers.length === 0) {
+     spinner.warn("No workers found.");
+     return;
+   }
+   spinner.succeed(`Found ${workers.length} worker(s)`);
+   workers.forEach((w) => console.log(chalk.gray(` - ${w.id} (${w.filePath})`)));
+   const serverlessDir = path.join(process.cwd(), ".serverless-workers");
+   if (!fs.existsSync(serverlessDir)) {
+     fs.mkdirSync(serverlessDir, { recursive: true });
+   }
+   const runtimeDeps = await collectRuntimeDependenciesForWorkers(
+     workers.map((w) => w.filePath),
+     process.cwd()
+   );
+   const dependencies = buildDependenciesMap(process.cwd(), runtimeDeps);
+   const packageJson = {
+     name: "ai-router-workers",
+     version: "1.0.0",
+     description: "Auto-generated serverless workers",
+     private: true,
+     dependencies,
+     scripts: {
+       build: "echo 'Already compiled.'"
+     },
+     devDependencies: {
+       serverless: "^3.38.0",
+       "serverless-offline": "^13.3.3",
+       "@aws-sdk/client-sqs": "^3.700.0"
+     }
+   };
+   fs.writeFileSync(
+     path.join(serverlessDir, "package.json"),
+     JSON.stringify(packageJson, null, 2)
+   );
+   const envVars = loadEnvVars();
+   const workerEntryFiles = workers.map((w) => w.filePath);
+   const { runtimeKeys: runtimeEnvKeys, buildtimeKeys: buildtimeEnvKeys } = await collectEnvUsageForWorkers(workerEntryFiles, process.cwd());
+   const referencedEnvKeys = /* @__PURE__ */ new Set([
+     ...Array.from(runtimeEnvKeys),
+     ...Array.from(buildtimeEnvKeys)
+   ]);
+   const runtimeList = Array.from(runtimeEnvKeys).sort();
+   const buildtimeList = Array.from(buildtimeEnvKeys).sort();
+   const missingFromDotEnv = Array.from(referencedEnvKeys).filter((k) => !(k in envVars)).sort();
+   if (runtimeList.length || buildtimeList.length) {
+     console.log(
+       chalk.blue(
+         `\u2139\uFE0F Detected env usage from worker code: runtime=${runtimeList.length}, buildtime=${buildtimeList.length}`
+       )
+     );
+     if (missingFromDotEnv.length > 0) {
+       console.log(
+         chalk.yellow(
+           `\u26A0\uFE0F These referenced envs were not found in .env (so they will NOT be written to env.json): ${missingFromDotEnv.slice(0, 25).join(", ")}${missingFromDotEnv.length > 25 ? " ..." : ""}`
+         )
+       );
+     }
+   }
+   let serviceName = args["service-name"]?.trim() || `ai-router-workers-${stage}`;
+   const microfoxJsonPath = path.join(process.cwd(), "microfox.json");
+   if (fs.existsSync(microfoxJsonPath)) {
+     try {
+       const microfoxConfig = JSON.parse(fs.readFileSync(microfoxJsonPath, "utf-8"));
+       if (microfoxConfig.projectId) {
+         if (!args["service-name"]?.trim()) {
+           serviceName = getServiceNameFromProjectId(microfoxConfig.projectId);
+         }
+         console.log(chalk.blue(`\u2139\uFE0F Using service name from microfox.json: ${serviceName}`));
+       }
+     } catch (error) {
+       console.warn(chalk.yellow("\u26A0\uFE0F Failed to parse microfox.json, using default service name"));
+     }
+   }
+   ora("Generating handlers...").start().succeed("Generated handlers");
+   await generateHandlers(workers, serverlessDir);
+   const extractSpinner = ora("Extracting worker configs from bundled handlers...").start();
+   for (const worker of workers) {
+     try {
+       const handlerFile = path.join(serverlessDir, worker.handlerPath + ".js");
+       if (fs.existsSync(handlerFile)) {
+         const handlerUrl = pathToFileURL(path.resolve(handlerFile)).href;
+         try {
+           const module = await import(handlerUrl);
+           if (module.exportedWorkerConfig) {
+             worker.workerConfig = module.exportedWorkerConfig;
+             if (module.exportedWorkerConfig.layers?.length) {
+               console.log(chalk.gray(` \u2713 ${worker.id}: found ${module.exportedWorkerConfig.layers.length} layer(s)`));
+             }
+           } else {
+             console.warn(chalk.yellow(` \u26A0 ${worker.id}: exportedWorkerConfig not found in handler`));
+           }
+         } catch (importError) {
+           console.log(chalk.gray(` \u2139 ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || "runtime error"}...)`));
+           try {
+             const sourceContent = fs.readFileSync(worker.filePath, "utf-8");
+             const workerConfigMatch = sourceContent.match(/export\s+const\s+workerConfig[^=]*=\s*(\{[\s\S]*?\});/);
+             if (workerConfigMatch) {
+               let configStr = workerConfigMatch[1].replace(/\/\*[\s\S]*?\*\//g, "").replace(/(^|\s)\/\/[^\n]*/gm, "$1");
+               const configObj = new Function("return " + configStr)();
+               if (configObj && (configObj.layers || configObj.timeout || configObj.memorySize || configObj.schedule)) {
+                 worker.workerConfig = configObj;
+                 if (configObj.layers?.length) {
+                   console.log(chalk.gray(` \u2713 ${worker.id}: found ${configObj.layers.length} layer(s) from source file`));
+                 }
+                 if (configObj.schedule) {
+                   console.log(chalk.gray(` \u2713 ${worker.id}: found schedule configuration`));
+                 }
+               }
+             }
+           } catch (fallbackError) {
+             console.warn(chalk.yellow(` \u26A0 ${worker.id}: fallback extraction also failed, using defaults`));
+           }
+         }
+       } else {
+         console.warn(chalk.yellow(` \u26A0 ${worker.id}: handler file not found: ${handlerFile}`));
+       }
+     } catch (error) {
+       console.warn(chalk.yellow(` \u26A0 ${worker.id}: failed to extract config: ${error?.message || error}`));
+     }
+   }
+   extractSpinner.succeed("Extracted configs");
+   generateWorkersConfigHandler(serverlessDir, workers, serviceName);
+   generateDocsHandler(serverlessDir, serviceName, stage, region);
+   generateTriggerHandler(serverlessDir, serviceName);
+   const config = generateServerlessConfig(workers, stage, region, envVars, serviceName);
+   const envStage = fs.existsSync(microfoxJsonPath) ? "prod" : stage;
+   const safeEnvVars = {
+     ENVIRONMENT: envStage,
+     STAGE: envStage,
+     NODE_ENV: envStage
+   };
+   const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+   for (const [key, value] of Object.entries(envVars)) {
+     if (key.startsWith("AWS_")) continue;
+     if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {
+       safeEnvVars[key] = value;
+     }
+   }
+   fs.writeFileSync(
+     path.join(serverlessDir, "env.json"),
+     JSON.stringify(safeEnvVars, null, 2)
+   );
+   const yamlContent = yaml.dump(config, { indent: 2 });
+   const yamlPath = path.join(serverlessDir, "serverless.yml");
+   fs.writeFileSync(yamlPath, yamlContent);
+   console.log(chalk.green(`\u2713 Generated serverless.yml: ${yamlPath}`));
+ }
+ async function deploy(args) {
+   const stage = args.stage || process.env.STAGE || "prod";
+   const region = args.region || process.env.AWS_REGION || "us-east-1";
+   const skipDeploy = args["skip-deploy"] || false;
+   const skipInstall = args["skip-install"] || false;
+   if (skipDeploy) {
+     console.log(chalk.yellow("\u23ED\uFE0F Skipping deployment (--skip-deploy flag)"));
+     return;
+   }
+   const serverlessDir = path.join(process.cwd(), ".serverless-workers");
+   const yamlPath = path.join(serverlessDir, "serverless.yml");
+   if (!fs.existsSync(yamlPath)) {
+     console.error(chalk.red('\u274C serverless.yml not found. Run "build" first.'));
+     process.exit(1);
+   }
+   console.log(chalk.blue(`\u{1F680} Deploying to AWS (stage: ${stage}, region: ${region})...`));
+   validateEnvironment();
+   try {
+     if (!skipInstall && !fs.existsSync(path.join(serverlessDir, "node_modules"))) {
+       console.log(chalk.blue("\u{1F4E6} Installing serverless dependencies..."));
+       execSync("npm install", {
+         cwd: serverlessDir,
+         stdio: "inherit"
+       });
+     }
+     const microfoxJsonPath = path.join(process.cwd(), "microfox.json");
+     if (fs.existsSync(microfoxJsonPath)) {
+       console.log(chalk.blue("\u2139\uFE0F Found microfox.json, deploying via Microfox Cloud..."));
+       fs.copyFileSync(microfoxJsonPath, path.join(serverlessDir, "microfox.json"));
+       const envVars = loadEnvVars();
+       execSync("npx microfox@latest push", {
+         cwd: serverlessDir,
+         stdio: "inherit"
+       });
+       console.log(chalk.green("\u2713 Deployment triggered via Microfox!"));
+       return;
+     }
+     execSync("npx serverless deploy", {
+       cwd: serverlessDir,
+       stdio: "inherit",
+       env: {
+         ...process.env,
+         STAGE: stage,
+         AWS_REGION: region
+       }
+     });
+     console.log(chalk.green("\u2713 Deployment complete!"));
+   } catch (error) {
+     console.error(chalk.red("\u274C Deployment failed"));
+     process.exit(1);
+   }
+   await generateWorkersMap(stage, region, serverlessDir);
+ }
+ var pushCommand = new Command()
+   .name("push")
+   .description("Build and deploy background workers to AWS")
+   .option("-s, --stage <stage>", "Deployment stage", "prod")
+   .option("-r, --region <region>", "AWS region", "us-east-1")
+   .option("--ai-path <path>", "Path to AI directory containing workers", "app/ai")
+   .option("--service-name <name>", "Override serverless service name (defaults to ai-router-workers-<stage>)")
+   .option("--skip-deploy", "Skip deployment, only build", false)
+   .option("--skip-install", "Skip npm install in serverless directory", false)
+   .action(async (options) => {
+     await build2(options);
+     await deploy(options);
+   });
+
+ // src/index.ts
+ var program = new Command2();
+ program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version("0.1.0");
+ program.addCommand(pushCommand);
+ program.parse(process.argv);
+ var aiWorkerCli = program;
+ export {
+   aiWorkerCli
+ };
+ //# sourceMappingURL=index.js.map
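
For orientation, here is a minimal worker file of the shape this CLI scans for. This is a sketch only: the id regex, the default/named createWorker export, the runtime check for a .handler function, and the optional workerConfig export all come from the bundled code above, while the exact createWorker option names are assumptions about the @microfox/ai-worker API.

// app/ai/hello.worker.ts (hypothetical example)
import { createWorker } from '@microfox/ai-worker';

// Optional overrides picked up by the build step (timeout, memorySize, layers, schedule, sqs).
export const workerConfig = { timeout: 300, memorySize: 512 };

export default createWorker({
  id: 'hello', // scanWorkers() extracts this id via regex and uses it to name the SQS queue
  // The generated entry file wraps this with createLambdaHandler();
  // the input shape shown here is an assumption.
  handler: async (input: unknown) => {
    console.log('processing', input);
    return { ok: true };
  },
});

Building and deploying is then a single command: npx ai-worker push --stage prod --region us-east-1 --ai-path app/ai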
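
Once deployed, the generated POST /workers/trigger endpoint implements the contract shown in generateTriggerHandler above: workerId arrives via query param or JSON body, the message via body or messageBody, and the x-workers-trigger-key header is checked only when WORKERS_TRIGGER_API_KEY is configured. A client sketch (the API Gateway host is a placeholder):

// Hypothetical endpoint URL; substitute the host printed in the ApiEndpoints stack output.
const res = await fetch(
  'https://REPLACE_ME.execute-api.us-east-1.amazonaws.com/prod/workers/trigger?workerId=hello',
  {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // "body" may be an object (stringified before enqueueing) or a pre-serialized "messageBody" string.
    body: JSON.stringify({ body: { message: 'hi' } }),
  }
);
console.log(await res.json()); // { ok, workerId, stage, queueName, queueUrl, messageId }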