@microfox/ai-worker-cli 1.0.1 → 1.0.3
This diff shows the content changes between publicly released versions of this package as they appear in the supported public registries, and is provided for informational purposes only.
- package/dist/index.cjs +433 -17
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +433 -17
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/commands/push.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { Command } from 'commander';\nimport { pushCommand } from './commands/push.js';\n\nconst program = new Command();\n\nprogram\n .name('ai-worker')\n .description('CLI tooling for deploying ai-router background workers')\n .version('0.1.0');\n\nprogram.addCommand(pushCommand);\n\nprogram.parse(process.argv);\n\nconst aiWorkerCli = program;\nexport { aiWorkerCli };\n","import { Command } from 'commander';\nimport * as esbuild from 'esbuild';\nimport { execSync } from 'child_process';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { pathToFileURL } from 'url';\nimport { builtinModules } from 'module';\nimport { glob } from 'glob';\nimport * as yaml from 'js-yaml';\nimport chalk from 'chalk';\nimport ora from 'ora';\n\nconst NODE_BUILTINS = new Set(\n builtinModules.map((m) => (m.startsWith('node:') ? m.slice('node:'.length) : m))\n);\n\nfunction isBuiltinModule(specifier: string): boolean {\n const s = specifier.startsWith('node:')\n ? specifier.slice('node:'.length)\n : specifier;\n return NODE_BUILTINS.has(s);\n}\n\nfunction getPackageNameFromSpecifier(specifier: string): string {\n // Scoped packages: @scope/name/...\n if (specifier.startsWith('@')) {\n const [scope, name] = specifier.split('/');\n return name ? `${scope}/${name}` : specifier;\n }\n // Unscoped: name/...\n return specifier.split('/')[0];\n}\n\nfunction tryResolveLocalImport(fromFile: string, specifier: string): string | null {\n const baseDir = path.dirname(fromFile);\n const raw = path.resolve(baseDir, specifier);\n\n // Direct file hits\n const candidates = [\n raw,\n `${raw}.ts`,\n `${raw}.tsx`,\n `${raw}.js`,\n `${raw}.mjs`,\n `${raw}.cjs`,\n ];\n for (const c of candidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n\n // Directory index hits\n if (fs.existsSync(raw) && fs.statSync(raw).isDirectory()) {\n const idxCandidates = [\n path.join(raw, 'index.ts'),\n path.join(raw, 'index.tsx'),\n path.join(raw, 'index.js'),\n path.join(raw, 'index.mjs'),\n path.join(raw, 'index.cjs'),\n ];\n for (const c of idxCandidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n }\n\n return null;\n}\n\nfunction extractImportSpecifiers(source: string): string[] {\n const specs: string[] = [];\n\n // import ... from 'x' / export ... from 'x'\n // NOTE: we intentionally ignore \"import type ... 
from\" because it's type-only.\n const re1 =\n /(?:^|\\n)\\s*(?!import\\s+type)(?:import|export)\\s+[\\s\\S]*?\\sfrom\\s*['\"]([^'\"]+)['\"]/g;\n for (const match of source.matchAll(re1)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // import('x')\n const re2 = /import\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re2)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // require('x')\n const re3 = /require\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re3)) {\n if (match[1]) specs.push(match[1]);\n }\n\n return specs;\n}\n\nfunction extractEnvVarUsageFromSource(source: string): {\n runtimeKeys: Set<string>;\n buildtimeKeys: Set<string>;\n} {\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n // process.env.KEY / process.env?.KEY\n const reProcessDot = /\\bprocess\\.env\\??\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reProcessDot)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // process.env['KEY'] / process.env[\"KEY\"]\n const reProcessBracket = /\\bprocess\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reProcessBracket)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // import.meta.env.KEY\n const reImportMetaDot = /\\bimport\\.meta\\.env\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reImportMetaDot)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n // import.meta.env['KEY']\n const reImportMetaBracket = /\\bimport\\.meta\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reImportMetaBracket)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n return { runtimeKeys, buildtimeKeys };\n}\n\nasync function collectEnvUsageForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<{ runtimeKeys: Set<string>; buildtimeKeys: Set<string> }> {\n void projectRoot; // reserved for future improvements (tsconfig path aliases, etc.)\n\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n\n const usage = extractEnvVarUsageFromSource(src);\n usage.runtimeKeys.forEach((k) => runtimeKeys.add(k));\n usage.buildtimeKeys.forEach((k) => buildtimeKeys.add(k));\n\n const specifiers = extractImportSpecifiers(src);\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) continue;\n if (isBuiltinModule(spec)) continue;\n // External packages are ignored; we only scan local files.\n }\n }\n\n runtimeKeys.delete('');\n buildtimeKeys.delete('');\n runtimeKeys.delete('node');\n buildtimeKeys.delete('node');\n\n return { runtimeKeys, buildtimeKeys };\n}\n\nfunction readJsonFile<T = any>(filePath: string): T | null {\n try {\n return JSON.parse(fs.readFileSync(filePath, 'utf-8')) as T;\n } catch {\n return null;\n }\n}\n\nfunction findMonorepoRoot(startDir: string): string 
{\n let dir = path.resolve(startDir);\n // Walk up until we find a package.json with \"workspaces\" or we hit filesystem root.\n while (true) {\n const pkgPath = path.join(dir, 'package.json');\n if (fs.existsSync(pkgPath)) {\n const pkg = readJsonFile<any>(pkgPath);\n if (pkg?.workspaces) return dir;\n }\n\n const parent = path.dirname(dir);\n if (parent === dir) return startDir; // fallback\n dir = parent;\n }\n}\n\nasync function collectRuntimeDependenciesForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<Set<string>> {\n // Always include these: they're used by generated workers-config / lambda wrapper logic,\n // and are safe to install even if handlers are bundled.\n const deps = new Set<string>(['@microfox/ai-worker', '@aws-sdk/client-sqs']);\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n const specifiers = extractImportSpecifiers(src);\n\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) continue;\n if (isBuiltinModule(spec)) continue;\n\n deps.add(getPackageNameFromSpecifier(spec));\n }\n }\n\n // Filter out anything that isn't an npm package name\n deps.delete('');\n deps.delete('node');\n\n // Filter devDependencies\n deps.delete('serverless');\n deps.delete('serverless-offline');\n deps.delete('@aws-sdk/client-sqs');\n deps.delete('@microfox/ai-worker')\n return deps;\n}\n\nfunction buildDependenciesMap(projectRoot: string, deps: Set<string>): Record<string, string> {\n const projectPkg =\n readJsonFile<any>(path.join(projectRoot, 'package.json')) || {};\n const projectDeps: Record<string, string> = projectPkg.dependencies || {};\n const projectDevDeps: Record<string, string> = projectPkg.devDependencies || {};\n\n // Try to also source versions from workspace packages (ai-worker / ai-worker-cli)\n const repoRoot = findMonorepoRoot(projectRoot);\n const workerPkg =\n readJsonFile<any>(path.join(repoRoot, 'packages', 'ai-worker', 'package.json')) ||\n {};\n const workerCliPkg =\n readJsonFile<any>(\n path.join(repoRoot, 'packages', 'ai-worker-cli', 'package.json')\n ) || {};\n\n const workspaceDeps: Record<string, string> = {\n ...(workerPkg.dependencies || {}),\n ...(workerPkg.devDependencies || {}),\n ...(workerCliPkg.dependencies || {}),\n ...(workerCliPkg.devDependencies || {}),\n };\n\n const out: Record<string, string> = {};\n for (const dep of Array.from(deps).sort()) {\n const range =\n projectDeps[dep] ||\n projectDevDeps[dep] ||\n workspaceDeps[dep];\n // Only add deps that the project or workspace already declares (e.g. 
in package.json).\n // Skip subpath imports like @tokenlens/helpers that are not real packages and not in package.json.\n if (range) {\n out[dep] = String(range);\n }\n }\n\n return out;\n}\n\ninterface WorkerInfo {\n id: string;\n filePath: string;\n // Module path WITHOUT extension and WITHOUT \".handler\" suffix.\n // Example: \"handlers/agents/test/test\"\n handlerPath: string;\n workerConfig?: {\n timeout?: number;\n memorySize?: number;\n layers?: string[];\n schedule?: any; // Schedule config: string, object, or array of either\n sqs?: {\n maxReceiveCount?: number;\n messageRetentionPeriod?: number;\n visibilityTimeout?: number;\n deadLetterMessageRetentionPeriod?: number;\n };\n };\n}\n\ninterface ServerlessConfig {\n service: string;\n custom?: Record<string, any>;\n package: {\n excludeDevDependencies: boolean;\n patterns: string[];\n };\n provider: {\n name: string;\n runtime: string;\n region: string;\n stage: string;\n versionFunctions?: boolean;\n environment: Record<string, string | Record<string, any>> | string;\n iam: {\n role: {\n statements: Array<{\n Effect: string;\n Action: string[];\n Resource: string | Array<string | Record<string, any>>;\n }>;\n };\n };\n };\n plugins: string[];\n functions: Record<string, any>;\n resources: {\n Resources: Record<string, any>;\n Outputs: Record<string, any>;\n };\n}\n\nexport function getServiceNameFromProjectId(projectId: string): string {\n const cleanedProjectId = projectId.replace(/-/g, '').slice(0, 15);\n return `p-${cleanedProjectId}`;\n}\n\n/**\n * Validates the environment and dependencies.\n */\nfunction validateEnvironment(): void {\n // We no longer strictly require global serverless since we'll install it locally in the temp dir\n // But we do need npm\n try {\n execSync('npm --version', { stdio: 'ignore' });\n } catch (error) {\n console.error(chalk.red('❌ npm is not installed or not in PATH.'));\n process.exit(1);\n }\n}\n\n/**\n * Scans for all *.worker.ts files in app/ai directory.\n */\nasync function scanWorkers(aiPath: string = 'app/ai'): Promise<WorkerInfo[]> {\n const pattern = path.join(aiPath, '**/*.worker.ts').replace(/\\\\/g, '/');\n const files = await glob(pattern);\n\n const workers: WorkerInfo[] = [];\n\n for (const filePath of files) {\n try {\n // Try to dynamically import the worker file to get the actual workerConfig\n // This is more reliable than parsing the file as text\n let workerConfig: WorkerInfo['workerConfig'] | undefined;\n let workerId: string | undefined;\n\n // For now, just extract the ID using regex\n // We'll import the workerConfig from the bundled handlers later\n\n // Fallback to regex parsing if import didn't work\n if (!workerId) {\n const content = fs.readFileSync(filePath, 'utf-8');\n // Match createWorker with optional type parameters: createWorker<...>({ id: '...' })\n // or createWorker({ id: '...' 
})\n const idMatch = content.match(/createWorker\\s*(?:<[^>]+>)?\\s*\\(\\s*\\{[\\s\\S]*?id:\\s*['\"]([^'\"]+)['\"]/);\n if (!idMatch) {\n console.warn(chalk.yellow(`⚠️ Skipping ${filePath}: No worker ID found`));\n continue;\n }\n workerId = idMatch[1];\n }\n\n // Generate handler path (relative to serverless root)\n // Convert app/ai/agents/my-worker.worker.ts -> handlers/my-worker\n const relativePath = path.relative(aiPath, filePath);\n const handlerDir = path.dirname(relativePath);\n const handlerName = path.basename(relativePath, '.worker.ts');\n const handlerPath = path.join('handlers', handlerDir, `${handlerName}`).replace(/\\\\/g, '/');\n\n workers.push({\n id: workerId,\n filePath,\n handlerPath,\n workerConfig,\n });\n } catch (error) {\n console.error(chalk.red(`❌ Error processing ${filePath}:`), error);\n }\n }\n\n return workers;\n}\n\n/**\n * Generates Lambda handler entrypoints for each worker.\n */\nasync function generateHandlers(workers: WorkerInfo[], outputDir: string): Promise<void> {\n const handlersDir = path.join(outputDir, 'handlers');\n\n // Ensure handlers directory exists and is clean\n if (fs.existsSync(handlersDir)) {\n fs.rmSync(handlersDir, { recursive: true, force: true });\n }\n fs.mkdirSync(handlersDir, { recursive: true });\n\n for (const worker of workers) {\n // Create directory structure\n // We output JS files now, so change extension in path\n const handlerFile = path.join(handlersDir, worker.handlerPath.replace('handlers/', '') + '.js');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n // Generate handler entrypoint\n // Convert app/ai/agents/my-worker.worker.ts to import path\n // We need relative path from .serverless-workers/handlers/agent/ to original source\n // Original: /path/to/project/app/ai/agents/my-worker.worker.ts\n // Handler: /path/to/project/.serverless-workers/handlers/agent/my-worker.handler.ts\n // Import should look like: ../../../app/ai/agents/my-worker.worker\n\n const handlerAbsPath = path.resolve(handlerFile);\n const workerAbsPath = path.resolve(worker.filePath);\n\n // Calculate relative path from handler directory to worker file\n let relativeImportPath = path.relative(path.dirname(handlerAbsPath), workerAbsPath);\n\n // Ensure it starts with ./ or ../\n if (!relativeImportPath.startsWith('.')) {\n relativeImportPath = './' + relativeImportPath;\n }\n\n // Remove extension for import\n relativeImportPath = relativeImportPath.replace(/\\.ts$/, '');\n // Normalize slashes for Windows\n relativeImportPath = relativeImportPath.split(path.sep).join('/');\n\n // Detect export: \"export default createWorker\" vs \"export const X = createWorker\"\n const fileContent = fs.readFileSync(worker.filePath, 'utf-8');\n const defaultExport = /export\\s+default\\s+createWorker/.test(fileContent);\n const exportMatch = fileContent.match(/export\\s+(const|let)\\s+(\\w+)\\s*=\\s*createWorker/);\n const exportName = exportMatch ? exportMatch[2] : 'worker';\n\n // 1. Create a temporary TS entrypoint\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n\n const workerRef = defaultExport\n ? 
'workerModule.default'\n : `workerModule.${exportName}`;\n\n // Try to import workerConfig (new pattern) - it might not exist (old pattern)\n const tempEntryContent = `\nimport { createLambdaHandler } from '@microfox/ai-worker/handler';\nimport * as workerModule from '${relativeImportPath}';\n\nconst workerAgent = ${workerRef};\nif (!workerAgent || typeof workerAgent.handler !== 'function') {\n throw new Error('Worker module must export a createWorker result (default or named) with .handler');\n}\n\nexport const handler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);\nexport const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;\n`;\n fs.writeFileSync(tempEntryFile, tempEntryContent);\n\n // 2. Bundle using esbuild\n try {\n // Plugin to fix lazy-cache issue where forOwn is not properly added to utils\n // The issue: require_for_own() is called directly instead of through the lazy-cache proxy\n const fixLazyCachePlugin: esbuild.Plugin = {\n name: 'fix-lazy-cache',\n setup(build) {\n build.onEnd(async (result) => {\n if (result.errors.length > 0) return;\n\n // Read the bundled file\n let bundledCode = fs.readFileSync(handlerFile, 'utf-8');\n let modified = false;\n\n // Fix the lazy-cache pattern in clone-deep/utils.js\n // Pattern: require_for_own(); should be require(\"for-own\", \"forOwn\");\n // This ensures forOwn is properly added to the utils object via lazy-cache\n // Match the pattern more flexibly to handle different whitespace\n const pattern = /(require\\(\"kind-of\",\\s*\"typeOf\"\\);\\s*)require_for_own\\(\\);/g;\n\n if (pattern.test(bundledCode)) {\n bundledCode = bundledCode.replace(\n pattern,\n '$1require(\"for-own\", \"forOwn\");'\n );\n modified = true;\n }\n\n // Fix (0, import_node_module.createRequire)(import_meta.url) - esbuild emits import_meta.url\n // which is undefined in CJS Lambda. Polyfill so createRequire gets a valid file URL.\n if (bundledCode.includes('import_meta.url')) {\n bundledCode = bundledCode.replace(\n /import_meta\\.url/g,\n 'require(\"url\").pathToFileURL(__filename).href'\n );\n modified = true;\n }\n\n // Fix createRequire(undefined) / createRequire(void 0) if any dependency emits that\n const beforeCreateRequire = bundledCode;\n bundledCode = bundledCode.replace(\n /\\bcreateRequire\\s*\\(\\s*(?:undefined|void\\s*0)\\s*\\)/g,\n 'createRequire(require(\"url\").pathToFileURL(__filename).href)'\n );\n if (bundledCode !== beforeCreateRequire) modified = true;\n\n if (modified) {\n fs.writeFileSync(handlerFile, bundledCode, 'utf-8');\n }\n });\n },\n };\n\n await esbuild.build({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n format: 'cjs',\n outfile: handlerFile,\n // We exclude aws-sdk as it's included in Lambda runtime\n // We exclude canvas because it's a binary dependency often problematic in bundling\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n // Force lazy-cache to eagerly load modules during bundling\n // This prevents runtime dynamic require() calls that fail in bundled code\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n // Force bundling of all packages to avoid runtime module resolution issues\n // This ensures clone-deep, lazy-cache, and all transitive deps are bundled\n packages: 'bundle',\n plugins: [fixLazyCachePlugin],\n logLevel: 'error',\n });\n\n // 3. 
Cleanup temp file\n fs.unlinkSync(tempEntryFile);\n\n } catch (error) {\n console.error(chalk.red(`Error bundling handler for ${worker.id}:`), error);\n // Don't delete temp file on error for debugging\n }\n }\n console.log(chalk.green(`✓ Generated ${workers.length} bundled handlers`));\n}\n\nfunction generateDocsHandler(outputDir: string, serviceName: string, stage: string, region: string): void {\n const handlerFile = path.join(outputDir, 'handlers', 'docs.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated docs handler for Microfox compatibility\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n // Return OpenAPI JSON for Microfox\n const openapi = {\n openapi: '3.0.3',\n info: {\n title: 'AI Worker Service',\n version: '1.0.0',\n description: 'Auto-generated OpenAPI for background workers service',\n },\n servers: [\n {\n url: 'https://{apiId}.execute-api.{region}.amazonaws.com/{stage}',\n variables: {\n apiId: { default: 'REPLACE_ME' },\n region: { default: '${region}' },\n stage: { default: '${stage}' },\n },\n },\n ],\n paths: {\n '/docs.json': {\n get: {\n operationId: 'getDocs',\n summary: 'Get OpenAPI schema',\n responses: {\n '200': {\n description: 'OpenAPI JSON',\n content: {\n 'application/json': {\n schema: { type: 'object' },\n },\n },\n },\n },\n },\n },\n '/workers/config': {\n get: {\n operationId: 'getWorkersConfig',\n summary: 'Get workers config (queue urls map)',\n parameters: [\n {\n name: 'x-workers-config-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key header (if configured)',\n },\n ],\n responses: {\n '200': {\n description: 'Workers config map',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n version: { type: 'string' },\n stage: { type: 'string' },\n region: { type: 'string' },\n workers: { type: 'object' },\n },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n '/workers/trigger': {\n post: {\n operationId: 'triggerWorker',\n summary: 'Trigger a worker by sending a raw SQS message body',\n parameters: [\n {\n name: 'workerId',\n in: 'query',\n required: false,\n schema: { type: 'string' },\n description: 'Worker ID (can also be provided in JSON body as workerId)',\n },\n {\n name: 'x-workers-trigger-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key header (if configured)',\n },\n ],\n requestBody: {\n required: true,\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n workerId: { type: 'string' },\n // Prefer sending the exact SQS message body your worker expects\n body: { type: 'object' },\n messageBody: { type: 'string' },\n },\n additionalProperties: true,\n },\n },\n },\n },\n responses: {\n '200': {\n description: 'Enqueued',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n ok: { type: 'boolean' },\n workerId: { type: 'string' },\n stage: { type: 'string' },\n queueName: { type: 
'string' },\n queueUrl: { type: 'string' },\n messageId: { type: 'string' },\n },\n },\n },\n },\n },\n '400': {\n description: 'Bad request',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n },\n 'x-service': {\n serviceName: '${serviceName}',\n stage: '${stage}',\n region: '${region}',\n },\n };\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(openapi, null, 2),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated docs.json handler`));\n}\n\nfunction generateTriggerHandler(outputDir: string, serviceName: string): void {\n const handlerFile = path.join(outputDir, 'handlers', 'workers-trigger.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated worker trigger handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';\n\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nfunction jsonResponse(statusCode: number, body: any): APIGatewayProxyResult {\n return {\n statusCode,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(body),\n };\n}\n\nexport const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {\n // Optional API key\n const apiKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'];\n if (providedKey !== apiKey) {\n return jsonResponse(401, { error: 'Unauthorized' });\n }\n }\n\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n const qsWorkerId = event.queryStringParameters?.workerId;\n\n let parsedBody: any = undefined;\n if (event.body) {\n try {\n parsedBody = JSON.parse(event.body);\n } catch {\n parsedBody = undefined;\n }\n }\n\n const workerId = (parsedBody && parsedBody.workerId) || qsWorkerId;\n if (!workerId || typeof workerId !== 'string') {\n return jsonResponse(400, { error: 'workerId is required (query param workerId or JSON body workerId)' });\n }\n\n // Prefer JSON body fields, otherwise send raw event.body\n let messageBody: string | undefined;\n if (parsedBody && typeof parsedBody.messageBody === 'string') {\n messageBody = parsedBody.messageBody;\n } else if (parsedBody && parsedBody.body !== undefined) {\n messageBody = typeof parsedBody.body === 'string' ? 
parsedBody.body : JSON.stringify(parsedBody.body);\n } else if (event.body) {\n messageBody = event.body;\n }\n\n if (!messageBody) {\n return jsonResponse(400, { error: 'body/messageBody is required' });\n }\n\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n const sqs = new SQSClient({ region });\n\n let queueUrl: string;\n try {\n const urlRes = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (!urlRes.QueueUrl) {\n return jsonResponse(404, { error: 'Queue URL not found', queueName });\n }\n queueUrl = String(urlRes.QueueUrl);\n } catch (e: any) {\n return jsonResponse(404, { error: 'Queue does not exist or not accessible', queueName, message: String(e?.message || e) });\n }\n\n try {\n const sendRes = await sqs.send(new SendMessageCommand({ QueueUrl: queueUrl, MessageBody: messageBody }));\n return jsonResponse(200, {\n ok: true,\n workerId,\n stage,\n queueName,\n queueUrl,\n messageId: sendRes.MessageId || null,\n });\n } catch (e: any) {\n return jsonResponse(500, { error: 'Failed to send message', message: String(e?.message || e) });\n }\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle',\n logLevel: 'error',\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated /workers/trigger handler`));\n}\n\n/**\n * Generates workers-config Lambda handler.\n */\nfunction generateWorkersConfigHandler(\n outputDir: string,\n workers: WorkerInfo[],\n serviceName: string\n): void {\n // We'll bundle this one too\n const handlerFile = path.join(outputDir, 'handlers', 'workers-config.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n // Ensure handlers directory exists and is clean for config handler\n if (fs.existsSync(handlerDir) && !fs.existsSync(handlerFile)) {\n // Don't wipe if we already cleaned it in generateHandlers, unless it's a diff dir\n } else if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated workers-config Lambda handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';\n\n// Worker IDs embedded at build time so this endpoint doesn't depend on any generated files.\nconst WORKER_IDS: string[] = ${JSON.stringify(workers.map(w => w.id), null, 2)};\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n// ... 
same logic ...\n // Check API key if configured\n const apiKey = process.env.WORKERS_CONFIG_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-config-key'] || event.headers['X-Workers-Config-Key'];\n if (providedKey !== apiKey) {\n return {\n statusCode: 401,\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ error: 'Unauthorized' }),\n };\n }\n }\n\n // Stage resolution:\n // - Prefer API Gateway stage (microfox tends to deploy APIs on \"prod\")\n // - Fallback to ENVIRONMENT/STAGE env vars\n // - Default to \"prod\" (safer for microfox) if nothing else is set\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n // Resolve queue URLs dynamically via SQS so we return actual URLs.\n // NOTE: Node 20 Lambda runtime does NOT guarantee 'aws-sdk' v2 is available.\n // We use AWS SDK v3 and bundle it into this handler.\n const sqs = new SQSClient({ region });\n const workers: Record<string, { queueUrl: string; region: string }> = {};\n const attemptedQueueNames: string[] = [];\n const errors: Array<{ workerId: string; queueName: string; message: string; name?: string }> = [];\n const debug = event.queryStringParameters?.debug === '1' || event.queryStringParameters?.debug === 'true';\n\n await Promise.all(\n WORKER_IDS.map(async (workerId) => {\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n attemptedQueueNames.push(queueName);\n try {\n const result = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (result?.QueueUrl) {\n workers[workerId] = { queueUrl: String(result.QueueUrl), region };\n }\n } catch (e) {\n const err = e as any;\n const message = String(err?.message || err || 'Unknown error');\n const name = err?.name ? String(err.name) : undefined;\n // Log so CloudWatch shows what's going on (nonexistent queue vs permission vs region).\n console.error('[workers-config] getQueueUrl failed', { workerId, queueName, name, message });\n errors.push({ workerId, queueName, name, message });\n }\n })\n );\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify({\n version: '1.0.0',\n stage,\n region,\n workers,\n ...(debug ? 
{ attemptedQueueNames, errors } : {}),\n }),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated workers-config handler`));\n}\n\n/**\n * Reads environment variables from .env file.\n */\nfunction loadEnvVars(envPath: string = '.env'): Record<string, string> {\n const env: Record<string, string> = {};\n\n if (!fs.existsSync(envPath)) {\n console.warn(chalk.yellow(`⚠️ .env file not found at ${envPath}`));\n return env;\n }\n\n const content = fs.readFileSync(envPath, 'utf-8');\n const lines = content.split('\\n');\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed || trimmed.startsWith('#')) continue;\n\n const match = trimmed.match(/^([^=]+)=(.*)$/);\n if (match) {\n const key = match[1].trim();\n const value = match[2].trim().replace(/^[\"']|[\"']$/g, '');\n env[key] = value;\n }\n }\n\n return env;\n}\n\n/**\n * Converts schedule configuration to serverless.yml schedule event format.\n * Supports simple strings, configuration objects, and arrays of both.\n */\nfunction processScheduleEvents(scheduleConfig: any): any[] {\n if (!scheduleConfig) {\n return [];\n }\n\n const events: any[] = [];\n\n // Normalize to array\n const schedules = Array.isArray(scheduleConfig) ? scheduleConfig : [scheduleConfig];\n\n for (const schedule of schedules) {\n // Simple string format: 'rate(2 hours)' or 'cron(0 12 * * ? *)'\n if (typeof schedule === 'string') {\n events.push({\n schedule: schedule,\n });\n continue;\n }\n\n // Full configuration object\n if (typeof schedule === 'object' && schedule !== null) {\n const scheduleEvent: any = { schedule: {} };\n\n // Handle rate - can be string or array of strings\n if (schedule.rate) {\n if (Array.isArray(schedule.rate)) {\n // Multiple rate expressions\n scheduleEvent.schedule.rate = schedule.rate;\n } else {\n // Single rate expression\n scheduleEvent.schedule.rate = schedule.rate;\n }\n } else {\n // If no rate specified but we have a schedule object, skip it\n continue;\n }\n\n // Optional fields\n if (schedule.enabled !== undefined) {\n scheduleEvent.schedule.enabled = schedule.enabled;\n }\n if (schedule.input !== undefined) {\n scheduleEvent.schedule.input = schedule.input;\n }\n if (schedule.inputPath !== undefined) {\n scheduleEvent.schedule.inputPath = schedule.inputPath;\n }\n if (schedule.inputTransformer !== undefined) {\n scheduleEvent.schedule.inputTransformer = schedule.inputTransformer;\n }\n if (schedule.name !== undefined) {\n scheduleEvent.schedule.name = schedule.name;\n }\n if (schedule.description !== undefined) {\n scheduleEvent.schedule.description = schedule.description;\n }\n if (schedule.method !== undefined) {\n scheduleEvent.schedule.method = schedule.method;\n }\n if (schedule.timezone !== undefined) {\n scheduleEvent.schedule.timezone = schedule.timezone;\n }\n\n // If schedule object only has rate (or is minimal), we can simplify it\n // Serverless Framework accepts both { schedule: 'rate(...)' } and { schedule: { rate: 'rate(...)' } }\n if (Object.keys(scheduleEvent.schedule).length === 1 && scheduleEvent.schedule.rate) {\n // Simplify to string format if it's just a single rate\n if 
(typeof scheduleEvent.schedule.rate === 'string') {\n events.push({\n schedule: scheduleEvent.schedule.rate,\n });\n } else {\n // Keep object format for arrays\n events.push(scheduleEvent);\n }\n } else {\n events.push(scheduleEvent);\n }\n }\n }\n\n return events;\n}\n\n/**\n * Generates serverless.yml configuration.\n */\nfunction generateServerlessConfig(\n workers: WorkerInfo[],\n stage: string,\n region: string,\n envVars: Record<string, string>,\n serviceName: string\n): ServerlessConfig {\n // Create SQS queues for each worker\n const resources: ServerlessConfig['resources'] = {\n Resources: {},\n Outputs: {},\n };\n\n const queueArns: Array<string | Record<string, any>> = [];\n\n // Update provider environment to use file(env.json)\n const providerEnvironment: any = {\n STAGE: stage,\n NODE_ENV: stage,\n };\n\n // Custom configuration including serverless-offline\n const customConfig: Record<string, any> = {\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n 'serverless-offline': {\n httpPort: 4000,\n lambdaPort: 4002,\n useChildProcesses: true,\n useWorkerThreads: true,\n noCookieValidation: true,\n allowCache: true,\n hideStackTraces: false,\n disableCookieValidation: true,\n noTimeout: true,\n environment: '\\${file(env.json)}',\n }\n };\n\n for (const worker of workers) {\n const queueName = `WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}`;\n const queueLogicalId = `${queueName}${stage}`;\n const dlqLogicalId = `${queueName}DLQ${stage}`;\n\n const sqsCfg = worker.workerConfig?.sqs;\n const retention =\n typeof sqsCfg?.messageRetentionPeriod === 'number'\n ? sqsCfg.messageRetentionPeriod\n : 1209600; // 14 days\n const dlqRetention =\n typeof sqsCfg?.deadLetterMessageRetentionPeriod === 'number'\n ? sqsCfg.deadLetterMessageRetentionPeriod\n : retention;\n const visibilityTimeout =\n typeof sqsCfg?.visibilityTimeout === 'number'\n ? sqsCfg.visibilityTimeout\n : (worker.workerConfig?.timeout || 300) + 60; // Add buffer\n const maxReceiveCountRaw =\n typeof sqsCfg?.maxReceiveCount === 'number' ? 
sqsCfg.maxReceiveCount : 1;\n // SQS does not support 0; treat <=0 as 1.\n const maxReceiveCount = Math.max(1, Math.floor(maxReceiveCountRaw));\n\n // DLQ (always create so we can support \"no retries\" mode safely)\n resources.Resources[dlqLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n QueueName: `\\${self:service}-${worker.id}-dlq-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n MessageRetentionPeriod: dlqRetention,\n },\n };\n\n resources.Resources[queueLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n // Use ${self:service} to avoid hardcoding service name\n QueueName: `\\${self:service}-${worker.id}-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n VisibilityTimeout: visibilityTimeout,\n MessageRetentionPeriod: retention,\n RedrivePolicy: {\n deadLetterTargetArn: { 'Fn::GetAtt': [dlqLogicalId, 'Arn'] },\n maxReceiveCount,\n },\n },\n };\n\n resources.Outputs[`${queueLogicalId}Url`] = {\n Description: `Queue URL for worker ${worker.id}`,\n Value: { Ref: queueLogicalId },\n Export: {\n Name: `\\${self:service}-${worker.id}-queue-url`,\n },\n };\n\n queueArns.push({ 'Fn::GetAtt': [queueLogicalId, 'Arn'] });\n }\n\n // Create functions for each worker\n const functions: Record<string, any> = {};\n\n for (const worker of workers) {\n const functionName = `worker${worker.id.replace(/[^a-zA-Z0-9]/g, '')}`;\n\n // Start with SQS event (default)\n const events: any[] = [\n {\n sqs: {\n arn: { 'Fn::GetAtt': [`WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}${stage}`, 'Arn'] },\n batchSize: 1,\n },\n },\n ];\n\n // Add schedule events if configured\n if (worker.workerConfig?.schedule) {\n const scheduleEvents = processScheduleEvents(worker.workerConfig.schedule);\n events.push(...scheduleEvents);\n }\n\n functions[functionName] = {\n // IMPORTANT: Keep AWS handler string to exactly one dot: \"<modulePath>.handler\"\n handler: `${worker.handlerPath}.handler`,\n timeout: worker.workerConfig?.timeout || 300,\n memorySize: worker.workerConfig?.memorySize || 512,\n events,\n };\n\n if (worker.workerConfig?.layers?.length) {\n functions[functionName].layers = worker.workerConfig.layers;\n }\n }\n\n // Add docs.json function for Microfox compatibility\n functions['getDocs'] = {\n handler: 'handlers/docs.handler',\n events: [\n {\n http: {\n path: '/docs.json',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers trigger endpoint (HTTP -> SQS SendMessage)\n functions['triggerWorker'] = {\n handler: 'handlers/workers-trigger.handler',\n events: [\n {\n http: {\n path: '/workers/trigger',\n method: 'POST',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers-config function\n functions['workersConfig'] = {\n handler: 'handlers/workers-config.handler',\n events: [\n {\n http: {\n path: 'workers/config',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // Filter env vars - only include safe ones (exclude secrets that should be in AWS Secrets Manager)\n const safeEnvVars: Record<string, string> = {};\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'WORKERS_', 'REMOTION_'];\n\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n\n for (const [key, value] of Object.entries(envVars)) {\n if (allowedPrefixes.some(prefix => key.startsWith(prefix))) {\n safeEnvVars[key] = value;\n }\n }\n\n // Add ApiEndpoints output for Microfox\n resources.Outputs['ApiEndpoints'] = {\n Description: \"API Endpoints\",\n Value: {\n 
\"Fn::Join\": [\n \"\",\n [\n \"API: https://\",\n { \"Ref\": \"ApiGatewayRestApi\" },\n \".execute-api.\",\n { \"Ref\": \"AWS::Region\" },\n `.amazonaws.com/\\${env:ENVIRONMENT, '${stage}'}`\n ]\n ]\n }\n };\n\n return {\n service: serviceName,\n package: {\n excludeDevDependencies: true,\n patterns: [\n '!venv/**',\n '!.idea/**',\n '!.vscode/**',\n '!src/**',\n '!node_modules/serverless-offline/**',\n '!node_modules/typescript/**',\n '!node_modules/@types/**',\n '!node_modules/aws-sdk/**',\n '!node_modules/@aws-sdk/**'\n ],\n },\n custom: customConfig,\n provider: {\n name: 'aws',\n runtime: 'nodejs20.x',\n region,\n versionFunctions: false,\n // Use ENVIRONMENT from env.json to drive the actual deployed stage (Microfox defaults to prod).\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n environment: '\\${file(env.json)}',\n iam: {\n role: {\n statements: [\n {\n Effect: 'Allow',\n Action: [\n 'sqs:SendMessage',\n 'sqs:ReceiveMessage',\n 'sqs:DeleteMessage',\n 'sqs:GetQueueAttributes',\n ],\n Resource: queueArns,\n },\n {\n Effect: 'Allow',\n Action: ['sqs:GetQueueUrl'],\n // GetQueueUrl is not resource-scoped for unknown queue ARNs, must be '*'\n Resource: '*',\n }\n ],\n },\n },\n },\n plugins: ['serverless-offline'],\n functions,\n resources,\n };\n}\n\n/**\n * Resolves queue URLs after deployment and generates workers-map.generated.ts\n */\nasync function generateWorkersMap(\n stage: string,\n region: string,\n outputDir: string\n): Promise<void> {\n const serverlessDir = path.join(outputDir, '.serverless');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: true });\n }\n\n // Need to scan workers again to get IDs for map generation\n // Or we could save this metadata in the build step.\n // For now, re-scanning is fine.\n const workers = await scanWorkers();\n\n // Try to read CloudFormation outputs\n const stackName = `ai-router-workers-${stage}-${stage}`;\n let queueUrls: Record<string, { queueUrl: string; region: string }> = {};\n\n const spinner = ora('Fetching CloudFormation outputs...').start();\n\n try {\n // Use AWS CLI to get stack outputs\n const output = execSync(\n `aws cloudformation describe-stacks --stack-name ${stackName} --region ${region} --query \"Stacks[0].Outputs\" --output json`,\n { encoding: 'utf-8', stdio: 'pipe' }\n );\n\n const outputs = JSON.parse(output);\n const outputMap: Record<string, string> = {};\n\n for (const output of outputs) {\n const key = output.OutputKey;\n if (key && key.endsWith('Url')) {\n const workerId = key.replace('WorkerQueue', '').replace('Url', '').toLowerCase();\n // The workerId from CF output might have stripped characters, need fuzzy match or consistent naming\n // Currently we use replace(/[^a-zA-Z0-9]/g, '') in CF output name\n outputMap[key] = output.OutputValue;\n }\n }\n\n // Match workers to queue URLs\n for (const worker of workers) {\n const sanitizedId = worker.id.replace(/[^a-zA-Z0-9]/g, '');\n const queueKey = `WorkerQueue${sanitizedId}${stage}Url`;\n\n // Look for key ending with this pattern to handle casing issues if any\n const matchingKey = Object.keys(outputMap).find(k => k.toLowerCase() === queueKey.toLowerCase());\n\n if (matchingKey && outputMap[matchingKey]) {\n queueUrls[worker.id] = {\n queueUrl: outputMap[matchingKey],\n region,\n };\n }\n }\n spinner.succeed('Fetched CloudFormation outputs');\n } catch (error) {\n spinner.warn('Could not fetch CloudFormation outputs. 
Using deterministic queue URLs.');\n for (const worker of workers) {\n queueUrls[worker.id] = {\n queueUrl: `https://sqs.${'${aws:region}'}.amazonaws.com/${'${aws:accountId}'}/${'${self:service}'}-${worker.id}-${stage}`,\n region,\n };\n }\n }\n\n // Generate TypeScript file\n const mapContent = `/**\n * Auto-generated workers map\n * DO NOT EDIT - This file is generated by deploy-workers script\n */\n\nexport const workersMap = ${JSON.stringify(queueUrls, null, 2)} as const;\n`;\n\n const mapFile = path.join(serverlessDir, 'workers-map.generated.ts');\n fs.writeFileSync(mapFile, mapContent);\n console.log(chalk.green(`✓ Generated workers map: ${mapFile}`));\n}\n\nasync function build(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n const aiPath = args['ai-path'] || 'app/ai';\n\n console.log(chalk.blue(`📦 Building workers (stage: ${stage}, region: ${region})...`));\n\n const spinner = ora('Scanning workers...').start();\n const workers = await scanWorkers(aiPath);\n\n if (workers.length === 0) {\n spinner.warn('No workers found.');\n return;\n }\n spinner.succeed(`Found ${workers.length} worker(s)`);\n workers.forEach(w => console.log(chalk.gray(` - ${w.id} (${w.filePath})`)));\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: true });\n }\n\n // Build an accurate dependencies map for Microfox installs:\n // include any npm packages imported by the worker entrypoints (and their local imports),\n // plus runtime packages used by generated handlers.\n const runtimeDeps = await collectRuntimeDependenciesForWorkers(\n workers.map((w) => w.filePath),\n process.cwd()\n );\n const dependencies = buildDependenciesMap(process.cwd(), runtimeDeps);\n\n // Generate package.json for the serverless service (used by Microfox push)\n const packageJson = {\n name: 'ai-router-workers',\n version: '1.0.0',\n description: 'Auto-generated serverless workers',\n private: true,\n dependencies,\n scripts: {\n build: \"echo 'Already compiled.'\",\n },\n devDependencies: {\n serverless: '^3.38.0',\n 'serverless-offline': '^13.3.3',\n '@aws-sdk/client-sqs': '^3.700.0',\n },\n };\n fs.writeFileSync(\n path.join(serverlessDir, 'package.json'),\n JSON.stringify(packageJson, null, 2)\n );\n\n // No tsconfig.json needed as we are deploying bundled JS\n\n const envVars = loadEnvVars();\n\n // Detect env usage from worker entry files + their local dependency graph.\n // We use this to populate env.json with only envs that are actually referenced,\n // but ONLY if they exist in .env (we don't invent values).\n const workerEntryFiles = workers.map((w) => w.filePath);\n const { runtimeKeys: runtimeEnvKeys, buildtimeKeys: buildtimeEnvKeys } =\n await collectEnvUsageForWorkers(workerEntryFiles, process.cwd());\n const referencedEnvKeys = new Set<string>([\n ...Array.from(runtimeEnvKeys),\n ...Array.from(buildtimeEnvKeys),\n ]);\n\n // Light, helpful logging (avoid noisy huge dumps)\n const runtimeList = Array.from(runtimeEnvKeys).sort();\n const buildtimeList = Array.from(buildtimeEnvKeys).sort();\n const missingFromDotEnv = Array.from(referencedEnvKeys)\n .filter((k) => !(k in envVars))\n .sort();\n if (runtimeList.length || buildtimeList.length) {\n console.log(\n chalk.blue(\n `ℹ️ Detected env usage from worker code: runtime=${runtimeList.length}, buildtime=${buildtimeList.length}`\n )\n );\n if (missingFromDotEnv.length > 0) 
{\n console.log(\n chalk.yellow(\n `⚠️ These referenced envs were not found in .env (so they will NOT be written to env.json): ${missingFromDotEnv\n .slice(0, 25)\n .join(', ')}${missingFromDotEnv.length > 25 ? ' ...' : ''}`\n )\n );\n }\n }\n\n let serviceName = (args['service-name'] as string | undefined)?.trim() || `ai-router-workers-${stage}`;\n\n // Check for microfox.json to customize service name\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n try {\n const microfoxConfig = JSON.parse(fs.readFileSync(microfoxJsonPath, 'utf-8'));\n if (microfoxConfig.projectId) {\n // Only override if user did not explicitly provide a service name\n if (!(args['service-name'] as string | undefined)?.trim()) {\n serviceName = getServiceNameFromProjectId(microfoxConfig.projectId);\n }\n console.log(chalk.blue(`ℹ️ Using service name from microfox.json: ${serviceName}`));\n }\n } catch (error) {\n console.warn(chalk.yellow('⚠️ Failed to parse microfox.json, using default service name'));\n }\n }\n\n ora('Generating handlers...').start().succeed('Generated handlers');\n await generateHandlers(workers, serverlessDir);\n\n // Now import the bundled handlers to extract workerConfig\n const extractSpinner = ora('Extracting worker configs from bundled handlers...').start();\n for (const worker of workers) {\n try {\n const handlerFile = path.join(serverlessDir, worker.handlerPath + '.js');\n if (fs.existsSync(handlerFile)) {\n // Convert absolute path to file:// URL for ESM import (required on Windows)\n const handlerUrl = pathToFileURL(path.resolve(handlerFile)).href;\n\n try {\n // Import the bundled handler (which exports exportedWorkerConfig)\n // Note: The handler might have runtime errors, but we only need the exportedWorkerConfig\n const module = await import(handlerUrl);\n\n // exportedWorkerConfig is exported directly from the handler file\n if (module.exportedWorkerConfig) {\n worker.workerConfig = module.exportedWorkerConfig;\n if (module.exportedWorkerConfig.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${module.exportedWorkerConfig.layers.length} layer(s)`));\n }\n } else {\n console.warn(chalk.yellow(` ⚠ ${worker.id}: exportedWorkerConfig not found in handler`));\n }\n } catch (importError: any) {\n // If import fails due to runtime errors (e.g., lazy-cache initialization in bundled code),\n // try to extract config from source file as fallback. 
This is expected for some bundled handlers.\n // The fallback will work fine, and the Lambda runtime will handle the bundled code correctly.\n console.log(chalk.gray(` ℹ ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || 'runtime error'}...)`));\n\n // Fallback: try to read the source worker file and extract workerConfig\n try {\n const sourceContent = fs.readFileSync(worker.filePath, 'utf-8');\n // Look for exported workerConfig\n const workerConfigMatch = sourceContent.match(/export\\s+const\\s+workerConfig[^=]*=\\s*(\\{[\\s\\S]*?\\});/);\n if (workerConfigMatch) {\n // Try to parse it as JSON (after cleaning up comments)\n let configStr = workerConfigMatch[1]\n .replace(/\\/\\*[\\s\\S]*?\\*\\//g, '') // Remove block comments\n .replace(/(^|\\s)\\/\\/[^\\n]*/gm, '$1'); // Remove line comments\n\n // Use Function constructor to parse the object (safer than eval)\n const configObj = new Function('return ' + configStr)();\n if (configObj && (configObj.layers || configObj.timeout || configObj.memorySize || configObj.schedule)) {\n worker.workerConfig = configObj;\n if (configObj.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${configObj.layers.length} layer(s) from source file`));\n }\n if (configObj.schedule) {\n console.log(chalk.gray(` ✓ ${worker.id}: found schedule configuration`));\n }\n }\n }\n } catch (fallbackError) {\n // If fallback also fails, just log and continue\n console.warn(chalk.yellow(` ⚠ ${worker.id}: fallback extraction also failed, using defaults`));\n }\n }\n } else {\n console.warn(chalk.yellow(` ⚠ ${worker.id}: handler file not found: ${handlerFile}`));\n }\n } catch (error: any) {\n // If everything fails, workerConfig will remain undefined (fallback to defaults)\n console.warn(chalk.yellow(` ⚠ ${worker.id}: failed to extract config: ${error?.message || error}`));\n }\n }\n extractSpinner.succeed('Extracted configs');\n\n generateWorkersConfigHandler(serverlessDir, workers, serviceName);\n generateDocsHandler(serverlessDir, serviceName, stage, region);\n generateTriggerHandler(serverlessDir, serviceName);\n\n const config = generateServerlessConfig(workers, stage, region, envVars, serviceName);\n\n // Always generate env.json now as serverless.yml relies on it.\n // Microfox deploys APIs on prod by default; when microfox.json exists, default ENVIRONMENT/STAGE to \"prod\".\n const envStage = fs.existsSync(microfoxJsonPath) ? 
'prod' : stage;\n const safeEnvVars: Record<string, string> = {\n ENVIRONMENT: envStage,\n STAGE: envStage,\n NODE_ENV: envStage,\n };\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'WORKERS_', 'REMOTION_'];\n\n for (const [key, value] of Object.entries(envVars)) {\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n if (key.startsWith('AWS_')) continue;\n\n // Keep legacy behavior for known-safe prefixes,\n // and also include any env that is referenced by worker code.\n if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {\n safeEnvVars[key] = value;\n }\n }\n\n fs.writeFileSync(\n path.join(serverlessDir, 'env.json'),\n JSON.stringify(safeEnvVars, null, 2)\n );\n\n const yamlContent = yaml.dump(config, { indent: 2 });\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n fs.writeFileSync(yamlPath, yamlContent);\n console.log(chalk.green(`✓ Generated serverless.yml: ${yamlPath}`));\n}\n\nasync function deploy(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n const skipDeploy = args['skip-deploy'] || false;\n const skipInstall = args['skip-install'] || false;\n\n if (skipDeploy) {\n console.log(chalk.yellow('⏭️ Skipping deployment (--skip-deploy flag)'));\n return;\n }\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n\n if (!fs.existsSync(yamlPath)) {\n console.error(chalk.red('❌ serverless.yml not found. Run \"build\" first.'));\n process.exit(1);\n }\n\n console.log(chalk.blue(`🚀 Deploying to AWS (stage: ${stage}, region: ${region})...`));\n validateEnvironment();\n\n try {\n // Install dependencies in the serverless directory if node_modules doesn't exist\n // Skip if --skip-install is provided\n if (!skipInstall && !fs.existsSync(path.join(serverlessDir, 'node_modules'))) {\n console.log(chalk.blue('📦 Installing serverless dependencies...'));\n execSync('npm install', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n }\n\n // Check for microfox.json in project root\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n console.log(chalk.blue('ℹ️ Found microfox.json, deploying via Microfox Cloud...'));\n\n // Copy microfox.json to .serverless-workers directory\n fs.copyFileSync(microfoxJsonPath, path.join(serverlessDir, 'microfox.json'));\n\n // Load and filter environment variables\n const envVars = loadEnvVars();\n // env.json is already generated by build()\n\n execSync('npx microfox@latest push', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n console.log(chalk.green('✓ Deployment triggered via Microfox!'));\n // We don't generate workers map for Microfox push as it handles its own routing\n return;\n }\n\n execSync('npx serverless deploy', {\n cwd: serverlessDir,\n stdio: 'inherit',\n env: {\n ...process.env,\n STAGE: stage,\n AWS_REGION: region,\n },\n });\n console.log(chalk.green('✓ Deployment complete!'));\n } catch (error) {\n console.error(chalk.red('❌ Deployment failed'));\n process.exit(1);\n }\n\n await generateWorkersMap(stage, region, serverlessDir);\n}\n\nexport const pushCommand = new Command()\n .name('push')\n .description('Build and deploy background workers to AWS')\n .option('-s, --stage <stage>', 'Deployment stage', 
'prod')\n .option('-r, --region <region>', 'AWS region', 'us-east-1')\n .option('--ai-path <path>', 'Path to AI directory containing workers', 'app/ai')\n .option('--service-name <name>', 'Override serverless service name (defaults to ai-router-workers-<stage>)')\n .option('--skip-deploy', 'Skip deployment, only build', false)\n .option('--skip-install', 'Skip npm install in serverless directory', false)\n .action(async (options) => {\n await build(options);\n await deploy(options);\n });\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,IAAAA,oBAAwB;;;ACFxB,uBAAwB;AACxB,cAAyB;AACzB,2BAAyB;AACzB,SAAoB;AACpB,WAAsB;AACtB,iBAA8B;AAC9B,oBAA+B;AAC/B,kBAAqB;AACrB,WAAsB;AACtB,mBAAkB;AAClB,iBAAgB;AAEhB,IAAM,gBAAgB,IAAI;AAAA,EACxB,6BAAe,IAAI,CAAC,MAAO,EAAE,WAAW,OAAO,IAAI,EAAE,MAAM,QAAQ,MAAM,IAAI,CAAE;AACjF;AAEA,SAAS,gBAAgB,WAA4B;AACnD,QAAM,IAAI,UAAU,WAAW,OAAO,IAClC,UAAU,MAAM,QAAQ,MAAM,IAC9B;AACJ,SAAO,cAAc,IAAI,CAAC;AAC5B;AAEA,SAAS,4BAA4B,WAA2B;AAE9D,MAAI,UAAU,WAAW,GAAG,GAAG;AAC7B,UAAM,CAAC,OAAO,IAAI,IAAI,UAAU,MAAM,GAAG;AACzC,WAAO,OAAO,GAAG,KAAK,IAAI,IAAI,KAAK;AAAA,EACrC;AAEA,SAAO,UAAU,MAAM,GAAG,EAAE,CAAC;AAC/B;AAEA,SAAS,sBAAsB,UAAkB,WAAkC;AACjF,QAAM,UAAe,aAAQ,QAAQ;AACrC,QAAM,MAAW,aAAQ,SAAS,SAAS;AAG3C,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,EACR;AACA,aAAW,KAAK,YAAY;AAC1B,QAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,EAC1D;AAGA,MAAO,cAAW,GAAG,KAAQ,YAAS,GAAG,EAAE,YAAY,GAAG;AACxD,UAAM,gBAAgB;AAAA,MACf,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,WAAW;AAAA,IAC5B;AACA,eAAW,KAAK,eAAe;AAC7B,UAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,IAC1D;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,wBAAwB,QAA0B;AACzD,QAAM,QAAkB,CAAC;AAIzB,QAAM,MACJ;AACF,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAEA,SAAO;AACT;AAEA,SAAS,6BAA6B,QAGpC;AACA,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAGtC,QAAM,eAAe;AACrB,aAAW,SAAS,OAAO,SAAS,YAAY,GAAG;AACjD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,mBAAmB;AACzB,aAAW,SAAS,OAAO,SAAS,gBAAgB,GAAG;AACrD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,kBAAkB;AACxB,aAAW,SAAS,OAAO,SAAS,eAAe,GAAG;AACpD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAGA,QAAM,sBAAsB;AAC5B,aAAW,SAAS,OAAO,SAAS,mBAAmB,GAAG;AACxD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAEA,SAAO,EAAE,aAAa,cAAc;AACtC;AAEA,eAAe,0BACb,kBACA,aACmE;AACnE,OAAK;AAEL,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAEtC,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAE/C,UAAM,QAAQ,6BAA6B,GAAG;AAC9C,UAAM,YAAY,QAAQ,CAAC,MAAM,YAAY,IAAI,CAAC,CAAC;AACnD,UAAM,cAAc,QAAQ,CAAC,MAAM,cAAc,IAAI,CAAC,CAAC;AAEvD,UAAM,aAAa,wBAAwB,GAAG;AAC9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAAA,IAE7B;AAAA,EACF;AAEA,cAAY,OAAO,EA
AE;AACrB,gBAAc,OAAO,EAAE;AACvB,cAAY,OAAO,MAAM;AACzB,gBAAc,OAAO,MAAM;AAE3B,SAAO,EAAE,aAAa,cAAc;AACtC;AAEA,SAAS,aAAsB,UAA4B;AACzD,MAAI;AACF,WAAO,KAAK,MAAS,gBAAa,UAAU,OAAO,CAAC;AAAA,EACtD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,iBAAiB,UAA0B;AAClD,MAAI,MAAW,aAAQ,QAAQ;AAE/B,SAAO,MAAM;AACX,UAAM,UAAe,UAAK,KAAK,cAAc;AAC7C,QAAO,cAAW,OAAO,GAAG;AAC1B,YAAM,MAAM,aAAkB,OAAO;AACrC,UAAI,KAAK,WAAY,QAAO;AAAA,IAC9B;AAEA,UAAM,SAAc,aAAQ,GAAG;AAC/B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,eAAe,qCACb,kBACA,aACsB;AAGtB,QAAM,OAAO,oBAAI,IAAY,CAAC,uBAAuB,qBAAqB,CAAC;AAC3E,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAC/C,UAAM,aAAa,wBAAwB,GAAG;AAE9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAE3B,WAAK,IAAI,4BAA4B,IAAI,CAAC;AAAA,IAC5C;AAAA,EACF;AAGA,OAAK,OAAO,EAAE;AACd,OAAK,OAAO,MAAM;AAGlB,OAAK,OAAO,YAAY;AACxB,OAAK,OAAO,oBAAoB;AAChC,OAAK,OAAO,qBAAqB;AACjC,OAAK,OAAO,qBAAqB;AACjC,SAAO;AACT;AAEA,SAAS,qBAAqB,aAAqB,MAA2C;AAC5F,QAAM,aACJ,aAAuB,UAAK,aAAa,cAAc,CAAC,KAAK,CAAC;AAChE,QAAM,cAAsC,WAAW,gBAAgB,CAAC;AACxE,QAAM,iBAAyC,WAAW,mBAAmB,CAAC;AAG9E,QAAM,WAAW,iBAAiB,WAAW;AAC7C,QAAM,YACJ,aAAuB,UAAK,UAAU,YAAY,aAAa,cAAc,CAAC,KAC9E,CAAC;AACH,QAAM,eACJ;AAAA,IACO,UAAK,UAAU,YAAY,iBAAiB,cAAc;AAAA,EACjE,KAAK,CAAC;AAER,QAAM,gBAAwC;AAAA,IAC5C,GAAI,UAAU,gBAAgB,CAAC;AAAA,IAC/B,GAAI,UAAU,mBAAmB,CAAC;AAAA,IAClC,GAAI,aAAa,gBAAgB,CAAC;AAAA,IAClC,GAAI,aAAa,mBAAmB,CAAC;AAAA,EACvC;AAEA,QAAM,MAA8B,CAAC;AACrC,aAAW,OAAO,MAAM,KAAK,IAAI,EAAE,KAAK,GAAG;AACzC,UAAM,QACJ,YAAY,GAAG,KACf,eAAe,GAAG,KAClB,cAAc,GAAG;AAGnB,QAAI,OAAO;AACT,UAAI,GAAG,IAAI,OAAO,KAAK;AAAA,IACzB;AAAA,EACF;AAEA,SAAO;AACT;AAsDO,SAAS,4BAA4B,WAA2B;AACrE,QAAM,mBAAmB,UAAU,QAAQ,MAAM,EAAE,EAAE,MAAM,GAAG,EAAE;AAChE,SAAO,KAAK,gBAAgB;AAC9B;AAKA,SAAS,sBAA4B;AAGnC,MAAI;AACF,uCAAS,iBAAiB,EAAE,OAAO,SAAS,CAAC;AAAA,EAC/C,SAAS,OAAO;AACd,YAAQ,MAAM,aAAAC,QAAM,IAAI,6CAAwC,CAAC;AACjE,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAKA,eAAe,YAAY,SAAiB,UAAiC;AAC3E,QAAM,UAAe,UAAK,QAAQ,gBAAgB,EAAE,QAAQ,OAAO,GAAG;AACtE,QAAM,QAAQ,UAAM,kBAAK,OAAO;AAEhC,QAAM,UAAwB,CAAC;AAE/B,aAAW,YAAY,OAAO;AAC5B,QAAI;AAGF,UAAI;AACJ,UAAI;AAMJ,UAAI,CAAC,UAAU;AACb,cAAM,UAAa,gBAAa,UAAU,OAAO;AAGjD,cAAM,UAAU,QAAQ,MAAM,qEAAqE;AACnG,YAAI,CAAC,SAAS;AACZ,kBAAQ,KAAK,aAAAA,QAAM,OAAO,0BAAgB,QAAQ,sBAAsB,CAAC;AACzE;AAAA,QACF;AACA,mBAAW,QAAQ,CAAC;AAAA,MACtB;AAIA,YAAM,eAAoB,cAAS,QAAQ,QAAQ;AACnD,YAAM,aAAkB,aAAQ,YAAY;AAC5C,YAAM,cAAmB,cAAS,cAAc,YAAY;AAC5D,YAAM,cAAmB,UAAK,YAAY,YAAY,GAAG,WAAW,EAAE,EAAE,QAAQ,OAAO,GAAG;AAE1F,cAAQ,KAAK;AAAA,QACX,IAAI;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,OAAO;AACd,cAAQ,MAAM,aAAAA,QAAM,IAAI,2BAAsB,QAAQ,GAAG,GAAG,KAAK;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,iBAAiB,SAAuB,WAAkC;AACvF,QAAM,cAAmB,UAAK,WAAW,UAAU;AAGnD,MAAO,cAAW,WAAW,GAAG;AAC9B,IAAG,UAAO,aAAa,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACzD;AACA,EAAG,aAAU,aAAa,EAAE,WAAW,KAAK,CAAC;AAE7C,aAAW,UAAU,SAAS;AAG5B,UAAM,cAAmB,UAAK,aAAa,OAAO,YAAY,QAAQ,aAAa,EAAE,IAAI,KAAK;AAC9F,UAAM,aAAkB,aAAQ,WAAW;AAE3C,QAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,MAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,IAC9C;AASA,UAAM,iBAAsB,aAAQ,WAAW;AAC/C,UAAM,gBAAqB,aAAQ,OAAO,QAAQ;AAGlD,QAAI,qBAA0B,cAAc,aAAQ,cAAc,GAAG,aAAa;AAGlF,QAAI,CAAC,mBAAmB,WAAW,GAAG,GAAG;AACvC,2BAAqB,OAAO;AAAA,IAC9B;AAGA,yBAAqB,mBAAmB,QAAQ,
SAAS,EAAE;AAE3D,yBAAqB,mBAAmB,MAAW,QAAG,EAAE,KAAK,GAAG;AAGhE,UAAM,cAAiB,gBAAa,OAAO,UAAU,OAAO;AAC5D,UAAM,gBAAgB,kCAAkC,KAAK,WAAW;AACxE,UAAM,cAAc,YAAY,MAAM,iDAAiD;AACvF,UAAM,aAAa,cAAc,YAAY,CAAC,IAAI;AAGlD,UAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAE3D,UAAM,YAAY,gBACd,yBACA,gBAAgB,UAAU;AAG9B,UAAM,mBAAmB;AAAA;AAAA,iCAEI,kBAAkB;AAAA;AAAA,sBAE7B,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQ3B,IAAG,iBAAc,eAAe,gBAAgB;AAGhD,QAAI;AAGF,YAAM,qBAAqC;AAAA,QACzC,MAAM;AAAA,QACN,MAAMC,QAAO;AACX,UAAAA,OAAM,MAAM,OAAO,WAAW;AAC5B,gBAAI,OAAO,OAAO,SAAS,EAAG;AAG9B,gBAAI,cAAiB,gBAAa,aAAa,OAAO;AACtD,gBAAI,WAAW;AAMf,kBAAM,UAAU;AAEhB,gBAAI,QAAQ,KAAK,WAAW,GAAG;AAC7B,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAIA,gBAAI,YAAY,SAAS,iBAAiB,GAAG;AAC3C,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAGA,kBAAM,sBAAsB;AAC5B,0BAAc,YAAY;AAAA,cACxB;AAAA,cACA;AAAA,YACF;AACA,gBAAI,gBAAgB,oBAAqB,YAAW;AAEpD,gBAAI,UAAU;AACZ,cAAG,iBAAc,aAAa,aAAa,OAAO;AAAA,YACpD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAc,cAAM;AAAA,QAClB,aAAa,CAAC,aAAa;AAAA,QAC3B,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA;AAAA;AAAA,QAGT,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA;AAAA;AAAA,QAGA,QAAQ;AAAA,UACN,sBAAsB;AAAA,QACxB;AAAA;AAAA;AAAA,QAGA,UAAU;AAAA,QACV,SAAS,CAAC,kBAAkB;AAAA,QAC5B,UAAU;AAAA,MACZ,CAAC;AAGD,MAAG,cAAW,aAAa;AAAA,IAE7B,SAAS,OAAO;AACd,cAAQ,MAAM,aAAAD,QAAM,IAAI,8BAA8B,OAAO,EAAE,GAAG,GAAG,KAAK;AAAA,IAE5E;AAAA,EACF;AACA,UAAQ,IAAI,aAAAA,QAAM,MAAM,oBAAe,QAAQ,MAAM,mBAAmB,CAAC;AAC3E;AAEA,SAAS,oBAAoB,WAAmB,aAAqB,OAAe,QAAsB;AACxG,QAAM,cAAmB,UAAK,WAAW,YAAY,SAAS;AAC9D,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAuBO,MAAM;AAAA,+BACP,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAoJd,WAAW;AAAA,gBACjB,KAAK;AAAA,iBACJ,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAerB,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,aAAAA,QAAM,MAAM,oCAA+B,CAAC;AAC1D;AAEA,SAAS,uBAAuB,WAAmB,aAA2B;AAC5E,QAAM,cAAmB,UAAK,WAAW,YAAY,oBAAoB;AACzE,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAQF,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0FhD,EAAG,iBAAc,eAAe,cAAc;AAE9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,aAAAA,QAAM,MAAM,2CAAsC,CAAC;AACjE;AAKA,SAAS,6BACP,WACA,SACA,aACM;AAEN,QAAM,cAAmB,UAAK,WAAW,YAAY,mBAAmB;AACxE,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAG3C,MAAO,cAAW,UAAU,KAAK,CAAI,cAAW,WAAW,GAAG;AAAA,EAE9D,WAAW,CAAI,cAAW,UAAU,GAAG;AACrC,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BASM,KAAK,UAAU,QAAQ,IAAI,OAAK,EAAE,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA,uBACvD,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA4EhD,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,aAAAA,QAAM,MAAM,yCAAoC,CAAC;AAC/D;AAKA,SAAS,YAAY,UAAkB,QAAgC;AACrE,QAAM,MAA8B,CAAC;AAErC,MAAI,CAAI,cAAW,OAAO,GAAG;AAC3B,YAAQ,KAAK,aAAAA,QAAM,OAAO,wCAA8B,OAAO,EAAE,CAAC;AAClE,WAAO;AAAA,EACT;AAEA,QAAM,UAAa,gBAAa,SAAS,OAAO;AAChD,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAEhC,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,CAAC,WAAW,QAAQ,WAAW,GAAG,EAAG;AAEzC,UAAM,QAAQ,QAAQ,MAAM,gBAAgB;AAC5C,QAAI,OAAO;AACT,YAAM,MAAM,MAAM,CAAC,EAAE,KAAK;AAC1B,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,gBAAgB,EAAE;AACxD,UAAI,GAAG,IAAI;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,sBAAsB,gBAA4B;AACzD,MAAI,CAAC,gBAAgB;AACnB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAgB,CAAC;AAGvB,QAAM,YAAY,MAAM,QAAQ,cAAc,IAAI,iBAAiB,CAAC,cAAc;AAElF,aAAW,YAAY,WAAW;AAEhC,QAAI,OAAO,aAAa,UAAU;AAChC,aAAO,KAAK;AAAA,QACV;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAGA,QAAI,OAAO,aAAa,YAAY,aAAa,MAAM;AACrD,YAAM,gBAAqB,EAAE,UAAU,CAAC,EAAE;AAG1C,UAAI,SAAS,MAAM;AACjB,YAAI,MAAM,QAAQ,SAAS,IAAI,GAAG;AAEhC,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC,OAAO;AAEL,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC;AAAA,MACF,OAAO;AAEL;AAAA,MACF;AAGA,UAAI,SAAS,YAAY,QAAW;AAClC,sBAAc,SAAS,UAAU,SAAS;AAAA,MAC5C;AACA,UAAI,SAAS,UAAU,QAAW;AAChC,sBAAc,SAAS,QAAQ,SAAS;AAAA,MAC1C;AACA,UAAI,SAAS,cAAc,QAAW;AACpC,sBAAc,SAAS,YAAY,SAAS;AAAA,MAC9C;AACA,UAAI,SAAS,qBAAqB,QAAW;AAC3C,sBAAc,SAAS,mBAAmB,SAAS;AAAA,MACrD;AACA,UAAI,SAAS,SAAS,QAAW;AAC/B,sBAAc,SAAS,OAAO,SAAS;AAAA,MACzC;AACA,UAAI,SAAS,gBAAgB,QAAW;AACtC,sBAAc,SAAS,cAAc,SAAS;AAAA,MAChD;AACA,UAAI,SAAS,WAAW,QAAW;AACjC,sBAAc,SAAS,SAAS,SAAS;AAAA,MAC3C;AACA,UAAI,SAAS,aAAa,QAAW;AACnC,sBAAc,SAAS,WAAW,SAAS;AAAA,MAC7C;AAIA,UAAI,OAAO,KAAK,cAAc,QAAQ,EAAE,WAAW,KAAK,cAAc,SAAS,MAAM;AAEnF,YAAI,OAAO,cAAc,SAAS,SAAS,UAAU;AACnD,iBAAO,KAAK;AAAA,YACV,UAAU,cAAc,SAAS;AAAA,UACnC,CAAC;AAAA,QACH,OAAO;AAEL,iBAAO,KAAK,aAAa;AAAA,QAC3B;AAAA,MACF,OAAO;AACL,eAAO,KAAK,aAAa;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,yB
ACP,SACA,OACA,QACA,SACA,aACkB;AAElB,QAAM,YAA2C;AAAA,IAC/C,WAAW,CAAC;AAAA,IACZ,SAAS,CAAC;AAAA,EACZ;AAEA,QAAM,YAAiD,CAAC;AAGxD,QAAM,sBAA2B;AAAA,IAC/B,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AAGA,QAAM,eAAoC;AAAA,IACxC,OAAO,wBAAwB,KAAK;AAAA,IACpC,sBAAsB;AAAA,MACpB,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,oBAAoB;AAAA,MACpB,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,yBAAyB;AAAA,MACzB,WAAW;AAAA,MACX,aAAa;AAAA,IACf;AAAA,EACF;AAEA,aAAW,UAAU,SAAS;AAC5B,UAAM,YAAY,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC;AACtE,UAAM,iBAAiB,GAAG,SAAS,GAAG,KAAK;AAC3C,UAAM,eAAe,GAAG,SAAS,MAAM,KAAK;AAE5C,UAAM,SAAS,OAAO,cAAc;AACpC,UAAM,YACJ,OAAO,QAAQ,2BAA2B,WACtC,OAAO,yBACP;AACN,UAAM,eACJ,OAAO,QAAQ,qCAAqC,WAChD,OAAO,mCACP;AACN,UAAM,oBACJ,OAAO,QAAQ,sBAAsB,WACjC,OAAO,qBACN,OAAO,cAAc,WAAW,OAAO;AAC9C,UAAM,qBACJ,OAAO,QAAQ,oBAAoB,WAAW,OAAO,kBAAkB;AAEzE,UAAM,kBAAkB,KAAK,IAAI,GAAG,KAAK,MAAM,kBAAkB,CAAC;AAGlE,cAAU,UAAU,YAAY,IAAI;AAAA,MAClC,MAAM;AAAA,MACN,YAAY;AAAA,QACV,WAAW,oBAAoB,OAAO,EAAE,wCAAwC,KAAK;AAAA,QACrF,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,cAAU,UAAU,cAAc,IAAI;AAAA,MACpC,MAAM;AAAA,MACN,YAAY;AAAA;AAAA,QAEV,WAAW,oBAAoB,OAAO,EAAE,oCAAoC,KAAK;AAAA,QACjF,mBAAmB;AAAA,QACnB,wBAAwB;AAAA,QACxB,eAAe;AAAA,UACb,qBAAqB,EAAE,cAAc,CAAC,cAAc,KAAK,EAAE;AAAA,UAC3D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,QAAQ,GAAG,cAAc,KAAK,IAAI;AAAA,MAC1C,aAAa,wBAAwB,OAAO,EAAE;AAAA,MAC9C,OAAO,EAAE,KAAK,eAAe;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM,oBAAoB,OAAO,EAAE;AAAA,MACrC;AAAA,IACF;AAEA,cAAU,KAAK,EAAE,cAAc,CAAC,gBAAgB,KAAK,EAAE,CAAC;AAAA,EAC1D;AAGA,QAAM,YAAiC,CAAC;AAExC,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,SAAS,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC;AAGpE,UAAM,SAAgB;AAAA,MACpB;AAAA,QACE,KAAK;AAAA,UACH,KAAK,EAAE,cAAc,CAAC,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC,GAAG,KAAK,IAAI,KAAK,EAAE;AAAA,UAC7F,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,iBAAiB,sBAAsB,OAAO,aAAa,QAAQ;AACzE,aAAO,KAAK,GAAG,cAAc;AAAA,IAC/B;AAEA,cAAU,YAAY,IAAI;AAAA;AAAA,MAExB,SAAS,GAAG,OAAO,WAAW;AAAA,MAC9B,SAAS,OAAO,cAAc,WAAW;AAAA,MACzC,YAAY,OAAO,cAAc,cAAc;AAAA,MAC/C;AAAA,IACF;AAEA,QAAI,OAAO,cAAc,QAAQ,QAAQ;AACvC,gBAAU,YAAY,EAAE,SAAS,OAAO,aAAa;AAAA,IACvD;AAAA,EACF;AAGA,YAAU,SAAS,IAAI;AAAA,IACrB,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,cAAsC,CAAC;AAC7C,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW;AAK5G,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,QAAI,gBAAgB,KAAK,YAAU,IAAI,WAAW,MAAM,CAAC,GAAG;AAC1D,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAGA,YAAU,QAAQ,cAAc,IAAI;AAAA,IAClC,aAAa;AAAA,IACb,OAAO;AAAA,MACL,YAAY;AAAA,QACV;AAAA,QACA;AAAA,UACE;AAAA,UACA,EAAE,OAAO,oBAAoB;AAAA,UAC7B;AAAA,UACA,EAAE,OAAO,cAAc;AAAA,UACvB,uCAAuC,KAAK;AAAA,QAC9C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAAS;AAAA,MACP,wBAAwB;AAAA,MACxB,UAAU;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,IACR,UAAU;AAAA,MACR,MAAM;AAAA,MACN,SAAS;AAAA,MACT;AAAA,MACA,kBAAkB;AAAA;AAAA,MAElB,OAAO,wBAAwB,KAAK;AAAA,MACpC,aAAa;AAAA,MACb,KAAK;AAAA,QACH,MAAM;AAAA,UACJ,YAAY;AAAA,YACV;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ;AAAA,gBACN;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,cACA,UAAU;AAAA,YACZ;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ,
CAAC,iBAAiB;AAAA;AAAA,cAE1B,UAAU;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,SAAS,CAAC,oBAAoB;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAe,mBACb,OACA,QACA,WACe;AACf,QAAM,gBAAqB,UAAK,WAAW,aAAa;AACxD,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAKA,QAAM,UAAU,MAAM,YAAY;AAGlC,QAAM,YAAY,qBAAqB,KAAK,IAAI,KAAK;AACrD,MAAI,YAAkE,CAAC;AAEvE,QAAM,cAAU,WAAAE,SAAI,oCAAoC,EAAE,MAAM;AAEhE,MAAI;AAEF,UAAM,aAAS;AAAA,MACb,mDAAmD,SAAS,aAAa,MAAM;AAAA,MAC/E,EAAE,UAAU,SAAS,OAAO,OAAO;AAAA,IACrC;AAEA,UAAM,UAAU,KAAK,MAAM,MAAM;AACjC,UAAM,YAAoC,CAAC;AAE3C,eAAWC,WAAU,SAAS;AAC5B,YAAM,MAAMA,QAAO;AACnB,UAAI,OAAO,IAAI,SAAS,KAAK,GAAG;AAC9B,cAAM,WAAW,IAAI,QAAQ,eAAe,EAAE,EAAE,QAAQ,OAAO,EAAE,EAAE,YAAY;AAG/E,kBAAU,GAAG,IAAIA,QAAO;AAAA,MAC1B;AAAA,IACF;AAGA,eAAW,UAAU,SAAS;AAC5B,YAAM,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE;AACzD,YAAM,WAAW,cAAc,WAAW,GAAG,KAAK;AAGlD,YAAM,cAAc,OAAO,KAAK,SAAS,EAAE,KAAK,OAAK,EAAE,YAAY,MAAM,SAAS,YAAY,CAAC;AAE/F,UAAI,eAAe,UAAU,WAAW,GAAG;AACzC,kBAAU,OAAO,EAAE,IAAI;AAAA,UACrB,UAAU,UAAU,WAAW;AAAA,UAC/B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,YAAQ,QAAQ,gCAAgC;AAAA,EAClD,SAAS,OAAO;AACd,YAAQ,KAAK,yEAAyE;AACtF,eAAW,UAAU,SAAS;AAC5B,gBAAU,OAAO,EAAE,IAAI;AAAA,QACrB,UAAU,eAAe,eAAe,kBAAkB,kBAAkB,IAAI,iBAAiB,IAAI,OAAO,EAAE,IAAI,KAAK;AAAA,QACvH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA,4BAKO,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC;AAAA;AAG5D,QAAM,UAAe,UAAK,eAAe,0BAA0B;AACnE,EAAG,iBAAc,SAAS,UAAU;AACpC,UAAQ,IAAI,aAAAH,QAAM,MAAM,iCAA4B,OAAO,EAAE,CAAC;AAChE;AAEA,eAAeC,OAAM,MAAW;AAC9B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AACxD,QAAM,SAAS,KAAK,SAAS,KAAK;AAElC,UAAQ,IAAI,aAAAD,QAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AAErF,QAAM,cAAU,WAAAE,SAAI,qBAAqB,EAAE,MAAM;AACjD,QAAM,UAAU,MAAM,YAAY,MAAM;AAExC,MAAI,QAAQ,WAAW,GAAG;AACxB,YAAQ,KAAK,mBAAmB;AAChC;AAAA,EACF;AACA,UAAQ,QAAQ,SAAS,QAAQ,MAAM,YAAY;AACnD,UAAQ,QAAQ,OAAK,QAAQ,IAAI,aAAAF,QAAM,KAAK,OAAO,EAAE,EAAE,KAAK,EAAE,QAAQ,GAAG,CAAC,CAAC;AAE3E,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAKA,QAAM,cAAc,MAAM;AAAA,IACxB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,IAC7B,QAAQ,IAAI;AAAA,EACd;AACA,QAAM,eAAe,qBAAqB,QAAQ,IAAI,GAAG,WAAW;AAGpE,QAAM,cAAc;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,SAAS;AAAA,IACT;AAAA,IACA,SAAS;AAAA,MACP,OAAO;AAAA,IACT;AAAA,IACA,iBAAiB;AAAA,MACf,YAAY;AAAA,MACZ,sBAAsB;AAAA,MACtB,uBAAuB;AAAA,IACzB;AAAA,EACF;AACA,EAAG;AAAA,IACI,UAAK,eAAe,cAAc;AAAA,IACvC,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,EACrC;AAIA,QAAM,UAAU,YAAY;AAK5B,QAAM,mBAAmB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AACtD,QAAM,EAAE,aAAa,gBAAgB,eAAe,iBAAiB,IACnE,MAAM,0BAA0B,kBAAkB,QAAQ,IAAI,CAAC;AACjE,QAAM,oBAAoB,oBAAI,IAAY;AAAA,IACxC,GAAG,MAAM,KAAK,cAAc;AAAA,IAC5B,GAAG,MAAM,KAAK,gBAAgB;AAAA,EAChC,CAAC;AAGD,QAAM,cAAc,MAAM,KAAK,cAAc,EAAE,KAAK;AACpD,QAAM,gBAAgB,MAAM,KAAK,gBAAgB,EAAE,KAAK;AACxD,QAAM,oBAAoB,MAAM,KAAK,iBAAiB,EACnD,OAAO,CAAC,MAAM,EAAE,KAAK,QAAQ,EAC7B,KAAK;AACR,MAAI,YAAY,UAAU,cAAc,QAAQ;AAC9C,YAAQ;AAAA,MACN,aAAAA,QAAM;AAAA,QACJ,8DAAoD,YAAY,MAAM,eAAe,cAAc,MAAM;AAAA,MAC3G;AAAA,IACF;AACA,QAAI,kBAAkB,SAAS,GAAG;AAChC,cAAQ;AAAA,QACN,aAAAA,QAAM;AAAA,UACJ,yGAA+F,kBAC5F,MAAM,GAAG,EAAE,EACX,KAAK,IAAI,CAAC,GAAG,kBAAkB,SAAS,KAAK,SAAS,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,cAAe,KAAK,cAAc,GAA0B,KAAK,KAAK,qBAAqB,KAAK;AAGpG,QAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,MAAO,cAAW,gBAAgB,GAAG;AACnC,QAAI;AACF,YAAM,iBAAiB,KAAK,MAAS,gBAAa,kBAAkB,OAAO,CAAC;AAC5E,UAAI,eAAe,WAAW;AAE5B,YAAI,CAAE,KAAK,cAAc,GAA0B,KAAK,GAAG;AACzD,wBAAc,4BAA4B,eAAe,SAAS;AAAA,QACp
E;AACA,gBAAQ,IAAI,aAAAA,QAAM,KAAK,wDAA8C,WAAW,EAAE,CAAC;AAAA,MACrF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,KAAK,aAAAA,QAAM,OAAO,yEAA+D,CAAC;AAAA,IAC5F;AAAA,EACF;AAEA,iBAAAE,SAAI,wBAAwB,EAAE,MAAM,EAAE,QAAQ,oBAAoB;AAClE,QAAM,iBAAiB,SAAS,aAAa;AAG7C,QAAM,qBAAiB,WAAAA,SAAI,oDAAoD,EAAE,MAAM;AACvF,aAAW,UAAU,SAAS;AAC5B,QAAI;AACF,YAAM,cAAmB,UAAK,eAAe,OAAO,cAAc,KAAK;AACvE,UAAO,cAAW,WAAW,GAAG;AAE9B,cAAM,iBAAa,0BAAmB,aAAQ,WAAW,CAAC,EAAE;AAE5D,YAAI;AAGF,gBAAME,UAAS,MAAM,OAAO;AAG5B,cAAIA,QAAO,sBAAsB;AAC/B,mBAAO,eAAeA,QAAO;AAC7B,gBAAIA,QAAO,qBAAqB,QAAQ,QAAQ;AAC9C,sBAAQ,IAAI,aAAAJ,QAAM,KAAK,YAAO,OAAO,EAAE,WAAWI,QAAO,qBAAqB,OAAO,MAAM,WAAW,CAAC;AAAA,YACzG;AAAA,UACF,OAAO;AACL,oBAAQ,KAAK,aAAAJ,QAAM,OAAO,YAAO,OAAO,EAAE,6CAA6C,CAAC;AAAA,UAC1F;AAAA,QACF,SAAS,aAAkB;AAIzB,kBAAQ,IAAI,aAAAA,QAAM,KAAK,YAAO,OAAO,EAAE,mDAAmD,aAAa,SAAS,MAAM,GAAG,EAAE,KAAK,eAAe,MAAM,CAAC;AAGtJ,cAAI;AACF,kBAAM,gBAAmB,gBAAa,OAAO,UAAU,OAAO;AAE9D,kBAAM,oBAAoB,cAAc,MAAM,uDAAuD;AACrG,gBAAI,mBAAmB;AAErB,kBAAI,YAAY,kBAAkB,CAAC,EAChC,QAAQ,qBAAqB,EAAE,EAC/B,QAAQ,sBAAsB,IAAI;AAGrC,oBAAM,YAAY,IAAI,SAAS,YAAY,SAAS,EAAE;AACtD,kBAAI,cAAc,UAAU,UAAU,UAAU,WAAW,UAAU,cAAc,UAAU,WAAW;AACtG,uBAAO,eAAe;AACtB,oBAAI,UAAU,QAAQ,QAAQ;AAC5B,0BAAQ,IAAI,aAAAA,QAAM,KAAK,YAAO,OAAO,EAAE,WAAW,UAAU,OAAO,MAAM,4BAA4B,CAAC;AAAA,gBACxG;AACA,oBAAI,UAAU,UAAU;AACtB,0BAAQ,IAAI,aAAAA,QAAM,KAAK,YAAO,OAAO,EAAE,gCAAgC,CAAC;AAAA,gBAC1E;AAAA,cACF;AAAA,YACF;AAAA,UACF,SAAS,eAAe;AAEtB,oBAAQ,KAAK,aAAAA,QAAM,OAAO,YAAO,OAAO,EAAE,mDAAmD,CAAC;AAAA,UAChG;AAAA,QACF;AAAA,MACF,OAAO;AACL,gBAAQ,KAAK,aAAAA,QAAM,OAAO,YAAO,OAAO,EAAE,6BAA6B,WAAW,EAAE,CAAC;AAAA,MACvF;AAAA,IACF,SAAS,OAAY;AAEnB,cAAQ,KAAK,aAAAA,QAAM,OAAO,YAAO,OAAO,EAAE,+BAA+B,OAAO,WAAW,KAAK,EAAE,CAAC;AAAA,IACrG;AAAA,EACF;AACA,iBAAe,QAAQ,mBAAmB;AAE1C,+BAA6B,eAAe,SAAS,WAAW;AAChE,sBAAoB,eAAe,aAAa,OAAO,MAAM;AAC7D,yBAAuB,eAAe,WAAW;AAEjD,QAAM,SAAS,yBAAyB,SAAS,OAAO,QAAQ,SAAS,WAAW;AAIpF,QAAM,WAAc,cAAW,gBAAgB,IAAI,SAAS;AAC5D,QAAM,cAAsC;AAAA,IAC1C,aAAa;AAAA,IACb,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AACA,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW;AAE5G,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAGlD,QAAI,IAAI,WAAW,MAAM,EAAG;AAI5B,QAAI,gBAAgB,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,KAAK,kBAAkB,IAAI,GAAG,GAAG;AAC1F,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAEA,EAAG;AAAA,IACI,UAAK,eAAe,UAAU;AAAA,IACnC,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,EACrC;AAEA,QAAM,cAAmB,UAAK,QAAQ,EAAE,QAAQ,EAAE,CAAC;AACnD,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAC1D,EAAG,iBAAc,UAAU,WAAW;AACtC,UAAQ,IAAI,aAAAA,QAAM,MAAM,oCAA+B,QAAQ,EAAE,CAAC;AACpE;AAEA,eAAe,OAAO,MAAW;AAC/B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AACxD,QAAM,aAAa,KAAK,aAAa,KAAK;AAC1C,QAAM,cAAc,KAAK,cAAc,KAAK;AAE5C,MAAI,YAAY;AACd,YAAQ,IAAI,aAAAA,QAAM,OAAO,wDAA8C,CAAC;AACxE;AAAA,EACF;AAEA,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAE1D,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,YAAQ,MAAM,aAAAA,QAAM,IAAI,qDAAgD,CAAC;AACzE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,IAAI,aAAAA,QAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AACrF,sBAAoB;AAEpB,MAAI;AAGF,QAAI,CAAC,eAAe,CAAI,cAAgB,UAAK,eAAe,cAAc,CAAC,GAAG;AAC5E,cAAQ,IAAI,aAAAA,QAAM,KAAK,iDAA0C,CAAC;AAClE,yCAAS,eAAe;AAAA,QACtB,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAGA,UAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,QAAO,cAAW,gBAAgB,GAAG;AACnC,cAAQ,IAAI,aAAAA,QAAM,KAAK,oEAA0D,CAAC;AAGlF,MAAG,gBAAa,kBAAuB,UAAK,eAAe,eAAe,CAAC;AAG3E,YAAM,UAAU,YAAY;AAG5B,yCAAS,4BAA4B;AAAA,QACnC,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AACD,cAAQ,IAAI,aAAAA,QAAM,MAAM,2CAAsC,CAAC;AAE/D;AAAA,IACF;AAEA,uCAAS,yBAAyB;AAAA,MAChC,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK;AAAA,QACH,GAAG,QAAQ;AAAA,QACX,OAAO;AAAA,Q
ACP,YAAY;AAAA,MACd;AAAA,IACF,CAAC;AACD,YAAQ,IAAI,aAAAA,QAAM,MAAM,6BAAwB,CAAC;AAAA,EACnD,SAAS,OAAO;AACd,YAAQ,MAAM,aAAAA,QAAM,IAAI,0BAAqB,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,mBAAmB,OAAO,QAAQ,aAAa;AACvD;AAEO,IAAM,cAAc,IAAI,yBAAQ,EACpC,KAAK,MAAM,EACX,YAAY,4CAA4C,EACxD,OAAO,uBAAuB,oBAAoB,MAAM,EACxD,OAAO,yBAAyB,cAAc,WAAW,EACzD,OAAO,oBAAoB,2CAA2C,QAAQ,EAC9E,OAAO,yBAAyB,0EAA0E,EAC1G,OAAO,iBAAiB,+BAA+B,KAAK,EAC5D,OAAO,kBAAkB,4CAA4C,KAAK,EAC1E,OAAO,OAAO,YAAY;AACzB,QAAMC,OAAM,OAAO;AACnB,QAAM,OAAO,OAAO;AACtB,CAAC;;;AD9xDH,IAAM,UAAU,IAAI,0BAAQ;AAE5B,QACG,KAAK,WAAW,EAChB,YAAY,wDAAwD,EACpE,QAAQ,OAAO;AAElB,QAAQ,WAAW,WAAW;AAE9B,QAAQ,MAAM,QAAQ,IAAI;AAE1B,IAAM,cAAc;","names":["import_commander","chalk","build","ora","output","module"]}
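The mappings field in these maps is machine-generated VLQ data; the human-readable part of this diff is the embedded sourcesContent strings. The added map below (the + line) embeds a third source, src/commands/new.ts, registers it as a second commander sub-command, and reports CLI version 1.0.0. A minimal sketch of that entry-point wiring, reconstructed from the embedded src/index.ts source in the added map (illustrative only; the published artifact is the bundled dist/index.cjs, not this file):

#!/usr/bin/env node
import { Command } from 'commander';
import { pushCommand } from './commands/push.js';
import { newCommand } from './commands/new.js'; // present in the added map's sources

const program = new Command();

program
  .name('ai-worker')
  .description('CLI tooling for deploying ai-router background workers')
  .version('1.0.0');

// Both sub-commands are registered on the same program instance.
program.addCommand(pushCommand);
program.addCommand(newCommand);

program.parse(process.argv);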
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/commands/push.ts","../src/commands/new.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { Command } from 'commander';\nimport { pushCommand } from './commands/push.js';\nimport { newCommand } from './commands/new.js';\n\nconst program = new Command();\n\nprogram\n .name('ai-worker')\n .description('CLI tooling for deploying ai-router background workers')\n .version('1.0.0');\n\nprogram.addCommand(pushCommand);\nprogram.addCommand(newCommand);\n\nprogram.parse(process.argv);\n\nconst aiWorkerCli = program;\nexport { aiWorkerCli };\n","import { Command } from 'commander';\nimport * as esbuild from 'esbuild';\nimport { execSync } from 'child_process';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { pathToFileURL } from 'url';\nimport { builtinModules } from 'module';\nimport { glob } from 'glob';\nimport * as yaml from 'js-yaml';\nimport chalk from 'chalk';\nimport ora from 'ora';\n\nconst NODE_BUILTINS = new Set(\n builtinModules.map((m) => (m.startsWith('node:') ? m.slice('node:'.length) : m))\n);\n\nfunction isBuiltinModule(specifier: string): boolean {\n const s = specifier.startsWith('node:')\n ? specifier.slice('node:'.length)\n : specifier;\n return NODE_BUILTINS.has(s);\n}\n\nfunction getPackageNameFromSpecifier(specifier: string): string {\n // Scoped packages: @scope/name/...\n if (specifier.startsWith('@')) {\n const [scope, name] = specifier.split('/');\n return name ? `${scope}/${name}` : specifier;\n }\n // Unscoped: name/...\n return specifier.split('/')[0];\n}\n\nfunction tryResolveLocalImport(fromFile: string, specifier: string): string | null {\n const baseDir = path.dirname(fromFile);\n const raw = path.resolve(baseDir, specifier);\n\n // Direct file hits\n const candidates = [\n raw,\n `${raw}.ts`,\n `${raw}.tsx`,\n `${raw}.js`,\n `${raw}.mjs`,\n `${raw}.cjs`,\n ];\n for (const c of candidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n\n // Directory index hits\n if (fs.existsSync(raw) && fs.statSync(raw).isDirectory()) {\n const idxCandidates = [\n path.join(raw, 'index.ts'),\n path.join(raw, 'index.tsx'),\n path.join(raw, 'index.js'),\n path.join(raw, 'index.mjs'),\n path.join(raw, 'index.cjs'),\n ];\n for (const c of idxCandidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n }\n\n return null;\n}\n\nfunction extractImportSpecifiers(source: string): string[] {\n const specs: string[] = [];\n\n // import ... from 'x' / export ... from 'x'\n // NOTE: we intentionally ignore \"import type ... 
from\" because it's type-only.\n const re1 =\n /(?:^|\\n)\\s*(?!import\\s+type)(?:import|export)\\s+[\\s\\S]*?\\sfrom\\s*['\"]([^'\"]+)['\"]/g;\n for (const match of source.matchAll(re1)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // import('x')\n const re2 = /import\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re2)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // require('x')\n const re3 = /require\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re3)) {\n if (match[1]) specs.push(match[1]);\n }\n\n return specs;\n}\n\nfunction extractEnvVarUsageFromSource(source: string): {\n runtimeKeys: Set<string>;\n buildtimeKeys: Set<string>;\n} {\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n // process.env.KEY / process.env?.KEY\n const reProcessDot = /\\bprocess\\.env\\??\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reProcessDot)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // process.env['KEY'] / process.env[\"KEY\"]\n const reProcessBracket = /\\bprocess\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reProcessBracket)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // import.meta.env.KEY\n const reImportMetaDot = /\\bimport\\.meta\\.env\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reImportMetaDot)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n // import.meta.env['KEY']\n const reImportMetaBracket = /\\bimport\\.meta\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reImportMetaBracket)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n return { runtimeKeys, buildtimeKeys };\n}\n\nasync function collectEnvUsageForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<{ runtimeKeys: Set<string>; buildtimeKeys: Set<string> }> {\n void projectRoot; // reserved for future improvements (tsconfig path aliases, etc.)\n\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n\n const usage = extractEnvVarUsageFromSource(src);\n usage.runtimeKeys.forEach((k) => runtimeKeys.add(k));\n usage.buildtimeKeys.forEach((k) => buildtimeKeys.add(k));\n\n const specifiers = extractImportSpecifiers(src);\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) continue;\n if (isBuiltinModule(spec)) continue;\n // External packages are ignored; we only scan local files.\n }\n }\n\n runtimeKeys.delete('');\n buildtimeKeys.delete('');\n runtimeKeys.delete('node');\n buildtimeKeys.delete('node');\n\n return { runtimeKeys, buildtimeKeys };\n}\n\n/**\n * Collect callee worker IDs per worker (ctx.dispatchWorker('id', ...) 
in handler code).\n * Walks from each worker entry file and its local imports, extracts string literal IDs.\n */\nasync function collectCalleeWorkerIds(\n workers: WorkerInfo[],\n projectRoot: string\n): Promise<Map<string, Set<string>>> {\n void projectRoot;\n const calleeIdsByWorker = new Map<string, Set<string>>();\n\n const workerIds = new Set(workers.map((w) => w.id));\n\n for (const worker of workers) {\n const calleeIds = new Set<string>();\n const visited = new Set<string>();\n const queue: string[] = [worker.filePath];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n\n // ctx.dispatchWorker('id', ...) or ctx.dispatchWorker(\"id\", ...)\n const re = /(?:ctx\\.)?dispatchWorker\\s*\\(\\s*['\"]([^'\"]+)['\"]/g;\n for (const match of src.matchAll(re)) {\n if (match[1]) calleeIds.add(match[1]);\n }\n\n const specifiers = extractImportSpecifiers(src);\n for (const spec of specifiers) {\n if (!spec || !spec.startsWith('.')) continue;\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n }\n }\n\n if (calleeIds.size > 0) {\n for (const calleeId of calleeIds) {\n if (!workerIds.has(calleeId)) {\n console.warn(\n chalk.yellow(\n `⚠️ Worker \"${worker.id}\" calls \"${calleeId}\" which is not in scanned workers (typo or other service?). Queue URL will not be auto-injected.`\n )\n );\n }\n }\n calleeIdsByWorker.set(worker.id, calleeIds);\n }\n }\n\n return calleeIdsByWorker;\n}\n\nfunction sanitizeWorkerIdForEnv(workerId: string): string {\n return workerId.replace(/-/g, '_').toUpperCase();\n}\n\nfunction readJsonFile<T = any>(filePath: string): T | null {\n try {\n return JSON.parse(fs.readFileSync(filePath, 'utf-8')) as T;\n } catch {\n return null;\n }\n}\n\nfunction findMonorepoRoot(startDir: string): string {\n let dir = path.resolve(startDir);\n // Walk up until we find a package.json with \"workspaces\" or we hit filesystem root.\n while (true) {\n const pkgPath = path.join(dir, 'package.json');\n if (fs.existsSync(pkgPath)) {\n const pkg = readJsonFile<any>(pkgPath);\n if (pkg?.workspaces) return dir;\n }\n\n const parent = path.dirname(dir);\n if (parent === dir) return startDir; // fallback\n dir = parent;\n }\n}\n\nasync function collectRuntimeDependenciesForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<Set<string>> {\n // Always include these: they're used by generated workers-config / lambda wrapper logic,\n // and are safe to install even if handlers are bundled.\n const deps = new Set<string>(['@microfox/ai-worker', '@aws-sdk/client-sqs']);\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n const specifiers = extractImportSpecifiers(src);\n\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) 
continue;\n if (isBuiltinModule(spec)) continue;\n\n deps.add(getPackageNameFromSpecifier(spec));\n }\n }\n\n // Filter out anything that isn't an npm package name\n deps.delete('');\n deps.delete('node');\n\n // Filter devDependencies\n deps.delete('serverless');\n deps.delete('serverless-offline');\n deps.delete('@aws-sdk/client-sqs');\n deps.delete('@microfox/ai-worker')\n return deps;\n}\n\n/** Resolve job store type from env (used for conditional deps). Default: upstash-redis. */\nfunction getJobStoreType(): 'mongodb' | 'upstash-redis' {\n const raw = process.env.WORKER_DATABASE_TYPE?.toLowerCase();\n if (raw === 'mongodb' || raw === 'upstash-redis') return raw;\n return 'upstash-redis';\n}\n\n/**\n * Filter runtime deps so only the chosen job-store backend is included (+ mongodb if user code uses it).\n * - type mongodb: include only mongodb for job store.\n * - type upstash-redis: include only @upstash/redis for job store.\n * - If user code imports mongodb (e.g. worker uses Mongo for its own logic), always add mongodb.\n */\nfunction filterDepsForJobStore(\n runtimeDeps: Set<string>,\n jobStoreType: 'mongodb' | 'upstash-redis'\n): Set<string> {\n const filtered = new Set(runtimeDeps);\n filtered.delete('mongodb');\n filtered.delete('@upstash/redis');\n if (jobStoreType === 'mongodb') filtered.add('mongodb');\n else filtered.add('@upstash/redis');\n if (runtimeDeps.has('mongodb')) filtered.add('mongodb');\n return filtered;\n}\n\nfunction buildDependenciesMap(projectRoot: string, deps: Set<string>): Record<string, string> {\n const projectPkg =\n readJsonFile<any>(path.join(projectRoot, 'package.json')) || {};\n const projectDeps: Record<string, string> = projectPkg.dependencies || {};\n const projectDevDeps: Record<string, string> = projectPkg.devDependencies || {};\n\n // Try to also source versions from workspace packages (ai-worker / ai-worker-cli)\n const repoRoot = findMonorepoRoot(projectRoot);\n const workerPkg =\n readJsonFile<any>(path.join(repoRoot, 'packages', 'ai-worker', 'package.json')) ||\n {};\n const workerCliPkg =\n readJsonFile<any>(\n path.join(repoRoot, 'packages', 'ai-worker-cli', 'package.json')\n ) || {};\n\n const workspaceDeps: Record<string, string> = {\n ...(workerPkg.dependencies || {}),\n ...(workerPkg.devDependencies || {}),\n ...(workerCliPkg.dependencies || {}),\n ...(workerCliPkg.devDependencies || {}),\n };\n\n const out: Record<string, string> = {};\n for (const dep of Array.from(deps).sort()) {\n const range =\n projectDeps[dep] ||\n projectDevDeps[dep] ||\n workspaceDeps[dep];\n // Only add deps that the project or workspace already declares (e.g. 
in package.json).\n // Skip subpath imports like @tokenlens/helpers that are not real packages and not in package.json.\n if (range) {\n out[dep] = String(range);\n }\n }\n\n return out;\n}\n\ninterface QueueStepInfo {\n workerId: string;\n delaySeconds?: number;\n mapInputFromPrev?: string;\n}\n\ninterface QueueInfo {\n id: string;\n filePath: string;\n steps: QueueStepInfo[];\n schedule?: string | { rate: string; enabled?: boolean; input?: Record<string, any> };\n}\n\ninterface WorkerInfo {\n id: string;\n filePath: string;\n // Module path WITHOUT extension and WITHOUT \".handler\" suffix.\n // Example: \"handlers/agents/test/test\"\n handlerPath: string;\n workerConfig?: {\n timeout?: number;\n memorySize?: number;\n layers?: string[];\n schedule?: any; // Schedule config: string, object, or array of either\n sqs?: {\n maxReceiveCount?: number;\n messageRetentionPeriod?: number;\n visibilityTimeout?: number;\n deadLetterMessageRetentionPeriod?: number;\n };\n };\n}\n\ninterface ServerlessConfig {\n service: string;\n custom?: Record<string, any>;\n package: {\n excludeDevDependencies: boolean;\n individually?: boolean;\n patterns: string[];\n };\n provider: {\n name: string;\n runtime: string;\n region: string;\n stage: string;\n versionFunctions?: boolean;\n environment: Record<string, string | Record<string, any>> | string;\n iam: {\n role: {\n statements: Array<{\n Effect: string;\n Action: string[];\n Resource: string | Array<string | Record<string, any>>;\n }>;\n };\n };\n };\n plugins: string[];\n functions: Record<string, any>;\n resources: {\n Resources: Record<string, any>;\n Outputs: Record<string, any>;\n };\n}\n\nexport function getServiceNameFromProjectId(projectId: string): string {\n const cleanedProjectId = projectId.replace(/-/g, '').slice(0, 15);\n return `p-${cleanedProjectId}`;\n}\n\n/**\n * Validates the environment and dependencies.\n */\nfunction validateEnvironment(): void {\n // We no longer strictly require global serverless since we'll install it locally in the temp dir\n // But we do need npm\n try {\n execSync('npm --version', { stdio: 'ignore' });\n } catch (error) {\n console.error(chalk.red('❌ npm is not installed or not in PATH.'));\n process.exit(1);\n }\n}\n\n/**\n * Scans for all *.worker.ts files in app/ai directory.\n */\nasync function scanWorkers(aiPath: string = 'app/ai'): Promise<WorkerInfo[]> {\n const pattern = path.join(aiPath, '**/*.worker.ts').replace(/\\\\/g, '/');\n const files = await glob(pattern);\n\n const workers: WorkerInfo[] = [];\n\n for (const filePath of files) {\n try {\n // Try to dynamically import the worker file to get the actual workerConfig\n // This is more reliable than parsing the file as text\n let workerConfig: WorkerInfo['workerConfig'] | undefined;\n let workerId: string | undefined;\n\n // For now, just extract the ID using regex\n // We'll import the workerConfig from the bundled handlers later\n\n // Fallback to regex parsing if import didn't work\n if (!workerId) {\n const content = fs.readFileSync(filePath, 'utf-8');\n // Match createWorker with optional type parameters: createWorker<...>({ id: '...' })\n // or createWorker({ id: '...' 
})\n const idMatch = content.match(/createWorker\\s*(?:<[^>]+>)?\\s*\\(\\s*\\{[\\s\\S]*?id:\\s*['\"]([^'\"]+)['\"]/);\n if (!idMatch) {\n console.warn(chalk.yellow(`⚠️ Skipping ${filePath}: No worker ID found`));\n continue;\n }\n workerId = idMatch[1];\n }\n\n // Generate handler path (relative to serverless root)\n // Convert app/ai/agents/my-worker.worker.ts -> handlers/my-worker\n const relativePath = path.relative(aiPath, filePath);\n const handlerDir = path.dirname(relativePath);\n const handlerName = path.basename(relativePath, '.worker.ts');\n const handlerPath = path.join('handlers', handlerDir, `${handlerName}`).replace(/\\\\/g, '/');\n\n workers.push({\n id: workerId,\n filePath,\n handlerPath,\n workerConfig,\n });\n } catch (error) {\n console.error(chalk.red(`❌ Error processing ${filePath}:`), error);\n }\n }\n\n return workers;\n}\n\n/**\n * Scans for *.queue.ts files and parses defineWorkerQueue configs.\n */\nasync function scanQueues(aiPath: string = 'app/ai'): Promise<QueueInfo[]> {\n const base = aiPath.replace(/\\\\/g, '/');\n const pattern = `${base}/queues/**/*.queue.ts`;\n const files = await glob(pattern);\n\n const queues: QueueInfo[] = [];\n\n for (const filePath of files) {\n try {\n const content = fs.readFileSync(filePath, 'utf-8');\n // Match defineWorkerQueue({ id: '...', steps: [...], schedule?: ... })\n const idMatch = content.match(/defineWorkerQueue\\s*\\(\\s*\\{[\\s\\S]*?id:\\s*['\"]([^'\"]+)['\"]/);\n if (!idMatch) {\n console.warn(chalk.yellow(`⚠️ Skipping ${filePath}: No queue id found in defineWorkerQueue`));\n continue;\n }\n const queueId = idMatch[1];\n\n const steps: QueueStepInfo[] = [];\n const stepsMatch = content.match(/steps:\\s*\\[([\\s\\S]*?)\\]/);\n if (stepsMatch) {\n const stepsStr = stepsMatch[1];\n const stepRegex = /\\{\\s*workerId:\\s*['\"]([^'\"]+)['\"](?:,\\s*delaySeconds:\\s*(\\d+))?(?:,\\s*mapInputFromPrev:\\s*['\"]([^'\"]+)['\"])?\\s*\\}/g;\n let m;\n while ((m = stepRegex.exec(stepsStr)) !== null) {\n steps.push({\n workerId: m[1],\n delaySeconds: m[2] ? parseInt(m[2], 10) : undefined,\n mapInputFromPrev: m[3],\n });\n }\n }\n\n let schedule: QueueInfo['schedule'];\n const scheduleStrMatch = content.match(/schedule:\\s*['\"]([^'\"]+)['\"]/);\n const scheduleObjMatch = content.match(/schedule:\\s*(\\{[^}]+(?:\\{[^}]*\\}[^}]*)*\\})/);\n if (scheduleStrMatch) {\n schedule = scheduleStrMatch[1];\n } else if (scheduleObjMatch) {\n try {\n schedule = new Function('return ' + scheduleObjMatch[1])();\n } catch {\n schedule = undefined;\n }\n }\n\n queues.push({ id: queueId, filePath, steps, schedule });\n } catch (error) {\n console.error(chalk.red(`❌ Error processing ${filePath}:`), error);\n }\n }\n\n return queues;\n}\n\n/**\n * Generates the queue registry module for runtime lookup.\n */\nfunction generateQueueRegistry(queues: QueueInfo[], outputDir: string, projectRoot: string): void {\n const generatedDir = path.join(outputDir, 'generated');\n if (!fs.existsSync(generatedDir)) {\n fs.mkdirSync(generatedDir, { recursive: true });\n }\n\n const registryContent = `/**\n * Auto-generated queue registry. 
DO NOT EDIT.\n * Generated by @microfox/ai-worker-cli from .queue.ts files.\n */\n\nconst QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};\n\nexport function getQueueById(queueId) {\n return QUEUES.find((q) => q.id === queueId);\n}\n\nexport function getNextStep(queueId, stepIndex) {\n const queue = getQueueById(queueId);\n if (!queue || !queue.steps || stepIndex < 0 || stepIndex >= queue.steps.length - 1) {\n return undefined;\n }\n const step = queue.steps[stepIndex + 1];\n return step ? { workerId: step.workerId, delaySeconds: step.delaySeconds, mapInputFromPrev: step.mapInputFromPrev } : undefined;\n}\n\nexport function invokeMapInput(_queueId, _stepIndex, prevOutput, _initialInput) {\n return prevOutput;\n}\n`;\n\n const registryPath = path.join(generatedDir, 'workerQueues.registry.js');\n fs.writeFileSync(registryPath, registryContent);\n console.log(chalk.green(`✓ Generated queue registry: ${registryPath}`));\n\n // Note: For dispatchQueue in app (e.g. Vercel), use in-memory registry:\n // app/ai/queues/registry.ts imports from .queue.ts and exports queueRegistry.\n}\n\n/**\n * Returns worker IDs that participate in any queue (for wrapping and callee injection).\n */\nfunction getWorkersInQueues(queues: QueueInfo[]): Set<string> {\n const set = new Set<string>();\n for (const q of queues) {\n for (const step of q.steps) {\n set.add(step.workerId);\n }\n }\n return set;\n}\n\n/**\n * Merges queue next-step worker IDs into calleeIds so WORKER_QUEUE_URL_* gets injected.\n */\nfunction mergeQueueCallees(\n calleeIds: Map<string, Set<string>>,\n queues: QueueInfo[],\n workers: WorkerInfo[]\n): Map<string, Set<string>> {\n const merged = new Map(calleeIds);\n const workerIds = new Set(workers.map((w) => w.id));\n\n for (const queue of queues) {\n for (let i = 0; i < queue.steps.length - 1; i++) {\n const fromWorkerId = queue.steps[i].workerId;\n const toWorkerId = queue.steps[i + 1].workerId;\n if (!workerIds.has(toWorkerId)) continue;\n let callees = merged.get(fromWorkerId);\n if (!callees) {\n callees = new Set<string>();\n merged.set(fromWorkerId, callees);\n }\n callees.add(toWorkerId);\n }\n }\n return merged;\n}\n\n/**\n * Generates Lambda handler entrypoints for each worker.\n */\nasync function generateHandlers(\n workers: WorkerInfo[],\n outputDir: string,\n queues: QueueInfo[] = []\n): Promise<void> {\n const handlersDir = path.join(outputDir, 'handlers');\n const workersInQueues = getWorkersInQueues(queues);\n\n // Ensure handlers directory exists and is clean\n if (fs.existsSync(handlersDir)) {\n fs.rmSync(handlersDir, { recursive: true, force: true });\n }\n fs.mkdirSync(handlersDir, { recursive: true });\n\n for (const worker of workers) {\n // Create directory structure\n // We output JS files now, so change extension in path\n const handlerFile = path.join(handlersDir, worker.handlerPath.replace('handlers/', '') + '.js');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n // Generate handler entrypoint\n // Convert app/ai/agents/my-worker.worker.ts to import path\n // We need relative path from .serverless-workers/handlers/agent/ to original source\n // Original: /path/to/project/app/ai/agents/my-worker.worker.ts\n // Handler: /path/to/project/.serverless-workers/handlers/agent/my-worker.handler.ts\n // Import should look like: ../../../app/ai/agents/my-worker.worker\n\n const handlerAbsPath = path.resolve(handlerFile);\n 
const workerAbsPath = path.resolve(worker.filePath);\n\n // Calculate relative path from handler directory to worker file\n let relativeImportPath = path.relative(path.dirname(handlerAbsPath), workerAbsPath);\n\n // Ensure it starts with ./ or ../\n if (!relativeImportPath.startsWith('.')) {\n relativeImportPath = './' + relativeImportPath;\n }\n\n // Remove extension for import\n relativeImportPath = relativeImportPath.replace(/\\.ts$/, '');\n // Normalize slashes for Windows\n relativeImportPath = relativeImportPath.split(path.sep).join('/');\n\n // Detect export: \"export default createWorker\" vs \"export const X = createWorker\"\n const fileContent = fs.readFileSync(worker.filePath, 'utf-8');\n const defaultExport = /export\\s+default\\s+createWorker/.test(fileContent);\n const exportMatch = fileContent.match(/export\\s+(const|let)\\s+(\\w+)\\s*=\\s*createWorker/);\n const exportName = exportMatch ? exportMatch[2] : 'worker';\n\n // 1. Create a temporary TS entrypoint\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n\n const workerRef = defaultExport\n ? 'workerModule.default'\n : `workerModule.${exportName}`;\n\n const inQueue = workersInQueues.has(worker.id);\n const registryRelPath = path\n .relative(path.dirname(path.resolve(handlerFile)), path.join(outputDir, 'generated', 'workerQueues.registry'))\n .split(path.sep)\n .join('/');\n const registryImportPath = registryRelPath.startsWith('.') ? registryRelPath : './' + registryRelPath;\n\n const handlerCreation = inQueue\n ? `\nimport { createLambdaHandler, wrapHandlerForQueue } from '@microfox/ai-worker/handler';\nimport * as queueRegistry from '${registryImportPath}';\nimport * as workerModule from '${relativeImportPath}';\n\nconst WORKER_LOG_PREFIX = '[WorkerEntrypoint]';\n\nconst workerAgent = ${workerRef};\nif (!workerAgent || typeof workerAgent.handler !== 'function') {\n throw new Error('Worker module must export a createWorker result (default or named) with .handler');\n}\n\nconst queueRuntime = {\n getNextStep: queueRegistry.getNextStep,\n invokeMapInput: queueRegistry.invokeMapInput,\n};\nconst wrappedHandler = wrapHandlerForQueue(workerAgent.handler, queueRuntime);\n\nconst baseHandler = createLambdaHandler(wrappedHandler, workerAgent.outputSchema);\n\nexport const handler = async (event: any, context: any) => {\n const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;\n try {\n console.log(WORKER_LOG_PREFIX, {\n workerId: workerAgent.id,\n inQueue: true,\n records,\n requestId: (context as any)?.awsRequestId,\n });\n } catch {\n // Best-effort logging only\n }\n return baseHandler(event, context);\n};\n\nexport const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;\n`\n : `\nimport { createLambdaHandler } from '@microfox/ai-worker/handler';\nimport * as workerModule from '${relativeImportPath}';\n\nconst WORKER_LOG_PREFIX = '[WorkerEntrypoint]';\n\nconst workerAgent = ${workerRef};\nif (!workerAgent || typeof workerAgent.handler !== 'function') {\n throw new Error('Worker module must export a createWorker result (default or named) with .handler');\n}\n\nconst baseHandler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);\n\nexport const handler = async (event: any, context: any) => {\n const records = Array.isArray((event as any)?.Records) ? 
(event as any).Records.length : 0;\n try {\n console.log(WORKER_LOG_PREFIX, {\n workerId: workerAgent.id,\n inQueue: false,\n records,\n requestId: (context as any)?.awsRequestId,\n });\n } catch {\n // Best-effort logging only\n }\n return baseHandler(event, context);\n};\n\nexport const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;\n`;\n\n const tempEntryContent = handlerCreation;\n fs.writeFileSync(tempEntryFile, tempEntryContent);\n\n // 2. Bundle using esbuild\n try {\n // Plugin to fix lazy-cache issue where forOwn is not properly added to utils\n // The issue: require_for_own() is called directly instead of through the lazy-cache proxy\n const fixLazyCachePlugin: esbuild.Plugin = {\n name: 'fix-lazy-cache',\n setup(build) {\n build.onEnd(async (result) => {\n if (result.errors.length > 0) return;\n\n // Read the bundled file\n let bundledCode = fs.readFileSync(handlerFile, 'utf-8');\n let modified = false;\n\n // Fix the lazy-cache pattern in clone-deep/utils.js\n // Pattern: require_for_own(); should be require(\"for-own\", \"forOwn\");\n // This ensures forOwn is properly added to the utils object via lazy-cache\n // Match the pattern more flexibly to handle different whitespace\n const pattern = /(require\\(\"kind-of\",\\s*\"typeOf\"\\);\\s*)require_for_own\\(\\);/g;\n\n if (pattern.test(bundledCode)) {\n bundledCode = bundledCode.replace(\n pattern,\n '$1require(\"for-own\", \"forOwn\");'\n );\n modified = true;\n }\n\n // Fix (0, import_node_module.createRequire)(import_meta.url) - esbuild emits import_meta.url\n // which is undefined in CJS Lambda. Polyfill so createRequire gets a valid file URL.\n if (bundledCode.includes('import_meta.url')) {\n bundledCode = bundledCode.replace(\n /import_meta\\.url/g,\n 'require(\"url\").pathToFileURL(__filename).href'\n );\n modified = true;\n }\n\n // Fix createRequire(undefined) / createRequire(void 0) if any dependency emits that\n const beforeCreateRequire = bundledCode;\n bundledCode = bundledCode.replace(\n /\\bcreateRequire\\s*\\(\\s*(?:undefined|void\\s*0)\\s*\\)/g,\n 'createRequire(require(\"url\").pathToFileURL(__filename).href)'\n );\n if (bundledCode !== beforeCreateRequire) modified = true;\n\n if (modified) {\n fs.writeFileSync(handlerFile, bundledCode, 'utf-8');\n }\n });\n },\n };\n\n await esbuild.build({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n format: 'cjs',\n outfile: handlerFile,\n // We exclude aws-sdk as it's included in Lambda runtime\n // We exclude canvas because it's a binary dependency often problematic in bundling\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n // Force lazy-cache to eagerly load modules during bundling\n // This prevents runtime dynamic require() calls that fail in bundled code\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n // Force bundling of all packages to avoid runtime module resolution issues\n // This ensures clone-deep, lazy-cache, and all transitive deps are bundled\n packages: 'bundle',\n plugins: [fixLazyCachePlugin],\n logLevel: 'error',\n });\n\n // 3. 
Cleanup temp file\n fs.unlinkSync(tempEntryFile);\n\n } catch (error) {\n console.error(chalk.red(`Error bundling handler for ${worker.id}:`), error);\n // Don't delete temp file on error for debugging\n }\n }\n console.log(chalk.green(`✓ Generated ${workers.length} bundled handlers`));\n}\n\nfunction generateDocsHandler(outputDir: string, serviceName: string, stage: string, region: string): void {\n const handlerFile = path.join(outputDir, 'handlers', 'docs.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated docs handler for Microfox compatibility\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n // Return OpenAPI JSON for Microfox\n const openapi = {\n openapi: '3.0.3',\n info: {\n title: 'AI Worker Service',\n version: '1.0.0',\n description: 'Auto-generated OpenAPI for background workers service',\n },\n servers: [\n {\n url: 'https://{apiId}.execute-api.{region}.amazonaws.com/{stage}',\n variables: {\n apiId: { default: 'REPLACE_ME' },\n region: { default: '${region}' },\n stage: { default: '${stage}' },\n },\n },\n ],\n paths: {\n '/docs.json': {\n get: {\n operationId: 'getDocs',\n summary: 'Get OpenAPI schema',\n responses: {\n '200': {\n description: 'OpenAPI JSON',\n content: {\n 'application/json': {\n schema: { type: 'object' },\n },\n },\n },\n },\n },\n },\n '/workers/config': {\n get: {\n operationId: 'getWorkersConfig',\n summary: 'Get workers config (queue urls map)',\n parameters: [\n {\n name: 'x-workers-config-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key header (if configured)',\n },\n ],\n responses: {\n '200': {\n description: 'Workers config map',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n version: { type: 'string' },\n stage: { type: 'string' },\n region: { type: 'string' },\n workers: { type: 'object' },\n },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n '/workers/trigger': {\n post: {\n operationId: 'triggerWorker',\n summary: 'Trigger a worker by sending a raw SQS message body',\n parameters: [\n {\n name: 'workerId',\n in: 'query',\n required: false,\n schema: { type: 'string' },\n description: 'Worker ID (can also be provided in JSON body as workerId)',\n },\n {\n name: 'x-workers-trigger-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key header (if configured)',\n },\n ],\n requestBody: {\n required: true,\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n workerId: { type: 'string' },\n // Prefer sending the exact SQS message body your worker expects\n body: { type: 'object' },\n messageBody: { type: 'string' },\n },\n additionalProperties: true,\n },\n },\n },\n },\n responses: {\n '200': {\n description: 'Enqueued',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n ok: { type: 'boolean' },\n workerId: { type: 'string' },\n stage: { type: 'string' },\n queueName: { type: 
'string' },\n queueUrl: { type: 'string' },\n messageId: { type: 'string' },\n },\n },\n },\n },\n },\n '400': {\n description: 'Bad request',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n },\n 'x-service': {\n serviceName: '${serviceName}',\n stage: '${stage}',\n region: '${region}',\n },\n };\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(openapi, null, 2),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated docs.json handler`));\n}\n\nfunction generateTriggerHandler(outputDir: string, serviceName: string): void {\n const handlerFile = path.join(outputDir, 'handlers', 'workers-trigger.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated worker trigger handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';\n\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nfunction jsonResponse(statusCode: number, body: any): APIGatewayProxyResult {\n return {\n statusCode,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(body),\n };\n}\n\nexport const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {\n // Optional API key\n const apiKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'];\n if (providedKey !== apiKey) {\n return jsonResponse(401, { error: 'Unauthorized' });\n }\n }\n\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n const qsWorkerId = event.queryStringParameters?.workerId;\n\n let parsedBody: any = undefined;\n if (event.body) {\n try {\n parsedBody = JSON.parse(event.body);\n } catch {\n parsedBody = undefined;\n }\n }\n\n const workerId = (parsedBody && parsedBody.workerId) || qsWorkerId;\n if (!workerId || typeof workerId !== 'string') {\n return jsonResponse(400, { error: 'workerId is required (query param workerId or JSON body workerId)' });\n }\n\n // Prefer JSON body fields, otherwise send raw event.body\n let messageBody: string | undefined;\n if (parsedBody && typeof parsedBody.messageBody === 'string') {\n messageBody = parsedBody.messageBody;\n } else if (parsedBody && parsedBody.body !== undefined) {\n messageBody = typeof parsedBody.body === 'string' ? 
parsedBody.body : JSON.stringify(parsedBody.body);\n } else if (event.body) {\n messageBody = event.body;\n }\n\n if (!messageBody) {\n return jsonResponse(400, { error: 'body/messageBody is required' });\n }\n\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n const sqs = new SQSClient({ region });\n\n let queueUrl: string;\n try {\n const urlRes = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (!urlRes.QueueUrl) {\n return jsonResponse(404, { error: 'Queue URL not found', queueName });\n }\n queueUrl = String(urlRes.QueueUrl);\n } catch (e: any) {\n return jsonResponse(404, { error: 'Queue does not exist or not accessible', queueName, message: String(e?.message || e) });\n }\n\n try {\n const sendRes = await sqs.send(new SendMessageCommand({ QueueUrl: queueUrl, MessageBody: messageBody }));\n return jsonResponse(200, {\n ok: true,\n workerId,\n stage,\n queueName,\n queueUrl,\n messageId: sendRes.MessageId || null,\n });\n } catch (e: any) {\n return jsonResponse(500, { error: 'Failed to send message', message: String(e?.message || e) });\n }\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle',\n logLevel: 'error',\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated /workers/trigger handler`));\n}\n\n/**\n * Generates queue-starter Lambda for a scheduled queue.\n * When the schedule fires, it sends the first worker a message with __workerQueue context.\n */\nfunction generateQueueStarterHandler(\n outputDir: string,\n queue: QueueInfo,\n serviceName: string\n): void {\n const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, '');\n const handlerFile = path.join(outputDir, 'handlers', `queue-starter-${safeId}.js`);\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const firstWorkerId = queue.steps[0]?.workerId;\n if (!firstWorkerId) return;\n\n const handlerContent = `/**\n * Auto-generated queue-starter for queue \"${queue.id}\"\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { ScheduledHandler } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';\n\nconst QUEUE_ID = ${JSON.stringify(queue.id)};\nconst FIRST_WORKER_ID = ${JSON.stringify(firstWorkerId)};\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nexport const handler: ScheduledHandler = async () => {\n const stage = process.env.ENVIRONMENT || process.env.STAGE || 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n const queueName = \\`\\${SERVICE_NAME}-\\${FIRST_WORKER_ID}-\\${stage}\\`;\n\n const sqs = new SQSClient({ region });\n const { QueueUrl } = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (!QueueUrl) {\n throw new Error('Queue URL not found: ' + queueName);\n }\n\n const jobId = 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);\n const initialInput = {};\n const messageBody = {\n workerId: FIRST_WORKER_ID,\n jobId,\n input: {\n ...initialInput,\n __workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput },\n },\n context: {},\n metadata: { 
__workerQueue: { id: QUEUE_ID, stepIndex: 0, initialInput } },\n timestamp: new Date().toISOString(),\n };\n\n await sqs.send(new SendMessageCommand({\n QueueUrl,\n MessageBody: JSON.stringify(messageBody),\n }));\n\n console.log('[queue-starter] Dispatched first worker for queue:', { queueId: QUEUE_ID, jobId, workerId: FIRST_WORKER_ID });\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: ['aws-sdk', 'canvas', '@microfox/puppeteer-sls', '@sparticuz/chromium'],\n packages: 'bundle',\n logLevel: 'error',\n });\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated queue-starter for ${queue.id}`));\n}\n\n/**\n * Generates workers-config Lambda handler.\n */\nfunction generateWorkersConfigHandler(\n outputDir: string,\n workers: WorkerInfo[],\n serviceName: string,\n queues: QueueInfo[] = []\n): void {\n // We'll bundle this one too\n const handlerFile = path.join(outputDir, 'handlers', 'workers-config.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n // Ensure handlers directory exists and is clean for config handler\n if (fs.existsSync(handlerDir) && !fs.existsSync(handlerFile)) {\n // Don't wipe if we already cleaned it in generateHandlers, unless it's a diff dir\n } else if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated workers-config Lambda handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';\n\n// Worker IDs and queue definitions embedded at build time.\nconst WORKER_IDS: string[] = ${JSON.stringify(workers.map(w => w.id), null, 2)};\nconst QUEUES = ${JSON.stringify(queues.map(q => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n// ... 
same logic ...\n // Check API key if configured\n const apiKey = process.env.WORKERS_CONFIG_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-config-key'] || event.headers['X-Workers-Config-Key'];\n if (providedKey !== apiKey) {\n return {\n statusCode: 401,\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ error: 'Unauthorized' }),\n };\n }\n }\n\n // Stage resolution:\n // - Prefer API Gateway stage (microfox tends to deploy APIs on \"prod\")\n // - Fallback to ENVIRONMENT/STAGE env vars\n // - Default to \"prod\" (safer for microfox) if nothing else is set\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n // Resolve queue URLs dynamically via SQS so we return actual URLs.\n // NOTE: Node 20 Lambda runtime does NOT guarantee 'aws-sdk' v2 is available.\n // We use AWS SDK v3 and bundle it into this handler.\n const sqs = new SQSClient({ region });\n const workers: Record<string, { queueUrl: string; region: string }> = {};\n const attemptedQueueNames: string[] = [];\n const errors: Array<{ workerId: string; queueName: string; message: string; name?: string }> = [];\n const debug = event.queryStringParameters?.debug === '1' || event.queryStringParameters?.debug === 'true';\n\n await Promise.all(\n WORKER_IDS.map(async (workerId) => {\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n attemptedQueueNames.push(queueName);\n try {\n const result = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (result?.QueueUrl) {\n workers[workerId] = { queueUrl: String(result.QueueUrl), region };\n }\n } catch (e) {\n const err = e as any;\n const message = String(err?.message || err || 'Unknown error');\n const name = err?.name ? String(err.name) : undefined;\n // Log so CloudWatch shows what's going on (nonexistent queue vs permission vs region).\n console.error('[workers-config] getQueueUrl failed', { workerId, queueName, name, message });\n errors.push({ workerId, queueName, name, message });\n }\n })\n );\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify({\n version: '1.0.0',\n stage,\n region,\n workers,\n queues: QUEUES,\n ...(debug ? 
{ attemptedQueueNames, errors } : {}),\n }),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated workers-config handler`));\n}\n\n/**\n * Reads environment variables from .env file.\n */\nfunction loadEnvVars(envPath: string = '.env'): Record<string, string> {\n const env: Record<string, string> = {};\n\n if (!fs.existsSync(envPath)) {\n console.warn(chalk.yellow(`⚠️ .env file not found at ${envPath}`));\n return env;\n }\n\n const content = fs.readFileSync(envPath, 'utf-8');\n const lines = content.split('\\n');\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed || trimmed.startsWith('#')) continue;\n\n const match = trimmed.match(/^([^=]+)=(.*)$/);\n if (match) {\n const key = match[1].trim();\n const value = match[2].trim().replace(/^[\"']|[\"']$/g, '');\n env[key] = value;\n }\n }\n\n return env;\n}\n\n/**\n * Converts schedule configuration to serverless.yml schedule event format.\n * Supports simple strings, configuration objects, and arrays of both.\n */\nfunction processScheduleEvents(scheduleConfig: any): any[] {\n if (!scheduleConfig) {\n return [];\n }\n\n const events: any[] = [];\n\n // Normalize to array\n const schedules = Array.isArray(scheduleConfig) ? scheduleConfig : [scheduleConfig];\n\n for (const schedule of schedules) {\n // Simple string format: 'rate(2 hours)' or 'cron(0 12 * * ? *)'\n if (typeof schedule === 'string') {\n events.push({\n schedule: schedule,\n });\n continue;\n }\n\n // Full configuration object\n if (typeof schedule === 'object' && schedule !== null) {\n const scheduleEvent: any = { schedule: {} };\n\n // Handle rate - can be string or array of strings\n if (schedule.rate) {\n if (Array.isArray(schedule.rate)) {\n // Multiple rate expressions\n scheduleEvent.schedule.rate = schedule.rate;\n } else {\n // Single rate expression\n scheduleEvent.schedule.rate = schedule.rate;\n }\n } else {\n // If no rate specified but we have a schedule object, skip it\n continue;\n }\n\n // Optional fields\n if (schedule.enabled !== undefined) {\n scheduleEvent.schedule.enabled = schedule.enabled;\n }\n if (schedule.input !== undefined) {\n scheduleEvent.schedule.input = schedule.input;\n }\n if (schedule.inputPath !== undefined) {\n scheduleEvent.schedule.inputPath = schedule.inputPath;\n }\n if (schedule.inputTransformer !== undefined) {\n scheduleEvent.schedule.inputTransformer = schedule.inputTransformer;\n }\n if (schedule.name !== undefined) {\n scheduleEvent.schedule.name = schedule.name;\n }\n if (schedule.description !== undefined) {\n scheduleEvent.schedule.description = schedule.description;\n }\n if (schedule.method !== undefined) {\n scheduleEvent.schedule.method = schedule.method;\n }\n if (schedule.timezone !== undefined) {\n scheduleEvent.schedule.timezone = schedule.timezone;\n }\n\n // If schedule object only has rate (or is minimal), we can simplify it\n // Serverless Framework accepts both { schedule: 'rate(...)' } and { schedule: { rate: 'rate(...)' } }\n if (Object.keys(scheduleEvent.schedule).length === 1 && scheduleEvent.schedule.rate) {\n // Simplify to string format if it's just a single rate\n if 
(typeof scheduleEvent.schedule.rate === 'string') {\n events.push({\n schedule: scheduleEvent.schedule.rate,\n });\n } else {\n // Keep object format for arrays\n events.push(scheduleEvent);\n }\n } else {\n events.push(scheduleEvent);\n }\n }\n }\n\n return events;\n}\n\n/**\n * Generates serverless.yml configuration.\n */\nfunction generateServerlessConfig(\n workers: WorkerInfo[],\n stage: string,\n region: string,\n envVars: Record<string, string>,\n serviceName: string,\n calleeIds: Map<string, Set<string>> = new Map(),\n queues: QueueInfo[] = []\n): ServerlessConfig {\n // Create SQS queues for each worker\n const resources: ServerlessConfig['resources'] = {\n Resources: {},\n Outputs: {},\n };\n\n const queueArns: Array<string | Record<string, any>> = [];\n\n // Update provider environment to use file(env.json)\n const providerEnvironment: any = {\n STAGE: stage,\n NODE_ENV: stage,\n };\n\n // Custom configuration including serverless-offline\n const customConfig: Record<string, any> = {\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n 'serverless-offline': {\n httpPort: 4000,\n lambdaPort: 4002,\n useChildProcesses: true,\n useWorkerThreads: true,\n noCookieValidation: true,\n allowCache: true,\n hideStackTraces: false,\n disableCookieValidation: true,\n noTimeout: true,\n environment: '\\${file(env.json)}',\n }\n };\n\n for (const worker of workers) {\n const queueName = `WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}`;\n const queueLogicalId = `${queueName}${stage}`;\n const dlqLogicalId = `${queueName}DLQ${stage}`;\n\n const sqsCfg = worker.workerConfig?.sqs;\n const retention =\n typeof sqsCfg?.messageRetentionPeriod === 'number'\n ? sqsCfg.messageRetentionPeriod\n : 1209600; // 14 days\n const dlqRetention =\n typeof sqsCfg?.deadLetterMessageRetentionPeriod === 'number'\n ? sqsCfg.deadLetterMessageRetentionPeriod\n : retention;\n const visibilityTimeout =\n typeof sqsCfg?.visibilityTimeout === 'number'\n ? sqsCfg.visibilityTimeout\n : (worker.workerConfig?.timeout || 300) + 60; // Add buffer\n const maxReceiveCountRaw =\n typeof sqsCfg?.maxReceiveCount === 'number' ? 
sqsCfg.maxReceiveCount : 1;\n // SQS does not support 0; treat <=0 as 1.\n const maxReceiveCount = Math.max(1, Math.floor(maxReceiveCountRaw));\n\n // DLQ (always create so we can support \"no retries\" mode safely)\n resources.Resources[dlqLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n QueueName: `\\${self:service}-${worker.id}-dlq-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n MessageRetentionPeriod: dlqRetention,\n },\n };\n\n resources.Resources[queueLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n // Use ${self:service} to avoid hardcoding service name\n QueueName: `\\${self:service}-${worker.id}-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n VisibilityTimeout: visibilityTimeout,\n MessageRetentionPeriod: retention,\n RedrivePolicy: {\n deadLetterTargetArn: { 'Fn::GetAtt': [dlqLogicalId, 'Arn'] },\n maxReceiveCount,\n },\n },\n };\n\n resources.Outputs[`${queueLogicalId}Url`] = {\n Description: `Queue URL for worker ${worker.id}`,\n Value: { Ref: queueLogicalId },\n Export: {\n Name: `\\${self:service}-${worker.id}-queue-url`,\n },\n };\n\n queueArns.push({ 'Fn::GetAtt': [queueLogicalId, 'Arn'] });\n }\n\n // Create functions for each worker\n const functions: Record<string, any> = {};\n\n for (const worker of workers) {\n const functionName = `worker${worker.id.replace(/[^a-zA-Z0-9]/g, '')}`;\n\n // Start with SQS event (default)\n const events: any[] = [\n {\n sqs: {\n arn: { 'Fn::GetAtt': [`WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}${stage}`, 'Arn'] },\n batchSize: 1,\n },\n },\n ];\n\n // Add schedule events if configured\n if (worker.workerConfig?.schedule) {\n const scheduleEvents = processScheduleEvents(worker.workerConfig.schedule);\n events.push(...scheduleEvents);\n }\n\n functions[functionName] = {\n // IMPORTANT: Keep AWS handler string to exactly one dot: \"<modulePath>.handler\"\n handler: `${worker.handlerPath}.handler`,\n timeout: worker.workerConfig?.timeout || 300,\n memorySize: worker.workerConfig?.memorySize || 512,\n events,\n };\n\n if (worker.workerConfig?.layers?.length) {\n functions[functionName].layers = worker.workerConfig.layers;\n }\n\n // Per-function env: queue URLs for workers this Lambda calls (ctx.dispatchWorker)\n const callees = calleeIds.get(worker.id);\n if (callees && callees.size > 0) {\n const env: Record<string, any> = {};\n for (const calleeId of callees) {\n const calleeWorker = workers.find((w) => w.id === calleeId);\n if (calleeWorker) {\n const queueLogicalId = `WorkerQueue${calleeWorker.id.replace(/[^a-zA-Z0-9]/g, '')}${stage}`;\n const envKey = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeId)}`;\n env[envKey] = { Ref: queueLogicalId };\n }\n }\n if (Object.keys(env).length > 0) {\n functions[functionName].environment = env;\n }\n }\n }\n\n // Add docs.json function for Microfox compatibility\n functions['getDocs'] = {\n handler: 'handlers/docs.handler',\n events: [\n {\n http: {\n path: '/docs.json',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers trigger endpoint (HTTP -> SQS SendMessage)\n functions['triggerWorker'] = {\n handler: 'handlers/workers-trigger.handler',\n events: [\n {\n http: {\n path: '/workers/trigger',\n method: 'POST',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers-config function\n functions['workersConfig'] = {\n handler: 'handlers/workers-config.handler',\n events: [\n {\n http: {\n path: 'workers/config',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // Add queue-starter functions for scheduled queues\n for (const queue of queues) {\n 
if (queue.schedule) {\n const safeId = queue.id.replace(/[^a-zA-Z0-9]/g, '');\n const fnName = `queueStarter${safeId}`;\n const scheduleEvents = processScheduleEvents(queue.schedule);\n functions[fnName] = {\n handler: `handlers/queue-starter-${safeId}.handler`,\n timeout: 60,\n memorySize: 128,\n events: scheduleEvents,\n };\n }\n }\n\n // Filter env vars - only include safe ones (exclude secrets that should be in AWS Secrets Manager)\n const safeEnvVars: Record<string, string> = {};\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'UPSTASH_', 'WORKER_', 'WORKERS_', 'WORKFLOW_', 'REMOTION_', 'QUEUE_JOB_', 'DEBUG_WORKER_QUEUES'];\n\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n\n for (const [key, value] of Object.entries(envVars)) {\n if (allowedPrefixes.some(prefix => key.startsWith(prefix))) {\n safeEnvVars[key] = value;\n }\n }\n\n // Add ApiEndpoints output for Microfox\n resources.Outputs['ApiEndpoints'] = {\n Description: \"API Endpoints\",\n Value: {\n \"Fn::Join\": [\n \"\",\n [\n \"API: https://\",\n { \"Ref\": \"ApiGatewayRestApi\" },\n \".execute-api.\",\n { \"Ref\": \"AWS::Region\" },\n `.amazonaws.com/\\${env:ENVIRONMENT, '${stage}'}`\n ]\n ]\n }\n };\n\n return {\n service: serviceName,\n package: {\n excludeDevDependencies: true,\n individually: true,\n // Handlers are fully bundled by esbuild (packages: 'bundle'); exclude node_modules to stay under Lambda 250 MB limit\n patterns: [\n '!venv/**',\n '!.idea/**',\n '!.vscode/**',\n '!src/**',\n '!node_modules/**',\n '!node_modules/serverless-offline/**',\n '!node_modules/typescript/**',\n '!node_modules/@types/**',\n '!node_modules/aws-sdk/**',\n '!node_modules/@aws-sdk/**'\n ],\n },\n custom: customConfig,\n provider: {\n name: 'aws',\n runtime: 'nodejs20.x',\n region,\n versionFunctions: false,\n // Use ENVIRONMENT from env.json to drive the actual deployed stage (Microfox defaults to prod).\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n environment: '\\${file(env.json)}',\n iam: {\n role: {\n statements: [\n {\n Effect: 'Allow',\n Action: [\n 'sqs:SendMessage',\n 'sqs:ReceiveMessage',\n 'sqs:DeleteMessage',\n 'sqs:GetQueueAttributes',\n ],\n Resource: queueArns,\n },\n {\n Effect: 'Allow',\n Action: ['sqs:GetQueueUrl'],\n // GetQueueUrl is not resource-scoped for unknown queue ARNs, must be '*'\n Resource: '*',\n }\n ],\n },\n },\n },\n plugins: ['serverless-offline'],\n functions,\n resources,\n };\n}\n\n/**\n * Resolves queue URLs after deployment and generates workers-map.generated.ts\n */\nasync function generateWorkersMap(\n stage: string,\n region: string,\n outputDir: string\n): Promise<void> {\n const serverlessDir = path.join(outputDir, '.serverless');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: true });\n }\n\n // Need to scan workers again to get IDs for map generation\n // Or we could save this metadata in the build step.\n // For now, re-scanning is fine.\n const workers = await scanWorkers();\n\n // Try to read CloudFormation outputs\n const stackName = `ai-router-workers-${stage}-${stage}`;\n let queueUrls: Record<string, { queueUrl: string; region: string }> = {};\n\n const spinner = ora('Fetching CloudFormation outputs...').start();\n\n try {\n // Use AWS CLI to get stack outputs\n const output = execSync(\n `aws cloudformation describe-stacks --stack-name ${stackName} --region ${region} --query \"Stacks[0].Outputs\" 
--output json`,\n { encoding: 'utf-8', stdio: 'pipe' }\n );\n\n const outputs = JSON.parse(output);\n const outputMap: Record<string, string> = {};\n\n for (const output of outputs) {\n const key = output.OutputKey;\n if (key && key.endsWith('Url')) {\n const workerId = key.replace('WorkerQueue', '').replace('Url', '').toLowerCase();\n // The workerId from CF output might have stripped characters, need fuzzy match or consistent naming\n // Currently we use replace(/[^a-zA-Z0-9]/g, '') in CF output name\n outputMap[key] = output.OutputValue;\n }\n }\n\n // Match workers to queue URLs\n for (const worker of workers) {\n const sanitizedId = worker.id.replace(/[^a-zA-Z0-9]/g, '');\n const queueKey = `WorkerQueue${sanitizedId}${stage}Url`;\n\n // Look for key ending with this pattern to handle casing issues if any\n const matchingKey = Object.keys(outputMap).find(k => k.toLowerCase() === queueKey.toLowerCase());\n\n if (matchingKey && outputMap[matchingKey]) {\n queueUrls[worker.id] = {\n queueUrl: outputMap[matchingKey],\n region,\n };\n }\n }\n spinner.succeed('Fetched CloudFormation outputs');\n } catch (error) {\n spinner.warn('Could not fetch CloudFormation outputs. Using deterministic queue URLs.');\n for (const worker of workers) {\n queueUrls[worker.id] = {\n queueUrl: `https://sqs.${'${aws:region}'}.amazonaws.com/${'${aws:accountId}'}/${'${self:service}'}-${worker.id}-${stage}`,\n region,\n };\n }\n }\n\n // Generate TypeScript file\n const mapContent = `/**\n * Auto-generated workers map\n * DO NOT EDIT - This file is generated by deploy-workers script\n */\n\nexport const workersMap = ${JSON.stringify(queueUrls, null, 2)} as const;\n`;\n\n const mapFile = path.join(serverlessDir, 'workers-map.generated.ts');\n fs.writeFileSync(mapFile, mapContent);\n console.log(chalk.green(`✓ Generated workers map: ${mapFile}`));\n}\n\nasync function build(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n const aiPath = args['ai-path'] || 'app/ai';\n\n console.log(chalk.blue(`📦 Building workers (stage: ${stage}, region: ${region})...`));\n\n const spinner = ora('Scanning workers...').start();\n const workers = await scanWorkers(aiPath);\n\n if (workers.length === 0) {\n spinner.warn('No workers found.');\n return;\n }\n spinner.succeed(`Found ${workers.length} worker(s)`);\n workers.forEach(w => console.log(chalk.gray(` - ${w.id} (${w.filePath})`)));\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: true });\n }\n\n // Build an accurate dependencies map for Microfox installs:\n // include any npm packages imported by the worker entrypoints (and their local imports),\n // plus runtime packages used by generated handlers.\n // Job store backend is conditional on WORKER_DATABASE_TYPE; include only that backend (+ mongodb if user code uses it).\n const runtimeDeps = await collectRuntimeDependenciesForWorkers(\n workers.map((w) => w.filePath),\n process.cwd()\n );\n const jobStoreType = getJobStoreType();\n const filteredDeps = filterDepsForJobStore(runtimeDeps, jobStoreType);\n const dependencies = buildDependenciesMap(process.cwd(), filteredDeps);\n\n // Generate package.json for the serverless service (used by Microfox push)\n const packageJson = {\n name: 'ai-router-workers',\n version: '1.0.0',\n description: 'Auto-generated serverless workers',\n private: true,\n dependencies,\n scripts: {\n build: 
\"echo 'Already compiled.'\",\n },\n devDependencies: {\n serverless: '^3.38.0',\n 'serverless-offline': '^13.3.3',\n '@aws-sdk/client-sqs': '^3.700.0',\n },\n };\n fs.writeFileSync(\n path.join(serverlessDir, 'package.json'),\n JSON.stringify(packageJson, null, 2)\n );\n\n // No tsconfig.json needed as we are deploying bundled JS\n\n const envVars = loadEnvVars();\n\n // Detect env usage from worker entry files + their local dependency graph.\n // We use this to populate env.json with only envs that are actually referenced,\n // but ONLY if they exist in .env (we don't invent values).\n const workerEntryFiles = workers.map((w) => w.filePath);\n const { runtimeKeys: runtimeEnvKeys, buildtimeKeys: buildtimeEnvKeys } =\n await collectEnvUsageForWorkers(workerEntryFiles, process.cwd());\n const referencedEnvKeys = new Set<string>([\n ...Array.from(runtimeEnvKeys),\n ...Array.from(buildtimeEnvKeys),\n ]);\n\n // Light, helpful logging (avoid noisy huge dumps)\n const runtimeList = Array.from(runtimeEnvKeys).sort();\n const buildtimeList = Array.from(buildtimeEnvKeys).sort();\n const missingFromDotEnv = Array.from(referencedEnvKeys)\n .filter((k) => !(k in envVars))\n .sort();\n if (runtimeList.length || buildtimeList.length) {\n console.log(\n chalk.blue(\n `ℹ️ Detected env usage from worker code: runtime=${runtimeList.length}, buildtime=${buildtimeList.length}`\n )\n );\n if (missingFromDotEnv.length > 0) {\n console.log(\n chalk.yellow(\n `⚠️ These referenced envs were not found in .env (so they will NOT be written to env.json): ${missingFromDotEnv\n .slice(0, 25)\n .join(', ')}${missingFromDotEnv.length > 25 ? ' ...' : ''}`\n )\n );\n }\n }\n\n let serviceName = (args['service-name'] as string | undefined)?.trim() || `ai-router-workers-${stage}`;\n\n // Check for microfox.json to customize service name\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n try {\n const microfoxConfig = JSON.parse(fs.readFileSync(microfoxJsonPath, 'utf-8'));\n if (microfoxConfig.projectId) {\n // Only override if user did not explicitly provide a service name\n if (!(args['service-name'] as string | undefined)?.trim()) {\n serviceName = getServiceNameFromProjectId(microfoxConfig.projectId);\n }\n console.log(chalk.blue(`ℹ️ Using service name from microfox.json: ${serviceName}`));\n }\n } catch (error) {\n console.warn(chalk.yellow('⚠️ Failed to parse microfox.json, using default service name'));\n }\n }\n\n const queues = await scanQueues(aiPath);\n if (queues.length > 0) {\n console.log(chalk.blue(`ℹ️ Found ${queues.length} queue(s): ${queues.map((q) => q.id).join(', ')}`));\n generateQueueRegistry(queues, serverlessDir, process.cwd());\n }\n\n ora('Generating handlers...').start().succeed('Generated handlers');\n await generateHandlers(workers, serverlessDir, queues);\n\n // Now import the bundled handlers to extract workerConfig\n const extractSpinner = ora('Extracting worker configs from bundled handlers...').start();\n for (const worker of workers) {\n try {\n const handlerFile = path.join(serverlessDir, worker.handlerPath + '.js');\n if (fs.existsSync(handlerFile)) {\n // Convert absolute path to file:// URL for ESM import (required on Windows)\n const handlerUrl = pathToFileURL(path.resolve(handlerFile)).href;\n\n try {\n // Import the bundled handler (which exports exportedWorkerConfig)\n // Note: The handler might have runtime errors, but we only need the exportedWorkerConfig\n const module = await import(handlerUrl);\n\n // 
exportedWorkerConfig is exported directly from the handler file\n if (module.exportedWorkerConfig) {\n worker.workerConfig = module.exportedWorkerConfig;\n if (module.exportedWorkerConfig.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${module.exportedWorkerConfig.layers.length} layer(s)`));\n }\n } else {\n console.warn(chalk.yellow(` ⚠ ${worker.id}: exportedWorkerConfig not found in handler`));\n }\n } catch (importError: any) {\n // If import fails due to runtime errors (e.g., lazy-cache initialization in bundled code),\n // try to extract config from source file as fallback. This is expected for some bundled handlers.\n // The fallback will work fine, and the Lambda runtime will handle the bundled code correctly.\n console.log(chalk.gray(` ℹ ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || 'runtime error'}...)`));\n\n // Fallback: try to read the source worker file and extract workerConfig\n try {\n const sourceContent = fs.readFileSync(worker.filePath, 'utf-8');\n // Look for exported workerConfig\n const workerConfigMatch = sourceContent.match(/export\\s+const\\s+workerConfig[^=]*=\\s*(\\{[\\s\\S]*?\\});/);\n if (workerConfigMatch) {\n // Try to parse it as JSON (after cleaning up comments)\n let configStr = workerConfigMatch[1]\n .replace(/\\/\\*[\\s\\S]*?\\*\\//g, '') // Remove block comments\n .replace(/(^|\\s)\\/\\/[^\\n]*/gm, '$1'); // Remove line comments\n\n // Use Function constructor to parse the object (safer than eval)\n const configObj = new Function('return ' + configStr)();\n if (configObj && (configObj.layers || configObj.timeout || configObj.memorySize || configObj.schedule)) {\n worker.workerConfig = configObj;\n if (configObj.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${configObj.layers.length} layer(s) from source file`));\n }\n if (configObj.schedule) {\n console.log(chalk.gray(` ✓ ${worker.id}: found schedule configuration`));\n }\n }\n }\n } catch (fallbackError) {\n // If fallback also fails, just log and continue\n console.warn(chalk.yellow(` ⚠ ${worker.id}: fallback extraction also failed, using defaults`));\n }\n }\n } else {\n console.warn(chalk.yellow(` ⚠ ${worker.id}: handler file not found: ${handlerFile}`));\n }\n } catch (error: any) {\n // If everything fails, workerConfig will remain undefined (fallback to defaults)\n console.warn(chalk.yellow(` ⚠ ${worker.id}: failed to extract config: ${error?.message || error}`));\n }\n }\n extractSpinner.succeed('Extracted configs');\n\n generateWorkersConfigHandler(serverlessDir, workers, serviceName, queues);\n generateDocsHandler(serverlessDir, serviceName, stage, region);\n generateTriggerHandler(serverlessDir, serviceName);\n\n for (const queue of queues) {\n if (queue.schedule) {\n generateQueueStarterHandler(serverlessDir, queue, serviceName);\n }\n }\n\n let calleeIds = await collectCalleeWorkerIds(workers, process.cwd());\n calleeIds = mergeQueueCallees(calleeIds, queues, workers);\n const config = generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds, queues);\n\n // Always generate env.json now as serverless.yml relies on it.\n // Microfox deploys APIs on prod by default; when microfox.json exists, default ENVIRONMENT/STAGE to \"prod\".\n const envStage = fs.existsSync(microfoxJsonPath) ? 
'prod' : stage;\n const safeEnvVars: Record<string, string> = {\n ENVIRONMENT: envStage,\n STAGE: envStage,\n NODE_ENV: envStage,\n };\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'UPSTASH_', 'WORKER_', 'WORKERS_', 'WORKFLOW_', 'REMOTION_', 'QUEUE_JOB_', 'DEBUG_WORKER_QUEUES'];\n\n for (const [key, value] of Object.entries(envVars)) {\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n if (key.startsWith('AWS_')) continue;\n\n // Keep legacy behavior for known-safe prefixes,\n // and also include any env that is referenced by worker code.\n if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {\n safeEnvVars[key] = value;\n }\n }\n\n fs.writeFileSync(\n path.join(serverlessDir, 'env.json'),\n JSON.stringify(safeEnvVars, null, 2)\n );\n\n const yamlContent = yaml.dump(config, { indent: 2 });\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n fs.writeFileSync(yamlPath, yamlContent);\n console.log(chalk.green(`✓ Generated serverless.yml: ${yamlPath}`));\n}\n\nasync function deploy(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n // Commander passes option names as camelCase (e.g. skipDeploy, skipInstall)\n const skipDeploy = args.skipDeploy ?? args['skip-deploy'] ?? false;\n const skipInstall = args.skipInstall ?? args['skip-install'] ?? false;\n\n if (skipDeploy) {\n console.log(chalk.yellow('⏭️ Skipping deployment (--skip-deploy flag)'));\n return;\n }\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n\n if (!fs.existsSync(yamlPath)) {\n console.error(chalk.red('❌ serverless.yml not found. 
Run \"build\" first.'));\n process.exit(1);\n }\n\n console.log(chalk.blue(`🚀 Deploying to AWS (stage: ${stage}, region: ${region})...`));\n validateEnvironment();\n\n try {\n // Install dependencies in the serverless directory if node_modules doesn't exist\n // Skip if --skip-install is provided\n if (!skipInstall && !fs.existsSync(path.join(serverlessDir, 'node_modules'))) {\n console.log(chalk.blue('📦 Installing serverless dependencies...'));\n execSync('npm install', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n }\n\n // Check for microfox.json in project root\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n console.log(chalk.blue('ℹ️ Found microfox.json, deploying via Microfox Cloud...'));\n\n // Copy microfox.json to .serverless-workers directory\n fs.copyFileSync(microfoxJsonPath, path.join(serverlessDir, 'microfox.json'));\n\n // Load and filter environment variables\n const envVars = loadEnvVars();\n // env.json is already generated by build()\n\n execSync('npx microfox@latest push', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n console.log(chalk.green('✓ Deployment triggered via Microfox!'));\n // We don't generate workers map for Microfox push as it handles its own routing\n return;\n }\n\n execSync('npx serverless deploy', {\n cwd: serverlessDir,\n stdio: 'inherit',\n env: {\n ...process.env,\n STAGE: stage,\n AWS_REGION: region,\n },\n });\n console.log(chalk.green('✓ Deployment complete!'));\n } catch (error) {\n console.error(chalk.red('❌ Deployment failed'));\n process.exit(1);\n }\n\n await generateWorkersMap(stage, region, serverlessDir);\n}\n\nexport const pushCommand = new Command()\n .name('push')\n .description('Build and deploy background workers to AWS')\n .option('-s, --stage <stage>', 'Deployment stage', 'prod')\n .option('-r, --region <region>', 'AWS region', 'us-east-1')\n .option('--ai-path <path>', 'Path to AI directory containing workers', 'app/ai')\n .option('--service-name <name>', 'Override serverless service name (defaults to ai-router-workers-<stage>)')\n .option('--skip-deploy', 'Skip deployment, only build', false)\n .option('--skip-install', 'Skip npm install in serverless directory', false)\n .action(async (options) => {\n await build(options);\n await deploy(options);\n });\n\n","import { Command } from 'commander';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport chalk from 'chalk';\nimport ora from 'ora';\n\nexport const newCommand = new Command()\n .name('new')\n .description('Scaffold a new background worker file')\n .argument('<id>', 'Worker ID (used as the worker id and filename)')\n .option('--dir <path>', 'Directory for the worker file', 'app/ai/workers')\n .option('--schedule <expression>', 'Optional schedule expression (e.g. \\\"cron(0 3 * * ? 
*)\\\" or \\\"rate(1 hour)\\\")')\n .option('--timeout <seconds>', 'Lambda timeout in seconds', '300')\n .option('--memory <mb>', 'Lambda memory size in MB', '512')\n .action((id: string, options: { dir?: string; schedule?: string; timeout?: string; memory?: string }) => {\n const spinner = ora('Scaffolding worker...').start();\n try {\n const projectRoot = process.cwd();\n const dir = path.resolve(projectRoot, options.dir || 'app/ai/workers');\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n\n // Derive a file-safe name from id (replace non-word characters with dashes)\n const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, '-');\n const filePath = path.join(dir, `${fileSafeId}.worker.ts`);\n\n if (fs.existsSync(filePath)) {\n spinner.fail(`File already exists: ${path.relative(projectRoot, filePath)}`);\n process.exitCode = 1;\n return;\n }\n\n const timeout = Number(options.timeout || '300') || 300;\n const memorySize = Number(options.memory || '512') || 512;\n const scheduleLine = options.schedule\n ? ` schedule: '${options.schedule}',\\n`\n : '';\n\n const contents = `import { createWorker, type WorkerConfig } from '@microfox/ai-worker';\nimport { z } from 'zod';\nimport type { WorkerHandlerParams } from '@microfox/ai-worker/handler';\n\nconst InputSchema = z.object({\n // TODO: define input fields\n});\n\nconst OutputSchema = z.object({\n // TODO: define output fields\n});\n\ntype Input = z.infer<typeof InputSchema>;\ntype Output = z.infer<typeof OutputSchema>;\n\nexport const workerConfig: WorkerConfig = {\n timeout: ${timeout},\n memorySize: ${memorySize},\n${scheduleLine}};\n\nexport default createWorker<typeof InputSchema, Output>({\n id: '${id}',\n inputSchema: InputSchema,\n outputSchema: OutputSchema,\n async handler({ input, ctx }: WorkerHandlerParams<Input, Output>) {\n const { jobId, workerId, jobStore, dispatchWorker } = ctx;\n console.log('[${id}] start', { jobId, workerId });\n\n await jobStore?.update({ status: 'running' });\n\n // TODO: implement your business logic here\n const result: Output = {} as any;\n\n await jobStore?.update({ status: 'completed', output: result });\n return result;\n },\n});\n`;\n\n fs.writeFileSync(filePath, contents, 'utf-8');\n\n spinner.succeed(\n `Created worker: ${chalk.cyan(path.relative(projectRoot, filePath))}\\n` +\n `Next: run ${chalk.yellow('npx @microfox/ai-worker-cli@latest push')} to build & deploy your workers.`\n );\n } catch (error: any) {\n spinner.fail('Failed to scaffold worker');\n console.error(chalk.red(error?.stack || error?.message || String(error)));\n process.exitCode = 1;\n }\n 
});\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,IAAAA,oBAAwB;;;ACFxB,uBAAwB;AACxB,cAAyB;AACzB,2BAAyB;AACzB,SAAoB;AACpB,WAAsB;AACtB,iBAA8B;AAC9B,oBAA+B;AAC/B,kBAAqB;AACrB,WAAsB;AACtB,mBAAkB;AAClB,iBAAgB;AAEhB,IAAM,gBAAgB,IAAI;AAAA,EACxB,6BAAe,IAAI,CAAC,MAAO,EAAE,WAAW,OAAO,IAAI,EAAE,MAAM,QAAQ,MAAM,IAAI,CAAE;AACjF;AAEA,SAAS,gBAAgB,WAA4B;AACnD,QAAM,IAAI,UAAU,WAAW,OAAO,IAClC,UAAU,MAAM,QAAQ,MAAM,IAC9B;AACJ,SAAO,cAAc,IAAI,CAAC;AAC5B;AAEA,SAAS,4BAA4B,WAA2B;AAE9D,MAAI,UAAU,WAAW,GAAG,GAAG;AAC7B,UAAM,CAAC,OAAO,IAAI,IAAI,UAAU,MAAM,GAAG;AACzC,WAAO,OAAO,GAAG,KAAK,IAAI,IAAI,KAAK;AAAA,EACrC;AAEA,SAAO,UAAU,MAAM,GAAG,EAAE,CAAC;AAC/B;AAEA,SAAS,sBAAsB,UAAkB,WAAkC;AACjF,QAAM,UAAe,aAAQ,QAAQ;AACrC,QAAM,MAAW,aAAQ,SAAS,SAAS;AAG3C,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,EACR;AACA,aAAW,KAAK,YAAY;AAC1B,QAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,EAC1D;AAGA,MAAO,cAAW,GAAG,KAAQ,YAAS,GAAG,EAAE,YAAY,GAAG;AACxD,UAAM,gBAAgB;AAAA,MACf,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,WAAW;AAAA,IAC5B;AACA,eAAW,KAAK,eAAe;AAC7B,UAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,IAC1D;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,wBAAwB,QAA0B;AACzD,QAAM,QAAkB,CAAC;AAIzB,QAAM,MACJ;AACF,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAEA,SAAO;AACT;AAEA,SAAS,6BAA6B,QAGpC;AACA,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAGtC,QAAM,eAAe;AACrB,aAAW,SAAS,OAAO,SAAS,YAAY,GAAG;AACjD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,mBAAmB;AACzB,aAAW,SAAS,OAAO,SAAS,gBAAgB,GAAG;AACrD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,kBAAkB;AACxB,aAAW,SAAS,OAAO,SAAS,eAAe,GAAG;AACpD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAGA,QAAM,sBAAsB;AAC5B,aAAW,SAAS,OAAO,SAAS,mBAAmB,GAAG;AACxD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAEA,SAAO,EAAE,aAAa,cAAc;AACtC;AAEA,eAAe,0BACb,kBACA,aACmE;AACnE,OAAK;AAEL,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAEtC,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAE/C,UAAM,QAAQ,6BAA6B,GAAG;AAC9C,UAAM,YAAY,QAAQ,CAAC,MAAM,YAAY,IAAI,CAAC,CAAC;AACnD,UAAM,cAAc,QAAQ,CAAC,MAAM,cAAc,IAAI,CAAC,CAAC;AAEvD,UAAM,aAAa,wBAAwB,GAAG;AAC9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAAA,IAE7B;AAAA,EACF;AAEA,cAAY,OAAO,EAAE;AACrB,gBAAc,OAAO,EAAE;AACvB,cAAY,OAAO,MAAM;AACzB,gBAAc,OAAO,MAAM;AAE3B,SAAO,EAAE,aAAa,cAAc;AACtC;AAMA,eAAe,uBACb,SACA,aACmC;AACnC,OAAK;AACL,QAAM,oBAAoB,oBAAI,IAAyB;AAEvD,QAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AAElD,aAAW,UAAU,SAAS;AAC5B,UAAM,YAAY,oBAAI,IAAY;AAClC,UAAM,UAAU,oBAAI,IAAY;AAChC,UAAM,QAAkB,CAAC,OAAO,QAAQ;AAExC,WAAO,MAAM,SAAS,GAAG;AACvB,YAAM,OAAO,MAAM,IAAI;AACvB,YAAM,aAAkB,aAAQ,IAAI;AACpC,UAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,cAAQ,IAAI,UAAU;AAEtB,UAAI,CAAI,cAAW,UAA
U,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,YAAM,MAAS,gBAAa,YAAY,OAAO;AAG/C,YAAM,KAAK;AACX,iBAAW,SAAS,IAAI,SAAS,EAAE,GAAG;AACpC,YAAI,MAAM,CAAC,EAAG,WAAU,IAAI,MAAM,CAAC,CAAC;AAAA,MACtC;AAEA,YAAM,aAAa,wBAAwB,GAAG;AAC9C,iBAAW,QAAQ,YAAY;AAC7B,YAAI,CAAC,QAAQ,CAAC,KAAK,WAAW,GAAG,EAAG;AACpC,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AAAA,MACnC;AAAA,IACF;AAEA,QAAI,UAAU,OAAO,GAAG;AACtB,iBAAW,YAAY,WAAW;AAChC,YAAI,CAAC,UAAU,IAAI,QAAQ,GAAG;AAC5B,kBAAQ;AAAA,YACN,aAAAC,QAAM;AAAA,cACJ,yBAAe,OAAO,EAAE,YAAY,QAAQ;AAAA,YAC9C;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,wBAAkB,IAAI,OAAO,IAAI,SAAS;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,uBAAuB,UAA0B;AACxD,SAAO,SAAS,QAAQ,MAAM,GAAG,EAAE,YAAY;AACjD;AAEA,SAAS,aAAsB,UAA4B;AACzD,MAAI;AACF,WAAO,KAAK,MAAS,gBAAa,UAAU,OAAO,CAAC;AAAA,EACtD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,iBAAiB,UAA0B;AAClD,MAAI,MAAW,aAAQ,QAAQ;AAE/B,SAAO,MAAM;AACX,UAAM,UAAe,UAAK,KAAK,cAAc;AAC7C,QAAO,cAAW,OAAO,GAAG;AAC1B,YAAM,MAAM,aAAkB,OAAO;AACrC,UAAI,KAAK,WAAY,QAAO;AAAA,IAC9B;AAEA,UAAM,SAAc,aAAQ,GAAG;AAC/B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,eAAe,qCACb,kBACA,aACsB;AAGtB,QAAM,OAAO,oBAAI,IAAY,CAAC,uBAAuB,qBAAqB,CAAC;AAC3E,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAC/C,UAAM,aAAa,wBAAwB,GAAG;AAE9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAE3B,WAAK,IAAI,4BAA4B,IAAI,CAAC;AAAA,IAC5C;AAAA,EACF;AAGA,OAAK,OAAO,EAAE;AACd,OAAK,OAAO,MAAM;AAGlB,OAAK,OAAO,YAAY;AACxB,OAAK,OAAO,oBAAoB;AAChC,OAAK,OAAO,qBAAqB;AACjC,OAAK,OAAO,qBAAqB;AACjC,SAAO;AACT;AAGA,SAAS,kBAA+C;AACtD,QAAM,MAAM,QAAQ,IAAI,sBAAsB,YAAY;AAC1D,MAAI,QAAQ,aAAa,QAAQ,gBAAiB,QAAO;AACzD,SAAO;AACT;AAQA,SAAS,sBACP,aACA,cACa;AACb,QAAM,WAAW,IAAI,IAAI,WAAW;AACpC,WAAS,OAAO,SAAS;AACzB,WAAS,OAAO,gBAAgB;AAChC,MAAI,iBAAiB,UAAW,UAAS,IAAI,SAAS;AAAA,MACjD,UAAS,IAAI,gBAAgB;AAClC,MAAI,YAAY,IAAI,SAAS,EAAG,UAAS,IAAI,SAAS;AACtD,SAAO;AACT;AAEA,SAAS,qBAAqB,aAAqB,MAA2C;AAC5F,QAAM,aACJ,aAAuB,UAAK,aAAa,cAAc,CAAC,KAAK,CAAC;AAChE,QAAM,cAAsC,WAAW,gBAAgB,CAAC;AACxE,QAAM,iBAAyC,WAAW,mBAAmB,CAAC;AAG9E,QAAM,WAAW,iBAAiB,WAAW;AAC7C,QAAM,YACJ,aAAuB,UAAK,UAAU,YAAY,aAAa,cAAc,CAAC,KAC9E,CAAC;AACH,QAAM,eACJ;AAAA,IACO,UAAK,UAAU,YAAY,iBAAiB,cAAc;AAAA,EACjE,KAAK,CAAC;AAER,QAAM,gBAAwC;AAAA,IAC5C,GAAI,UAAU,gBAAgB,CAAC;AAAA,IAC/B,GAAI,UAAU,mBAAmB,CAAC;AAAA,IAClC,GAAI,aAAa,gBAAgB,CAAC;AAAA,IAClC,GAAI,aAAa,mBAAmB,CAAC;AAAA,EACvC;AAEA,QAAM,MAA8B,CAAC;AACrC,aAAW,OAAO,MAAM,KAAK,IAAI,EAAE,KAAK,GAAG;AACzC,UAAM,QACJ,YAAY,GAAG,KACf,eAAe,GAAG,KAClB,cAAc,GAAG;AAGnB,QAAI,OAAO;AACT,UAAI,GAAG,IAAI,OAAO,KAAK;AAAA,IACzB;AAAA,EACF;AAEA,SAAO;AACT;AAoEO,SAAS,4BAA4B,WAA2B;AACrE,QAAM,mBAAmB,UAAU,QAAQ,MAAM,EAAE,EAAE,MAAM,GAAG,EAAE;AAChE,SAAO,KAAK,gBAAgB;AAC9B;AAKA,SAAS,sBAA4B;AAGnC,MAAI;AACF,uCAAS,iBAAiB,EAAE,OAAO,SAAS,CAAC;AAAA,EAC/C,SAAS,OAAO;AACd,YAAQ,MAAM,aAAAA,QAAM,IAAI,6CAAwC,CAAC;AACjE,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAKA,eAAe,YAAY,SAAiB,UAAiC;AAC3E,QAAM,UAAe,UAAK,QAAQ,gBAAgB,EAAE,QAAQ,OAAO,GAAG;AACtE,QAAM,QAAQ,UAAM,kBAAK,OAAO;AAEhC,QAAM,UAAwB,CAAC;AAE/B,aAAW,YAAY,OAAO;AAC5B,QAAI;AAGF,UAAI;AACJ,UAAI;AAMJ,UAAI,CAAC,UAAU;AACb,cAAM,UAAa,gBAAa,UAAU,OAAO;AAGjD,cAAM,UAAU,QAAQ,MAAM,qEAAqE;AACnG,YAAI,CAAC,SAAS;AACZ,kBAAQ,KAAK,aAAAA,QAAM,OAAO,0BAAgB,QAAQ,sBAAsB,CAAC;AACzE;AAAA,QACF;AACA,mBAAW,QAAQ,CAAC;AAA
A,MACtB;AAIA,YAAM,eAAoB,cAAS,QAAQ,QAAQ;AACnD,YAAM,aAAkB,aAAQ,YAAY;AAC5C,YAAM,cAAmB,cAAS,cAAc,YAAY;AAC5D,YAAM,cAAmB,UAAK,YAAY,YAAY,GAAG,WAAW,EAAE,EAAE,QAAQ,OAAO,GAAG;AAE1F,cAAQ,KAAK;AAAA,QACX,IAAI;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,OAAO;AACd,cAAQ,MAAM,aAAAA,QAAM,IAAI,2BAAsB,QAAQ,GAAG,GAAG,KAAK;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,WAAW,SAAiB,UAAgC;AACzE,QAAM,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtC,QAAM,UAAU,GAAG,IAAI;AACvB,QAAM,QAAQ,UAAM,kBAAK,OAAO;AAEhC,QAAM,SAAsB,CAAC;AAE7B,aAAW,YAAY,OAAO;AAC5B,QAAI;AACF,YAAM,UAAa,gBAAa,UAAU,OAAO;AAEjD,YAAM,UAAU,QAAQ,MAAM,2DAA2D;AACzF,UAAI,CAAC,SAAS;AACZ,gBAAQ,KAAK,aAAAA,QAAM,OAAO,0BAAgB,QAAQ,0CAA0C,CAAC;AAC7F;AAAA,MACF;AACA,YAAM,UAAU,QAAQ,CAAC;AAEzB,YAAM,QAAyB,CAAC;AAChC,YAAM,aAAa,QAAQ,MAAM,yBAAyB;AAC1D,UAAI,YAAY;AACd,cAAM,WAAW,WAAW,CAAC;AAC7B,cAAM,YAAY;AAClB,YAAI;AACJ,gBAAQ,IAAI,UAAU,KAAK,QAAQ,OAAO,MAAM;AAC9C,gBAAM,KAAK;AAAA,YACT,UAAU,EAAE,CAAC;AAAA,YACb,cAAc,EAAE,CAAC,IAAI,SAAS,EAAE,CAAC,GAAG,EAAE,IAAI;AAAA,YAC1C,kBAAkB,EAAE,CAAC;AAAA,UACvB,CAAC;AAAA,QACH;AAAA,MACF;AAEA,UAAI;AACJ,YAAM,mBAAmB,QAAQ,MAAM,8BAA8B;AACrE,YAAM,mBAAmB,QAAQ,MAAM,4CAA4C;AACnF,UAAI,kBAAkB;AACpB,mBAAW,iBAAiB,CAAC;AAAA,MAC/B,WAAW,kBAAkB;AAC3B,YAAI;AACF,qBAAW,IAAI,SAAS,YAAY,iBAAiB,CAAC,CAAC,EAAE;AAAA,QAC3D,QAAQ;AACN,qBAAW;AAAA,QACb;AAAA,MACF;AAEA,aAAO,KAAK,EAAE,IAAI,SAAS,UAAU,OAAO,SAAS,CAAC;AAAA,IACxD,SAAS,OAAO;AACd,cAAQ,MAAM,aAAAA,QAAM,IAAI,2BAAsB,QAAQ,GAAG,GAAG,KAAK;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,sBAAsB,QAAqB,WAAmB,aAA2B;AAChG,QAAM,eAAoB,UAAK,WAAW,WAAW;AACrD,MAAI,CAAI,cAAW,YAAY,GAAG;AAChC,IAAG,aAAU,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,EAChD;AAEA,QAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA,iBAKT,KAAK,UAAU,OAAO,IAAI,CAAC,OAAO,EAAE,IAAI,EAAE,IAAI,OAAO,EAAE,OAAO,UAAU,EAAE,SAAS,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoB/G,QAAM,eAAoB,UAAK,cAAc,0BAA0B;AACvE,EAAG,iBAAc,cAAc,eAAe;AAC9C,UAAQ,IAAI,aAAAA,QAAM,MAAM,oCAA+B,YAAY,EAAE,CAAC;AAIxE;AAKA,SAAS,mBAAmB,QAAkC;AAC5D,QAAM,MAAM,oBAAI,IAAY;AAC5B,aAAW,KAAK,QAAQ;AACtB,eAAW,QAAQ,EAAE,OAAO;AAC1B,UAAI,IAAI,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AACA,SAAO;AACT;AAKA,SAAS,kBACP,WACA,QACA,SAC0B;AAC1B,QAAM,SAAS,IAAI,IAAI,SAAS;AAChC,QAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AAElD,aAAW,SAAS,QAAQ;AAC1B,aAAS,IAAI,GAAG,IAAI,MAAM,MAAM,SAAS,GAAG,KAAK;AAC/C,YAAM,eAAe,MAAM,MAAM,CAAC,EAAE;AACpC,YAAM,aAAa,MAAM,MAAM,IAAI,CAAC,EAAE;AACtC,UAAI,CAAC,UAAU,IAAI,UAAU,EAAG;AAChC,UAAI,UAAU,OAAO,IAAI,YAAY;AACrC,UAAI,CAAC,SAAS;AACZ,kBAAU,oBAAI,IAAY;AAC1B,eAAO,IAAI,cAAc,OAAO;AAAA,MAClC;AACA,cAAQ,IAAI,UAAU;AAAA,IACxB;AAAA,EACF;AACA,SAAO;AACT;AAKA,eAAe,iBACb,SACA,WACA,SAAsB,CAAC,GACR;AACf,QAAM,cAAmB,UAAK,WAAW,UAAU;AACnD,QAAM,kBAAkB,mBAAmB,MAAM;AAGjD,MAAO,cAAW,WAAW,GAAG;AAC9B,IAAG,UAAO,aAAa,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACzD;AACA,EAAG,aAAU,aAAa,EAAE,WAAW,KAAK,CAAC;AAE7C,aAAW,UAAU,SAAS;AAG5B,UAAM,cAAmB,UAAK,aAAa,OAAO,YAAY,QAAQ,aAAa,EAAE,IAAI,KAAK;AAC9F,UAAM,aAAkB,aAAQ,WAAW;AAE3C,QAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,MAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,IAC9C;AASA,UAAM,iBAAsB,aAAQ,WAAW;AAC/C,UAAM,gBAAqB,aAAQ,OAAO,QAAQ;AAGlD,QAAI,qBAA0B,cAAc,aAAQ,cAAc,GAAG,aAAa;AAGlF,QAAI,CAAC,mBAAmB,WAAW,GAAG,GAAG;AACvC,2BAAqB,OAAO;AAAA,IAC9B;AAGA,yBAAqB,mBAAmB,QAAQ,SAAS,EAAE;AAE3D,yBAAqB,mBAAmB,MAAW,QAAG,EAAE,KAAK,GAAG;AAGhE,UAAM,cAAiB,gBAAa,OAAO,UAAU,OAAO;AAC5D,UAAM,gBAAgB,kCAAkC,KAAK,WAAW;AACxE,UAAM,cAAc,YAAY,MAAM,iDAAiD;AACvF,UAAM,aAAa,cAAc,YAAY,CAAC,IAAI;AAGlD,UAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAE3D,UAAM,YAAY,gBACd,yBACA,gBAAgB,UAAU;AAE9B,UAAM,UAAU,gBAAgB,IAAI,OAAO,EAAE;AAC7C,UAAM,kBACH,
cAAc,aAAa,aAAQ,WAAW,CAAC,GAAQ,UAAK,WAAW,aAAa,uBAAuB,CAAC,EAC5G,MAAW,QAAG,EACd,KAAK,GAAG;AACX,UAAM,qBAAqB,gBAAgB,WAAW,GAAG,IAAI,kBAAkB,OAAO;AAEtF,UAAM,kBAAkB,UACpB;AAAA;AAAA,kCAE0B,kBAAkB;AAAA,iCACnB,kBAAkB;AAAA;AAAA;AAAA;AAAA,sBAI7B,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IA8BvB;AAAA;AAAA,iCAEyB,kBAAkB;AAAA;AAAA;AAAA;AAAA,sBAI7B,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyB3B,UAAM,mBAAmB;AACzB,IAAG,iBAAc,eAAe,gBAAgB;AAGhD,QAAI;AAGF,YAAM,qBAAqC;AAAA,QACzC,MAAM;AAAA,QACN,MAAMC,QAAO;AACX,UAAAA,OAAM,MAAM,OAAO,WAAW;AAC5B,gBAAI,OAAO,OAAO,SAAS,EAAG;AAG9B,gBAAI,cAAiB,gBAAa,aAAa,OAAO;AACtD,gBAAI,WAAW;AAMf,kBAAM,UAAU;AAEhB,gBAAI,QAAQ,KAAK,WAAW,GAAG;AAC7B,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAIA,gBAAI,YAAY,SAAS,iBAAiB,GAAG;AAC3C,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAGA,kBAAM,sBAAsB;AAC5B,0BAAc,YAAY;AAAA,cACxB;AAAA,cACA;AAAA,YACF;AACA,gBAAI,gBAAgB,oBAAqB,YAAW;AAEpD,gBAAI,UAAU;AACZ,cAAG,iBAAc,aAAa,aAAa,OAAO;AAAA,YACpD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAc,cAAM;AAAA,QAClB,aAAa,CAAC,aAAa;AAAA,QAC3B,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA;AAAA;AAAA,QAGT,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA;AAAA;AAAA,QAGA,QAAQ;AAAA,UACN,sBAAsB;AAAA,QACxB;AAAA;AAAA;AAAA,QAGA,UAAU;AAAA,QACV,SAAS,CAAC,kBAAkB;AAAA,QAC5B,UAAU;AAAA,MACZ,CAAC;AAGD,MAAG,cAAW,aAAa;AAAA,IAE7B,SAAS,OAAO;AACd,cAAQ,MAAM,aAAAD,QAAM,IAAI,8BAA8B,OAAO,EAAE,GAAG,GAAG,KAAK;AAAA,IAE5E;AAAA,EACF;AACA,UAAQ,IAAI,aAAAA,QAAM,MAAM,oBAAe,QAAQ,MAAM,mBAAmB,CAAC;AAC3E;AAEA,SAAS,oBAAoB,WAAmB,aAAqB,OAAe,QAAsB;AACxG,QAAM,cAAmB,UAAK,WAAW,YAAY,SAAS;AAC9D,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAuBO,MAAM;AAAA,+BACP,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAoJd,WAAW;AAAA,gBACjB,KAAK;AAAA,iBACJ,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAerB,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,aAAAA,QAAM,MAAM,oCAA+B,CAAC;AAC1D;AAEA,SAAS,uBAAuB,WAAmB,aAA2B;AAC5E,QAAM,cAAmB,UAAK,WAAW,YAAY,oBAAoB;AACzE,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA,uBAQF,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0FhD,EAAG,iBAAc,eAAe,cAAc;AAE9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,aAAAA,QAAM,MAAM,2CAAsC,CAAC;AACjE;AAMA,SAAS,4BACP,WACA,OACA,aACM;AACN,QAAM,SAAS,MAAM,GAAG,QAAQ,iBAAiB,EAAE;AACnD,QAAM,cAAmB,UAAK,WAAW,YAAY,iBAAiB,MAAM,KAAK;AACjF,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,gBAAgB,MAAM,MAAM,CAAC,GAAG;AACtC,MAAI,CAAC,cAAe;AAEpB,QAAM,iBAAiB;AAAA,6CACoB,MAAM,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAOlC,KAAK,UAAU,MAAM,EAAE,CAAC;AAAA,0BACjB,KAAK,UAAU,aAAa,CAAC;AAAA,uBAChC,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoChD,EAAG,iBAAc,eAAe,cAAc;AAC9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU,CAAC,WAAW,UAAU,2BAA2B,qBAAqB;AAAA,IAChF,UAAU;AAAA,IACV,UAAU;AAAA,EACZ,CAAC;AACD,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,aAAAA,QAAM,MAAM,sCAAiC,MAAM,EAAE,EAAE,CAAC;AACtE;AAKA,SAAS,6BACP,WACA,SACA,aACA,SAAsB,CAAC,GACjB;AAEN,QAAM,cAAmB,UAAK,WAAW,YAAY,mBAAmB;AACxE,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAG3C,MAAO,cAAW,UAAU,KAAK,CAAI,cAAW,WAAW,GAAG;AAAA,EAE9D,WAAW,CAAI,cAAW,UAAU,GAAG;AACrC,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BASM,KAAK,UAAU,QAAQ,IAAI,OAAK,EAAE,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA,iBAC7D,KAAK,UAAU,OAAO,IAAI,QAAM,EAAE,IAAI,EAAE,IAAI,OAAO,EAAE,OAAO,UAAU,EAAE,SAAS,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA,uBACxF,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6EhD,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,aAAAA,QAAM,MAAM,yCAAoC,CAAC;AAC/D;AAKA,SAAS,YAAY,UAAkB,QAAgC;AACrE,QAAM,MAA8B,CAAC;AAErC,MAAI,CAAI,cAAW,OAAO,GAAG;AAC3B,YAAQ,KAAK,aAAAA,QAAM,OAAO,wCAA8B,OAAO,EAAE,CAAC;AAClE,WAAO;AAAA,EACT;AAEA,QAAM,UAAa,gBAAa,SAAS,OAAO;AAChD,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAEhC,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,CAAC,WAAW,QAAQ,WAAW,GAAG,EAAG;AAEzC,UAAM,QAAQ,QAAQ,MAAM,gBAAgB;AAC5C,QAAI,OAAO;AACT,YAAM,MAAM,MAAM,CAAC,EAAE,KAAK;AAC1B,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,gBAAgB,EAAE;AACxD,UAAI,GAAG,IAAI;AAAA,IACb;AA
AA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,sBAAsB,gBAA4B;AACzD,MAAI,CAAC,gBAAgB;AACnB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAgB,CAAC;AAGvB,QAAM,YAAY,MAAM,QAAQ,cAAc,IAAI,iBAAiB,CAAC,cAAc;AAElF,aAAW,YAAY,WAAW;AAEhC,QAAI,OAAO,aAAa,UAAU;AAChC,aAAO,KAAK;AAAA,QACV;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAGA,QAAI,OAAO,aAAa,YAAY,aAAa,MAAM;AACrD,YAAM,gBAAqB,EAAE,UAAU,CAAC,EAAE;AAG1C,UAAI,SAAS,MAAM;AACjB,YAAI,MAAM,QAAQ,SAAS,IAAI,GAAG;AAEhC,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC,OAAO;AAEL,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC;AAAA,MACF,OAAO;AAEL;AAAA,MACF;AAGA,UAAI,SAAS,YAAY,QAAW;AAClC,sBAAc,SAAS,UAAU,SAAS;AAAA,MAC5C;AACA,UAAI,SAAS,UAAU,QAAW;AAChC,sBAAc,SAAS,QAAQ,SAAS;AAAA,MAC1C;AACA,UAAI,SAAS,cAAc,QAAW;AACpC,sBAAc,SAAS,YAAY,SAAS;AAAA,MAC9C;AACA,UAAI,SAAS,qBAAqB,QAAW;AAC3C,sBAAc,SAAS,mBAAmB,SAAS;AAAA,MACrD;AACA,UAAI,SAAS,SAAS,QAAW;AAC/B,sBAAc,SAAS,OAAO,SAAS;AAAA,MACzC;AACA,UAAI,SAAS,gBAAgB,QAAW;AACtC,sBAAc,SAAS,cAAc,SAAS;AAAA,MAChD;AACA,UAAI,SAAS,WAAW,QAAW;AACjC,sBAAc,SAAS,SAAS,SAAS;AAAA,MAC3C;AACA,UAAI,SAAS,aAAa,QAAW;AACnC,sBAAc,SAAS,WAAW,SAAS;AAAA,MAC7C;AAIA,UAAI,OAAO,KAAK,cAAc,QAAQ,EAAE,WAAW,KAAK,cAAc,SAAS,MAAM;AAEnF,YAAI,OAAO,cAAc,SAAS,SAAS,UAAU;AACnD,iBAAO,KAAK;AAAA,YACV,UAAU,cAAc,SAAS;AAAA,UACnC,CAAC;AAAA,QACH,OAAO;AAEL,iBAAO,KAAK,aAAa;AAAA,QAC3B;AAAA,MACF,OAAO;AACL,eAAO,KAAK,aAAa;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,yBACP,SACA,OACA,QACA,SACA,aACA,YAAsC,oBAAI,IAAI,GAC9C,SAAsB,CAAC,GACL;AAElB,QAAM,YAA2C;AAAA,IAC/C,WAAW,CAAC;AAAA,IACZ,SAAS,CAAC;AAAA,EACZ;AAEA,QAAM,YAAiD,CAAC;AAGxD,QAAM,sBAA2B;AAAA,IAC/B,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AAGA,QAAM,eAAoC;AAAA,IACxC,OAAO,wBAAwB,KAAK;AAAA,IACpC,sBAAsB;AAAA,MACpB,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,oBAAoB;AAAA,MACpB,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,yBAAyB;AAAA,MACzB,WAAW;AAAA,MACX,aAAa;AAAA,IACf;AAAA,EACF;AAEA,aAAW,UAAU,SAAS;AAC5B,UAAM,YAAY,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC;AACtE,UAAM,iBAAiB,GAAG,SAAS,GAAG,KAAK;AAC3C,UAAM,eAAe,GAAG,SAAS,MAAM,KAAK;AAE5C,UAAM,SAAS,OAAO,cAAc;AACpC,UAAM,YACJ,OAAO,QAAQ,2BAA2B,WACtC,OAAO,yBACP;AACN,UAAM,eACJ,OAAO,QAAQ,qCAAqC,WAChD,OAAO,mCACP;AACN,UAAM,oBACJ,OAAO,QAAQ,sBAAsB,WACjC,OAAO,qBACN,OAAO,cAAc,WAAW,OAAO;AAC9C,UAAM,qBACJ,OAAO,QAAQ,oBAAoB,WAAW,OAAO,kBAAkB;AAEzE,UAAM,kBAAkB,KAAK,IAAI,GAAG,KAAK,MAAM,kBAAkB,CAAC;AAGlE,cAAU,UAAU,YAAY,IAAI;AAAA,MAClC,MAAM;AAAA,MACN,YAAY;AAAA,QACV,WAAW,oBAAoB,OAAO,EAAE,wCAAwC,KAAK;AAAA,QACrF,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,cAAU,UAAU,cAAc,IAAI;AAAA,MACpC,MAAM;AAAA,MACN,YAAY;AAAA;AAAA,QAEV,WAAW,oBAAoB,OAAO,EAAE,oCAAoC,KAAK;AAAA,QACjF,mBAAmB;AAAA,QACnB,wBAAwB;AAAA,QACxB,eAAe;AAAA,UACb,qBAAqB,EAAE,cAAc,CAAC,cAAc,KAAK,EAAE;AAAA,UAC3D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,QAAQ,GAAG,cAAc,KAAK,IAAI;AAAA,MAC1C,aAAa,wBAAwB,OAAO,EAAE;AAAA,MAC9C,OAAO,EAAE,KAAK,eAAe;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM,oBAAoB,OAAO,EAAE;AAAA,MACrC;AAAA,IACF;AAEA,cAAU,KAAK,EAAE,cAAc,CAAC,gBAAgB,KAAK,EAAE,CAAC;AAAA,EAC1D;AAGA,QAAM,YAAiC,CAAC;AAExC,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,SAAS,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC;AAGpE,UAAM,SAAgB;AAAA,MACpB;AAAA,QACE,KAAK;AAAA,UACH,KAAK,EAAE,cAAc,CAAC,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC,GAAG,KAAK,IAAI,KAAK,EAAE;AAAA,UAC7F,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,iBAAiB,sBAAsB,OAAO,aAAa,QAAQ;AACzE,aAAO,KAAK,GAAG,cAAc;AAAA,IAC/B;AAEA,cAAU,YAAY,IAAI;AAAA;AAAA,MAExB,SAAS,GAAG,OAAO,WAAW;AAAA,MAC9B,SAAS,OAAO,cAAc,WAAW;AAAA,MACzC,YAAY,OAAO,cAAc,cAAc;AAAA,MAC/C;AAAA,IACF;AAEA,QAAI,OAAO,cAAc,QAAQ,QAAQ;AACvC,gBAAU,YAAY,EAAE,SAAS,OAAO,aAAa;AAAA,IACvD;AAGA,UAAM,UAAU,UAAU,IAAI,OAAO,EAAE;AACvC,QAAI,WAAW,QAAQ,OAAO,GAAG;AAC/B,YAAM,MAA2B,CAAC;AAClC,iBAAW,YAAY,SAAS;AAC9B
,cAAM,eAAe,QAAQ,KAAK,CAAC,MAAM,EAAE,OAAO,QAAQ;AAC1D,YAAI,cAAc;AAChB,gBAAM,iBAAiB,cAAc,aAAa,GAAG,QAAQ,iBAAiB,EAAE,CAAC,GAAG,KAAK;AACzF,gBAAM,SAAS,oBAAoB,uBAAuB,QAAQ,CAAC;AACnE,cAAI,MAAM,IAAI,EAAE,KAAK,eAAe;AAAA,QACtC;AAAA,MACF;AACA,UAAI,OAAO,KAAK,GAAG,EAAE,SAAS,GAAG;AAC/B,kBAAU,YAAY,EAAE,cAAc;AAAA,MACxC;AAAA,IACF;AAAA,EACF;AAGA,YAAU,SAAS,IAAI;AAAA,IACrB,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,aAAW,SAAS,QAAQ;AAC1B,QAAI,MAAM,UAAU;AAClB,YAAM,SAAS,MAAM,GAAG,QAAQ,iBAAiB,EAAE;AACnD,YAAM,SAAS,eAAe,MAAM;AACpC,YAAM,iBAAiB,sBAAsB,MAAM,QAAQ;AAC3D,gBAAU,MAAM,IAAI;AAAA,QAClB,SAAS,0BAA0B,MAAM;AAAA,QACzC,SAAS;AAAA,QACT,YAAY;AAAA,QACZ,QAAQ;AAAA,MACV;AAAA,IACF;AAAA,EACF;AAGA,QAAM,cAAsC,CAAC;AAC7C,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,qBAAqB;AAKrL,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,QAAI,gBAAgB,KAAK,YAAU,IAAI,WAAW,MAAM,CAAC,GAAG;AAC1D,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAGA,YAAU,QAAQ,cAAc,IAAI;AAAA,IAClC,aAAa;AAAA,IACb,OAAO;AAAA,MACL,YAAY;AAAA,QACV;AAAA,QACA;AAAA,UACE;AAAA,UACA,EAAE,OAAO,oBAAoB;AAAA,UAC7B;AAAA,UACA,EAAE,OAAO,cAAc;AAAA,UACvB,uCAAuC,KAAK;AAAA,QAC9C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAAS;AAAA,MACP,wBAAwB;AAAA,MACxB,cAAc;AAAA;AAAA,MAEd,UAAU;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,IACR,UAAU;AAAA,MACR,MAAM;AAAA,MACN,SAAS;AAAA,MACT;AAAA,MACA,kBAAkB;AAAA;AAAA,MAElB,OAAO,wBAAwB,KAAK;AAAA,MACpC,aAAa;AAAA,MACb,KAAK;AAAA,QACH,MAAM;AAAA,UACJ,YAAY;AAAA,YACV;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ;AAAA,gBACN;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,cACA,UAAU;AAAA,YACZ;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ,CAAC,iBAAiB;AAAA;AAAA,cAE1B,UAAU;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,SAAS,CAAC,oBAAoB;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAe,mBACb,OACA,QACA,WACe;AACf,QAAM,gBAAqB,UAAK,WAAW,aAAa;AACxD,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAKA,QAAM,UAAU,MAAM,YAAY;AAGlC,QAAM,YAAY,qBAAqB,KAAK,IAAI,KAAK;AACrD,MAAI,YAAkE,CAAC;AAEvE,QAAM,cAAU,WAAAE,SAAI,oCAAoC,EAAE,MAAM;AAEhE,MAAI;AAEF,UAAM,aAAS;AAAA,MACb,mDAAmD,SAAS,aAAa,MAAM;AAAA,MAC/E,EAAE,UAAU,SAAS,OAAO,OAAO;AAAA,IACrC;AAEA,UAAM,UAAU,KAAK,MAAM,MAAM;AACjC,UAAM,YAAoC,CAAC;AAE3C,eAAWC,WAAU,SAAS;AAC5B,YAAM,MAAMA,QAAO;AACnB,UAAI,OAAO,IAAI,SAAS,KAAK,GAAG;AAC9B,cAAM,WAAW,IAAI,QAAQ,eAAe,EAAE,EAAE,QAAQ,OAAO,EAAE,EAAE,YAAY;AAG/E,kBAAU,GAAG,IAAIA,QAAO;AAAA,MAC1B;AAAA,IACF;AAGA,eAAW,UAAU,SAAS;AAC5B,YAAM,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE;AACzD,YAAM,WAAW,cAAc,WAAW,GAAG,KAAK;AAGlD,YAAM,cAAc,OAAO,KAAK,SAAS,EAAE,KAAK,OAAK,EAAE,YAAY,MAAM,SAAS,YAAY,CAAC;AAE/F,UAAI,eAAe,UAAU,WAAW,GAAG;AACzC,kBAAU,OAAO,EAAE,IAAI;AAAA,UACrB,UAAU,UAAU,WAAW;AAAA,UAC/B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,YAAQ,QAAQ,gCAAgC;AAAA,EAClD,SAAS,OAAO;AACd,YAAQ,KAAK,yEAAyE;AACtF,eAAW,UAAU,SAAS;AAC5B,gBAAU,OAAO,EAAE,IAAI;AAAA,QACrB,UAAU,eAAe,eAAe,kBAAkB,kBAAkB,IAAI,iBAAiB,IAAI,OAAO,EAAE,IAAI,KAAK;AAAA,QACvH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA,4BAKO,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC;AAAA;AAG5D,QAAM,UAAe,UAAK,eAAe,0BAA0B;AACnE,EAAG,iBAAc,SAAS,UAA
U;AACpC,UAAQ,IAAI,aAAAH,QAAM,MAAM,iCAA4B,OAAO,EAAE,CAAC;AAChE;AAEA,eAAeC,OAAM,MAAW;AAC9B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AACxD,QAAM,SAAS,KAAK,SAAS,KAAK;AAElC,UAAQ,IAAI,aAAAD,QAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AAErF,QAAM,cAAU,WAAAE,SAAI,qBAAqB,EAAE,MAAM;AACjD,QAAM,UAAU,MAAM,YAAY,MAAM;AAExC,MAAI,QAAQ,WAAW,GAAG;AACxB,YAAQ,KAAK,mBAAmB;AAChC;AAAA,EACF;AACA,UAAQ,QAAQ,SAAS,QAAQ,MAAM,YAAY;AACnD,UAAQ,QAAQ,OAAK,QAAQ,IAAI,aAAAF,QAAM,KAAK,OAAO,EAAE,EAAE,KAAK,EAAE,QAAQ,GAAG,CAAC,CAAC;AAE3E,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAMA,QAAM,cAAc,MAAM;AAAA,IACxB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,IAC7B,QAAQ,IAAI;AAAA,EACd;AACA,QAAM,eAAe,gBAAgB;AACrC,QAAM,eAAe,sBAAsB,aAAa,YAAY;AACpE,QAAM,eAAe,qBAAqB,QAAQ,IAAI,GAAG,YAAY;AAGrE,QAAM,cAAc;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,SAAS;AAAA,IACT;AAAA,IACA,SAAS;AAAA,MACP,OAAO;AAAA,IACT;AAAA,IACA,iBAAiB;AAAA,MACf,YAAY;AAAA,MACZ,sBAAsB;AAAA,MACtB,uBAAuB;AAAA,IACzB;AAAA,EACF;AACA,EAAG;AAAA,IACI,UAAK,eAAe,cAAc;AAAA,IACvC,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,EACrC;AAIA,QAAM,UAAU,YAAY;AAK5B,QAAM,mBAAmB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AACtD,QAAM,EAAE,aAAa,gBAAgB,eAAe,iBAAiB,IACnE,MAAM,0BAA0B,kBAAkB,QAAQ,IAAI,CAAC;AACjE,QAAM,oBAAoB,oBAAI,IAAY;AAAA,IACxC,GAAG,MAAM,KAAK,cAAc;AAAA,IAC5B,GAAG,MAAM,KAAK,gBAAgB;AAAA,EAChC,CAAC;AAGD,QAAM,cAAc,MAAM,KAAK,cAAc,EAAE,KAAK;AACpD,QAAM,gBAAgB,MAAM,KAAK,gBAAgB,EAAE,KAAK;AACxD,QAAM,oBAAoB,MAAM,KAAK,iBAAiB,EACnD,OAAO,CAAC,MAAM,EAAE,KAAK,QAAQ,EAC7B,KAAK;AACR,MAAI,YAAY,UAAU,cAAc,QAAQ;AAC9C,YAAQ;AAAA,MACN,aAAAA,QAAM;AAAA,QACJ,8DAAoD,YAAY,MAAM,eAAe,cAAc,MAAM;AAAA,MAC3G;AAAA,IACF;AACA,QAAI,kBAAkB,SAAS,GAAG;AAChC,cAAQ;AAAA,QACN,aAAAA,QAAM;AAAA,UACJ,yGAA+F,kBAC5F,MAAM,GAAG,EAAE,EACX,KAAK,IAAI,CAAC,GAAG,kBAAkB,SAAS,KAAK,SAAS,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,cAAe,KAAK,cAAc,GAA0B,KAAK,KAAK,qBAAqB,KAAK;AAGpG,QAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,MAAO,cAAW,gBAAgB,GAAG;AACnC,QAAI;AACF,YAAM,iBAAiB,KAAK,MAAS,gBAAa,kBAAkB,OAAO,CAAC;AAC5E,UAAI,eAAe,WAAW;AAE5B,YAAI,CAAE,KAAK,cAAc,GAA0B,KAAK,GAAG;AACzD,wBAAc,4BAA4B,eAAe,SAAS;AAAA,QACpE;AACA,gBAAQ,IAAI,aAAAA,QAAM,KAAK,wDAA8C,WAAW,EAAE,CAAC;AAAA,MACrF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,KAAK,aAAAA,QAAM,OAAO,yEAA+D,CAAC;AAAA,IAC5F;AAAA,EACF;AAEA,QAAM,SAAS,MAAM,WAAW,MAAM;AACtC,MAAI,OAAO,SAAS,GAAG;AACrB,YAAQ,IAAI,aAAAA,QAAM,KAAK,uBAAa,OAAO,MAAM,cAAc,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,CAAC;AACpG,0BAAsB,QAAQ,eAAe,QAAQ,IAAI,CAAC;AAAA,EAC5D;AAEA,iBAAAE,SAAI,wBAAwB,EAAE,MAAM,EAAE,QAAQ,oBAAoB;AAClE,QAAM,iBAAiB,SAAS,eAAe,MAAM;AAGrD,QAAM,qBAAiB,WAAAA,SAAI,oDAAoD,EAAE,MAAM;AACvF,aAAW,UAAU,SAAS;AAC5B,QAAI;AACF,YAAM,cAAmB,UAAK,eAAe,OAAO,cAAc,KAAK;AACvE,UAAO,cAAW,WAAW,GAAG;AAE9B,cAAM,iBAAa,0BAAmB,aAAQ,WAAW,CAAC,EAAE;AAE5D,YAAI;AAGF,gBAAME,UAAS,MAAM,OAAO;AAG5B,cAAIA,QAAO,sBAAsB;AAC/B,mBAAO,eAAeA,QAAO;AAC7B,gBAAIA,QAAO,qBAAqB,QAAQ,QAAQ;AAC9C,sBAAQ,IAAI,aAAAJ,QAAM,KAAK,YAAO,OAAO,EAAE,WAAWI,QAAO,qBAAqB,OAAO,MAAM,WAAW,CAAC;AAAA,YACzG;AAAA,UACF,OAAO;AACL,oBAAQ,KAAK,aAAAJ,QAAM,OAAO,YAAO,OAAO,EAAE,6CAA6C,CAAC;AAAA,UAC1F;AAAA,QACF,SAAS,aAAkB;AAIzB,kBAAQ,IAAI,aAAAA,QAAM,KAAK,YAAO,OAAO,EAAE,mDAAmD,aAAa,SAAS,MAAM,GAAG,EAAE,KAAK,eAAe,MAAM,CAAC;AAGtJ,cAAI;AACF,kBAAM,gBAAmB,gBAAa,OAAO,UAAU,OAAO;AAE9D,kBAAM,oBAAoB,cAAc,MAAM,uDAAuD;AACrG,gBAAI,mBAAmB;AAErB,kBAAI,YAAY,kBAAkB,CAAC,EAChC,QAAQ,qBAAqB,EAAE,EAC/B,QAAQ,sBAAsB,IAAI;AAGrC,oBAAM,YAAY,IAAI,SAAS,YAAY,SAAS,EAAE;AACtD,kBAAI,cAAc,UAAU,UAAU,UAAU,WAAW,UAAU,cAAc,UAAU,WAAW;AACtG,uBAAO,eAAe;AACtB,oBAAI,UAAU,QAAQ,QAAQ
;AAC5B,0BAAQ,IAAI,aAAAA,QAAM,KAAK,YAAO,OAAO,EAAE,WAAW,UAAU,OAAO,MAAM,4BAA4B,CAAC;AAAA,gBACxG;AACA,oBAAI,UAAU,UAAU;AACtB,0BAAQ,IAAI,aAAAA,QAAM,KAAK,YAAO,OAAO,EAAE,gCAAgC,CAAC;AAAA,gBAC1E;AAAA,cACF;AAAA,YACF;AAAA,UACF,SAAS,eAAe;AAEtB,oBAAQ,KAAK,aAAAA,QAAM,OAAO,YAAO,OAAO,EAAE,mDAAmD,CAAC;AAAA,UAChG;AAAA,QACF;AAAA,MACF,OAAO;AACL,gBAAQ,KAAK,aAAAA,QAAM,OAAO,YAAO,OAAO,EAAE,6BAA6B,WAAW,EAAE,CAAC;AAAA,MACvF;AAAA,IACF,SAAS,OAAY;AAEnB,cAAQ,KAAK,aAAAA,QAAM,OAAO,YAAO,OAAO,EAAE,+BAA+B,OAAO,WAAW,KAAK,EAAE,CAAC;AAAA,IACrG;AAAA,EACF;AACA,iBAAe,QAAQ,mBAAmB;AAE1C,+BAA6B,eAAe,SAAS,aAAa,MAAM;AACxE,sBAAoB,eAAe,aAAa,OAAO,MAAM;AAC7D,yBAAuB,eAAe,WAAW;AAEjD,aAAW,SAAS,QAAQ;AAC1B,QAAI,MAAM,UAAU;AAClB,kCAA4B,eAAe,OAAO,WAAW;AAAA,IAC/D;AAAA,EACF;AAEA,MAAI,YAAY,MAAM,uBAAuB,SAAS,QAAQ,IAAI,CAAC;AACnE,cAAY,kBAAkB,WAAW,QAAQ,OAAO;AACxD,QAAM,SAAS,yBAAyB,SAAS,OAAO,QAAQ,SAAS,aAAa,WAAW,MAAM;AAIvG,QAAM,WAAc,cAAW,gBAAgB,IAAI,SAAS;AAC5D,QAAM,cAAsC;AAAA,IAC1C,aAAa;AAAA,IACb,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AACA,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,qBAAqB;AAErL,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAGlD,QAAI,IAAI,WAAW,MAAM,EAAG;AAI5B,QAAI,gBAAgB,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,KAAK,kBAAkB,IAAI,GAAG,GAAG;AAC1F,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAEA,EAAG;AAAA,IACI,UAAK,eAAe,UAAU;AAAA,IACnC,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,EACrC;AAEA,QAAM,cAAmB,UAAK,QAAQ,EAAE,QAAQ,EAAE,CAAC;AACnD,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAC1D,EAAG,iBAAc,UAAU,WAAW;AACtC,UAAQ,IAAI,aAAAA,QAAM,MAAM,oCAA+B,QAAQ,EAAE,CAAC;AACpE;AAEA,eAAe,OAAO,MAAW;AAC/B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AAExD,QAAM,aAAa,KAAK,cAAc,KAAK,aAAa,KAAK;AAC7D,QAAM,cAAc,KAAK,eAAe,KAAK,cAAc,KAAK;AAEhE,MAAI,YAAY;AACd,YAAQ,IAAI,aAAAA,QAAM,OAAO,wDAA8C,CAAC;AACxE;AAAA,EACF;AAEA,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAE1D,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,YAAQ,MAAM,aAAAA,QAAM,IAAI,qDAAgD,CAAC;AACzE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,IAAI,aAAAA,QAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AACrF,sBAAoB;AAEpB,MAAI;AAGF,QAAI,CAAC,eAAe,CAAI,cAAgB,UAAK,eAAe,cAAc,CAAC,GAAG;AAC5E,cAAQ,IAAI,aAAAA,QAAM,KAAK,iDAA0C,CAAC;AAClE,yCAAS,eAAe;AAAA,QACtB,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAGA,UAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,QAAO,cAAW,gBAAgB,GAAG;AACnC,cAAQ,IAAI,aAAAA,QAAM,KAAK,oEAA0D,CAAC;AAGlF,MAAG,gBAAa,kBAAuB,UAAK,eAAe,eAAe,CAAC;AAG3E,YAAM,UAAU,YAAY;AAG5B,yCAAS,4BAA4B;AAAA,QACnC,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AACD,cAAQ,IAAI,aAAAA,QAAM,MAAM,2CAAsC,CAAC;AAE/D;AAAA,IACF;AAEA,uCAAS,yBAAyB;AAAA,MAChC,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK;AAAA,QACH,GAAG,QAAQ;AAAA,QACX,OAAO;AAAA,QACP,YAAY;AAAA,MACd;AAAA,IACF,CAAC;AACD,YAAQ,IAAI,aAAAA,QAAM,MAAM,6BAAwB,CAAC;AAAA,EACnD,SAAS,OAAO;AACd,YAAQ,MAAM,aAAAA,QAAM,IAAI,0BAAqB,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,mBAAmB,OAAO,QAAQ,aAAa;AACvD;AAEO,IAAM,cAAc,IAAI,yBAAQ,EACpC,KAAK,MAAM,EACX,YAAY,4CAA4C,EACxD,OAAO,uBAAuB,oBAAoB,MAAM,EACxD,OAAO,yBAAyB,cAAc,WAAW,EACzD,OAAO,oBAAoB,2CAA2C,QAAQ,EAC9E,OAAO,yBAAyB,0EAA0E,EAC1G,OAAO,iBAAiB,+BAA+B,KAAK,EAC5D,OAAO,kBAAkB,4CAA4C,KAAK,EAC1E,OAAO,OAAO,YAAY;AACzB,QAAMC,OAAM,OAAO;AACnB,QAAM,OAAO,OAAO;AACtB,CAAC;;;ACtuEH,IAAAI,oBAAwB;AACxB,IAAAC,MAAoB;AACpB,IAAAC,QAAsB;AACtB,IAAAC,gBAAkB;AAClB,IAAAC,cAAgB;AAET,IAAM,aAAa,IAAI,0BAAQ,EACnC,KAAK,KAAK,EACV,YAAY,uCAAuC,EACnD,SAAS,QAAQ,gDAAgD,EACjE,OAAO,gBAAgB,iCAAiC,gBAAgB,EACxE,OAAO,2BAA2B,2EAA+E,EACjH,OAAO,uBAAuB,6BAA6B,KAAK,EAChE,OAAO,iBAAiB,4BAA4B,KAAK,EACzD,OAAO,CAAC,IAAY,YAAoF;AACvG,QAAM,cAAU,YAAAC,SAAI,uBAAuB,EAAE,MAAM;AACnD,MAAI;AACF,UAAM,cAAc,QAAQ,IAAI;AAChC,UAAM,M
AAW,cAAQ,aAAa,QAAQ,OAAO,gBAAgB;AACrE,QAAI,CAAI,eAAW,GAAG,GAAG;AACvB,MAAG,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,IACvC;AAGA,UAAM,aAAa,GAAG,KAAK,EAAE,QAAQ,oBAAoB,GAAG;AAC5D,UAAM,WAAgB,WAAK,KAAK,GAAG,UAAU,YAAY;AAEzD,QAAO,eAAW,QAAQ,GAAG;AAC3B,cAAQ,KAAK,wBAA6B,eAAS,aAAa,QAAQ,CAAC,EAAE;AAC3E,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,UAAU,OAAO,QAAQ,WAAW,KAAK,KAAK;AACpD,UAAM,aAAa,OAAO,QAAQ,UAAU,KAAK,KAAK;AACtD,UAAM,eAAe,QAAQ,WACzB,gBAAgB,QAAQ,QAAQ;AAAA,IAChC;AAEJ,UAAM,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAgBV,OAAO;AAAA,gBACJ,UAAU;AAAA,EACxB,YAAY;AAAA;AAAA;AAAA,SAGL,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA,oBAKS,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAahB,IAAG,kBAAc,UAAU,UAAU,OAAO;AAE5C,YAAQ;AAAA,MACN,mBAAmB,cAAAC,QAAM,KAAU,eAAS,aAAa,QAAQ,CAAC,CAAC;AAAA,YACpD,cAAAA,QAAM,OAAO,yCAAyC,CAAC;AAAA,IACxE;AAAA,EACF,SAAS,OAAY;AACnB,YAAQ,KAAK,2BAA2B;AACxC,YAAQ,MAAM,cAAAA,QAAM,IAAI,OAAO,SAAS,OAAO,WAAW,OAAO,KAAK,CAAC,CAAC;AACxE,YAAQ,WAAW;AAAA,EACrB;AACF,CAAC;;;AFnFH,IAAM,UAAU,IAAI,0BAAQ;AAE5B,QACG,KAAK,WAAW,EAChB,YAAY,wDAAwD,EACpE,QAAQ,OAAO;AAElB,QAAQ,WAAW,WAAW;AAC9B,QAAQ,WAAW,UAAU;AAE7B,QAAQ,MAAM,QAAQ,IAAI;AAE1B,IAAM,cAAc;","names":["import_commander","chalk","build","ora","output","module","import_commander","fs","path","import_chalk","import_ora","ora","chalk"]}