@microfox/ai-worker-cli 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +3287 -32
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +3286 -32
- package/dist/index.js.map +1 -1
- package/package.json +3 -2
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/index.ts","../src/commands/push.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { Command } from 'commander';\nimport { pushCommand } from './commands/push.js';\n\nconst program = new Command();\n\nprogram\n .name('ai-worker')\n .description('CLI tooling for deploying ai-router background workers')\n .version('0.1.0');\n\nprogram.addCommand(pushCommand);\n\nprogram.parse(process.argv);\n\nconst aiWorkerCli = program;\nexport { aiWorkerCli };\n","import { Command } from 'commander';\nimport * as esbuild from 'esbuild';\nimport { execSync } from 'child_process';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { pathToFileURL } from 'url';\nimport { builtinModules } from 'module';\nimport { glob } from 'glob';\nimport * as yaml from 'js-yaml';\nimport chalk from 'chalk';\nimport ora from 'ora';\n\nconst NODE_BUILTINS = new Set(\n builtinModules.map((m) => (m.startsWith('node:') ? m.slice('node:'.length) : m))\n);\n\nfunction isBuiltinModule(specifier: string): boolean {\n const s = specifier.startsWith('node:')\n ? specifier.slice('node:'.length)\n : specifier;\n return NODE_BUILTINS.has(s);\n}\n\nfunction getPackageNameFromSpecifier(specifier: string): string {\n // Scoped packages: @scope/name/...\n if (specifier.startsWith('@')) {\n const [scope, name] = specifier.split('/');\n return name ? `${scope}/${name}` : specifier;\n }\n // Unscoped: name/...\n return specifier.split('/')[0];\n}\n\nfunction tryResolveLocalImport(fromFile: string, specifier: string): string | null {\n const baseDir = path.dirname(fromFile);\n const raw = path.resolve(baseDir, specifier);\n\n // Direct file hits\n const candidates = [\n raw,\n `${raw}.ts`,\n `${raw}.tsx`,\n `${raw}.js`,\n `${raw}.mjs`,\n `${raw}.cjs`,\n ];\n for (const c of candidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n\n // Directory index hits\n if (fs.existsSync(raw) && fs.statSync(raw).isDirectory()) {\n const idxCandidates = [\n path.join(raw, 'index.ts'),\n path.join(raw, 'index.tsx'),\n path.join(raw, 'index.js'),\n path.join(raw, 'index.mjs'),\n path.join(raw, 'index.cjs'),\n ];\n for (const c of idxCandidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n }\n\n return null;\n}\n\nfunction extractImportSpecifiers(source: string): string[] {\n const specs: string[] = [];\n\n // import ... from 'x' / export ... from 'x'\n // NOTE: we intentionally ignore \"import type ... 
from\" because it's type-only.\n const re1 =\n /(?:^|\\n)\\s*(?!import\\s+type)(?:import|export)\\s+[\\s\\S]*?\\sfrom\\s*['\"]([^'\"]+)['\"]/g;\n for (const match of source.matchAll(re1)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // import('x')\n const re2 = /import\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re2)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // require('x')\n const re3 = /require\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re3)) {\n if (match[1]) specs.push(match[1]);\n }\n\n return specs;\n}\n\nfunction extractEnvVarUsageFromSource(source: string): {\n runtimeKeys: Set<string>;\n buildtimeKeys: Set<string>;\n} {\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n // process.env.KEY / process.env?.KEY\n const reProcessDot = /\\bprocess\\.env\\??\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reProcessDot)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // process.env['KEY'] / process.env[\"KEY\"]\n const reProcessBracket = /\\bprocess\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reProcessBracket)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // import.meta.env.KEY\n const reImportMetaDot = /\\bimport\\.meta\\.env\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reImportMetaDot)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n // import.meta.env['KEY']\n const reImportMetaBracket = /\\bimport\\.meta\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reImportMetaBracket)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n return { runtimeKeys, buildtimeKeys };\n}\n\nasync function collectEnvUsageForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<{ runtimeKeys: Set<string>; buildtimeKeys: Set<string> }> {\n void projectRoot; // reserved for future improvements (tsconfig path aliases, etc.)\n\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n\n const usage = extractEnvVarUsageFromSource(src);\n usage.runtimeKeys.forEach((k) => runtimeKeys.add(k));\n usage.buildtimeKeys.forEach((k) => buildtimeKeys.add(k));\n\n const specifiers = extractImportSpecifiers(src);\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) continue;\n if (isBuiltinModule(spec)) continue;\n // External packages are ignored; we only scan local files.\n }\n }\n\n runtimeKeys.delete('');\n buildtimeKeys.delete('');\n runtimeKeys.delete('node');\n buildtimeKeys.delete('node');\n\n return { runtimeKeys, buildtimeKeys };\n}\n\nfunction readJsonFile<T = any>(filePath: string): T | null {\n try {\n return JSON.parse(fs.readFileSync(filePath, 'utf-8')) as T;\n } catch {\n return null;\n }\n}\n\nfunction findMonorepoRoot(startDir: string): string 
{\n let dir = path.resolve(startDir);\n // Walk up until we find a package.json with \"workspaces\" or we hit filesystem root.\n while (true) {\n const pkgPath = path.join(dir, 'package.json');\n if (fs.existsSync(pkgPath)) {\n const pkg = readJsonFile<any>(pkgPath);\n if (pkg?.workspaces) return dir;\n }\n\n const parent = path.dirname(dir);\n if (parent === dir) return startDir; // fallback\n dir = parent;\n }\n}\n\nasync function collectRuntimeDependenciesForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<Set<string>> {\n // Always include these: they're used by generated workers-config / lambda wrapper logic,\n // and are safe to install even if handlers are bundled.\n const deps = new Set<string>(['@microfox/ai-worker', '@aws-sdk/client-sqs']);\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n const specifiers = extractImportSpecifiers(src);\n\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) continue;\n if (isBuiltinModule(spec)) continue;\n\n deps.add(getPackageNameFromSpecifier(spec));\n }\n }\n\n // Filter out anything that isn't an npm package name\n deps.delete('');\n deps.delete('node');\n\n // Filter devDependencies\n deps.delete('serverless');\n deps.delete('serverless-offline');\n deps.delete('@aws-sdk/client-sqs');\n deps.delete('@microfox/ai-worker')\n return deps;\n}\n\nfunction buildDependenciesMap(projectRoot: string, deps: Set<string>): Record<string, string> {\n const projectPkg =\n readJsonFile<any>(path.join(projectRoot, 'package.json')) || {};\n const projectDeps: Record<string, string> = projectPkg.dependencies || {};\n const projectDevDeps: Record<string, string> = projectPkg.devDependencies || {};\n\n // Try to also source versions from workspace packages (ai-worker / ai-worker-cli)\n const repoRoot = findMonorepoRoot(projectRoot);\n const workerPkg =\n readJsonFile<any>(path.join(repoRoot, 'packages', 'ai-worker', 'package.json')) ||\n {};\n const workerCliPkg =\n readJsonFile<any>(\n path.join(repoRoot, 'packages', 'ai-worker-cli', 'package.json')\n ) || {};\n\n const workspaceDeps: Record<string, string> = {\n ...(workerPkg.dependencies || {}),\n ...(workerPkg.devDependencies || {}),\n ...(workerCliPkg.dependencies || {}),\n ...(workerCliPkg.devDependencies || {}),\n };\n\n const out: Record<string, string> = {};\n for (const dep of Array.from(deps).sort()) {\n const range =\n projectDeps[dep] ||\n projectDevDeps[dep] ||\n workspaceDeps[dep];\n // Only add deps that the project or workspace already declares (e.g. 
in package.json).\n // Skip subpath imports like @tokenlens/helpers that are not real packages and not in package.json.\n if (range) {\n out[dep] = String(range);\n }\n }\n\n return out;\n}\n\ninterface WorkerInfo {\n id: string;\n filePath: string;\n // Module path WITHOUT extension and WITHOUT \".handler\" suffix.\n // Example: \"handlers/agents/test/test\"\n handlerPath: string;\n workerConfig?: {\n timeout?: number;\n memorySize?: number;\n layers?: string[];\n schedule?: any; // Schedule config: string, object, or array of either\n sqs?: {\n maxReceiveCount?: number;\n messageRetentionPeriod?: number;\n visibilityTimeout?: number;\n deadLetterMessageRetentionPeriod?: number;\n };\n };\n}\n\ninterface ServerlessConfig {\n service: string;\n custom?: Record<string, any>;\n package: {\n excludeDevDependencies: boolean;\n individually?: boolean;\n patterns: string[];\n };\n provider: {\n name: string;\n runtime: string;\n region: string;\n stage: string;\n versionFunctions?: boolean;\n environment: Record<string, string | Record<string, any>> | string;\n iam: {\n role: {\n statements: Array<{\n Effect: string;\n Action: string[];\n Resource: string | Array<string | Record<string, any>>;\n }>;\n };\n };\n };\n plugins: string[];\n functions: Record<string, any>;\n resources: {\n Resources: Record<string, any>;\n Outputs: Record<string, any>;\n };\n}\n\nexport function getServiceNameFromProjectId(projectId: string): string {\n const cleanedProjectId = projectId.replace(/-/g, '').slice(0, 15);\n return `p-${cleanedProjectId}`;\n}\n\n/**\n * Validates the environment and dependencies.\n */\nfunction validateEnvironment(): void {\n // We no longer strictly require global serverless since we'll install it locally in the temp dir\n // But we do need npm\n try {\n execSync('npm --version', { stdio: 'ignore' });\n } catch (error) {\n console.error(chalk.red('❌ npm is not installed or not in PATH.'));\n process.exit(1);\n }\n}\n\n/**\n * Scans for all *.worker.ts files in app/ai directory.\n */\nasync function scanWorkers(aiPath: string = 'app/ai'): Promise<WorkerInfo[]> {\n const pattern = path.join(aiPath, '**/*.worker.ts').replace(/\\\\/g, '/');\n const files = await glob(pattern);\n\n const workers: WorkerInfo[] = [];\n\n for (const filePath of files) {\n try {\n // Try to dynamically import the worker file to get the actual workerConfig\n // This is more reliable than parsing the file as text\n let workerConfig: WorkerInfo['workerConfig'] | undefined;\n let workerId: string | undefined;\n\n // For now, just extract the ID using regex\n // We'll import the workerConfig from the bundled handlers later\n\n // Fallback to regex parsing if import didn't work\n if (!workerId) {\n const content = fs.readFileSync(filePath, 'utf-8');\n // Match createWorker with optional type parameters: createWorker<...>({ id: '...' })\n // or createWorker({ id: '...' 
})\n const idMatch = content.match(/createWorker\\s*(?:<[^>]+>)?\\s*\\(\\s*\\{[\\s\\S]*?id:\\s*['\"]([^'\"]+)['\"]/);\n if (!idMatch) {\n console.warn(chalk.yellow(`⚠️ Skipping ${filePath}: No worker ID found`));\n continue;\n }\n workerId = idMatch[1];\n }\n\n // Generate handler path (relative to serverless root)\n // Convert app/ai/agents/my-worker.worker.ts -> handlers/my-worker\n const relativePath = path.relative(aiPath, filePath);\n const handlerDir = path.dirname(relativePath);\n const handlerName = path.basename(relativePath, '.worker.ts');\n const handlerPath = path.join('handlers', handlerDir, `${handlerName}`).replace(/\\\\/g, '/');\n\n workers.push({\n id: workerId,\n filePath,\n handlerPath,\n workerConfig,\n });\n } catch (error) {\n console.error(chalk.red(`❌ Error processing ${filePath}:`), error);\n }\n }\n\n return workers;\n}\n\n/**\n * Generates Lambda handler entrypoints for each worker.\n */\nasync function generateHandlers(workers: WorkerInfo[], outputDir: string): Promise<void> {\n const handlersDir = path.join(outputDir, 'handlers');\n\n // Ensure handlers directory exists and is clean\n if (fs.existsSync(handlersDir)) {\n fs.rmSync(handlersDir, { recursive: true, force: true });\n }\n fs.mkdirSync(handlersDir, { recursive: true });\n\n for (const worker of workers) {\n // Create directory structure\n // We output JS files now, so change extension in path\n const handlerFile = path.join(handlersDir, worker.handlerPath.replace('handlers/', '') + '.js');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n // Generate handler entrypoint\n // Convert app/ai/agents/my-worker.worker.ts to import path\n // We need relative path from .serverless-workers/handlers/agent/ to original source\n // Original: /path/to/project/app/ai/agents/my-worker.worker.ts\n // Handler: /path/to/project/.serverless-workers/handlers/agent/my-worker.handler.ts\n // Import should look like: ../../../app/ai/agents/my-worker.worker\n\n const handlerAbsPath = path.resolve(handlerFile);\n const workerAbsPath = path.resolve(worker.filePath);\n\n // Calculate relative path from handler directory to worker file\n let relativeImportPath = path.relative(path.dirname(handlerAbsPath), workerAbsPath);\n\n // Ensure it starts with ./ or ../\n if (!relativeImportPath.startsWith('.')) {\n relativeImportPath = './' + relativeImportPath;\n }\n\n // Remove extension for import\n relativeImportPath = relativeImportPath.replace(/\\.ts$/, '');\n // Normalize slashes for Windows\n relativeImportPath = relativeImportPath.split(path.sep).join('/');\n\n // Detect export: \"export default createWorker\" vs \"export const X = createWorker\"\n const fileContent = fs.readFileSync(worker.filePath, 'utf-8');\n const defaultExport = /export\\s+default\\s+createWorker/.test(fileContent);\n const exportMatch = fileContent.match(/export\\s+(const|let)\\s+(\\w+)\\s*=\\s*createWorker/);\n const exportName = exportMatch ? exportMatch[2] : 'worker';\n\n // 1. Create a temporary TS entrypoint\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n\n const workerRef = defaultExport\n ? 
'workerModule.default'\n : `workerModule.${exportName}`;\n\n // Try to import workerConfig (new pattern) - it might not exist (old pattern)\n const tempEntryContent = `\nimport { createLambdaHandler } from '@microfox/ai-worker/handler';\nimport * as workerModule from '${relativeImportPath}';\n\nconst workerAgent = ${workerRef};\nif (!workerAgent || typeof workerAgent.handler !== 'function') {\n throw new Error('Worker module must export a createWorker result (default or named) with .handler');\n}\n\nexport const handler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);\nexport const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;\n`;\n fs.writeFileSync(tempEntryFile, tempEntryContent);\n\n // 2. Bundle using esbuild\n try {\n // Plugin to fix lazy-cache issue where forOwn is not properly added to utils\n // The issue: require_for_own() is called directly instead of through the lazy-cache proxy\n const fixLazyCachePlugin: esbuild.Plugin = {\n name: 'fix-lazy-cache',\n setup(build) {\n build.onEnd(async (result) => {\n if (result.errors.length > 0) return;\n\n // Read the bundled file\n let bundledCode = fs.readFileSync(handlerFile, 'utf-8');\n let modified = false;\n\n // Fix the lazy-cache pattern in clone-deep/utils.js\n // Pattern: require_for_own(); should be require(\"for-own\", \"forOwn\");\n // This ensures forOwn is properly added to the utils object via lazy-cache\n // Match the pattern more flexibly to handle different whitespace\n const pattern = /(require\\(\"kind-of\",\\s*\"typeOf\"\\);\\s*)require_for_own\\(\\);/g;\n\n if (pattern.test(bundledCode)) {\n bundledCode = bundledCode.replace(\n pattern,\n '$1require(\"for-own\", \"forOwn\");'\n );\n modified = true;\n }\n\n // Fix (0, import_node_module.createRequire)(import_meta.url) - esbuild emits import_meta.url\n // which is undefined in CJS Lambda. Polyfill so createRequire gets a valid file URL.\n if (bundledCode.includes('import_meta.url')) {\n bundledCode = bundledCode.replace(\n /import_meta\\.url/g,\n 'require(\"url\").pathToFileURL(__filename).href'\n );\n modified = true;\n }\n\n // Fix createRequire(undefined) / createRequire(void 0) if any dependency emits that\n const beforeCreateRequire = bundledCode;\n bundledCode = bundledCode.replace(\n /\\bcreateRequire\\s*\\(\\s*(?:undefined|void\\s*0)\\s*\\)/g,\n 'createRequire(require(\"url\").pathToFileURL(__filename).href)'\n );\n if (bundledCode !== beforeCreateRequire) modified = true;\n\n if (modified) {\n fs.writeFileSync(handlerFile, bundledCode, 'utf-8');\n }\n });\n },\n };\n\n await esbuild.build({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n format: 'cjs',\n outfile: handlerFile,\n // We exclude aws-sdk as it's included in Lambda runtime\n // We exclude canvas because it's a binary dependency often problematic in bundling\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n // Force lazy-cache to eagerly load modules during bundling\n // This prevents runtime dynamic require() calls that fail in bundled code\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n // Force bundling of all packages to avoid runtime module resolution issues\n // This ensures clone-deep, lazy-cache, and all transitive deps are bundled\n packages: 'bundle',\n plugins: [fixLazyCachePlugin],\n logLevel: 'error',\n });\n\n // 3. 
Cleanup temp file\n fs.unlinkSync(tempEntryFile);\n\n } catch (error) {\n console.error(chalk.red(`Error bundling handler for ${worker.id}:`), error);\n // Don't delete temp file on error for debugging\n }\n }\n console.log(chalk.green(`✓ Generated ${workers.length} bundled handlers`));\n}\n\nfunction generateDocsHandler(outputDir: string, serviceName: string, stage: string, region: string): void {\n const handlerFile = path.join(outputDir, 'handlers', 'docs.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated docs handler for Microfox compatibility\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n // Return OpenAPI JSON for Microfox\n const openapi = {\n openapi: '3.0.3',\n info: {\n title: 'AI Worker Service',\n version: '1.0.0',\n description: 'Auto-generated OpenAPI for background workers service',\n },\n servers: [\n {\n url: 'https://{apiId}.execute-api.{region}.amazonaws.com/{stage}',\n variables: {\n apiId: { default: 'REPLACE_ME' },\n region: { default: '${region}' },\n stage: { default: '${stage}' },\n },\n },\n ],\n paths: {\n '/docs.json': {\n get: {\n operationId: 'getDocs',\n summary: 'Get OpenAPI schema',\n responses: {\n '200': {\n description: 'OpenAPI JSON',\n content: {\n 'application/json': {\n schema: { type: 'object' },\n },\n },\n },\n },\n },\n },\n '/workers/config': {\n get: {\n operationId: 'getWorkersConfig',\n summary: 'Get workers config (queue urls map)',\n parameters: [\n {\n name: 'x-workers-config-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key header (if configured)',\n },\n ],\n responses: {\n '200': {\n description: 'Workers config map',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n version: { type: 'string' },\n stage: { type: 'string' },\n region: { type: 'string' },\n workers: { type: 'object' },\n },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n '/workers/trigger': {\n post: {\n operationId: 'triggerWorker',\n summary: 'Trigger a worker by sending a raw SQS message body',\n parameters: [\n {\n name: 'workerId',\n in: 'query',\n required: false,\n schema: { type: 'string' },\n description: 'Worker ID (can also be provided in JSON body as workerId)',\n },\n {\n name: 'x-workers-trigger-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key header (if configured)',\n },\n ],\n requestBody: {\n required: true,\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n workerId: { type: 'string' },\n // Prefer sending the exact SQS message body your worker expects\n body: { type: 'object' },\n messageBody: { type: 'string' },\n },\n additionalProperties: true,\n },\n },\n },\n },\n responses: {\n '200': {\n description: 'Enqueued',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n ok: { type: 'boolean' },\n workerId: { type: 'string' },\n stage: { type: 'string' },\n queueName: { type: 
'string' },\n queueUrl: { type: 'string' },\n messageId: { type: 'string' },\n },\n },\n },\n },\n },\n '400': {\n description: 'Bad request',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n },\n 'x-service': {\n serviceName: '${serviceName}',\n stage: '${stage}',\n region: '${region}',\n },\n };\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(openapi, null, 2),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated docs.json handler`));\n}\n\nfunction generateTriggerHandler(outputDir: string, serviceName: string): void {\n const handlerFile = path.join(outputDir, 'handlers', 'workers-trigger.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated worker trigger handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';\n\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nfunction jsonResponse(statusCode: number, body: any): APIGatewayProxyResult {\n return {\n statusCode,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(body),\n };\n}\n\nexport const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {\n // Optional API key\n const apiKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'];\n if (providedKey !== apiKey) {\n return jsonResponse(401, { error: 'Unauthorized' });\n }\n }\n\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n const qsWorkerId = event.queryStringParameters?.workerId;\n\n let parsedBody: any = undefined;\n if (event.body) {\n try {\n parsedBody = JSON.parse(event.body);\n } catch {\n parsedBody = undefined;\n }\n }\n\n const workerId = (parsedBody && parsedBody.workerId) || qsWorkerId;\n if (!workerId || typeof workerId !== 'string') {\n return jsonResponse(400, { error: 'workerId is required (query param workerId or JSON body workerId)' });\n }\n\n // Prefer JSON body fields, otherwise send raw event.body\n let messageBody: string | undefined;\n if (parsedBody && typeof parsedBody.messageBody === 'string') {\n messageBody = parsedBody.messageBody;\n } else if (parsedBody && parsedBody.body !== undefined) {\n messageBody = typeof parsedBody.body === 'string' ? 
parsedBody.body : JSON.stringify(parsedBody.body);\n } else if (event.body) {\n messageBody = event.body;\n }\n\n if (!messageBody) {\n return jsonResponse(400, { error: 'body/messageBody is required' });\n }\n\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n const sqs = new SQSClient({ region });\n\n let queueUrl: string;\n try {\n const urlRes = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (!urlRes.QueueUrl) {\n return jsonResponse(404, { error: 'Queue URL not found', queueName });\n }\n queueUrl = String(urlRes.QueueUrl);\n } catch (e: any) {\n return jsonResponse(404, { error: 'Queue does not exist or not accessible', queueName, message: String(e?.message || e) });\n }\n\n try {\n const sendRes = await sqs.send(new SendMessageCommand({ QueueUrl: queueUrl, MessageBody: messageBody }));\n return jsonResponse(200, {\n ok: true,\n workerId,\n stage,\n queueName,\n queueUrl,\n messageId: sendRes.MessageId || null,\n });\n } catch (e: any) {\n return jsonResponse(500, { error: 'Failed to send message', message: String(e?.message || e) });\n }\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle',\n logLevel: 'error',\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated /workers/trigger handler`));\n}\n\n/**\n * Generates workers-config Lambda handler.\n */\nfunction generateWorkersConfigHandler(\n outputDir: string,\n workers: WorkerInfo[],\n serviceName: string\n): void {\n // We'll bundle this one too\n const handlerFile = path.join(outputDir, 'handlers', 'workers-config.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n // Ensure handlers directory exists and is clean for config handler\n if (fs.existsSync(handlerDir) && !fs.existsSync(handlerFile)) {\n // Don't wipe if we already cleaned it in generateHandlers, unless it's a diff dir\n } else if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated workers-config Lambda handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';\n\n// Worker IDs embedded at build time so this endpoint doesn't depend on any generated files.\nconst WORKER_IDS: string[] = ${JSON.stringify(workers.map(w => w.id), null, 2)};\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n// ... 
same logic ...\n // Check API key if configured\n const apiKey = process.env.WORKERS_CONFIG_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-config-key'] || event.headers['X-Workers-Config-Key'];\n if (providedKey !== apiKey) {\n return {\n statusCode: 401,\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ error: 'Unauthorized' }),\n };\n }\n }\n\n // Stage resolution:\n // - Prefer API Gateway stage (microfox tends to deploy APIs on \"prod\")\n // - Fallback to ENVIRONMENT/STAGE env vars\n // - Default to \"prod\" (safer for microfox) if nothing else is set\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n // Resolve queue URLs dynamically via SQS so we return actual URLs.\n // NOTE: Node 20 Lambda runtime does NOT guarantee 'aws-sdk' v2 is available.\n // We use AWS SDK v3 and bundle it into this handler.\n const sqs = new SQSClient({ region });\n const workers: Record<string, { queueUrl: string; region: string }> = {};\n const attemptedQueueNames: string[] = [];\n const errors: Array<{ workerId: string; queueName: string; message: string; name?: string }> = [];\n const debug = event.queryStringParameters?.debug === '1' || event.queryStringParameters?.debug === 'true';\n\n await Promise.all(\n WORKER_IDS.map(async (workerId) => {\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n attemptedQueueNames.push(queueName);\n try {\n const result = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (result?.QueueUrl) {\n workers[workerId] = { queueUrl: String(result.QueueUrl), region };\n }\n } catch (e) {\n const err = e as any;\n const message = String(err?.message || err || 'Unknown error');\n const name = err?.name ? String(err.name) : undefined;\n // Log so CloudWatch shows what's going on (nonexistent queue vs permission vs region).\n console.error('[workers-config] getQueueUrl failed', { workerId, queueName, name, message });\n errors.push({ workerId, queueName, name, message });\n }\n })\n );\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify({\n version: '1.0.0',\n stage,\n region,\n workers,\n ...(debug ? 
{ attemptedQueueNames, errors } : {}),\n }),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated workers-config handler`));\n}\n\n/**\n * Reads environment variables from .env file.\n */\nfunction loadEnvVars(envPath: string = '.env'): Record<string, string> {\n const env: Record<string, string> = {};\n\n if (!fs.existsSync(envPath)) {\n console.warn(chalk.yellow(`⚠️ .env file not found at ${envPath}`));\n return env;\n }\n\n const content = fs.readFileSync(envPath, 'utf-8');\n const lines = content.split('\\n');\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed || trimmed.startsWith('#')) continue;\n\n const match = trimmed.match(/^([^=]+)=(.*)$/);\n if (match) {\n const key = match[1].trim();\n const value = match[2].trim().replace(/^[\"']|[\"']$/g, '');\n env[key] = value;\n }\n }\n\n return env;\n}\n\n/**\n * Converts schedule configuration to serverless.yml schedule event format.\n * Supports simple strings, configuration objects, and arrays of both.\n */\nfunction processScheduleEvents(scheduleConfig: any): any[] {\n if (!scheduleConfig) {\n return [];\n }\n\n const events: any[] = [];\n\n // Normalize to array\n const schedules = Array.isArray(scheduleConfig) ? scheduleConfig : [scheduleConfig];\n\n for (const schedule of schedules) {\n // Simple string format: 'rate(2 hours)' or 'cron(0 12 * * ? *)'\n if (typeof schedule === 'string') {\n events.push({\n schedule: schedule,\n });\n continue;\n }\n\n // Full configuration object\n if (typeof schedule === 'object' && schedule !== null) {\n const scheduleEvent: any = { schedule: {} };\n\n // Handle rate - can be string or array of strings\n if (schedule.rate) {\n if (Array.isArray(schedule.rate)) {\n // Multiple rate expressions\n scheduleEvent.schedule.rate = schedule.rate;\n } else {\n // Single rate expression\n scheduleEvent.schedule.rate = schedule.rate;\n }\n } else {\n // If no rate specified but we have a schedule object, skip it\n continue;\n }\n\n // Optional fields\n if (schedule.enabled !== undefined) {\n scheduleEvent.schedule.enabled = schedule.enabled;\n }\n if (schedule.input !== undefined) {\n scheduleEvent.schedule.input = schedule.input;\n }\n if (schedule.inputPath !== undefined) {\n scheduleEvent.schedule.inputPath = schedule.inputPath;\n }\n if (schedule.inputTransformer !== undefined) {\n scheduleEvent.schedule.inputTransformer = schedule.inputTransformer;\n }\n if (schedule.name !== undefined) {\n scheduleEvent.schedule.name = schedule.name;\n }\n if (schedule.description !== undefined) {\n scheduleEvent.schedule.description = schedule.description;\n }\n if (schedule.method !== undefined) {\n scheduleEvent.schedule.method = schedule.method;\n }\n if (schedule.timezone !== undefined) {\n scheduleEvent.schedule.timezone = schedule.timezone;\n }\n\n // If schedule object only has rate (or is minimal), we can simplify it\n // Serverless Framework accepts both { schedule: 'rate(...)' } and { schedule: { rate: 'rate(...)' } }\n if (Object.keys(scheduleEvent.schedule).length === 1 && scheduleEvent.schedule.rate) {\n // Simplify to string format if it's just a single rate\n if 
(typeof scheduleEvent.schedule.rate === 'string') {\n events.push({\n schedule: scheduleEvent.schedule.rate,\n });\n } else {\n // Keep object format for arrays\n events.push(scheduleEvent);\n }\n } else {\n events.push(scheduleEvent);\n }\n }\n }\n\n return events;\n}\n\n/**\n * Generates serverless.yml configuration.\n */\nfunction generateServerlessConfig(\n workers: WorkerInfo[],\n stage: string,\n region: string,\n envVars: Record<string, string>,\n serviceName: string\n): ServerlessConfig {\n // Create SQS queues for each worker\n const resources: ServerlessConfig['resources'] = {\n Resources: {},\n Outputs: {},\n };\n\n const queueArns: Array<string | Record<string, any>> = [];\n\n // Update provider environment to use file(env.json)\n const providerEnvironment: any = {\n STAGE: stage,\n NODE_ENV: stage,\n };\n\n // Custom configuration including serverless-offline\n const customConfig: Record<string, any> = {\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n 'serverless-offline': {\n httpPort: 4000,\n lambdaPort: 4002,\n useChildProcesses: true,\n useWorkerThreads: true,\n noCookieValidation: true,\n allowCache: true,\n hideStackTraces: false,\n disableCookieValidation: true,\n noTimeout: true,\n environment: '\\${file(env.json)}',\n }\n };\n\n for (const worker of workers) {\n const queueName = `WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}`;\n const queueLogicalId = `${queueName}${stage}`;\n const dlqLogicalId = `${queueName}DLQ${stage}`;\n\n const sqsCfg = worker.workerConfig?.sqs;\n const retention =\n typeof sqsCfg?.messageRetentionPeriod === 'number'\n ? sqsCfg.messageRetentionPeriod\n : 1209600; // 14 days\n const dlqRetention =\n typeof sqsCfg?.deadLetterMessageRetentionPeriod === 'number'\n ? sqsCfg.deadLetterMessageRetentionPeriod\n : retention;\n const visibilityTimeout =\n typeof sqsCfg?.visibilityTimeout === 'number'\n ? sqsCfg.visibilityTimeout\n : (worker.workerConfig?.timeout || 300) + 60; // Add buffer\n const maxReceiveCountRaw =\n typeof sqsCfg?.maxReceiveCount === 'number' ? 
sqsCfg.maxReceiveCount : 1;\n // SQS does not support 0; treat <=0 as 1.\n const maxReceiveCount = Math.max(1, Math.floor(maxReceiveCountRaw));\n\n // DLQ (always create so we can support \"no retries\" mode safely)\n resources.Resources[dlqLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n QueueName: `\\${self:service}-${worker.id}-dlq-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n MessageRetentionPeriod: dlqRetention,\n },\n };\n\n resources.Resources[queueLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n // Use ${self:service} to avoid hardcoding service name\n QueueName: `\\${self:service}-${worker.id}-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n VisibilityTimeout: visibilityTimeout,\n MessageRetentionPeriod: retention,\n RedrivePolicy: {\n deadLetterTargetArn: { 'Fn::GetAtt': [dlqLogicalId, 'Arn'] },\n maxReceiveCount,\n },\n },\n };\n\n resources.Outputs[`${queueLogicalId}Url`] = {\n Description: `Queue URL for worker ${worker.id}`,\n Value: { Ref: queueLogicalId },\n Export: {\n Name: `\\${self:service}-${worker.id}-queue-url`,\n },\n };\n\n queueArns.push({ 'Fn::GetAtt': [queueLogicalId, 'Arn'] });\n }\n\n // Create functions for each worker\n const functions: Record<string, any> = {};\n\n for (const worker of workers) {\n const functionName = `worker${worker.id.replace(/[^a-zA-Z0-9]/g, '')}`;\n\n // Start with SQS event (default)\n const events: any[] = [\n {\n sqs: {\n arn: { 'Fn::GetAtt': [`WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}${stage}`, 'Arn'] },\n batchSize: 1,\n },\n },\n ];\n\n // Add schedule events if configured\n if (worker.workerConfig?.schedule) {\n const scheduleEvents = processScheduleEvents(worker.workerConfig.schedule);\n events.push(...scheduleEvents);\n }\n\n functions[functionName] = {\n // IMPORTANT: Keep AWS handler string to exactly one dot: \"<modulePath>.handler\"\n handler: `${worker.handlerPath}.handler`,\n timeout: worker.workerConfig?.timeout || 300,\n memorySize: worker.workerConfig?.memorySize || 512,\n events,\n };\n\n if (worker.workerConfig?.layers?.length) {\n functions[functionName].layers = worker.workerConfig.layers;\n }\n }\n\n // Add docs.json function for Microfox compatibility\n functions['getDocs'] = {\n handler: 'handlers/docs.handler',\n events: [\n {\n http: {\n path: '/docs.json',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers trigger endpoint (HTTP -> SQS SendMessage)\n functions['triggerWorker'] = {\n handler: 'handlers/workers-trigger.handler',\n events: [\n {\n http: {\n path: '/workers/trigger',\n method: 'POST',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers-config function\n functions['workersConfig'] = {\n handler: 'handlers/workers-config.handler',\n events: [\n {\n http: {\n path: 'workers/config',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // Filter env vars - only include safe ones (exclude secrets that should be in AWS Secrets Manager)\n const safeEnvVars: Record<string, string> = {};\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'WORKERS_', 'REMOTION_'];\n\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n\n for (const [key, value] of Object.entries(envVars)) {\n if (allowedPrefixes.some(prefix => key.startsWith(prefix))) {\n safeEnvVars[key] = value;\n }\n }\n\n // Add ApiEndpoints output for Microfox\n resources.Outputs['ApiEndpoints'] = {\n Description: \"API Endpoints\",\n Value: {\n 
\"Fn::Join\": [\n \"\",\n [\n \"API: https://\",\n { \"Ref\": \"ApiGatewayRestApi\" },\n \".execute-api.\",\n { \"Ref\": \"AWS::Region\" },\n `.amazonaws.com/\\${env:ENVIRONMENT, '${stage}'}`\n ]\n ]\n }\n };\n\n return {\n service: serviceName,\n package: {\n excludeDevDependencies: true,\n individually: true,\n // Handlers are fully bundled by esbuild (packages: 'bundle'); exclude node_modules to stay under Lambda 250 MB limit\n patterns: [\n '!venv/**',\n '!.idea/**',\n '!.vscode/**',\n '!src/**',\n '!node_modules/**',\n '!node_modules/serverless-offline/**',\n '!node_modules/typescript/**',\n '!node_modules/@types/**',\n '!node_modules/aws-sdk/**',\n '!node_modules/@aws-sdk/**'\n ],\n },\n custom: customConfig,\n provider: {\n name: 'aws',\n runtime: 'nodejs20.x',\n region,\n versionFunctions: false,\n // Use ENVIRONMENT from env.json to drive the actual deployed stage (Microfox defaults to prod).\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n environment: '\\${file(env.json)}',\n iam: {\n role: {\n statements: [\n {\n Effect: 'Allow',\n Action: [\n 'sqs:SendMessage',\n 'sqs:ReceiveMessage',\n 'sqs:DeleteMessage',\n 'sqs:GetQueueAttributes',\n ],\n Resource: queueArns,\n },\n {\n Effect: 'Allow',\n Action: ['sqs:GetQueueUrl'],\n // GetQueueUrl is not resource-scoped for unknown queue ARNs, must be '*'\n Resource: '*',\n }\n ],\n },\n },\n },\n plugins: ['serverless-offline'],\n functions,\n resources,\n };\n}\n\n/**\n * Resolves queue URLs after deployment and generates workers-map.generated.ts\n */\nasync function generateWorkersMap(\n stage: string,\n region: string,\n outputDir: string\n): Promise<void> {\n const serverlessDir = path.join(outputDir, '.serverless');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: true });\n }\n\n // Need to scan workers again to get IDs for map generation\n // Or we could save this metadata in the build step.\n // For now, re-scanning is fine.\n const workers = await scanWorkers();\n\n // Try to read CloudFormation outputs\n const stackName = `ai-router-workers-${stage}-${stage}`;\n let queueUrls: Record<string, { queueUrl: string; region: string }> = {};\n\n const spinner = ora('Fetching CloudFormation outputs...').start();\n\n try {\n // Use AWS CLI to get stack outputs\n const output = execSync(\n `aws cloudformation describe-stacks --stack-name ${stackName} --region ${region} --query \"Stacks[0].Outputs\" --output json`,\n { encoding: 'utf-8', stdio: 'pipe' }\n );\n\n const outputs = JSON.parse(output);\n const outputMap: Record<string, string> = {};\n\n for (const output of outputs) {\n const key = output.OutputKey;\n if (key && key.endsWith('Url')) {\n const workerId = key.replace('WorkerQueue', '').replace('Url', '').toLowerCase();\n // The workerId from CF output might have stripped characters, need fuzzy match or consistent naming\n // Currently we use replace(/[^a-zA-Z0-9]/g, '') in CF output name\n outputMap[key] = output.OutputValue;\n }\n }\n\n // Match workers to queue URLs\n for (const worker of workers) {\n const sanitizedId = worker.id.replace(/[^a-zA-Z0-9]/g, '');\n const queueKey = `WorkerQueue${sanitizedId}${stage}Url`;\n\n // Look for key ending with this pattern to handle casing issues if any\n const matchingKey = Object.keys(outputMap).find(k => k.toLowerCase() === queueKey.toLowerCase());\n\n if (matchingKey && outputMap[matchingKey]) {\n queueUrls[worker.id] = {\n queueUrl: outputMap[matchingKey],\n region,\n };\n }\n }\n spinner.succeed('Fetched CloudFormation outputs');\n } catch (error) 
{\n spinner.warn('Could not fetch CloudFormation outputs. Using deterministic queue URLs.');\n for (const worker of workers) {\n queueUrls[worker.id] = {\n queueUrl: `https://sqs.${'${aws:region}'}.amazonaws.com/${'${aws:accountId}'}/${'${self:service}'}-${worker.id}-${stage}`,\n region,\n };\n }\n }\n\n // Generate TypeScript file\n const mapContent = `/**\n * Auto-generated workers map\n * DO NOT EDIT - This file is generated by deploy-workers script\n */\n\nexport const workersMap = ${JSON.stringify(queueUrls, null, 2)} as const;\n`;\n\n const mapFile = path.join(serverlessDir, 'workers-map.generated.ts');\n fs.writeFileSync(mapFile, mapContent);\n console.log(chalk.green(`✓ Generated workers map: ${mapFile}`));\n}\n\nasync function build(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n const aiPath = args['ai-path'] || 'app/ai';\n\n console.log(chalk.blue(`📦 Building workers (stage: ${stage}, region: ${region})...`));\n\n const spinner = ora('Scanning workers...').start();\n const workers = await scanWorkers(aiPath);\n\n if (workers.length === 0) {\n spinner.warn('No workers found.');\n return;\n }\n spinner.succeed(`Found ${workers.length} worker(s)`);\n workers.forEach(w => console.log(chalk.gray(` - ${w.id} (${w.filePath})`)));\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: true });\n }\n\n // Build an accurate dependencies map for Microfox installs:\n // include any npm packages imported by the worker entrypoints (and their local imports),\n // plus runtime packages used by generated handlers.\n const runtimeDeps = await collectRuntimeDependenciesForWorkers(\n workers.map((w) => w.filePath),\n process.cwd()\n );\n const dependencies = buildDependenciesMap(process.cwd(), runtimeDeps);\n\n // Generate package.json for the serverless service (used by Microfox push)\n const packageJson = {\n name: 'ai-router-workers',\n version: '1.0.0',\n description: 'Auto-generated serverless workers',\n private: true,\n dependencies,\n scripts: {\n build: \"echo 'Already compiled.'\",\n },\n devDependencies: {\n serverless: '^3.38.0',\n 'serverless-offline': '^13.3.3',\n '@aws-sdk/client-sqs': '^3.700.0',\n },\n };\n fs.writeFileSync(\n path.join(serverlessDir, 'package.json'),\n JSON.stringify(packageJson, null, 2)\n );\n\n // No tsconfig.json needed as we are deploying bundled JS\n\n const envVars = loadEnvVars();\n\n // Detect env usage from worker entry files + their local dependency graph.\n // We use this to populate env.json with only envs that are actually referenced,\n // but ONLY if they exist in .env (we don't invent values).\n const workerEntryFiles = workers.map((w) => w.filePath);\n const { runtimeKeys: runtimeEnvKeys, buildtimeKeys: buildtimeEnvKeys } =\n await collectEnvUsageForWorkers(workerEntryFiles, process.cwd());\n const referencedEnvKeys = new Set<string>([\n ...Array.from(runtimeEnvKeys),\n ...Array.from(buildtimeEnvKeys),\n ]);\n\n // Light, helpful logging (avoid noisy huge dumps)\n const runtimeList = Array.from(runtimeEnvKeys).sort();\n const buildtimeList = Array.from(buildtimeEnvKeys).sort();\n const missingFromDotEnv = Array.from(referencedEnvKeys)\n .filter((k) => !(k in envVars))\n .sort();\n if (runtimeList.length || buildtimeList.length) {\n console.log(\n chalk.blue(\n `ℹ️ Detected env usage from worker code: runtime=${runtimeList.length}, 
buildtime=${buildtimeList.length}`\n )\n );\n if (missingFromDotEnv.length > 0) {\n console.log(\n chalk.yellow(\n `⚠️ These referenced envs were not found in .env (so they will NOT be written to env.json): ${missingFromDotEnv\n .slice(0, 25)\n .join(', ')}${missingFromDotEnv.length > 25 ? ' ...' : ''}`\n )\n );\n }\n }\n\n let serviceName = (args['service-name'] as string | undefined)?.trim() || `ai-router-workers-${stage}`;\n\n // Check for microfox.json to customize service name\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n try {\n const microfoxConfig = JSON.parse(fs.readFileSync(microfoxJsonPath, 'utf-8'));\n if (microfoxConfig.projectId) {\n // Only override if user did not explicitly provide a service name\n if (!(args['service-name'] as string | undefined)?.trim()) {\n serviceName = getServiceNameFromProjectId(microfoxConfig.projectId);\n }\n console.log(chalk.blue(`ℹ️ Using service name from microfox.json: ${serviceName}`));\n }\n } catch (error) {\n console.warn(chalk.yellow('⚠️ Failed to parse microfox.json, using default service name'));\n }\n }\n\n ora('Generating handlers...').start().succeed('Generated handlers');\n await generateHandlers(workers, serverlessDir);\n\n // Now import the bundled handlers to extract workerConfig\n const extractSpinner = ora('Extracting worker configs from bundled handlers...').start();\n for (const worker of workers) {\n try {\n const handlerFile = path.join(serverlessDir, worker.handlerPath + '.js');\n if (fs.existsSync(handlerFile)) {\n // Convert absolute path to file:// URL for ESM import (required on Windows)\n const handlerUrl = pathToFileURL(path.resolve(handlerFile)).href;\n\n try {\n // Import the bundled handler (which exports exportedWorkerConfig)\n // Note: The handler might have runtime errors, but we only need the exportedWorkerConfig\n const module = await import(handlerUrl);\n\n // exportedWorkerConfig is exported directly from the handler file\n if (module.exportedWorkerConfig) {\n worker.workerConfig = module.exportedWorkerConfig;\n if (module.exportedWorkerConfig.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${module.exportedWorkerConfig.layers.length} layer(s)`));\n }\n } else {\n console.warn(chalk.yellow(` ⚠ ${worker.id}: exportedWorkerConfig not found in handler`));\n }\n } catch (importError: any) {\n // If import fails due to runtime errors (e.g., lazy-cache initialization in bundled code),\n // try to extract config from source file as fallback. 
This is expected for some bundled handlers.\n // The fallback will work fine, and the Lambda runtime will handle the bundled code correctly.\n console.log(chalk.gray(` ℹ ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || 'runtime error'}...)`));\n\n // Fallback: try to read the source worker file and extract workerConfig\n try {\n const sourceContent = fs.readFileSync(worker.filePath, 'utf-8');\n // Look for exported workerConfig\n const workerConfigMatch = sourceContent.match(/export\\s+const\\s+workerConfig[^=]*=\\s*(\\{[\\s\\S]*?\\});/);\n if (workerConfigMatch) {\n // Try to parse it as JSON (after cleaning up comments)\n let configStr = workerConfigMatch[1]\n .replace(/\\/\\*[\\s\\S]*?\\*\\//g, '') // Remove block comments\n .replace(/(^|\\s)\\/\\/[^\\n]*/gm, '$1'); // Remove line comments\n\n // Use Function constructor to parse the object (safer than eval)\n const configObj = new Function('return ' + configStr)();\n if (configObj && (configObj.layers || configObj.timeout || configObj.memorySize || configObj.schedule)) {\n worker.workerConfig = configObj;\n if (configObj.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${configObj.layers.length} layer(s) from source file`));\n }\n if (configObj.schedule) {\n console.log(chalk.gray(` ✓ ${worker.id}: found schedule configuration`));\n }\n }\n }\n } catch (fallbackError) {\n // If fallback also fails, just log and continue\n console.warn(chalk.yellow(` ⚠ ${worker.id}: fallback extraction also failed, using defaults`));\n }\n }\n } else {\n console.warn(chalk.yellow(` ⚠ ${worker.id}: handler file not found: ${handlerFile}`));\n }\n } catch (error: any) {\n // If everything fails, workerConfig will remain undefined (fallback to defaults)\n console.warn(chalk.yellow(` ⚠ ${worker.id}: failed to extract config: ${error?.message || error}`));\n }\n }\n extractSpinner.succeed('Extracted configs');\n\n generateWorkersConfigHandler(serverlessDir, workers, serviceName);\n generateDocsHandler(serverlessDir, serviceName, stage, region);\n generateTriggerHandler(serverlessDir, serviceName);\n\n const config = generateServerlessConfig(workers, stage, region, envVars, serviceName);\n\n // Always generate env.json now as serverless.yml relies on it.\n // Microfox deploys APIs on prod by default; when microfox.json exists, default ENVIRONMENT/STAGE to \"prod\".\n const envStage = fs.existsSync(microfoxJsonPath) ? 
'prod' : stage;\n const safeEnvVars: Record<string, string> = {\n ENVIRONMENT: envStage,\n STAGE: envStage,\n NODE_ENV: envStage,\n };\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'WORKERS_', 'REMOTION_'];\n\n for (const [key, value] of Object.entries(envVars)) {\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n if (key.startsWith('AWS_')) continue;\n\n // Keep legacy behavior for known-safe prefixes,\n // and also include any env that is referenced by worker code.\n if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {\n safeEnvVars[key] = value;\n }\n }\n\n fs.writeFileSync(\n path.join(serverlessDir, 'env.json'),\n JSON.stringify(safeEnvVars, null, 2)\n );\n\n const yamlContent = yaml.dump(config, { indent: 2 });\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n fs.writeFileSync(yamlPath, yamlContent);\n console.log(chalk.green(`✓ Generated serverless.yml: ${yamlPath}`));\n}\n\nasync function deploy(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n // Commander passes option names as camelCase (e.g. skipDeploy, skipInstall)\n const skipDeploy = args.skipDeploy ?? args['skip-deploy'] ?? false;\n const skipInstall = args.skipInstall ?? args['skip-install'] ?? false;\n\n if (skipDeploy) {\n console.log(chalk.yellow('⏭️ Skipping deployment (--skip-deploy flag)'));\n return;\n }\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n\n if (!fs.existsSync(yamlPath)) {\n console.error(chalk.red('❌ serverless.yml not found. 
Run \"build\" first.'));\n process.exit(1);\n }\n\n console.log(chalk.blue(`🚀 Deploying to AWS (stage: ${stage}, region: ${region})...`));\n validateEnvironment();\n\n try {\n // Install dependencies in the serverless directory if node_modules doesn't exist\n // Skip if --skip-install is provided\n if (!skipInstall && !fs.existsSync(path.join(serverlessDir, 'node_modules'))) {\n console.log(chalk.blue('📦 Installing serverless dependencies...'));\n execSync('npm install', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n }\n\n // Check for microfox.json in project root\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n console.log(chalk.blue('ℹ️ Found microfox.json, deploying via Microfox Cloud...'));\n\n // Copy microfox.json to .serverless-workers directory\n fs.copyFileSync(microfoxJsonPath, path.join(serverlessDir, 'microfox.json'));\n\n // Load and filter environment variables\n const envVars = loadEnvVars();\n // env.json is already generated by build()\n\n execSync('npx microfox@latest push', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n console.log(chalk.green('✓ Deployment triggered via Microfox!'));\n // We don't generate workers map for Microfox push as it handles its own routing\n return;\n }\n\n execSync('npx serverless deploy', {\n cwd: serverlessDir,\n stdio: 'inherit',\n env: {\n ...process.env,\n STAGE: stage,\n AWS_REGION: region,\n },\n });\n console.log(chalk.green('✓ Deployment complete!'));\n } catch (error) {\n console.error(chalk.red('❌ Deployment failed'));\n process.exit(1);\n }\n\n await generateWorkersMap(stage, region, serverlessDir);\n}\n\nexport const pushCommand = new Command()\n .name('push')\n .description('Build and deploy background workers to AWS')\n .option('-s, --stage <stage>', 'Deployment stage', 'prod')\n .option('-r, --region <region>', 'AWS region', 'us-east-1')\n .option('--ai-path <path>', 'Path to AI directory containing workers', 'app/ai')\n .option('--service-name <name>', 'Override serverless service name (defaults to ai-router-workers-<stage>)')\n .option('--skip-deploy', 'Skip deployment, only build', false)\n .option('--skip-install', 'Skip npm install in serverless directory', false)\n .action(async (options) => {\n await build(options);\n await deploy(options);\n 
});\n\n"],"mappings":";;;AAEA,SAAS,WAAAA,gBAAe;;;ACFxB,SAAS,eAAe;AACxB,YAAY,aAAa;AACzB,SAAS,gBAAgB;AACzB,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,SAAS,qBAAqB;AAC9B,SAAS,sBAAsB;AAC/B,SAAS,YAAY;AACrB,YAAY,UAAU;AACtB,OAAO,WAAW;AAClB,OAAO,SAAS;AAEhB,IAAM,gBAAgB,IAAI;AAAA,EACxB,eAAe,IAAI,CAAC,MAAO,EAAE,WAAW,OAAO,IAAI,EAAE,MAAM,QAAQ,MAAM,IAAI,CAAE;AACjF;AAEA,SAAS,gBAAgB,WAA4B;AACnD,QAAM,IAAI,UAAU,WAAW,OAAO,IAClC,UAAU,MAAM,QAAQ,MAAM,IAC9B;AACJ,SAAO,cAAc,IAAI,CAAC;AAC5B;AAEA,SAAS,4BAA4B,WAA2B;AAE9D,MAAI,UAAU,WAAW,GAAG,GAAG;AAC7B,UAAM,CAAC,OAAO,IAAI,IAAI,UAAU,MAAM,GAAG;AACzC,WAAO,OAAO,GAAG,KAAK,IAAI,IAAI,KAAK;AAAA,EACrC;AAEA,SAAO,UAAU,MAAM,GAAG,EAAE,CAAC;AAC/B;AAEA,SAAS,sBAAsB,UAAkB,WAAkC;AACjF,QAAM,UAAe,aAAQ,QAAQ;AACrC,QAAM,MAAW,aAAQ,SAAS,SAAS;AAG3C,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,EACR;AACA,aAAW,KAAK,YAAY;AAC1B,QAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,EAC1D;AAGA,MAAO,cAAW,GAAG,KAAQ,YAAS,GAAG,EAAE,YAAY,GAAG;AACxD,UAAM,gBAAgB;AAAA,MACf,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,WAAW;AAAA,IAC5B;AACA,eAAW,KAAK,eAAe;AAC7B,UAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,IAC1D;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,wBAAwB,QAA0B;AACzD,QAAM,QAAkB,CAAC;AAIzB,QAAM,MACJ;AACF,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAEA,SAAO;AACT;AAEA,SAAS,6BAA6B,QAGpC;AACA,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAGtC,QAAM,eAAe;AACrB,aAAW,SAAS,OAAO,SAAS,YAAY,GAAG;AACjD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,mBAAmB;AACzB,aAAW,SAAS,OAAO,SAAS,gBAAgB,GAAG;AACrD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,kBAAkB;AACxB,aAAW,SAAS,OAAO,SAAS,eAAe,GAAG;AACpD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAGA,QAAM,sBAAsB;AAC5B,aAAW,SAAS,OAAO,SAAS,mBAAmB,GAAG;AACxD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAEA,SAAO,EAAE,aAAa,cAAc;AACtC;AAEA,eAAe,0BACb,kBACA,aACmE;AACnE,OAAK;AAEL,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAEtC,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAE/C,UAAM,QAAQ,6BAA6B,GAAG;AAC9C,UAAM,YAAY,QAAQ,CAAC,MAAM,YAAY,IAAI,CAAC,CAAC;AACnD,UAAM,cAAc,QAAQ,CAAC,MAAM,cAAc,IAAI,CAAC,CAAC;AAEvD,UAAM,aAAa,wBAAwB,GAAG;AAC9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAAA,IAE7B;AAAA,EACF;AAEA,cAAY,OAAO,EAAE;AACrB,gBAAc,OAAO,EAAE;AACvB,cAAY,OAAO,MAAM;AACzB,gBAAc,OAAO,MAAM;AAE3B,SAAO,EAAE,aAAa,cAAc;AACtC;AAEA,SAAS,aAAsB,UAA4B;AACzD,MAAI;AACF,WAAO,KAAK,MAAS,gBAAa,UAAU,OAAO,CAAC;AAAA,EACtD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,iBAAiB,UAA0B;AAClD,MAAI,MAAW,aAAQ,QAAQ;AAE/B,SAAO,MAAM;AACX,UAAM,UAAe,UAAK,KAAK,cAAc;AAC7C,QAAO,cAAW,OAAO,GAAG;AAC1B,YAAM,MAAM,aAAkB,OAAO;AACrC,UAAI,KAAK,WAAY,QAAO;AAAA,IAC9B;AAEA,UAAM,SAAc,aAAQ,GAAG;AAC/B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,eAAe,qCACb,kBA
CA,aACsB;AAGtB,QAAM,OAAO,oBAAI,IAAY,CAAC,uBAAuB,qBAAqB,CAAC;AAC3E,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAC/C,UAAM,aAAa,wBAAwB,GAAG;AAE9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAE3B,WAAK,IAAI,4BAA4B,IAAI,CAAC;AAAA,IAC5C;AAAA,EACF;AAGA,OAAK,OAAO,EAAE;AACd,OAAK,OAAO,MAAM;AAGlB,OAAK,OAAO,YAAY;AACxB,OAAK,OAAO,oBAAoB;AAChC,OAAK,OAAO,qBAAqB;AACjC,OAAK,OAAO,qBAAqB;AACjC,SAAO;AACT;AAEA,SAAS,qBAAqB,aAAqB,MAA2C;AAC5F,QAAM,aACJ,aAAuB,UAAK,aAAa,cAAc,CAAC,KAAK,CAAC;AAChE,QAAM,cAAsC,WAAW,gBAAgB,CAAC;AACxE,QAAM,iBAAyC,WAAW,mBAAmB,CAAC;AAG9E,QAAM,WAAW,iBAAiB,WAAW;AAC7C,QAAM,YACJ,aAAuB,UAAK,UAAU,YAAY,aAAa,cAAc,CAAC,KAC9E,CAAC;AACH,QAAM,eACJ;AAAA,IACO,UAAK,UAAU,YAAY,iBAAiB,cAAc;AAAA,EACjE,KAAK,CAAC;AAER,QAAM,gBAAwC;AAAA,IAC5C,GAAI,UAAU,gBAAgB,CAAC;AAAA,IAC/B,GAAI,UAAU,mBAAmB,CAAC;AAAA,IAClC,GAAI,aAAa,gBAAgB,CAAC;AAAA,IAClC,GAAI,aAAa,mBAAmB,CAAC;AAAA,EACvC;AAEA,QAAM,MAA8B,CAAC;AACrC,aAAW,OAAO,MAAM,KAAK,IAAI,EAAE,KAAK,GAAG;AACzC,UAAM,QACJ,YAAY,GAAG,KACf,eAAe,GAAG,KAClB,cAAc,GAAG;AAGnB,QAAI,OAAO;AACT,UAAI,GAAG,IAAI,OAAO,KAAK;AAAA,IACzB;AAAA,EACF;AAEA,SAAO;AACT;AAuDO,SAAS,4BAA4B,WAA2B;AACrE,QAAM,mBAAmB,UAAU,QAAQ,MAAM,EAAE,EAAE,MAAM,GAAG,EAAE;AAChE,SAAO,KAAK,gBAAgB;AAC9B;AAKA,SAAS,sBAA4B;AAGnC,MAAI;AACF,aAAS,iBAAiB,EAAE,OAAO,SAAS,CAAC;AAAA,EAC/C,SAAS,OAAO;AACd,YAAQ,MAAM,MAAM,IAAI,6CAAwC,CAAC;AACjE,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAKA,eAAe,YAAY,SAAiB,UAAiC;AAC3E,QAAM,UAAe,UAAK,QAAQ,gBAAgB,EAAE,QAAQ,OAAO,GAAG;AACtE,QAAM,QAAQ,MAAM,KAAK,OAAO;AAEhC,QAAM,UAAwB,CAAC;AAE/B,aAAW,YAAY,OAAO;AAC5B,QAAI;AAGF,UAAI;AACJ,UAAI;AAMJ,UAAI,CAAC,UAAU;AACb,cAAM,UAAa,gBAAa,UAAU,OAAO;AAGjD,cAAM,UAAU,QAAQ,MAAM,qEAAqE;AACnG,YAAI,CAAC,SAAS;AACZ,kBAAQ,KAAK,MAAM,OAAO,0BAAgB,QAAQ,sBAAsB,CAAC;AACzE;AAAA,QACF;AACA,mBAAW,QAAQ,CAAC;AAAA,MACtB;AAIA,YAAM,eAAoB,cAAS,QAAQ,QAAQ;AACnD,YAAM,aAAkB,aAAQ,YAAY;AAC5C,YAAM,cAAmB,cAAS,cAAc,YAAY;AAC5D,YAAM,cAAmB,UAAK,YAAY,YAAY,GAAG,WAAW,EAAE,EAAE,QAAQ,OAAO,GAAG;AAE1F,cAAQ,KAAK;AAAA,QACX,IAAI;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,OAAO;AACd,cAAQ,MAAM,MAAM,IAAI,2BAAsB,QAAQ,GAAG,GAAG,KAAK;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,iBAAiB,SAAuB,WAAkC;AACvF,QAAM,cAAmB,UAAK,WAAW,UAAU;AAGnD,MAAO,cAAW,WAAW,GAAG;AAC9B,IAAG,UAAO,aAAa,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACzD;AACA,EAAG,aAAU,aAAa,EAAE,WAAW,KAAK,CAAC;AAE7C,aAAW,UAAU,SAAS;AAG5B,UAAM,cAAmB,UAAK,aAAa,OAAO,YAAY,QAAQ,aAAa,EAAE,IAAI,KAAK;AAC9F,UAAM,aAAkB,aAAQ,WAAW;AAE3C,QAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,MAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,IAC9C;AASA,UAAM,iBAAsB,aAAQ,WAAW;AAC/C,UAAM,gBAAqB,aAAQ,OAAO,QAAQ;AAGlD,QAAI,qBAA0B,cAAc,aAAQ,cAAc,GAAG,aAAa;AAGlF,QAAI,CAAC,mBAAmB,WAAW,GAAG,GAAG;AACvC,2BAAqB,OAAO;AAAA,IAC9B;AAGA,yBAAqB,mBAAmB,QAAQ,SAAS,EAAE;AAE3D,yBAAqB,mBAAmB,MAAW,QAAG,EAAE,KAAK,GAAG;AAGhE,UAAM,cAAiB,gBAAa,OAAO,UAAU,OAAO;AAC5D,UAAM,gBAAgB,kCAAkC,KAAK,WAAW;AACxE,UAAM,cAAc,YAAY,MAAM,iDAAiD;AACvF,UAAM,aAAa,cAAc,YAAY,CAAC,IAAI;AAGlD,UAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAE3D,UAAM,YAAY,gBACd,yBACA,gBAAgB,UAAU;AAG9B,UAAM,mBAAmB;AAAA;AAAA,iCAEI,kBAAkB;AAAA;AAAA,sBAE7B,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQ3B,IAAG,iBAAc,eAAe,gBAAgB;AAGhD,QAAI;AAGF,YAAM,qBAAqC;AAAA,QACzC,MAAM;AAAA,QACN,MAAMC,QAAO;AACX,UAAAA,OAAM,MAAM,OAAO,WAAW;AAC5B,gBAAI,OAAO,O
AAO,SAAS,EAAG;AAG9B,gBAAI,cAAiB,gBAAa,aAAa,OAAO;AACtD,gBAAI,WAAW;AAMf,kBAAM,UAAU;AAEhB,gBAAI,QAAQ,KAAK,WAAW,GAAG;AAC7B,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAIA,gBAAI,YAAY,SAAS,iBAAiB,GAAG;AAC3C,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAGA,kBAAM,sBAAsB;AAC5B,0BAAc,YAAY;AAAA,cACxB;AAAA,cACA;AAAA,YACF;AACA,gBAAI,gBAAgB,oBAAqB,YAAW;AAEpD,gBAAI,UAAU;AACZ,cAAG,iBAAc,aAAa,aAAa,OAAO;AAAA,YACpD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAc,cAAM;AAAA,QAClB,aAAa,CAAC,aAAa;AAAA,QAC3B,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA;AAAA;AAAA,QAGT,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA;AAAA;AAAA,QAGA,QAAQ;AAAA,UACN,sBAAsB;AAAA,QACxB;AAAA;AAAA;AAAA,QAGA,UAAU;AAAA,QACV,SAAS,CAAC,kBAAkB;AAAA,QAC5B,UAAU;AAAA,MACZ,CAAC;AAGD,MAAG,cAAW,aAAa;AAAA,IAE7B,SAAS,OAAO;AACd,cAAQ,MAAM,MAAM,IAAI,8BAA8B,OAAO,EAAE,GAAG,GAAG,KAAK;AAAA,IAE5E;AAAA,EACF;AACA,UAAQ,IAAI,MAAM,MAAM,oBAAe,QAAQ,MAAM,mBAAmB,CAAC;AAC3E;AAEA,SAAS,oBAAoB,WAAmB,aAAqB,OAAe,QAAsB;AACxG,QAAM,cAAmB,UAAK,WAAW,YAAY,SAAS;AAC9D,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAuBO,MAAM;AAAA,+BACP,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAoJd,WAAW;AAAA,gBACjB,KAAK;AAAA,iBACJ,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAerB,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,MAAM,MAAM,oCAA+B,CAAC;AAC1D;AAEA,SAAS,uBAAuB,WAAmB,aAA2B;AAC5E,QAAM,cAAmB,UAAK,WAAW,YAAY,oBAAoB;AACzE,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAQF,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0FhD,EAAG,iBAAc,eAAe,cAAc;AAE9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,IACV
,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,MAAM,MAAM,2CAAsC,CAAC;AACjE;AAKA,SAAS,6BACP,WACA,SACA,aACM;AAEN,QAAM,cAAmB,UAAK,WAAW,YAAY,mBAAmB;AACxE,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAG3C,MAAO,cAAW,UAAU,KAAK,CAAI,cAAW,WAAW,GAAG;AAAA,EAE9D,WAAW,CAAI,cAAW,UAAU,GAAG;AACrC,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BASM,KAAK,UAAU,QAAQ,IAAI,OAAK,EAAE,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA,uBACvD,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA4EhD,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,MAAM,MAAM,yCAAoC,CAAC;AAC/D;AAKA,SAAS,YAAY,UAAkB,QAAgC;AACrE,QAAM,MAA8B,CAAC;AAErC,MAAI,CAAI,cAAW,OAAO,GAAG;AAC3B,YAAQ,KAAK,MAAM,OAAO,wCAA8B,OAAO,EAAE,CAAC;AAClE,WAAO;AAAA,EACT;AAEA,QAAM,UAAa,gBAAa,SAAS,OAAO;AAChD,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAEhC,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,CAAC,WAAW,QAAQ,WAAW,GAAG,EAAG;AAEzC,UAAM,QAAQ,QAAQ,MAAM,gBAAgB;AAC5C,QAAI,OAAO;AACT,YAAM,MAAM,MAAM,CAAC,EAAE,KAAK;AAC1B,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,gBAAgB,EAAE;AACxD,UAAI,GAAG,IAAI;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,sBAAsB,gBAA4B;AACzD,MAAI,CAAC,gBAAgB;AACnB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAgB,CAAC;AAGvB,QAAM,YAAY,MAAM,QAAQ,cAAc,IAAI,iBAAiB,CAAC,cAAc;AAElF,aAAW,YAAY,WAAW;AAEhC,QAAI,OAAO,aAAa,UAAU;AAChC,aAAO,KAAK;AAAA,QACV;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAGA,QAAI,OAAO,aAAa,YAAY,aAAa,MAAM;AACrD,YAAM,gBAAqB,EAAE,UAAU,CAAC,EAAE;AAG1C,UAAI,SAAS,MAAM;AACjB,YAAI,MAAM,QAAQ,SAAS,IAAI,GAAG;AAEhC,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC,OAAO;AAEL,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC;AAAA,MACF,OAAO;AAEL;AAAA,MACF;AAGA,UAAI,SAAS,YAAY,QAAW;AAClC,sBAAc,SAAS,UAAU,SAAS;AAAA,MAC5C;AACA,UAAI,SAAS,UAAU,QAAW;AAChC,sBAAc,SAAS,QAAQ,SAAS;AAAA,MAC1C;AACA,UAAI,SAAS,cAAc,QAAW;AACpC,sBAAc,SAAS,YAAY,SAAS;AAAA,MAC9C;AACA,UAAI,SAAS,qBAAqB,QAAW;AAC3C,sBAAc,SAAS,mBAAmB,SAAS;AAAA,MACrD;AACA,UAAI,SAAS,SAAS,QAAW;AAC/B,sBAAc,SAAS,OAAO,SAAS;AAAA,MACzC;AACA,UAAI,SAAS,gBAAgB,QAAW;AACtC,sBAAc,SAAS,cAAc,SAAS;AAAA,MAChD;AACA,UAAI,SAAS,WAAW,QAAW;AACjC,sBAAc,SAAS,SAAS,SAAS;AAAA,MAC3C;AACA,UAAI,SAAS,aAAa,QAAW;AACnC,sBAAc,SAAS,WAAW,SAAS;AAAA,MAC7C;AAIA,UAAI,OAAO,KAAK,cAAc,QAAQ,EAAE,WAAW,KAAK,cAAc,SAAS,MAAM;AAEnF,YAAI,OAAO,cAAc,SAAS,SAAS,UAAU;AACnD,iBAAO,KAAK;AAAA,YACV,UAAU,cAAc,SAAS;AAAA,UACnC,CAAC;AAAA,QACH,OAAO;AAEL,iBAAO,KAAK,aAAa;AAAA,QAC3B;AAAA,MACF,OAAO;AACL,eAAO,KAAK,aAAa;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,yBACP,SACA,OACA,QACA,SACA,aACkB;AAElB,QAAM,YAA2C;AAAA,IAC/C,WAAW,CAAC;AAAA,IACZ,SAAS,CAAC;AAAA,EACZ;AAEA,QAAM,YAAiD,CAAC;AAGxD,QAAM,sBAA2B;AAAA,IAC/B,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AAGA,QAAM,eAAoC;AAAA,IACxC,OAAO,wBAAwB,KAAK;AAAA,IACpC,sBAAsB;AAAA,MACpB,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,oBAAoB;AAAA,MACpB,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,yBAAyB;AAAA,MACzB,WAAW;AAAA,MACX,aAAa;AAAA,IACf;AAAA,EACF;AAEA,aAAW,UAAU,SAAS;AAC5B,UAAM,YAAY,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC;AACtE,UAAM,iBAAiB,GAAG,SAAS,GAAG,KAAK;AAC3C,UAAM,eAAe
,GAAG,SAAS,MAAM,KAAK;AAE5C,UAAM,SAAS,OAAO,cAAc;AACpC,UAAM,YACJ,OAAO,QAAQ,2BAA2B,WACtC,OAAO,yBACP;AACN,UAAM,eACJ,OAAO,QAAQ,qCAAqC,WAChD,OAAO,mCACP;AACN,UAAM,oBACJ,OAAO,QAAQ,sBAAsB,WACjC,OAAO,qBACN,OAAO,cAAc,WAAW,OAAO;AAC9C,UAAM,qBACJ,OAAO,QAAQ,oBAAoB,WAAW,OAAO,kBAAkB;AAEzE,UAAM,kBAAkB,KAAK,IAAI,GAAG,KAAK,MAAM,kBAAkB,CAAC;AAGlE,cAAU,UAAU,YAAY,IAAI;AAAA,MAClC,MAAM;AAAA,MACN,YAAY;AAAA,QACV,WAAW,oBAAoB,OAAO,EAAE,wCAAwC,KAAK;AAAA,QACrF,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,cAAU,UAAU,cAAc,IAAI;AAAA,MACpC,MAAM;AAAA,MACN,YAAY;AAAA;AAAA,QAEV,WAAW,oBAAoB,OAAO,EAAE,oCAAoC,KAAK;AAAA,QACjF,mBAAmB;AAAA,QACnB,wBAAwB;AAAA,QACxB,eAAe;AAAA,UACb,qBAAqB,EAAE,cAAc,CAAC,cAAc,KAAK,EAAE;AAAA,UAC3D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,QAAQ,GAAG,cAAc,KAAK,IAAI;AAAA,MAC1C,aAAa,wBAAwB,OAAO,EAAE;AAAA,MAC9C,OAAO,EAAE,KAAK,eAAe;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM,oBAAoB,OAAO,EAAE;AAAA,MACrC;AAAA,IACF;AAEA,cAAU,KAAK,EAAE,cAAc,CAAC,gBAAgB,KAAK,EAAE,CAAC;AAAA,EAC1D;AAGA,QAAM,YAAiC,CAAC;AAExC,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,SAAS,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC;AAGpE,UAAM,SAAgB;AAAA,MACpB;AAAA,QACE,KAAK;AAAA,UACH,KAAK,EAAE,cAAc,CAAC,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC,GAAG,KAAK,IAAI,KAAK,EAAE;AAAA,UAC7F,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,iBAAiB,sBAAsB,OAAO,aAAa,QAAQ;AACzE,aAAO,KAAK,GAAG,cAAc;AAAA,IAC/B;AAEA,cAAU,YAAY,IAAI;AAAA;AAAA,MAExB,SAAS,GAAG,OAAO,WAAW;AAAA,MAC9B,SAAS,OAAO,cAAc,WAAW;AAAA,MACzC,YAAY,OAAO,cAAc,cAAc;AAAA,MAC/C;AAAA,IACF;AAEA,QAAI,OAAO,cAAc,QAAQ,QAAQ;AACvC,gBAAU,YAAY,EAAE,SAAS,OAAO,aAAa;AAAA,IACvD;AAAA,EACF;AAGA,YAAU,SAAS,IAAI;AAAA,IACrB,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,cAAsC,CAAC;AAC7C,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW;AAK5G,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,QAAI,gBAAgB,KAAK,YAAU,IAAI,WAAW,MAAM,CAAC,GAAG;AAC1D,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAGA,YAAU,QAAQ,cAAc,IAAI;AAAA,IAClC,aAAa;AAAA,IACb,OAAO;AAAA,MACL,YAAY;AAAA,QACV;AAAA,QACA;AAAA,UACE;AAAA,UACA,EAAE,OAAO,oBAAoB;AAAA,UAC7B;AAAA,UACA,EAAE,OAAO,cAAc;AAAA,UACvB,uCAAuC,KAAK;AAAA,QAC9C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAAS;AAAA,MACP,wBAAwB;AAAA,MACxB,cAAc;AAAA;AAAA,MAEd,UAAU;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,IACR,UAAU;AAAA,MACR,MAAM;AAAA,MACN,SAAS;AAAA,MACT;AAAA,MACA,kBAAkB;AAAA;AAAA,MAElB,OAAO,wBAAwB,KAAK;AAAA,MACpC,aAAa;AAAA,MACb,KAAK;AAAA,QACH,MAAM;AAAA,UACJ,YAAY;AAAA,YACV;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ;AAAA,gBACN;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,cACA,UAAU;AAAA,YACZ;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ,CAAC,iBAAiB;AAAA;AAAA,cAE1B,UAAU;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,SAAS,CAAC,oBAAoB;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAe,mBACb,OACA,QACA,WACe;AACf,QAAM,gBAAqB,UAAK,WAAW,aAAa;AACxD,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAKA,QAAM,UAAU,MAAM,YAAY;AAGlC,QAAM,YAAY,qBAAqB,KAAK,IAAI,KAAK;AACrD,MAAI,YAAkE,CAAC;AAEvE,QAAM,UAAU,IAAI,oCAAoC,EAAE,MAAM;AAEhE,MAAI;AAEF,UAAM,SAAS;AAAA,MACb,mDAAmD,SAAS,aAAa,MAAM;AAAA,MAC/E,EAAE,UAAU,SAAS,OAAO,OAAO;AAAA,IACrC;AA
EA,UAAM,UAAU,KAAK,MAAM,MAAM;AACjC,UAAM,YAAoC,CAAC;AAE3C,eAAWC,WAAU,SAAS;AAC5B,YAAM,MAAMA,QAAO;AACnB,UAAI,OAAO,IAAI,SAAS,KAAK,GAAG;AAC9B,cAAM,WAAW,IAAI,QAAQ,eAAe,EAAE,EAAE,QAAQ,OAAO,EAAE,EAAE,YAAY;AAG/E,kBAAU,GAAG,IAAIA,QAAO;AAAA,MAC1B;AAAA,IACF;AAGA,eAAW,UAAU,SAAS;AAC5B,YAAM,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE;AACzD,YAAM,WAAW,cAAc,WAAW,GAAG,KAAK;AAGlD,YAAM,cAAc,OAAO,KAAK,SAAS,EAAE,KAAK,OAAK,EAAE,YAAY,MAAM,SAAS,YAAY,CAAC;AAE/F,UAAI,eAAe,UAAU,WAAW,GAAG;AACzC,kBAAU,OAAO,EAAE,IAAI;AAAA,UACrB,UAAU,UAAU,WAAW;AAAA,UAC/B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,YAAQ,QAAQ,gCAAgC;AAAA,EAClD,SAAS,OAAO;AACd,YAAQ,KAAK,yEAAyE;AACtF,eAAW,UAAU,SAAS;AAC5B,gBAAU,OAAO,EAAE,IAAI;AAAA,QACrB,UAAU,eAAe,eAAe,kBAAkB,kBAAkB,IAAI,iBAAiB,IAAI,OAAO,EAAE,IAAI,KAAK;AAAA,QACvH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA,4BAKO,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC;AAAA;AAG5D,QAAM,UAAe,UAAK,eAAe,0BAA0B;AACnE,EAAG,iBAAc,SAAS,UAAU;AACpC,UAAQ,IAAI,MAAM,MAAM,iCAA4B,OAAO,EAAE,CAAC;AAChE;AAEA,eAAeD,OAAM,MAAW;AAC9B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AACxD,QAAM,SAAS,KAAK,SAAS,KAAK;AAElC,UAAQ,IAAI,MAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AAErF,QAAM,UAAU,IAAI,qBAAqB,EAAE,MAAM;AACjD,QAAM,UAAU,MAAM,YAAY,MAAM;AAExC,MAAI,QAAQ,WAAW,GAAG;AACxB,YAAQ,KAAK,mBAAmB;AAChC;AAAA,EACF;AACA,UAAQ,QAAQ,SAAS,QAAQ,MAAM,YAAY;AACnD,UAAQ,QAAQ,OAAK,QAAQ,IAAI,MAAM,KAAK,OAAO,EAAE,EAAE,KAAK,EAAE,QAAQ,GAAG,CAAC,CAAC;AAE3E,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAKA,QAAM,cAAc,MAAM;AAAA,IACxB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,IAC7B,QAAQ,IAAI;AAAA,EACd;AACA,QAAM,eAAe,qBAAqB,QAAQ,IAAI,GAAG,WAAW;AAGpE,QAAM,cAAc;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,SAAS;AAAA,IACT;AAAA,IACA,SAAS;AAAA,MACP,OAAO;AAAA,IACT;AAAA,IACA,iBAAiB;AAAA,MACf,YAAY;AAAA,MACZ,sBAAsB;AAAA,MACtB,uBAAuB;AAAA,IACzB;AAAA,EACF;AACA,EAAG;AAAA,IACI,UAAK,eAAe,cAAc;AAAA,IACvC,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,EACrC;AAIA,QAAM,UAAU,YAAY;AAK5B,QAAM,mBAAmB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AACtD,QAAM,EAAE,aAAa,gBAAgB,eAAe,iBAAiB,IACnE,MAAM,0BAA0B,kBAAkB,QAAQ,IAAI,CAAC;AACjE,QAAM,oBAAoB,oBAAI,IAAY;AAAA,IACxC,GAAG,MAAM,KAAK,cAAc;AAAA,IAC5B,GAAG,MAAM,KAAK,gBAAgB;AAAA,EAChC,CAAC;AAGD,QAAM,cAAc,MAAM,KAAK,cAAc,EAAE,KAAK;AACpD,QAAM,gBAAgB,MAAM,KAAK,gBAAgB,EAAE,KAAK;AACxD,QAAM,oBAAoB,MAAM,KAAK,iBAAiB,EACnD,OAAO,CAAC,MAAM,EAAE,KAAK,QAAQ,EAC7B,KAAK;AACR,MAAI,YAAY,UAAU,cAAc,QAAQ;AAC9C,YAAQ;AAAA,MACN,MAAM;AAAA,QACJ,8DAAoD,YAAY,MAAM,eAAe,cAAc,MAAM;AAAA,MAC3G;AAAA,IACF;AACA,QAAI,kBAAkB,SAAS,GAAG;AAChC,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ,yGAA+F,kBAC5F,MAAM,GAAG,EAAE,EACX,KAAK,IAAI,CAAC,GAAG,kBAAkB,SAAS,KAAK,SAAS,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,cAAe,KAAK,cAAc,GAA0B,KAAK,KAAK,qBAAqB,KAAK;AAGpG,QAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,MAAO,cAAW,gBAAgB,GAAG;AACnC,QAAI;AACF,YAAM,iBAAiB,KAAK,MAAS,gBAAa,kBAAkB,OAAO,CAAC;AAC5E,UAAI,eAAe,WAAW;AAE5B,YAAI,CAAE,KAAK,cAAc,GAA0B,KAAK,GAAG;AACzD,wBAAc,4BAA4B,eAAe,SAAS;AAAA,QACpE;AACA,gBAAQ,IAAI,MAAM,KAAK,wDAA8C,WAAW,EAAE,CAAC;AAAA,MACrF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,KAAK,MAAM,OAAO,yEAA+D,CAAC;AAAA,IAC5F;AAAA,EACF;AAEA,MAAI,wBAAwB,EAAE,MAAM,EAAE,QAAQ,oBAAoB;AAClE,QAAM,iBAAiB,SAAS,aAAa;AAG7C,QAAM,iBAAiB,IAAI,oDAAoD,EAAE,MAAM;AACvF,aAAW,UAAU,SAAS;AAC5B,QAAI;AACF,YAAM,cAAmB,UAAK,eAAe,OAAO,cAAc,KAAK;AACvE,UAAO,cAAW,WAAW,GAAG;AAE9B,cAAM,aAAa,cAAmB,aAAQ,WAAW,CAAC,EAAE;AAE5D,YAAI;AAGF,gBAAM,SAAS,MAAM,OAAO;AAG5B,cAAI,OAAO,sBAAsB;AAC/B,mBAAO,eAAe,OAAO;AAC7B,gBAAI,OAAO,qBAAqB,QAAQ,QAAQ;AAC9C,sBAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,WAAW,OAAO,qBAA
qB,OAAO,MAAM,WAAW,CAAC;AAAA,YACzG;AAAA,UACF,OAAO;AACL,oBAAQ,KAAK,MAAM,OAAO,YAAO,OAAO,EAAE,6CAA6C,CAAC;AAAA,UAC1F;AAAA,QACF,SAAS,aAAkB;AAIzB,kBAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,mDAAmD,aAAa,SAAS,MAAM,GAAG,EAAE,KAAK,eAAe,MAAM,CAAC;AAGtJ,cAAI;AACF,kBAAM,gBAAmB,gBAAa,OAAO,UAAU,OAAO;AAE9D,kBAAM,oBAAoB,cAAc,MAAM,uDAAuD;AACrG,gBAAI,mBAAmB;AAErB,kBAAI,YAAY,kBAAkB,CAAC,EAChC,QAAQ,qBAAqB,EAAE,EAC/B,QAAQ,sBAAsB,IAAI;AAGrC,oBAAM,YAAY,IAAI,SAAS,YAAY,SAAS,EAAE;AACtD,kBAAI,cAAc,UAAU,UAAU,UAAU,WAAW,UAAU,cAAc,UAAU,WAAW;AACtG,uBAAO,eAAe;AACtB,oBAAI,UAAU,QAAQ,QAAQ;AAC5B,0BAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,WAAW,UAAU,OAAO,MAAM,4BAA4B,CAAC;AAAA,gBACxG;AACA,oBAAI,UAAU,UAAU;AACtB,0BAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,gCAAgC,CAAC;AAAA,gBAC1E;AAAA,cACF;AAAA,YACF;AAAA,UACF,SAAS,eAAe;AAEtB,oBAAQ,KAAK,MAAM,OAAO,YAAO,OAAO,EAAE,mDAAmD,CAAC;AAAA,UAChG;AAAA,QACF;AAAA,MACF,OAAO;AACL,gBAAQ,KAAK,MAAM,OAAO,YAAO,OAAO,EAAE,6BAA6B,WAAW,EAAE,CAAC;AAAA,MACvF;AAAA,IACF,SAAS,OAAY;AAEnB,cAAQ,KAAK,MAAM,OAAO,YAAO,OAAO,EAAE,+BAA+B,OAAO,WAAW,KAAK,EAAE,CAAC;AAAA,IACrG;AAAA,EACF;AACA,iBAAe,QAAQ,mBAAmB;AAE1C,+BAA6B,eAAe,SAAS,WAAW;AAChE,sBAAoB,eAAe,aAAa,OAAO,MAAM;AAC7D,yBAAuB,eAAe,WAAW;AAEjD,QAAM,SAAS,yBAAyB,SAAS,OAAO,QAAQ,SAAS,WAAW;AAIpF,QAAM,WAAc,cAAW,gBAAgB,IAAI,SAAS;AAC5D,QAAM,cAAsC;AAAA,IAC1C,aAAa;AAAA,IACb,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AACA,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW;AAE5G,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAGlD,QAAI,IAAI,WAAW,MAAM,EAAG;AAI5B,QAAI,gBAAgB,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,KAAK,kBAAkB,IAAI,GAAG,GAAG;AAC1F,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAEA,EAAG;AAAA,IACI,UAAK,eAAe,UAAU;AAAA,IACnC,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,EACrC;AAEA,QAAM,cAAmB,UAAK,QAAQ,EAAE,QAAQ,EAAE,CAAC;AACnD,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAC1D,EAAG,iBAAc,UAAU,WAAW;AACtC,UAAQ,IAAI,MAAM,MAAM,oCAA+B,QAAQ,EAAE,CAAC;AACpE;AAEA,eAAe,OAAO,MAAW;AAC/B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AAExD,QAAM,aAAa,KAAK,cAAc,KAAK,aAAa,KAAK;AAC7D,QAAM,cAAc,KAAK,eAAe,KAAK,cAAc,KAAK;AAEhE,MAAI,YAAY;AACd,YAAQ,IAAI,MAAM,OAAO,wDAA8C,CAAC;AACxE;AAAA,EACF;AAEA,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAE1D,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,YAAQ,MAAM,MAAM,IAAI,qDAAgD,CAAC;AACzE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,IAAI,MAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AACrF,sBAAoB;AAEpB,MAAI;AAGF,QAAI,CAAC,eAAe,CAAI,cAAgB,UAAK,eAAe,cAAc,CAAC,GAAG;AAC5E,cAAQ,IAAI,MAAM,KAAK,iDAA0C,CAAC;AAClE,eAAS,eAAe;AAAA,QACtB,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAGA,UAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,QAAO,cAAW,gBAAgB,GAAG;AACnC,cAAQ,IAAI,MAAM,KAAK,oEAA0D,CAAC;AAGlF,MAAG,gBAAa,kBAAuB,UAAK,eAAe,eAAe,CAAC;AAG3E,YAAM,UAAU,YAAY;AAG5B,eAAS,4BAA4B;AAAA,QACnC,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AACD,cAAQ,IAAI,MAAM,MAAM,2CAAsC,CAAC;AAE/D;AAAA,IACF;AAEA,aAAS,yBAAyB;AAAA,MAChC,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK;AAAA,QACH,GAAG,QAAQ;AAAA,QACX,OAAO;AAAA,QACP,YAAY;AAAA,MACd;AAAA,IACF,CAAC;AACD,YAAQ,IAAI,MAAM,MAAM,6BAAwB,CAAC;AAAA,EACnD,SAAS,OAAO;AACd,YAAQ,MAAM,MAAM,IAAI,0BAAqB,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,mBAAmB,OAAO,QAAQ,aAAa;AACvD;AAEO,IAAM,cAAc,IAAI,QAAQ,EACpC,KAAK,MAAM,EACX,YAAY,4CAA4C,EACxD,OAAO,uBAAuB,oBAAoB,MAAM,EACxD,OAAO,yBAAyB,cAAc,WAAW,EACzD,OAAO,oBAAoB,2CAA2C,QAAQ,EAC9E,OAAO,yBAAyB,0EAA0E,EAC1G,OAAO,iBAAiB,+BAA+B,KAAK,EAC5D,OAAO,kBAAkB,4CAA4C,KAAK,EAC1E,OAAO,OAAO,YAAY;AACzB,QAAMA,OAAM,OAAO;AACnB,QAAM,OAAO,OAAO;AACtB,CAAC;;;ADnyDH,IAAM,UAAU,IAAIE,SAAQ;AAE5B,QACG,KAAK,WAAW,EAChB,YAAY,wDAAwD,EACpE,QAAQ,OAAO;AAElB,QAAQ,WAAW,WAAW;AAE9B,QAAQ,MAAM,QAAQ,IAAI;AAE1B,IAAM,cAAc;","names":["Comm
and","build","output","Command"]}
+
{"version":3,"sources":["../src/index.ts","../src/commands/push.ts","../src/commands/new.ts","../src/commands/boilerplate.ts"],"sourcesContent":["#!/usr/bin/env node\n\nimport { Command } from 'commander';\nimport { readFileSync } from 'fs';\nimport { fileURLToPath } from 'url';\nimport { dirname, join } from 'path';\nimport { pushCommand } from './commands/push.js';\nimport { newCommand } from './commands/new.js';\nimport { boilerplateCommand } from './commands/boilerplate.js';\n\n// Read version from package.json\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst packageJsonPath = join(__dirname, '..', 'package.json');\nconst packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));\nconst version = packageJson.version || '1.0.0';\n\nconst program = new Command();\n\nprogram\n .name('ai-worker')\n .description('CLI tooling for deploying ai-router background workers')\n .version(version);\n\nprogram.addCommand(pushCommand);\nprogram.addCommand(newCommand);\nprogram.addCommand(boilerplateCommand);\n\nprogram.parse(process.argv);\n\nconst aiWorkerCli = program;\nexport { aiWorkerCli };\n","import { Command } from 'commander';\nimport * as esbuild from 'esbuild';\nimport { execSync } from 'child_process';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport { pathToFileURL } from 'url';\nimport { builtinModules } from 'module';\nimport { glob } from 'glob';\nimport * as yaml from 'js-yaml';\nimport chalk from 'chalk';\nimport ora from 'ora';\n\nconst NODE_BUILTINS = new Set(\n builtinModules.map((m) => (m.startsWith('node:') ? m.slice('node:'.length) : m))\n);\n\nfunction isBuiltinModule(specifier: string): boolean {\n const s = specifier.startsWith('node:')\n ? specifier.slice('node:'.length)\n : specifier;\n return NODE_BUILTINS.has(s);\n}\n\nfunction getPackageNameFromSpecifier(specifier: string): string {\n // Scoped packages: @scope/name/...\n if (specifier.startsWith('@')) {\n const [scope, name] = specifier.split('/');\n return name ? `${scope}/${name}` : specifier;\n }\n // Unscoped: name/...\n return specifier.split('/')[0];\n}\n\nfunction tryResolveLocalImport(fromFile: string, specifier: string): string | null {\n const baseDir = path.dirname(fromFile);\n const raw = path.resolve(baseDir, specifier);\n\n // Direct file hits\n const candidates = [\n raw,\n `${raw}.ts`,\n `${raw}.tsx`,\n `${raw}.js`,\n `${raw}.mjs`,\n `${raw}.cjs`,\n ];\n for (const c of candidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n\n // Directory index hits\n if (fs.existsSync(raw) && fs.statSync(raw).isDirectory()) {\n const idxCandidates = [\n path.join(raw, 'index.ts'),\n path.join(raw, 'index.tsx'),\n path.join(raw, 'index.js'),\n path.join(raw, 'index.mjs'),\n path.join(raw, 'index.cjs'),\n ];\n for (const c of idxCandidates) {\n if (fs.existsSync(c) && fs.statSync(c).isFile()) return c;\n }\n }\n\n return null;\n}\n\nfunction extractImportSpecifiers(source: string): string[] {\n const specs: string[] = [];\n\n // import ... from 'x' / export ... from 'x'\n // NOTE: we intentionally ignore \"import type ... 
from\" because it's type-only.\n const re1 =\n /(?:^|\\n)\\s*(?!import\\s+type)(?:import|export)\\s+[\\s\\S]*?\\sfrom\\s*['\"]([^'\"]+)['\"]/g;\n for (const match of source.matchAll(re1)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // import('x')\n const re2 = /import\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re2)) {\n if (match[1]) specs.push(match[1]);\n }\n\n // require('x')\n const re3 = /require\\s*\\(\\s*['\"]([^'\"]+)['\"]\\s*\\)/g;\n for (const match of source.matchAll(re3)) {\n if (match[1]) specs.push(match[1]);\n }\n\n return specs;\n}\n\nfunction extractEnvVarUsageFromSource(source: string): {\n runtimeKeys: Set<string>;\n buildtimeKeys: Set<string>;\n} {\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n // process.env.KEY / process.env?.KEY\n const reProcessDot = /\\bprocess\\.env\\??\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reProcessDot)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // process.env['KEY'] / process.env[\"KEY\"]\n const reProcessBracket = /\\bprocess\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reProcessBracket)) {\n const key = match[1];\n if (key) runtimeKeys.add(key);\n }\n\n // import.meta.env.KEY\n const reImportMetaDot = /\\bimport\\.meta\\.env\\.([A-Za-z_][A-Za-z0-9_]*)\\b/g;\n for (const match of source.matchAll(reImportMetaDot)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n // import.meta.env['KEY']\n const reImportMetaBracket = /\\bimport\\.meta\\.env\\[\\s*['\"]([^'\"]+)['\"]\\s*\\]/g;\n for (const match of source.matchAll(reImportMetaBracket)) {\n const key = match[1];\n if (key) buildtimeKeys.add(key);\n }\n\n return { runtimeKeys, buildtimeKeys };\n}\n\nasync function collectEnvUsageForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<{ runtimeKeys: Set<string>; buildtimeKeys: Set<string> }> {\n void projectRoot; // reserved for future improvements (tsconfig path aliases, etc.)\n\n const runtimeKeys = new Set<string>();\n const buildtimeKeys = new Set<string>();\n\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n\n const usage = extractEnvVarUsageFromSource(src);\n usage.runtimeKeys.forEach((k) => runtimeKeys.add(k));\n usage.buildtimeKeys.forEach((k) => buildtimeKeys.add(k));\n\n const specifiers = extractImportSpecifiers(src);\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) continue;\n if (isBuiltinModule(spec)) continue;\n // External packages are ignored; we only scan local files.\n }\n }\n\n runtimeKeys.delete('');\n buildtimeKeys.delete('');\n runtimeKeys.delete('node');\n buildtimeKeys.delete('node');\n\n return { runtimeKeys, buildtimeKeys };\n}\n\n/**\n * Collect callee worker IDs per worker (ctx.dispatchWorker('id', ...) 
in handler code).\n * Walks from each worker entry file and its local imports, extracts string literal IDs.\n */\nasync function collectCalleeWorkerIds(\n workers: WorkerInfo[],\n projectRoot: string\n): Promise<Map<string, Set<string>>> {\n void projectRoot;\n const calleeIdsByWorker = new Map<string, Set<string>>();\n\n const workerIds = new Set(workers.map((w) => w.id));\n\n for (const worker of workers) {\n const calleeIds = new Set<string>();\n const visited = new Set<string>();\n const queue: string[] = [worker.filePath];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n\n // ctx.dispatchWorker('id', ...) or ctx.dispatchWorker(\"id\", ...)\n const re = /(?:ctx\\.)?dispatchWorker\\s*\\(\\s*['\"]([^'\"]+)['\"]/g;\n for (const match of src.matchAll(re)) {\n if (match[1]) calleeIds.add(match[1]);\n }\n\n const specifiers = extractImportSpecifiers(src);\n for (const spec of specifiers) {\n if (!spec || !spec.startsWith('.')) continue;\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n }\n }\n\n if (calleeIds.size > 0) {\n for (const calleeId of calleeIds) {\n if (!workerIds.has(calleeId)) {\n console.warn(\n chalk.yellow(\n `⚠️ Worker \"${worker.id}\" calls \"${calleeId}\" which is not in scanned workers (typo or other service?). Queue URL will not be auto-injected.`\n )\n );\n }\n }\n calleeIdsByWorker.set(worker.id, calleeIds);\n }\n }\n\n return calleeIdsByWorker;\n}\n\nfunction sanitizeWorkerIdForEnv(workerId: string): string {\n return workerId.replace(/-/g, '_').toUpperCase();\n}\n\n/** Converts kebab/dotted id to camelCase segment (e.g. \"results-aggregator\" -> \"resultsAggregator\"). */\nfunction toCamelCase(id: string): string {\n return id\n .split(/[^a-zA-Z0-9]+/)\n .filter(Boolean)\n .map((part, i) =>\n i === 0 ? part.toLowerCase() : part.charAt(0).toUpperCase() + part.slice(1).toLowerCase()\n )\n .join('');\n}\n\n/** Prefix + camelCase id with first letter capitalized (e.g. \"worker\", \"results-aggregator\" -> \"workerResultsAggregator\"). 
*/\nfunction toPrefixedCamel(prefix: string, id: string): string {\n const camel = toCamelCase(id);\n return prefix + (camel.charAt(0).toUpperCase() + camel.slice(1));\n}\n\nfunction readJsonFile<T = any>(filePath: string): T | null {\n try {\n return JSON.parse(fs.readFileSync(filePath, 'utf-8')) as T;\n } catch {\n return null;\n }\n}\n\nfunction findMonorepoRoot(startDir: string): string {\n let dir = path.resolve(startDir);\n // Walk up until we find a package.json with \"workspaces\" or we hit filesystem root.\n while (true) {\n const pkgPath = path.join(dir, 'package.json');\n if (fs.existsSync(pkgPath)) {\n const pkg = readJsonFile<any>(pkgPath);\n if (pkg?.workspaces) return dir;\n }\n\n const parent = path.dirname(dir);\n if (parent === dir) return startDir; // fallback\n dir = parent;\n }\n}\n\nasync function collectRuntimeDependenciesForWorkers(\n workerEntryFiles: string[],\n projectRoot: string\n): Promise<Set<string>> {\n // Always include these: they're used by generated workers-config / lambda wrapper logic,\n // and are safe to install even if handlers are bundled.\n const deps = new Set<string>(['@microfox/ai-worker', '@aws-sdk/client-sqs']);\n const visited = new Set<string>();\n const queue: string[] = [...workerEntryFiles];\n\n while (queue.length > 0) {\n const file = queue.pop()!;\n const normalized = path.resolve(file);\n if (visited.has(normalized)) continue;\n visited.add(normalized);\n\n if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;\n const src = fs.readFileSync(normalized, 'utf-8');\n const specifiers = extractImportSpecifiers(src);\n\n for (const spec of specifiers) {\n if (!spec) continue;\n if (spec.startsWith('.')) {\n const resolved = tryResolveLocalImport(normalized, spec);\n if (resolved) queue.push(resolved);\n continue;\n }\n\n // Ignore absolute paths and non-node specifiers.\n if (spec.startsWith('/')) continue;\n if (isBuiltinModule(spec)) continue;\n\n deps.add(getPackageNameFromSpecifier(spec));\n }\n }\n\n // Filter out anything that isn't an npm package name\n deps.delete('');\n deps.delete('node');\n\n // Filter devDependencies\n deps.delete('serverless');\n deps.delete('serverless-offline');\n deps.delete('@aws-sdk/client-sqs');\n deps.delete('@microfox/ai-worker')\n return deps;\n}\n\n/** Resolve job store type from env (used for conditional deps). Default: upstash-redis. */\nfunction getJobStoreType(): 'mongodb' | 'upstash-redis' {\n const raw = process.env.WORKER_DATABASE_TYPE?.toLowerCase();\n if (raw === 'mongodb' || raw === 'upstash-redis') return raw;\n return 'upstash-redis';\n}\n\n/**\n * Filter runtime deps so only the chosen job-store backend is included (+ mongodb if user code uses it).\n * - type mongodb: include only mongodb for job store.\n * - type upstash-redis: include only @upstash/redis for job store.\n * - If user code imports mongodb (e.g. 
worker uses Mongo for its own logic), always add mongodb.\n */\nfunction filterDepsForJobStore(\n runtimeDeps: Set<string>,\n jobStoreType: 'mongodb' | 'upstash-redis'\n): Set<string> {\n const filtered = new Set(runtimeDeps);\n filtered.delete('mongodb');\n filtered.delete('@upstash/redis');\n if (jobStoreType === 'mongodb') filtered.add('mongodb');\n else filtered.add('@upstash/redis');\n if (runtimeDeps.has('mongodb')) filtered.add('mongodb');\n return filtered;\n}\n\nfunction buildDependenciesMap(projectRoot: string, deps: Set<string>): Record<string, string> {\n const projectPkg =\n readJsonFile<any>(path.join(projectRoot, 'package.json')) || {};\n const projectDeps: Record<string, string> = projectPkg.dependencies || {};\n const projectDevDeps: Record<string, string> = projectPkg.devDependencies || {};\n\n // Try to also source versions from workspace packages (ai-worker / ai-worker-cli)\n const repoRoot = findMonorepoRoot(projectRoot);\n const workerPkg =\n readJsonFile<any>(path.join(repoRoot, 'packages', 'ai-worker', 'package.json')) ||\n {};\n const workerCliPkg =\n readJsonFile<any>(\n path.join(repoRoot, 'packages', 'ai-worker-cli', 'package.json')\n ) || {};\n\n const workspaceDeps: Record<string, string> = {\n ...(workerPkg.dependencies || {}),\n ...(workerPkg.devDependencies || {}),\n ...(workerCliPkg.dependencies || {}),\n ...(workerCliPkg.devDependencies || {}),\n };\n\n const out: Record<string, string> = {};\n for (const dep of Array.from(deps).sort()) {\n const range =\n projectDeps[dep] ||\n projectDevDeps[dep] ||\n workspaceDeps[dep];\n // Only add deps that the project or workspace already declares (e.g. in package.json).\n // Skip subpath imports like @tokenlens/helpers that are not real packages and not in package.json.\n if (range) {\n out[dep] = String(range);\n }\n }\n\n return out;\n}\n\ninterface QueueStepInfo {\n workerId: string;\n delaySeconds?: number;\n mapInputFromPrev?: string;\n}\n\ninterface QueueInfo {\n id: string;\n filePath: string;\n steps: QueueStepInfo[];\n schedule?: string | { rate: string; enabled?: boolean; input?: Record<string, any> };\n}\n\ninterface WorkerInfo {\n id: string;\n filePath: string;\n // Module path WITHOUT extension and WITHOUT \".handler\" suffix.\n // Example: \"handlers/agents/test/test\"\n handlerPath: string;\n workerConfig?: {\n timeout?: number;\n memorySize?: number;\n layers?: string[];\n schedule?: any; // Schedule config: string, object, or array of either\n sqs?: {\n maxReceiveCount?: number;\n messageRetentionPeriod?: number;\n visibilityTimeout?: number;\n deadLetterMessageRetentionPeriod?: number;\n };\n };\n}\n\ninterface ServerlessConfig {\n service: string;\n custom?: Record<string, any>;\n package: {\n excludeDevDependencies: boolean;\n individually?: boolean;\n patterns: string[];\n };\n provider: {\n name: string;\n runtime: string;\n region: string;\n stage: string;\n versionFunctions?: boolean;\n environment: Record<string, string | Record<string, any>> | string;\n iam: {\n role: {\n statements: Array<{\n Effect: string;\n Action: string[];\n Resource: string | Array<string | Record<string, any>>;\n }>;\n };\n };\n };\n plugins: string[];\n functions: Record<string, any>;\n resources: {\n Resources: Record<string, any>;\n Outputs: Record<string, any>;\n };\n}\n\nexport function getServiceNameFromProjectId(projectId: string): string {\n const cleanedProjectId = projectId.replace(/-/g, '').slice(0, 15);\n return `p-${cleanedProjectId}`;\n}\n\n/**\n * Validates the environment and dependencies.\n 
*/\nfunction validateEnvironment(): void {\n // We no longer strictly require global serverless since we'll install it locally in the temp dir\n // But we do need npm\n try {\n execSync('npm --version', { stdio: 'ignore' });\n } catch (error) {\n console.error(chalk.red('❌ npm is not installed or not in PATH.'));\n process.exit(1);\n }\n}\n\n/**\n * Scans for all *.worker.ts files in app/ai directory.\n */\nasync function scanWorkers(aiPath: string = 'app/ai'): Promise<WorkerInfo[]> {\n const pattern = path.join(aiPath, '**/*.worker.ts').replace(/\\\\/g, '/');\n const files = await glob(pattern);\n\n const workers: WorkerInfo[] = [];\n\n for (const filePath of files) {\n try {\n // Try to dynamically import the worker file to get the actual workerConfig\n // This is more reliable than parsing the file as text\n let workerConfig: WorkerInfo['workerConfig'] | undefined;\n let workerId: string | undefined;\n\n // For now, just extract the ID using regex\n // We'll import the workerConfig from the bundled handlers later\n\n // Fallback to regex parsing if import didn't work\n if (!workerId) {\n const content = fs.readFileSync(filePath, 'utf-8');\n // Match createWorker with optional type parameters: createWorker<...>({ id: '...' })\n // or createWorker({ id: '...' })\n const idMatch = content.match(/createWorker\\s*(?:<[^>]+>)?\\s*\\(\\s*\\{[\\s\\S]*?id:\\s*['\"]([^'\"]+)['\"]/);\n if (!idMatch) {\n console.warn(chalk.yellow(`⚠️ Skipping ${filePath}: No worker ID found`));\n continue;\n }\n workerId = idMatch[1];\n }\n\n // Generate handler path (relative to serverless root)\n // Convert app/ai/agents/my-worker.worker.ts -> handlers/my-worker\n const relativePath = path.relative(aiPath, filePath);\n const handlerDir = path.dirname(relativePath);\n const handlerName = path.basename(relativePath, '.worker.ts');\n const handlerPath = path.join('handlers', handlerDir, `${handlerName}`).replace(/\\\\/g, '/');\n\n workers.push({\n id: workerId,\n filePath,\n handlerPath,\n workerConfig,\n });\n } catch (error) {\n console.error(chalk.red(`❌ Error processing ${filePath}:`), error);\n }\n }\n\n return workers;\n}\n\n/**\n * Scans for *.queue.ts files and parses defineWorkerQueue configs.\n */\nasync function scanQueues(aiPath: string = 'app/ai'): Promise<QueueInfo[]> {\n const base = aiPath.replace(/\\\\/g, '/');\n const pattern = `${base}/queues/**/*.queue.ts`;\n const files = await glob(pattern);\n\n const queues: QueueInfo[] = [];\n\n for (const filePath of files) {\n try {\n const content = fs.readFileSync(filePath, 'utf-8');\n // Match defineWorkerQueue({ id: '...', steps: [...], schedule?: ... 
})\n const idMatch = content.match(/defineWorkerQueue\\s*\\(\\s*\\{[\\s\\S]*?id:\\s*['\"]([^'\"]+)['\"]/);\n if (!idMatch) {\n console.warn(chalk.yellow(`⚠️ Skipping ${filePath}: No queue id found in defineWorkerQueue`));\n continue;\n }\n const queueId = idMatch[1];\n\n const steps: QueueStepInfo[] = [];\n const stepsMatch = content.match(/steps:\\s*\\[([\\s\\S]*?)\\]/);\n if (stepsMatch) {\n const stepsStr = stepsMatch[1];\n // Match step objects: { workerId: 'x', delaySeconds?: N, mapInputFromPrev?: 'y' }\n // Allow optional comment line between properties; comment before } only (lookahead); no trailing \\s*\n const stepRegex = /\\{\\s*workerId:\\s*['\"]([^'\"]+)['\"](?:,\\s*(?:\\/\\/[^\\r\\n]*\\r?\\n\\s*)?delaySeconds:\\s*(\\d+))?(?:,\\s*(?:\\/\\/[^\\r\\n]*\\r?\\n\\s*)?mapInputFromPrev:\\s*['\"]([^'\"]+)['\"])?\\s*,?\\s*(?:\\/\\/[^\\r\\n]*\\r?\\n\\s*)?(?=\\s*\\})\\s*\\},?/g;\n let m;\n while ((m = stepRegex.exec(stepsStr)) !== null) {\n steps.push({\n workerId: m[1],\n delaySeconds: m[2] ? parseInt(m[2], 10) : undefined,\n mapInputFromPrev: m[3],\n });\n }\n }\n\n let schedule: QueueInfo['schedule'];\n // Strip single-line comments so commented-out schedule is not picked up\n const contentWithoutLineComments = content.replace(/\\/\\/[^\\n]*/g, '');\n const scheduleStrMatch = contentWithoutLineComments.match(/schedule:\\s*['\"]([^'\"]+)['\"]/);\n const scheduleObjMatch = contentWithoutLineComments.match(/schedule:\\s*(\\{[^}]+(?:\\{[^}]*\\}[^}]*)*\\})/);\n if (scheduleStrMatch) {\n schedule = scheduleStrMatch[1];\n } else if (scheduleObjMatch) {\n try {\n schedule = new Function('return ' + scheduleObjMatch[1])();\n } catch {\n schedule = undefined;\n }\n }\n\n queues.push({ id: queueId, filePath, steps, schedule });\n } catch (error) {\n console.error(chalk.red(`❌ Error processing ${filePath}:`), error);\n }\n }\n\n return queues;\n}\n\n/**\n * Generates the queue registry module for runtime lookup.\n * For queues with mapInputFromPrev, imports the .queue.ts module so mapping can use any previous step or initial input.\n */\nfunction generateQueueRegistry(queues: QueueInfo[], outputDir: string, projectRoot: string): void {\n const generatedDir = path.join(outputDir, 'generated');\n if (!fs.existsSync(generatedDir)) {\n fs.mkdirSync(generatedDir, { recursive: true });\n }\n\n const relToRoot = path.relative(generatedDir, projectRoot).replace(/\\\\/g, '/');\n const queueModulesLines: string[] = [];\n const queueModulesEntries: string[] = [];\n const queuesWithMapping = queues.filter(\n (q) => q.steps?.some((s) => s.mapInputFromPrev)\n );\n for (let i = 0; i < queuesWithMapping.length; i++) {\n const q = queuesWithMapping[i];\n const relPath = (relToRoot + '/' + q.filePath.replace(/\\\\/g, '/')).replace(/\\.ts$/, '');\n const safeId = q.id.replace(/[^a-zA-Z0-9]/g, '');\n queueModulesLines.push(`const queueModule_${safeId} = require('${relPath}');`);\n queueModulesEntries.push(` '${q.id}': queueModule_${safeId},`);\n }\n const queueModulesBlock =\n queueModulesLines.length > 0\n ? `\n${queueModulesLines.join('\\n')}\nconst queueModules = {\n${queueModulesEntries.join('\\n')}\n};\n`\n : `\nconst queueModules = {};\n`;\n\n const registryContent = `/**\n * Auto-generated queue registry. 
DO NOT EDIT.\n * Generated by @microfox/ai-worker-cli from .queue.ts files.\n */\n${queueModulesBlock}\n\nconst QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};\n\nexport function getQueueById(queueId) {\n return QUEUES.find((q) => q.id === queueId);\n}\n\nexport function getNextStep(queueId, stepIndex) {\n const queue = getQueueById(queueId);\n if (!queue || !queue.steps || stepIndex < 0 || stepIndex >= queue.steps.length - 1) {\n return undefined;\n }\n const step = queue.steps[stepIndex + 1];\n return step ? { workerId: step.workerId, delaySeconds: step.delaySeconds, mapInputFromPrev: step.mapInputFromPrev } : undefined;\n}\n\nexport function invokeMapInput(queueId, stepIndex, initialInput, previousOutputs) {\n const queue = getQueueById(queueId);\n const step = queue?.steps?.[stepIndex];\n const fnName = step?.mapInputFromPrev;\n if (!fnName) return previousOutputs.length ? previousOutputs[previousOutputs.length - 1].output : initialInput;\n const mod = queueModules[queueId];\n if (!mod || typeof mod[fnName] !== 'function') return previousOutputs.length ? previousOutputs[previousOutputs.length - 1].output : initialInput;\n return mod[fnName](initialInput, previousOutputs);\n}\n`;\n\n const registryPath = path.join(generatedDir, 'workerQueues.registry.js');\n fs.writeFileSync(registryPath, registryContent);\n console.log(chalk.green(`✓ Generated queue registry: ${registryPath}`));\n\n // Note: For dispatchQueue in app (e.g. Vercel), use in-memory registry:\n // app/ai/queues/registry.ts imports from .queue.ts and exports queueRegistry.\n}\n\n/**\n * Returns worker IDs that participate in any queue (for wrapping and callee injection).\n */\nfunction getWorkersInQueues(queues: QueueInfo[]): Set<string> {\n const set = new Set<string>();\n for (const q of queues) {\n for (const step of q.steps) {\n set.add(step.workerId);\n }\n }\n return set;\n}\n\n/**\n * Merges queue next-step worker IDs into calleeIds so WORKER_QUEUE_URL_* gets injected.\n */\nfunction mergeQueueCallees(\n calleeIds: Map<string, Set<string>>,\n queues: QueueInfo[],\n workers: WorkerInfo[]\n): Map<string, Set<string>> {\n const merged = new Map(calleeIds);\n const workerIds = new Set(workers.map((w) => w.id));\n\n for (const queue of queues) {\n for (let i = 0; i < queue.steps.length - 1; i++) {\n const fromWorkerId = queue.steps[i].workerId;\n const toWorkerId = queue.steps[i + 1].workerId;\n if (!workerIds.has(toWorkerId)) continue;\n let callees = merged.get(fromWorkerId);\n if (!callees) {\n callees = new Set<string>();\n merged.set(fromWorkerId, callees);\n }\n callees.add(toWorkerId);\n }\n }\n return merged;\n}\n\n/**\n * Generates Lambda handler entrypoints for each worker.\n */\nasync function generateHandlers(\n workers: WorkerInfo[],\n outputDir: string,\n queues: QueueInfo[] = []\n): Promise<void> {\n const handlersDir = path.join(outputDir, 'handlers');\n const workersSubdir = path.join(handlersDir, 'workers');\n const workersInQueues = getWorkersInQueues(queues);\n\n // Only clean workers subdir so handlers/api and handlers/queues can coexist\n if (fs.existsSync(workersSubdir)) {\n fs.rmSync(workersSubdir, { recursive: true, force: true });\n }\n fs.mkdirSync(handlersDir, { recursive: true });\n fs.mkdirSync(workersSubdir, { recursive: true });\n\n for (const worker of workers) {\n // Create directory structure\n // We output JS files now, so change extension in path\n const handlerFile = path.join(handlersDir, 
worker.handlerPath.replace('handlers/', '') + '.js');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n // Generate handler entrypoint\n // Convert app/ai/agents/my-worker.worker.ts to import path\n // We need relative path from .serverless-workers/handlers/agent/ to original source\n // Original: /path/to/project/app/ai/agents/my-worker.worker.ts\n // Handler: /path/to/project/.serverless-workers/handlers/agent/my-worker.handler.ts\n // Import should look like: ../../../app/ai/agents/my-worker.worker\n\n const handlerAbsPath = path.resolve(handlerFile);\n const workerAbsPath = path.resolve(worker.filePath);\n\n // Calculate relative path from handler directory to worker file\n let relativeImportPath = path.relative(path.dirname(handlerAbsPath), workerAbsPath);\n\n // Ensure it starts with ./ or ../\n if (!relativeImportPath.startsWith('.')) {\n relativeImportPath = './' + relativeImportPath;\n }\n\n // Remove extension for import\n relativeImportPath = relativeImportPath.replace(/\\.ts$/, '');\n // Normalize slashes for Windows\n relativeImportPath = relativeImportPath.split(path.sep).join('/');\n\n // Detect export: \"export default createWorker\" vs \"export const X = createWorker\"\n const fileContent = fs.readFileSync(worker.filePath, 'utf-8');\n const defaultExport = /export\\s+default\\s+createWorker/.test(fileContent);\n const exportMatch = fileContent.match(/export\\s+(const|let)\\s+(\\w+)\\s*=\\s*createWorker/);\n const exportName = exportMatch ? exportMatch[2] : 'worker';\n\n // 1. Create a temporary TS entrypoint\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n\n const workerRef = defaultExport\n ? 'workerModule.default'\n : `workerModule.${exportName}`;\n\n const inQueue = workersInQueues.has(worker.id);\n const registryRelPath = path\n .relative(path.dirname(path.resolve(handlerFile)), path.join(outputDir, 'generated', 'workerQueues.registry'))\n .split(path.sep)\n .join('/');\n const registryImportPath = registryRelPath.startsWith('.') ? registryRelPath : './' + registryRelPath;\n\n const handlerCreation = inQueue\n ? `\nimport { createLambdaHandler, wrapHandlerForQueue } from '@microfox/ai-worker/handler';\nimport { getQueueJob } from '@microfox/ai-worker/queueJobStore';\nimport * as queueRegistry from '${registryImportPath}';\nimport * as workerModule from '${relativeImportPath}';\n\nconst WORKER_LOG_PREFIX = '[WorkerEntrypoint]';\n\nconst workerAgent = ${workerRef};\nif (!workerAgent || typeof workerAgent.handler !== 'function') {\n throw new Error('Worker module must export a createWorker result (default or named) with .handler');\n}\n\nconst queueRuntime = {\n getNextStep: queueRegistry.getNextStep,\n invokeMapInput: queueRegistry.invokeMapInput,\n getQueueJob,\n};\nconst wrappedHandler = wrapHandlerForQueue(workerAgent.handler, queueRuntime);\n\nconst baseHandler = createLambdaHandler(wrappedHandler, workerAgent.outputSchema);\n\nexport const handler = async (event: any, context: any) => {\n const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;\n let queueId, queueJobId;\n try {\n const first = (event as any)?.Records?.[0];\n if (first?.body) {\n const body = typeof first.body === 'string' ? JSON.parse(first.body) : first.body;\n const qc = body?.input?.__workerQueue ?? 
body?.metadata?.__workerQueue;\n if (qc?.id) queueId = qc.id;\n if (qc?.queueJobId) queueJobId = qc.queueJobId;\n }\n console.log(WORKER_LOG_PREFIX, {\n workerId: workerAgent.id,\n inQueue: true,\n ...(queueId && { queueId }),\n ...(queueJobId && { queueJobId }),\n records,\n requestId: (context as any)?.awsRequestId,\n });\n } catch {\n // Best-effort logging only\n }\n return baseHandler(event, context);\n};\n\nexport const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;\n`\n : `\nimport { createLambdaHandler } from '@microfox/ai-worker/handler';\nimport * as workerModule from '${relativeImportPath}';\n\nconst WORKER_LOG_PREFIX = '[WorkerEntrypoint]';\n\nconst workerAgent = ${workerRef};\nif (!workerAgent || typeof workerAgent.handler !== 'function') {\n throw new Error('Worker module must export a createWorker result (default or named) with .handler');\n}\n\nconst baseHandler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);\n\nexport const handler = async (event: any, context: any) => {\n const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;\n try {\n console.log(WORKER_LOG_PREFIX, {\n workerId: workerAgent.id,\n inQueue: false,\n records,\n requestId: (context as any)?.awsRequestId,\n });\n } catch {\n // Best-effort logging only\n }\n return baseHandler(event, context);\n};\n\nexport const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;\n`;\n\n const tempEntryContent = handlerCreation;\n fs.writeFileSync(tempEntryFile, tempEntryContent);\n\n // 2. Bundle using esbuild\n try {\n // Plugin to fix lazy-cache issue where forOwn is not properly added to utils\n // The issue: require_for_own() is called directly instead of through the lazy-cache proxy\n const fixLazyCachePlugin: esbuild.Plugin = {\n name: 'fix-lazy-cache',\n setup(build) {\n build.onEnd(async (result) => {\n if (result.errors.length > 0) return;\n\n // Read the bundled file\n let bundledCode = fs.readFileSync(handlerFile, 'utf-8');\n let modified = false;\n\n // Fix the lazy-cache pattern in clone-deep/utils.js\n // Pattern: require_for_own(); should be require(\"for-own\", \"forOwn\");\n // This ensures forOwn is properly added to the utils object via lazy-cache\n // Match the pattern more flexibly to handle different whitespace\n const pattern = /(require\\(\"kind-of\",\\s*\"typeOf\"\\);\\s*)require_for_own\\(\\);/g;\n\n if (pattern.test(bundledCode)) {\n bundledCode = bundledCode.replace(\n pattern,\n '$1require(\"for-own\", \"forOwn\");'\n );\n modified = true;\n }\n\n // Fix (0, import_node_module.createRequire)(import_meta.url) - esbuild emits import_meta.url\n // which is undefined in CJS Lambda. 
Polyfill so createRequire gets a valid file URL.\n if (bundledCode.includes('import_meta.url')) {\n bundledCode = bundledCode.replace(\n /import_meta\\.url/g,\n 'require(\"url\").pathToFileURL(__filename).href'\n );\n modified = true;\n }\n\n // Fix createRequire(undefined) / createRequire(void 0) if any dependency emits that\n const beforeCreateRequire = bundledCode;\n bundledCode = bundledCode.replace(\n /\\bcreateRequire\\s*\\(\\s*(?:undefined|void\\s*0)\\s*\\)/g,\n 'createRequire(require(\"url\").pathToFileURL(__filename).href)'\n );\n if (bundledCode !== beforeCreateRequire) modified = true;\n\n if (modified) {\n fs.writeFileSync(handlerFile, bundledCode, 'utf-8');\n }\n });\n },\n };\n\n await esbuild.build({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n format: 'cjs',\n outfile: handlerFile,\n // We exclude aws-sdk as it's included in Lambda runtime\n // We exclude canvas because it's a binary dependency often problematic in bundling\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n // Force lazy-cache to eagerly load modules during bundling\n // This prevents runtime dynamic require() calls that fail in bundled code\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n // Force bundling of all packages to avoid runtime module resolution issues\n // This ensures clone-deep, lazy-cache, and all transitive deps are bundled\n packages: 'bundle',\n plugins: [fixLazyCachePlugin],\n logLevel: 'error',\n });\n\n // 3. Cleanup temp file\n fs.unlinkSync(tempEntryFile);\n\n } catch (error) {\n console.error(chalk.red(`Error bundling handler for ${worker.id}:`), error);\n // Don't delete temp file on error for debugging\n }\n }\n console.log(chalk.green(`✓ Generated ${workers.length} bundled handlers`));\n}\n\nfunction generateDocsHandler(outputDir: string, serviceName: string, stage: string, region: string): void {\n const apiDir = path.join(outputDir, 'handlers', 'api');\n const handlerFile = path.join(apiDir, 'docs.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated docs handler for Microfox compatibility\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n // Return OpenAPI JSON for Microfox\n const openapi = {\n openapi: '3.0.3',\n info: {\n title: 'AI Worker Service',\n version: '1.0.0',\n description: 'Auto-generated OpenAPI for background workers service',\n },\n servers: [\n {\n url: 'https://{apiId}.execute-api.{region}.amazonaws.com/{stage}',\n variables: {\n apiId: { default: 'REPLACE_ME' },\n region: { default: '${region}' },\n stage: { default: '${stage}' },\n },\n },\n ],\n paths: {\n '/docs.json': {\n get: {\n operationId: 'getDocs',\n summary: 'Get OpenAPI schema',\n responses: {\n '200': {\n description: 'OpenAPI JSON',\n content: {\n 'application/json': {\n schema: { type: 'object' },\n },\n },\n },\n },\n },\n },\n '/workers/config': {\n get: {\n operationId: 'getWorkersConfig',\n summary: 'Get workers config (queue urls map)',\n parameters: [\n {\n name: 'x-workers-config-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key 
header (if configured)',\n },\n ],\n responses: {\n '200': {\n description: 'Workers config map',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n version: { type: 'string' },\n stage: { type: 'string' },\n region: { type: 'string' },\n workers: { type: 'object' },\n },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n '/workers/trigger': {\n post: {\n operationId: 'triggerWorker',\n summary: 'Trigger a worker by sending a raw SQS message body',\n parameters: [\n {\n name: 'workerId',\n in: 'query',\n required: false,\n schema: { type: 'string' },\n description: 'Worker ID (can also be provided in JSON body as workerId)',\n },\n {\n name: 'x-workers-trigger-key',\n in: 'header',\n required: false,\n schema: { type: 'string' },\n description: 'Optional API key header (if configured)',\n },\n ],\n requestBody: {\n required: true,\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n workerId: { type: 'string' },\n // Prefer sending the exact SQS message body your worker expects\n body: { type: 'object' },\n messageBody: { type: 'string' },\n },\n additionalProperties: true,\n },\n },\n },\n },\n responses: {\n '200': {\n description: 'Enqueued',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: {\n ok: { type: 'boolean' },\n workerId: { type: 'string' },\n stage: { type: 'string' },\n queueName: { type: 'string' },\n queueUrl: { type: 'string' },\n messageId: { type: 'string' },\n },\n },\n },\n },\n },\n '400': {\n description: 'Bad request',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n '401': {\n description: 'Unauthorized',\n content: {\n 'application/json': {\n schema: {\n type: 'object',\n properties: { error: { type: 'string' } },\n },\n },\n },\n },\n },\n },\n },\n },\n 'x-service': {\n serviceName: '${serviceName}',\n stage: '${stage}',\n region: '${region}',\n },\n };\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(openapi, null, 2),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated docs.json handler`));\n}\n\nfunction generateTriggerHandler(outputDir: string, serviceName: string): void {\n const apiDir = path.join(outputDir, 'handlers', 'api');\n const handlerFile = path.join(apiDir, 'workers-trigger.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated worker trigger handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';\n\nconst 
SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nfunction jsonResponse(statusCode: number, body: any): APIGatewayProxyResult {\n return {\n statusCode,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify(body),\n };\n}\n\nexport const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {\n // Optional API key\n const apiKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'];\n if (providedKey !== apiKey) {\n return jsonResponse(401, { error: 'Unauthorized' });\n }\n }\n\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n const qsWorkerId = event.queryStringParameters?.workerId;\n\n let parsedBody: any = undefined;\n if (event.body) {\n try {\n parsedBody = JSON.parse(event.body);\n } catch {\n parsedBody = undefined;\n }\n }\n\n const workerId = (parsedBody && parsedBody.workerId) || qsWorkerId;\n if (!workerId || typeof workerId !== 'string') {\n return jsonResponse(400, { error: 'workerId is required (query param workerId or JSON body workerId)' });\n }\n\n // Prefer JSON body fields, otherwise send raw event.body\n let messageBody: string | undefined;\n if (parsedBody && typeof parsedBody.messageBody === 'string') {\n messageBody = parsedBody.messageBody;\n } else if (parsedBody && parsedBody.body !== undefined) {\n messageBody = typeof parsedBody.body === 'string' ? parsedBody.body : JSON.stringify(parsedBody.body);\n } else if (event.body) {\n messageBody = event.body;\n }\n\n if (!messageBody) {\n return jsonResponse(400, { error: 'body/messageBody is required' });\n }\n\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n const sqs = new SQSClient({ region });\n\n let queueUrl: string;\n try {\n const urlRes = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (!urlRes.QueueUrl) {\n return jsonResponse(404, { error: 'Queue URL not found', queueName });\n }\n queueUrl = String(urlRes.QueueUrl);\n } catch (e: any) {\n return jsonResponse(404, { error: 'Queue does not exist or not accessible', queueName, message: String(e?.message || e) });\n }\n\n try {\n const sendRes = await sqs.send(new SendMessageCommand({ QueueUrl: queueUrl, MessageBody: messageBody }));\n return jsonResponse(200, {\n ok: true,\n workerId,\n stage,\n queueName,\n queueUrl,\n messageId: sendRes.MessageId || null,\n });\n } catch (e: any) {\n return jsonResponse(500, { error: 'Failed to send message', message: String(e?.message || e) });\n }\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle',\n logLevel: 'error',\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated /workers/trigger handler`));\n}\n\n/**\n * Generates queue Lambda for each queue. Invoked by schedule (if any) or by HTTP POST\n * (dispatch proxy). 
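 *
 * A hypothetical client call against the generated endpoint (base URL, queue id,
 * and payload below are illustrative, not taken from this package):
 *
 * @example
 * const res = await fetch(`${baseUrl}/queues/my-queue/start`, {
 *   method: 'POST',
 *   headers: {
 *     'Content-Type': 'application/json',
 *     // only checked when WORKERS_TRIGGER_API_KEY is configured on the service
 *     'x-workers-trigger-key': process.env.WORKERS_TRIGGER_API_KEY ?? '',
 *   },
 *   body: JSON.stringify({ input: { topic: 'demo' }, webhookUrl: 'https://example.com/hook' }),
 * });
 * const json = await res.json(); // { queueId: 'my-queue', jobId: 'job-...', status: 'queued' }
 *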
Single place to log \"queue X started\" and send first worker message.\n */\nfunction generateQueueHandler(\n outputDir: string,\n queue: QueueInfo,\n serviceName: string\n): void {\n // File-safe queue id for path (keep dashes for readability, e.g. demo-data-processor)\n const queueFileId = queue.id.replace(/[^a-zA-Z0-9-]/g, '-').replace(/-+/g, '-');\n const queuesDir = path.join(outputDir, 'handlers', 'queues');\n const handlerFile = path.join(queuesDir, `${queueFileId}.js`);\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const firstWorkerId = queue.steps[0]?.workerId;\n if (!firstWorkerId) return;\n\n const handlerContent = `/**\n * Auto-generated queue handler for queue \"${queue.id}\"\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n * Invoked by schedule (if configured) or HTTP POST /queues/${queue.id}/start (dispatch proxy).\n */\n\nimport { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';\nimport { upsertInitialQueueJob } from '@microfox/ai-worker/queueJobStore';\n\nconst QUEUE_ID = ${JSON.stringify(queue.id)};\nconst FIRST_WORKER_ID = ${JSON.stringify(firstWorkerId)};\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nfunction isHttpEvent(event: any): event is { body?: string; requestContext?: any } {\n return event && typeof event.requestContext === 'object' && (event.body !== undefined || event.httpMethod === 'POST');\n}\n\nexport const handler = async (event: any) => {\n const stage = process.env.ENVIRONMENT || process.env.STAGE || 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n const queueName = \\`\\${SERVICE_NAME}-\\${FIRST_WORKER_ID}-\\${stage}\\`;\n\n let jobId: string;\n let initialInput: Record<string, any>;\n let context: Record<string, any> = {};\n let metadata: Record<string, any> = {};\n let webhookUrl: string | undefined;\n\n if (isHttpEvent(event)) {\n const apiKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (apiKey) {\n const provided = (event.headers && (event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'])) || '';\n if (provided !== apiKey) {\n return { statusCode: 401, headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ error: 'Unauthorized' }) };\n }\n }\n let body: { input?: any; initialInput?: any; jobId?: string; metadata?: any; context?: any; webhookUrl?: string } = {};\n if (event.body) {\n try {\n body = typeof event.body === 'string' ? JSON.parse(event.body) : event.body;\n } catch (_) {}\n }\n jobId = (body.jobId && String(body.jobId).trim()) || 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);\n const rawInput = body.input != null ? body.input : body.initialInput;\n initialInput = rawInput != null && typeof rawInput === 'object' ? rawInput : {};\n context = body.context && typeof body.context === 'object' ? body.context : {};\n metadata = body.metadata && typeof body.metadata === 'object' ? body.metadata : {};\n webhookUrl = typeof body.webhookUrl === 'string' ? 
body.webhookUrl : undefined;\n\n const response = { statusCode: 200, headers: { 'Content-Type': 'application/json' }, body: '' };\n try {\n await upsertInitialQueueJob({ queueJobId: jobId, queueId: QUEUE_ID, firstWorkerId: FIRST_WORKER_ID, firstWorkerJobId: jobId, metadata });\n await sendFirstMessage(region, queueName, jobId, initialInput, context, metadata, webhookUrl, 'http');\n response.body = JSON.stringify({ queueId: QUEUE_ID, jobId, status: 'queued' });\n } catch (err: any) {\n response.statusCode = 500;\n response.body = JSON.stringify({ error: err?.message || String(err) });\n }\n return response;\n }\n\n // Scheduled invocation\n jobId = 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);\n initialInput = {};\n try {\n await upsertInitialQueueJob({ queueJobId: jobId, queueId: QUEUE_ID, firstWorkerId: FIRST_WORKER_ID, firstWorkerJobId: jobId, metadata: {} });\n } catch (_) {}\n await sendFirstMessage(region, queueName, jobId, initialInput, context, metadata, webhookUrl, 'schedule');\n};\n\nasync function sendFirstMessage(\n region: string,\n queueName: string,\n jobId: string,\n initialInput: Record<string, any>,\n context: Record<string, any>,\n metadata: Record<string, any>,\n webhookUrl?: string,\n trigger?: 'schedule' | 'http'\n) {\n const sqs = new SQSClient({ region });\n const { QueueUrl } = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (!QueueUrl) {\n throw new Error('Queue URL not found: ' + queueName);\n }\n\n const queueContext = { id: QUEUE_ID, stepIndex: 0, initialInput, queueJobId: jobId };\n const messageBody = {\n workerId: FIRST_WORKER_ID,\n jobId,\n input: { ...initialInput, __workerQueue: queueContext },\n context,\n metadata: { ...metadata, __workerQueue: queueContext },\n ...(webhookUrl ? { webhookUrl } : {}),\n timestamp: new Date().toISOString(),\n };\n\n await sqs.send(new SendMessageCommand({\n QueueUrl,\n MessageBody: JSON.stringify(messageBody),\n }));\n\n console.log('[queue] Dispatched first worker', { queueId: QUEUE_ID, jobId, workerId: FIRST_WORKER_ID, trigger: trigger ?? 
'unknown' });\n}\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: ['aws-sdk', 'canvas', '@microfox/puppeteer-sls', '@sparticuz/chromium'],\n packages: 'bundle',\n logLevel: 'error',\n });\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated queue handler for ${queue.id}`));\n}\n\n/**\n * Generates workers-config Lambda handler.\n */\nfunction generateWorkersConfigHandler(\n outputDir: string,\n workers: WorkerInfo[],\n serviceName: string,\n queues: QueueInfo[] = []\n): void {\n // We'll bundle this one too\n const apiDir = path.join(outputDir, 'handlers', 'api');\n const handlerFile = path.join(apiDir, 'workers-config.js');\n const tempEntryFile = handlerFile.replace('.js', '.temp.ts');\n const handlerDir = path.dirname(handlerFile);\n\n if (!fs.existsSync(handlerDir)) {\n fs.mkdirSync(handlerDir, { recursive: true });\n }\n\n const handlerContent = `/**\n * Auto-generated workers-config Lambda handler\n * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli\n */\n\nimport { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';\nimport { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';\n\n// Worker IDs and queue definitions embedded at build time.\nconst WORKER_IDS: string[] = ${JSON.stringify(workers.map(w => w.id), null, 2)};\nconst QUEUES = ${JSON.stringify(queues.map(q => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};\nconst SERVICE_NAME = ${JSON.stringify(serviceName)};\n\nexport const handler = async (\n event: APIGatewayProxyEvent\n): Promise<APIGatewayProxyResult> => {\n// ... same logic ...\n // Check API key if configured\n const apiKey = process.env.WORKERS_CONFIG_API_KEY;\n if (apiKey) {\n const providedKey = event.headers['x-workers-config-key'] || event.headers['X-Workers-Config-Key'];\n if (providedKey !== apiKey) {\n return {\n statusCode: 401,\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ error: 'Unauthorized' }),\n };\n }\n }\n\n // Stage resolution:\n // - Prefer API Gateway stage (microfox tends to deploy APIs on \"prod\")\n // - Fallback to ENVIRONMENT/STAGE env vars\n // - Default to \"prod\" (safer for microfox) if nothing else is set\n const stage =\n (event as any)?.requestContext?.stage ||\n process.env.ENVIRONMENT ||\n process.env.STAGE ||\n 'prod';\n const region = process.env.AWS_REGION || 'us-east-1';\n\n // Resolve queue URLs dynamically via SQS so we return actual URLs.\n // NOTE: Node 20 Lambda runtime does NOT guarantee 'aws-sdk' v2 is available.\n // We use AWS SDK v3 and bundle it into this handler.\n const sqs = new SQSClient({ region });\n const workers: Record<string, { queueUrl: string; region: string }> = {};\n const attemptedQueueNames: string[] = [];\n const errors: Array<{ workerId: string; queueName: string; message: string; name?: string }> = [];\n const debug = event.queryStringParameters?.debug === '1' || event.queryStringParameters?.debug === 'true';\n\n await Promise.all(\n WORKER_IDS.map(async (workerId) => {\n const queueName = \\`\\${SERVICE_NAME}-\\${workerId}-\\${stage}\\`;\n attemptedQueueNames.push(queueName);\n try {\n const result = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));\n if (result?.QueueUrl) {\n workers[workerId] = { queueUrl: String(result.QueueUrl), region };\n }\n } catch (e) {\n const err = e as any;\n const message = String(err?.message || err || 
'Unknown error');\n const name = err?.name ? String(err.name) : undefined;\n // Log so CloudWatch shows what's going on (nonexistent queue vs permission vs region).\n console.error('[workers-config] getQueueUrl failed', { workerId, queueName, name, message });\n errors.push({ workerId, queueName, name, message });\n }\n })\n );\n\n return {\n statusCode: 200,\n headers: {\n 'Content-Type': 'application/json',\n 'Access-Control-Allow-Origin': '*',\n },\n body: JSON.stringify({\n version: '1.0.0',\n stage,\n region,\n workers,\n queues: QUEUES,\n ...(debug ? { attemptedQueueNames, errors } : {}),\n }),\n };\n};\n`;\n\n fs.writeFileSync(tempEntryFile, handlerContent);\n\n // Bundle it\n esbuild.buildSync({\n entryPoints: [tempEntryFile],\n bundle: true,\n platform: 'node',\n target: 'node20',\n outfile: handlerFile,\n external: [\n 'aws-sdk',\n 'canvas',\n '@microfox/puppeteer-sls',\n \"@sparticuz/chromium\"\n ],\n define: {\n 'process.env.UNLAZY': '\"true\"',\n },\n packages: 'bundle'\n });\n\n fs.unlinkSync(tempEntryFile);\n console.log(chalk.green(`✓ Generated workers-config handler`));\n}\n\n/**\n * Reads environment variables from .env file.\n */\nfunction loadEnvVars(envPath: string = '.env'): Record<string, string> {\n const env: Record<string, string> = {};\n\n if (!fs.existsSync(envPath)) {\n console.warn(chalk.yellow(`⚠️ .env file not found at ${envPath}`));\n return env;\n }\n\n const content = fs.readFileSync(envPath, 'utf-8');\n const lines = content.split('\\n');\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (!trimmed || trimmed.startsWith('#')) continue;\n\n const match = trimmed.match(/^([^=]+)=(.*)$/);\n if (match) {\n const key = match[1].trim();\n const value = match[2].trim().replace(/^[\"']|[\"']$/g, '');\n env[key] = value;\n }\n }\n\n return env;\n}\n\n/**\n * Converts schedule configuration to serverless.yml schedule event format.\n * Supports simple strings, configuration objects, and arrays of both.\n */\nfunction processScheduleEvents(scheduleConfig: any): any[] {\n if (!scheduleConfig) {\n return [];\n }\n\n const events: any[] = [];\n\n // Normalize to array\n const schedules = Array.isArray(scheduleConfig) ? scheduleConfig : [scheduleConfig];\n\n for (const schedule of schedules) {\n // Simple string format: 'rate(2 hours)' or 'cron(0 12 * * ? 
*)'\n if (typeof schedule === 'string') {\n events.push({\n schedule: schedule,\n });\n continue;\n }\n\n // Full configuration object\n if (typeof schedule === 'object' && schedule !== null) {\n const scheduleEvent: any = { schedule: {} };\n\n // Handle rate - can be string or array of strings\n if (schedule.rate) {\n if (Array.isArray(schedule.rate)) {\n // Multiple rate expressions\n scheduleEvent.schedule.rate = schedule.rate;\n } else {\n // Single rate expression\n scheduleEvent.schedule.rate = schedule.rate;\n }\n } else {\n // If no rate specified but we have a schedule object, skip it\n continue;\n }\n\n // Optional fields\n if (schedule.enabled !== undefined) {\n scheduleEvent.schedule.enabled = schedule.enabled;\n }\n if (schedule.input !== undefined) {\n scheduleEvent.schedule.input = schedule.input;\n }\n if (schedule.inputPath !== undefined) {\n scheduleEvent.schedule.inputPath = schedule.inputPath;\n }\n if (schedule.inputTransformer !== undefined) {\n scheduleEvent.schedule.inputTransformer = schedule.inputTransformer;\n }\n if (schedule.name !== undefined) {\n scheduleEvent.schedule.name = schedule.name;\n }\n if (schedule.description !== undefined) {\n scheduleEvent.schedule.description = schedule.description;\n }\n if (schedule.method !== undefined) {\n scheduleEvent.schedule.method = schedule.method;\n }\n if (schedule.timezone !== undefined) {\n scheduleEvent.schedule.timezone = schedule.timezone;\n }\n\n // If schedule object only has rate (or is minimal), we can simplify it\n // Serverless Framework accepts both { schedule: 'rate(...)' } and { schedule: { rate: 'rate(...)' } }\n if (Object.keys(scheduleEvent.schedule).length === 1 && scheduleEvent.schedule.rate) {\n // Simplify to string format if it's just a single rate\n if (typeof scheduleEvent.schedule.rate === 'string') {\n events.push({\n schedule: scheduleEvent.schedule.rate,\n });\n } else {\n // Keep object format for arrays\n events.push(scheduleEvent);\n }\n } else {\n events.push(scheduleEvent);\n }\n }\n }\n\n return events;\n}\n\n/**\n * Generates serverless.yml configuration.\n */\nfunction generateServerlessConfig(\n workers: WorkerInfo[],\n stage: string,\n region: string,\n envVars: Record<string, string>,\n serviceName: string,\n calleeIds: Map<string, Set<string>> = new Map(),\n queues: QueueInfo[] = []\n): ServerlessConfig {\n // Create SQS queues for each worker\n const resources: ServerlessConfig['resources'] = {\n Resources: {},\n Outputs: {},\n };\n\n const queueArns: Array<string | Record<string, any>> = [];\n\n // Update provider environment to use file(env.json)\n const providerEnvironment: any = {\n STAGE: stage,\n NODE_ENV: stage,\n };\n\n // Custom configuration including serverless-offline\n const customConfig: Record<string, any> = {\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n 'serverless-offline': {\n httpPort: 4000,\n lambdaPort: 4002,\n useChildProcesses: true,\n useWorkerThreads: true,\n noCookieValidation: true,\n allowCache: true,\n hideStackTraces: false,\n disableCookieValidation: true,\n noTimeout: true,\n environment: '\\${file(env.json)}',\n }\n };\n\n for (const worker of workers) {\n const queueName = `WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}`;\n const queueLogicalId = `${queueName}${stage}`;\n const dlqLogicalId = `${queueName}DLQ${stage}`;\n\n const sqsCfg = worker.workerConfig?.sqs;\n const retention =\n typeof sqsCfg?.messageRetentionPeriod === 'number'\n ? 
sqsCfg.messageRetentionPeriod\n : 1209600; // 14 days\n const dlqRetention =\n typeof sqsCfg?.deadLetterMessageRetentionPeriod === 'number'\n ? sqsCfg.deadLetterMessageRetentionPeriod\n : retention;\n const visibilityTimeout =\n typeof sqsCfg?.visibilityTimeout === 'number'\n ? sqsCfg.visibilityTimeout\n : (worker.workerConfig?.timeout || 300) + 60; // Add buffer\n const maxReceiveCountRaw =\n typeof sqsCfg?.maxReceiveCount === 'number' ? sqsCfg.maxReceiveCount : 1;\n // SQS does not support 0; treat <=0 as 1.\n const maxReceiveCount = Math.max(1, Math.floor(maxReceiveCountRaw));\n\n // DLQ (always create so we can support \"no retries\" mode safely)\n resources.Resources[dlqLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n QueueName: `\\${self:service}-${worker.id}-dlq-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n MessageRetentionPeriod: dlqRetention,\n },\n };\n\n resources.Resources[queueLogicalId] = {\n Type: 'AWS::SQS::Queue',\n Properties: {\n // Use ${self:service} to avoid hardcoding service name\n QueueName: `\\${self:service}-${worker.id}-\\${opt:stage, env:ENVIRONMENT, '${stage}'}`,\n VisibilityTimeout: visibilityTimeout,\n MessageRetentionPeriod: retention,\n RedrivePolicy: {\n deadLetterTargetArn: { 'Fn::GetAtt': [dlqLogicalId, 'Arn'] },\n maxReceiveCount,\n },\n },\n };\n\n resources.Outputs[`${queueLogicalId}Url`] = {\n Description: `Queue URL for worker ${worker.id}`,\n Value: { Ref: queueLogicalId },\n Export: {\n Name: `\\${self:service}-${worker.id}-queue-url`,\n },\n };\n\n queueArns.push({ 'Fn::GetAtt': [queueLogicalId, 'Arn'] });\n }\n\n // Create functions for each worker\n const functions: Record<string, any> = {};\n\n for (const worker of workers) {\n const functionName = toPrefixedCamel('worker', worker.id);\n\n // Start with SQS event (default)\n const events: any[] = [\n {\n sqs: {\n arn: { 'Fn::GetAtt': [`WorkerQueue${worker.id.replace(/[^a-zA-Z0-9]/g, '')}${stage}`, 'Arn'] },\n batchSize: 1,\n },\n },\n ];\n\n // Add schedule events if configured\n if (worker.workerConfig?.schedule) {\n const scheduleEvents = processScheduleEvents(worker.workerConfig.schedule);\n events.push(...scheduleEvents);\n }\n\n functions[functionName] = {\n // IMPORTANT: Keep AWS handler string to exactly one dot: \"<modulePath>.handler\"\n handler: `${worker.handlerPath}.handler`,\n timeout: worker.workerConfig?.timeout || 300,\n memorySize: worker.workerConfig?.memorySize || 512,\n events,\n };\n\n if (worker.workerConfig?.layers?.length) {\n functions[functionName].layers = worker.workerConfig.layers;\n }\n\n // Per-function env: queue URLs for workers this Lambda calls (ctx.dispatchWorker)\n const callees = calleeIds.get(worker.id);\n if (callees && callees.size > 0) {\n const env: Record<string, any> = {};\n for (const calleeId of callees) {\n const calleeWorker = workers.find((w) => w.id === calleeId);\n if (calleeWorker) {\n const queueLogicalId = `WorkerQueue${calleeWorker.id.replace(/[^a-zA-Z0-9]/g, '')}${stage}`;\n const envKey = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeId)}`;\n env[envKey] = { Ref: queueLogicalId };\n }\n }\n if (Object.keys(env).length > 0) {\n functions[functionName].environment = env;\n }\n }\n }\n\n // Add docs.json function for Microfox compatibility\n functions['getDocs'] = {\n handler: 'handlers/api/docs.handler',\n events: [\n {\n http: {\n path: '/docs.json',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers trigger endpoint (HTTP -> SQS SendMessage)\n functions['triggerWorker'] = {\n handler: 
'handlers/api/workers-trigger.handler',\n events: [\n {\n http: {\n path: '/workers/trigger',\n method: 'POST',\n cors: true,\n },\n },\n ],\n };\n\n // Add workers-config function\n functions['workersConfig'] = {\n handler: 'handlers/api/workers-config.handler',\n events: [\n {\n http: {\n path: 'workers/config',\n method: 'GET',\n cors: true,\n },\n },\n ],\n };\n\n // One function per queue: HTTP POST /queues/:queueId/start (dispatch proxy) + optional schedule\n for (const queue of queues) {\n const queueFileId = queue.id.replace(/[^a-zA-Z0-9-]/g, '-').replace(/-+/g, '-');\n const fnName = toPrefixedCamel('queue', queue.id);\n const events: any[] = [\n {\n http: {\n path: `queues/${queueFileId}/start`,\n method: 'POST',\n cors: true,\n },\n },\n ];\n if (queue.schedule) {\n events.push(...processScheduleEvents(queue.schedule));\n }\n functions[fnName] = {\n handler: `handlers/queues/${queueFileId}.handler`,\n timeout: 60,\n memorySize: 128,\n events,\n };\n }\n\n // Filter env vars - only include safe ones (exclude secrets that should be in AWS Secrets Manager)\n const safeEnvVars: Record<string, string> = {};\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'UPSTASH_', 'WORKER_', 'WORKERS_', 'WORKFLOW_', 'REMOTION_', 'QUEUE_JOB_', 'DEBUG_WORKER_QUEUES'];\n\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n\n for (const [key, value] of Object.entries(envVars)) {\n if (allowedPrefixes.some(prefix => key.startsWith(prefix))) {\n safeEnvVars[key] = value;\n }\n }\n\n // Add ApiEndpoints output for Microfox\n resources.Outputs['ApiEndpoints'] = {\n Description: \"API Endpoints\",\n Value: {\n \"Fn::Join\": [\n \"\",\n [\n \"API: https://\",\n { \"Ref\": \"ApiGatewayRestApi\" },\n \".execute-api.\",\n { \"Ref\": \"AWS::Region\" },\n `.amazonaws.com/\\${env:ENVIRONMENT, '${stage}'}`\n ]\n ]\n }\n };\n\n return {\n service: serviceName,\n package: {\n excludeDevDependencies: true,\n individually: true,\n // Handlers are fully bundled by esbuild (packages: 'bundle'); exclude node_modules to stay under Lambda 250 MB limit\n patterns: [\n '!venv/**',\n '!.idea/**',\n '!.vscode/**',\n '!src/**',\n '!node_modules/**',\n '!node_modules/serverless-offline/**',\n '!node_modules/typescript/**',\n '!node_modules/@types/**',\n '!node_modules/aws-sdk/**',\n '!node_modules/@aws-sdk/**'\n ],\n },\n custom: customConfig,\n provider: {\n name: 'aws',\n runtime: 'nodejs20.x',\n region,\n versionFunctions: false,\n // Use ENVIRONMENT from env.json to drive the actual deployed stage (Microfox defaults to prod).\n stage: `\\${env:ENVIRONMENT, '${stage}'}`,\n environment: '\\${file(env.json)}',\n iam: {\n role: {\n statements: [\n {\n Effect: 'Allow',\n Action: [\n 'sqs:SendMessage',\n 'sqs:ReceiveMessage',\n 'sqs:DeleteMessage',\n 'sqs:GetQueueAttributes',\n ],\n Resource: queueArns,\n },\n {\n Effect: 'Allow',\n Action: ['sqs:GetQueueUrl'],\n // GetQueueUrl is not resource-scoped for unknown queue ARNs, must be '*'\n Resource: '*',\n }\n ],\n },\n },\n },\n plugins: ['serverless-offline'],\n functions,\n resources,\n };\n}\n\n/**\n * Resolves queue URLs after deployment and generates workers-map.generated.ts\n */\nasync function generateWorkersMap(\n stage: string,\n region: string,\n outputDir: string\n): Promise<void> {\n const serverlessDir = path.join(outputDir, '.serverless');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: 
true });\n }\n\n // Need to scan workers again to get IDs for map generation\n // Or we could save this metadata in the build step.\n // For now, re-scanning is fine.\n const workers = await scanWorkers();\n\n // Try to read CloudFormation outputs\n const stackName = `ai-router-workers-${stage}-${stage}`;\n let queueUrls: Record<string, { queueUrl: string; region: string }> = {};\n\n const spinner = ora('Fetching CloudFormation outputs...').start();\n\n try {\n // Use AWS CLI to get stack outputs\n const output = execSync(\n `aws cloudformation describe-stacks --stack-name ${stackName} --region ${region} --query \"Stacks[0].Outputs\" --output json`,\n { encoding: 'utf-8', stdio: 'pipe' }\n );\n\n const outputs = JSON.parse(output);\n const outputMap: Record<string, string> = {};\n\n for (const output of outputs) {\n const key = output.OutputKey;\n if (key && key.endsWith('Url')) {\n const workerId = key.replace('WorkerQueue', '').replace('Url', '').toLowerCase();\n // The workerId from CF output might have stripped characters, need fuzzy match or consistent naming\n // Currently we use replace(/[^a-zA-Z0-9]/g, '') in CF output name\n outputMap[key] = output.OutputValue;\n }\n }\n\n // Match workers to queue URLs\n for (const worker of workers) {\n const sanitizedId = worker.id.replace(/[^a-zA-Z0-9]/g, '');\n const queueKey = `WorkerQueue${sanitizedId}${stage}Url`;\n\n // Look for key ending with this pattern to handle casing issues if any\n const matchingKey = Object.keys(outputMap).find(k => k.toLowerCase() === queueKey.toLowerCase());\n\n if (matchingKey && outputMap[matchingKey]) {\n queueUrls[worker.id] = {\n queueUrl: outputMap[matchingKey],\n region,\n };\n }\n }\n spinner.succeed('Fetched CloudFormation outputs');\n } catch (error) {\n spinner.warn('Could not fetch CloudFormation outputs. 
Using deterministic queue URLs.');\n for (const worker of workers) {\n queueUrls[worker.id] = {\n queueUrl: `https://sqs.${'${aws:region}'}.amazonaws.com/${'${aws:accountId}'}/${'${self:service}'}-${worker.id}-${stage}`,\n region,\n };\n }\n }\n\n // Generate TypeScript file\n const mapContent = `/**\n * Auto-generated workers map\n * DO NOT EDIT - This file is generated by deploy-workers script\n */\n\nexport const workersMap = ${JSON.stringify(queueUrls, null, 2)} as const;\n`;\n\n const mapFile = path.join(serverlessDir, 'workers-map.generated.ts');\n fs.writeFileSync(mapFile, mapContent);\n console.log(chalk.green(`✓ Generated workers map: ${mapFile}`));\n}\n\nasync function build(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n const aiPath = args['ai-path'] || 'app/ai';\n\n console.log(chalk.blue(`📦 Building workers (stage: ${stage}, region: ${region})...`));\n\n const spinner = ora('Scanning workers...').start();\n const workers = await scanWorkers(aiPath);\n\n if (workers.length === 0) {\n spinner.warn('No workers found.');\n return;\n }\n spinner.succeed(`Found ${workers.length} worker(s)`);\n workers.forEach(w => console.log(chalk.gray(` - ${w.id} (${w.filePath})`)));\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n if (!fs.existsSync(serverlessDir)) {\n fs.mkdirSync(serverlessDir, { recursive: true });\n }\n\n // Build an accurate dependencies map for Microfox installs:\n // include any npm packages imported by the worker entrypoints (and their local imports),\n // plus runtime packages used by generated handlers.\n // Job store backend is conditional on WORKER_DATABASE_TYPE; include only that backend (+ mongodb if user code uses it).\n const runtimeDeps = await collectRuntimeDependenciesForWorkers(\n workers.map((w) => w.filePath),\n process.cwd()\n );\n const jobStoreType = getJobStoreType();\n const filteredDeps = filterDepsForJobStore(runtimeDeps, jobStoreType);\n const dependencies = buildDependenciesMap(process.cwd(), filteredDeps);\n\n // Generate package.json for the serverless service (used by Microfox push)\n const packageJson = {\n name: 'ai-router-workers',\n version: '1.0.0',\n description: 'Auto-generated serverless workers',\n private: true,\n dependencies,\n scripts: {\n build: \"echo 'Already compiled.'\",\n },\n devDependencies: {\n serverless: '^3.38.0',\n 'serverless-offline': '^13.3.3',\n '@aws-sdk/client-sqs': '^3.700.0',\n },\n };\n fs.writeFileSync(\n path.join(serverlessDir, 'package.json'),\n JSON.stringify(packageJson, null, 2)\n );\n\n // No tsconfig.json needed as we are deploying bundled JS\n\n const envVars = loadEnvVars();\n\n // Detect env usage from worker entry files + their local dependency graph.\n // We use this to populate env.json with only envs that are actually referenced,\n // but ONLY if they exist in .env (we don't invent values).\n const workerEntryFiles = workers.map((w) => w.filePath);\n const { runtimeKeys: runtimeEnvKeys, buildtimeKeys: buildtimeEnvKeys } =\n await collectEnvUsageForWorkers(workerEntryFiles, process.cwd());\n const referencedEnvKeys = new Set<string>([\n ...Array.from(runtimeEnvKeys),\n ...Array.from(buildtimeEnvKeys),\n ]);\n\n // Light, helpful logging (avoid noisy huge dumps)\n const runtimeList = Array.from(runtimeEnvKeys).sort();\n const buildtimeList = Array.from(buildtimeEnvKeys).sort();\n const missingFromDotEnv = Array.from(referencedEnvKeys)\n .filter((k) => !(k in envVars))\n 
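    // Illustrative walk-through with made-up values: given a .env containing
    //   OPENAI_API_KEY=sk-test   UNRELATED=x
    // and worker code reading process.env.OPENAI_API_KEY and process.env.FOO:
    // FOO lands in missingFromDotEnv (warned below, never written to env.json),
    // OPENAI_API_KEY is written to env.json (allowed prefix and referenced),
    // and UNRELATED is dropped (neither referenced nor under an allowed prefix).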
.sort();\n if (runtimeList.length || buildtimeList.length) {\n console.log(\n chalk.blue(\n `ℹ️ Detected env usage from worker code: runtime=${runtimeList.length}, buildtime=${buildtimeList.length}`\n )\n );\n if (missingFromDotEnv.length > 0) {\n console.log(\n chalk.yellow(\n `⚠️ These referenced envs were not found in .env (so they will NOT be written to env.json): ${missingFromDotEnv\n .slice(0, 25)\n .join(', ')}${missingFromDotEnv.length > 25 ? ' ...' : ''}`\n )\n );\n }\n }\n\n let serviceName = (args['service-name'] as string | undefined)?.trim() || `ai-router-workers-${stage}`;\n\n // Check for microfox.json to customize service name\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n try {\n const microfoxConfig = JSON.parse(fs.readFileSync(microfoxJsonPath, 'utf-8'));\n if (microfoxConfig.projectId) {\n // Only override if user did not explicitly provide a service name\n if (!(args['service-name'] as string | undefined)?.trim()) {\n serviceName = getServiceNameFromProjectId(microfoxConfig.projectId);\n }\n console.log(chalk.blue(`ℹ️ Using service name from microfox.json: ${serviceName}`));\n }\n } catch (error) {\n console.warn(chalk.yellow('⚠️ Failed to parse microfox.json, using default service name'));\n }\n }\n\n const queues = await scanQueues(aiPath);\n if (queues.length > 0) {\n console.log(chalk.blue(`ℹ️ Found ${queues.length} queue(s): ${queues.map((q) => q.id).join(', ')}`));\n generateQueueRegistry(queues, serverlessDir, process.cwd());\n }\n\n ora('Generating handlers...').start().succeed('Generated handlers');\n await generateHandlers(workers, serverlessDir, queues);\n\n // Now import the bundled handlers to extract workerConfig\n const extractSpinner = ora('Extracting worker configs from bundled handlers...').start();\n for (const worker of workers) {\n try {\n const handlerFile = path.join(serverlessDir, worker.handlerPath + '.js');\n if (fs.existsSync(handlerFile)) {\n // Convert absolute path to file:// URL for ESM import (required on Windows)\n const handlerUrl = pathToFileURL(path.resolve(handlerFile)).href;\n\n try {\n // Import the bundled handler (which exports exportedWorkerConfig)\n // Note: The handler might have runtime errors, but we only need the exportedWorkerConfig\n const module = await import(handlerUrl);\n\n // exportedWorkerConfig is exported directly from the handler file\n if (module.exportedWorkerConfig) {\n worker.workerConfig = module.exportedWorkerConfig;\n if (module.exportedWorkerConfig.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${module.exportedWorkerConfig.layers.length} layer(s)`));\n }\n } else {\n worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };\n console.log(chalk.gray(` ℹ ${worker.id}: using default config (exportedWorkerConfig not in bundle)`));\n }\n } catch (importError: any) {\n // If import fails due to runtime errors (e.g., lazy-cache initialization in bundled code),\n // try to extract config from source file as fallback. 
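// The fallback can be sketched as a standalone helper mirroring the regex and
// Function-constructor parsing used below; the name
// `extractWorkerConfigFromSource` is illustrative, and this approach is only
// safe for trusted project files:

import * as fs from 'fs';

function extractWorkerConfigFromSource(filePath: string): Record<string, any> | null {
  const source = fs.readFileSync(filePath, 'utf-8');
  // Grab the `export const workerConfig = { ... };` object literal.
  const match = source.match(/export\s+const\s+workerConfig[^=]*=\s*(\{[\s\S]*?\});/);
  if (!match) return null;
  const literal = match[1]
    .replace(/\/\*[\s\S]*?\*\//g, '')     // strip block comments
    .replace(/(^|\s)\/\/[^\n]*/gm, '$1'); // strip line comments
  try {
    // Evaluate just the object literal instead of importing the whole module.
    return new Function('return ' + literal)();
  } catch {
    return null;
  }
}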
This is expected for some bundled handlers.\n // The fallback will work fine, and the Lambda runtime will handle the bundled code correctly.\n console.log(chalk.gray(` ℹ ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || 'runtime error'}...)`));\n\n // Fallback: try to read the source worker file and extract workerConfig\n try {\n const sourceContent = fs.readFileSync(worker.filePath, 'utf-8');\n // Look for exported workerConfig\n const workerConfigMatch = sourceContent.match(/export\\s+const\\s+workerConfig[^=]*=\\s*(\\{[\\s\\S]*?\\});/);\n if (workerConfigMatch) {\n // Try to parse it as JSON (after cleaning up comments)\n let configStr = workerConfigMatch[1]\n .replace(/\\/\\*[\\s\\S]*?\\*\\//g, '') // Remove block comments\n .replace(/(^|\\s)\\/\\/[^\\n]*/gm, '$1'); // Remove line comments\n\n // Use Function constructor to parse the object (safer than eval)\n const configObj = new Function('return ' + configStr)();\n if (configObj && (configObj.layers || configObj.timeout || configObj.memorySize || configObj.schedule)) {\n worker.workerConfig = configObj;\n if (configObj.layers?.length) {\n console.log(chalk.gray(` ✓ ${worker.id}: found ${configObj.layers.length} layer(s) from source file`));\n }\n if (configObj.schedule) {\n console.log(chalk.gray(` ✓ ${worker.id}: found schedule configuration`));\n }\n }\n }\n } catch (fallbackError) {\n // If fallback also fails, apply defaults\n worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };\n console.log(chalk.gray(` ℹ ${worker.id}: using default config (fallback extraction failed)`));\n }\n }\n } else {\n worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };\n console.warn(chalk.yellow(` ⚠ ${worker.id}: handler file not found: ${handlerFile}, using defaults`));\n }\n // Ensure every worker has a config (defaults if still missing)\n if (!worker.workerConfig) {\n worker.workerConfig = { timeout: 300, memorySize: 512 };\n console.log(chalk.gray(` ℹ ${worker.id}: using default config`));\n }\n } catch (error: any) {\n worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };\n console.warn(chalk.yellow(` ⚠ ${worker.id}: failed to extract config: ${error?.message || error}, using defaults`));\n }\n }\n extractSpinner.succeed('Extracted configs');\n\n generateWorkersConfigHandler(serverlessDir, workers, serviceName, queues);\n generateDocsHandler(serverlessDir, serviceName, stage, region);\n generateTriggerHandler(serverlessDir, serviceName);\n\n for (const queue of queues) {\n generateQueueHandler(serverlessDir, queue, serviceName);\n }\n\n let calleeIds = await collectCalleeWorkerIds(workers, process.cwd());\n calleeIds = mergeQueueCallees(calleeIds, queues, workers);\n const config = generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds, queues);\n\n // Always generate env.json now as serverless.yml relies on it.\n // Microfox deploys APIs on prod by default; when microfox.json exists, default ENVIRONMENT/STAGE to \"prod\".\n const envStage = fs.existsSync(microfoxJsonPath) ? 
'prod' : stage;\n const safeEnvVars: Record<string, string> = {\n ENVIRONMENT: envStage,\n STAGE: envStage,\n NODE_ENV: envStage,\n };\n const allowedPrefixes = ['OPENAI_', 'ANTHROPIC_', 'DATABASE_', 'MONGODB_', 'REDIS_', 'UPSTASH_', 'WORKER_', 'WORKERS_', 'WORKFLOW_', 'REMOTION_', 'QUEUE_JOB_', 'DEBUG_WORKER_QUEUES'];\n\n for (const [key, value] of Object.entries(envVars)) {\n // AWS_ prefix is reserved by Lambda, do not include it in environment variables\n // https://docs.aws.amazon.com/lambda/latest/dg/configuration-envvars.html\n if (key.startsWith('AWS_')) continue;\n\n // Keep legacy behavior for known-safe prefixes,\n // and also include any env that is referenced by worker code.\n if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {\n safeEnvVars[key] = value;\n }\n }\n\n fs.writeFileSync(\n path.join(serverlessDir, 'env.json'),\n JSON.stringify(safeEnvVars, null, 2)\n );\n\n const yamlContent = yaml.dump(config, { indent: 2 });\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n fs.writeFileSync(yamlPath, yamlContent);\n console.log(chalk.green(`✓ Generated serverless.yml: ${yamlPath}`));\n}\n\nasync function deploy(args: any) {\n const stage = args.stage || process.env.STAGE || 'prod';\n const region = args.region || process.env.AWS_REGION || 'us-east-1';\n // Commander passes option names as camelCase (e.g. skipDeploy, skipInstall)\n const skipDeploy = args.skipDeploy ?? args['skip-deploy'] ?? false;\n const skipInstall = args.skipInstall ?? args['skip-install'] ?? false;\n\n if (skipDeploy) {\n console.log(chalk.yellow('⏭️ Skipping deployment (--skip-deploy flag)'));\n return;\n }\n\n const serverlessDir = path.join(process.cwd(), '.serverless-workers');\n const yamlPath = path.join(serverlessDir, 'serverless.yml');\n\n if (!fs.existsSync(yamlPath)) {\n console.error(chalk.red('❌ serverless.yml not found. 
Run \"build\" first.'));\n process.exit(1);\n }\n\n console.log(chalk.blue(`🚀 Deploying to AWS (stage: ${stage}, region: ${region})...`));\n validateEnvironment();\n\n try {\n // Install dependencies in the serverless directory if node_modules doesn't exist\n // Skip if --skip-install is provided\n if (!skipInstall && !fs.existsSync(path.join(serverlessDir, 'node_modules'))) {\n console.log(chalk.blue('📦 Installing serverless dependencies...'));\n execSync('npm install', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n }\n\n // Check for microfox.json in project root\n const microfoxJsonPath = path.join(process.cwd(), 'microfox.json');\n if (fs.existsSync(microfoxJsonPath)) {\n console.log(chalk.blue('ℹ️ Found microfox.json, deploying via Microfox Cloud...'));\n\n // Copy microfox.json to .serverless-workers directory\n fs.copyFileSync(microfoxJsonPath, path.join(serverlessDir, 'microfox.json'));\n\n // Load and filter environment variables\n const envVars = loadEnvVars();\n // env.json is already generated by build()\n\n execSync('npx microfox@latest push', {\n cwd: serverlessDir,\n stdio: 'inherit'\n });\n console.log(chalk.green('✓ Deployment triggered via Microfox!'));\n // We don't generate workers map for Microfox push as it handles its own routing\n return;\n }\n\n execSync('npx serverless deploy', {\n cwd: serverlessDir,\n stdio: 'inherit',\n env: {\n ...process.env,\n STAGE: stage,\n AWS_REGION: region,\n },\n });\n console.log(chalk.green('✓ Deployment complete!'));\n } catch (error) {\n console.error(chalk.red('❌ Deployment failed'));\n process.exit(1);\n }\n\n await generateWorkersMap(stage, region, serverlessDir);\n}\n\nexport const pushCommand = new Command()\n .name('push')\n .description('Build and deploy background workers to AWS')\n .option('-s, --stage <stage>', 'Deployment stage', 'prod')\n .option('-r, --region <region>', 'AWS region', 'us-east-1')\n .option('--ai-path <path>', 'Path to AI directory containing workers', 'app/ai')\n .option('--service-name <name>', 'Override serverless service name (defaults to ai-router-workers-<stage>)')\n .option('--skip-deploy', 'Skip deployment, only build', false)\n .option('--skip-install', 'Skip npm install in serverless directory', false)\n .action(async (options) => {\n await build(options);\n await deploy(options);\n });\n\n","import { Command } from 'commander';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport chalk from 'chalk';\nimport ora from 'ora';\nimport prompts from 'prompts';\n\nconst WORKER_DIR_DEFAULT = 'app/ai/workers';\nconst QUEUES_DIR_DEFAULT = 'app/ai/queues';\n\ntype ScaffoldType = 'worker' | 'queue';\n\nfunction scaffoldWorker(\n projectRoot: string,\n id: string,\n options: { dir?: string; schedule?: string; timeout?: string; memory?: string }\n): string {\n const dir = path.resolve(projectRoot, options.dir || WORKER_DIR_DEFAULT);\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, '-');\n const filePath = path.join(dir, `${fileSafeId}.worker.ts`);\n\n const timeout = Number(options.timeout || '300') || 300;\n const memorySize = Number(options.memory || '512') || 512;\n const scheduleLine = options.schedule\n ? 
` schedule: '${options.schedule}',\\n`\n : '';\n\n const contents = `import { createWorker, type WorkerConfig } from '@microfox/ai-worker';\nimport { z } from 'zod';\nimport type { WorkerHandlerParams } from '@microfox/ai-worker/handler';\n\nconst InputSchema = z.object({\n // TODO: define input fields\n});\n\nconst OutputSchema = z.object({\n // TODO: define output fields\n});\n\ntype Input = z.infer<typeof InputSchema>;\ntype Output = z.infer<typeof OutputSchema>;\n\nexport const workerConfig: WorkerConfig = {\n timeout: ${timeout},\n memorySize: ${memorySize},\n${scheduleLine}};\n\nexport default createWorker<typeof InputSchema, Output>({\n id: '${id}',\n inputSchema: InputSchema,\n outputSchema: OutputSchema,\n async handler({ input, ctx }: WorkerHandlerParams<Input, Output>) {\n const { jobId, workerId, jobStore, dispatchWorker, logger } = ctx;\n logger.info('start', { jobId, workerId });\n\n await jobStore?.update({ status: 'running' });\n\n // TODO: implement your business logic here\n const result: Output = {} as any;\n\n await jobStore?.update({ status: 'completed', output: result });\n return result;\n },\n});\n`;\n\n fs.writeFileSync(filePath, contents, 'utf-8');\n return path.relative(projectRoot, filePath);\n}\n\nfunction scaffoldQueue(projectRoot: string, id: string, options: { dir?: string }): string {\n const dir = path.resolve(projectRoot, options.dir || QUEUES_DIR_DEFAULT);\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, '-');\n const filePath = path.join(dir, `${fileSafeId}.queue.ts`);\n\n const contents = `import { defineWorkerQueue } from '@microfox/ai-worker/queue';\n\n/**\n * Worker queue: ${id}\n * Steps run in sequence. Each step's output can be mapped to the next step's input.\n */\nexport default defineWorkerQueue({\n id: '${id}',\n steps: [\n { workerId: 'first-worker' },\n // Add more steps: { workerId: 'second-worker' }, { workerId: 'third-worker', delaySeconds: 10 }\n ],\n // Optional: run on a schedule (CLI will generate a queue-starter Lambda)\n // schedule: 'cron(0 3 * * ? *)',\n});\n`;\n\n fs.writeFileSync(filePath, contents, 'utf-8');\n return path.relative(projectRoot, filePath);\n}\n\nexport const newCommand = new Command()\n .name('new')\n .description('Scaffold a new worker or queue (interactive: choose type, then enter id)')\n .argument('[id]', 'Worker or queue ID (optional; will prompt if omitted)')\n .option('--type <worker|queue>', 'Scaffold type (skips interactive prompt)')\n .option('--dir <path>', 'Directory for the output file (workers: app/ai/workers, queues: app/ai/queues)', '')\n .option('--schedule <expression>', 'Optional schedule (workers only; e.g. \"cron(0 3 * * ? *)\")')\n .option('--timeout <seconds>', 'Lambda timeout in seconds (workers only)', '300')\n .option('--memory <mb>', 'Lambda memory in MB (workers only)', '512')\n .action(\n async (\n idArg: string | undefined,\n options: {\n type?: string;\n dir?: string;\n schedule?: string;\n timeout?: string;\n memory?: string;\n }\n ) => {\n const projectRoot = process.cwd();\n let type: ScaffoldType;\n let id: string;\n\n if (options.type === 'worker' || options.type === 'queue') {\n type = options.type;\n id = (idArg ?? '').trim();\n if (!id) {\n const res = await prompts({\n type: 'text',\n name: 'id',\n message: `Enter ${type} ID:`,\n validate: (v) => (v.trim() ? 
true : 'ID is required'),\n });\n if (typeof res.id !== 'string') {\n process.exitCode = 1;\n return;\n }\n id = res.id.trim();\n }\n } else {\n const typeRes = await prompts({\n type: 'select',\n name: 'type',\n message: 'What do you want to create?',\n choices: [\n { title: 'Worker', value: 'worker', description: 'A single background worker (.worker.ts)' },\n { title: 'Queue', value: 'queue', description: 'A multi-step worker queue (.queue.ts)' },\n ],\n });\n if (typeRes.type === undefined) {\n process.exitCode = 1;\n return;\n }\n type = typeRes.type as ScaffoldType;\n id = (idArg ?? '').trim();\n if (!id) {\n const idRes = await prompts({\n type: 'text',\n name: 'id',\n message: `Enter ${type} ID:`,\n validate: (v) => (v.trim() ? true : 'ID is required'),\n });\n if (typeof idRes.id !== 'string') {\n process.exitCode = 1;\n return;\n }\n id = idRes.id.trim();\n }\n }\n\n const spinner = ora(`Scaffolding ${type}...`).start();\n try {\n const dirOpt = options.dir ? { dir: options.dir } : {};\n if (type === 'worker') {\n const relativePath = scaffoldWorker(projectRoot, id, {\n ...dirOpt,\n schedule: options.schedule,\n timeout: options.timeout,\n memory: options.memory,\n });\n spinner.succeed(\n `Created worker: ${chalk.cyan(relativePath)}\\n` +\n `Next: run ${chalk.yellow('npx ai-worker push')} to build & deploy.`\n );\n } else {\n const relativePath = scaffoldQueue(projectRoot, id, dirOpt);\n spinner.succeed(\n `Created queue: ${chalk.cyan(relativePath)}\\n` +\n `Edit steps (workerId) to match your workers, then run ${chalk.yellow('npx ai-worker push')} to build & deploy.`\n );\n }\n } catch (error: unknown) {\n const err = error as Error;\n spinner.fail(`Failed to scaffold ${type}`);\n console.error(chalk.red(err?.stack || err?.message || String(error)));\n process.exitCode = 1;\n }\n }\n );\n","import { Command } from 'commander';\nimport * as fs from 'fs';\nimport * as path from 'path';\nimport chalk from 'chalk';\nimport ora from 'ora';\n\n// Templates embedded from examples/root\nconst TEMPLATES = {\n 'stores/jobStore.ts': `/**\n * Job store for tracking worker job status and results.\n *\n * Always uses MongoDB. 
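 *
 * (Note: despite the "Always uses MongoDB" wording above, getStorageAdapter
 * below defaults to 'upstash-redis' when WORKER_DATABASE_TYPE is unset.)
 * A hypothetical usage sketch for the exported helpers; IDs and payloads are
 * made up:
 *
 *   import { setJob, updateJob, getJob } from './stores/jobStore';
 *
 *   async function demo() {
 *     await setJob('job-demo-1', { workerId: 'demo-worker', status: 'queued', input: { n: 1 } });
 *     await updateJob('job-demo-1', { status: 'completed', output: { doubled: 2 } });
 *     const record = await getJob('job-demo-1');
 *     console.log(record?.status);      // 'completed'
 *     console.log(record?.completedAt); // ISO timestamp set on completion
 *   }
 *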
Workers run on AWS Lambda and update jobs via the API;\n * in-memory storage is not shared across processes, so a persistent store is required.\n *\n * Configure via \\`microfox.config.ts\\` -> \\`workflowSettings.jobStore\\` or env:\n * - WORKER_DATABASE_TYPE: 'mongodb' | 'upstash-redis' (default: upstash-redis)\n * - DATABASE_MONGODB_URI or MONGODB_URI (required for mongodb)\n * - DATABASE_MONGODB_DB or MONGODB_DB; MONGODB_WORKER_JOBS_COLLECTION (default: worker_jobs)\n * - WORKER_UPSTASH_REDIS_* / WORKER_JOBS_TTL_SECONDS for Redis\n *\n * Job record structure:\n * {\n * jobId: string,\n * workerId: string,\n * status: 'queued' | 'running' | 'completed' | 'failed',\n * input: any,\n * output?: any,\n * error?: { message: string, stack?: string },\n * metadata?: Record<string, any>,\n * createdAt: string,\n * updatedAt: string,\n * completedAt?: string\n * }\n */\n\nexport interface InternalJobEntry {\n jobId: string;\n workerId: string;\n}\n\nexport interface JobRecord {\n jobId: string;\n workerId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input: any;\n output?: any;\n error?: {\n message: string;\n stack?: string;\n };\n metadata?: Record<string, any>;\n internalJobs?: InternalJobEntry[];\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n}\n\n// Storage adapter interface\ninterface JobStoreAdapter {\n setJob(jobId: string, data: Partial<JobRecord>): Promise<void>;\n getJob(jobId: string): Promise<JobRecord | null>;\n updateJob(jobId: string, data: Partial<JobRecord>): Promise<void>;\n appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void>;\n listJobsByWorker(workerId: string): Promise<JobRecord[]>;\n}\n\n// Job store can use MongoDB or Upstash Redis (workers run on Lambda; no in-memory fallback).\nfunction getStorageAdapter(): JobStoreAdapter {\n try {\n // Prefer workflowSettings.jobStore.type from microfox.config.ts; env fallback: WORKER_DATABASE_TYPE\n let jobStoreType: string | undefined;\n try {\n const config = require('@/microfox.config').StudioConfig as {\n workflowSettings?: { jobStore?: { type?: string } };\n };\n jobStoreType = config?.workflowSettings?.jobStore?.type;\n } catch {\n // Config missing or not resolvable; fall back to env\n }\n jobStoreType = jobStoreType || process.env.WORKER_DATABASE_TYPE || 'upstash-redis';\n const normalized = jobStoreType.toLowerCase();\n\n if (normalized === 'upstash-redis' || normalized === 'redis') {\n const { redisJobStore } = require('./redisAdapter');\n console.log('[JobStore] Ready (Upstash Redis)');\n return redisJobStore;\n }\n\n const { mongoJobStore } = require('./mongoAdapter');\n console.log('[JobStore] Ready (MongoDB)');\n return mongoJobStore;\n } catch (error: any) {\n const msg = error?.message || String(error);\n console.error('[JobStore] Job store adapter required (workers run on Lambda).', { error: msg });\n throw new Error(\n 'Job store requires a persistent backend. Set workflowSettings.jobStore.type or WORKER_DATABASE_TYPE to \"mongodb\" or \"upstash-redis\", and set the corresponding connection settings. 
' +\n \\`Details: \\${msg}\\`\n );\n }\n}\n\n// Lazy-loaded storage adapter\nlet storageAdapter: JobStoreAdapter | null = null;\nfunction getAdapter(): JobStoreAdapter {\n if (!storageAdapter) {\n storageAdapter = getStorageAdapter();\n }\n return storageAdapter;\n}\n\n/**\n * Store a job record.\n */\nexport async function setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {\n try {\n const adapter = getAdapter();\n await adapter.setJob(jobId, data);\n } catch (error: any) {\n console.error('[JobStore] Error setting job:', {\n jobId,\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n throw error;\n }\n}\n\n/**\n * Get a job record.\n */\nexport async function getJob(jobId: string): Promise<JobRecord | null> {\n try {\n const adapter = getAdapter();\n return await adapter.getJob(jobId);\n } catch (error: any) {\n console.error('[JobStore] Error getting job:', {\n jobId,\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n throw error;\n }\n}\n\n/**\n * Update a job record.\n */\nexport async function updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {\n try {\n const adapter = getAdapter();\n await adapter.updateJob(jobId, data);\n } catch (error: any) {\n console.error('[JobStore] Error updating job:', {\n jobId,\n updates: Object.keys(data),\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n throw error;\n }\n}\n\n/**\n * Append an internal (child) job to a parent job's internalJobs list.\n * Used when a worker dispatches another worker (ctx.dispatchWorker).\n */\nexport async function appendInternalJob(\n parentJobId: string,\n entry: InternalJobEntry\n): Promise<void> {\n try {\n const adapter = getAdapter();\n await adapter.appendInternalJob(parentJobId, entry);\n } catch (error: any) {\n console.error('[JobStore] Error appending internal job:', {\n parentJobId,\n entry,\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n throw error;\n }\n}\n\n/**\n * List jobs by worker ID.\n */\nexport async function listJobsByWorker(workerId: string): Promise<JobRecord[]> {\n try {\n const adapter = getAdapter();\n return await adapter.listJobsByWorker(workerId);\n } catch (error: any) {\n console.error('[JobStore] Error listing jobs by worker:', {\n workerId,\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n throw error;\n }\n}\n`,\n\n 'stores/mongoAdapter.ts': `/**\n * MongoDB adapter for job store.\n *\n * Provides persistent storage for worker job state using MongoDB.\n *\n * Configuration (from microfox.config.ts or env vars):\n * - workflowSettings.jobStore.mongodb.uri or DATABASE_MONGODB_URI/MONGODB_URI: MongoDB connection string\n * - workflowSettings.jobStore.mongodb.db or DATABASE_MONGODB_DB/MONGODB_DB: Database name (default: 'ai_router')\n *\n * Collection name: config -> workflowSettings.jobStore.mongodb.workerJobsCollection\n * (default: 'worker_jobs'). 
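 *
 * A summary sketch of the env-var fallback chain implemented below (it ignores
 * the microfox.config.ts lookup; the helper name is illustrative):
 *
 *   function resolveMongoSettingsFromEnv() {
 *     return {
 *       // uri is required; the adapter throws when neither variable is set
 *       uri: process.env.DATABASE_MONGODB_URI ?? process.env.MONGODB_URI,
 *       db: process.env.DATABASE_MONGODB_DB ?? process.env.MONGODB_DB ?? 'ai_router',
 *       collection:
 *         process.env.MONGODB_WORKER_JOBS_COLLECTION ??
 *         process.env.DATABASE_MONGODB_WORKER_JOBS_COLLECTION ??
 *         'worker_jobs',
 *     };
 *   }
 *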
Env: MONGODB_WORKER_JOBS_COLLECTION then DATABASE_MONGODB_WORKER_JOBS_COLLECTION.\n */\n\nimport { MongoClient, type Db, type Collection } from 'mongodb';\nimport type { JobRecord, InternalJobEntry } from './jobStore';\n\ndeclare global {\n // eslint-disable-next-line no-var\n var __workflowMongoClientPromise: Promise<MongoClient> | undefined;\n}\n\nfunction getMongoUri(): string {\n // Try to get from config first, fallback to env vars\n let uri: string | undefined;\n try {\n const config = require('@/microfox.config').StudioConfig as {\n workflowSettings?: { jobStore?: { mongodb?: { uri?: string } } };\n };\n uri = config?.workflowSettings?.jobStore?.mongodb?.uri;\n } catch (error) {\n // Config not available, use env vars\n }\n \n uri = uri || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;\n \n if (!uri) {\n throw new Error(\n 'Missing MongoDB connection string. Set workflowSettings.jobStore.mongodb.uri in microfox.config.ts or environment variable DATABASE_MONGODB_URI or MONGODB_URI.'\n );\n }\n return uri;\n}\n\nfunction getMongoDbName(): string {\n // Try to get from config first, fallback to env vars\n let dbName: string | undefined;\n try {\n const config = require('@/microfox.config').StudioConfig as {\n workflowSettings?: { jobStore?: { mongodb?: { db?: string } } };\n };\n dbName = config?.workflowSettings?.jobStore?.mongodb?.db;\n } catch (error) {\n // Config not available, use env vars\n }\n \n return dbName || process.env.DATABASE_MONGODB_DB || process.env.MONGODB_DB || 'ai_router';\n}\n\nfunction getWorkerJobsCollection(): string {\n let collection: string | undefined;\n try {\n const config = require('@/microfox.config').StudioConfig as {\n workflowSettings?: { jobStore?: { mongodb?: { workerJobsCollection?: string } } };\n };\n collection = config?.workflowSettings?.jobStore?.mongodb?.workerJobsCollection;\n } catch {\n // Config not available\n }\n return (\n collection ||\n process.env.MONGODB_WORKER_JOBS_COLLECTION ||\n process.env.DATABASE_MONGODB_WORKER_JOBS_COLLECTION ||\n 'worker_jobs'\n );\n}\n\nasync function getMongoClient(): Promise<MongoClient> {\n const uri = getMongoUri();\n\n // Reuse a single client across hot reloads / lambda invocations when possible.\n if (!globalThis.__workflowMongoClientPromise) {\n const client = new MongoClient(uri, {\n // Keep defaults conservative; works on both local dev and Lambda.\n maxPoolSize: 10,\n minPoolSize: 0,\n serverSelectionTimeoutMS: 10_000,\n });\n globalThis.__workflowMongoClientPromise = client.connect();\n }\n\n return globalThis.__workflowMongoClientPromise;\n}\n\nasync function getMongoDb(): Promise<Db> {\n const client = await getMongoClient();\n return client.db(getMongoDbName());\n}\n\n/** Export for queue job store (shared MongoDB connection). 
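[Editor's sketch] The `globalThis.__workflowMongoClientPromise` pattern above exists so that dev hot reloads and warm Lambda invocations reuse one connection pool instead of opening a new client per invocation. The same pattern in isolation, assuming only that `MONGODB_URI` is set (the global name here is hypothetical):

// Sketch of the cached-client pattern used above.
import { MongoClient } from 'mongodb';

declare global {
  // eslint-disable-next-line no-var
  var __clientPromise: Promise<MongoClient> | undefined;
}

export function getClient(): Promise<MongoClient> {
  if (!globalThis.__clientPromise) {
    // connect() is called once; subsequent callers await the same promise.
    globalThis.__clientPromise = new MongoClient(process.env.MONGODB_URI!).connect();
  }
  return globalThis.__clientPromise;
}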
*/\nexport async function getWorkflowDb(): Promise<Db> {\n return getMongoDb();\n}\n\nasync function getCollection(): Promise<Collection<JobRecord & { _id: string }>> {\n const db = await getMongoDb();\n return db.collection<JobRecord & { _id: string }>(getWorkerJobsCollection());\n}\n\n/**\n * MongoDB storage adapter for job store.\n */\nexport const mongoJobStore = {\n async setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {\n const now = new Date().toISOString();\n const collection = await getCollection();\n \n const existing = await collection.findOne({ _id: jobId });\n \n const record: JobRecord = {\n jobId,\n workerId: data.workerId || existing?.workerId || '',\n status: data.status || existing?.status || 'queued',\n input: data.input !== undefined ? data.input : existing?.input || {},\n output: data.output !== undefined ? data.output : existing?.output,\n error: data.error !== undefined ? data.error : existing?.error,\n metadata: { ...existing?.metadata, ...data.metadata },\n createdAt: existing?.createdAt || now,\n updatedAt: now,\n completedAt: data.completedAt || existing?.completedAt,\n };\n\n // Set completedAt if status changed to completed/failed\n if (data.status && ['completed', 'failed'].includes(data.status) && !record.completedAt) {\n record.completedAt = now;\n }\n\n await collection.updateOne(\n { _id: jobId },\n {\n $set: {\n ...record,\n _id: jobId,\n },\n },\n { upsert: true }\n );\n },\n\n async getJob(jobId: string): Promise<JobRecord | null> {\n const collection = await getCollection();\n const doc = await collection.findOne({ _id: jobId });\n \n if (!doc) {\n return null;\n }\n\n // Convert MongoDB document to JobRecord (remove _id, use jobId)\n const { _id, ...record } = doc;\n return record as JobRecord;\n },\n\n async updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {\n const collection = await getCollection();\n const existing = await collection.findOne({ _id: jobId });\n \n if (!existing) {\n throw new Error(\\`Job \\${jobId} not found\\`);\n }\n\n const now = new Date().toISOString();\n const update: any = {\n $set: {\n updatedAt: now,\n },\n };\n\n if (data.status !== undefined) {\n update.$set.status = data.status;\n if (['completed', 'failed'].includes(data.status) && !existing.completedAt) {\n update.$set.completedAt = now;\n }\n }\n if (data.output !== undefined) {\n update.$set.output = data.output;\n }\n if (data.error !== undefined) {\n update.$set.error = data.error;\n }\n if (data.metadata !== undefined) {\n update.$set.metadata = { ...existing.metadata, ...data.metadata };\n }\n\n await collection.updateOne({ _id: jobId }, update);\n },\n\n async appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void> {\n const collection = await getCollection();\n const now = new Date().toISOString();\n await collection.updateOne(\n { _id: parentJobId },\n {\n $push: { internalJobs: entry },\n $set: { updatedAt: now },\n }\n );\n },\n\n async listJobsByWorker(workerId: string): Promise<JobRecord[]> {\n const collection = await getCollection();\n const docs = await collection\n .find({ workerId })\n .sort({ createdAt: -1 })\n .toArray();\n\n return docs.map((doc) => {\n const { _id, ...record } = doc;\n return record as JobRecord;\n });\n },\n};\n`,\n\n 'stores/redisAdapter.ts': `/**\n * Upstash Redis adapter for workflow/worker job store.\n *\n * Uses a hash-per-job model with key-level TTL for fast lookups by jobId.\n *\n * Configuration (from microfox.config.ts or env vars):\n * - 
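[Editor's sketch] The adapter above stores one document per job, keyed by `_id = jobId`, upserted on `setJob` and patched via `$set` on `updateJob`. An illustrative `worker_jobs` document (all values hypothetical):

// Example document shape as written by mongoJobStore.setJob / updateJob:
const exampleDoc = {
  _id: 'job-1700000000000-ab12cd34e',
  jobId: 'job-1700000000000-ab12cd34e',
  workerId: 'send-email',
  status: 'completed',
  input: { to: 'a@b.c' },
  output: { ok: true },
  metadata: { source: 'workflow-orchestration' },
  createdAt: '2024-01-01T00:00:00.000Z',
  updatedAt: '2024-01-01T00:00:05.000Z',
  completedAt: '2024-01-01T00:00:05.000Z',
};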
workflowSettings.jobStore.redis; env: WORKER_UPSTASH_REDIS_REST_URL, WORKER_UPSTASH_REDIS_REST_TOKEN,\n * WORKER_UPSTASH_REDIS_JOBS_PREFIX (default: worker:jobs:), WORKER_JOBS_TTL_SECONDS\n */\n\nimport { Redis } from '@upstash/redis';\nimport type { JobRecord, InternalJobEntry } from './jobStore';\n\nlet redisClient: Redis | null = null;\nlet redisUrl: string | undefined;\nlet redisToken: string | undefined;\nlet jobKeyPrefix: string = 'worker:jobs:';\nconst defaultTtlSeconds = 60 * 60 * 24 * 7; // 7 days\n\nfunction loadConfig() {\n try {\n // Prefer config from microfox.config.ts if present\n const config = require('@/microfox.config').StudioConfig as {\n workflowSettings?: {\n jobStore?: {\n redis?: {\n url?: string;\n token?: string;\n keyPrefix?: string;\n ttlSeconds?: number;\n };\n };\n };\n };\n const redisCfg = config?.workflowSettings?.jobStore?.redis;\n redisUrl = redisCfg?.url || redisUrl;\n redisToken = redisCfg?.token || redisToken;\n if (redisCfg?.keyPrefix) {\n jobKeyPrefix = redisCfg.keyPrefix;\n }\n } catch {\n // Config optional; fall back to env vars\n }\n\n redisUrl =\n redisUrl ||\n process.env.WORKER_UPSTASH_REDIS_REST_URL ||\n process.env.UPSTASH_REDIS_REST_URL ||\n process.env.UPSTASH_REDIS_URL;\n redisToken =\n redisToken ||\n process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||\n process.env.UPSTASH_REDIS_REST_TOKEN ||\n process.env.UPSTASH_REDIS_TOKEN;\n jobKeyPrefix =\n jobKeyPrefix ||\n process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX ||\n process.env.UPSTASH_REDIS_KEY_PREFIX ||\n 'worker:jobs:';\n}\n\nfunction getRedis(): Redis {\n if (!redisClient) {\n loadConfig();\n if (!redisUrl || !redisToken) {\n throw new Error(\n 'Missing Upstash Redis configuration. Set workflowSettings.jobStore.redis in microfox.config.ts or WORKER_UPSTASH_REDIS_REST_URL / WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL / UPSTASH_REDIS_REST_TOKEN).'\n );\n }\n redisClient = new Redis({\n url: redisUrl,\n token: redisToken,\n });\n }\n return redisClient;\n}\n\nfunction jobKey(jobId: string): string {\n return \\`\\${jobKeyPrefix}\\${jobId}\\`;\n}\n\n/** Separate LIST key for internal job refs; each RPUSH is atomic so no race when appending multiple. */\nfunction internalListKey(jobId: string): string {\n return \\`\\${jobKeyPrefix}\\${jobId}:internal\\`;\n}\n\nfunction workerIndexKey(workerId: string): string {\n // Secondary index: worker -> set of jobIds\n return \\`\\${jobKeyPrefix}by-worker:\\${workerId}\\`;\n}\n\nfunction getJobTtlSeconds(): number {\n const raw =\n process.env.WORKER_JOBS_TTL_SECONDS || process.env.WORKFLOW_JOBS_TTL_SECONDS;\n if (!raw) return defaultTtlSeconds;\n const n = parseInt(raw, 10);\n return Number.isFinite(n) && n > 0 ? n : defaultTtlSeconds;\n}\n\nasync function loadJob(jobId: string): Promise<JobRecord | null> {\n const redis = getRedis();\n const key = jobKey(jobId);\n const data = await redis.hgetall<Record<string, string>>(key);\n if (!data || Object.keys(data).length === 0) return null;\n\n const parseJson = <T>(val?: string | null): T | undefined => {\n if (!val) return undefined;\n try {\n return JSON.parse(val) as T;\n } catch {\n return undefined;\n }\n };\n\n // Prefer atomic list key for internal jobs; fallback to hash field for old records\n const listKey = internalListKey(jobId);\n const listItems = (await redis.lrange(listKey, 0, -1)) ?? 
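[Editor's sketch] The Redis adapter above splits each job across three key families: a hash for the job fields, a list for internal (child) job refs so appends stay atomic, and a set as a per-worker secondary index. With the default prefix, the layout looks like this (job and worker ids hypothetical):

// worker:jobs:job-123                  -> HASH (job fields; complex values JSON-encoded)
// worker:jobs:job-123:internal         -> LIST (InternalJobEntry items, atomic RPUSH)
// worker:jobs:by-worker:send-email     -> SET  (jobIds dispatched by this worker)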
[];\n let internalJobs: InternalJobEntry[] | undefined;\n if (listItems.length > 0) {\n internalJobs = listItems\n .map((s) => {\n try {\n return JSON.parse(s) as InternalJobEntry;\n } catch {\n return null;\n }\n })\n .filter((e): e is InternalJobEntry => e != null);\n } else {\n internalJobs = parseJson<InternalJobEntry[]>(data.internalJobs);\n }\n\n const record: JobRecord = {\n jobId: data.jobId,\n workerId: data.workerId,\n status: (data.status as JobRecord['status']) || 'queued',\n input: parseJson<any>(data.input) ?? {},\n output: parseJson<any>(data.output),\n error: parseJson<any>(data.error),\n metadata: parseJson<Record<string, any>>(data.metadata) ?? {},\n internalJobs,\n createdAt: data.createdAt,\n updatedAt: data.updatedAt,\n completedAt: data.completedAt,\n };\n\n return record;\n}\n\nexport const redisJobStore = {\n async setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {\n const redis = getRedis();\n const key = jobKey(jobId);\n const now = new Date().toISOString();\n\n const existing = await loadJob(jobId);\n\n const record: JobRecord = {\n jobId,\n workerId: data.workerId || existing?.workerId || '',\n status: data.status || existing?.status || 'queued',\n input: data.input !== undefined ? data.input : existing?.input || {},\n output: data.output !== undefined ? data.output : existing?.output,\n error: data.error !== undefined ? data.error : existing?.error,\n metadata: { ...(existing?.metadata || {}), ...(data.metadata || {}) },\n internalJobs: existing?.internalJobs,\n createdAt: existing?.createdAt || now,\n updatedAt: now,\n completedAt: data.completedAt || existing?.completedAt,\n };\n\n if (data.status && ['completed', 'failed'].includes(data.status) && !record.completedAt) {\n record.completedAt = now;\n }\n\n const toSet: Record<string, string> = {\n jobId: record.jobId,\n workerId: record.workerId,\n status: record.status,\n input: JSON.stringify(record.input ?? {}),\n metadata: JSON.stringify(record.metadata ?? 
{}),\n createdAt: record.createdAt,\n updatedAt: record.updatedAt,\n };\n if (record.output !== undefined) {\n toSet.output = JSON.stringify(record.output);\n }\n if (record.error !== undefined) {\n toSet.error = JSON.stringify(record.error);\n }\n if (record.internalJobs) {\n toSet.internalJobs = JSON.stringify(record.internalJobs);\n }\n if (record.completedAt) {\n toSet.completedAt = record.completedAt;\n }\n\n await redis.hset(key, toSet);\n const ttl = getJobTtlSeconds();\n if (ttl > 0) {\n await redis.expire(key, ttl);\n }\n\n // Maintain secondary index per worker\n if (record.workerId) {\n await redis.sadd(workerIndexKey(record.workerId), jobId);\n }\n },\n\n async getJob(jobId: string): Promise<JobRecord | null> {\n return loadJob(jobId);\n },\n\n async updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {\n const redis = getRedis();\n const key = jobKey(jobId);\n const existing = await loadJob(jobId);\n if (!existing) {\n throw new Error(\\`Job \\${jobId} not found\\`);\n }\n\n const now = new Date().toISOString();\n const update: Partial<JobRecord> = {\n updatedAt: now,\n };\n\n if (data.status !== undefined) {\n update.status = data.status;\n if (['completed', 'failed'].includes(data.status) && !existing.completedAt) {\n update.completedAt = now;\n }\n }\n if (data.output !== undefined) {\n update.output = data.output;\n }\n if (data.error !== undefined) {\n update.error = data.error;\n }\n if (data.metadata !== undefined) {\n update.metadata = { ...(existing.metadata || {}), ...data.metadata };\n }\n\n const toSet: Record<string, string> = {\n updatedAt: now,\n };\n if (update.status !== undefined) {\n toSet.status = update.status;\n }\n if (update.output !== undefined) {\n toSet.output = JSON.stringify(update.output);\n }\n if (update.error !== undefined) {\n toSet.error = JSON.stringify(update.error);\n }\n if (update.metadata !== undefined) {\n toSet.metadata = JSON.stringify(update.metadata);\n }\n if (update.completedAt) {\n toSet.completedAt = update.completedAt;\n }\n\n await redis.hset(key, toSet);\n const ttl = getJobTtlSeconds();\n if (ttl > 0) {\n await redis.expire(key, ttl);\n }\n },\n\n async appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void> {\n const redis = getRedis();\n const listKey = internalListKey(parentJobId);\n await redis.rpush(listKey, JSON.stringify(entry));\n const mainKey = jobKey(parentJobId);\n await redis.hset(mainKey, { updatedAt: new Date().toISOString() });\n const ttl = getJobTtlSeconds();\n if (ttl > 0) {\n await redis.expire(listKey, ttl);\n await redis.expire(mainKey, ttl);\n }\n },\n\n async listJobsByWorker(workerId: string): Promise<JobRecord[]> {\n const redis = getRedis();\n const indexKey = workerIndexKey(workerId);\n const jobIds = (await redis.smembers(indexKey)) ?? 
[];\n const jobs: JobRecord[] = [];\n for (const jobId of jobIds) {\n const job = await loadJob(jobId);\n if (job) {\n jobs.push(job);\n }\n }\n // Most recent first\n jobs.sort((a, b) => b.createdAt.localeCompare(a.createdAt));\n return jobs;\n },\n};\n`,\n\n 'stores/queueJobStore.ts': `/**\n * Queue job store for tracking multi-step queue execution.\n *\n * Stores a single record per queue run with steps array containing:\n * - workerId, workerJobId (worker_job id), status, input, output, startedAt, completedAt, error\n *\n * Uses MongoDB or Upstash Redis (same backend as worker_jobs), based on WORKER_DATABASE_TYPE.\n * Collection/key prefix: queue_jobs / worker:queue-jobs:\n */\n\nimport type { Collection } from 'mongodb';\nimport { Redis } from '@upstash/redis';\nimport { getWorkflowDb } from './mongoAdapter';\n\nexport interface QueueJobStep {\n workerId: string;\n workerJobId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input?: unknown;\n output?: unknown;\n error?: { message: string };\n startedAt?: string;\n completedAt?: string;\n}\n\nexport interface QueueJobRecord {\n id: string;\n queueId: string;\n status: 'running' | 'completed' | 'failed' | 'partial';\n steps: QueueJobStep[];\n metadata?: Record<string, unknown>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n}\n\n// === Backend selection ===\n\nfunction getStoreType(): 'mongodb' | 'upstash-redis' {\n const t = (process.env.WORKER_DATABASE_TYPE || 'upstash-redis').toLowerCase();\n return t === 'mongodb' ? 'mongodb' : 'upstash-redis';\n}\n\nfunction preferMongo(): boolean {\n return getStoreType() === 'mongodb';\n}\n\nfunction preferRedis(): boolean {\n return getStoreType() !== 'mongodb';\n}\n\n// === MongoDB backend ===\n\nfunction getQueueJobsCollectionName(): string {\n return process.env.MONGODB_QUEUE_JOBS_COLLECTION || 'queue_jobs';\n}\n\nasync function getCollection(): Promise<Collection<QueueJobRecord & { _id: string }>> {\n const db = await getWorkflowDb();\n return db.collection<QueueJobRecord & { _id: string }>(getQueueJobsCollectionName());\n}\n\n// === Redis backend ===\n\nconst redisUrl =\n process.env.WORKER_UPSTASH_REDIS_REST_URL ||\n process.env.UPSTASH_REDIS_REST_URL ||\n process.env.UPSTASH_REDIS_URL;\nconst redisToken =\n process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||\n process.env.UPSTASH_REDIS_REST_TOKEN ||\n process.env.UPSTASH_REDIS_TOKEN;\nconst queueKeyPrefix =\n process.env.WORKER_UPSTASH_REDIS_QUEUE_PREFIX ||\n process.env.UPSTASH_REDIS_QUEUE_PREFIX ||\n 'worker:queue-jobs:';\n\nlet redisClient: Redis | null = null;\n\nfunction getRedis(): Redis {\n if (!redisUrl || !redisToken) {\n throw new Error(\n 'Upstash Redis configuration missing for queue job store. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN).'\n );\n }\n if (!redisClient) {\n redisClient = new Redis({\n url: redisUrl,\n token: redisToken,\n });\n }\n return redisClient;\n}\n\nfunction queueKey(id: string): string {\n return \\`\\${queueKeyPrefix}\\${id}\\`;\n}\n\n/** Hash values from Upstash hgetall may be auto-parsed (array/object) or raw strings. */\nfunction stepsFromHash(val: unknown): QueueJobStep[] {\n if (Array.isArray(val)) return val as QueueJobStep[];\n if (typeof val === 'string') {\n try {\n const parsed = JSON.parse(val) as QueueJobStep[];\n return Array.isArray(parsed) ? 
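[Editor's sketch] A queue run therefore materialises as a single record whose `steps` array grows as the queue progresses. An illustrative `QueueJobRecord` mid-run (ids and outputs hypothetical):

const exampleRun: QueueJobRecord = {
  id: 'queue-job-1',
  queueId: 'daily-digest',
  status: 'running',
  steps: [
    { workerId: 'fetch-posts', workerJobId: 'job-a', status: 'completed', output: { posts: 12 } },
    { workerId: 'send-digest', workerJobId: 'job-b', status: 'running' },
  ],
  createdAt: '2024-01-01T00:00:00.000Z',
  updatedAt: '2024-01-01T00:01:00.000Z',
};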
parsed : [];\n } catch {\n return [];\n }\n }\n return [];\n}\n\nfunction metadataFromHash(val: unknown): Record<string, unknown> {\n if (val && typeof val === 'object' && !Array.isArray(val)) return val as Record<string, unknown>;\n if (typeof val === 'string') {\n try {\n const parsed = JSON.parse(val) as Record<string, unknown>;\n return parsed && typeof parsed === 'object' ? parsed : {};\n } catch {\n return {};\n }\n }\n return {};\n}\n\nasync function loadQueueJobRedis(queueJobId: string): Promise<QueueJobRecord | null> {\n const redis = getRedis();\n const key = queueKey(queueJobId);\n const data = await redis.hgetall(key);\n if (!data || typeof data !== 'object' || Object.keys(data).length === 0) return null;\n const record: QueueJobRecord = {\n id: (data as Record<string, unknown>).id === undefined ? queueJobId : String((data as Record<string, unknown>).id),\n queueId: String((data as Record<string, unknown>).queueId ?? ''),\n status: (String((data as Record<string, unknown>).status ?? 'running') as QueueJobRecord['status']),\n steps: stepsFromHash((data as Record<string, unknown>).steps),\n metadata: metadataFromHash((data as Record<string, unknown>).metadata),\n createdAt: String((data as Record<string, unknown>).createdAt ?? new Date().toISOString()),\n updatedAt: String((data as Record<string, unknown>).updatedAt ?? new Date().toISOString()),\n completedAt: (data as Record<string, unknown>).completedAt != null ? String((data as Record<string, unknown>).completedAt) : undefined,\n };\n return record;\n}\n\nexport async function createQueueJob(\n id: string,\n queueId: string,\n firstStep: { workerId: string; workerJobId: string },\n metadata?: Record<string, unknown>\n): Promise<void> {\n const now = new Date().toISOString();\n const record: QueueJobRecord = {\n id,\n queueId,\n status: 'running',\n steps: [\n {\n workerId: firstStep.workerId,\n workerJobId: firstStep.workerJobId,\n status: 'queued',\n },\n ],\n metadata: metadata ?? {},\n createdAt: now,\n updatedAt: now,\n };\n \n if (preferRedis()) {\n const redis = getRedis();\n const key = queueKey(id);\n const toSet: Record<string, string> = {\n id: record.id,\n queueId: record.queueId,\n status: record.status,\n steps: JSON.stringify(record.steps),\n metadata: JSON.stringify(record.metadata || {}),\n createdAt: record.createdAt,\n updatedAt: record.updatedAt,\n };\n await redis.hset(key, toSet);\n const ttlSeconds =\n typeof process.env.WORKER_QUEUE_JOBS_TTL_SECONDS === 'string'\n ? parseInt(process.env.WORKER_QUEUE_JOBS_TTL_SECONDS, 10) || 60 * 60 * 24 * 7\n : typeof process.env.WORKER_JOBS_TTL_SECONDS === 'string'\n ? 
parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || 60 * 60 * 24 * 7\n : 60 * 60 * 24 * 7; // 7 days default\n if (ttlSeconds > 0) {\n await redis.expire(key, ttlSeconds);\n }\n return;\n }\n \n const collection = await getCollection();\n await collection.updateOne(\n { _id: id },\n { $set: { ...record, _id: id } },\n { upsert: true }\n );\n}\n\nexport async function updateQueueStep(\n queueJobId: string,\n stepIndex: number,\n update: {\n status?: 'queued' | 'running' | 'completed' | 'failed';\n input?: unknown;\n output?: unknown;\n error?: { message: string };\n startedAt?: string;\n completedAt?: string;\n }\n): Promise<void> {\n const collection = await getCollection();\n const now = new Date().toISOString();\n const setKey = \\`steps.\\${stepIndex}\\`;\n const existing = await collection.findOne({ _id: queueJobId });\n if (!existing) {\n throw new Error(\\`Queue job \\${queueJobId} not found\\`);\n }\n const step = existing.steps[stepIndex];\n if (!step) {\n throw new Error(\\`Queue job \\${queueJobId} has no step at index \\${stepIndex}\\`);\n }\n const mergedStep: QueueJobStep = {\n ...step,\n ...(update.status !== undefined && { status: update.status }),\n ...(update.input !== undefined && { input: update.input }),\n ...(update.output !== undefined && { output: update.output }),\n ...(update.error !== undefined && { error: update.error }),\n startedAt: update.startedAt ?? (update.status === 'running' ? now : step.startedAt),\n completedAt:\n update.completedAt ??\n (['completed', 'failed'].includes(update.status ?? '') ? now : step.completedAt),\n };\n const updateDoc: any = {\n $set: {\n [setKey]: mergedStep,\n updatedAt: now,\n },\n };\n if (update.status === 'failed') {\n updateDoc.$set.status = 'failed';\n if (!existing.completedAt) updateDoc.$set.completedAt = now;\n } else if (update.status === 'completed' && stepIndex === existing.steps.length - 1) {\n updateDoc.$set.status = 'completed';\n if (!existing.completedAt) updateDoc.$set.completedAt = now;\n }\n await collection.updateOne({ _id: queueJobId }, updateDoc);\n}\n\nexport async function appendQueueStep(\n queueJobId: string,\n step: { workerId: string; workerJobId: string }\n): Promise<void> {\n const collection = await getCollection();\n const now = new Date().toISOString();\n await collection.updateOne(\n { _id: queueJobId },\n {\n $push: {\n steps: {\n workerId: step.workerId,\n workerJobId: step.workerJobId,\n status: 'queued',\n },\n },\n $set: { updatedAt: now },\n }\n );\n}\n\n/**\n * Update queue job overall status (e.g. from webhook when queue run completes).\n */\nexport async function updateQueueJob(\n queueJobId: string,\n update: { status?: QueueJobRecord['status']; completedAt?: string }\n): Promise<void> {\n const now = new Date().toISOString();\n if (preferRedis()) {\n const redis = getRedis();\n const key = queueKey(queueJobId);\n const existing = await loadQueueJobRedis(queueJobId);\n if (!existing) throw new Error(\\`Queue job \\${queueJobId} not found\\`);\n const toSet: Record<string, string> = {\n status: update.status ?? 
existing.status,\n updatedAt: now,\n };\n if (update.completedAt !== undefined) toSet.completedAt = update.completedAt;\n await redis.hset(key, toSet);\n return;\n }\n const collection = await getCollection();\n const setDoc: Record<string, string> = { updatedAt: now };\n if (update.status !== undefined) setDoc.status = update.status;\n if (update.completedAt !== undefined) setDoc.completedAt = update.completedAt;\n await collection.updateOne({ _id: queueJobId }, { $set: setDoc });\n}\n\nexport async function getQueueJob(queueJobId: string): Promise<QueueJobRecord | null> {\n if (preferRedis()) {\n return loadQueueJobRedis(queueJobId);\n }\n const collection = await getCollection();\n const doc = await collection.findOne({ _id: queueJobId });\n if (!doc) return null;\n const { _id, ...record } = doc;\n return { ...record, id: _id };\n}\n\nexport async function listQueueJobs(\n queueId?: string,\n limit = 50\n): Promise<QueueJobRecord[]> {\n if (preferRedis()) {\n // Redis: scan for keys matching prefix, then load each\n // Note: This is less efficient than MongoDB queries, but acceptable for small datasets\n const redis = getRedis();\n const pattern = queueKey('*');\n const keys: string[] = [];\n let cursor: number = 0;\n do {\n const result = await redis.scan(cursor, { match: pattern, count: 100 });\n cursor = typeof result[0] === 'number' ? result[0] : parseInt(String(result[0]), 10);\n keys.push(...(result[1] || []));\n } while (cursor !== 0);\n \n const jobs = await Promise.all(\n keys.map((key) => {\n const id = key.replace(queueKeyPrefix, '');\n return loadQueueJobRedis(id);\n })\n );\n const valid = jobs.filter((j): j is QueueJobRecord => j !== null);\n const filtered = queueId ? valid.filter((j) => j.queueId === queueId) : valid;\n return filtered\n .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime())\n .slice(0, limit);\n }\n const collection = await getCollection();\n const filter = queueId ? { queueId } : {};\n const docs = await collection\n .find(filter)\n .sort({ createdAt: -1 })\n .limit(limit)\n .toArray();\n return docs.map((doc) => {\n const { _id, ...record } = doc;\n return { ...record, id: _id };\n });\n}\n`,\n\n 'registry/workers.ts': `/**\n * Worker registry system.\n *\n * Uses only the GET /workers/config API as the source of truth.\n * No directory scanning, no dynamic imports, no .worker.ts loading.\n *\n * - getWorker(workerId): returns a synthetic WorkerAgent that dispatches via POST /workers/trigger\n * - listWorkers(): returns worker IDs from the config API response\n * - getQueueRegistry(): returns QueueRegistry from config (for dispatchQueue)\n */\n\nimport type { WorkerAgent, WorkerQueueRegistry } from '@microfox/ai-worker';\n\n/** Queue step config (matches WorkerQueueStep from @microfox/ai-worker). */\nexport interface QueueStepConfig {\n workerId: string;\n delaySeconds?: number;\n mapInputFromPrev?: string;\n}\n\n/** Queue config from workers/config API (matches WorkerQueueConfig structure). 
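[Editor's sketch] End to end, the queue job store API above is driven like this (ids hypothetical). Note that, as written in the template, `updateQueueStep` and `appendQueueStep` go through the MongoDB collection directly, while `createQueueJob`, `updateQueueJob`, `getQueueJob`, and `listQueueJobs` branch on `preferRedis()`:

// Hypothetical lifecycle of one queue run.
await createQueueJob('queue-job-1', 'daily-digest', { workerId: 'fetch-posts', workerJobId: 'job-a' });
await updateQueueStep('queue-job-1', 0, { status: 'running' });
await updateQueueStep('queue-job-1', 0, { status: 'completed', output: { posts: 12 } });
await appendQueueStep('queue-job-1', { workerId: 'send-digest', workerJobId: 'job-b' });
await updateQueueJob('queue-job-1', { status: 'completed', completedAt: new Date().toISOString() });
const run = await getQueueJob('queue-job-1');        // full record with steps
const recent = await listQueueJobs('daily-digest', 10);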
*/\nexport interface QueueConfig {\n id: string;\n steps: QueueStepConfig[];\n schedule?: string | { rate: string; enabled?: boolean; input?: Record<string, any> };\n}\n\nexport interface WorkersConfig {\n version?: string;\n stage?: string;\n region?: string;\n workers: Record<string, { queueUrl: string; region: string }>;\n queues?: QueueConfig[];\n}\n\nlet configCache: WorkersConfig | null = null;\n\nfunction getConfigBaseUrl(): string {\n const raw =\n process.env.WORKERS_CONFIG_API_URL ||\n process.env.WORKER_BASE_URL;\n if (!raw?.trim()) {\n throw new Error(\n 'WORKERS_CONFIG_API_URL or WORKER_BASE_URL is required for the worker registry. ' +\n 'Set it to the base URL of your workers service (e.g. https://xxx.execute-api.us-east-1.amazonaws.com/prod).'\n );\n }\n const base = raw.trim().replace(/\\\\/+$/, '');\n if (base.endsWith('/workers/config')) {\n return base.replace(/\\\\/workers\\\\/config\\\\/?$/, '');\n }\n return base;\n}\n\nfunction getConfigUrl(): string {\n const base = getConfigBaseUrl();\n return \\`\\${base}/workers/config\\`;\n}\n\nfunction getTriggerUrl(): string {\n const base = getConfigBaseUrl();\n return \\`\\${base}/workers/trigger\\`;\n}\n\n/**\n * Fetch and cache workers config from GET /workers/config.\n */\nexport async function fetchWorkersConfig(): Promise<WorkersConfig> {\n if (configCache) {\n return configCache;\n }\n const configUrl = getConfigUrl();\n const headers: Record<string, string> = { 'Content-Type': 'application/json' };\n const apiKey = process.env.WORKERS_CONFIG_API_KEY;\n if (apiKey) {\n headers['x-workers-config-key'] = apiKey;\n }\n const res = await fetch(configUrl, { method: 'GET', headers });\n if (!res.ok) {\n throw new Error(\n \\`[WorkerRegistry] GET \\${configUrl} failed: \\${res.status} \\${res.statusText}\\`\n );\n }\n const data = (await res.json()) as WorkersConfig;\n if (!data?.workers || typeof data.workers !== 'object') {\n throw new Error(\n '[WorkerRegistry] Invalid config: expected { workers: { [id]: { queueUrl, region } } }'\n );\n }\n configCache = data;\n const workerIds = Object.keys(data.workers);\n const queueIds = data.queues?.map((q) => q.id) ?? [];\n console.log('[WorkerRegistry] Config loaded', { workers: workerIds.length, queues: queueIds });\n return data;\n}\n\n/**\n * Build a synthetic WorkerAgent that dispatches via POST /workers/trigger.\n * Matches the trigger API contract used by @microfox/ai-worker.\n */\nfunction createSyntheticAgent(workerId: string): WorkerAgent<any, any> {\n return {\n id: workerId,\n dispatch: async (input: any, options: any) => {\n const jobId =\n options?.jobId ||\n \\`job-\\${Date.now()}-\\${Math.random().toString(36).slice(2, 11)}\\`;\n const webhookUrl = options?.webhookUrl;\n const metadata = options?.metadata ?? {};\n const triggerUrl = getTriggerUrl();\n const messageBody = {\n workerId,\n jobId,\n input: input ?? {},\n context: {},\n webhookUrl: webhookUrl ?? undefined,\n metadata,\n timestamp: new Date().toISOString(),\n };\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n const key = process.env.WORKERS_TRIGGER_API_KEY;\n if (key) {\n headers['x-workers-trigger-key'] = key;\n }\n const response = await fetch(triggerUrl, {\n method: 'POST',\n headers,\n body: JSON.stringify({ workerId, body: messageBody }),\n });\n if (!response.ok) {\n const text = await response.text().catch(() => '');\n throw new Error(\n \\`Failed to trigger worker \"\\${workerId}\": \\${response.status} \\${response.statusText}\\${text ? 
\\` - \\${text}\\` : ''}\\`\n );\n }\n const data = (await response.json().catch(() => ({}))) as any;\n const messageId = data?.messageId ? String(data.messageId) : \\`trigger-\\${jobId}\\`;\n return { messageId, status: 'queued' as const, jobId };\n },\n } as WorkerAgent<any, any>;\n}\n\n/**\n * List worker IDs from the config API.\n */\nexport async function listWorkers(): Promise<string[]> {\n const config = await fetchWorkersConfig();\n return Object.keys(config.workers);\n}\n\n/**\n * Get a worker by ID. Returns a synthetic WorkerAgent that dispatches via\n * POST /workers/trigger. Returns null if the worker is not in the config.\n */\nexport async function getWorker(\n workerId: string\n): Promise<WorkerAgent<any, any> | null> {\n const config = await fetchWorkersConfig();\n if (!(workerId in config.workers)) {\n return null;\n }\n return createSyntheticAgent(workerId);\n}\n\n/** Webpack require.context – auto-discovers app/ai/queues/*.queue.ts (Next.js). */\nfunction getQueueModuleContext(): { keys(): string[]; (key: string): unknown } | null {\n try {\n if (typeof require === 'undefined') return null;\n const ctx = (require as unknown as { context: (dir: string, sub: boolean, re: RegExp) => { keys(): string[]; (k: string): unknown } }).context(\n '@/app/ai/queues',\n false,\n /\\\\.queue\\\\.ts$/\n );\n return ctx;\n } catch {\n return null;\n }\n}\n\n/**\n * Auto-discover queue modules from app/ai/queues/*.queue.ts (no per-queue registration).\n * Uses require.context when available (Next.js/webpack).\n */\nfunction buildQueueModules(): Record<string, Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>> {\n const ctx = getQueueModuleContext();\n if (!ctx) return {};\n const out: Record<string, Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>> = {};\n for (const key of ctx.keys()) {\n const mod = ctx(key) as { default?: { id?: string }; [k: string]: unknown };\n const id = mod?.default?.id;\n if (id && typeof id === 'string') {\n out[id] = mod as Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>;\n }\n }\n return out;\n}\n\nconst queueModules = buildQueueModules();\n\n/**\n * Returns a registry compatible with dispatchQueue. Queue definitions come from\n * GET /workers/config; mapInputFromPrev is resolved from app/ai/queues/*.queue.ts\n * automatically (no manual registration per queue).\n */\nexport async function getQueueRegistry(): Promise<WorkerQueueRegistry> {\n const config = await fetchWorkersConfig();\n const queues: QueueConfig[] = config.queues ?? [];\n\n const registry = {\n getQueueById(queueId: string) {\n return queues.find((q) => q.id === queueId);\n },\n invokeMapInput(\n queueId: string,\n stepIndex: number,\n initialInput: unknown,\n previousOutputs: Array<{ stepIndex: number; workerId: string; output: unknown }>\n ): unknown {\n const queue = queues.find((q) => q.id === queueId);\n const step = queue?.steps?.[stepIndex];\n const fnName = step?.mapInputFromPrev;\n if (!fnName) {\n return previousOutputs.length > 0 ? previousOutputs[previousOutputs.length - 1].output : initialInput;\n }\n const mod = queueModules[queueId];\n if (!mod || typeof mod[fnName] !== 'function') {\n return previousOutputs.length > 0 ? previousOutputs[previousOutputs.length - 1].output : initialInput;\n }\n return mod[fnName](initialInput, previousOutputs);\n },\n };\n return registry as WorkerQueueRegistry;\n}\n\n/**\n * Clear the in-memory config cache (e.g. 
for tests or refresh).\n */\nexport function clearConfigCache(): void {\n configCache = null;\n}\n`,\n\n 'workers/[...slug]/route.ts': `import { NextRequest, NextResponse } from 'next/server';\n\n/**\n * Worker execution endpoint.\n *\n * POST /api/workflows/workers/:workerId - Execute a worker\n * GET /api/workflows/workers/:workerId/:jobId - Get worker job status\n * POST /api/workflows/workers/:workerId/webhook - Webhook callback for completion notifications\n *\n * This endpoint allows workers to be called like workflows, enabling\n * them to be used in orchestration.\n *\n * Workers are auto-discovered from app/ai directory (any .worker.ts files) or\n * can be imported and registered manually via registerWorker().\n */\n\n// Worker auto-discovery is implemented in ../registry/workers\n// - Create worker registry module: app/api/workflows/registry/workers.ts\n// - Scan app/ai/**/*.worker.ts files at startup or lazily on first access\n// - Use glob pattern: 'app/ai/**/*.worker.ts'\n// - Extract worker ID from file: const worker = await import(filePath); worker.id\n// - Cache workers in memory or persistent store\n// - Support hot-reload in development\n// - Export: scanWorkers(), getWorker(workerId), listWorkers()\n\n/**\n * Get a worker by ID.\n */\nasync function getWorkerById(workerId: string): Promise<any | null> {\n const workersModule = await import('../../registry/workers') as { getWorker: (workerId: string) => Promise<any | null> };\n return await workersModule.getWorker(workerId);\n}\n\nexport async function POST(\n req: NextRequest,\n { params }: { params: Promise<{ slug: string[] }> }\n) {\n let slug: string[] = [];\n try {\n const { slug: slugParam } = await params;\n slug = slugParam || [];\n const [workerId, action] = slug;\n\n // Handle webhook endpoint\n if (action === 'webhook') {\n return handleWebhook(req, workerId);\n }\n\n // Handle job store update endpoint (POST /api/workflows/workers/:workerId/update)\n if (action === 'update') {\n return handleJobUpdate(req, workerId);\n }\n\n // Create job record (POST /api/workflows/workers/:workerId/job) – used before polling when trigger-only\n if (action === 'job') {\n return handleCreateJob(req, workerId);\n }\n\n if (!workerId) {\n return NextResponse.json(\n { error: 'Worker ID is required' },\n { status: 400 }\n );\n }\n\n let body;\n try {\n body = await req.json();\n } catch (parseError: any) {\n console.error('[Worker] Failed to parse request body:', {\n workerId,\n error: parseError?.message || String(parseError),\n });\n return NextResponse.json(\n { error: 'Invalid JSON in request body' },\n { status: 400 }\n );\n }\n\n const { input, await: shouldAwait = false, jobId: providedJobId } = body;\n\n console.log('[Worker] Dispatching worker:', {\n workerId,\n shouldAwait,\n hasInput: !!input,\n });\n\n // Get the worker using registry system\n let worker;\n try {\n worker = await getWorkerById(workerId);\n } catch (getWorkerError: any) {\n console.error('[Worker] Error getting worker:', {\n workerId,\n error: getWorkerError?.message || String(getWorkerError),\n });\n return NextResponse.json(\n { error: \\`Failed to get worker: \\${getWorkerError?.message || String(getWorkerError)}\\` },\n { status: 500 }\n );\n }\n\n if (!worker) {\n console.warn('[Worker] Worker not found:', {\n workerId,\n });\n return NextResponse.json(\n { error: \\`Worker \"\\${workerId}\" not found. Make sure it's exported from a .worker.ts file.\\` },\n { status: 404 }\n );\n }\n\n // Webhook optional. 
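[Editor's sketch] Dispatching through the registry above only requires the config API to be reachable; the synthetic agent turns `dispatch()` into a POST to /workers/trigger. A usage sketch, assuming the import path and worker id shown here (both hypothetical):

import { getWorker, listWorkers } from './registry/workers';

const ids = await listWorkers();               // worker IDs from GET /workers/config
const worker = await getWorker('send-email');  // null if not in the config
if (!worker) throw new Error('Worker not found in config');
const result = await worker.dispatch({ to: 'a@b.c' }, { metadata: { source: 'example' } });
console.log(result.jobId, result.status);      // e.g. "job-…", "queued"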
Job updates use MongoDB only; never pass jobStoreUrl.\n const webhookBase = process.env.WORKFLOW_WEBHOOK_BASE_URL;\n const webhookUrl =\n shouldAwait && typeof webhookBase === 'string' && webhookBase\n ? \\`\\${webhookBase.replace(/\\\\/+$/, '')}/api/workflows/workers/\\${workerId}/webhook\\`\n : undefined;\n\n // Use a single jobId end-to-end (Next job store + SQS/Lambda job store).\n // If caller provides jobId, respect it; otherwise generate one.\n const jobId =\n (typeof providedJobId === 'string' && providedJobId.trim()\n ? providedJobId.trim()\n : \\`job-\\${Date.now()}-\\${Math.random().toString(36).slice(2, 11)}\\`);\n\n // Store initial job record\n const { setJob } = await import('../../stores/jobStore');\n try {\n await setJob(jobId, {\n jobId,\n workerId,\n status: 'queued',\n input: input || {},\n metadata: { source: 'workflow-orchestration' },\n });\n console.log('[Worker] Initial job record created:', {\n jobId,\n workerId,\n });\n } catch (setJobError: any) {\n console.error('[Worker] Failed to create initial job record:', {\n jobId,\n workerId,\n error: setJobError?.message || String(setJobError),\n });\n // Continue even if job store fails - worker dispatch can still proceed\n }\n\n // Dispatch the worker. Job updates use MongoDB only; webhook only if configured.\n let dispatchResult;\n try {\n dispatchResult = await worker.dispatch(input || {}, {\n mode: 'auto',\n jobId,\n ...(webhookUrl ? { webhookUrl } : {}),\n metadata: { source: 'workflow-orchestration' },\n });\n console.log('[Worker] Worker dispatched successfully:', {\n jobId: dispatchResult.jobId,\n workerId,\n messageId: dispatchResult.messageId,\n });\n } catch (dispatchError: any) {\n console.error('[Worker] Failed to dispatch worker:', {\n workerId,\n error: dispatchError?.message || String(dispatchError),\n stack: process.env.NODE_ENV === 'development' ? dispatchError?.stack : undefined,\n });\n throw new Error(\\`Failed to dispatch worker: \\${dispatchError?.message || String(dispatchError)}\\`);\n }\n\n const finalJobId = dispatchResult.jobId || jobId;\n\n if (shouldAwait) {\n // For await mode, return job info and let caller poll status\n // The webhook handler will update the job when complete\n // For Vercel workflow: Use polling with setTimeout/setInterval\n // Workers are fire-and-forget only\n return NextResponse.json(\n {\n jobId: finalJobId,\n status: 'queued',\n message: 'Worker job queued. Use GET /api/workflows/workers/:workerId/:jobId to check status, or wait for webhook.',\n },\n { status: 200 }\n );\n }\n\n return NextResponse.json(\n {\n jobId: finalJobId,\n status: dispatchResult.status || 'queued',\n },\n { status: 200 }\n );\n } catch (error: any) {\n console.error('[Worker] Error in POST handler:', {\n workerId: slug[0],\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n return NextResponse.json(\n { \n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? 
error?.stack : undefined,\n },\n { status: 500 }\n );\n }\n}\n\nexport async function GET(\n req: NextRequest,\n { params }: { params: Promise<{ slug: string[] }> }\n) {\n let slug: string[] = [];\n try {\n const { slug: slugParam } = await params;\n slug = slugParam || [];\n const [workerId, jobId] = slug;\n\n if (!workerId || !jobId) {\n return NextResponse.json(\n { error: 'Worker ID and job ID are required' },\n { status: 400 }\n );\n }\n\n console.log('[Worker] Getting job status:', {\n jobId,\n workerId,\n });\n\n // Get job status from job store\n const { getJob } = await import('../../stores/jobStore');\n let job;\n try {\n job = await getJob(jobId);\n } catch (getJobError: any) {\n console.error('[Worker] Error getting job from store:', {\n jobId,\n workerId,\n error: getJobError?.message || String(getJobError),\n });\n return NextResponse.json(\n { error: \\`Failed to get job: \\${getJobError?.message || String(getJobError)}\\` },\n { status: 500 }\n );\n }\n \n if (!job) {\n console.warn('[Worker] Job not found:', {\n jobId,\n workerId,\n });\n return NextResponse.json(\n { error: \\`Job \"\\${jobId}\" not found\\` },\n { status: 404 }\n );\n }\n \n console.log('[Worker] Job status retrieved:', {\n jobId,\n workerId,\n status: job.status,\n });\n \n return NextResponse.json(\n {\n jobId: job.jobId,\n workerId: job.workerId,\n status: job.status,\n output: job.output,\n error: job.error,\n metadata: job.metadata,\n createdAt: job.createdAt,\n updatedAt: job.updatedAt,\n completedAt: job.completedAt,\n },\n { status: 200 }\n );\n } catch (error: any) {\n console.error('[Worker] Error in GET handler:', {\n workerId: slug[0],\n jobId: slug[1],\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n return NextResponse.json(\n { \n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n },\n { status: 500 }\n );\n }\n}\n\n/**\n * Create job record before polling (trigger-only flow).\n * POST /api/workflows/workers/:workerId/job\n * Body: { jobId, input }\n */\nasync function handleCreateJob(req: NextRequest, workerId: string) {\n try {\n if (!workerId) {\n return NextResponse.json({ error: 'Worker ID is required' }, { status: 400 });\n }\n const body = await req.json();\n const { jobId, input } = body;\n if (!jobId) {\n return NextResponse.json({ error: 'jobId is required in request body' }, { status: 400 });\n }\n const { setJob } = await import('../../stores/jobStore');\n await setJob(jobId, {\n jobId,\n workerId,\n status: 'queued',\n input: input ?? 
{},\n metadata: { source: 'workflow-orchestration' },\n });\n console.log('[Worker] Job created:', { jobId, workerId });\n return NextResponse.json({ message: 'Job created', jobId, workerId }, { status: 200 });\n } catch (error: any) {\n console.error('[Worker] Error creating job:', { workerId, error: error?.message || String(error) });\n return NextResponse.json(\n { error: error?.message || String(error) },\n { status: 500 }\n );\n }\n}\n\n/**\n * Handle job store update from worker context.\n * POST /api/workflows/workers/:workerId/update\n */\nasync function handleJobUpdate(req: NextRequest, workerId: string) {\n try {\n if (!workerId) {\n return NextResponse.json(\n { error: 'Worker ID is required' },\n { status: 400 }\n );\n }\n\n const body = await req.json();\n const { jobId, status, metadata, output, error } = body;\n\n if (!jobId) {\n return NextResponse.json(\n { error: 'jobId is required in request body' },\n { status: 400 }\n );\n }\n\n const { updateJob, setJob, getJob } = await import('../../stores/jobStore');\n const existing = await getJob(jobId);\n\n // Upsert: create job if missing (e.g. workflow triggered via /workers/trigger directly)\n if (!existing) {\n await setJob(jobId, {\n jobId,\n workerId,\n status: status ?? 'queued',\n input: {},\n metadata: metadata ?? {},\n output,\n error,\n });\n return NextResponse.json(\n { message: 'Job created and updated successfully', jobId, workerId },\n { status: 200 }\n );\n }\n\n const updateData: any = {};\n if (status !== undefined) updateData.status = status;\n if (metadata !== undefined) updateData.metadata = { ...existing.metadata, ...metadata };\n if (output !== undefined) updateData.output = output;\n if (error !== undefined) updateData.error = error;\n\n await updateJob(jobId, updateData);\n \n console.log('[Worker] Job updated:', { jobId, workerId, updates: Object.keys(updateData) });\n \n return NextResponse.json(\n { message: 'Job updated successfully', jobId, workerId },\n { status: 200 }\n );\n } catch (error: any) {\n console.error('[Worker] Error updating job:', {\n workerId,\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n return NextResponse.json(\n { error: error?.message || String(error) },\n { status: 500 }\n );\n }\n}\n\n/**\n * Handle webhook callback for worker completion.\n * POST /api/workflows/workers/:workerId/webhook\n * \n * This endpoint receives completion notifications from workers.\n * It updates the job store with the final status before returning.\n * Webhook is only called if webhookUrl was provided during dispatch.\n */\nasync function handleWebhook(req: NextRequest, workerId: string) {\n try {\n if (!workerId) {\n return NextResponse.json(\n { error: 'Worker ID is required' },\n { status: 400 }\n );\n }\n\n const body = await req.json();\n const { jobId, status, output, error, metadata } = body;\n\n if (!jobId) {\n return NextResponse.json(\n { error: 'jobId is required in webhook payload' },\n { status: 400 }\n );\n }\n\n // Update job store with completion status (before any further processing)\n const { updateJob } = await import('../../stores/jobStore');\n \n const jobStatus = status === 'success' ? 
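[Editor's sketch] From a client's perspective, the workers route above exposes a small REST surface. A fetch-based sketch against the relative paths it registers (worker id, payload, and the hand-sent webhook are hypothetical; in practice the runtime sends the webhook):

// Trigger a worker job.
const res = await fetch('/api/workflows/workers/send-email', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ input: { to: 'a@b.c' } }),
});
const { jobId } = await res.json();

// Poll its status.
const job = await (await fetch(`/api/workflows/workers/send-email/${jobId}`)).json();

// Completion callback (normally sent by the worker runtime, shown here for shape only).
await fetch('/api/workflows/workers/send-email/webhook', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ jobId, status: 'success', output: { ok: true } }),
});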
'completed' : 'failed';\n \n try {\n // Update job with completion status\n await updateJob(jobId, {\n jobId,\n workerId,\n status: jobStatus,\n output,\n error,\n completedAt: new Date().toISOString(),\n metadata: metadata || {},\n });\n \n console.log('[Worker] Webhook received and job updated:', {\n jobId,\n workerId,\n status: jobStatus,\n });\n } catch (updateError: any) {\n console.error('[Worker] Failed to update job store from webhook:', {\n jobId,\n workerId,\n error: updateError?.message || String(updateError),\n stack: process.env.NODE_ENV === 'development' ? updateError?.stack : undefined,\n });\n // Continue even if job store update fails - webhook was received\n }\n \n return NextResponse.json(\n { message: 'Webhook received', jobId, workerId, status: jobStatus },\n { status: 200 }\n );\n } catch (error: any) {\n console.error('[Worker] Error handling webhook:', {\n workerId,\n error: error?.message || String(error),\n stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,\n });\n return NextResponse.json(\n { error: error?.message || String(error) },\n { status: 500 }\n );\n }\n}\n`,\n\n 'queues/[...slug]/route.ts': `import { NextRequest, NextResponse } from 'next/server';\nimport { dispatchQueue } from '@microfox/ai-worker';\nimport { getQueueRegistry } from '../../registry/workers';\nimport {\n getQueueJob,\n listQueueJobs,\n updateQueueJob,\n updateQueueStep,\n appendQueueStep,\n} from '../../stores/queueJobStore';\n\nexport const dynamic = 'force-dynamic';\n\nconst LOG = '[Queues]';\n\n/**\n * Queue execution endpoint (mirrors workers route structure).\n *\n * POST /api/workflows/queues/:queueId - Trigger a queue (no registry import needed in app code)\n * GET /api/workflows/queues/:queueId/:jobId - Get queue job status\n * GET /api/workflows/queues - List queue jobs (query: queueId?, limit?)\n * POST /api/workflows/queues/:queueId/update - Update queue job step (for Lambda/callers)\n * POST /api/workflows/queues/:queueId/webhook - Webhook for queue completion\n *\n * Callers can trigger a queue with a simple POST; registry is resolved inside this route.\n */\nasync function getRegistry() {\n return getQueueRegistry();\n}\n\nexport async function POST(\n req: NextRequest,\n { params }: { params: Promise<{ slug: string[] }> }\n) {\n let slug: string[] = [];\n try {\n const { slug: slugParam } = await params;\n slug = slugParam ?? [];\n const [queueId, action] = slug;\n\n if (action === 'update') {\n return handleQueueJobUpdate(req, queueId);\n }\n if (action === 'webhook') {\n return handleQueueWebhook(req, queueId);\n }\n\n if (!queueId) {\n return NextResponse.json(\n { error: 'Queue ID is required. Use POST /api/workflows/queues/:queueId to trigger a queue.' },\n { status: 400 }\n );\n }\n\n let body: { input?: unknown; metadata?: Record<string, unknown>; jobId?: string } = {};\n try {\n body = await req.json();\n } catch {\n body = {};\n }\n const { input = {}, metadata, jobId: providedJobId } = body;\n\n const registry = await getRegistry();\n const queue = registry.getQueueById(queueId);\n if (!queue) {\n console.warn(\\`\\${LOG} Queue not found: \\${queueId}\\`);\n return NextResponse.json(\n { error: \\`Queue \"\\${queueId}\" not found. Ensure workers are deployed and config is available.\\` },\n { status: 404 }\n );\n }\n\n const result = await dispatchQueue(queueId, input as Record<string, unknown>, {\n registry,\n metadata: metadata ?? { source: 'queues-api' },\n ...(typeof providedJobId === 'string' && providedJobId.trim() ? 
{ jobId: providedJobId.trim() } : {}),\n });\n\n console.log(\\`\\${LOG} Queue triggered\\`, {\n queueId: result.queueId,\n jobId: result.jobId,\n messageId: result.messageId,\n });\n\n return NextResponse.json(\n {\n jobId: result.jobId,\n status: result.status,\n messageId: result.messageId,\n queueId: result.queueId,\n queueJobUrl: \\`/api/workflows/queues/\\${queueId}/\\${result.jobId}\\`,\n },\n { status: 200 }\n );\n } catch (error: unknown) {\n const err = error instanceof Error ? error : new Error(String(error));\n console.error(\\`\\${LOG} POST error:\\`, err.message, err.stack);\n return NextResponse.json(\n { error: err.message },\n { status: 500 }\n );\n }\n}\n\nexport async function GET(\n req: NextRequest,\n { params }: { params: Promise<{ slug: string[] }> }\n) {\n let slug: string[] = [];\n try {\n const { slug: slugParam } = await params;\n slug = slugParam ?? [];\n const [queueId, jobId] = slug;\n\n // List: GET /api/workflows/queues or GET /api/workflows/queues?queueId=...&limit=...\n if (slug.length === 0 || (slug.length === 1 && !jobId)) {\n const { searchParams } = new URL(req.url);\n const filterQueueId = searchParams.get('queueId') ?? (slug[0] || undefined);\n const limit = Math.min(\n 100,\n Math.max(1, parseInt(searchParams.get('limit') ?? '50', 10) || 50)\n );\n const jobs = await listQueueJobs(filterQueueId, limit);\n return NextResponse.json({ jobs });\n }\n\n // Get one: GET /api/workflows/queues/:queueId/:jobId\n if (!queueId || !jobId) {\n return NextResponse.json(\n { error: 'Queue ID and job ID are required for GET. Use GET /api/workflows/queues/:queueId/:jobId' },\n { status: 400 }\n );\n }\n\n const job = await getQueueJob(jobId);\n if (!job) {\n return NextResponse.json({ error: 'Queue job not found' }, { status: 404 });\n }\n if (job.queueId !== queueId) {\n return NextResponse.json({ error: 'Queue job does not belong to this queue' }, { status: 400 });\n }\n\n return NextResponse.json(job);\n } catch (error: unknown) {\n const err = error instanceof Error ? error : new Error(String(error));\n console.error(\\`\\${LOG} GET error:\\`, err.message);\n return NextResponse.json(\n { error: err.message },\n { status: 500 }\n );\n }\n}\n\nasync function handleQueueJobUpdate(req: NextRequest, queueId: string) {\n if (!queueId) {\n return NextResponse.json({ error: 'Queue ID is required' }, { status: 400 });\n }\n const body = await req.json();\n const { queueJobId, jobId, action, stepIndex, workerJobId, workerId, output, error, input } = body;\n const id = queueJobId ?? 
jobId;\n if (!id) {\n return NextResponse.json(\n { error: 'queueJobId or jobId is required in request body' },\n { status: 400 }\n );\n }\n\n if (action === 'append') {\n if (!workerId || !workerJobId) {\n return NextResponse.json(\n { error: 'append requires workerId and workerJobId' },\n { status: 400 }\n );\n }\n await appendQueueStep(id, { workerId, workerJobId });\n console.log(\\`\\${LOG} Step appended\\`, { queueJobId: id, workerId, workerJobId });\n return NextResponse.json({ ok: true, action: 'append' });\n }\n\n if (action === 'start') {\n if (typeof stepIndex !== 'number' || !workerJobId) {\n return NextResponse.json(\n { error: 'start requires stepIndex and workerJobId' },\n { status: 400 }\n );\n }\n await updateQueueStep(id, stepIndex, {\n status: 'running',\n startedAt: new Date().toISOString(),\n ...(input !== undefined && { input }),\n });\n console.log(\\`\\${LOG} Step started\\`, { queueJobId: id, stepIndex, workerJobId });\n return NextResponse.json({ ok: true, action: 'start' });\n }\n\n if (action === 'complete') {\n if (typeof stepIndex !== 'number' || !workerJobId) {\n return NextResponse.json(\n { error: 'complete requires stepIndex and workerJobId' },\n { status: 400 }\n );\n }\n await updateQueueStep(id, stepIndex, {\n status: 'completed',\n output,\n completedAt: new Date().toISOString(),\n });\n console.log(\\`\\${LOG} Step completed\\`, { queueJobId: id, stepIndex, workerJobId });\n return NextResponse.json({ ok: true, action: 'complete' });\n }\n\n if (action === 'fail') {\n if (typeof stepIndex !== 'number' || !workerJobId) {\n return NextResponse.json(\n { error: 'fail requires stepIndex and workerJobId' },\n { status: 400 }\n );\n }\n await updateQueueStep(id, stepIndex, {\n status: 'failed',\n error: error ?? { message: 'Unknown error' },\n completedAt: new Date().toISOString(),\n });\n console.log(\\`\\${LOG} Step failed\\`, { queueJobId: id, stepIndex, workerJobId });\n return NextResponse.json({ ok: true, action: 'fail' });\n }\n\n return NextResponse.json(\n { error: \\`Unknown action: \\${action}. Use start|complete|fail|append\\` },\n { status: 400 }\n );\n}\n\n/**\n * Handle webhook callback for queue completion.\n * POST /api/workflows/queues/:queueId/webhook\n *\n * When a webhook URL is provided at dispatch time, the worker/runtime calls this\n * instead of updating the job store directly. This handler updates the queue job\n * store with the final status (same outcome as when no webhook: store reflects completion).\n */\nasync function handleQueueWebhook(req: NextRequest, queueId: string) {\n try {\n if (!queueId) {\n return NextResponse.json({ error: 'Queue ID is required' }, { status: 400 });\n }\n\n const body = await req.json();\n const { queueJobId, jobId, status, output, error, metadata } = body;\n const id = queueJobId ?? jobId;\n if (!id) {\n return NextResponse.json(\n { error: 'queueJobId or jobId is required in webhook payload' },\n { status: 400 }\n );\n }\n\n const jobStatus = status === 'success' ? 'completed' : 'failed';\n\n try {\n await updateQueueJob(id, {\n status: jobStatus,\n completedAt: new Date().toISOString(),\n });\n console.log(\\`\\${LOG} Webhook received and queue job updated:\\`, {\n queueJobId: id,\n queueId,\n status: jobStatus,\n });\n } catch (updateError: unknown) {\n const err = updateError instanceof Error ? 
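[Editor's sketch] The update endpoint above is action-based (`start | complete | fail | append`). A sketch of the step lifecycle a Lambda-side caller would drive against it (queue id, job ids, and outputs hypothetical):

// Small helper for POST /api/workflows/queues/:queueId/update.
const update = (body: Record<string, unknown>) =>
  fetch('/api/workflows/queues/daily-digest/update', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ queueJobId: 'queue-job-1', ...body }),
  });

await update({ action: 'start', stepIndex: 0, workerJobId: 'job-a' });
await update({ action: 'complete', stepIndex: 0, workerJobId: 'job-a', output: { posts: 12 } });
await update({ action: 'append', workerId: 'send-digest', workerJobId: 'job-b' });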
updateError : new Error(String(updateError));\n console.error(\\`\\${LOG} Failed to update queue job from webhook:\\`, {\n queueJobId: id,\n queueId,\n error: err.message,\n });\n // Still return 200 so the caller does not retry; store update can be retried elsewhere if needed\n }\n\n return NextResponse.json(\n { message: 'Webhook received', queueId, queueJobId: id, status: jobStatus },\n { status: 200 }\n );\n } catch (error: unknown) {\n const err = error instanceof Error ? error : new Error(String(error));\n console.error(\\`\\${LOG} Error handling queue webhook:\\`, { queueId, error: err.message });\n return NextResponse.json(\n { error: err.message },\n { status: 500 }\n );\n }\n}\n`,\n\n '../../../hooks/useWorkflowJob.ts': `'use client';\n\nimport { useCallback, useEffect, useRef, useState } from 'react';\n\nexport type WorkflowJobStatus =\n | 'idle'\n | 'queued'\n | 'running'\n | 'completed'\n | 'failed'\n | 'partial';\n\nexport interface WorkerJobResult {\n jobId: string;\n workerId: string;\n status: string;\n output?: unknown;\n error?: { message: string; stack?: string };\n metadata?: Record<string, unknown>;\n createdAt?: string;\n updatedAt?: string;\n completedAt?: string;\n}\n\nexport interface QueueJobStep {\n workerId: string;\n workerJobId: string;\n status: string;\n input?: unknown;\n output?: unknown;\n error?: { message: string };\n startedAt?: string;\n completedAt?: string;\n}\n\nexport interface QueueJobResult {\n id: string;\n queueId: string;\n status: string;\n steps: QueueJobStep[];\n metadata?: Record<string, unknown>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n}\n\nexport type WorkflowJobOutput = WorkerJobResult | QueueJobResult;\n\nexport interface UseWorkflowJobBaseOptions {\n /** Base URL for API calls (default: '' for relative, or set window.location.origin) */\n baseUrl?: string;\n /** Poll interval in ms (default: 2000) */\n pollIntervalMs?: number;\n /** Stop polling after this many ms (default: 300000 = 5 min) */\n pollTimeoutMs?: number;\n /** Start polling automatically after trigger (default: true) */\n autoPoll?: boolean;\n /** Called when job reaches completed (or queue: completed/partial) */\n onComplete?: (result: WorkflowJobOutput) => void;\n /** Called when job fails or trigger/poll errors */\n onError?: (error: Error) => void;\n /** If false, trigger is a no-op and auto-poll is skipped (default: true) */\n enabled?: boolean;\n}\n\nexport interface UseWorkflowJobWorkerOptions extends UseWorkflowJobBaseOptions {\n type: 'worker';\n workerId: string;\n}\n\nexport interface UseWorkflowJobQueueOptions extends UseWorkflowJobBaseOptions {\n type: 'queue';\n queueId: string;\n /** Optional metadata for queue trigger */\n metadata?: Record<string, unknown>;\n}\n\nexport type UseWorkflowJobOptions =\n | UseWorkflowJobWorkerOptions\n | UseWorkflowJobQueueOptions;\n\nconst TERMINAL_STATUSES = ['completed', 'failed', 'partial'];\n\nfunction getBaseUrl(baseUrl?: string): string {\n if (baseUrl !== undefined && baseUrl !== '') return baseUrl;\n if (typeof window !== 'undefined') return window.location.origin;\n return '';\n}\n\nexport interface UseWorkflowJobReturn {\n /** Trigger the worker or queue. Pass input for the job. 
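[Editor's sketch] The option and result types above describe the hook's whole client contract; the implementation that follows wires them to a trigger-then-poll flow. Intended usage in a client component might look like this (component name, worker id, and payload hypothetical):

'use client';

function SendEmailButton() {
  const { trigger, status, output, loading } = useWorkflowJob({
    type: 'worker',
    workerId: 'send-email',
    onComplete: (job) => console.log('done', job.output),
    onError: (err) => console.error(err),
  });

  return (
    <button disabled={loading} onClick={() => trigger({ to: 'a@b.c' })}>
      {status === 'idle' ? 'Send' : status}
    </button>
  );
}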
*/\n trigger: (input?: Record<string, unknown>) => Promise<void>;\n /** Current job/queue job id (after trigger) */\n jobId: string | null;\n /** Current status: idle | queued | running | completed | failed | partial */\n status: WorkflowJobStatus;\n /** Last job output (worker or queue job object) */\n output: WorkflowJobOutput | null;\n /** Error from trigger or from job failure */\n error: Error | null;\n /** True while the trigger request is in flight */\n loading: boolean;\n /** True while polling for job status */\n polling: boolean;\n /** Reset state so you can trigger again */\n reset: () => void;\n}\n\nexport function useWorkflowJob(\n options: UseWorkflowJobWorkerOptions\n): UseWorkflowJobReturn & { output: WorkerJobResult | null };\nexport function useWorkflowJob(\n options: UseWorkflowJobQueueOptions\n): UseWorkflowJobReturn & { output: QueueJobResult | null };\nexport function useWorkflowJob(\n options: UseWorkflowJobOptions\n): UseWorkflowJobReturn {\n const {\n baseUrl: baseUrlOpt,\n pollIntervalMs = 2000,\n pollTimeoutMs = 300_000,\n autoPoll = true,\n onComplete,\n onError,\n enabled = true,\n } = options;\n\n const baseUrl = getBaseUrl(baseUrlOpt);\n const prefix = baseUrl ? baseUrl.replace(/\\\\/+$/, '') : '';\n const api = (path: string) => \\`\\${prefix}/api/workflows\\${path}\\`;\n\n const [jobId, setJobId] = useState<string | null>(null);\n const [status, setStatus] = useState<WorkflowJobStatus>('idle');\n const [output, setOutput] = useState<WorkflowJobOutput | null>(null);\n const [error, setError] = useState<Error | null>(null);\n const [loading, setLoading] = useState(false);\n const [polling, setPolling] = useState(false);\n\n const intervalRef = useRef<ReturnType<typeof setInterval> | null>(null);\n const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);\n const mountedRef = useRef(true);\n\n const clearPolling = useCallback(() => {\n if (intervalRef.current) {\n clearInterval(intervalRef.current);\n intervalRef.current = null;\n }\n if (timeoutRef.current) {\n clearTimeout(timeoutRef.current);\n timeoutRef.current = null;\n }\n setPolling(false);\n }, []);\n\n const reset = useCallback(() => {\n clearPolling();\n setJobId(null);\n setStatus('idle');\n setOutput(null);\n setError(null);\n setLoading(false);\n setPolling(false);\n }, [clearPolling]);\n\n const trigger = useCallback(\n async (input?: Record<string, unknown>) => {\n if (!enabled) return;\n\n setError(null);\n setOutput(null);\n setLoading(true);\n\n try {\n if (options.type === 'worker') {\n const res = await fetch(api(\\`/workers/\\${options.workerId}\\`), {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({ input: input ?? {}, await: false }),\n });\n const data = await res.json();\n if (!res.ok) throw new Error(data?.error ?? \\`HTTP \\${res.status}\\`);\n const id = data.jobId ?? null;\n if (!id) throw new Error('No jobId in response');\n setJobId(id);\n setStatus('queued');\n setLoading(false);\n\n if (autoPoll) {\n setPolling(true);\n const deadline = Date.now() + pollTimeoutMs;\n const poll = async () => {\n if (!mountedRef.current) return;\n try {\n const r = await fetch(\n api(\\`/workers/\\${options.workerId}/\\${id}\\`)\n );\n const job = await r.json();\n if (!r.ok) {\n if (Date.now() >= deadline) {\n clearPolling();\n const err = new Error('Poll timeout');\n setError(err);\n setStatus('failed');\n onError?.(err);\n }\n return;\n }\n setStatus((job.status as WorkflowJobStatus) ?? 
'running');\n setOutput(job as WorkerJobResult);\n if (job.status === 'completed') {\n clearPolling();\n onComplete?.(job as WorkerJobResult);\n } else if (job.status === 'failed') {\n clearPolling();\n const err = new Error(\n job?.error?.message ?? 'Job failed'\n );\n setError(err);\n setStatus('failed');\n onError?.(err);\n } else if (Date.now() >= deadline) {\n clearPolling();\n const err = new Error('Poll timeout');\n setError(err);\n setStatus('failed');\n onError?.(err);\n }\n } catch (e) {\n if (mountedRef.current) {\n clearPolling();\n const err = e instanceof Error ? e : new Error(String(e));\n setError(err);\n setStatus('failed');\n onError?.(err);\n }\n }\n };\n await poll();\n intervalRef.current = setInterval(poll, pollIntervalMs);\n timeoutRef.current = setTimeout(() => {\n clearPolling();\n setError(new Error('Poll timeout'));\n setStatus('failed');\n }, pollTimeoutMs);\n }\n } else {\n const body: Record<string, unknown> = {\n input: input ?? {},\n };\n if (options.metadata) body.metadata = options.metadata;\n const res = await fetch(api(\`/queues/\${options.queueId}\`), {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body),\n });\n const data = await res.json();\n if (!res.ok) throw new Error(data?.error ?? \`HTTP \${res.status}\`);\n const id = data.jobId ?? null;\n if (!id) throw new Error('No jobId in response');\n setJobId(id);\n setStatus('queued');\n setLoading(false);\n\n if (autoPoll) {\n setPolling(true);\n const deadline = Date.now() + pollTimeoutMs;\n const poll = async () => {\n if (!mountedRef.current) return;\n try {\n const r = await fetch(\n api(\`/queues/\${options.queueId}/\${id}\`)\n );\n const job = await r.json();\n if (!r.ok) {\n if (Date.now() >= deadline) {\n clearPolling();\n setError(new Error('Poll timeout'));\n setStatus('failed');\n }\n return;\n }\n const st = (job.status as string) ?? 'running';\n setStatus(st as WorkflowJobStatus);\n setOutput(job as QueueJobResult);\n if (TERMINAL_STATUSES.includes(st)) {\n clearPolling();\n if (st === 'failed') {\n const err = new Error('Queue job failed');\n setError(err);\n onError?.(err);\n } else {\n onComplete?.(job as QueueJobResult);\n }\n } else if (Date.now() >= deadline) {\n clearPolling();\n setError(new Error('Poll timeout'));\n setStatus('failed');\n }\n } catch (e) {\n if (mountedRef.current) {\n clearPolling();\n const err = e instanceof Error ? e : new Error(String(e));\n setError(err);\n setStatus('failed');\n onError?.(err);\n }\n }\n };\n await poll();\n intervalRef.current = setInterval(poll, pollIntervalMs);\n timeoutRef.current = setTimeout(() => {\n clearPolling();\n setError(new Error('Poll timeout'));\n setStatus('failed');\n }, pollTimeoutMs);\n }\n }\n } catch (e) {\n const err = e instanceof Error ? 
e : new Error(String(e));\n setError(err);\n setStatus('failed');\n setLoading(false);\n onError?.(err);\n }\n },\n [\n enabled,\n options,\n api,\n autoPoll,\n pollIntervalMs,\n pollTimeoutMs,\n onComplete,\n onError,\n clearPolling,\n ]\n );\n\n useEffect(() => {\n mountedRef.current = true;\n return () => {\n mountedRef.current = false;\n clearPolling();\n };\n }, [clearPolling]);\n\n return {\n trigger,\n jobId,\n status,\n output,\n error,\n loading,\n polling,\n reset,\n };\n}\n`,\n};\n\nconst WORKFLOW_SETTINGS_SNIPPET = ` // Workflow + worker runtime configuration (job store, etc.)\n workflowSettings: {\n jobStore: {\n // 'mongodb' | 'upstash-redis'\n type:\n (process.env.WORKER_DATABASE_TYPE as\n | 'mongodb'\n | 'upstash-redis') || 'upstash-redis',\n mongodb: {\n uri: process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI,\n db:\n process.env.DATABASE_MONGODB_DB ||\n process.env.MONGODB_DB ||\n 'ai_router',\n workerJobsCollection:\n process.env.MONGODB_WORKER_JOBS_COLLECTION || 'worker_jobs',\n workflowStatusCollection:\n process.env.MONGODB_WORKFLOW_STATUS_COLLECTION || 'workflow_status',\n },\n redis: {\n url:\n process.env.WORKER_UPSTASH_REDIS_REST_URL ||\n process.env.UPSTASH_REDIS_REST_URL,\n token:\n process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||\n process.env.UPSTASH_REDIS_REST_TOKEN,\n keyPrefix:\n process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX ||\n 'worker:jobs:',\n ttlSeconds:\n Number(process.env.WORKER_JOBS_TTL_SECONDS ?? 60 * 60 * 24 * 7),\n },\n },\n },`;\n\nfunction writeFile(filePath: string, content: string, force: boolean): boolean {\n if (fs.existsSync(filePath) && !force) {\n return false; // Skip existing file\n }\n const dir = path.dirname(filePath);\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n fs.writeFileSync(filePath, content, 'utf-8');\n return true; // File written\n}\n\nfunction mergeMicrofoxConfig(configPath: string, force: boolean): boolean {\n if (!fs.existsSync(configPath)) {\n // Create minimal config file\n const content = `export const StudioConfig = {\n appName: 'My App',\n projectInfo: {\n framework: 'next-js',\n },\n studioSettings: {\n protection: {\n enabled: false,\n },\n database: {\n type: 'local',\n },\n },\n${WORKFLOW_SETTINGS_SNIPPET}\n};\n`;\n fs.writeFileSync(configPath, content, 'utf-8');\n return true;\n }\n\n // Try to merge workflowSettings into existing config\n const content = fs.readFileSync(configPath, 'utf-8');\n \n // Check if workflowSettings already exists\n if (content.includes('workflowSettings')) {\n if (!force) {\n return false; // Already has workflowSettings, skip\n }\n // TODO: Could do smarter merging here, but for now just skip if exists and not force\n return false;\n }\n\n // Find the closing brace of StudioConfig and insert workflowSettings before it\n const lines = content.split('\\n');\n let insertIndex = -1;\n let braceCount = 0;\n let inStudioConfig = false;\n\n for (let i = 0; i < lines.length; i++) {\n const line = lines[i];\n if (line.includes('StudioConfig') && line.includes('=')) {\n inStudioConfig = true;\n }\n if (inStudioConfig) {\n const openBraces = (line.match(/{/g) || []).length;\n const closeBraces = (line.match(/}/g) || []).length;\n braceCount += openBraces - closeBraces;\n \n if (braceCount === 0 && closeBraces > 0 && insertIndex === -1) {\n insertIndex = i;\n break;\n }\n }\n }\n\n if (insertIndex === -1) {\n // Couldn't find insertion point, append at end before last }\n const lastBrace = content.lastIndexOf('}');\n if (lastBrace !== -1) {\n const before 
= content.slice(0, lastBrace);\n const after = content.slice(lastBrace);\n const newContent = before + ',\\n' + WORKFLOW_SETTINGS_SNIPPET + '\\n' + after;\n fs.writeFileSync(configPath, newContent, 'utf-8');\n return true;\n }\n return false;\n }\n\n // Insert workflowSettings before the closing brace\n const indent = lines[insertIndex].match(/^(\\s*)/)?.[1] || ' ';\n const workflowLines = WORKFLOW_SETTINGS_SNIPPET.split('\\n').map((l) => indent + l);\n\n lines.splice(insertIndex, 0, ...workflowLines);\n fs.writeFileSync(configPath, lines.join('\\n'), 'utf-8');\n return true;\n}\n\nexport const boilerplateCommand = new Command()\n .name('boilerplate')\n .description('Create or update worker boilerplate files (job store, API routes, config)')\n .option('--force', 'Overwrite existing files', false)\n .option('--app-dir <path>', 'App directory path (default: app)', 'app')\n .option('--skip-config', 'Skip microfox.config.ts updates', false)\n .action((options: { force?: boolean; appDir?: string; skipConfig?: boolean }) => {\n const spinner = ora('Creating boilerplate files...').start();\n try {\n const projectRoot = process.cwd();\n const appDir = options.appDir || 'app';\n const apiDir = path.join(appDir, 'api', 'workflows');\n const force = options.force || false;\n const skipConfig = options.skipConfig || false;\n\n const filesCreated: string[] = [];\n const filesSkipped: string[] = [];\n\n // Write template files (normalize so e.g. ../../../hooks/useWorkflowJob.ts resolves)\n for (const [relativePath, template] of Object.entries(TEMPLATES)) {\n const filePath = path.normalize(path.join(projectRoot, apiDir, relativePath));\n const written = writeFile(filePath, template, force);\n if (written) {\n filesCreated.push(path.relative(projectRoot, filePath));\n } else {\n filesSkipped.push(path.relative(projectRoot, filePath));\n }\n }\n\n // Handle microfox.config.ts\n let configUpdated = false;\n if (!skipConfig) {\n const configPath = path.join(projectRoot, 'microfox.config.ts');\n configUpdated = mergeMicrofoxConfig(configPath, force);\n if (configUpdated) {\n filesCreated.push('microfox.config.ts');\n } else if (fs.existsSync(configPath)) {\n filesSkipped.push('microfox.config.ts');\n }\n }\n\n spinner.succeed('Boilerplate files created');\n\n if (filesCreated.length > 0) {\n console.log(chalk.green('\\n✓ Created files:'));\n filesCreated.forEach((f) => console.log(chalk.gray(` - ${f}`)));\n }\n\n if (filesSkipped.length > 0) {\n console.log(chalk.yellow('\\n⚠ Skipped existing files (use --force to overwrite):'));\n filesSkipped.forEach((f) => console.log(chalk.gray(` - ${f}`)));\n }\n\n console.log(\n chalk.blue(\n `\\n📚 Next steps:\\n` +\n ` 1. Configure your job store in microfox.config.ts (workflowSettings.jobStore)\\n` +\n ` 2. Set environment variables (MONGODB_URI or UPSTASH_REDIS_*)\\n` +\n ` 3. Create your first worker: ${chalk.yellow('npx ai-worker new <worker-id>')}\\n` +\n ` 4. Deploy workers: ${chalk.yellow('npx ai-worker push')}\\n` +\n ` 5. 
Use ${chalk.yellow('hooks/useWorkflowJob.ts')} in client components to trigger and poll workers/queues`\n )\n );\n } catch (error: any) {\n spinner.fail('Failed to create boilerplate files');\n console.error(chalk.red(error?.stack || error?.message || String(error)));\n process.exitCode = 1;\n }\n });\n"],"mappings":";;;AAEA,SAAS,WAAAA,gBAAe;AACxB,SAAS,gBAAAC,qBAAoB;AAC7B,SAAS,qBAAqB;AAC9B,SAAS,WAAAC,UAAS,QAAAC,aAAY;;;ACL9B,SAAS,eAAe;AACxB,YAAY,aAAa;AACzB,SAAS,gBAAgB;AACzB,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,SAAS,qBAAqB;AAC9B,SAAS,sBAAsB;AAC/B,SAAS,YAAY;AACrB,YAAY,UAAU;AACtB,OAAO,WAAW;AAClB,OAAO,SAAS;AAEhB,IAAM,gBAAgB,IAAI;AAAA,EACxB,eAAe,IAAI,CAAC,MAAO,EAAE,WAAW,OAAO,IAAI,EAAE,MAAM,QAAQ,MAAM,IAAI,CAAE;AACjF;AAEA,SAAS,gBAAgB,WAA4B;AACnD,QAAM,IAAI,UAAU,WAAW,OAAO,IAClC,UAAU,MAAM,QAAQ,MAAM,IAC9B;AACJ,SAAO,cAAc,IAAI,CAAC;AAC5B;AAEA,SAAS,4BAA4B,WAA2B;AAE9D,MAAI,UAAU,WAAW,GAAG,GAAG;AAC7B,UAAM,CAAC,OAAO,IAAI,IAAI,UAAU,MAAM,GAAG;AACzC,WAAO,OAAO,GAAG,KAAK,IAAI,IAAI,KAAK;AAAA,EACrC;AAEA,SAAO,UAAU,MAAM,GAAG,EAAE,CAAC;AAC/B;AAEA,SAAS,sBAAsB,UAAkB,WAAkC;AACjF,QAAM,UAAe,aAAQ,QAAQ;AACrC,QAAM,MAAW,aAAQ,SAAS,SAAS;AAG3C,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,IACN,GAAG,GAAG;AAAA,EACR;AACA,aAAW,KAAK,YAAY;AAC1B,QAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,EAC1D;AAGA,MAAO,cAAW,GAAG,KAAQ,YAAS,GAAG,EAAE,YAAY,GAAG;AACxD,UAAM,gBAAgB;AAAA,MACf,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,UAAU;AAAA,MACpB,UAAK,KAAK,WAAW;AAAA,MACrB,UAAK,KAAK,WAAW;AAAA,IAC5B;AACA,eAAW,KAAK,eAAe;AAC7B,UAAO,cAAW,CAAC,KAAQ,YAAS,CAAC,EAAE,OAAO,EAAG,QAAO;AAAA,IAC1D;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,wBAAwB,QAA0B;AACzD,QAAM,QAAkB,CAAC;AAIzB,QAAM,MACJ;AACF,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAGA,QAAM,MAAM;AACZ,aAAW,SAAS,OAAO,SAAS,GAAG,GAAG;AACxC,QAAI,MAAM,CAAC,EAAG,OAAM,KAAK,MAAM,CAAC,CAAC;AAAA,EACnC;AAEA,SAAO;AACT;AAEA,SAAS,6BAA6B,QAGpC;AACA,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAGtC,QAAM,eAAe;AACrB,aAAW,SAAS,OAAO,SAAS,YAAY,GAAG;AACjD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,mBAAmB;AACzB,aAAW,SAAS,OAAO,SAAS,gBAAgB,GAAG;AACrD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,aAAY,IAAI,GAAG;AAAA,EAC9B;AAGA,QAAM,kBAAkB;AACxB,aAAW,SAAS,OAAO,SAAS,eAAe,GAAG;AACpD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAGA,QAAM,sBAAsB;AAC5B,aAAW,SAAS,OAAO,SAAS,mBAAmB,GAAG;AACxD,UAAM,MAAM,MAAM,CAAC;AACnB,QAAI,IAAK,eAAc,IAAI,GAAG;AAAA,EAChC;AAEA,SAAO,EAAE,aAAa,cAAc;AACtC;AAEA,eAAe,0BACb,kBACA,aACmE;AACnE,OAAK;AAEL,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,gBAAgB,oBAAI,IAAY;AAEtC,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAE/C,UAAM,QAAQ,6BAA6B,GAAG;AAC9C,UAAM,YAAY,QAAQ,CAAC,MAAM,YAAY,IAAI,CAAC,CAAC;AACnD,UAAM,cAAc,QAAQ,CAAC,MAAM,cAAc,IAAI,CAAC,CAAC;AAEvD,UAAM,aAAa,wBAAwB,GAAG;AAC9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAAA,IAE7B;AAAA,EACF;AAEA,cAAY,OAAO,EAAE;AACrB,gBAAc,OAAO,EAAE;AACvB,cAAY,OAAO,MAAM;AACzB,gBAAc,OAAO,MAAM;AAE3B,SAAO,EAAE,aAAa,cAAc;AACtC;AAMA,eAAe,uBACb,SACA,aA
CmC;AACnC,OAAK;AACL,QAAM,oBAAoB,oBAAI,IAAyB;AAEvD,QAAM,YAAY,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AAElD,aAAW,UAAU,SAAS;AAC5B,UAAM,YAAY,oBAAI,IAAY;AAClC,UAAM,UAAU,oBAAI,IAAY;AAChC,UAAM,QAAkB,CAAC,OAAO,QAAQ;AAExC,WAAO,MAAM,SAAS,GAAG;AACvB,YAAM,OAAO,MAAM,IAAI;AACvB,YAAM,aAAkB,aAAQ,IAAI;AACpC,UAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,cAAQ,IAAI,UAAU;AAEtB,UAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,YAAM,MAAS,gBAAa,YAAY,OAAO;AAG/C,YAAM,KAAK;AACX,iBAAW,SAAS,IAAI,SAAS,EAAE,GAAG;AACpC,YAAI,MAAM,CAAC,EAAG,WAAU,IAAI,MAAM,CAAC,CAAC;AAAA,MACtC;AAEA,YAAM,aAAa,wBAAwB,GAAG;AAC9C,iBAAW,QAAQ,YAAY;AAC7B,YAAI,CAAC,QAAQ,CAAC,KAAK,WAAW,GAAG,EAAG;AACpC,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AAAA,MACnC;AAAA,IACF;AAEA,QAAI,UAAU,OAAO,GAAG;AACtB,iBAAW,YAAY,WAAW;AAChC,YAAI,CAAC,UAAU,IAAI,QAAQ,GAAG;AAC5B,kBAAQ;AAAA,YACN,MAAM;AAAA,cACJ,yBAAe,OAAO,EAAE,YAAY,QAAQ;AAAA,YAC9C;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,wBAAkB,IAAI,OAAO,IAAI,SAAS;AAAA,IAC5C;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,uBAAuB,UAA0B;AACxD,SAAO,SAAS,QAAQ,MAAM,GAAG,EAAE,YAAY;AACjD;AAGA,SAAS,YAAY,IAAoB;AACvC,SAAO,GACJ,MAAM,eAAe,EACrB,OAAO,OAAO,EACd;AAAA,IAAI,CAAC,MAAM,MACV,MAAM,IAAI,KAAK,YAAY,IAAI,KAAK,OAAO,CAAC,EAAE,YAAY,IAAI,KAAK,MAAM,CAAC,EAAE,YAAY;AAAA,EAC1F,EACC,KAAK,EAAE;AACZ;AAGA,SAAS,gBAAgB,QAAgB,IAAoB;AAC3D,QAAM,QAAQ,YAAY,EAAE;AAC5B,SAAO,UAAU,MAAM,OAAO,CAAC,EAAE,YAAY,IAAI,MAAM,MAAM,CAAC;AAChE;AAEA,SAAS,aAAsB,UAA4B;AACzD,MAAI;AACF,WAAO,KAAK,MAAS,gBAAa,UAAU,OAAO,CAAC;AAAA,EACtD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,SAAS,iBAAiB,UAA0B;AAClD,MAAI,MAAW,aAAQ,QAAQ;AAE/B,SAAO,MAAM;AACX,UAAM,UAAe,UAAK,KAAK,cAAc;AAC7C,QAAO,cAAW,OAAO,GAAG;AAC1B,YAAM,MAAM,aAAkB,OAAO;AACrC,UAAI,KAAK,WAAY,QAAO;AAAA,IAC9B;AAEA,UAAM,SAAc,aAAQ,GAAG;AAC/B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAEA,eAAe,qCACb,kBACA,aACsB;AAGtB,QAAM,OAAO,oBAAI,IAAY,CAAC,uBAAuB,qBAAqB,CAAC;AAC3E,QAAM,UAAU,oBAAI,IAAY;AAChC,QAAM,QAAkB,CAAC,GAAG,gBAAgB;AAE5C,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,OAAO,MAAM,IAAI;AACvB,UAAM,aAAkB,aAAQ,IAAI;AACpC,QAAI,QAAQ,IAAI,UAAU,EAAG;AAC7B,YAAQ,IAAI,UAAU;AAEtB,QAAI,CAAI,cAAW,UAAU,KAAK,CAAI,YAAS,UAAU,EAAE,OAAO,EAAG;AACrE,UAAM,MAAS,gBAAa,YAAY,OAAO;AAC/C,UAAM,aAAa,wBAAwB,GAAG;AAE9C,eAAW,QAAQ,YAAY;AAC7B,UAAI,CAAC,KAAM;AACX,UAAI,KAAK,WAAW,GAAG,GAAG;AACxB,cAAM,WAAW,sBAAsB,YAAY,IAAI;AACvD,YAAI,SAAU,OAAM,KAAK,QAAQ;AACjC;AAAA,MACF;AAGA,UAAI,KAAK,WAAW,GAAG,EAAG;AAC1B,UAAI,gBAAgB,IAAI,EAAG;AAE3B,WAAK,IAAI,4BAA4B,IAAI,CAAC;AAAA,IAC5C;AAAA,EACF;AAGA,OAAK,OAAO,EAAE;AACd,OAAK,OAAO,MAAM;AAGlB,OAAK,OAAO,YAAY;AACxB,OAAK,OAAO,oBAAoB;AAChC,OAAK,OAAO,qBAAqB;AACjC,OAAK,OAAO,qBAAqB;AACjC,SAAO;AACT;AAGA,SAAS,kBAA+C;AACtD,QAAM,MAAM,QAAQ,IAAI,sBAAsB,YAAY;AAC1D,MAAI,QAAQ,aAAa,QAAQ,gBAAiB,QAAO;AACzD,SAAO;AACT;AAQA,SAAS,sBACP,aACA,cACa;AACb,QAAM,WAAW,IAAI,IAAI,WAAW;AACpC,WAAS,OAAO,SAAS;AACzB,WAAS,OAAO,gBAAgB;AAChC,MAAI,iBAAiB,UAAW,UAAS,IAAI,SAAS;AAAA,MACjD,UAAS,IAAI,gBAAgB;AAClC,MAAI,YAAY,IAAI,SAAS,EAAG,UAAS,IAAI,SAAS;AACtD,SAAO;AACT;AAEA,SAAS,qBAAqB,aAAqB,MAA2C;AAC5F,QAAM,aACJ,aAAuB,UAAK,aAAa,cAAc,CAAC,KAAK,CAAC;AAChE,QAAM,cAAsC,WAAW,gBAAgB,CAAC;AACxE,QAAM,iBAAyC,WAAW,mBAAmB,CAAC;AAG9E,QAAM,WAAW,iBAAiB,WAAW;AAC7C,QAAM,YACJ,aAAuB,UAAK,UAAU,YAAY,aAAa,cAAc,CAAC,KAC9E,CAAC;AACH,QAAM,eACJ;AAAA,IACO,UAAK,UAAU,YAAY,iBAAiB,cAAc;AAAA,EACjE,KAAK,CAAC;AAER,QAAM,gBAAwC;AAAA,IAC5C,GAAI,UAAU,gBAAgB,CAAC;AAAA,IAC/B,GAAI,UAAU,mBAAmB,CAAC;AAAA,IAClC,GAAI,aAAa,gBAAgB,CAAC;AAAA,IAClC,GAAI,aAAa,mBAAmB,CAAC;AAAA,EACvC;AAEA,QAAM,MAA8B,CAAC;AACrC,aAAW,OAAO,MAAM,KAAK,IAAI,EAAE,KAAK,GAAG;AACzC,UAAM,QACJ,YAAY,GAAG,KACf,eAAe,GAAG,KAClB,cAAc,GAAG;AAGnB,QAAI,OAAO;AACT,UAAI,GAAG,IAAI,OAAO,KAAK;AAAA,IACzB;AAA
A,EACF;AAEA,SAAO;AACT;AAoEO,SAAS,4BAA4B,WAA2B;AACrE,QAAM,mBAAmB,UAAU,QAAQ,MAAM,EAAE,EAAE,MAAM,GAAG,EAAE;AAChE,SAAO,KAAK,gBAAgB;AAC9B;AAKA,SAAS,sBAA4B;AAGnC,MAAI;AACF,aAAS,iBAAiB,EAAE,OAAO,SAAS,CAAC;AAAA,EAC/C,SAAS,OAAO;AACd,YAAQ,MAAM,MAAM,IAAI,6CAAwC,CAAC;AACjE,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAKA,eAAe,YAAY,SAAiB,UAAiC;AAC3E,QAAM,UAAe,UAAK,QAAQ,gBAAgB,EAAE,QAAQ,OAAO,GAAG;AACtE,QAAM,QAAQ,MAAM,KAAK,OAAO;AAEhC,QAAM,UAAwB,CAAC;AAE/B,aAAW,YAAY,OAAO;AAC5B,QAAI;AAGF,UAAI;AACJ,UAAI;AAMJ,UAAI,CAAC,UAAU;AACb,cAAM,UAAa,gBAAa,UAAU,OAAO;AAGjD,cAAM,UAAU,QAAQ,MAAM,qEAAqE;AACnG,YAAI,CAAC,SAAS;AACZ,kBAAQ,KAAK,MAAM,OAAO,0BAAgB,QAAQ,sBAAsB,CAAC;AACzE;AAAA,QACF;AACA,mBAAW,QAAQ,CAAC;AAAA,MACtB;AAIA,YAAM,eAAoB,cAAS,QAAQ,QAAQ;AACnD,YAAM,aAAkB,aAAQ,YAAY;AAC5C,YAAM,cAAmB,cAAS,cAAc,YAAY;AAC5D,YAAM,cAAmB,UAAK,YAAY,YAAY,GAAG,WAAW,EAAE,EAAE,QAAQ,OAAO,GAAG;AAE1F,cAAQ,KAAK;AAAA,QACX,IAAI;AAAA,QACJ;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,SAAS,OAAO;AACd,cAAQ,MAAM,MAAM,IAAI,2BAAsB,QAAQ,GAAG,GAAG,KAAK;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,WAAW,SAAiB,UAAgC;AACzE,QAAM,OAAO,OAAO,QAAQ,OAAO,GAAG;AACtC,QAAM,UAAU,GAAG,IAAI;AACvB,QAAM,QAAQ,MAAM,KAAK,OAAO;AAEhC,QAAM,SAAsB,CAAC;AAE7B,aAAW,YAAY,OAAO;AAC5B,QAAI;AACF,YAAM,UAAa,gBAAa,UAAU,OAAO;AAEjD,YAAM,UAAU,QAAQ,MAAM,2DAA2D;AACzF,UAAI,CAAC,SAAS;AACZ,gBAAQ,KAAK,MAAM,OAAO,0BAAgB,QAAQ,0CAA0C,CAAC;AAC7F;AAAA,MACF;AACA,YAAM,UAAU,QAAQ,CAAC;AAEzB,YAAM,QAAyB,CAAC;AAChC,YAAM,aAAa,QAAQ,MAAM,yBAAyB;AAC1D,UAAI,YAAY;AACd,cAAM,WAAW,WAAW,CAAC;AAG7B,cAAM,YAAY;AAClB,YAAI;AACJ,gBAAQ,IAAI,UAAU,KAAK,QAAQ,OAAO,MAAM;AAC9C,gBAAM,KAAK;AAAA,YACT,UAAU,EAAE,CAAC;AAAA,YACb,cAAc,EAAE,CAAC,IAAI,SAAS,EAAE,CAAC,GAAG,EAAE,IAAI;AAAA,YAC1C,kBAAkB,EAAE,CAAC;AAAA,UACvB,CAAC;AAAA,QACH;AAAA,MACF;AAEA,UAAI;AAEJ,YAAM,6BAA6B,QAAQ,QAAQ,eAAe,EAAE;AACpE,YAAM,mBAAmB,2BAA2B,MAAM,8BAA8B;AACxF,YAAM,mBAAmB,2BAA2B,MAAM,4CAA4C;AACtG,UAAI,kBAAkB;AACpB,mBAAW,iBAAiB,CAAC;AAAA,MAC/B,WAAW,kBAAkB;AAC3B,YAAI;AACF,qBAAW,IAAI,SAAS,YAAY,iBAAiB,CAAC,CAAC,EAAE;AAAA,QAC3D,QAAQ;AACN,qBAAW;AAAA,QACb;AAAA,MACF;AAEA,aAAO,KAAK,EAAE,IAAI,SAAS,UAAU,OAAO,SAAS,CAAC;AAAA,IACxD,SAAS,OAAO;AACd,cAAQ,MAAM,MAAM,IAAI,2BAAsB,QAAQ,GAAG,GAAG,KAAK;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,sBAAsB,QAAqB,WAAmB,aAA2B;AAChG,QAAM,eAAoB,UAAK,WAAW,WAAW;AACrD,MAAI,CAAI,cAAW,YAAY,GAAG;AAChC,IAAG,aAAU,cAAc,EAAE,WAAW,KAAK,CAAC;AAAA,EAChD;AAEA,QAAM,YAAiB,cAAS,cAAc,WAAW,EAAE,QAAQ,OAAO,GAAG;AAC7E,QAAM,oBAA8B,CAAC;AACrC,QAAM,sBAAgC,CAAC;AACvC,QAAM,oBAAoB,OAAO;AAAA,IAC/B,CAAC,MAAM,EAAE,OAAO,KAAK,CAAC,MAAM,EAAE,gBAAgB;AAAA,EAChD;AACA,WAAS,IAAI,GAAG,IAAI,kBAAkB,QAAQ,KAAK;AACjD,UAAM,IAAI,kBAAkB,CAAC;AAC7B,UAAM,WAAW,YAAY,MAAM,EAAE,SAAS,QAAQ,OAAO,GAAG,GAAG,QAAQ,SAAS,EAAE;AACtF,UAAM,SAAS,EAAE,GAAG,QAAQ,iBAAiB,EAAE;AAC/C,sBAAkB,KAAK,qBAAqB,MAAM,eAAe,OAAO,KAAK;AAC7E,wBAAoB,KAAK,MAAM,EAAE,EAAE,kBAAkB,MAAM,GAAG;AAAA,EAChE;AACA,QAAM,oBACJ,kBAAkB,SAAS,IACvB;AAAA,EACN,kBAAkB,KAAK,IAAI,CAAC;AAAA;AAAA,EAE5B,oBAAoB,KAAK,IAAI,CAAC;AAAA;AAAA,IAGxB;AAAA;AAAA;AAIN,QAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA,EAIxB,iBAAiB;AAAA;AAAA,iBAEF,KAAK,UAAU,OAAO,IAAI,CAAC,OAAO,EAAE,IAAI,EAAE,IAAI,OAAO,EAAE,OAAO,UAAU,EAAE,SAAS,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0B/G,QAAM,eAAoB,UAAK,cAAc,0BAA0B;AACvE,EAAG,iBAAc,cAAc,eAAe;AAC9C,UAAQ,IAAI,MAAM,MAAM,oCAA+B,YAAY,EAAE,CAAC;AAIxE;AAKA,SAAS,mBAAmB,QAAkC;AAC5D,QAAM,MAAM,oBAAI,IAAY;AAC5B,aAAW,KAAK,QAAQ;AACtB,eAAW,QAAQ,EAAE,OAAO;AAC1B,UAAI,IAAI,KAAK,QAAQ;AAAA,IACvB;AAAA,EACF;AACA,SAAO;AACT;AAKA,SAAS,kBACP,WACA,QACA,SAC0B;AAC1B,QAAM,SAAS,IAAI,IAAI,SAAS;AAChC,QAAM,YAA
Y,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;AAElD,aAAW,SAAS,QAAQ;AAC1B,aAAS,IAAI,GAAG,IAAI,MAAM,MAAM,SAAS,GAAG,KAAK;AAC/C,YAAM,eAAe,MAAM,MAAM,CAAC,EAAE;AACpC,YAAM,aAAa,MAAM,MAAM,IAAI,CAAC,EAAE;AACtC,UAAI,CAAC,UAAU,IAAI,UAAU,EAAG;AAChC,UAAI,UAAU,OAAO,IAAI,YAAY;AACrC,UAAI,CAAC,SAAS;AACZ,kBAAU,oBAAI,IAAY;AAC1B,eAAO,IAAI,cAAc,OAAO;AAAA,MAClC;AACA,cAAQ,IAAI,UAAU;AAAA,IACxB;AAAA,EACF;AACA,SAAO;AACT;AAKA,eAAe,iBACb,SACA,WACA,SAAsB,CAAC,GACR;AACf,QAAM,cAAmB,UAAK,WAAW,UAAU;AACnD,QAAM,gBAAqB,UAAK,aAAa,SAAS;AACtD,QAAM,kBAAkB,mBAAmB,MAAM;AAGjD,MAAO,cAAW,aAAa,GAAG;AAChC,IAAG,UAAO,eAAe,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAC3D;AACA,EAAG,aAAU,aAAa,EAAE,WAAW,KAAK,CAAC;AAC7C,EAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAE/C,aAAW,UAAU,SAAS;AAG5B,UAAM,cAAmB,UAAK,aAAa,OAAO,YAAY,QAAQ,aAAa,EAAE,IAAI,KAAK;AAC9F,UAAM,aAAkB,aAAQ,WAAW;AAE3C,QAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,MAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,IAC9C;AASA,UAAM,iBAAsB,aAAQ,WAAW;AAC/C,UAAM,gBAAqB,aAAQ,OAAO,QAAQ;AAGlD,QAAI,qBAA0B,cAAc,aAAQ,cAAc,GAAG,aAAa;AAGlF,QAAI,CAAC,mBAAmB,WAAW,GAAG,GAAG;AACvC,2BAAqB,OAAO;AAAA,IAC9B;AAGA,yBAAqB,mBAAmB,QAAQ,SAAS,EAAE;AAE3D,yBAAqB,mBAAmB,MAAW,QAAG,EAAE,KAAK,GAAG;AAGhE,UAAM,cAAiB,gBAAa,OAAO,UAAU,OAAO;AAC5D,UAAM,gBAAgB,kCAAkC,KAAK,WAAW;AACxE,UAAM,cAAc,YAAY,MAAM,iDAAiD;AACvF,UAAM,aAAa,cAAc,YAAY,CAAC,IAAI;AAGlD,UAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAE3D,UAAM,YAAY,gBACd,yBACA,gBAAgB,UAAU;AAE9B,UAAM,UAAU,gBAAgB,IAAI,OAAO,EAAE;AAC7C,UAAM,kBACH,cAAc,aAAa,aAAQ,WAAW,CAAC,GAAQ,UAAK,WAAW,aAAa,uBAAuB,CAAC,EAC5G,MAAW,QAAG,EACd,KAAK,GAAG;AACX,UAAM,qBAAqB,gBAAgB,WAAW,GAAG,IAAI,kBAAkB,OAAO;AAEtF,UAAM,kBAAkB,UACpB;AAAA;AAAA;AAAA,kCAG0B,kBAAkB;AAAA,iCACnB,kBAAkB;AAAA;AAAA;AAAA;AAAA,sBAI7B,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAyCvB;AAAA;AAAA,iCAEyB,kBAAkB;AAAA;AAAA;AAAA;AAAA,sBAI7B,SAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyB3B,UAAM,mBAAmB;AACzB,IAAG,iBAAc,eAAe,gBAAgB;AAGhD,QAAI;AAGF,YAAM,qBAAqC;AAAA,QACzC,MAAM;AAAA,QACN,MAAMC,QAAO;AACX,UAAAA,OAAM,MAAM,OAAO,WAAW;AAC5B,gBAAI,OAAO,OAAO,SAAS,EAAG;AAG9B,gBAAI,cAAiB,gBAAa,aAAa,OAAO;AACtD,gBAAI,WAAW;AAMf,kBAAM,UAAU;AAEhB,gBAAI,QAAQ,KAAK,WAAW,GAAG;AAC7B,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAIA,gBAAI,YAAY,SAAS,iBAAiB,GAAG;AAC3C,4BAAc,YAAY;AAAA,gBACxB;AAAA,gBACA;AAAA,cACF;AACA,yBAAW;AAAA,YACb;AAGA,kBAAM,sBAAsB;AAC5B,0BAAc,YAAY;AAAA,cACxB;AAAA,cACA;AAAA,YACF;AACA,gBAAI,gBAAgB,oBAAqB,YAAW;AAEpD,gBAAI,UAAU;AACZ,cAAG,iBAAc,aAAa,aAAa,OAAO;AAAA,YACpD;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAc,cAAM;AAAA,QAClB,aAAa,CAAC,aAAa;AAAA,QAC3B,QAAQ;AAAA,QACR,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR,SAAS;AAAA;AAAA;AAAA,QAGT,UAAU;AAAA,UACR;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA;AAAA;AAAA,QAGA,QAAQ;AAAA,UACN,sBAAsB;AAAA,QACxB;AAAA;AAAA;AAAA,QAGA,UAAU;AAAA,QACV,SAAS,CAAC,kBAAkB;AAAA,QAC5B,UAAU;AAAA,MACZ,CAAC;AAGD,MAAG,cAAW,aAAa;AAAA,IAE7B,SAAS,OAAO;AACd,cAAQ,MAAM,MAAM,IAAI,8BAA8B,OAAO,EAAE,GAAG,GAAG,KAAK;AAAA,IAE5E;AAAA,EACF;AACA,UAAQ,IAAI,MAAM,MAAM,oBAAe,QAAQ,MAAM,mBAAmB,CAAC;AAC3E;AAEA,SAAS,oBAAoB,WAAmB,aAAqB,OAAe,QAAsB;AACxG,QAAM,SAAc,UAAK,WAAW,YAAY,KAAK;AACrD,QAAM,cAAmB,UAAK,QAAQ,SAAS;AAC/C,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA
;AAAA;AAAA;AAAA;AAAA,gCAuBO,MAAM;AAAA,+BACP,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAoJd,WAAW;AAAA,gBACjB,KAAK;AAAA,iBACJ,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAerB,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,MAAM,MAAM,oCAA+B,CAAC;AAC1D;AAEA,SAAS,uBAAuB,WAAmB,aAA2B;AAC5E,QAAM,SAAc,UAAK,WAAW,YAAY,KAAK;AACrD,QAAM,cAAmB,UAAK,QAAQ,oBAAoB;AAC1D,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAQF,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0FhD,EAAG,iBAAc,eAAe,cAAc;AAE9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,IACV,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,MAAM,MAAM,2CAAsC,CAAC;AACjE;AAMA,SAAS,qBACP,WACA,OACA,aACM;AAEN,QAAM,cAAc,MAAM,GAAG,QAAQ,kBAAkB,GAAG,EAAE,QAAQ,OAAO,GAAG;AAC9E,QAAM,YAAiB,UAAK,WAAW,YAAY,QAAQ;AAC3D,QAAM,cAAmB,UAAK,WAAW,GAAG,WAAW,KAAK;AAC5D,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,gBAAgB,MAAM,MAAM,CAAC,GAAG;AACtC,MAAI,CAAC,cAAe;AAEpB,QAAM,iBAAiB;AAAA,6CACoB,MAAM,EAAE;AAAA;AAAA,8DAES,MAAM,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAMnD,KAAK,UAAU,MAAM,EAAE,CAAC;AAAA,0BACjB,KAAK,UAAU,aAAa,CAAC;AAAA,uBAChC,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA+FhD,EAAG,iBAAc,eAAe,cAAc;AAC9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU,CAAC,WAAW,UAAU,2BAA2B,
qBAAqB;AAAA,IAChF,UAAU;AAAA,IACV,UAAU;AAAA,EACZ,CAAC;AACD,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,MAAM,MAAM,sCAAiC,MAAM,EAAE,EAAE,CAAC;AACtE;AAKA,SAAS,6BACP,WACA,SACA,aACA,SAAsB,CAAC,GACjB;AAEN,QAAM,SAAc,UAAK,WAAW,YAAY,KAAK;AACrD,QAAM,cAAmB,UAAK,QAAQ,mBAAmB;AACzD,QAAM,gBAAgB,YAAY,QAAQ,OAAO,UAAU;AAC3D,QAAM,aAAkB,aAAQ,WAAW;AAE3C,MAAI,CAAI,cAAW,UAAU,GAAG;AAC9B,IAAG,aAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BASM,KAAK,UAAU,QAAQ,IAAI,OAAK,EAAE,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA,iBAC7D,KAAK,UAAU,OAAO,IAAI,QAAM,EAAE,IAAI,EAAE,IAAI,OAAO,EAAE,OAAO,UAAU,EAAE,SAAS,EAAE,GAAG,MAAM,CAAC,CAAC;AAAA,uBACxF,KAAK,UAAU,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA6EhD,EAAG,iBAAc,eAAe,cAAc;AAG9C,EAAQ,kBAAU;AAAA,IAChB,aAAa,CAAC,aAAa;AAAA,IAC3B,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,UAAU;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,MACN,sBAAsB;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ,CAAC;AAED,EAAG,cAAW,aAAa;AAC3B,UAAQ,IAAI,MAAM,MAAM,yCAAoC,CAAC;AAC/D;AAKA,SAAS,YAAY,UAAkB,QAAgC;AACrE,QAAM,MAA8B,CAAC;AAErC,MAAI,CAAI,cAAW,OAAO,GAAG;AAC3B,YAAQ,KAAK,MAAM,OAAO,wCAA8B,OAAO,EAAE,CAAC;AAClE,WAAO;AAAA,EACT;AAEA,QAAM,UAAa,gBAAa,SAAS,OAAO;AAChD,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAEhC,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,CAAC,WAAW,QAAQ,WAAW,GAAG,EAAG;AAEzC,UAAM,QAAQ,QAAQ,MAAM,gBAAgB;AAC5C,QAAI,OAAO;AACT,YAAM,MAAM,MAAM,CAAC,EAAE,KAAK;AAC1B,YAAM,QAAQ,MAAM,CAAC,EAAE,KAAK,EAAE,QAAQ,gBAAgB,EAAE;AACxD,UAAI,GAAG,IAAI;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,sBAAsB,gBAA4B;AACzD,MAAI,CAAC,gBAAgB;AACnB,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAgB,CAAC;AAGvB,QAAM,YAAY,MAAM,QAAQ,cAAc,IAAI,iBAAiB,CAAC,cAAc;AAElF,aAAW,YAAY,WAAW;AAEhC,QAAI,OAAO,aAAa,UAAU;AAChC,aAAO,KAAK;AAAA,QACV;AAAA,MACF,CAAC;AACD;AAAA,IACF;AAGA,QAAI,OAAO,aAAa,YAAY,aAAa,MAAM;AACrD,YAAM,gBAAqB,EAAE,UAAU,CAAC,EAAE;AAG1C,UAAI,SAAS,MAAM;AACjB,YAAI,MAAM,QAAQ,SAAS,IAAI,GAAG;AAEhC,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC,OAAO;AAEL,wBAAc,SAAS,OAAO,SAAS;AAAA,QACzC;AAAA,MACF,OAAO;AAEL;AAAA,MACF;AAGA,UAAI,SAAS,YAAY,QAAW;AAClC,sBAAc,SAAS,UAAU,SAAS;AAAA,MAC5C;AACA,UAAI,SAAS,UAAU,QAAW;AAChC,sBAAc,SAAS,QAAQ,SAAS;AAAA,MAC1C;AACA,UAAI,SAAS,cAAc,QAAW;AACpC,sBAAc,SAAS,YAAY,SAAS;AAAA,MAC9C;AACA,UAAI,SAAS,qBAAqB,QAAW;AAC3C,sBAAc,SAAS,mBAAmB,SAAS;AAAA,MACrD;AACA,UAAI,SAAS,SAAS,QAAW;AAC/B,sBAAc,SAAS,OAAO,SAAS;AAAA,MACzC;AACA,UAAI,SAAS,gBAAgB,QAAW;AACtC,sBAAc,SAAS,cAAc,SAAS;AAAA,MAChD;AACA,UAAI,SAAS,WAAW,QAAW;AACjC,sBAAc,SAAS,SAAS,SAAS;AAAA,MAC3C;AACA,UAAI,SAAS,aAAa,QAAW;AACnC,sBAAc,SAAS,WAAW,SAAS;AAAA,MAC7C;AAIA,UAAI,OAAO,KAAK,cAAc,QAAQ,EAAE,WAAW,KAAK,cAAc,SAAS,MAAM;AAEnF,YAAI,OAAO,cAAc,SAAS,SAAS,UAAU;AACnD,iBAAO,KAAK;AAAA,YACV,UAAU,cAAc,SAAS;AAAA,UACnC,CAAC;AAAA,QACH,OAAO;AAEL,iBAAO,KAAK,aAAa;AAAA,QAC3B;AAAA,MACF,OAAO;AACL,eAAO,KAAK,aAAa;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,yBACP,SACA,OACA,QACA,SACA,aACA,YAAsC,oBAAI,IAAI,GAC9C,SAAsB,CAAC,GACL;AAElB,QAAM,YAA2C;AAAA,IAC/C,WAAW,CAAC;AAAA,IACZ,SAAS,CAAC;AAAA,EACZ;AAEA,QAAM,YAAiD,CAAC;AAGxD,QAAM,sBAA2B;AAAA,IAC/B,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AAGA,QAAM,eAAoC;AAAA,IACxC,OAAO,wBAAwB,KAAK;AAAA,IACpC,sBAAsB;AAAA,MACpB,UAAU;AAAA,MACV,YAAY;AAAA,MACZ,mBAAmB;AAAA,MACnB,kBAAkB;AAAA,MAClB,oBAAoB;AAAA,MACpB,YAAY;AAAA,MACZ,iBA
AiB;AAAA,MACjB,yBAAyB;AAAA,MACzB,WAAW;AAAA,MACX,aAAa;AAAA,IACf;AAAA,EACF;AAEA,aAAW,UAAU,SAAS;AAC5B,UAAM,YAAY,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC;AACtE,UAAM,iBAAiB,GAAG,SAAS,GAAG,KAAK;AAC3C,UAAM,eAAe,GAAG,SAAS,MAAM,KAAK;AAE5C,UAAM,SAAS,OAAO,cAAc;AACpC,UAAM,YACJ,OAAO,QAAQ,2BAA2B,WACtC,OAAO,yBACP;AACN,UAAM,eACJ,OAAO,QAAQ,qCAAqC,WAChD,OAAO,mCACP;AACN,UAAM,oBACJ,OAAO,QAAQ,sBAAsB,WACjC,OAAO,qBACN,OAAO,cAAc,WAAW,OAAO;AAC9C,UAAM,qBACJ,OAAO,QAAQ,oBAAoB,WAAW,OAAO,kBAAkB;AAEzE,UAAM,kBAAkB,KAAK,IAAI,GAAG,KAAK,MAAM,kBAAkB,CAAC;AAGlE,cAAU,UAAU,YAAY,IAAI;AAAA,MAClC,MAAM;AAAA,MACN,YAAY;AAAA,QACV,WAAW,oBAAoB,OAAO,EAAE,wCAAwC,KAAK;AAAA,QACrF,wBAAwB;AAAA,MAC1B;AAAA,IACF;AAEA,cAAU,UAAU,cAAc,IAAI;AAAA,MACpC,MAAM;AAAA,MACN,YAAY;AAAA;AAAA,QAEV,WAAW,oBAAoB,OAAO,EAAE,oCAAoC,KAAK;AAAA,QACjF,mBAAmB;AAAA,QACnB,wBAAwB;AAAA,QACxB,eAAe;AAAA,UACb,qBAAqB,EAAE,cAAc,CAAC,cAAc,KAAK,EAAE;AAAA,UAC3D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,cAAU,QAAQ,GAAG,cAAc,KAAK,IAAI;AAAA,MAC1C,aAAa,wBAAwB,OAAO,EAAE;AAAA,MAC9C,OAAO,EAAE,KAAK,eAAe;AAAA,MAC7B,QAAQ;AAAA,QACN,MAAM,oBAAoB,OAAO,EAAE;AAAA,MACrC;AAAA,IACF;AAEA,cAAU,KAAK,EAAE,cAAc,CAAC,gBAAgB,KAAK,EAAE,CAAC;AAAA,EAC1D;AAGA,QAAM,YAAiC,CAAC;AAExC,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAe,gBAAgB,UAAU,OAAO,EAAE;AAGxD,UAAM,SAAgB;AAAA,MACpB;AAAA,QACE,KAAK;AAAA,UACH,KAAK,EAAE,cAAc,CAAC,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE,CAAC,GAAG,KAAK,IAAI,KAAK,EAAE;AAAA,UAC7F,WAAW;AAAA,QACb;AAAA,MACF;AAAA,IACF;AAGA,QAAI,OAAO,cAAc,UAAU;AACjC,YAAM,iBAAiB,sBAAsB,OAAO,aAAa,QAAQ;AACzE,aAAO,KAAK,GAAG,cAAc;AAAA,IAC/B;AAEA,cAAU,YAAY,IAAI;AAAA;AAAA,MAExB,SAAS,GAAG,OAAO,WAAW;AAAA,MAC9B,SAAS,OAAO,cAAc,WAAW;AAAA,MACzC,YAAY,OAAO,cAAc,cAAc;AAAA,MAC/C;AAAA,IACF;AAEA,QAAI,OAAO,cAAc,QAAQ,QAAQ;AACvC,gBAAU,YAAY,EAAE,SAAS,OAAO,aAAa;AAAA,IACvD;AAGA,UAAM,UAAU,UAAU,IAAI,OAAO,EAAE;AACvC,QAAI,WAAW,QAAQ,OAAO,GAAG;AAC/B,YAAM,MAA2B,CAAC;AAClC,iBAAW,YAAY,SAAS;AAC9B,cAAM,eAAe,QAAQ,KAAK,CAAC,MAAM,EAAE,OAAO,QAAQ;AAC1D,YAAI,cAAc;AAChB,gBAAM,iBAAiB,cAAc,aAAa,GAAG,QAAQ,iBAAiB,EAAE,CAAC,GAAG,KAAK;AACzF,gBAAM,SAAS,oBAAoB,uBAAuB,QAAQ,CAAC;AACnE,cAAI,MAAM,IAAI,EAAE,KAAK,eAAe;AAAA,QACtC;AAAA,MACF;AACA,UAAI,OAAO,KAAK,GAAG,EAAE,SAAS,GAAG;AAC/B,kBAAU,YAAY,EAAE,cAAc;AAAA,MACxC;AAAA,IACF;AAAA,EACF;AAGA,YAAU,SAAS,IAAI;AAAA,IACrB,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,YAAU,eAAe,IAAI;AAAA,IAC3B,SAAS;AAAA,IACT,QAAQ;AAAA,MACN;AAAA,QACE,MAAM;AAAA,UACJ,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,aAAW,SAAS,QAAQ;AAC1B,UAAM,cAAc,MAAM,GAAG,QAAQ,kBAAkB,GAAG,EAAE,QAAQ,OAAO,GAAG;AAC9E,UAAM,SAAS,gBAAgB,SAAS,MAAM,EAAE;AAChD,UAAM,SAAgB;AAAA,MACpB;AAAA,QACE,MAAM;AAAA,UACJ,MAAM,UAAU,WAAW;AAAA,UAC3B,QAAQ;AAAA,UACR,MAAM;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,QAAI,MAAM,UAAU;AAClB,aAAO,KAAK,GAAG,sBAAsB,MAAM,QAAQ,CAAC;AAAA,IACtD;AACA,cAAU,MAAM,IAAI;AAAA,MAClB,SAAS,mBAAmB,WAAW;AAAA,MACvC,SAAS;AAAA,MACT,YAAY;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AAGA,QAAM,cAAsC,CAAC;AAC7C,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,qBAAqB;AAKrL,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAClD,QAAI,gBAAgB,KAAK,YAAU,IAAI,WAAW,MAAM,CAAC,GAAG;AAC1D,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAGA,YAAU,QAAQ,cAAc,IAAI;AAAA,IAClC,aAAa;AAAA,IACb,OAAO;AAAA,MACL,YAAY;AAAA,QACV;AAAA,QACA;AAAA,UACE;AAAA,UACA,EAAE,OAAO,oBAAoB;AAAA,UAC7B;AAAA,UACA,EAAE,OAAO,cAAc;AAAA,UACvB,uCAAuC,KAAK;AAAA,QAC9C;AAAA,MACF;AAAA,IACF;AAAA,EACF;AA
EA,SAAO;AAAA,IACL,SAAS;AAAA,IACT,SAAS;AAAA,MACP,wBAAwB;AAAA,MACxB,cAAc;AAAA;AAAA,MAEd,UAAU;AAAA,QACR;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IACA,QAAQ;AAAA,IACR,UAAU;AAAA,MACR,MAAM;AAAA,MACN,SAAS;AAAA,MACT;AAAA,MACA,kBAAkB;AAAA;AAAA,MAElB,OAAO,wBAAwB,KAAK;AAAA,MACpC,aAAa;AAAA,MACb,KAAK;AAAA,QACH,MAAM;AAAA,UACJ,YAAY;AAAA,YACV;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ;AAAA,gBACN;AAAA,gBACA;AAAA,gBACA;AAAA,gBACA;AAAA,cACF;AAAA,cACA,UAAU;AAAA,YACZ;AAAA,YACA;AAAA,cACE,QAAQ;AAAA,cACR,QAAQ,CAAC,iBAAiB;AAAA;AAAA,cAE1B,UAAU;AAAA,YACZ;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,IACA,SAAS,CAAC,oBAAoB;AAAA,IAC9B;AAAA,IACA;AAAA,EACF;AACF;AAKA,eAAe,mBACb,OACA,QACA,WACe;AACf,QAAM,gBAAqB,UAAK,WAAW,aAAa;AACxD,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAKA,QAAM,UAAU,MAAM,YAAY;AAGlC,QAAM,YAAY,qBAAqB,KAAK,IAAI,KAAK;AACrD,MAAI,YAAkE,CAAC;AAEvE,QAAM,UAAU,IAAI,oCAAoC,EAAE,MAAM;AAEhE,MAAI;AAEF,UAAM,SAAS;AAAA,MACb,mDAAmD,SAAS,aAAa,MAAM;AAAA,MAC/E,EAAE,UAAU,SAAS,OAAO,OAAO;AAAA,IACrC;AAEA,UAAM,UAAU,KAAK,MAAM,MAAM;AACjC,UAAM,YAAoC,CAAC;AAE3C,eAAWC,WAAU,SAAS;AAC5B,YAAM,MAAMA,QAAO;AACnB,UAAI,OAAO,IAAI,SAAS,KAAK,GAAG;AAC9B,cAAM,WAAW,IAAI,QAAQ,eAAe,EAAE,EAAE,QAAQ,OAAO,EAAE,EAAE,YAAY;AAG/E,kBAAU,GAAG,IAAIA,QAAO;AAAA,MAC1B;AAAA,IACF;AAGA,eAAW,UAAU,SAAS;AAC5B,YAAM,cAAc,OAAO,GAAG,QAAQ,iBAAiB,EAAE;AACzD,YAAM,WAAW,cAAc,WAAW,GAAG,KAAK;AAGlD,YAAM,cAAc,OAAO,KAAK,SAAS,EAAE,KAAK,OAAK,EAAE,YAAY,MAAM,SAAS,YAAY,CAAC;AAE/F,UAAI,eAAe,UAAU,WAAW,GAAG;AACzC,kBAAU,OAAO,EAAE,IAAI;AAAA,UACrB,UAAU,UAAU,WAAW;AAAA,UAC/B;AAAA,QACF;AAAA,MACF;AAAA,IACF;AACA,YAAQ,QAAQ,gCAAgC;AAAA,EAClD,SAAS,OAAO;AACd,YAAQ,KAAK,yEAAyE;AACtF,eAAW,UAAU,SAAS;AAC5B,gBAAU,OAAO,EAAE,IAAI;AAAA,QACrB,UAAU,eAAe,eAAe,kBAAkB,kBAAkB,IAAI,iBAAiB,IAAI,OAAO,EAAE,IAAI,KAAK;AAAA,QACvH;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,QAAM,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA,4BAKO,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC;AAAA;AAG5D,QAAM,UAAe,UAAK,eAAe,0BAA0B;AACnE,EAAG,iBAAc,SAAS,UAAU;AACpC,UAAQ,IAAI,MAAM,MAAM,iCAA4B,OAAO,EAAE,CAAC;AAChE;AAEA,eAAeD,OAAM,MAAW;AAC9B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AACxD,QAAM,SAAS,KAAK,SAAS,KAAK;AAElC,UAAQ,IAAI,MAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AAErF,QAAM,UAAU,IAAI,qBAAqB,EAAE,MAAM;AACjD,QAAM,UAAU,MAAM,YAAY,MAAM;AAExC,MAAI,QAAQ,WAAW,GAAG;AACxB,YAAQ,KAAK,mBAAmB;AAChC;AAAA,EACF;AACA,UAAQ,QAAQ,SAAS,QAAQ,MAAM,YAAY;AACnD,UAAQ,QAAQ,OAAK,QAAQ,IAAI,MAAM,KAAK,OAAO,EAAE,EAAE,KAAK,EAAE,QAAQ,GAAG,CAAC,CAAC;AAE3E,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,MAAI,CAAI,cAAW,aAAa,GAAG;AACjC,IAAG,aAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD;AAMA,QAAM,cAAc,MAAM;AAAA,IACxB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,IAC7B,QAAQ,IAAI;AAAA,EACd;AACA,QAAM,eAAe,gBAAgB;AACrC,QAAM,eAAe,sBAAsB,aAAa,YAAY;AACpE,QAAM,eAAe,qBAAqB,QAAQ,IAAI,GAAG,YAAY;AAGrE,QAAME,eAAc;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,aAAa;AAAA,IACb,SAAS;AAAA,IACT;AAAA,IACA,SAAS;AAAA,MACP,OAAO;AAAA,IACT;AAAA,IACA,iBAAiB;AAAA,MACf,YAAY;AAAA,MACZ,sBAAsB;AAAA,MACtB,uBAAuB;AAAA,IACzB;AAAA,EACF;AACA,EAAG;AAAA,IACI,UAAK,eAAe,cAAc;AAAA,IACvC,KAAK,UAAUA,cAAa,MAAM,CAAC;AAAA,EACrC;AAIA,QAAM,UAAU,YAAY;AAK5B,QAAM,mBAAmB,QAAQ,IAAI,CAAC,MAAM,EAAE,QAAQ;AACtD,QAAM,EAAE,aAAa,gBAAgB,eAAe,iBAAiB,IACnE,MAAM,0BAA0B,kBAAkB,QAAQ,IAAI,CAAC;AACjE,QAAM,oBAAoB,oBAAI,IAAY;AAAA,IACxC,GAAG,MAAM,KAAK,cAAc;AAAA,IAC5B,GAAG,MAAM,KAAK,gBAAgB;AAAA,EAChC,CAAC;AAGD,QAAM,cAAc,MAAM,KAAK,cAAc,EAAE,KAAK;AACpD,QAAM,gBAAgB,MAAM,KAAK,gBAAgB,EAAE,KAAK;AACxD,QAAM,oBAAoB,MAAM,KAAK,iBAAiB,EACnD,OAAO,CAAC,MAAM,EAAE,KAAK,QAAQ,EAC7B,KAAK;AACR,MAAI,YAAY,UAAU,cAA
c,QAAQ;AAC9C,YAAQ;AAAA,MACN,MAAM;AAAA,QACJ,8DAAoD,YAAY,MAAM,eAAe,cAAc,MAAM;AAAA,MAC3G;AAAA,IACF;AACA,QAAI,kBAAkB,SAAS,GAAG;AAChC,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ,yGAA+F,kBAC5F,MAAM,GAAG,EAAE,EACX,KAAK,IAAI,CAAC,GAAG,kBAAkB,SAAS,KAAK,SAAS,EAAE;AAAA,QAC7D;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,cAAe,KAAK,cAAc,GAA0B,KAAK,KAAK,qBAAqB,KAAK;AAGpG,QAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,MAAO,cAAW,gBAAgB,GAAG;AACnC,QAAI;AACF,YAAM,iBAAiB,KAAK,MAAS,gBAAa,kBAAkB,OAAO,CAAC;AAC5E,UAAI,eAAe,WAAW;AAE5B,YAAI,CAAE,KAAK,cAAc,GAA0B,KAAK,GAAG;AACzD,wBAAc,4BAA4B,eAAe,SAAS;AAAA,QACpE;AACA,gBAAQ,IAAI,MAAM,KAAK,wDAA8C,WAAW,EAAE,CAAC;AAAA,MACrF;AAAA,IACF,SAAS,OAAO;AACd,cAAQ,KAAK,MAAM,OAAO,yEAA+D,CAAC;AAAA,IAC5F;AAAA,EACF;AAEA,QAAM,SAAS,MAAM,WAAW,MAAM;AACtC,MAAI,OAAO,SAAS,GAAG;AACrB,YAAQ,IAAI,MAAM,KAAK,uBAAa,OAAO,MAAM,cAAc,OAAO,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,CAAC;AACpG,0BAAsB,QAAQ,eAAe,QAAQ,IAAI,CAAC;AAAA,EAC5D;AAEA,MAAI,wBAAwB,EAAE,MAAM,EAAE,QAAQ,oBAAoB;AAClE,QAAM,iBAAiB,SAAS,eAAe,MAAM;AAGrD,QAAM,iBAAiB,IAAI,oDAAoD,EAAE,MAAM;AACvF,aAAW,UAAU,SAAS;AAC5B,QAAI;AACF,YAAM,cAAmB,UAAK,eAAe,OAAO,cAAc,KAAK;AACvE,UAAO,cAAW,WAAW,GAAG;AAE9B,cAAM,aAAa,cAAmB,aAAQ,WAAW,CAAC,EAAE;AAE5D,YAAI;AAGF,gBAAM,SAAS,MAAM,OAAO;AAG5B,cAAI,OAAO,sBAAsB;AAC/B,mBAAO,eAAe,OAAO;AAC7B,gBAAI,OAAO,qBAAqB,QAAQ,QAAQ;AAC9C,sBAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,WAAW,OAAO,qBAAqB,OAAO,MAAM,WAAW,CAAC;AAAA,YACzG;AAAA,UACF,OAAO;AACL,mBAAO,eAAe,OAAO,gBAAgB,EAAE,SAAS,KAAK,YAAY,IAAI;AAC7E,oBAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,6DAA6D,CAAC;AAAA,UACvG;AAAA,QACF,SAAS,aAAkB;AAIzB,kBAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,mDAAmD,aAAa,SAAS,MAAM,GAAG,EAAE,KAAK,eAAe,MAAM,CAAC;AAGtJ,cAAI;AACF,kBAAM,gBAAmB,gBAAa,OAAO,UAAU,OAAO;AAE9D,kBAAM,oBAAoB,cAAc,MAAM,uDAAuD;AACrG,gBAAI,mBAAmB;AAErB,kBAAI,YAAY,kBAAkB,CAAC,EAChC,QAAQ,qBAAqB,EAAE,EAC/B,QAAQ,sBAAsB,IAAI;AAGrC,oBAAM,YAAY,IAAI,SAAS,YAAY,SAAS,EAAE;AACtD,kBAAI,cAAc,UAAU,UAAU,UAAU,WAAW,UAAU,cAAc,UAAU,WAAW;AACtG,uBAAO,eAAe;AACtB,oBAAI,UAAU,QAAQ,QAAQ;AAC5B,0BAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,WAAW,UAAU,OAAO,MAAM,4BAA4B,CAAC;AAAA,gBACxG;AACA,oBAAI,UAAU,UAAU;AACtB,0BAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,gCAAgC,CAAC;AAAA,gBAC1E;AAAA,cACF;AAAA,YACF;AAAA,UACF,SAAS,eAAe;AAEtB,mBAAO,eAAe,OAAO,gBAAgB,EAAE,SAAS,KAAK,YAAY,IAAI;AAC7E,oBAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,qDAAqD,CAAC;AAAA,UAC/F;AAAA,QACF;AAAA,MACF,OAAO;AACL,eAAO,eAAe,OAAO,gBAAgB,EAAE,SAAS,KAAK,YAAY,IAAI;AAC7E,gBAAQ,KAAK,MAAM,OAAO,YAAO,OAAO,EAAE,6BAA6B,WAAW,kBAAkB,CAAC;AAAA,MACvG;AAEA,UAAI,CAAC,OAAO,cAAc;AACxB,eAAO,eAAe,EAAE,SAAS,KAAK,YAAY,IAAI;AACtD,gBAAQ,IAAI,MAAM,KAAK,YAAO,OAAO,EAAE,wBAAwB,CAAC;AAAA,MAClE;AAAA,IACF,SAAS,OAAY;AACnB,aAAO,eAAe,OAAO,gBAAgB,EAAE,SAAS,KAAK,YAAY,IAAI;AAC7E,cAAQ,KAAK,MAAM,OAAO,YAAO,OAAO,EAAE,+BAA+B,OAAO,WAAW,KAAK,kBAAkB,CAAC;AAAA,IACrH;AAAA,EACF;AACA,iBAAe,QAAQ,mBAAmB;AAE1C,+BAA6B,eAAe,SAAS,aAAa,MAAM;AACxE,sBAAoB,eAAe,aAAa,OAAO,MAAM;AAC7D,yBAAuB,eAAe,WAAW;AAEjD,aAAW,SAAS,QAAQ;AAC1B,yBAAqB,eAAe,OAAO,WAAW;AAAA,EACxD;AAEA,MAAI,YAAY,MAAM,uBAAuB,SAAS,QAAQ,IAAI,CAAC;AACnE,cAAY,kBAAkB,WAAW,QAAQ,OAAO;AACxD,QAAM,SAAS,yBAAyB,SAAS,OAAO,QAAQ,SAAS,aAAa,WAAW,MAAM;AAIvG,QAAM,WAAc,cAAW,gBAAgB,IAAI,SAAS;AAC5D,QAAM,cAAsC;AAAA,IAC1C,aAAa;AAAA,IACb,OAAO;AAAA,IACP,UAAU;AAAA,EACZ;AACA,QAAM,kBAAkB,CAAC,WAAW,cAAc,aAAa,YAAY,UAAU,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,qBAAqB;AAErL,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,OAAO,GAAG;AAGlD,QAAI,IAAI,WAAW,MAAM,EAAG;AAI5B,QAAI,gBAAgB,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,KAAK,kBAAkB,IAAI,GAAG,GAAG;AAC1F,kBAAY,GAAG,IAAI;AAAA,IACrB;AAAA,EACF;AAEA,EAAG;AAAA,IACI,UAAK,eAAe,UAAU;AAAA,IACnC,KAAK,UAAU,aAAa,MAAM,CAAC;AAAA,EACrC;AAEA,QAAM,cAAmB,UAAK,QAA
Q,EAAE,QAAQ,EAAE,CAAC;AACnD,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAC1D,EAAG,iBAAc,UAAU,WAAW;AACtC,UAAQ,IAAI,MAAM,MAAM,oCAA+B,QAAQ,EAAE,CAAC;AACpE;AAEA,eAAe,OAAO,MAAW;AAC/B,QAAM,QAAQ,KAAK,SAAS,QAAQ,IAAI,SAAS;AACjD,QAAM,SAAS,KAAK,UAAU,QAAQ,IAAI,cAAc;AAExD,QAAM,aAAa,KAAK,cAAc,KAAK,aAAa,KAAK;AAC7D,QAAM,cAAc,KAAK,eAAe,KAAK,cAAc,KAAK;AAEhE,MAAI,YAAY;AACd,YAAQ,IAAI,MAAM,OAAO,wDAA8C,CAAC;AACxE;AAAA,EACF;AAEA,QAAM,gBAAqB,UAAK,QAAQ,IAAI,GAAG,qBAAqB;AACpE,QAAM,WAAgB,UAAK,eAAe,gBAAgB;AAE1D,MAAI,CAAI,cAAW,QAAQ,GAAG;AAC5B,YAAQ,MAAM,MAAM,IAAI,qDAAgD,CAAC;AACzE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,IAAI,MAAM,KAAK,sCAA+B,KAAK,aAAa,MAAM,MAAM,CAAC;AACrF,sBAAoB;AAEpB,MAAI;AAGF,QAAI,CAAC,eAAe,CAAI,cAAgB,UAAK,eAAe,cAAc,CAAC,GAAG;AAC5E,cAAQ,IAAI,MAAM,KAAK,iDAA0C,CAAC;AAClE,eAAS,eAAe;AAAA,QACtB,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAGA,UAAM,mBAAwB,UAAK,QAAQ,IAAI,GAAG,eAAe;AACjE,QAAO,cAAW,gBAAgB,GAAG;AACnC,cAAQ,IAAI,MAAM,KAAK,oEAA0D,CAAC;AAGlF,MAAG,gBAAa,kBAAuB,UAAK,eAAe,eAAe,CAAC;AAG3E,YAAM,UAAU,YAAY;AAG5B,eAAS,4BAA4B;AAAA,QACnC,KAAK;AAAA,QACL,OAAO;AAAA,MACT,CAAC;AACD,cAAQ,IAAI,MAAM,MAAM,2CAAsC,CAAC;AAE/D;AAAA,IACF;AAEA,aAAS,yBAAyB;AAAA,MAChC,KAAK;AAAA,MACL,OAAO;AAAA,MACP,KAAK;AAAA,QACH,GAAG,QAAQ;AAAA,QACX,OAAO;AAAA,QACP,YAAY;AAAA,MACd;AAAA,IACF,CAAC;AACD,YAAQ,IAAI,MAAM,MAAM,6BAAwB,CAAC;AAAA,EACnD,SAAS,OAAO;AACd,YAAQ,MAAM,MAAM,IAAI,0BAAqB,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,mBAAmB,OAAO,QAAQ,aAAa;AACvD;AAEO,IAAM,cAAc,IAAI,QAAQ,EACpC,KAAK,MAAM,EACX,YAAY,4CAA4C,EACxD,OAAO,uBAAuB,oBAAoB,MAAM,EACxD,OAAO,yBAAyB,cAAc,WAAW,EACzD,OAAO,oBAAoB,2CAA2C,QAAQ,EAC9E,OAAO,yBAAyB,0EAA0E,EAC1G,OAAO,iBAAiB,+BAA+B,KAAK,EAC5D,OAAO,kBAAkB,4CAA4C,KAAK,EAC1E,OAAO,OAAO,YAAY;AACzB,QAAMF,OAAM,OAAO;AACnB,QAAM,OAAO,OAAO;AACtB,CAAC;;;ACv3EH,SAAS,WAAAG,gBAAe;AACxB,YAAYC,SAAQ;AACpB,YAAYC,WAAU;AACtB,OAAOC,YAAW;AAClB,OAAOC,UAAS;AAChB,OAAO,aAAa;AAEpB,IAAM,qBAAqB;AAC3B,IAAM,qBAAqB;AAI3B,SAAS,eACP,aACA,IACA,SACQ;AACR,QAAM,MAAW,cAAQ,aAAa,QAAQ,OAAO,kBAAkB;AACvE,MAAI,CAAI,eAAW,GAAG,GAAG;AACvB,IAAG,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AACA,QAAM,aAAa,GAAG,KAAK,EAAE,QAAQ,oBAAoB,GAAG;AAC5D,QAAM,WAAgB,WAAK,KAAK,GAAG,UAAU,YAAY;AAEzD,QAAM,UAAU,OAAO,QAAQ,WAAW,KAAK,KAAK;AACpD,QAAM,aAAa,OAAO,QAAQ,UAAU,KAAK,KAAK;AACtD,QAAM,eAAe,QAAQ,WACzB,gBAAgB,QAAQ,QAAQ;AAAA,IAChC;AAEJ,QAAM,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,aAgBN,OAAO;AAAA,gBACJ,UAAU;AAAA,EACxB,YAAY;AAAA;AAAA;AAAA,SAGL,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkBT,EAAG,kBAAc,UAAU,UAAU,OAAO;AAC5C,SAAY,eAAS,aAAa,QAAQ;AAC5C;AAEA,SAAS,cAAc,aAAqB,IAAY,SAAmC;AACzF,QAAM,MAAW,cAAQ,aAAa,QAAQ,OAAO,kBAAkB;AACvE,MAAI,CAAI,eAAW,GAAG,GAAG;AACvB,IAAG,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AACA,QAAM,aAAa,GAAG,KAAK,EAAE,QAAQ,oBAAoB,GAAG;AAC5D,QAAM,WAAgB,WAAK,KAAK,GAAG,UAAU,WAAW;AAExD,QAAM,WAAW;AAAA;AAAA;AAAA,mBAGA,EAAE;AAAA;AAAA;AAAA;AAAA,SAIZ,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAUT,EAAG,kBAAc,UAAU,UAAU,OAAO;AAC5C,SAAY,eAAS,aAAa,QAAQ;AAC5C;AAEO,IAAM,aAAa,IAAIJ,SAAQ,EACnC,KAAK,KAAK,EACV,YAAY,0EAA0E,EACtF,SAAS,QAAQ,uDAAuD,EACxE,OAAO,yBAAyB,0CAA0C,EAC1E,OAAO,gBAAgB,kFAAkF,EAAE,EAC3G,OAAO,2BAA2B,4DAA4D,EAC9F,OAAO,uBAAuB,4CAA4C,KAAK,EAC/E,OAAO,iBAAiB,sCAAsC,KAAK,EACnE;AAAA,EACC,OACE,OACA,YAOG;AACH,UAAM,cAAc,QAAQ,IAAI;AAChC,QAAI;AACJ,QAAI;AAEJ,QAAI,QAAQ,SAAS,YAAY,QAAQ,SAAS,SAAS;AACzD,aAAO,QAAQ;AACf,YAAM,SAAS,IAAI,KAAK;AACxB,UAAI,CAAC,IAAI;AACP,cAAM,MAAM,MAAM,QAAQ;AAAA,UACxB,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS,SAAS,IAAI;AAAA,UACtB,UAAU,CAAC,MAAO,EAAE,KAAK,IAAI,OAAO;AAAA,QACtC,CAAC;AACD,YAAI,OAAO,IAAI,OAAO,UAAU;AAC9B,kBA
AQ,WAAW;AACnB;AAAA,QACF;AACA,aAAK,IAAI,GAAG,KAAK;AAAA,MACnB;AAAA,IACF,OAAO;AACL,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,MAAM;AAAA,QACN,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,UACP,EAAE,OAAO,UAAU,OAAO,UAAU,aAAa,0CAA0C;AAAA,UAC3F,EAAE,OAAO,SAAS,OAAO,SAAS,aAAa,wCAAwC;AAAA,QACzF;AAAA,MACF,CAAC;AACD,UAAI,QAAQ,SAAS,QAAW;AAC9B,gBAAQ,WAAW;AACnB;AAAA,MACF;AACA,aAAO,QAAQ;AACf,YAAM,SAAS,IAAI,KAAK;AACxB,UAAI,CAAC,IAAI;AACP,cAAM,QAAQ,MAAM,QAAQ;AAAA,UAC1B,MAAM;AAAA,UACN,MAAM;AAAA,UACN,SAAS,SAAS,IAAI;AAAA,UACtB,UAAU,CAAC,MAAO,EAAE,KAAK,IAAI,OAAO;AAAA,QACtC,CAAC;AACD,YAAI,OAAO,MAAM,OAAO,UAAU;AAChC,kBAAQ,WAAW;AACnB;AAAA,QACF;AACA,aAAK,MAAM,GAAG,KAAK;AAAA,MACrB;AAAA,IACF;AAEA,UAAM,UAAUI,KAAI,eAAe,IAAI,KAAK,EAAE,MAAM;AACpD,QAAI;AACF,YAAM,SAAS,QAAQ,MAAM,EAAE,KAAK,QAAQ,IAAI,IAAI,CAAC;AACrD,UAAI,SAAS,UAAU;AACrB,cAAM,eAAe,eAAe,aAAa,IAAI;AAAA,UACnD,GAAG;AAAA,UACH,UAAU,QAAQ;AAAA,UAClB,SAAS,QAAQ;AAAA,UACjB,QAAQ,QAAQ;AAAA,QAClB,CAAC;AACD,gBAAQ;AAAA,UACN,mBAAmBD,OAAM,KAAK,YAAY,CAAC;AAAA,YAC5BA,OAAM,OAAO,oBAAoB,CAAC;AAAA,QACnD;AAAA,MACF,OAAO;AACL,cAAM,eAAe,cAAc,aAAa,IAAI,MAAM;AAC1D,gBAAQ;AAAA,UACN,kBAAkBA,OAAM,KAAK,YAAY,CAAC;AAAA,wDACiBA,OAAM,OAAO,oBAAoB,CAAC;AAAA,QAC/F;AAAA,MACF;AAAA,IACF,SAAS,OAAgB;AACvB,YAAM,MAAM;AACZ,cAAQ,KAAK,sBAAsB,IAAI,EAAE;AACzC,cAAQ,MAAMA,OAAM,IAAI,KAAK,SAAS,KAAK,WAAW,OAAO,KAAK,CAAC,CAAC;AACpE,cAAQ,WAAW;AAAA,IACrB;AAAA,EACF;AACF;;;ACzMF,SAAS,WAAAE,gBAAe;AACxB,YAAYC,SAAQ;AACpB,YAAYC,WAAU;AACtB,OAAOC,YAAW;AAClB,OAAOC,UAAS;AAGhB,IAAM,YAAY;AAAA,EAChB,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkMtB,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA8N1
B,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2S1B,2BAA2B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgW3B,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAoPvB,8BAA8B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAge9B,6BAA6B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;
AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsS7B,oCAAoC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmWtC;AAEA,IAAM,4BAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmClC,SAAS,UAAU,UAAkB,SAAiB,OAAyB;AAC7E,MAAO,eAAW,QAAQ,KAAK,CAAC,OAAO;AACrC,WAAO;AAAA,EACT;AACA,QAAM,MAAW,cAAQ,QAAQ;AACjC,MAAI,CAAI,eAAW,GAAG,GAAG;AACvB,IAAG,cAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AACA,EAAG,kBAAc,UAAU,SAAS,OAAO;AAC3C,SAAO;AACT;AAEA,SAAS,oBAAoB,YAAoB,OAAyB;AACxE,MAAI,CAAI,eAAW,UAAU,GAAG;
AAE9B,UAAMC,WAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAalB,yBAAyB;AAAA;AAAA;AAGvB,IAAG,kBAAc,YAAYA,UAAS,OAAO;AAC7C,WAAO;AAAA,EACT;AAGA,QAAM,UAAa,iBAAa,YAAY,OAAO;AAGnD,MAAI,QAAQ,SAAS,kBAAkB,GAAG;AACxC,QAAI,CAAC,OAAO;AACV,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAGA,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,MAAI,cAAc;AAClB,MAAI,aAAa;AACjB,MAAI,iBAAiB;AAErB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,UAAM,OAAO,MAAM,CAAC;AACpB,QAAI,KAAK,SAAS,cAAc,KAAK,KAAK,SAAS,GAAG,GAAG;AACvD,uBAAiB;AAAA,IACnB;AACA,QAAI,gBAAgB;AAClB,YAAM,cAAc,KAAK,MAAM,IAAI,KAAK,CAAC,GAAG;AAC5C,YAAM,eAAe,KAAK,MAAM,IAAI,KAAK,CAAC,GAAG;AAC7C,oBAAc,aAAa;AAE3B,UAAI,eAAe,KAAK,cAAc,KAAK,gBAAgB,IAAI;AAC7D,sBAAc;AACd;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,MAAI,gBAAgB,IAAI;AAEtB,UAAM,YAAY,QAAQ,YAAY,GAAG;AACzC,QAAI,cAAc,IAAI;AACpB,YAAM,SAAS,QAAQ,MAAM,GAAG,SAAS;AACzC,YAAM,QAAQ,QAAQ,MAAM,SAAS;AACrC,YAAM,aAAa,SAAS,QAAQ,4BAA4B,OAAO;AACvE,MAAG,kBAAc,YAAY,YAAY,OAAO;AAChD,aAAO;AAAA,IACT;AACA,WAAO;AAAA,EACT;AAGA,QAAM,SAAS,MAAM,WAAW,EAAE,MAAM,QAAQ,IAAI,CAAC,KAAK;AAC1D,QAAM,gBAAgB,0BAA0B,MAAM,IAAI,EAAE,IAAI,CAAC,GAAG,QAAQ;AAC1E,QAAI,QAAQ,EAAG,QAAO,SAAS;AAC/B,WAAO,SAAS;AAAA,EAClB,CAAC;AAED,QAAM,OAAO,aAAa,GAAG,GAAG,aAAa;AAC7C,EAAG,kBAAc,YAAY,MAAM,KAAK,IAAI,GAAG,OAAO;AACtD,SAAO;AACT;AAEO,IAAM,qBAAqB,IAAIL,SAAQ,EAC3C,KAAK,aAAa,EAClB,YAAY,2EAA2E,EACvF,OAAO,WAAW,4BAA4B,KAAK,EACnD,OAAO,oBAAoB,qCAAqC,KAAK,EACrE,OAAO,iBAAiB,mCAAmC,KAAK,EAChE,OAAO,CAAC,YAAwE;AAC/E,QAAM,UAAUI,KAAI,+BAA+B,EAAE,MAAM;AAC3D,MAAI;AACF,UAAM,cAAc,QAAQ,IAAI;AAChC,UAAM,SAAS,QAAQ,UAAU;AACjC,UAAM,SAAc,WAAK,QAAQ,OAAO,WAAW;AACnD,UAAM,QAAQ,QAAQ,SAAS;AAC/B,UAAM,aAAa,QAAQ,cAAc;AAEzC,UAAM,eAAyB,CAAC;AAChC,UAAM,eAAyB,CAAC;AAGhC,eAAW,CAAC,cAAc,QAAQ,KAAK,OAAO,QAAQ,SAAS,GAAG;AAChE,YAAM,WAAgB,gBAAe,WAAK,aAAa,QAAQ,YAAY,CAAC;AAC5E,YAAM,UAAU,UAAU,UAAU,UAAU,KAAK;AACnD,UAAI,SAAS;AACX,qBAAa,KAAU,eAAS,aAAa,QAAQ,CAAC;AAAA,MACxD,OAAO;AACL,qBAAa,KAAU,eAAS,aAAa,QAAQ,CAAC;AAAA,MACxD;AAAA,IACF;AAGA,QAAI,gBAAgB;AACpB,QAAI,CAAC,YAAY;AACf,YAAM,aAAkB,WAAK,aAAa,oBAAoB;AAC9D,sBAAgB,oBAAoB,YAAY,KAAK;AACrD,UAAI,eAAe;AACjB,qBAAa,KAAK,oBAAoB;AAAA,MACxC,WAAc,eAAW,UAAU,GAAG;AACpC,qBAAa,KAAK,oBAAoB;AAAA,MACxC;AAAA,IACF;AAEA,YAAQ,QAAQ,2BAA2B;AAE3C,QAAI,aAAa,SAAS,GAAG;AAC3B,cAAQ,IAAID,OAAM,MAAM,yBAAoB,CAAC;AAC7C,mBAAa,QAAQ,CAAC,MAAM,QAAQ,IAAIA,OAAM,KAAK,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,IACjE;AAEA,QAAI,aAAa,SAAS,GAAG;AAC3B,cAAQ,IAAIA,OAAM,OAAO,6DAAwD,CAAC;AAClF,mBAAa,QAAQ,CAAC,MAAM,QAAQ,IAAIA,OAAM,KAAK,OAAO,CAAC,EAAE,CAAC,CAAC;AAAA,IACjE;AAEA,YAAQ;AAAA,MACNA,OAAM;AAAA,QACJ;AAAA;AAAA;AAAA;AAAA,iCAGoCA,OAAM,OAAO,+BAA+B,CAAC;AAAA,uBACvDA,OAAM,OAAO,oBAAoB,CAAC;AAAA,WAC9CA,OAAM,OAAO,yBAAyB,CAAC;AAAA,MACvD;AAAA,IACF;AAAA,EACF,SAAS,OAAY;AACnB,YAAQ,KAAK,oCAAoC;AACjD,YAAQ,MAAMA,OAAM,IAAI,OAAO,SAAS,OAAO,WAAW,OAAO,KAAK,CAAC,CAAC;AACxE,YAAQ,WAAW;AAAA,EACrB;AACF,CAAC;;;AH7kFH,IAAM,aAAa,cAAc,YAAY,GAAG;AAChD,IAAM,YAAYG,SAAQ,UAAU;AACpC,IAAM,kBAAkBC,MAAK,WAAW,MAAM,cAAc;AAC5D,IAAM,cAAc,KAAK,MAAMC,cAAa,iBAAiB,OAAO,CAAC;AACrE,IAAM,UAAU,YAAY,WAAW;AAEvC,IAAM,UAAU,IAAIC,SAAQ;AAE5B,QACG,KAAK,WAAW,EAChB,YAAY,wDAAwD,EACpE,QAAQ,OAAO;AAElB,QAAQ,WAAW,WAAW;AAC9B,QAAQ,WAAW,UAAU;AAC7B,QAAQ,WAAW,kBAAkB;AAErC,QAAQ,MAAM,QAAQ,IAAI;AAE1B,IAAM,cAAc;","names":["Command","readFileSync","dirname","join","build","output","packageJson","Command","fs","path","chalk","ora","Command","fs","path","chalk","ora","content","dirname","join","readFileSync","Command"]}