@microfox/ai-worker 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/index.ts","../src/client.ts","../src/mongoJobStore.ts","../src/handler.ts","../src/config.ts"],"sourcesContent":["/**\n * @microfox/ai-worker\n * Worker runtime for ai-router - SQS-based async agent execution\n */\n\nimport { dispatch, dispatchLocal, type DispatchOptions, type DispatchResult } from './client.js';\nimport { createLambdaHandler, type WorkerHandler, type JobStore } from './handler.js';\nimport type { ZodType, z } from 'zod';\n\nexport * from './client.js';\nexport * from './handler.js';\nexport * from './config.js';\n\n/**\n * Schedule event configuration for a worker.\n * Supports both simple rate/cron strings and full configuration objects.\n * \n * @example Simple rate/cron\n * ```typescript\n * schedule: 'rate(2 hours)'\n * // or\n * schedule: 'cron(0 12 * * ? *)'\n * ```\n * \n * @example Full configuration\n * ```typescript\n * schedule: {\n * rate: 'rate(10 minutes)',\n * enabled: true,\n * input: { key1: 'value1' }\n * }\n * ```\n * \n * @example Multiple schedules\n * ```typescript\n * schedule: [\n * 'rate(2 hours)',\n * { rate: 'cron(0 12 * * ? *)', enabled: false }\n * ]\n * ```\n */\nexport interface ScheduleEventConfig {\n /**\n * Schedule rate using either rate() or cron() syntax.\n * Can be a string or array of strings for multiple schedules.\n * \n * @example 'rate(2 hours)' or 'cron(0 12 * * ? *)'\n * @example ['cron(0 0/4 ? * MON-FRI *)', 'cron(0 2 ? * SAT-SUN *)']\n */\n rate: string | string[];\n /**\n * Whether the schedule is enabled (default: true).\n */\n enabled?: boolean;\n /**\n * Input payload to pass to the function.\n */\n input?: Record<string, any>;\n /**\n * JSONPath expression to select part of the event data as input.\n */\n inputPath?: string;\n /**\n * Input transformer configuration for custom input mapping.\n */\n inputTransformer?: {\n inputPathsMap?: Record<string, string>;\n inputTemplate?: string;\n };\n /**\n * Name of the schedule event.\n */\n name?: string;\n /**\n * Description of the schedule event.\n */\n description?: string;\n /**\n * Method to use: 'eventBus' (default) or 'scheduler'.\n * Use 'scheduler' for higher limits (1M events vs 300).\n */\n method?: 'eventBus' | 'scheduler';\n /**\n * Timezone for the schedule (only used with method: 'scheduler').\n * @example 'America/New_York'\n */\n timezone?: string;\n}\n\nexport type ScheduleConfig = \n | string \n | ScheduleEventConfig \n | (string | ScheduleEventConfig)[];\n\n/**\n * Configuration for a worker's Lambda function deployment.\n * \n * **Best Practice**: Export this as a separate const from your worker file:\n * ```typescript\n * export const workerConfig: WorkerConfig = {\n * timeout: 900,\n * memorySize: 2048,\n * layers: ['arn:aws:lambda:${aws:region}:${aws:accountId}:layer:ffmpeg:1'],\n * schedule: 'rate(2 hours)',\n * };\n * ```\n * \n * The CLI will automatically extract it from the export. 
You do not need to pass it to `createWorker()`.\n */\nexport interface WorkerConfig {\n /**\n * Lambda function timeout in seconds (max 900).\n */\n timeout?: number;\n /**\n * Lambda function memory size in MB (128-10240).\n */\n memorySize?: number;\n /**\n * Optional Lambda layers ARNs to attach to this worker function.\n *\n * This is primarily used by @microfox/ai-worker-cli when generating serverless.yml.\n * Supports CloudFormation pseudo-parameters like ${aws:region} and ${aws:accountId}.\n *\n * Example:\n * layers: ['arn:aws:lambda:${aws:region}:${aws:accountId}:layer:ffmpeg:1']\n */\n layers?: string[];\n /**\n * Schedule events configuration for this worker.\n * Allows multiple schedule events to be attached to the same function.\n * \n * @example Simple rate\n * ```typescript\n * schedule: 'rate(2 hours)'\n * ```\n * \n * @example Multiple schedules\n * ```typescript\n * schedule: [\n * 'rate(2 hours)',\n * { rate: 'cron(0 12 * * ? *)', enabled: true, input: { key: 'value' } }\n * ]\n * ```\n * \n * @example Using scheduler method with timezone\n * ```typescript\n * schedule: {\n * method: 'scheduler',\n * rate: 'cron(0 0/4 ? * MON-FRI *)',\n * timezone: 'America/New_York',\n * input: { key1: 'value1' }\n * }\n * ```\n */\n schedule?: ScheduleConfig;\n\n /**\n * SQS queue settings for this worker (used by @microfox/ai-worker-cli when generating serverless.yml).\n *\n * Notes:\n * - To effectively disable retries, set `maxReceiveCount: 1` (requires DLQ; the CLI will create one).\n * - SQS does not support `maxReceiveCount: 0`.\n * - `messageRetentionPeriod` is in seconds (max 1209600 = 14 days).\n */\n sqs?: {\n /**\n * How many receives before sending to DLQ.\n * Use 1 to avoid retries.\n */\n maxReceiveCount?: number;\n /**\n * How long messages are retained in the main queue (seconds).\n */\n messageRetentionPeriod?: number;\n /**\n * Visibility timeout for the main queue (seconds).\n * If not set, CLI defaults to (worker timeout + 60s).\n */\n visibilityTimeout?: number;\n /**\n * DLQ message retention period (seconds).\n * Defaults to `messageRetentionPeriod` (or 14 days).\n */\n deadLetterMessageRetentionPeriod?: number;\n };\n}\n\nexport interface WorkerAgentConfig<INPUT_SCHEMA extends ZodType<any>, OUTPUT> {\n id: string;\n inputSchema: INPUT_SCHEMA;\n outputSchema: ZodType<OUTPUT>;\n handler: WorkerHandler<z.infer<INPUT_SCHEMA>, OUTPUT>;\n /**\n * @deprecated Prefer exporting `workerConfig` as a separate const from your worker file.\n * The CLI will automatically extract it from the export. 
This parameter is kept for backward compatibility.\n */\n workerConfig?: WorkerConfig;\n}\n\nexport interface WorkerAgent<INPUT_SCHEMA extends ZodType<any>, OUTPUT> {\n id: string;\n dispatch: (\n input: z.input<INPUT_SCHEMA>,\n options: DispatchOptions\n ) => Promise<DispatchResult>;\n handler: WorkerHandler<z.infer<INPUT_SCHEMA>, OUTPUT>;\n inputSchema: INPUT_SCHEMA;\n outputSchema: ZodType<OUTPUT>;\n workerConfig?: WorkerConfig;\n}\n\n/**\n * Creates a worker agent that can be dispatched to SQS/Lambda.\n *\n * In development mode (NODE_ENV === 'development' and WORKERS_LOCAL_MODE !== 'false'),\n * dispatch() will run the handler immediately in the same process.\n *\n * In production, dispatch() sends a message to SQS which triggers a Lambda function.\n *\n * @template INPUT_SCHEMA - The Zod schema type (e.g., `typeof InputSchema`).\n * Used to derive both:\n * - Pre-parse input type via `z.input<INPUT_SCHEMA>` for `dispatch()` (preserves optional fields)\n * - Parsed input type via `z.infer<INPUT_SCHEMA>` for handler (defaults applied)\n * @template OUTPUT - The output type returned by the handler. Use `z.infer<typeof OutputSchema>`.\n *\n * @param config - Worker agent configuration\n * @returns A worker agent object with a dispatch method\n *\n * @example\n * ```typescript\n * const InputSchema = z.object({\n * url: z.string().url(),\n * timeout: z.number().optional().default(5000), // optional with default\n * });\n *\n * export const worker = createWorker<typeof InputSchema, Output>({\n * // dispatch() accepts { url: string, timeout?: number } (pre-parse, optional preserved)\n * // handler receives { url: string, timeout: number } (parsed, default applied)\n * });\n * ```\n */\nexport function createWorker<INPUT_SCHEMA extends ZodType<any>, OUTPUT>(\n config: WorkerAgentConfig<INPUT_SCHEMA, OUTPUT>\n): WorkerAgent<INPUT_SCHEMA, OUTPUT> {\n const { id, inputSchema, outputSchema, handler } = config;\n\n const agent: WorkerAgent<INPUT_SCHEMA, OUTPUT> = {\n id,\n handler,\n inputSchema,\n outputSchema,\n\n async dispatch(input: z.input<INPUT_SCHEMA>, options: DispatchOptions): Promise<DispatchResult> {\n const mode = options.mode ?? 
'auto';\n const envWantsLocal =\n process.env.NODE_ENV === 'development' &&\n process.env.WORKERS_LOCAL_MODE !== 'false';\n // Check if we're in local development mode\n const isLocal = mode === 'local' || (mode === 'auto' && envWantsLocal);\n\n if (isLocal) {\n // Local mode: run handler immediately\n // Parse input to apply defaults and get the final parsed type\n const parsedInput = inputSchema.parse(input);\n const localJobId = options.jobId || `local-${Date.now()}`;\n \n // Try to get direct job store access in local mode (same process as Next.js app)\n // This allows direct DB updates without needing HTTP/webhook URLs\n let directJobStore: {\n updateJob: (jobId: string, data: any) => Promise<void>;\n setJob?: (jobId: string, data: any) => Promise<void>;\n } | null = null;\n\n // Path constants for job store imports\n const nextJsPathAlias = '@/app/api/workflows/stores/jobStore';\n const explicitPath = process.env.WORKER_JOB_STORE_MODULE_PATH;\n\n // Reliable approach: try Next.js path alias first, then explicit env var\n // The @/ alias works at runtime in Next.js context\n const resolveJobStore = async () => {\n // Option 1: Try Next.js path alias (works in Next.js runtime context)\n try {\n const module = await import(nextJsPathAlias);\n if (module?.updateJob) {\n return { updateJob: module.updateJob, setJob: module.setJob };\n }\n } catch {\n // Path alias not available (not in Next.js context or alias not configured)\n }\n\n // Option 2: Use explicit env var if provided (for custom setups)\n if (explicitPath) {\n try {\n const module = await import(explicitPath).catch(() => {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n return require(explicitPath);\n });\n if (module?.updateJob) {\n return { updateJob: module.updateJob, setJob: module.setJob };\n }\n } catch {\n // Explicit path failed\n }\n }\n\n return null;\n };\n\n directJobStore = await resolveJobStore();\n if (directJobStore) {\n console.log('[Worker] Using direct job store in local mode (no HTTP needed)');\n }\n\n // Derive job store URL from webhook URL or environment (fallback for HTTP mode)\n let jobStoreUrl: string | undefined;\n if (options.webhookUrl) {\n try {\n const webhookUrlObj = new URL(options.webhookUrl);\n jobStoreUrl = webhookUrlObj.pathname.replace(/\\/webhook$/, '');\n jobStoreUrl = `${webhookUrlObj.origin}${jobStoreUrl}`;\n } catch {\n // Invalid URL, skip job store URL\n }\n }\n jobStoreUrl = jobStoreUrl || process.env.WORKER_JOB_STORE_URL;\n\n // Create job store interface for local mode\n // Prefer direct DB access, fallback to HTTP calls if needed\n const createLocalJobStore = (\n directStore: typeof directJobStore,\n httpUrl?: string\n ): JobStore | undefined => {\n // If we have direct job store access, use it (no HTTP needed)\n if (directStore) {\n return {\n update: async (update) => {\n try {\n // Build update payload\n const updatePayload: any = {};\n \n if (update.status !== undefined) {\n updatePayload.status = update.status;\n }\n if (update.metadata !== undefined) {\n updatePayload.metadata = update.metadata;\n }\n if (update.progress !== undefined) {\n // Merge progress into metadata\n updatePayload.metadata = {\n ...updatePayload.metadata,\n progress: update.progress,\n progressMessage: update.progressMessage,\n };\n }\n if (update.output !== undefined) {\n updatePayload.output = update.output;\n }\n if (update.error !== undefined) {\n updatePayload.error = update.error;\n }\n\n await directStore.updateJob(localJobId, updatePayload);\n console.log('[Worker] Local 
job updated (direct DB):', {\n jobId: localJobId,\n workerId: id,\n updates: Object.keys(updatePayload),\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to update local job (direct DB):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n }\n },\n get: async () => {\n try {\n // Use the same direct store that has updateJob - it should also have getJob\n if (directStore) {\n // Try to import getJob from the same module\n const nextJsPath = '@/app/api/workflows/stores/jobStore';\n const explicitPath = process.env.WORKER_JOB_STORE_MODULE_PATH;\n \n for (const importPath of [nextJsPath, explicitPath].filter(Boolean)) {\n try {\n const module = await import(importPath!);\n if (module?.getJob) {\n return await module.getJob(localJobId);\n }\n } catch {\n // Continue\n }\n }\n }\n return null;\n } catch (error: any) {\n console.warn('[Worker] Failed to get local job (direct DB):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n return null;\n }\n },\n };\n }\n\n // Fallback to HTTP calls if no direct access\n if (!httpUrl) {\n return undefined;\n }\n\n // Use HTTP calls to update job store\n return {\n update: async (update) => {\n try {\n // Build update payload\n const updatePayload: any = { jobId: localJobId, workerId: id };\n \n if (update.status !== undefined) {\n updatePayload.status = update.status;\n }\n if (update.metadata !== undefined) {\n updatePayload.metadata = update.metadata;\n }\n if (update.progress !== undefined) {\n // Merge progress into metadata\n updatePayload.metadata = {\n ...updatePayload.metadata,\n progress: update.progress,\n progressMessage: update.progressMessage,\n };\n }\n if (update.output !== undefined) {\n updatePayload.output = update.output;\n }\n if (update.error !== undefined) {\n updatePayload.error = update.error;\n }\n\n const response = await fetch(`${httpUrl}/update`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(updatePayload),\n });\n if (!response.ok) {\n throw new Error(`Job store update failed: ${response.status} ${response.statusText}`);\n }\n console.log('[Worker] Local job updated (HTTP):', {\n jobId: localJobId,\n workerId: id,\n updates: Object.keys(updatePayload),\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to update local job (HTTP):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n }\n },\n get: async () => {\n try {\n // GET /api/workflows/workers/:workerId/:jobId\n const response = await fetch(`${httpUrl}/${id}/${localJobId}`, {\n method: 'GET',\n headers: { 'Content-Type': 'application/json' },\n });\n\n if (!response.ok) {\n if (response.status === 404) {\n return null;\n }\n throw new Error(`Job store get failed: ${response.status} ${response.statusText}`);\n }\n\n return await response.json();\n } catch (error: any) {\n console.warn('[Worker] Failed to get local job (HTTP):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n return null;\n }\n },\n };\n };\n\n const jobStore = createLocalJobStore(directJobStore, jobStoreUrl);\n\n // Create initial job record if we have job store access\n if (directJobStore?.setJob) {\n try {\n await directJobStore.setJob(localJobId, {\n jobId: localJobId,\n workerId: id,\n status: 'queued',\n input: parsedInput,\n metadata: options.metadata || {},\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to create initial job record:', {\n jobId: 
localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n // Continue - job will still be created when status is updated\n }\n }\n\n // Create handler context with job store\n const handlerContext = {\n jobId: localJobId,\n workerId: id,\n ...(jobStore ? { jobStore } : {}),\n };\n\n try {\n // Update status to running before execution\n if (jobStore) {\n await jobStore.update({ status: 'running' });\n }\n\n const output = await dispatchLocal(handler, parsedInput, handlerContext);\n\n // Update status to completed before webhook\n if (jobStore) {\n await jobStore.update({ status: 'completed', output });\n }\n\n // Only send webhook if webhookUrl is provided\n if (options.webhookUrl) {\n try {\n await fetch(options.webhookUrl, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n jobId: localJobId,\n workerId: id,\n status: 'success',\n output,\n metadata: options.metadata,\n }),\n });\n } catch (error) {\n console.warn('[Worker] Local webhook call failed:', error);\n }\n }\n\n return {\n messageId: `local-${Date.now()}`,\n status: 'queued',\n jobId: localJobId,\n };\n } catch (error: any) {\n // Update status to failed before webhook\n if (jobStore) {\n await jobStore.update({\n status: 'failed',\n error: {\n message: error.message || 'Unknown error',\n stack: error.stack,\n name: error.name || 'Error',\n },\n });\n }\n\n // Only send error webhook if webhookUrl is provided\n if (options.webhookUrl) {\n try {\n await fetch(options.webhookUrl, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n jobId: localJobId,\n workerId: id,\n status: 'error',\n error: {\n message: error.message || 'Unknown error',\n stack: error.stack,\n name: error.name || 'Error',\n },\n metadata: options.metadata,\n }),\n });\n } catch (webhookError) {\n console.warn('[Worker] Local error webhook call failed:', webhookError);\n }\n }\n throw error;\n }\n }\n\n // Production mode: dispatch to SQS\n return dispatch(id, input, inputSchema, options);\n },\n };\n\n return agent;\n}\n\n/**\n * Creates a Lambda handler entrypoint for a worker agent.\n * This is used by the deployment script to generate Lambda entrypoints.\n *\n * @param agent - The worker agent\n * @returns A Lambda handler function\n */\nexport function createLambdaEntrypoint<INPUT_SCHEMA extends ZodType<any>, OUTPUT>(\n agent: WorkerAgent<INPUT_SCHEMA, OUTPUT>\n) {\n return createLambdaHandler(agent.handler, agent.outputSchema);\n}\n","/**\n * Client for dispatching background worker jobs.\n *\n * In production, dispatching happens via the workers HTTP API:\n * POST /workers/trigger -> enqueues message to SQS on the workers service side\n *\n * This avoids requiring AWS credentials in your Next.js app.\n */\n\nimport type { ZodType, z } from 'zod';\n\nexport interface DispatchOptions {\n /**\n * Optional webhook callback URL to notify when the job finishes.\n * Only called when provided. 
Default: no webhook (use job store / MongoDB only).\n */\n webhookUrl?: string;\n /**\n * Controls how dispatch executes.\n * - \"auto\" (default): local inline execution in development unless WORKERS_LOCAL_MODE=false.\n * - \"local\": force inline execution (no SQS).\n * - \"remote\": force SQS/Lambda dispatch even in development.\n */\n mode?: 'auto' | 'local' | 'remote';\n jobId?: string;\n metadata?: Record<string, any>;\n}\n\nexport interface DispatchResult {\n messageId: string;\n status: 'queued';\n jobId: string;\n}\n\nexport interface SerializedContext {\n requestId?: string;\n userId?: string;\n traceId?: string;\n [key: string]: any;\n}\n\n/**\n * Derives the full /workers/trigger URL from env.\n *\n * Preferred env vars:\n * - WORKER_BASE_URL: base URL of the workers service (e.g. https://.../prod)\n * - NEXT_PUBLIC_WORKER_BASE_URL: same, but exposed to the browser\n *\n * Legacy env vars (still supported for backwards compatibility):\n * - WORKERS_TRIGGER_API_URL / NEXT_PUBLIC_WORKERS_TRIGGER_API_URL\n * - WORKERS_CONFIG_API_URL / NEXT_PUBLIC_WORKERS_CONFIG_API_URL\n */\nfunction getWorkersTriggerUrl(): string {\n const raw =\n process.env.WORKER_BASE_URL ||\n process.env.NEXT_PUBLIC_WORKER_BASE_URL ||\n process.env.WORKERS_TRIGGER_API_URL ||\n process.env.NEXT_PUBLIC_WORKERS_TRIGGER_API_URL ||\n process.env.WORKERS_CONFIG_API_URL ||\n process.env.NEXT_PUBLIC_WORKERS_CONFIG_API_URL;\n\n if (!raw) {\n throw new Error(\n 'WORKER_BASE_URL (preferred) or NEXT_PUBLIC_WORKER_BASE_URL is required for background workers'\n );\n }\n\n const url = new URL(raw);\n url.search = '';\n url.hash = '';\n\n const path = url.pathname || '';\n\n // If the user pointed at a specific endpoint, normalize back to the service root.\n url.pathname = path.replace(/\\/?workers\\/(trigger|config)\\/?$/, '');\n\n const basePath = url.pathname.replace(/\\/+$/, '');\n url.pathname = `${basePath}/workers/trigger`.replace(/\\/+$/, '');\n\n return url.toString();\n}\n\n/**\n * Serializes context data for transmission to Lambda.\n * Only serializes safe, JSON-compatible properties.\n */\nfunction serializeContext(ctx: any): SerializedContext {\n const serialized: SerializedContext = {};\n\n if (ctx.requestId) {\n serialized.requestId = ctx.requestId;\n }\n\n // Extract any additional serializable metadata\n if (ctx.metadata && typeof ctx.metadata === 'object') {\n Object.assign(serialized, ctx.metadata);\n }\n\n // Allow custom context serialization via a helper property\n if (ctx._serializeContext && typeof ctx._serializeContext === 'function') {\n const custom = ctx._serializeContext();\n Object.assign(serialized, custom);\n }\n\n return serialized;\n}\n\n/**\n * Dispatches a background worker job to SQS.\n *\n * @param workerId - The ID of the worker to dispatch\n * @param input - The input data for the worker (will be validated against inputSchema)\n * @param inputSchema - Zod schema for input validation\n * @param options - Dispatch options including webhook URL\n * @param ctx - Optional context object (only serializable parts will be sent)\n * @returns Promise resolving to dispatch result with messageId and jobId\n */\nexport async function dispatch<INPUT_SCHEMA extends ZodType<any>>(\n workerId: string,\n input: z.input<INPUT_SCHEMA>,\n inputSchema: INPUT_SCHEMA,\n options: DispatchOptions,\n ctx?: any\n): Promise<DispatchResult> {\n // Validate input against schema\n const validatedInput = inputSchema.parse(input);\n\n // Generate job ID if not provided\n const jobId =\n options.jobId || 
`job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n\n // Resolve /workers/trigger endpoint URL\n const triggerUrl = getWorkersTriggerUrl();\n\n // Serialize context (only safe, JSON-compatible parts)\n const serializedContext = ctx ? serializeContext(ctx) : {};\n\n // Job updates use MongoDB only; never pass jobStoreUrl/origin URL.\n const messageBody = {\n workerId,\n jobId,\n input: validatedInput,\n context: serializedContext,\n webhookUrl: options.webhookUrl,\n metadata: options.metadata || {},\n timestamp: new Date().toISOString(),\n };\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (triggerKey) {\n headers['x-workers-trigger-key'] = triggerKey;\n }\n\n const response = await fetch(triggerUrl, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n workerId,\n body: messageBody,\n }),\n });\n\n if (!response.ok) {\n const text = await response.text().catch(() => '');\n throw new Error(\n `Failed to trigger worker \"${workerId}\": ${response.status} ${response.statusText}${text ? ` - ${text}` : ''}`\n );\n }\n\n const data = (await response.json().catch(() => ({}))) as any;\n const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;\n\n return {\n messageId,\n status: 'queued',\n jobId,\n };\n}\n\n/**\n * Local development mode: runs the handler immediately in the same process.\n * This bypasses SQS and Lambda for faster iteration during development.\n *\n * @param handler - The worker handler function\n * @param input - The input data\n * @param ctx - The context object\n * @returns The handler result\n */\nexport async function dispatchLocal<INPUT, OUTPUT>(\n handler: (params: { input: INPUT; ctx: any }) => Promise<OUTPUT>,\n input: INPUT,\n ctx?: any\n): Promise<OUTPUT> {\n return handler({ input, ctx: ctx || {} });\n}\n","/**\n * MongoDB-backed job store for Lambda workers.\n * Updates jobs directly in MongoDB; never uses HTTP/origin URL.\n *\n * Env: MONGODB_WORKER_URI (or MONGODB_URI), MONGODB_WORKER_DB (or MONGODB_DB),\n * MONGODB_WORKER_JOBS_COLLECTION (default: worker_jobs).\n */\n\nimport { MongoClient, type Collection } from 'mongodb';\nimport type { JobStore, JobStoreUpdate } from './handler';\n\nconst uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;\nconst dbName =\n process.env.MONGODB_WORKER_DB ||\n process.env.MONGODB_DB ||\n 'worker';\nconst collectionName =\n process.env.MONGODB_WORKER_JOBS_COLLECTION || 'worker_jobs';\n\ntype Doc = {\n _id: string;\n jobId: string;\n workerId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input: any;\n output?: any;\n error?: { message: string; stack?: string; name?: string };\n metadata?: Record<string, any>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n};\n\nlet clientPromise: Promise<MongoClient> | null = null;\n\nfunction getClient(): Promise<MongoClient> {\n if (!uri) {\n throw new Error(\n 'MongoDB URI required for job store. 
Set DATABASE_MONGODB_URI or MONGODB_URI.'\n );\n }\n if (!clientPromise) {\n clientPromise = new MongoClient(uri, {\n maxPoolSize: 10,\n minPoolSize: 0,\n serverSelectionTimeoutMS: 10_000,\n }).connect();\n }\n return clientPromise;\n}\n\nasync function getCollection(): Promise<Collection<Doc>> {\n const client = await getClient();\n return client.db(dbName).collection<Doc>(collectionName);\n}\n\n/**\n * Create a JobStore that reads/writes directly to MongoDB.\n * Caller must ensure the job exists (upsert on first use).\n */\nexport function createMongoJobStore(\n workerId: string,\n jobId: string,\n input: any,\n metadata: Record<string, any>\n): JobStore {\n return {\n update: async (update: JobStoreUpdate): Promise<void> => {\n try {\n const coll = await getCollection();\n const now = new Date().toISOString();\n const existing = await coll.findOne({ _id: jobId });\n\n let metadataUpdate: Record<string, any> = { ...(existing?.metadata ?? {}) };\n if (update.metadata) {\n Object.assign(metadataUpdate, update.metadata);\n }\n if (update.progress !== undefined || update.progressMessage !== undefined) {\n metadataUpdate.progress = update.progress;\n metadataUpdate.progressMessage = update.progressMessage;\n }\n\n const set: Partial<Doc> = {\n updatedAt: now,\n metadata: metadataUpdate,\n };\n if (update.status !== undefined) {\n set.status = update.status;\n if (['completed', 'failed'].includes(update.status) && !existing?.completedAt) {\n set.completedAt = now;\n }\n }\n if (update.output !== undefined) set.output = update.output;\n if (update.error !== undefined) set.error = update.error;\n\n if (existing) {\n await coll.updateOne({ _id: jobId }, { $set: set });\n } else {\n const doc: Doc = {\n _id: jobId,\n jobId,\n workerId,\n status: (update.status as Doc['status']) ?? 'queued',\n input: input ?? {},\n output: update.output,\n error: update.error,\n metadata: metadataUpdate,\n createdAt: now,\n updatedAt: now,\n completedAt: set.completedAt,\n };\n if (doc.status === 'completed' || doc.status === 'failed') {\n doc.completedAt = doc.completedAt ?? now;\n }\n await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });\n }\n } catch (e: any) {\n console.error('[Worker] MongoDB job store update failed:', {\n jobId,\n workerId,\n error: e?.message ?? String(e),\n });\n }\n },\n get: async () => {\n try {\n const coll = await getCollection();\n const doc = await coll.findOne({ _id: jobId });\n if (!doc) return null;\n const { _id, ...r } = doc;\n return r as any;\n } catch (e: any) {\n console.error('[Worker] MongoDB job store get failed:', {\n jobId,\n workerId,\n error: e?.message ?? String(e),\n });\n return null;\n }\n },\n };\n}\n\n/**\n * Upsert initial job record in MongoDB (queued).\n * Call this when the Lambda starts processing a message.\n */\nexport async function upsertJob(\n jobId: string,\n workerId: string,\n input: any,\n metadata: Record<string, any>\n): Promise<void> {\n const coll = await getCollection();\n const now = new Date().toISOString();\n await coll.updateOne(\n { _id: jobId },\n {\n $set: {\n _id: jobId,\n jobId,\n workerId,\n status: 'queued',\n input: input ?? {},\n metadata: metadata ?? {},\n createdAt: now,\n updatedAt: now,\n },\n },\n { upsert: true }\n );\n}\n\nexport function isMongoJobStoreConfigured(): boolean {\n return Boolean(uri?.trim());\n}\n","/**\n * Generic Lambda handler wrapper for worker agents.\n * Handles SQS events, executes user handlers, and sends webhook callbacks.\n * Job store: MongoDB only. 
Never uses HTTP/origin URL for job updates.\n */\n\nimport type { SQSEvent, SQSRecord, Context as LambdaContext } from 'aws-lambda';\nimport type { ZodType } from 'zod';\nimport {\n createMongoJobStore,\n upsertJob,\n isMongoJobStoreConfigured,\n} from './mongoJobStore';\n\nexport interface JobStoreUpdate {\n status?: 'queued' | 'running' | 'completed' | 'failed';\n metadata?: Record<string, any>;\n progress?: number;\n progressMessage?: string;\n output?: any;\n error?: {\n message: string;\n stack?: string;\n name?: string;\n };\n}\n\nexport interface JobStore {\n /**\n * Update job in job store.\n * @param update - Update object with status, metadata, progress, output, or error\n */\n update(update: JobStoreUpdate): Promise<void>;\n /**\n * Get current job record from job store.\n * @returns Job record or null if not found\n */\n get(): Promise<{\n jobId: string;\n workerId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input: any;\n output?: any;\n error?: { message: string; stack?: string };\n metadata?: Record<string, any>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n } | null>;\n}\n\nexport interface WorkerHandlerParams<INPUT, OUTPUT> {\n input: INPUT;\n ctx: {\n jobId: string;\n workerId: string;\n requestId?: string;\n /**\n * Job store interface for updating and retrieving job state.\n * Uses MongoDB directly when configured; never HTTP/origin URL.\n */\n jobStore?: JobStore;\n [key: string]: any;\n };\n}\n\nexport type WorkerHandler<INPUT, OUTPUT> = (\n params: WorkerHandlerParams<INPUT, OUTPUT>\n) => Promise<OUTPUT>;\n\nexport interface SQSMessageBody {\n workerId: string;\n jobId: string;\n input: any;\n context: Record<string, any>;\n webhookUrl?: string;\n /** @deprecated Never use. Job updates use MongoDB only. */\n jobStoreUrl?: string;\n metadata?: Record<string, any>;\n timestamp: string;\n}\n\nexport interface WebhookPayload {\n jobId: string;\n workerId: string;\n status: 'success' | 'error';\n output?: any;\n error?: {\n message: string;\n stack?: string;\n name?: string;\n };\n metadata?: Record<string, any>;\n}\n\n/**\n * Sends a webhook callback to the specified URL.\n */\nasync function sendWebhook(\n webhookUrl: string,\n payload: WebhookPayload\n): Promise<void> {\n try {\n const response = await fetch(webhookUrl, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'User-Agent': 'ai-router-worker/1.0',\n },\n body: JSON.stringify(payload),\n });\n\n if (!response.ok) {\n const errorText = await response.text().catch(() => '');\n console.error('[Worker] Webhook callback failed:', {\n url: webhookUrl,\n status: response.status,\n statusText: response.statusText,\n errorText,\n });\n // Don't throw - webhook failures shouldn't fail the Lambda\n } else {\n console.log('[Worker] Webhook callback successful:', {\n url: webhookUrl,\n status: response.status,\n });\n }\n } catch (error: any) {\n console.error('[Worker] Webhook callback error:', {\n url: webhookUrl,\n error: error?.message || String(error),\n stack: error?.stack,\n });\n // Don't throw - webhook failures shouldn't fail the Lambda\n }\n}\n\n/**\n * Creates a Lambda handler function that processes SQS events for workers.\n * Job store: MongoDB only. 
Never uses HTTP/origin URL for job updates.\n *\n * @param handler - The user's worker handler function\n * @param outputSchema - Optional Zod schema for output validation\n * @returns A Lambda handler function\n */\nexport function createLambdaHandler<INPUT, OUTPUT>(\n handler: WorkerHandler<INPUT, OUTPUT>,\n outputSchema?: ZodType<OUTPUT>\n): (event: SQSEvent, context: LambdaContext) => Promise<void> {\n return async (event: SQSEvent, lambdaContext: LambdaContext) => {\n const promises = event.Records.map(async (record: SQSRecord) => {\n let messageBody: SQSMessageBody | null = null;\n try {\n messageBody = JSON.parse(record.body) as SQSMessageBody;\n\n const { workerId, jobId, input, context, webhookUrl, metadata = {} } =\n messageBody;\n\n let jobStore: JobStore | undefined;\n if (isMongoJobStoreConfigured()) {\n await upsertJob(jobId, workerId, input, metadata);\n jobStore = createMongoJobStore(workerId, jobId, input, metadata);\n }\n\n const handlerContext = {\n jobId,\n workerId,\n requestId: context.requestId || lambdaContext.awsRequestId,\n ...(jobStore ? { jobStore } : {}),\n ...context,\n };\n\n if (jobStore) {\n try {\n await jobStore.update({ status: 'running' });\n console.log('[Worker] Job status updated to running:', {\n jobId,\n workerId,\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to update status to running:', {\n jobId,\n workerId,\n error: error?.message || String(error),\n });\n }\n }\n\n let output: OUTPUT;\n try {\n output = await handler({\n input: input as INPUT,\n ctx: handlerContext,\n });\n\n if (outputSchema) {\n output = outputSchema.parse(output);\n }\n } catch (error: any) {\n const errorPayload: WebhookPayload = {\n jobId,\n workerId,\n status: 'error',\n error: {\n message: error.message || 'Unknown error',\n stack: error.stack,\n name: error.name || 'Error',\n },\n metadata,\n };\n\n if (jobStore) {\n try {\n await jobStore.update({\n status: 'failed',\n error: errorPayload.error,\n });\n console.log('[Worker] Job status updated to failed:', {\n jobId,\n workerId,\n });\n } catch (updateError: any) {\n console.warn('[Worker] Failed to update job store on error:', {\n jobId,\n workerId,\n error: updateError?.message || String(updateError),\n });\n }\n }\n\n if (webhookUrl) {\n await sendWebhook(webhookUrl, errorPayload);\n }\n throw error;\n }\n\n if (jobStore) {\n try {\n await jobStore.update({\n status: 'completed',\n output,\n });\n console.log('[Worker] Job status updated to completed:', {\n jobId,\n workerId,\n });\n } catch (updateError: any) {\n console.warn('[Worker] Failed to update job store on success:', {\n jobId,\n workerId,\n error: updateError?.message || String(updateError),\n });\n }\n }\n\n console.log('[Worker] Job completed:', {\n jobId,\n workerId,\n output,\n });\n\n const successPayload: WebhookPayload = {\n jobId,\n workerId,\n status: 'success',\n output,\n metadata,\n };\n\n if (webhookUrl) {\n await sendWebhook(webhookUrl, successPayload);\n }\n } catch (error: any) {\n console.error('[Worker] Error processing SQS record:', {\n jobId: messageBody?.jobId ?? '(parse failed)',\n workerId: messageBody?.workerId ?? 
'(parse failed)',\n error: error?.message || String(error),\n stack: error?.stack,\n });\n throw error;\n }\n });\n\n await Promise.all(promises);\n };\n}\n","/**\n * Workers-config client for resolving queue URLs from the workers-config API Lambda.\n */\n\nexport interface WorkersConfig {\n version: string;\n stage: string;\n region: string;\n workers: Record<\n string,\n {\n queueUrl: string;\n region: string;\n }\n >;\n}\n\nlet cachedConfig: WorkersConfig | null = null;\nlet cacheExpiry: number = 0;\nconst CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes\n\n/**\n * Fetches the workers configuration from the workers-config API.\n * Results are cached for 5 minutes to reduce API calls.\n *\n * @param apiUrl - The URL of the workers-config API endpoint\n * @param apiKey - Optional API key for authentication (sent as x-workers-config-key header)\n * @returns The workers configuration mapping worker IDs to queue URLs\n */\nexport async function getWorkersConfig(\n apiUrl: string,\n apiKey?: string\n): Promise<WorkersConfig> {\n const now = Date.now();\n\n // Return cached config if still valid\n if (cachedConfig && now < cacheExpiry) {\n return cachedConfig;\n }\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n\n if (apiKey) {\n headers['x-workers-config-key'] = apiKey;\n }\n\n const response = await fetch(apiUrl, {\n method: 'GET',\n headers,\n });\n\n if (!response.ok) {\n throw new Error(\n `Failed to fetch workers config: ${response.status} ${response.statusText}`\n );\n }\n\n const config = (await response.json()) as WorkersConfig;\n cachedConfig = config;\n cacheExpiry = now + CACHE_TTL_MS;\n\n return config;\n}\n\n/**\n * Resolves the queue URL for a specific worker ID.\n * Throws an error if the worker ID is not found in the configuration.\n *\n * @param workerId - The ID of the worker\n * @param apiUrl - The URL of the workers-config API endpoint\n * @param apiKey - Optional API key for authentication\n * @returns The queue URL for the worker\n */\nexport async function resolveQueueUrl(\n workerId: string,\n apiUrl: string,\n apiKey?: string\n): Promise<string> {\n const config = await getWorkersConfig(apiUrl, apiKey);\n const worker = config.workers[workerId];\n\n if (!worker) {\n throw new Error(\n `Worker \"${workerId}\" not found in workers config. 
Available workers: ${Object.keys(config.workers).join(', ')}`\n );\n }\n\n return worker.queueUrl;\n}\n\n/**\n * Clears the cached workers configuration.\n * Useful for testing or when you need to force a refresh.\n */\nexport function clearWorkersConfigCache(): void {\n cachedConfig = null;\n cacheExpiry = 0;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACoDA,SAAS,uBAA+B;AACtC,QAAM,MACJ,QAAQ,IAAI,mBACZ,QAAQ,IAAI,+BACZ,QAAQ,IAAI,2BACZ,QAAQ,IAAI,uCACZ,QAAQ,IAAI,0BACZ,QAAQ,IAAI;AAEd,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,MAAM,IAAI,IAAI,GAAG;AACvB,MAAI,SAAS;AACb,MAAI,OAAO;AAEX,QAAM,OAAO,IAAI,YAAY;AAG7B,MAAI,WAAW,KAAK,QAAQ,oCAAoC,EAAE;AAElE,QAAM,WAAW,IAAI,SAAS,QAAQ,QAAQ,EAAE;AAChD,MAAI,WAAW,GAAG,QAAQ,mBAAmB,QAAQ,QAAQ,EAAE;AAE/D,SAAO,IAAI,SAAS;AACtB;AAMA,SAAS,iBAAiB,KAA6B;AACrD,QAAM,aAAgC,CAAC;AAEvC,MAAI,IAAI,WAAW;AACjB,eAAW,YAAY,IAAI;AAAA,EAC7B;AAGA,MAAI,IAAI,YAAY,OAAO,IAAI,aAAa,UAAU;AACpD,WAAO,OAAO,YAAY,IAAI,QAAQ;AAAA,EACxC;AAGA,MAAI,IAAI,qBAAqB,OAAO,IAAI,sBAAsB,YAAY;AACxE,UAAM,SAAS,IAAI,kBAAkB;AACrC,WAAO,OAAO,YAAY,MAAM;AAAA,EAClC;AAEA,SAAO;AACT;AAYA,eAAsB,SACpB,UACA,OACA,aACA,SACA,KACyB;AAEzB,QAAM,iBAAiB,YAAY,MAAM,KAAK;AAG9C,QAAM,QACJ,QAAQ,SAAS,OAAO,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAG/E,QAAM,aAAa,qBAAqB;AAGxC,QAAM,oBAAoB,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAGzD,QAAM,cAAc;AAAA,IAClB;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP,SAAS;AAAA,IACT,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ,YAAY,CAAC;AAAA,IAC/B,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AAEA,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AACA,QAAM,aAAa,QAAQ,IAAI;AAC/B,MAAI,YAAY;AACd,YAAQ,uBAAuB,IAAI;AAAA,EACrC;AAEA,QAAM,WAAW,MAAM,MAAM,YAAY;AAAA,IACvC,QAAQ;AAAA,IACR;AAAA,IACA,MAAM,KAAK,UAAU;AAAA,MACnB;AAAA,MACA,MAAM;AAAA,IACR,CAAC;AAAA,EACH,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ,MAAM,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,IAC9G;AAAA,EACF;AAEA,QAAM,OAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,QAAM,YAAY,MAAM,YAAY,OAAO,KAAK,SAAS,IAAI,WAAW,KAAK;AAE7E,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,EACF;AACF;AAWA,eAAsB,cACpB,SACA,OACA,KACiB;AACjB,SAAO,QAAQ,EAAE,OAAO,KAAK,OAAO,CAAC,EAAE,CAAC;AAC1C;;;AC7LA,qBAA6C;AAG7C,IAAM,MAAM,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,wBAAwB,QAAQ,IAAI;AAC9F,IAAM,SACJ,QAAQ,IAAI,qBACZ,QAAQ,IAAI,cACZ;AACF,IAAM,iBACJ,QAAQ,IAAI,kCAAkC;AAgBhD,IAAI,gBAA6C;AAEjD,SAAS,YAAkC;AACzC,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,eAAe;AAClB,oBAAgB,IAAI,2BAAY,KAAK;AAAA,MACnC,aAAa;AAAA,MACb,aAAa;AAAA,MACb,0BAA0B;AAAA,IAC5B,CAAC,EAAE,QAAQ;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAe,gBAA0C;AACvD,QAAM,SAAS,MAAM,UAAU;AAC/B,SAAO,OAAO,GAAG,MAAM,EAAE,WAAgB,cAAc;AACzD;AAMO,SAAS,oBACd,UACA,OACA,OACA,UACU;AACV,SAAO;AAAA,IACL,QAAQ,OAAO,WAA0C;AACvD,UAAI;AACF,cAAM,OAAO,MAAM,cAAc;AACjC,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,WAAW,MAAM,KAAK,QAAQ,EAAE,KAAK,MAAM,CAAC;AAElD,YAAI,iBAAsC,EAAE,GAAI,UAAU,YAAY,CAAC,EAAG;AAC1E,YAAI,OAAO,UAAU;AACnB,iBAAO,OAAO,gBAAgB,OAAO,QAAQ;AAAA,QAC/C;AACA,YAAI,OAAO,aAAa,UAAa,OAAO,oBAAoB,QAAW;AACzE,yBAAe,WAAW,OAAO;AACjC,yBAAe,kBAAkB,OAAO;AAAA,QAC1C;AAEA,cAAM,MAAoB;AAAA,UACxB,WAAW;AAAA,UACX,UAAU;AAAA,QACZ;AACA,YAAI,OAAO,WAAW,QAAW;AAC/B,cAAI,SAAS,OAAO;AACpB,cAAI,CAAC,aAAa,QAAQ,EAAE,SAAS,OAAO,MAAM,KAAK,CAAC,UAAU,aAAa;AAC7E,gBAAI,cAAc;AAAA,UACpB;AAAA,QACF;AACA,YAAI,OAAO,WAAW,OAAW,KAAI,SAAS,OAAO;AACrD,YAAI,OAAO,UAAU,OAAW,KAAI,QAAQ,OAAO;AAEnD,YAAI,UAAU;AACZ,gBAAM,KAAK,UAAU,EAAE,KAAK,MAAM,GAAG,EAAE,MAAM,IAAI,CAAC;AAAA,QACpD,OAAO
;AACL,gBAAM,MAAW;AAAA,YACf,KAAK;AAAA,YACL;AAAA,YACA;AAAA,YACA,QAAS,OAAO,UAA4B;AAAA,YAC5C,OAAO,SAAS,CAAC;AAAA,YACjB,QAAQ,OAAO;AAAA,YACf,OAAO,OAAO;AAAA,YACd,UAAU;AAAA,YACV,WAAW;AAAA,YACX,WAAW;AAAA,YACX,aAAa,IAAI;AAAA,UACnB;AACA,cAAI,IAAI,WAAW,eAAe,IAAI,WAAW,UAAU;AACzD,gBAAI,cAAc,IAAI,eAAe;AAAA,UACvC;AACA,gBAAM,KAAK,UAAU,EAAE,KAAK,MAAM,GAAG,EAAE,MAAM,IAAI,GAAG,EAAE,QAAQ,KAAK,CAAC;AAAA,QACtE;AAAA,MACF,SAAS,GAAQ;AACf,gBAAQ,MAAM,6CAA6C;AAAA,UACzD;AAAA,UACA;AAAA,UACA,OAAO,GAAG,WAAW,OAAO,CAAC;AAAA,QAC/B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IACA,KAAK,YAAY;AACf,UAAI;AACF,cAAM,OAAO,MAAM,cAAc;AACjC,cAAM,MAAM,MAAM,KAAK,QAAQ,EAAE,KAAK,MAAM,CAAC;AAC7C,YAAI,CAAC,IAAK,QAAO;AACjB,cAAM,EAAE,KAAK,GAAG,EAAE,IAAI;AACtB,eAAO;AAAA,MACT,SAAS,GAAQ;AACf,gBAAQ,MAAM,0CAA0C;AAAA,UACtD;AAAA,UACA;AAAA,UACA,OAAO,GAAG,WAAW,OAAO,CAAC;AAAA,QAC/B,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AAMA,eAAsB,UACpB,OACA,UACA,OACA,UACe;AACf,QAAM,OAAO,MAAM,cAAc;AACjC,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAM,KAAK;AAAA,IACT,EAAE,KAAK,MAAM;AAAA,IACb;AAAA,MACE,MAAM;AAAA,QACJ,KAAK;AAAA,QACL;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,OAAO,SAAS,CAAC;AAAA,QACjB,UAAU,YAAY,CAAC;AAAA,QACvB,WAAW;AAAA,QACX,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA,EAAE,QAAQ,KAAK;AAAA,EACjB;AACF;AAEO,SAAS,4BAAqC;AACnD,SAAO,QAAQ,KAAK,KAAK,CAAC;AAC5B;;;AC7EA,eAAe,YACb,YACA,SACe;AACf,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,YAAY;AAAA,MACvC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,cAAc;AAAA,MAChB;AAAA,MACA,MAAM,KAAK,UAAU,OAAO;AAAA,IAC9B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,cAAQ,MAAM,qCAAqC;AAAA,QACjD,KAAK;AAAA,QACL,QAAQ,SAAS;AAAA,QACjB,YAAY,SAAS;AAAA,QACrB;AAAA,MACF,CAAC;AAAA,IAEH,OAAO;AACL,cAAQ,IAAI,yCAAyC;AAAA,QACnD,KAAK;AAAA,QACL,QAAQ,SAAS;AAAA,MACnB,CAAC;AAAA,IACH;AAAA,EACF,SAAS,OAAY;AACnB,YAAQ,MAAM,oCAAoC;AAAA,MAChD,KAAK;AAAA,MACL,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,MACrC,OAAO,OAAO;AAAA,IAChB,CAAC;AAAA,EAEH;AACF;AAUO,SAAS,oBACd,SACA,cAC4D;AAC5D,SAAO,OAAO,OAAiB,kBAAiC;AAC9D,UAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,WAAsB;AAC9D,UAAI,cAAqC;AACzC,UAAI;AACF,sBAAc,KAAK,MAAM,OAAO,IAAI;AAEpC,cAAM,EAAE,UAAU,OAAO,OAAO,SAAS,YAAY,WAAW,CAAC,EAAE,IACjE;AAEF,YAAI;AACJ,YAAI,0BAA0B,GAAG;AAC/B,gBAAM,UAAU,OAAO,UAAU,OAAO,QAAQ;AAChD,qBAAW,oBAAoB,UAAU,OAAO,OAAO,QAAQ;AAAA,QACjE;AAEA,cAAM,iBAAiB;AAAA,UACrB;AAAA,UACA;AAAA,UACA,WAAW,QAAQ,aAAa,cAAc;AAAA,UAC9C,GAAI,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,UAC/B,GAAG;AAAA,QACL;AAEA,YAAI,UAAU;AACZ,cAAI;AACF,kBAAM,SAAS,OAAO,EAAE,QAAQ,UAAU,CAAC;AAC3C,oBAAQ,IAAI,2CAA2C;AAAA,cACrD;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH,SAAS,OAAY;AACnB,oBAAQ,KAAK,gDAAgD;AAAA,cAC3D;AAAA,cACA;AAAA,cACA,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,YACvC,CAAC;AAAA,UACH;AAAA,QACF;AAEA,YAAI;AACJ,YAAI;AACF,mBAAS,MAAM,QAAQ;AAAA,YACrB;AAAA,YACA,KAAK;AAAA,UACP,CAAC;AAED,cAAI,cAAc;AAChB,qBAAS,aAAa,MAAM,MAAM;AAAA,UACpC;AAAA,QACF,SAAS,OAAY;AACnB,gBAAM,eAA+B;AAAA,YACnC;AAAA,YACA;AAAA,YACA,QAAQ;AAAA,YACR,OAAO;AAAA,cACL,SAAS,MAAM,WAAW;AAAA,cAC1B,OAAO,MAAM;AAAA,cACb,MAAM,MAAM,QAAQ;AAAA,YACtB;AAAA,YACA;AAAA,UACF;AAEA,cAAI,UAAU;AACZ,gBAAI;AACF,oBAAM,SAAS,OAAO;AAAA,gBACpB,QAAQ;AAAA,gBACR,OAAO,aAAa;AAAA,cACtB,CAAC;AACD,sBAAQ,IAAI,0CAA0C;AAAA,gBACpD;AAAA,gBACA;AAAA,cACF,CAAC;AAAA,YACH,SAAS,aAAkB;AACzB,sBAAQ,KAAK,iDAAiD;AAAA,gBAC5D;AAAA,gBACA;AAAA,gBACA,OAAO,aAAa,WAAW,OAAO,WAAW;AAAA,cACnD,CAAC;AAAA,YACH;AAAA,UACF;AAEA,cAAI,YAAY;AACd,kBAAM,YAAY,YAAY,YAAY;AAAA,UAC5C;AACA,gBAAM;AAAA,QACR;AAEA,YAAI,UAAU;AACZ,cAAI;AACF,kBAAM,SAAS,OAAO;AAAA,cACpB,QAAQ;AAAA,cACR;AAAA,YACF,CAAC;AACD,oBAAQ,IAAI,6CAA6C;AAAA,cACvD;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH,SAAS,aAAkB;AACzB,oBAAQ,KAAK,mDAAmD;AAAA,cAC9D;AAAA,cACA;AAAA,cACA,OAAO
,aAAa,WAAW,OAAO,WAAW;AAAA,YACnD,CAAC;AAAA,UACH;AAAA,QACF;AAEA,gBAAQ,IAAI,2BAA2B;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAED,cAAM,iBAAiC;AAAA,UACrC;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,QACF;AAEA,YAAI,YAAY;AACd,gBAAM,YAAY,YAAY,cAAc;AAAA,QAC9C;AAAA,MACF,SAAS,OAAY;AACnB,gBAAQ,MAAM,yCAAyC;AAAA,UACrD,OAAO,aAAa,SAAS;AAAA,UAC7B,UAAU,aAAa,YAAY;AAAA,UACnC,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,UACrC,OAAO,OAAO;AAAA,QAChB,CAAC;AACD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAED,UAAM,QAAQ,IAAI,QAAQ;AAAA,EAC5B;AACF;;;AC5QA,IAAI,eAAqC;AACzC,IAAI,cAAsB;AAC1B,IAAM,eAAe,IAAI,KAAK;AAU9B,eAAsB,iBACpB,QACA,QACwB;AACxB,QAAM,MAAM,KAAK,IAAI;AAGrB,MAAI,gBAAgB,MAAM,aAAa;AACrC,WAAO;AAAA,EACT;AAEA,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AAEA,MAAI,QAAQ;AACV,YAAQ,sBAAsB,IAAI;AAAA,EACpC;AAEA,QAAM,WAAW,MAAM,MAAM,QAAQ;AAAA,IACnC,QAAQ;AAAA,IACR;AAAA,EACF,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI;AAAA,MACR,mCAAmC,SAAS,MAAM,IAAI,SAAS,UAAU;AAAA,IAC3E;AAAA,EACF;AAEA,QAAM,SAAU,MAAM,SAAS,KAAK;AACpC,iBAAe;AACf,gBAAc,MAAM;AAEpB,SAAO;AACT;AAWA,eAAsB,gBACpB,UACA,QACA,QACiB;AACjB,QAAM,SAAS,MAAM,iBAAiB,QAAQ,MAAM;AACpD,QAAM,SAAS,OAAO,QAAQ,QAAQ;AAEtC,MAAI,CAAC,QAAQ;AACX,UAAM,IAAI;AAAA,MACR,WAAW,QAAQ,qDAAqD,OAAO,KAAK,OAAO,OAAO,EAAE,KAAK,IAAI,CAAC;AAAA,IAChH;AAAA,EACF;AAEA,SAAO,OAAO;AAChB;AAMO,SAAS,0BAAgC;AAC9C,iBAAe;AACf,gBAAc;AAChB;;;AJ+IO,SAAS,aACd,QACmC;AACnC,QAAM,EAAE,IAAI,aAAa,cAAc,QAAQ,IAAI;AAEnD,QAAM,QAA2C;AAAA,IAC/C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAEA,MAAM,SAAS,OAA8B,SAAmD;AAC9F,YAAM,OAAO,QAAQ,QAAQ;AAC7B,YAAM,gBACJ,QAAQ,IAAI,aAAa,iBACzB,QAAQ,IAAI,uBAAuB;AAErC,YAAM,UAAU,SAAS,WAAY,SAAS,UAAU;AAExD,UAAI,SAAS;AAGX,cAAM,cAAc,YAAY,MAAM,KAAK;AAC3C,cAAM,aAAa,QAAQ,SAAS,SAAS,KAAK,IAAI,CAAC;AAIvD,YAAI,iBAGO;AAGX,cAAM,kBAAkB;AACxB,cAAM,eAAe,QAAQ,IAAI;AAIjC,cAAM,kBAAkB,YAAY;AAElC,cAAI;AACF,kBAAMA,UAAS,MAAM,OAAO;AAC5B,gBAAIA,SAAQ,WAAW;AACrB,qBAAO,EAAE,WAAWA,QAAO,WAAW,QAAQA,QAAO,OAAO;AAAA,YAC9D;AAAA,UACF,QAAQ;AAAA,UAER;AAGA,cAAI,cAAc;AAChB,gBAAI;AACF,oBAAMA,UAAS,MAAM,OAAO,cAAc,MAAM,MAAM;AAEpD,uBAAO,QAAQ,YAAY;AAAA,cAC7B,CAAC;AACD,kBAAIA,SAAQ,WAAW;AACrB,uBAAO,EAAE,WAAWA,QAAO,WAAW,QAAQA,QAAO,OAAO;AAAA,cAC9D;AAAA,YACF,QAAQ;AAAA,YAER;AAAA,UACF;AAEA,iBAAO;AAAA,QACT;AAEA,yBAAiB,MAAM,gBAAgB;AACvC,YAAI,gBAAgB;AAClB,kBAAQ,IAAI,gEAAgE;AAAA,QAC9E;AAGA,YAAI;AACJ,YAAI,QAAQ,YAAY;AACtB,cAAI;AACF,kBAAM,gBAAgB,IAAI,IAAI,QAAQ,UAAU;AAChD,0BAAc,cAAc,SAAS,QAAQ,cAAc,EAAE;AAC7D,0BAAc,GAAG,cAAc,MAAM,GAAG,WAAW;AAAA,UACrD,QAAQ;AAAA,UAER;AAAA,QACF;AACA,sBAAc,eAAe,QAAQ,IAAI;AAIzC,cAAM,sBAAsB,CAC1B,aACA,YACyB;AAEzB,cAAI,aAAa;AACf,mBAAO;AAAA,cACL,QAAQ,OAAO,WAAW;AACxB,oBAAI;AAEF,wBAAM,gBAAqB,CAAC;AAE5B,sBAAI,OAAO,WAAW,QAAW;AAC/B,kCAAc,SAAS,OAAO;AAAA,kBAChC;AACA,sBAAI,OAAO,aAAa,QAAW;AACjC,kCAAc,WAAW,OAAO;AAAA,kBAClC;AACA,sBAAI,OAAO,aAAa,QAAW;AAEjC,kCAAc,WAAW;AAAA,sBACvB,GAAG,cAAc;AAAA,sBACjB,UAAU,OAAO;AAAA,sBACjB,iBAAiB,OAAO;AAAA,oBAC1B;AAAA,kBACF;AACA,sBAAI,OAAO,WAAW,QAAW;AAC/B,kCAAc,SAAS,OAAO;AAAA,kBAChC;AACA,sBAAI,OAAO,UAAU,QAAW;AAC9B,kCAAc,QAAQ,OAAO;AAAA,kBAC/B;AAEA,wBAAM,YAAY,UAAU,YAAY,aAAa;AACrD,0BAAQ,IAAI,2CAA2C;AAAA,oBACrD,OAAO;AAAA,oBACP,UAAU;AAAA,oBACV,SAAS,OAAO,KAAK,aAAa;AAAA,kBACpC,CAAC;AAAA,gBACH,SAAS,OAAY;AACnB,0BAAQ,KAAK,oDAAoD;AAAA,oBAC/D,OAAO;AAAA,oBACP,UAAU;AAAA,oBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,kBACvC,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,cACA,KAAK,YAAY;AACf,oBAAI;AAEF,sBAAI,aAAa;AAEf,0BAAM,aAAa;AACnB,0BAAMC,gBAAe,QAAQ,IAAI;AAEjC,+BAAW,cAAc,CAAC,YAAYA,aAAY,EAAE,OAAO,OAAO,GAAG;AACnE,0BAAI;AACF,8BAAMD,UAAS,MAAM,OAAO;AAC5B,4BAAIA,SAAQ,QAAQ;AAClB,iCAAO,MAAMA,QAAO,OAAO,UAAU;AAAA,wBACvC;AAAA,sBACF,QAAQ;AAAA,sBAER;AAAA,oBACF;AAAA,kBACF;AACA,yBAAO;AAAA,gBACT,SAAS,OA
AY;AACnB,0BAAQ,KAAK,iDAAiD;AAAA,oBAC5D,OAAO;AAAA,oBACP,UAAU;AAAA,oBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,kBACvC,CAAC;AACD,yBAAO;AAAA,gBACT;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAGA,cAAI,CAAC,SAAS;AACZ,mBAAO;AAAA,UACT;AAGA,iBAAO;AAAA,YACL,QAAQ,OAAO,WAAW;AACxB,kBAAI;AAEF,sBAAM,gBAAqB,EAAE,OAAO,YAAY,UAAU,GAAG;AAE7D,oBAAI,OAAO,WAAW,QAAW;AAC/B,gCAAc,SAAS,OAAO;AAAA,gBAChC;AACA,oBAAI,OAAO,aAAa,QAAW;AACjC,gCAAc,WAAW,OAAO;AAAA,gBAClC;AACA,oBAAI,OAAO,aAAa,QAAW;AAEjC,gCAAc,WAAW;AAAA,oBACvB,GAAG,cAAc;AAAA,oBACjB,UAAU,OAAO;AAAA,oBACjB,iBAAiB,OAAO;AAAA,kBAC1B;AAAA,gBACF;AACA,oBAAI,OAAO,WAAW,QAAW;AAC/B,gCAAc,SAAS,OAAO;AAAA,gBAChC;AACA,oBAAI,OAAO,UAAU,QAAW;AAC9B,gCAAc,QAAQ,OAAO;AAAA,gBAC/B;AAEA,sBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,kBAChD,QAAQ;AAAA,kBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,kBAC9C,MAAM,KAAK,UAAU,aAAa;AAAA,gBACpC,CAAC;AACD,oBAAI,CAAC,SAAS,IAAI;AAChB,wBAAM,IAAI,MAAM,4BAA4B,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,gBACtF;AACA,wBAAQ,IAAI,sCAAsC;AAAA,kBAChD,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,SAAS,OAAO,KAAK,aAAa;AAAA,gBACpC,CAAC;AAAA,cACH,SAAS,OAAY;AACnB,wBAAQ,KAAK,+CAA+C;AAAA,kBAC1D,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,gBACvC,CAAC;AAAA,cACH;AAAA,YACF;AAAA,YACA,KAAK,YAAY;AACf,kBAAI;AAEF,sBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,IAAI,EAAE,IAAI,UAAU,IAAI;AAAA,kBAC7D,QAAQ;AAAA,kBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,gBAChD,CAAC;AAED,oBAAI,CAAC,SAAS,IAAI;AAChB,sBAAI,SAAS,WAAW,KAAK;AAC3B,2BAAO;AAAA,kBACT;AACA,wBAAM,IAAI,MAAM,yBAAyB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,gBACnF;AAEA,uBAAO,MAAM,SAAS,KAAK;AAAA,cAC7B,SAAS,OAAY;AACnB,wBAAQ,KAAK,4CAA4C;AAAA,kBACvD,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,gBACvC,CAAC;AACD,uBAAO;AAAA,cACT;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,WAAW,oBAAoB,gBAAgB,WAAW;AAGhE,YAAI,gBAAgB,QAAQ;AAC1B,cAAI;AACF,kBAAM,eAAe,OAAO,YAAY;AAAA,cACtC,OAAO;AAAA,cACP,UAAU;AAAA,cACV,QAAQ;AAAA,cACR,OAAO;AAAA,cACP,UAAU,QAAQ,YAAY,CAAC;AAAA,YACjC,CAAC;AAAA,UACH,SAAS,OAAY;AACnB,oBAAQ,KAAK,iDAAiD;AAAA,cAC5D,OAAO;AAAA,cACP,UAAU;AAAA,cACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,YACvC,CAAC;AAAA,UAEH;AAAA,QACF;AAGA,cAAM,iBAAiB;AAAA,UACrB,OAAO;AAAA,UACP,UAAU;AAAA,UACV,GAAI,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,QACjC;AAEA,YAAI;AAEF,cAAI,UAAU;AACZ,kBAAM,SAAS,OAAO,EAAE,QAAQ,UAAU,CAAC;AAAA,UAC7C;AAEA,gBAAM,SAAS,MAAM,cAAc,SAAS,aAAa,cAAc;AAGvE,cAAI,UAAU;AACZ,kBAAM,SAAS,OAAO,EAAE,QAAQ,aAAa,OAAO,CAAC;AAAA,UACvD;AAGA,cAAI,QAAQ,YAAY;AACtB,gBAAI;AACF,oBAAM,MAAM,QAAQ,YAAY;AAAA,gBAC9B,QAAQ;AAAA,gBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,gBAC9C,MAAM,KAAK,UAAU;AAAA,kBACnB,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,QAAQ;AAAA,kBACR;AAAA,kBACA,UAAU,QAAQ;AAAA,gBACpB,CAAC;AAAA,cACH,CAAC;AAAA,YACH,SAAS,OAAO;AACd,sBAAQ,KAAK,uCAAuC,KAAK;AAAA,YAC3D;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,WAAW,SAAS,KAAK,IAAI,CAAC;AAAA,YAC9B,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,QACF,SAAS,OAAY;AAEnB,cAAI,UAAU;AACZ,kBAAM,SAAS,OAAO;AAAA,cACpB,QAAQ;AAAA,cACR,OAAO;AAAA,gBACL,SAAS,MAAM,WAAW;AAAA,gBAC1B,OAAO,MAAM;AAAA,gBACb,MAAM,MAAM,QAAQ;AAAA,cACtB;AAAA,YACF,CAAC;AAAA,UACH;AAGA,cAAI,QAAQ,YAAY;AACtB,gBAAI;AACF,oBAAM,MAAM,QAAQ,YAAY;AAAA,gBAC9B,QAAQ;AAAA,gBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,gBAC9C,MAAM,KAAK,UAAU;AAAA,kBACnB,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,QAAQ;AAAA,kBACR,OAAO;AAAA,oBACL,SAAS,MAAM,WAAW;AAAA,oBAC1B,OAAO,MAAM;AAAA,oBACb,MAAM,MAAM,QAAQ;AAAA,kBACtB;AAAA,kBACA,UAAU,QAAQ;AAAA,gBACpB,CAAC;AAAA,cACH,CAAC;AAAA,YACH,SAAS,cAAc;AACrB,sBAAQ,KAAK,6CAA6C,YAAY;AAAA,YACxE;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAGA,aAAO,SAAS,IAAI,OAAO,aAAa,OAAO;AAAA,IACjD;AAAA,EACF;AAEA,SAAO;AACT;AASO,SAAS,uBACd,OACA;AACA,SAAO,oBAAoB,MAAM,SAAS,MAAM,YAAY;AAC9D;","names":["module","explicitPath"]}
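The `sourcesContent` embedded in the source map above carries the package's original TypeScript, including the JSDoc for `createWorker`, `WorkerConfig`, and `DispatchOptions`. Below is a minimal sketch of a worker written against that API, assembled from those JSDoc examples; the file path, worker id, schema fields, and handler body are illustrative assumptions rather than anything shipped in the package.

```typescript
// workers/scrape.ts (illustrative path)
import { createWorker, type WorkerConfig } from '@microfox/ai-worker';
import { z } from 'zod';

const InputSchema = z.object({
  url: z.string().url(),
  timeout: z.number().optional().default(5000), // optional pre-parse; default applied before the handler runs
});
const OutputSchema = z.object({ title: z.string() });
type Output = z.infer<typeof OutputSchema>;

// Deployment settings exported as a separate const, as the JSDoc recommends;
// the CLI extracts this from the export, so it is not passed to createWorker().
export const workerConfig: WorkerConfig = {
  timeout: 900,
  memorySize: 2048,
  schedule: 'rate(2 hours)',
};

export const worker = createWorker<typeof InputSchema, Output>({
  id: 'scrape-title', // illustrative worker id
  inputSchema: InputSchema,
  outputSchema: OutputSchema,
  handler: async ({ input, ctx }) => {
    // ctx.jobStore is present when a job store is configured
    // (MongoDB in Lambda, direct or HTTP store in local mode).
    await ctx.jobStore?.update({ progress: 10, progressMessage: 'fetching' });
    const res = await fetch(input.url, { signal: AbortSignal.timeout(input.timeout) });
    const html = await res.text();
    const title = html.match(/<title>([^<]*)<\/title>/i)?.[1] ?? '';
    return { title };
  },
});

// Elsewhere (e.g. an API route): dispatch accepts the pre-parse input shape,
// so timeout stays optional here.
export async function startScrape(url: string) {
  return worker.dispatch({ url }, { metadata: { source: 'example' } });
}
```

With `mode: 'auto'` (the default), `dispatch()` runs the handler inline when `NODE_ENV === 'development'` and `WORKERS_LOCAL_MODE` is not `'false'`; otherwise it POSTs to the workers service, which enqueues the job to SQS. Either way it resolves to `{ messageId, status: 'queued', jobId }`.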
package/dist/index.mjs ADDED
@@ -0,0 +1,313 @@
1
+ import {
2
+ dispatch,
3
+ dispatchLocal
4
+ } from "./chunk-FQCZSXDI.mjs";
5
+ import {
6
+ clearWorkersConfigCache,
7
+ getWorkersConfig,
8
+ resolveQueueUrl
9
+ } from "./chunk-ZYYWZ3PR.mjs";
10
+ import {
11
+ createLambdaHandler
12
+ } from "./chunk-WVR4JVWK.mjs";
13
+ import {
14
+ __require
15
+ } from "./chunk-BJTO5JO5.mjs";
16
+
17
+ // src/index.ts
18
+ function createWorker(config) {
19
+ const { id, inputSchema, outputSchema, handler } = config;
20
+ const agent = {
21
+ id,
22
+ handler,
23
+ inputSchema,
24
+ outputSchema,
25
+ async dispatch(input, options) {
26
+ const mode = options.mode ?? "auto";
27
+ const envWantsLocal = process.env.NODE_ENV === "development" && process.env.WORKERS_LOCAL_MODE !== "false";
28
+ const isLocal = mode === "local" || mode === "auto" && envWantsLocal;
29
+ if (isLocal) {
30
+ const parsedInput = inputSchema.parse(input);
31
+ const localJobId = options.jobId || `local-${Date.now()}`;
32
+ let directJobStore = null;
33
+ const nextJsPathAlias = "@/app/api/workflows/stores/jobStore";
34
+ const explicitPath = process.env.WORKER_JOB_STORE_MODULE_PATH;
35
+ const resolveJobStore = async () => {
36
+ try {
37
+ const module = await import(nextJsPathAlias);
38
+ if (module?.updateJob) {
39
+ return { updateJob: module.updateJob, setJob: module.setJob };
40
+ }
41
+ } catch {
42
+ }
43
+ if (explicitPath) {
44
+ try {
45
+ const module = await import(explicitPath).catch(() => {
46
+ return __require(explicitPath);
47
+ });
48
+ if (module?.updateJob) {
49
+ return { updateJob: module.updateJob, setJob: module.setJob };
50
+ }
51
+ } catch {
52
+ }
53
+ }
54
+ return null;
55
+ };
56
+ directJobStore = await resolveJobStore();
57
+ if (directJobStore) {
58
+ console.log("[Worker] Using direct job store in local mode (no HTTP needed)");
59
+ }
60
+ let jobStoreUrl;
61
+ if (options.webhookUrl) {
62
+ try {
63
+ const webhookUrlObj = new URL(options.webhookUrl);
64
+ jobStoreUrl = webhookUrlObj.pathname.replace(/\/webhook$/, "");
65
+ jobStoreUrl = `${webhookUrlObj.origin}${jobStoreUrl}`;
66
+ } catch {
67
+ }
68
+ }
69
+ jobStoreUrl = jobStoreUrl || process.env.WORKER_JOB_STORE_URL;
70
+ const createLocalJobStore = (directStore, httpUrl) => {
71
+ if (directStore) {
72
+ return {
73
+ update: async (update) => {
74
+ try {
75
+ const updatePayload = {};
76
+ if (update.status !== void 0) {
77
+ updatePayload.status = update.status;
78
+ }
79
+ if (update.metadata !== void 0) {
80
+ updatePayload.metadata = update.metadata;
81
+ }
82
+ if (update.progress !== void 0) {
83
+ updatePayload.metadata = {
84
+ ...updatePayload.metadata,
85
+ progress: update.progress,
86
+ progressMessage: update.progressMessage
87
+ };
88
+ }
89
+ if (update.output !== void 0) {
90
+ updatePayload.output = update.output;
91
+ }
92
+ if (update.error !== void 0) {
93
+ updatePayload.error = update.error;
94
+ }
95
+ await directStore.updateJob(localJobId, updatePayload);
96
+ console.log("[Worker] Local job updated (direct DB):", {
97
+ jobId: localJobId,
98
+ workerId: id,
99
+ updates: Object.keys(updatePayload)
100
+ });
101
+ } catch (error) {
102
+ console.warn("[Worker] Failed to update local job (direct DB):", {
103
+ jobId: localJobId,
104
+ workerId: id,
105
+ error: error?.message || String(error)
106
+ });
107
+ }
108
+ },
109
+ get: async () => {
110
+ try {
111
+ if (directStore) {
112
+ const nextJsPath = "@/app/api/workflows/stores/jobStore";
113
+ const explicitPath2 = process.env.WORKER_JOB_STORE_MODULE_PATH;
114
+ for (const importPath of [nextJsPath, explicitPath2].filter(Boolean)) {
115
+ try {
116
+ const module = await import(importPath);
117
+ if (module?.getJob) {
118
+ return await module.getJob(localJobId);
119
+ }
120
+ } catch {
121
+ }
122
+ }
123
+ }
124
+ return null;
125
+ } catch (error) {
126
+ console.warn("[Worker] Failed to get local job (direct DB):", {
127
+ jobId: localJobId,
128
+ workerId: id,
129
+ error: error?.message || String(error)
130
+ });
131
+ return null;
132
+ }
133
+ }
134
+ };
135
+ }
136
+ if (!httpUrl) {
137
+ return void 0;
138
+ }
139
+ return {
140
+ update: async (update) => {
141
+ try {
142
+ const updatePayload = { jobId: localJobId, workerId: id };
143
+ if (update.status !== void 0) {
144
+ updatePayload.status = update.status;
145
+ }
146
+ if (update.metadata !== void 0) {
147
+ updatePayload.metadata = update.metadata;
148
+ }
149
+ if (update.progress !== void 0) {
150
+ updatePayload.metadata = {
151
+ ...updatePayload.metadata,
152
+ progress: update.progress,
153
+ progressMessage: update.progressMessage
154
+ };
155
+ }
156
+ if (update.output !== void 0) {
157
+ updatePayload.output = update.output;
158
+ }
159
+ if (update.error !== void 0) {
160
+ updatePayload.error = update.error;
161
+ }
162
+ const response = await fetch(`${httpUrl}/update`, {
163
+ method: "POST",
164
+ headers: { "Content-Type": "application/json" },
165
+ body: JSON.stringify(updatePayload)
166
+ });
167
+ if (!response.ok) {
168
+ throw new Error(`Job store update failed: ${response.status} ${response.statusText}`);
169
+ }
170
+ console.log("[Worker] Local job updated (HTTP):", {
171
+ jobId: localJobId,
172
+ workerId: id,
173
+ updates: Object.keys(updatePayload)
174
+ });
175
+ } catch (error) {
176
+ console.warn("[Worker] Failed to update local job (HTTP):", {
177
+ jobId: localJobId,
178
+ workerId: id,
179
+ error: error?.message || String(error)
180
+ });
181
+ }
182
+ },
183
+ get: async () => {
184
+ try {
185
+ const response = await fetch(`${httpUrl}/${id}/${localJobId}`, {
186
+ method: "GET",
187
+ headers: { "Content-Type": "application/json" }
188
+ });
189
+ if (!response.ok) {
190
+ if (response.status === 404) {
191
+ return null;
192
+ }
193
+ throw new Error(`Job store get failed: ${response.status} ${response.statusText}`);
194
+ }
195
+ return await response.json();
196
+ } catch (error) {
197
+ console.warn("[Worker] Failed to get local job (HTTP):", {
198
+ jobId: localJobId,
199
+ workerId: id,
200
+ error: error?.message || String(error)
201
+ });
202
+ return null;
203
+ }
204
+ }
205
+ };
206
+ };
207
+ const jobStore = createLocalJobStore(directJobStore, jobStoreUrl);
208
+ if (directJobStore?.setJob) {
209
+ try {
210
+ await directJobStore.setJob(localJobId, {
211
+ jobId: localJobId,
212
+ workerId: id,
213
+ status: "queued",
214
+ input: parsedInput,
215
+ metadata: options.metadata || {}
216
+ });
217
+ } catch (error) {
218
+ console.warn("[Worker] Failed to create initial job record:", {
219
+ jobId: localJobId,
220
+ workerId: id,
221
+ error: error?.message || String(error)
222
+ });
223
+ }
224
+ }
225
+ const handlerContext = {
226
+ jobId: localJobId,
227
+ workerId: id,
228
+ ...jobStore ? { jobStore } : {}
229
+ };
230
+ try {
231
+ if (jobStore) {
232
+ await jobStore.update({ status: "running" });
233
+ }
234
+ const output = await dispatchLocal(handler, parsedInput, handlerContext);
235
+ if (jobStore) {
236
+ await jobStore.update({ status: "completed", output });
237
+ }
238
+ if (options.webhookUrl) {
239
+ try {
240
+ await fetch(options.webhookUrl, {
241
+ method: "POST",
242
+ headers: { "Content-Type": "application/json" },
243
+ body: JSON.stringify({
244
+ jobId: localJobId,
245
+ workerId: id,
246
+ status: "success",
247
+ output,
248
+ metadata: options.metadata
249
+ })
250
+ });
251
+ } catch (error) {
252
+ console.warn("[Worker] Local webhook call failed:", error);
253
+ }
254
+ }
255
+ return {
256
+ messageId: `local-${Date.now()}`,
257
+ status: "queued",
258
+ jobId: localJobId
259
+ };
260
+ } catch (error) {
261
+ if (jobStore) {
262
+ await jobStore.update({
263
+ status: "failed",
264
+ error: {
265
+ message: error.message || "Unknown error",
266
+ stack: error.stack,
267
+ name: error.name || "Error"
268
+ }
269
+ });
270
+ }
271
+ if (options.webhookUrl) {
272
+ try {
273
+ await fetch(options.webhookUrl, {
274
+ method: "POST",
275
+ headers: { "Content-Type": "application/json" },
276
+ body: JSON.stringify({
277
+ jobId: localJobId,
278
+ workerId: id,
279
+ status: "error",
280
+ error: {
281
+ message: error.message || "Unknown error",
282
+ stack: error.stack,
283
+ name: error.name || "Error"
284
+ },
285
+ metadata: options.metadata
286
+ })
287
+ });
288
+ } catch (webhookError) {
289
+ console.warn("[Worker] Local error webhook call failed:", webhookError);
290
+ }
291
+ }
292
+ throw error;
293
+ }
294
+ }
295
+ return dispatch(id, input, inputSchema, options);
296
+ }
297
+ };
298
+ return agent;
299
+ }
300
+ function createLambdaEntrypoint(agent) {
301
+ return createLambdaHandler(agent.handler, agent.outputSchema);
302
+ }
303
+ export {
304
+ clearWorkersConfigCache,
305
+ createLambdaEntrypoint,
306
+ createLambdaHandler,
307
+ createWorker,
308
+ dispatch,
309
+ dispatchLocal,
310
+ getWorkersConfig,
311
+ resolveQueueUrl
312
+ };
313
+ //# sourceMappingURL=index.mjs.map
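For reference, the local-mode path compiled above can be exercised roughly as follows. This is a minimal sketch, not the package's documented example: the worker id `summarize`, the schemas, and the handler body are illustrative, and the handler signature `(parsedInput, context)` is assumed from the `dispatchLocal(handler, parsedInput, handlerContext)` call in the bundle. The `createWorker`/`dispatch` shapes, the `mode: 'local'` option, and the optional `ctx.jobStore` come from the code shown here.

```typescript
import { z } from 'zod';
import { createWorker } from '@microfox/ai-worker';

// Illustrative schemas and worker id; only the createWorker/dispatch shapes come from this bundle.
const InputSchema = z.object({
  url: z.string().url(),
  maxWords: z.number().optional().default(120),
});
const OutputSchema = z.object({ summary: z.string() });

export const summarize = createWorker<typeof InputSchema, z.infer<typeof OutputSchema>>({
  id: 'summarize',
  inputSchema: InputSchema,
  outputSchema: OutputSchema,
  // Assumed handler signature: (parsedInput, context), per dispatchLocal(handler, parsedInput, handlerContext).
  handler: async (input, ctx) => {
    // ctx.jobStore is only defined when a direct store module or WORKER_JOB_STORE_URL is available.
    await ctx.jobStore?.update({ progress: 50, progressMessage: 'summarizing' });
    return { summary: `(${input.maxWords} words max) summary of ${input.url}` };
  },
});

// Elsewhere, e.g. in an API route. With NODE_ENV=development and WORKERS_LOCAL_MODE !== 'false'
// (or mode: 'local' as here), dispatch() parses the input, runs the handler in-process,
// and resolves with a local job id.
const result = await summarize.dispatch(
  { url: 'https://example.com' },
  { mode: 'local', metadata: { requestedBy: 'docs-example' } }
);
console.log(result.jobId, result.status); // e.g. "local-1712345678901", "queued"
```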
package/dist/index.mjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/index.ts"],"sourcesContent":["/**\n * @microfox/ai-worker\n * Worker runtime for ai-router - SQS-based async agent execution\n */\n\nimport { dispatch, dispatchLocal, type DispatchOptions, type DispatchResult } from './client.js';\nimport { createLambdaHandler, type WorkerHandler, type JobStore } from './handler.js';\nimport type { ZodType, z } from 'zod';\n\nexport * from './client.js';\nexport * from './handler.js';\nexport * from './config.js';\n\n/**\n * Schedule event configuration for a worker.\n * Supports both simple rate/cron strings and full configuration objects.\n * \n * @example Simple rate/cron\n * ```typescript\n * schedule: 'rate(2 hours)'\n * // or\n * schedule: 'cron(0 12 * * ? *)'\n * ```\n * \n * @example Full configuration\n * ```typescript\n * schedule: {\n * rate: 'rate(10 minutes)',\n * enabled: true,\n * input: { key1: 'value1' }\n * }\n * ```\n * \n * @example Multiple schedules\n * ```typescript\n * schedule: [\n * 'rate(2 hours)',\n * { rate: 'cron(0 12 * * ? *)', enabled: false }\n * ]\n * ```\n */\nexport interface ScheduleEventConfig {\n /**\n * Schedule rate using either rate() or cron() syntax.\n * Can be a string or array of strings for multiple schedules.\n * \n * @example 'rate(2 hours)' or 'cron(0 12 * * ? *)'\n * @example ['cron(0 0/4 ? * MON-FRI *)', 'cron(0 2 ? * SAT-SUN *)']\n */\n rate: string | string[];\n /**\n * Whether the schedule is enabled (default: true).\n */\n enabled?: boolean;\n /**\n * Input payload to pass to the function.\n */\n input?: Record<string, any>;\n /**\n * JSONPath expression to select part of the event data as input.\n */\n inputPath?: string;\n /**\n * Input transformer configuration for custom input mapping.\n */\n inputTransformer?: {\n inputPathsMap?: Record<string, string>;\n inputTemplate?: string;\n };\n /**\n * Name of the schedule event.\n */\n name?: string;\n /**\n * Description of the schedule event.\n */\n description?: string;\n /**\n * Method to use: 'eventBus' (default) or 'scheduler'.\n * Use 'scheduler' for higher limits (1M events vs 300).\n */\n method?: 'eventBus' | 'scheduler';\n /**\n * Timezone for the schedule (only used with method: 'scheduler').\n * @example 'America/New_York'\n */\n timezone?: string;\n}\n\nexport type ScheduleConfig = \n | string \n | ScheduleEventConfig \n | (string | ScheduleEventConfig)[];\n\n/**\n * Configuration for a worker's Lambda function deployment.\n * \n * **Best Practice**: Export this as a separate const from your worker file:\n * ```typescript\n * export const workerConfig: WorkerConfig = {\n * timeout: 900,\n * memorySize: 2048,\n * layers: ['arn:aws:lambda:${aws:region}:${aws:accountId}:layer:ffmpeg:1'],\n * schedule: 'rate(2 hours)',\n * };\n * ```\n * \n * The CLI will automatically extract it from the export. 
You do not need to pass it to `createWorker()`.\n */\nexport interface WorkerConfig {\n /**\n * Lambda function timeout in seconds (max 900).\n */\n timeout?: number;\n /**\n * Lambda function memory size in MB (128-10240).\n */\n memorySize?: number;\n /**\n * Optional Lambda layers ARNs to attach to this worker function.\n *\n * This is primarily used by @microfox/ai-worker-cli when generating serverless.yml.\n * Supports CloudFormation pseudo-parameters like ${aws:region} and ${aws:accountId}.\n *\n * Example:\n * layers: ['arn:aws:lambda:${aws:region}:${aws:accountId}:layer:ffmpeg:1']\n */\n layers?: string[];\n /**\n * Schedule events configuration for this worker.\n * Allows multiple schedule events to be attached to the same function.\n * \n * @example Simple rate\n * ```typescript\n * schedule: 'rate(2 hours)'\n * ```\n * \n * @example Multiple schedules\n * ```typescript\n * schedule: [\n * 'rate(2 hours)',\n * { rate: 'cron(0 12 * * ? *)', enabled: true, input: { key: 'value' } }\n * ]\n * ```\n * \n * @example Using scheduler method with timezone\n * ```typescript\n * schedule: {\n * method: 'scheduler',\n * rate: 'cron(0 0/4 ? * MON-FRI *)',\n * timezone: 'America/New_York',\n * input: { key1: 'value1' }\n * }\n * ```\n */\n schedule?: ScheduleConfig;\n\n /**\n * SQS queue settings for this worker (used by @microfox/ai-worker-cli when generating serverless.yml).\n *\n * Notes:\n * - To effectively disable retries, set `maxReceiveCount: 1` (requires DLQ; the CLI will create one).\n * - SQS does not support `maxReceiveCount: 0`.\n * - `messageRetentionPeriod` is in seconds (max 1209600 = 14 days).\n */\n sqs?: {\n /**\n * How many receives before sending to DLQ.\n * Use 1 to avoid retries.\n */\n maxReceiveCount?: number;\n /**\n * How long messages are retained in the main queue (seconds).\n */\n messageRetentionPeriod?: number;\n /**\n * Visibility timeout for the main queue (seconds).\n * If not set, CLI defaults to (worker timeout + 60s).\n */\n visibilityTimeout?: number;\n /**\n * DLQ message retention period (seconds).\n * Defaults to `messageRetentionPeriod` (or 14 days).\n */\n deadLetterMessageRetentionPeriod?: number;\n };\n}\n\nexport interface WorkerAgentConfig<INPUT_SCHEMA extends ZodType<any>, OUTPUT> {\n id: string;\n inputSchema: INPUT_SCHEMA;\n outputSchema: ZodType<OUTPUT>;\n handler: WorkerHandler<z.infer<INPUT_SCHEMA>, OUTPUT>;\n /**\n * @deprecated Prefer exporting `workerConfig` as a separate const from your worker file.\n * The CLI will automatically extract it from the export. 
This parameter is kept for backward compatibility.\n */\n workerConfig?: WorkerConfig;\n}\n\nexport interface WorkerAgent<INPUT_SCHEMA extends ZodType<any>, OUTPUT> {\n id: string;\n dispatch: (\n input: z.input<INPUT_SCHEMA>,\n options: DispatchOptions\n ) => Promise<DispatchResult>;\n handler: WorkerHandler<z.infer<INPUT_SCHEMA>, OUTPUT>;\n inputSchema: INPUT_SCHEMA;\n outputSchema: ZodType<OUTPUT>;\n workerConfig?: WorkerConfig;\n}\n\n/**\n * Creates a worker agent that can be dispatched to SQS/Lambda.\n *\n * In development mode (NODE_ENV === 'development' and WORKERS_LOCAL_MODE !== 'false'),\n * dispatch() will run the handler immediately in the same process.\n *\n * In production, dispatch() sends a message to SQS which triggers a Lambda function.\n *\n * @template INPUT_SCHEMA - The Zod schema type (e.g., `typeof InputSchema`).\n * Used to derive both:\n * - Pre-parse input type via `z.input<INPUT_SCHEMA>` for `dispatch()` (preserves optional fields)\n * - Parsed input type via `z.infer<INPUT_SCHEMA>` for handler (defaults applied)\n * @template OUTPUT - The output type returned by the handler. Use `z.infer<typeof OutputSchema>`.\n *\n * @param config - Worker agent configuration\n * @returns A worker agent object with a dispatch method\n *\n * @example\n * ```typescript\n * const InputSchema = z.object({\n * url: z.string().url(),\n * timeout: z.number().optional().default(5000), // optional with default\n * });\n *\n * export const worker = createWorker<typeof InputSchema, Output>({\n * // dispatch() accepts { url: string, timeout?: number } (pre-parse, optional preserved)\n * // handler receives { url: string, timeout: number } (parsed, default applied)\n * });\n * ```\n */\nexport function createWorker<INPUT_SCHEMA extends ZodType<any>, OUTPUT>(\n config: WorkerAgentConfig<INPUT_SCHEMA, OUTPUT>\n): WorkerAgent<INPUT_SCHEMA, OUTPUT> {\n const { id, inputSchema, outputSchema, handler } = config;\n\n const agent: WorkerAgent<INPUT_SCHEMA, OUTPUT> = {\n id,\n handler,\n inputSchema,\n outputSchema,\n\n async dispatch(input: z.input<INPUT_SCHEMA>, options: DispatchOptions): Promise<DispatchResult> {\n const mode = options.mode ?? 
'auto';\n const envWantsLocal =\n process.env.NODE_ENV === 'development' &&\n process.env.WORKERS_LOCAL_MODE !== 'false';\n // Check if we're in local development mode\n const isLocal = mode === 'local' || (mode === 'auto' && envWantsLocal);\n\n if (isLocal) {\n // Local mode: run handler immediately\n // Parse input to apply defaults and get the final parsed type\n const parsedInput = inputSchema.parse(input);\n const localJobId = options.jobId || `local-${Date.now()}`;\n \n // Try to get direct job store access in local mode (same process as Next.js app)\n // This allows direct DB updates without needing HTTP/webhook URLs\n let directJobStore: {\n updateJob: (jobId: string, data: any) => Promise<void>;\n setJob?: (jobId: string, data: any) => Promise<void>;\n } | null = null;\n\n // Path constants for job store imports\n const nextJsPathAlias = '@/app/api/workflows/stores/jobStore';\n const explicitPath = process.env.WORKER_JOB_STORE_MODULE_PATH;\n\n // Reliable approach: try Next.js path alias first, then explicit env var\n // The @/ alias works at runtime in Next.js context\n const resolveJobStore = async () => {\n // Option 1: Try Next.js path alias (works in Next.js runtime context)\n try {\n const module = await import(nextJsPathAlias);\n if (module?.updateJob) {\n return { updateJob: module.updateJob, setJob: module.setJob };\n }\n } catch {\n // Path alias not available (not in Next.js context or alias not configured)\n }\n\n // Option 2: Use explicit env var if provided (for custom setups)\n if (explicitPath) {\n try {\n const module = await import(explicitPath).catch(() => {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n return require(explicitPath);\n });\n if (module?.updateJob) {\n return { updateJob: module.updateJob, setJob: module.setJob };\n }\n } catch {\n // Explicit path failed\n }\n }\n\n return null;\n };\n\n directJobStore = await resolveJobStore();\n if (directJobStore) {\n console.log('[Worker] Using direct job store in local mode (no HTTP needed)');\n }\n\n // Derive job store URL from webhook URL or environment (fallback for HTTP mode)\n let jobStoreUrl: string | undefined;\n if (options.webhookUrl) {\n try {\n const webhookUrlObj = new URL(options.webhookUrl);\n jobStoreUrl = webhookUrlObj.pathname.replace(/\\/webhook$/, '');\n jobStoreUrl = `${webhookUrlObj.origin}${jobStoreUrl}`;\n } catch {\n // Invalid URL, skip job store URL\n }\n }\n jobStoreUrl = jobStoreUrl || process.env.WORKER_JOB_STORE_URL;\n\n // Create job store interface for local mode\n // Prefer direct DB access, fallback to HTTP calls if needed\n const createLocalJobStore = (\n directStore: typeof directJobStore,\n httpUrl?: string\n ): JobStore | undefined => {\n // If we have direct job store access, use it (no HTTP needed)\n if (directStore) {\n return {\n update: async (update) => {\n try {\n // Build update payload\n const updatePayload: any = {};\n \n if (update.status !== undefined) {\n updatePayload.status = update.status;\n }\n if (update.metadata !== undefined) {\n updatePayload.metadata = update.metadata;\n }\n if (update.progress !== undefined) {\n // Merge progress into metadata\n updatePayload.metadata = {\n ...updatePayload.metadata,\n progress: update.progress,\n progressMessage: update.progressMessage,\n };\n }\n if (update.output !== undefined) {\n updatePayload.output = update.output;\n }\n if (update.error !== undefined) {\n updatePayload.error = update.error;\n }\n\n await directStore.updateJob(localJobId, updatePayload);\n console.log('[Worker] Local 
job updated (direct DB):', {\n jobId: localJobId,\n workerId: id,\n updates: Object.keys(updatePayload),\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to update local job (direct DB):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n }\n },\n get: async () => {\n try {\n // Use the same direct store that has updateJob - it should also have getJob\n if (directStore) {\n // Try to import getJob from the same module\n const nextJsPath = '@/app/api/workflows/stores/jobStore';\n const explicitPath = process.env.WORKER_JOB_STORE_MODULE_PATH;\n \n for (const importPath of [nextJsPath, explicitPath].filter(Boolean)) {\n try {\n const module = await import(importPath!);\n if (module?.getJob) {\n return await module.getJob(localJobId);\n }\n } catch {\n // Continue\n }\n }\n }\n return null;\n } catch (error: any) {\n console.warn('[Worker] Failed to get local job (direct DB):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n return null;\n }\n },\n };\n }\n\n // Fallback to HTTP calls if no direct access\n if (!httpUrl) {\n return undefined;\n }\n\n // Use HTTP calls to update job store\n return {\n update: async (update) => {\n try {\n // Build update payload\n const updatePayload: any = { jobId: localJobId, workerId: id };\n \n if (update.status !== undefined) {\n updatePayload.status = update.status;\n }\n if (update.metadata !== undefined) {\n updatePayload.metadata = update.metadata;\n }\n if (update.progress !== undefined) {\n // Merge progress into metadata\n updatePayload.metadata = {\n ...updatePayload.metadata,\n progress: update.progress,\n progressMessage: update.progressMessage,\n };\n }\n if (update.output !== undefined) {\n updatePayload.output = update.output;\n }\n if (update.error !== undefined) {\n updatePayload.error = update.error;\n }\n\n const response = await fetch(`${httpUrl}/update`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(updatePayload),\n });\n if (!response.ok) {\n throw new Error(`Job store update failed: ${response.status} ${response.statusText}`);\n }\n console.log('[Worker] Local job updated (HTTP):', {\n jobId: localJobId,\n workerId: id,\n updates: Object.keys(updatePayload),\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to update local job (HTTP):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n }\n },\n get: async () => {\n try {\n // GET /api/workflows/workers/:workerId/:jobId\n const response = await fetch(`${httpUrl}/${id}/${localJobId}`, {\n method: 'GET',\n headers: { 'Content-Type': 'application/json' },\n });\n\n if (!response.ok) {\n if (response.status === 404) {\n return null;\n }\n throw new Error(`Job store get failed: ${response.status} ${response.statusText}`);\n }\n\n return await response.json();\n } catch (error: any) {\n console.warn('[Worker] Failed to get local job (HTTP):', {\n jobId: localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n return null;\n }\n },\n };\n };\n\n const jobStore = createLocalJobStore(directJobStore, jobStoreUrl);\n\n // Create initial job record if we have job store access\n if (directJobStore?.setJob) {\n try {\n await directJobStore.setJob(localJobId, {\n jobId: localJobId,\n workerId: id,\n status: 'queued',\n input: parsedInput,\n metadata: options.metadata || {},\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to create initial job record:', {\n jobId: 
localJobId,\n workerId: id,\n error: error?.message || String(error),\n });\n // Continue - job will still be created when status is updated\n }\n }\n\n // Create handler context with job store\n const handlerContext = {\n jobId: localJobId,\n workerId: id,\n ...(jobStore ? { jobStore } : {}),\n };\n\n try {\n // Update status to running before execution\n if (jobStore) {\n await jobStore.update({ status: 'running' });\n }\n\n const output = await dispatchLocal(handler, parsedInput, handlerContext);\n\n // Update status to completed before webhook\n if (jobStore) {\n await jobStore.update({ status: 'completed', output });\n }\n\n // Only send webhook if webhookUrl is provided\n if (options.webhookUrl) {\n try {\n await fetch(options.webhookUrl, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n jobId: localJobId,\n workerId: id,\n status: 'success',\n output,\n metadata: options.metadata,\n }),\n });\n } catch (error) {\n console.warn('[Worker] Local webhook call failed:', error);\n }\n }\n\n return {\n messageId: `local-${Date.now()}`,\n status: 'queued',\n jobId: localJobId,\n };\n } catch (error: any) {\n // Update status to failed before webhook\n if (jobStore) {\n await jobStore.update({\n status: 'failed',\n error: {\n message: error.message || 'Unknown error',\n stack: error.stack,\n name: error.name || 'Error',\n },\n });\n }\n\n // Only send error webhook if webhookUrl is provided\n if (options.webhookUrl) {\n try {\n await fetch(options.webhookUrl, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify({\n jobId: localJobId,\n workerId: id,\n status: 'error',\n error: {\n message: error.message || 'Unknown error',\n stack: error.stack,\n name: error.name || 'Error',\n },\n metadata: options.metadata,\n }),\n });\n } catch (webhookError) {\n console.warn('[Worker] Local error webhook call failed:', webhookError);\n }\n }\n throw error;\n }\n }\n\n // Production mode: dispatch to SQS\n return dispatch(id, input, inputSchema, options);\n },\n };\n\n return agent;\n}\n\n/**\n * Creates a Lambda handler entrypoint for a worker agent.\n * This is used by the deployment script to generate Lambda entrypoints.\n *\n * @param agent - The worker agent\n * @returns A Lambda handler function\n */\nexport function createLambdaEntrypoint<INPUT_SCHEMA extends ZodType<any>, OUTPUT>(\n agent: WorkerAgent<INPUT_SCHEMA, OUTPUT>\n) {\n return createLambdaHandler(agent.handler, 
agent.outputSchema);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAkPO,SAAS,aACd,QACmC;AACnC,QAAM,EAAE,IAAI,aAAa,cAAc,QAAQ,IAAI;AAEnD,QAAM,QAA2C;AAAA,IAC/C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IAEA,MAAM,SAAS,OAA8B,SAAmD;AAC9F,YAAM,OAAO,QAAQ,QAAQ;AAC7B,YAAM,gBACJ,QAAQ,IAAI,aAAa,iBACzB,QAAQ,IAAI,uBAAuB;AAErC,YAAM,UAAU,SAAS,WAAY,SAAS,UAAU;AAExD,UAAI,SAAS;AAGX,cAAM,cAAc,YAAY,MAAM,KAAK;AAC3C,cAAM,aAAa,QAAQ,SAAS,SAAS,KAAK,IAAI,CAAC;AAIvD,YAAI,iBAGO;AAGX,cAAM,kBAAkB;AACxB,cAAM,eAAe,QAAQ,IAAI;AAIjC,cAAM,kBAAkB,YAAY;AAElC,cAAI;AACF,kBAAM,SAAS,MAAM,OAAO;AAC5B,gBAAI,QAAQ,WAAW;AACrB,qBAAO,EAAE,WAAW,OAAO,WAAW,QAAQ,OAAO,OAAO;AAAA,YAC9D;AAAA,UACF,QAAQ;AAAA,UAER;AAGA,cAAI,cAAc;AAChB,gBAAI;AACF,oBAAM,SAAS,MAAM,OAAO,cAAc,MAAM,MAAM;AAEpD,uBAAO,UAAQ,YAAY;AAAA,cAC7B,CAAC;AACD,kBAAI,QAAQ,WAAW;AACrB,uBAAO,EAAE,WAAW,OAAO,WAAW,QAAQ,OAAO,OAAO;AAAA,cAC9D;AAAA,YACF,QAAQ;AAAA,YAER;AAAA,UACF;AAEA,iBAAO;AAAA,QACT;AAEA,yBAAiB,MAAM,gBAAgB;AACvC,YAAI,gBAAgB;AAClB,kBAAQ,IAAI,gEAAgE;AAAA,QAC9E;AAGA,YAAI;AACJ,YAAI,QAAQ,YAAY;AACtB,cAAI;AACF,kBAAM,gBAAgB,IAAI,IAAI,QAAQ,UAAU;AAChD,0BAAc,cAAc,SAAS,QAAQ,cAAc,EAAE;AAC7D,0BAAc,GAAG,cAAc,MAAM,GAAG,WAAW;AAAA,UACrD,QAAQ;AAAA,UAER;AAAA,QACF;AACA,sBAAc,eAAe,QAAQ,IAAI;AAIzC,cAAM,sBAAsB,CAC1B,aACA,YACyB;AAEzB,cAAI,aAAa;AACf,mBAAO;AAAA,cACL,QAAQ,OAAO,WAAW;AACxB,oBAAI;AAEF,wBAAM,gBAAqB,CAAC;AAE5B,sBAAI,OAAO,WAAW,QAAW;AAC/B,kCAAc,SAAS,OAAO;AAAA,kBAChC;AACA,sBAAI,OAAO,aAAa,QAAW;AACjC,kCAAc,WAAW,OAAO;AAAA,kBAClC;AACA,sBAAI,OAAO,aAAa,QAAW;AAEjC,kCAAc,WAAW;AAAA,sBACvB,GAAG,cAAc;AAAA,sBACjB,UAAU,OAAO;AAAA,sBACjB,iBAAiB,OAAO;AAAA,oBAC1B;AAAA,kBACF;AACA,sBAAI,OAAO,WAAW,QAAW;AAC/B,kCAAc,SAAS,OAAO;AAAA,kBAChC;AACA,sBAAI,OAAO,UAAU,QAAW;AAC9B,kCAAc,QAAQ,OAAO;AAAA,kBAC/B;AAEA,wBAAM,YAAY,UAAU,YAAY,aAAa;AACrD,0BAAQ,IAAI,2CAA2C;AAAA,oBACrD,OAAO;AAAA,oBACP,UAAU;AAAA,oBACV,SAAS,OAAO,KAAK,aAAa;AAAA,kBACpC,CAAC;AAAA,gBACH,SAAS,OAAY;AACnB,0BAAQ,KAAK,oDAAoD;AAAA,oBAC/D,OAAO;AAAA,oBACP,UAAU;AAAA,oBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,kBACvC,CAAC;AAAA,gBACH;AAAA,cACF;AAAA,cACA,KAAK,YAAY;AACf,oBAAI;AAEF,sBAAI,aAAa;AAEf,0BAAM,aAAa;AACnB,0BAAMA,gBAAe,QAAQ,IAAI;AAEjC,+BAAW,cAAc,CAAC,YAAYA,aAAY,EAAE,OAAO,OAAO,GAAG;AACnE,0BAAI;AACF,8BAAM,SAAS,MAAM,OAAO;AAC5B,4BAAI,QAAQ,QAAQ;AAClB,iCAAO,MAAM,OAAO,OAAO,UAAU;AAAA,wBACvC;AAAA,sBACF,QAAQ;AAAA,sBAER;AAAA,oBACF;AAAA,kBACF;AACA,yBAAO;AAAA,gBACT,SAAS,OAAY;AACnB,0BAAQ,KAAK,iDAAiD;AAAA,oBAC5D,OAAO;AAAA,oBACP,UAAU;AAAA,oBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,kBACvC,CAAC;AACD,yBAAO;AAAA,gBACT;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAGA,cAAI,CAAC,SAAS;AACZ,mBAAO;AAAA,UACT;AAGA,iBAAO;AAAA,YACL,QAAQ,OAAO,WAAW;AACxB,kBAAI;AAEF,sBAAM,gBAAqB,EAAE,OAAO,YAAY,UAAU,GAAG;AAE7D,oBAAI,OAAO,WAAW,QAAW;AAC/B,gCAAc,SAAS,OAAO;AAAA,gBAChC;AACA,oBAAI,OAAO,aAAa,QAAW;AACjC,gCAAc,WAAW,OAAO;AAAA,gBAClC;AACA,oBAAI,OAAO,aAAa,QAAW;AAEjC,gCAAc,WAAW;AAAA,oBACvB,GAAG,cAAc;AAAA,oBACjB,UAAU,OAAO;AAAA,oBACjB,iBAAiB,OAAO;AAAA,kBAC1B;AAAA,gBACF;AACA,oBAAI,OAAO,WAAW,QAAW;AAC/B,gCAAc,SAAS,OAAO;AAAA,gBAChC;AACA,oBAAI,OAAO,UAAU,QAAW;AAC9B,gCAAc,QAAQ,OAAO;AAAA,gBAC/B;AAEA,sBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,WAAW;AAAA,kBAChD,QAAQ;AAAA,kBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,kBAC9C,MAAM,KAAK,UAAU,aAAa;AAAA,gBACpC,CAAC;AACD,oBAAI,CAAC,SAAS,IAAI;AAChB,wBAAM,IAAI,MAAM,4BAA4B,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,gBACtF;AACA,wBAAQ,IAAI,sCAAsC;AAAA,kBAChD,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,SAAS,OAAO,KAAK,aAAa;AAAA,gBACpC,CAAC;AAAA,cACH,SAAS,OAAY;AACnB,wBAAQ,KAAK,+CAA+C;AAAA,kBAC1D,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,gBACvC,CAAC;AAAA,cACH;AAAA,YACF;AAAA,YACA,KAAK,YAAY;AACf,kBAAI;AAEF,sBAAM,WAAW,MAAM,MAAM,GAAG,OAAO,IAAI,EAAE,IAAI,UAAU,IAAI;AAAA,kBA
C7D,QAAQ;AAAA,kBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,gBAChD,CAAC;AAED,oBAAI,CAAC,SAAS,IAAI;AAChB,sBAAI,SAAS,WAAW,KAAK;AAC3B,2BAAO;AAAA,kBACT;AACA,wBAAM,IAAI,MAAM,yBAAyB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,gBACnF;AAEA,uBAAO,MAAM,SAAS,KAAK;AAAA,cAC7B,SAAS,OAAY;AACnB,wBAAQ,KAAK,4CAA4C;AAAA,kBACvD,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,gBACvC,CAAC;AACD,uBAAO;AAAA,cACT;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAEA,cAAM,WAAW,oBAAoB,gBAAgB,WAAW;AAGhE,YAAI,gBAAgB,QAAQ;AAC1B,cAAI;AACF,kBAAM,eAAe,OAAO,YAAY;AAAA,cACtC,OAAO;AAAA,cACP,UAAU;AAAA,cACV,QAAQ;AAAA,cACR,OAAO;AAAA,cACP,UAAU,QAAQ,YAAY,CAAC;AAAA,YACjC,CAAC;AAAA,UACH,SAAS,OAAY;AACnB,oBAAQ,KAAK,iDAAiD;AAAA,cAC5D,OAAO;AAAA,cACP,UAAU;AAAA,cACV,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,YACvC,CAAC;AAAA,UAEH;AAAA,QACF;AAGA,cAAM,iBAAiB;AAAA,UACrB,OAAO;AAAA,UACP,UAAU;AAAA,UACV,GAAI,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,QACjC;AAEA,YAAI;AAEF,cAAI,UAAU;AACZ,kBAAM,SAAS,OAAO,EAAE,QAAQ,UAAU,CAAC;AAAA,UAC7C;AAEA,gBAAM,SAAS,MAAM,cAAc,SAAS,aAAa,cAAc;AAGvE,cAAI,UAAU;AACZ,kBAAM,SAAS,OAAO,EAAE,QAAQ,aAAa,OAAO,CAAC;AAAA,UACvD;AAGA,cAAI,QAAQ,YAAY;AACtB,gBAAI;AACF,oBAAM,MAAM,QAAQ,YAAY;AAAA,gBAC9B,QAAQ;AAAA,gBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,gBAC9C,MAAM,KAAK,UAAU;AAAA,kBACnB,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,QAAQ;AAAA,kBACR;AAAA,kBACA,UAAU,QAAQ;AAAA,gBACpB,CAAC;AAAA,cACH,CAAC;AAAA,YACH,SAAS,OAAO;AACd,sBAAQ,KAAK,uCAAuC,KAAK;AAAA,YAC3D;AAAA,UACF;AAEA,iBAAO;AAAA,YACL,WAAW,SAAS,KAAK,IAAI,CAAC;AAAA,YAC9B,QAAQ;AAAA,YACR,OAAO;AAAA,UACT;AAAA,QACF,SAAS,OAAY;AAEnB,cAAI,UAAU;AACZ,kBAAM,SAAS,OAAO;AAAA,cACpB,QAAQ;AAAA,cACR,OAAO;AAAA,gBACL,SAAS,MAAM,WAAW;AAAA,gBAC1B,OAAO,MAAM;AAAA,gBACb,MAAM,MAAM,QAAQ;AAAA,cACtB;AAAA,YACF,CAAC;AAAA,UACH;AAGA,cAAI,QAAQ,YAAY;AACtB,gBAAI;AACF,oBAAM,MAAM,QAAQ,YAAY;AAAA,gBAC9B,QAAQ;AAAA,gBACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,gBAC9C,MAAM,KAAK,UAAU;AAAA,kBACnB,OAAO;AAAA,kBACP,UAAU;AAAA,kBACV,QAAQ;AAAA,kBACR,OAAO;AAAA,oBACL,SAAS,MAAM,WAAW;AAAA,oBAC1B,OAAO,MAAM;AAAA,oBACb,MAAM,MAAM,QAAQ;AAAA,kBACtB;AAAA,kBACA,UAAU,QAAQ;AAAA,gBACpB,CAAC;AAAA,cACH,CAAC;AAAA,YACH,SAAS,cAAc;AACrB,sBAAQ,KAAK,6CAA6C,YAAY;AAAA,YACxE;AAAA,UACF;AACA,gBAAM;AAAA,QACR;AAAA,MACF;AAGA,aAAO,SAAS,IAAI,OAAO,aAAa,OAAO;AAAA,IACjD;AAAA,EACF;AAEA,SAAO;AACT;AASO,SAAS,uBACd,OACA;AACA,SAAO,oBAAoB,MAAM,SAAS,MAAM,YAAY;AAC9D;","names":["explicitPath"]}
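In local mode the runtime first tries to import a job store module directly, resolving `@/app/api/workflows/stores/jobStore` and then the path named in `WORKER_JOB_STORE_MODULE_PATH`, and using its `updateJob`, `setJob`, and `getJob` exports; only if neither resolves does it fall back to the HTTP endpoints derived from the webhook URL or `WORKER_JOB_STORE_URL`. A minimal in-memory sketch of such a module is shown below; only the exported names matter to the worker runtime, while the `JobRecord` shape and the `Map`-backed storage are illustrative assumptions.

```typescript
// Hypothetical module at @/app/api/workflows/stores/jobStore (or any path set in
// WORKER_JOB_STORE_MODULE_PATH). The status values and error shape mirror what the
// worker runtime writes; everything else here is an illustrative in-memory stand-in.
type JobRecord = {
  jobId: string;
  workerId: string;
  status: 'queued' | 'running' | 'completed' | 'failed';
  input?: unknown;
  output?: unknown;
  error?: { message: string; stack?: string; name?: string };
  metadata?: Record<string, unknown>;
};

const jobs = new Map<string, JobRecord>();

// Called once when dispatch() creates the initial "queued" record.
export async function setJob(jobId: string, data: JobRecord): Promise<void> {
  jobs.set(jobId, data);
}

// Called for status/progress/output/error updates during handler execution.
export async function updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
  const existing = jobs.get(jobId);
  jobs.set(jobId, { ...(existing ?? { jobId, workerId: 'unknown', status: 'queued' }), ...data });
}

// Called by the job store's get() helper.
export async function getJob(jobId: string): Promise<JobRecord | null> {
  return jobs.get(jobId) ?? null;
}
```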
package/package.json ADDED
@@ -0,0 +1,73 @@
1
+ {
2
+ "name": "@microfox/ai-worker",
3
+ "version": "1.0.1",
4
+ "description": "Background worker runtime for ai-router - SQS-based async agent execution",
5
+ "main": "./dist/index.js",
6
+ "module": "./dist/index.mjs",
7
+ "types": "./dist/index.d.ts",
8
+ "files": [
9
+ "dist/**/*",
10
+ "CHANGELOG.md"
11
+ ],
12
+ "scripts": {
13
+ "build": "tsup",
14
+ "build:watch": "tsup --watch",
15
+ "clean": "rm -rf dist",
16
+ "lint": "eslint . --ext .ts",
17
+ "prettier-check": "prettier --check \"./**/*.ts*\"",
18
+ "test": "vitest run",
19
+ "test:watch": "vitest"
20
+ },
21
+ "exports": {
22
+ "./package.json": "./package.json",
23
+ ".": {
24
+ "types": "./dist/index.d.ts",
25
+ "import": "./dist/index.mjs",
26
+ "require": "./dist/index.js"
27
+ },
28
+ "./client": {
29
+ "types": "./dist/client.d.ts",
30
+ "import": "./dist/client.mjs",
31
+ "require": "./dist/client.js"
32
+ },
33
+ "./handler": {
34
+ "types": "./dist/handler.d.ts",
35
+ "import": "./dist/handler.mjs",
36
+ "require": "./dist/handler.js"
37
+ },
38
+ "./config": {
39
+ "types": "./dist/config.d.ts",
40
+ "import": "./dist/config.mjs",
41
+ "require": "./dist/config.js"
42
+ }
43
+ },
44
+ "dependencies": {
45
+ "mongodb": "^6.12.0",
46
+ "zod": "^4.1.5"
47
+ },
48
+ "peerDependencies": {
49
+ "@microfox/ai-router": ">=2.1.3"
50
+ },
51
+ "devDependencies": {
52
+ "@types/aws-lambda": "^8.10.145",
53
+ "@types/node": "^20.14.2",
54
+ "@typescript-eslint/eslint-plugin": "^6.0.0",
55
+ "@typescript-eslint/parser": "^6.0.0",
56
+ "eslint": "^8.57.0",
57
+ "prettier": "^3.0.0",
58
+ "tsup": "^8",
59
+ "typescript": "^5.6.3",
60
+ "vitest": "^1.0.0"
61
+ },
62
+ "engines": {
63
+ "node": ">=18.0.0"
64
+ },
65
+ "keywords": [
66
+ "@microfox/ai-worker",
67
+ "ai-router",
68
+ "background-workers",
69
+ "sqs",
70
+ "lambda",
71
+ "typescript"
72
+ ]
73
+ }
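The `exports` map above exposes the root entry plus `./client`, `./handler`, and `./config` subpaths, each with ESM, CJS, and type-declaration conditions. A sketch of consuming them follows; the symbol names are taken from the export list in `dist/index.mjs` above, but which subpath owns the config-related symbols is assumed from naming rather than confirmed by this diff.

```typescript
// Root entry re-exports everything; subpath entries load a single module.
import { createWorker, createLambdaEntrypoint } from '@microfox/ai-worker';
import { dispatch, dispatchLocal } from '@microfox/ai-worker/client';
import { createLambdaHandler } from '@microfox/ai-worker/handler';
// Assumed placement: these are exported from the root, and presumably live in ./config.
import { getWorkersConfig, resolveQueueUrl, clearWorkersConfigCache } from '@microfox/ai-worker/config';
```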