@microfox/ai-worker 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/README.md +185 -0
- package/dist/chunk-BJTO5JO5.mjs +11 -0
- package/dist/chunk-BJTO5JO5.mjs.map +1 -0
- package/dist/chunk-FQCZSXDI.mjs +83 -0
- package/dist/chunk-FQCZSXDI.mjs.map +1 -0
- package/dist/chunk-WVR4JVWK.mjs +285 -0
- package/dist/chunk-WVR4JVWK.mjs.map +1 -0
- package/dist/chunk-ZYYWZ3PR.mjs +50 -0
- package/dist/chunk-ZYYWZ3PR.mjs.map +1 -0
- package/dist/client.d.mts +64 -0
- package/dist/client.d.ts +64 -0
- package/dist/client.js +108 -0
- package/dist/client.js.map +1 -0
- package/dist/client.mjs +10 -0
- package/dist/client.mjs.map +1 -0
- package/dist/config.d.mts +38 -0
- package/dist/config.d.ts +38 -0
- package/dist/config.js +76 -0
- package/dist/config.js.map +1 -0
- package/dist/config.mjs +12 -0
- package/dist/config.mjs.map +1 -0
- package/dist/handler.d.mts +96 -0
- package/dist/handler.d.ts +96 -0
- package/dist/handler.js +311 -0
- package/dist/handler.js.map +1 -0
- package/dist/handler.mjs +8 -0
- package/dist/handler.mjs.map +1 -0
- package/dist/index.d.mts +236 -0
- package/dist/index.d.ts +236 -0
- package/dist/index.js +734 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +313 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +73 -0
package/CHANGELOG.md
ADDED
@@ -0,0 +1,16 @@
# @microfox/ai-worker

## 1.0.1

### Patch Changes

- d108f28: Triggered by issue #41: release @microfox/ai-worker patch
- 8447252: Triggered by issue #47: release @microfox/ai-worker patch
- Updated dependencies [4d3a677]
  - @microfox/ai-router@2.1.3

## 0.1.1

### Patch Changes

- 973aac4: Changes from PR #38: ai-worker-and-cli

package/README.md
ADDED
@@ -0,0 +1,185 @@
# @microfox/ai-worker

Background worker runtime for `ai-router` - SQS-based async agent execution.

## Overview

`@microfox/ai-worker` enables you to run long-running AI agents asynchronously on AWS Lambda, triggered via SQS queues. This allows you to bypass Vercel's timeout limits while maintaining a unified developer experience.

## Features

- **Unified DX**: Define agent logic in one place (`app/ai/agents/...`), deploy automatically to Lambda
- **SQS-based**: Reliable message queuing with automatic retries
- **Webhook callbacks**: Receive completion notifications back to your Next.js app
- **Local development**: Run handlers immediately in development mode
- **Type-safe**: Full TypeScript support with Zod schema validation

## Installation

```bash
npm install @microfox/ai-worker
```

## Quick Start

### 1. Create a Background Worker

```typescript
// app/ai/agents/video-processing.worker.ts
import { createWorker, type WorkerConfig } from '@microfox/ai-worker';
import { z } from 'zod';

// Export workerConfig separately (best practice - CLI extracts this automatically)
export const workerConfig: WorkerConfig = {
  timeout: 900, // 15 minutes
  memorySize: 2048, // 2GB
  // Optional: Lambda layers
  // layers: ['arn:aws:lambda:${aws:region}:${aws:accountId}:layer:ffmpeg:1'],
};

export const videoProcessingAgent = createWorker({
  id: 'video-processing',
  inputSchema: z.object({ url: z.string() }),
  outputSchema: z.object({ processedUrl: z.string() }),

  handler: async ({ input, ctx }) => {
    // This runs on AWS Lambda
    const result = await heavyVideoProcessing(input.url);
    return { processedUrl: result };
  },
});
```

### 2. Dispatch from an Orchestrator

```typescript
// app/ai/orchestrator.ts
import { videoProcessingAgent } from './agents/video-processing.worker';

// Dispatch to background worker
const result = await videoProcessingAgent.dispatch(
  { url: 'https://example.com/video.mp4' },
  {
    webhookUrl: 'https://myapp.com/api/ai/callback', // optional
    mode: 'remote', // optional: "auto" | "local" | "remote"
    jobId: 'unique-job-id', // optional
    metadata: { userId: '123' }, // optional
  }
);

// Returns: { messageId: string, status: 'queued', jobId: string }
```
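
The `mode` option controls where the handler runs. Per the `DispatchOptions` comments in this package's source, `'auto'` (the default) executes the handler inline during development unless `WORKERS_LOCAL_MODE=false` is set, while `'remote'` always dispatches through SQS/Lambda. A minimal sketch of forcing inline execution while iterating locally:

```typescript
// Runs the handler in-process, skipping the SQS round-trip.
// Sketch only: the local-mode return value may differ from the
// queued DispatchResult shown above.
const localResult = await videoProcessingAgent.dispatch(
  { url: 'https://example.com/video.mp4' },
  { mode: 'local' }
);
```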

### 3. Handle Webhook Callbacks

```typescript
// app/api/ai/callback/route.ts
import { NextRequest, NextResponse } from 'next/server';

export async function POST(request: NextRequest) {
  const { jobId, workerId, status, output, error } = await request.json();

  if (status === 'success') {
    // Update your database, trigger follow-up agents, etc.
    await updateJobStatus(jobId, 'completed', output);
  } else {
    // Handle error
    await updateJobStatus(jobId, 'failed', error);
  }

  return NextResponse.json({ success: true });
}
```
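
For reference, the callback body posted by the worker runtime follows the `WebhookPayload` shape from the handler source; `error` is only set when `status` is `'error'`:

```typescript
interface WebhookPayload {
  jobId: string;
  workerId: string;
  status: 'success' | 'error';
  output?: any;
  error?: { message: string; stack?: string; name?: string };
  metadata?: Record<string, any>;
}
```

The runtime sends it with `Content-Type: application/json` and a `User-Agent` of `ai-router-worker/1.0`; webhook failures are logged but never fail the Lambda.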

### 4. Deploy Workers

```bash
# Scan app/ai/**/*.worker.ts and deploy to AWS
npx @microfox/ai-worker-cli@latest push
```

## Configuration

### Environment Variables

**Required for Next.js:**
- `WORKER_BASE_URL` - Base URL of your workers service (server-side). We append `/workers/trigger` and `/workers/config` internally when needed (e.g. `https://.../prod`).
- `NEXT_PUBLIC_WORKER_BASE_URL` - Same as `WORKER_BASE_URL`, but exposed to the browser (use this if you call `dispatch()` from client-side code).
- `WORKERS_TRIGGER_API_KEY` - Optional API key for trigger authentication (sent as `x-workers-trigger-key`)

**Required for Lambda (set via deploy script):**
- `AWS_REGION` - AWS region for SQS/Lambda
- `STAGE` - Deployment stage (dev/stage/prod)
- Any secrets your workers need (OPENAI_KEY, DATABASE_URL, etc.)
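
As a sketch, a local env file for the Next.js side might look like the following; the URL is purely illustrative, so substitute the base URL your deployed workers service actually exposes:

```bash
# .env.local - illustrative values only
WORKER_BASE_URL=https://abc123.execute-api.us-east-1.amazonaws.com/prod
NEXT_PUBLIC_WORKER_BASE_URL=https://abc123.execute-api.us-east-1.amazonaws.com/prod
WORKERS_TRIGGER_API_KEY=replace-with-your-key
```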

### Worker Configuration

**Best Practice**: Export `workerConfig` as a separate const from your worker file:

```typescript
import { type WorkerConfig } from '@microfox/ai-worker';

export const workerConfig: WorkerConfig = {
  timeout: 300, // Lambda timeout in seconds (max 900)
  memorySize: 512, // Lambda memory in MB (128-10240)
  layers: ['arn:aws:lambda:${aws:region}:${aws:accountId}:layer:ffmpeg:1'], // Optional Lambda layers
};
```

The CLI will automatically extract this configuration when generating `serverless.yml`. You do not need to pass it to `createWorker()`.

## Architecture

```
┌─────────────┐
│   Next.js   │
│ Orchestrator│
└──────┬──────┘
       │ dispatch()
       ▼
┌─────────────┐
│   AWS SQS   │
│    Queue    │
└──────┬──────┘
       │ trigger
       ▼
┌─────────────┐
│ AWS Lambda  │
│   Worker    │
└──────┬──────┘
       │ POST
       ▼
┌─────────────┐
│   Webhook   │
│  Callback   │
└─────────────┘
```

## API Reference

### `createWorker<INPUT, OUTPUT>(config)`

Creates a background agent with the specified configuration.

**Parameters:**
- `id: string` - Unique worker ID
- `inputSchema: ZodType<INPUT>` - Input validation schema
- `outputSchema: ZodType<OUTPUT>` - Output validation schema
- `handler: WorkerHandler<INPUT, OUTPUT>` - Handler function
- `workerConfig?: WorkerConfig` - **Deprecated**: Prefer exporting `workerConfig` as a separate const

**Returns:** `BackgroundAgent<INPUT, OUTPUT>` with a `dispatch()` method

### `dispatch(input, options)`

Dispatches a job to the background worker.

**Parameters:**
- `input: INPUT` - Input data (validated against `inputSchema`)
- `options: { webhookUrl?: string, mode?: 'auto' | 'local' | 'remote', jobId?: string, metadata?: Record<string, any> }`

**Returns:** `Promise<{ messageId: string, status: 'queued', jobId: string }>`
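
A short sketch of consuming the returned value, reusing the Quick Start agent:

```typescript
const { messageId, status, jobId } = await videoProcessingAgent.dispatch(
  { url: 'https://example.com/video.mp4' },
  { metadata: { userId: '123' } }
);
// status is always 'queued' for remote dispatch; if the trigger endpoint
// returns no messageId, the client falls back to `trigger-${jobId}`.
```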

## License

MIT

package/dist/chunk-BJTO5JO5.mjs
ADDED
@@ -0,0 +1,11 @@
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});

export {
  __require
};
//# sourceMappingURL=chunk-BJTO5JO5.mjs.map

package/dist/chunk-BJTO5JO5.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}

package/dist/chunk-FQCZSXDI.mjs
ADDED
@@ -0,0 +1,83 @@
// src/client.ts
function getWorkersTriggerUrl() {
  const raw = process.env.WORKER_BASE_URL || process.env.NEXT_PUBLIC_WORKER_BASE_URL || process.env.WORKERS_TRIGGER_API_URL || process.env.NEXT_PUBLIC_WORKERS_TRIGGER_API_URL || process.env.WORKERS_CONFIG_API_URL || process.env.NEXT_PUBLIC_WORKERS_CONFIG_API_URL;
  if (!raw) {
    throw new Error(
      "WORKER_BASE_URL (preferred) or NEXT_PUBLIC_WORKER_BASE_URL is required for background workers"
    );
  }
  const url = new URL(raw);
  url.search = "";
  url.hash = "";
  const path = url.pathname || "";
  url.pathname = path.replace(/\/?workers\/(trigger|config)\/?$/, "");
  const basePath = url.pathname.replace(/\/+$/, "");
  url.pathname = `${basePath}/workers/trigger`.replace(/\/+$/, "");
  return url.toString();
}
function serializeContext(ctx) {
  const serialized = {};
  if (ctx.requestId) {
    serialized.requestId = ctx.requestId;
  }
  if (ctx.metadata && typeof ctx.metadata === "object") {
    Object.assign(serialized, ctx.metadata);
  }
  if (ctx._serializeContext && typeof ctx._serializeContext === "function") {
    const custom = ctx._serializeContext();
    Object.assign(serialized, custom);
  }
  return serialized;
}
async function dispatch(workerId, input, inputSchema, options, ctx) {
  const validatedInput = inputSchema.parse(input);
  const jobId = options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
  const triggerUrl = getWorkersTriggerUrl();
  const serializedContext = ctx ? serializeContext(ctx) : {};
  const messageBody = {
    workerId,
    jobId,
    input: validatedInput,
    context: serializedContext,
    webhookUrl: options.webhookUrl,
    metadata: options.metadata || {},
    timestamp: (/* @__PURE__ */ new Date()).toISOString()
  };
  const headers = {
    "Content-Type": "application/json"
  };
  const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
  if (triggerKey) {
    headers["x-workers-trigger-key"] = triggerKey;
  }
  const response = await fetch(triggerUrl, {
    method: "POST",
    headers,
    body: JSON.stringify({
      workerId,
      body: messageBody
    })
  });
  if (!response.ok) {
    const text = await response.text().catch(() => "");
    throw new Error(
      `Failed to trigger worker "${workerId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
    );
  }
  const data = await response.json().catch(() => ({}));
  const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;
  return {
    messageId,
    status: "queued",
    jobId
  };
}
async function dispatchLocal(handler, input, ctx) {
  return handler({ input, ctx: ctx || {} });
}

export {
  dispatch,
  dispatchLocal
};
//# sourceMappingURL=chunk-FQCZSXDI.mjs.map

package/dist/chunk-FQCZSXDI.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/client.ts"],"sourcesContent":["/**\n * Client for dispatching background worker jobs.\n *\n * In production, dispatching happens via the workers HTTP API:\n * POST /workers/trigger -> enqueues message to SQS on the workers service side\n *\n * This avoids requiring AWS credentials in your Next.js app.\n */\n\nimport type { ZodType, z } from 'zod';\n\nexport interface DispatchOptions {\n /**\n * Optional webhook callback URL to notify when the job finishes.\n * Only called when provided. Default: no webhook (use job store / MongoDB only).\n */\n webhookUrl?: string;\n /**\n * Controls how dispatch executes.\n * - \"auto\" (default): local inline execution in development unless WORKERS_LOCAL_MODE=false.\n * - \"local\": force inline execution (no SQS).\n * - \"remote\": force SQS/Lambda dispatch even in development.\n */\n mode?: 'auto' | 'local' | 'remote';\n jobId?: string;\n metadata?: Record<string, any>;\n}\n\nexport interface DispatchResult {\n messageId: string;\n status: 'queued';\n jobId: string;\n}\n\nexport interface SerializedContext {\n requestId?: string;\n userId?: string;\n traceId?: string;\n [key: string]: any;\n}\n\n/**\n * Derives the full /workers/trigger URL from env.\n *\n * Preferred env vars:\n * - WORKER_BASE_URL: base URL of the workers service (e.g. https://.../prod)\n * - NEXT_PUBLIC_WORKER_BASE_URL: same, but exposed to the browser\n *\n * Legacy env vars (still supported for backwards compatibility):\n * - WORKERS_TRIGGER_API_URL / NEXT_PUBLIC_WORKERS_TRIGGER_API_URL\n * - WORKERS_CONFIG_API_URL / NEXT_PUBLIC_WORKERS_CONFIG_API_URL\n */\nfunction getWorkersTriggerUrl(): string {\n const raw =\n process.env.WORKER_BASE_URL ||\n process.env.NEXT_PUBLIC_WORKER_BASE_URL ||\n process.env.WORKERS_TRIGGER_API_URL ||\n process.env.NEXT_PUBLIC_WORKERS_TRIGGER_API_URL ||\n process.env.WORKERS_CONFIG_API_URL ||\n process.env.NEXT_PUBLIC_WORKERS_CONFIG_API_URL;\n\n if (!raw) {\n throw new Error(\n 'WORKER_BASE_URL (preferred) or NEXT_PUBLIC_WORKER_BASE_URL is required for background workers'\n );\n }\n\n const url = new URL(raw);\n url.search = '';\n url.hash = '';\n\n const path = url.pathname || '';\n\n // If the user pointed at a specific endpoint, normalize back to the service root.\n url.pathname = path.replace(/\\/?workers\\/(trigger|config)\\/?$/, '');\n\n const basePath = url.pathname.replace(/\\/+$/, '');\n url.pathname = `${basePath}/workers/trigger`.replace(/\\/+$/, '');\n\n return url.toString();\n}\n\n/**\n * Serializes context data for transmission to Lambda.\n * Only serializes safe, JSON-compatible properties.\n */\nfunction serializeContext(ctx: any): SerializedContext {\n const serialized: SerializedContext = {};\n\n if (ctx.requestId) {\n serialized.requestId = ctx.requestId;\n }\n\n // Extract any additional serializable metadata\n if (ctx.metadata && typeof ctx.metadata === 'object') {\n Object.assign(serialized, ctx.metadata);\n }\n\n // Allow custom context serialization via a helper property\n if (ctx._serializeContext && typeof ctx._serializeContext === 'function') {\n const custom = ctx._serializeContext();\n Object.assign(serialized, custom);\n }\n\n return serialized;\n}\n\n/**\n * Dispatches a background worker job to SQS.\n *\n * @param workerId - The ID of the worker to dispatch\n * @param input - The input data for the worker (will be validated against inputSchema)\n * @param inputSchema - Zod schema for input validation\n * @param options - Dispatch options including webhook URL\n * @param ctx - Optional 
context object (only serializable parts will be sent)\n * @returns Promise resolving to dispatch result with messageId and jobId\n */\nexport async function dispatch<INPUT_SCHEMA extends ZodType<any>>(\n workerId: string,\n input: z.input<INPUT_SCHEMA>,\n inputSchema: INPUT_SCHEMA,\n options: DispatchOptions,\n ctx?: any\n): Promise<DispatchResult> {\n // Validate input against schema\n const validatedInput = inputSchema.parse(input);\n\n // Generate job ID if not provided\n const jobId =\n options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n\n // Resolve /workers/trigger endpoint URL\n const triggerUrl = getWorkersTriggerUrl();\n\n // Serialize context (only safe, JSON-compatible parts)\n const serializedContext = ctx ? serializeContext(ctx) : {};\n\n // Job updates use MongoDB only; never pass jobStoreUrl/origin URL.\n const messageBody = {\n workerId,\n jobId,\n input: validatedInput,\n context: serializedContext,\n webhookUrl: options.webhookUrl,\n metadata: options.metadata || {},\n timestamp: new Date().toISOString(),\n };\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (triggerKey) {\n headers['x-workers-trigger-key'] = triggerKey;\n }\n\n const response = await fetch(triggerUrl, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n workerId,\n body: messageBody,\n }),\n });\n\n if (!response.ok) {\n const text = await response.text().catch(() => '');\n throw new Error(\n `Failed to trigger worker \"${workerId}\": ${response.status} ${response.statusText}${text ? ` - ${text}` : ''}`\n );\n }\n\n const data = (await response.json().catch(() => ({}))) as any;\n const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;\n\n return {\n messageId,\n status: 'queued',\n jobId,\n };\n}\n\n/**\n * Local development mode: runs the handler immediately in the same process.\n * This bypasses SQS and Lambda for faster iteration during development.\n *\n * @param handler - The worker handler function\n * @param input - The input data\n * @param ctx - The context object\n * @returns The handler result\n */\nexport async function dispatchLocal<INPUT, OUTPUT>(\n handler: (params: { input: INPUT; ctx: any }) => Promise<OUTPUT>,\n input: INPUT,\n ctx?: any\n): Promise<OUTPUT> {\n return handler({ input, ctx: ctx || {} 
});\n}\n"],"mappings":";AAoDA,SAAS,uBAA+B;AACtC,QAAM,MACJ,QAAQ,IAAI,mBACZ,QAAQ,IAAI,+BACZ,QAAQ,IAAI,2BACZ,QAAQ,IAAI,uCACZ,QAAQ,IAAI,0BACZ,QAAQ,IAAI;AAEd,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,MAAM,IAAI,IAAI,GAAG;AACvB,MAAI,SAAS;AACb,MAAI,OAAO;AAEX,QAAM,OAAO,IAAI,YAAY;AAG7B,MAAI,WAAW,KAAK,QAAQ,oCAAoC,EAAE;AAElE,QAAM,WAAW,IAAI,SAAS,QAAQ,QAAQ,EAAE;AAChD,MAAI,WAAW,GAAG,QAAQ,mBAAmB,QAAQ,QAAQ,EAAE;AAE/D,SAAO,IAAI,SAAS;AACtB;AAMA,SAAS,iBAAiB,KAA6B;AACrD,QAAM,aAAgC,CAAC;AAEvC,MAAI,IAAI,WAAW;AACjB,eAAW,YAAY,IAAI;AAAA,EAC7B;AAGA,MAAI,IAAI,YAAY,OAAO,IAAI,aAAa,UAAU;AACpD,WAAO,OAAO,YAAY,IAAI,QAAQ;AAAA,EACxC;AAGA,MAAI,IAAI,qBAAqB,OAAO,IAAI,sBAAsB,YAAY;AACxE,UAAM,SAAS,IAAI,kBAAkB;AACrC,WAAO,OAAO,YAAY,MAAM;AAAA,EAClC;AAEA,SAAO;AACT;AAYA,eAAsB,SACpB,UACA,OACA,aACA,SACA,KACyB;AAEzB,QAAM,iBAAiB,YAAY,MAAM,KAAK;AAG9C,QAAM,QACJ,QAAQ,SAAS,OAAO,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAG/E,QAAM,aAAa,qBAAqB;AAGxC,QAAM,oBAAoB,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAGzD,QAAM,cAAc;AAAA,IAClB;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP,SAAS;AAAA,IACT,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ,YAAY,CAAC;AAAA,IAC/B,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AAEA,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AACA,QAAM,aAAa,QAAQ,IAAI;AAC/B,MAAI,YAAY;AACd,YAAQ,uBAAuB,IAAI;AAAA,EACrC;AAEA,QAAM,WAAW,MAAM,MAAM,YAAY;AAAA,IACvC,QAAQ;AAAA,IACR;AAAA,IACA,MAAM,KAAK,UAAU;AAAA,MACnB;AAAA,MACA,MAAM;AAAA,IACR,CAAC;AAAA,EACH,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ,MAAM,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,IAC9G;AAAA,EACF;AAEA,QAAM,OAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,QAAM,YAAY,MAAM,YAAY,OAAO,KAAK,SAAS,IAAI,WAAW,KAAK;AAE7E,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,EACF;AACF;AAWA,eAAsB,cACpB,SACA,OACA,KACiB;AACjB,SAAO,QAAQ,EAAE,OAAO,KAAK,OAAO,CAAC,EAAE,CAAC;AAC1C;","names":[]}

package/dist/chunk-WVR4JVWK.mjs
ADDED
@@ -0,0 +1,285 @@
// src/mongoJobStore.ts
import { MongoClient } from "mongodb";
var uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
var dbName = process.env.MONGODB_WORKER_DB || process.env.MONGODB_DB || "worker";
var collectionName = process.env.MONGODB_WORKER_JOBS_COLLECTION || "worker_jobs";
var clientPromise = null;
function getClient() {
  if (!uri) {
    throw new Error(
      "MongoDB URI required for job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
    );
  }
  if (!clientPromise) {
    clientPromise = new MongoClient(uri, {
      maxPoolSize: 10,
      minPoolSize: 0,
      serverSelectionTimeoutMS: 1e4
    }).connect();
  }
  return clientPromise;
}
async function getCollection() {
  const client = await getClient();
  return client.db(dbName).collection(collectionName);
}
function createMongoJobStore(workerId, jobId, input, metadata) {
  return {
    update: async (update) => {
      try {
        const coll = await getCollection();
        const now = (/* @__PURE__ */ new Date()).toISOString();
        const existing = await coll.findOne({ _id: jobId });
        let metadataUpdate = { ...existing?.metadata ?? {} };
        if (update.metadata) {
          Object.assign(metadataUpdate, update.metadata);
        }
        if (update.progress !== void 0 || update.progressMessage !== void 0) {
          metadataUpdate.progress = update.progress;
          metadataUpdate.progressMessage = update.progressMessage;
        }
        const set = {
          updatedAt: now,
          metadata: metadataUpdate
        };
        if (update.status !== void 0) {
          set.status = update.status;
          if (["completed", "failed"].includes(update.status) && !existing?.completedAt) {
            set.completedAt = now;
          }
        }
        if (update.output !== void 0) set.output = update.output;
        if (update.error !== void 0) set.error = update.error;
        if (existing) {
          await coll.updateOne({ _id: jobId }, { $set: set });
        } else {
          const doc = {
            _id: jobId,
            jobId,
            workerId,
            status: update.status ?? "queued",
            input: input ?? {},
            output: update.output,
            error: update.error,
            metadata: metadataUpdate,
            createdAt: now,
            updatedAt: now,
            completedAt: set.completedAt
          };
          if (doc.status === "completed" || doc.status === "failed") {
            doc.completedAt = doc.completedAt ?? now;
          }
          await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });
        }
      } catch (e) {
        console.error("[Worker] MongoDB job store update failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
      }
    },
    get: async () => {
      try {
        const coll = await getCollection();
        const doc = await coll.findOne({ _id: jobId });
        if (!doc) return null;
        const { _id, ...r } = doc;
        return r;
      } catch (e) {
        console.error("[Worker] MongoDB job store get failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
        return null;
      }
    }
  };
}
async function upsertJob(jobId, workerId, input, metadata) {
  const coll = await getCollection();
  const now = (/* @__PURE__ */ new Date()).toISOString();
  await coll.updateOne(
    { _id: jobId },
    {
      $set: {
        _id: jobId,
        jobId,
        workerId,
        status: "queued",
        input: input ?? {},
        metadata: metadata ?? {},
        createdAt: now,
        updatedAt: now
      }
    },
    { upsert: true }
  );
}
function isMongoJobStoreConfigured() {
  return Boolean(uri?.trim());
}

// src/handler.ts
async function sendWebhook(webhookUrl, payload) {
  try {
    const response = await fetch(webhookUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "User-Agent": "ai-router-worker/1.0"
      },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      const errorText = await response.text().catch(() => "");
      console.error("[Worker] Webhook callback failed:", {
        url: webhookUrl,
        status: response.status,
        statusText: response.statusText,
        errorText
      });
    } else {
      console.log("[Worker] Webhook callback successful:", {
        url: webhookUrl,
        status: response.status
      });
    }
  } catch (error) {
    console.error("[Worker] Webhook callback error:", {
      url: webhookUrl,
      error: error?.message || String(error),
      stack: error?.stack
    });
  }
}
function createLambdaHandler(handler, outputSchema) {
  return async (event, lambdaContext) => {
    const promises = event.Records.map(async (record) => {
      let messageBody = null;
      try {
        messageBody = JSON.parse(record.body);
        const { workerId, jobId, input, context, webhookUrl, metadata = {} } = messageBody;
        let jobStore;
        if (isMongoJobStoreConfigured()) {
          await upsertJob(jobId, workerId, input, metadata);
          jobStore = createMongoJobStore(workerId, jobId, input, metadata);
        }
        const handlerContext = {
          jobId,
          workerId,
          requestId: context.requestId || lambdaContext.awsRequestId,
          ...jobStore ? { jobStore } : {},
          ...context
        };
        if (jobStore) {
          try {
            await jobStore.update({ status: "running" });
            console.log("[Worker] Job status updated to running:", {
              jobId,
              workerId
            });
          } catch (error) {
            console.warn("[Worker] Failed to update status to running:", {
              jobId,
              workerId,
              error: error?.message || String(error)
            });
          }
        }
        let output;
        try {
          output = await handler({
            input,
            ctx: handlerContext
          });
          if (outputSchema) {
            output = outputSchema.parse(output);
          }
        } catch (error) {
          const errorPayload = {
            jobId,
            workerId,
            status: "error",
            error: {
              message: error.message || "Unknown error",
              stack: error.stack,
              name: error.name || "Error"
            },
            metadata
          };
          if (jobStore) {
            try {
              await jobStore.update({
                status: "failed",
                error: errorPayload.error
              });
              console.log("[Worker] Job status updated to failed:", {
                jobId,
                workerId
              });
            } catch (updateError) {
              console.warn("[Worker] Failed to update job store on error:", {
                jobId,
                workerId,
                error: updateError?.message || String(updateError)
              });
            }
          }
          if (webhookUrl) {
            await sendWebhook(webhookUrl, errorPayload);
          }
          throw error;
        }
        if (jobStore) {
          try {
            await jobStore.update({
              status: "completed",
              output
            });
            console.log("[Worker] Job status updated to completed:", {
              jobId,
              workerId
            });
          } catch (updateError) {
            console.warn("[Worker] Failed to update job store on success:", {
              jobId,
              workerId,
              error: updateError?.message || String(updateError)
            });
          }
        }
        console.log("[Worker] Job completed:", {
          jobId,
          workerId,
          output
        });
        const successPayload = {
          jobId,
          workerId,
          status: "success",
          output,
          metadata
        };
        if (webhookUrl) {
          await sendWebhook(webhookUrl, successPayload);
        }
      } catch (error) {
        console.error("[Worker] Error processing SQS record:", {
          jobId: messageBody?.jobId ?? "(parse failed)",
          workerId: messageBody?.workerId ?? "(parse failed)",
          error: error?.message || String(error),
          stack: error?.stack
        });
        throw error;
      }
    });
    await Promise.all(promises);
  };
}

export {
  createLambdaHandler
};
//# sourceMappingURL=chunk-WVR4JVWK.mjs.map

package/dist/chunk-WVR4JVWK.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/mongoJobStore.ts","../src/handler.ts"],"sourcesContent":["/**\n * MongoDB-backed job store for Lambda workers.\n * Updates jobs directly in MongoDB; never uses HTTP/origin URL.\n *\n * Env: MONGODB_WORKER_URI (or MONGODB_URI), MONGODB_WORKER_DB (or MONGODB_DB),\n * MONGODB_WORKER_JOBS_COLLECTION (default: worker_jobs).\n */\n\nimport { MongoClient, type Collection } from 'mongodb';\nimport type { JobStore, JobStoreUpdate } from './handler';\n\nconst uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;\nconst dbName =\n process.env.MONGODB_WORKER_DB ||\n process.env.MONGODB_DB ||\n 'worker';\nconst collectionName =\n process.env.MONGODB_WORKER_JOBS_COLLECTION || 'worker_jobs';\n\ntype Doc = {\n _id: string;\n jobId: string;\n workerId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input: any;\n output?: any;\n error?: { message: string; stack?: string; name?: string };\n metadata?: Record<string, any>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n};\n\nlet clientPromise: Promise<MongoClient> | null = null;\n\nfunction getClient(): Promise<MongoClient> {\n if (!uri) {\n throw new Error(\n 'MongoDB URI required for job store. Set DATABASE_MONGODB_URI or MONGODB_URI.'\n );\n }\n if (!clientPromise) {\n clientPromise = new MongoClient(uri, {\n maxPoolSize: 10,\n minPoolSize: 0,\n serverSelectionTimeoutMS: 10_000,\n }).connect();\n }\n return clientPromise;\n}\n\nasync function getCollection(): Promise<Collection<Doc>> {\n const client = await getClient();\n return client.db(dbName).collection<Doc>(collectionName);\n}\n\n/**\n * Create a JobStore that reads/writes directly to MongoDB.\n * Caller must ensure the job exists (upsert on first use).\n */\nexport function createMongoJobStore(\n workerId: string,\n jobId: string,\n input: any,\n metadata: Record<string, any>\n): JobStore {\n return {\n update: async (update: JobStoreUpdate): Promise<void> => {\n try {\n const coll = await getCollection();\n const now = new Date().toISOString();\n const existing = await coll.findOne({ _id: jobId });\n\n let metadataUpdate: Record<string, any> = { ...(existing?.metadata ?? {}) };\n if (update.metadata) {\n Object.assign(metadataUpdate, update.metadata);\n }\n if (update.progress !== undefined || update.progressMessage !== undefined) {\n metadataUpdate.progress = update.progress;\n metadataUpdate.progressMessage = update.progressMessage;\n }\n\n const set: Partial<Doc> = {\n updatedAt: now,\n metadata: metadataUpdate,\n };\n if (update.status !== undefined) {\n set.status = update.status;\n if (['completed', 'failed'].includes(update.status) && !existing?.completedAt) {\n set.completedAt = now;\n }\n }\n if (update.output !== undefined) set.output = update.output;\n if (update.error !== undefined) set.error = update.error;\n\n if (existing) {\n await coll.updateOne({ _id: jobId }, { $set: set });\n } else {\n const doc: Doc = {\n _id: jobId,\n jobId,\n workerId,\n status: (update.status as Doc['status']) ?? 'queued',\n input: input ?? {},\n output: update.output,\n error: update.error,\n metadata: metadataUpdate,\n createdAt: now,\n updatedAt: now,\n completedAt: set.completedAt,\n };\n if (doc.status === 'completed' || doc.status === 'failed') {\n doc.completedAt = doc.completedAt ?? 
now;\n }\n await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });\n }\n } catch (e: any) {\n console.error('[Worker] MongoDB job store update failed:', {\n jobId,\n workerId,\n error: e?.message ?? String(e),\n });\n }\n },\n get: async () => {\n try {\n const coll = await getCollection();\n const doc = await coll.findOne({ _id: jobId });\n if (!doc) return null;\n const { _id, ...r } = doc;\n return r as any;\n } catch (e: any) {\n console.error('[Worker] MongoDB job store get failed:', {\n jobId,\n workerId,\n error: e?.message ?? String(e),\n });\n return null;\n }\n },\n };\n}\n\n/**\n * Upsert initial job record in MongoDB (queued).\n * Call this when the Lambda starts processing a message.\n */\nexport async function upsertJob(\n jobId: string,\n workerId: string,\n input: any,\n metadata: Record<string, any>\n): Promise<void> {\n const coll = await getCollection();\n const now = new Date().toISOString();\n await coll.updateOne(\n { _id: jobId },\n {\n $set: {\n _id: jobId,\n jobId,\n workerId,\n status: 'queued',\n input: input ?? {},\n metadata: metadata ?? {},\n createdAt: now,\n updatedAt: now,\n },\n },\n { upsert: true }\n );\n}\n\nexport function isMongoJobStoreConfigured(): boolean {\n return Boolean(uri?.trim());\n}\n","/**\n * Generic Lambda handler wrapper for worker agents.\n * Handles SQS events, executes user handlers, and sends webhook callbacks.\n * Job store: MongoDB only. Never uses HTTP/origin URL for job updates.\n */\n\nimport type { SQSEvent, SQSRecord, Context as LambdaContext } from 'aws-lambda';\nimport type { ZodType } from 'zod';\nimport {\n createMongoJobStore,\n upsertJob,\n isMongoJobStoreConfigured,\n} from './mongoJobStore';\n\nexport interface JobStoreUpdate {\n status?: 'queued' | 'running' | 'completed' | 'failed';\n metadata?: Record<string, any>;\n progress?: number;\n progressMessage?: string;\n output?: any;\n error?: {\n message: string;\n stack?: string;\n name?: string;\n };\n}\n\nexport interface JobStore {\n /**\n * Update job in job store.\n * @param update - Update object with status, metadata, progress, output, or error\n */\n update(update: JobStoreUpdate): Promise<void>;\n /**\n * Get current job record from job store.\n * @returns Job record or null if not found\n */\n get(): Promise<{\n jobId: string;\n workerId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input: any;\n output?: any;\n error?: { message: string; stack?: string };\n metadata?: Record<string, any>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n } | null>;\n}\n\nexport interface WorkerHandlerParams<INPUT, OUTPUT> {\n input: INPUT;\n ctx: {\n jobId: string;\n workerId: string;\n requestId?: string;\n /**\n * Job store interface for updating and retrieving job state.\n * Uses MongoDB directly when configured; never HTTP/origin URL.\n */\n jobStore?: JobStore;\n [key: string]: any;\n };\n}\n\nexport type WorkerHandler<INPUT, OUTPUT> = (\n params: WorkerHandlerParams<INPUT, OUTPUT>\n) => Promise<OUTPUT>;\n\nexport interface SQSMessageBody {\n workerId: string;\n jobId: string;\n input: any;\n context: Record<string, any>;\n webhookUrl?: string;\n /** @deprecated Never use. Job updates use MongoDB only. 
*/\n jobStoreUrl?: string;\n metadata?: Record<string, any>;\n timestamp: string;\n}\n\nexport interface WebhookPayload {\n jobId: string;\n workerId: string;\n status: 'success' | 'error';\n output?: any;\n error?: {\n message: string;\n stack?: string;\n name?: string;\n };\n metadata?: Record<string, any>;\n}\n\n/**\n * Sends a webhook callback to the specified URL.\n */\nasync function sendWebhook(\n webhookUrl: string,\n payload: WebhookPayload\n): Promise<void> {\n try {\n const response = await fetch(webhookUrl, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'User-Agent': 'ai-router-worker/1.0',\n },\n body: JSON.stringify(payload),\n });\n\n if (!response.ok) {\n const errorText = await response.text().catch(() => '');\n console.error('[Worker] Webhook callback failed:', {\n url: webhookUrl,\n status: response.status,\n statusText: response.statusText,\n errorText,\n });\n // Don't throw - webhook failures shouldn't fail the Lambda\n } else {\n console.log('[Worker] Webhook callback successful:', {\n url: webhookUrl,\n status: response.status,\n });\n }\n } catch (error: any) {\n console.error('[Worker] Webhook callback error:', {\n url: webhookUrl,\n error: error?.message || String(error),\n stack: error?.stack,\n });\n // Don't throw - webhook failures shouldn't fail the Lambda\n }\n}\n\n/**\n * Creates a Lambda handler function that processes SQS events for workers.\n * Job store: MongoDB only. Never uses HTTP/origin URL for job updates.\n *\n * @param handler - The user's worker handler function\n * @param outputSchema - Optional Zod schema for output validation\n * @returns A Lambda handler function\n */\nexport function createLambdaHandler<INPUT, OUTPUT>(\n handler: WorkerHandler<INPUT, OUTPUT>,\n outputSchema?: ZodType<OUTPUT>\n): (event: SQSEvent, context: LambdaContext) => Promise<void> {\n return async (event: SQSEvent, lambdaContext: LambdaContext) => {\n const promises = event.Records.map(async (record: SQSRecord) => {\n let messageBody: SQSMessageBody | null = null;\n try {\n messageBody = JSON.parse(record.body) as SQSMessageBody;\n\n const { workerId, jobId, input, context, webhookUrl, metadata = {} } =\n messageBody;\n\n let jobStore: JobStore | undefined;\n if (isMongoJobStoreConfigured()) {\n await upsertJob(jobId, workerId, input, metadata);\n jobStore = createMongoJobStore(workerId, jobId, input, metadata);\n }\n\n const handlerContext = {\n jobId,\n workerId,\n requestId: context.requestId || lambdaContext.awsRequestId,\n ...(jobStore ? 
{ jobStore } : {}),\n ...context,\n };\n\n if (jobStore) {\n try {\n await jobStore.update({ status: 'running' });\n console.log('[Worker] Job status updated to running:', {\n jobId,\n workerId,\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to update status to running:', {\n jobId,\n workerId,\n error: error?.message || String(error),\n });\n }\n }\n\n let output: OUTPUT;\n try {\n output = await handler({\n input: input as INPUT,\n ctx: handlerContext,\n });\n\n if (outputSchema) {\n output = outputSchema.parse(output);\n }\n } catch (error: any) {\n const errorPayload: WebhookPayload = {\n jobId,\n workerId,\n status: 'error',\n error: {\n message: error.message || 'Unknown error',\n stack: error.stack,\n name: error.name || 'Error',\n },\n metadata,\n };\n\n if (jobStore) {\n try {\n await jobStore.update({\n status: 'failed',\n error: errorPayload.error,\n });\n console.log('[Worker] Job status updated to failed:', {\n jobId,\n workerId,\n });\n } catch (updateError: any) {\n console.warn('[Worker] Failed to update job store on error:', {\n jobId,\n workerId,\n error: updateError?.message || String(updateError),\n });\n }\n }\n\n if (webhookUrl) {\n await sendWebhook(webhookUrl, errorPayload);\n }\n throw error;\n }\n\n if (jobStore) {\n try {\n await jobStore.update({\n status: 'completed',\n output,\n });\n console.log('[Worker] Job status updated to completed:', {\n jobId,\n workerId,\n });\n } catch (updateError: any) {\n console.warn('[Worker] Failed to update job store on success:', {\n jobId,\n workerId,\n error: updateError?.message || String(updateError),\n });\n }\n }\n\n console.log('[Worker] Job completed:', {\n jobId,\n workerId,\n output,\n });\n\n const successPayload: WebhookPayload = {\n jobId,\n workerId,\n status: 'success',\n output,\n metadata,\n };\n\n if (webhookUrl) {\n await sendWebhook(webhookUrl, successPayload);\n }\n } catch (error: any) {\n console.error('[Worker] Error processing SQS record:', {\n jobId: messageBody?.jobId ?? '(parse failed)',\n workerId: messageBody?.workerId ?? 
'(parse failed)',\n error: error?.message || String(error),\n stack: error?.stack,\n });\n throw error;\n }\n });\n\n await Promise.all(promises);\n };\n}\n"],"mappings":";AAQA,SAAS,mBAAoC;AAG7C,IAAM,MAAM,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,wBAAwB,QAAQ,IAAI;AAC9F,IAAM,SACJ,QAAQ,IAAI,qBACZ,QAAQ,IAAI,cACZ;AACF,IAAM,iBACJ,QAAQ,IAAI,kCAAkC;AAgBhD,IAAI,gBAA6C;AAEjD,SAAS,YAAkC;AACzC,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,eAAe;AAClB,oBAAgB,IAAI,YAAY,KAAK;AAAA,MACnC,aAAa;AAAA,MACb,aAAa;AAAA,MACb,0BAA0B;AAAA,IAC5B,CAAC,EAAE,QAAQ;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAe,gBAA0C;AACvD,QAAM,SAAS,MAAM,UAAU;AAC/B,SAAO,OAAO,GAAG,MAAM,EAAE,WAAgB,cAAc;AACzD;AAMO,SAAS,oBACd,UACA,OACA,OACA,UACU;AACV,SAAO;AAAA,IACL,QAAQ,OAAO,WAA0C;AACvD,UAAI;AACF,cAAM,OAAO,MAAM,cAAc;AACjC,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,WAAW,MAAM,KAAK,QAAQ,EAAE,KAAK,MAAM,CAAC;AAElD,YAAI,iBAAsC,EAAE,GAAI,UAAU,YAAY,CAAC,EAAG;AAC1E,YAAI,OAAO,UAAU;AACnB,iBAAO,OAAO,gBAAgB,OAAO,QAAQ;AAAA,QAC/C;AACA,YAAI,OAAO,aAAa,UAAa,OAAO,oBAAoB,QAAW;AACzE,yBAAe,WAAW,OAAO;AACjC,yBAAe,kBAAkB,OAAO;AAAA,QAC1C;AAEA,cAAM,MAAoB;AAAA,UACxB,WAAW;AAAA,UACX,UAAU;AAAA,QACZ;AACA,YAAI,OAAO,WAAW,QAAW;AAC/B,cAAI,SAAS,OAAO;AACpB,cAAI,CAAC,aAAa,QAAQ,EAAE,SAAS,OAAO,MAAM,KAAK,CAAC,UAAU,aAAa;AAC7E,gBAAI,cAAc;AAAA,UACpB;AAAA,QACF;AACA,YAAI,OAAO,WAAW,OAAW,KAAI,SAAS,OAAO;AACrD,YAAI,OAAO,UAAU,OAAW,KAAI,QAAQ,OAAO;AAEnD,YAAI,UAAU;AACZ,gBAAM,KAAK,UAAU,EAAE,KAAK,MAAM,GAAG,EAAE,MAAM,IAAI,CAAC;AAAA,QACpD,OAAO;AACL,gBAAM,MAAW;AAAA,YACf,KAAK;AAAA,YACL;AAAA,YACA;AAAA,YACA,QAAS,OAAO,UAA4B;AAAA,YAC5C,OAAO,SAAS,CAAC;AAAA,YACjB,QAAQ,OAAO;AAAA,YACf,OAAO,OAAO;AAAA,YACd,UAAU;AAAA,YACV,WAAW;AAAA,YACX,WAAW;AAAA,YACX,aAAa,IAAI;AAAA,UACnB;AACA,cAAI,IAAI,WAAW,eAAe,IAAI,WAAW,UAAU;AACzD,gBAAI,cAAc,IAAI,eAAe;AAAA,UACvC;AACA,gBAAM,KAAK,UAAU,EAAE,KAAK,MAAM,GAAG,EAAE,MAAM,IAAI,GAAG,EAAE,QAAQ,KAAK,CAAC;AAAA,QACtE;AAAA,MACF,SAAS,GAAQ;AACf,gBAAQ,MAAM,6CAA6C;AAAA,UACzD;AAAA,UACA;AAAA,UACA,OAAO,GAAG,WAAW,OAAO,CAAC;AAAA,QAC/B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IACA,KAAK,YAAY;AACf,UAAI;AACF,cAAM,OAAO,MAAM,cAAc;AACjC,cAAM,MAAM,MAAM,KAAK,QAAQ,EAAE,KAAK,MAAM,CAAC;AAC7C,YAAI,CAAC,IAAK,QAAO;AACjB,cAAM,EAAE,KAAK,GAAG,EAAE,IAAI;AACtB,eAAO;AAAA,MACT,SAAS,GAAQ;AACf,gBAAQ,MAAM,0CAA0C;AAAA,UACtD;AAAA,UACA;AAAA,UACA,OAAO,GAAG,WAAW,OAAO,CAAC;AAAA,QAC/B,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AAMA,eAAsB,UACpB,OACA,UACA,OACA,UACe;AACf,QAAM,OAAO,MAAM,cAAc;AACjC,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAM,KAAK;AAAA,IACT,EAAE,KAAK,MAAM;AAAA,IACb;AAAA,MACE,MAAM;AAAA,QACJ,KAAK;AAAA,QACL;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,OAAO,SAAS,CAAC;AAAA,QACjB,UAAU,YAAY,CAAC;AAAA,QACvB,WAAW;AAAA,QACX,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA,EAAE,QAAQ,KAAK;AAAA,EACjB;AACF;AAEO,SAAS,4BAAqC;AACnD,SAAO,QAAQ,KAAK,KAAK,CAAC;AAC5B;;;AC7EA,eAAe,YACb,YACA,SACe;AACf,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,YAAY;AAAA,MACvC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,cAAc;AAAA,MAChB;AAAA,MACA,MAAM,KAAK,UAAU,OAAO;AAAA,IAC9B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,cAAQ,MAAM,qCAAqC;AAAA,QACjD,KAAK;AAAA,QACL,QAAQ,SAAS;AAAA,QACjB,YAAY,SAAS;AAAA,QACrB;AAAA,MACF,CAAC;AAAA,IAEH,OAAO;AACL,cAAQ,IAAI,yCAAyC;AAAA,QACnD,KAAK;AAAA,QACL,QAAQ,SAAS;AAAA,MACnB,CAAC;AAAA,IACH;AAAA,EACF,SAAS,OAAY;AACnB,YAAQ,MAAM,oCAAoC;AAAA,MAChD,KAAK;AAAA,MACL,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,MACrC,OAAO,OAAO;AAAA,IAChB,CAAC;AAAA,EAEH;AACF;AAUO,SAAS,oBACd,SACA,cAC4D;AAC5D,SAAO,OAAO,OAAiB,kBAAiC;AAC9D,UAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,WAAsB;AAC9D,UAAI,cAAqC;AACzC,UAAI;AACF,sBAAc,KAAK,MAAM,OAAO,IAAI;AAEpC,cAAM,EAAE,UAAU,OAAO,OAAO,SAAS,YAAY,WAAW,C
AAC,EAAE,IACjE;AAEF,YAAI;AACJ,YAAI,0BAA0B,GAAG;AAC/B,gBAAM,UAAU,OAAO,UAAU,OAAO,QAAQ;AAChD,qBAAW,oBAAoB,UAAU,OAAO,OAAO,QAAQ;AAAA,QACjE;AAEA,cAAM,iBAAiB;AAAA,UACrB;AAAA,UACA;AAAA,UACA,WAAW,QAAQ,aAAa,cAAc;AAAA,UAC9C,GAAI,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,UAC/B,GAAG;AAAA,QACL;AAEA,YAAI,UAAU;AACZ,cAAI;AACF,kBAAM,SAAS,OAAO,EAAE,QAAQ,UAAU,CAAC;AAC3C,oBAAQ,IAAI,2CAA2C;AAAA,cACrD;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH,SAAS,OAAY;AACnB,oBAAQ,KAAK,gDAAgD;AAAA,cAC3D;AAAA,cACA;AAAA,cACA,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,YACvC,CAAC;AAAA,UACH;AAAA,QACF;AAEA,YAAI;AACJ,YAAI;AACF,mBAAS,MAAM,QAAQ;AAAA,YACrB;AAAA,YACA,KAAK;AAAA,UACP,CAAC;AAED,cAAI,cAAc;AAChB,qBAAS,aAAa,MAAM,MAAM;AAAA,UACpC;AAAA,QACF,SAAS,OAAY;AACnB,gBAAM,eAA+B;AAAA,YACnC;AAAA,YACA;AAAA,YACA,QAAQ;AAAA,YACR,OAAO;AAAA,cACL,SAAS,MAAM,WAAW;AAAA,cAC1B,OAAO,MAAM;AAAA,cACb,MAAM,MAAM,QAAQ;AAAA,YACtB;AAAA,YACA;AAAA,UACF;AAEA,cAAI,UAAU;AACZ,gBAAI;AACF,oBAAM,SAAS,OAAO;AAAA,gBACpB,QAAQ;AAAA,gBACR,OAAO,aAAa;AAAA,cACtB,CAAC;AACD,sBAAQ,IAAI,0CAA0C;AAAA,gBACpD;AAAA,gBACA;AAAA,cACF,CAAC;AAAA,YACH,SAAS,aAAkB;AACzB,sBAAQ,KAAK,iDAAiD;AAAA,gBAC5D;AAAA,gBACA;AAAA,gBACA,OAAO,aAAa,WAAW,OAAO,WAAW;AAAA,cACnD,CAAC;AAAA,YACH;AAAA,UACF;AAEA,cAAI,YAAY;AACd,kBAAM,YAAY,YAAY,YAAY;AAAA,UAC5C;AACA,gBAAM;AAAA,QACR;AAEA,YAAI,UAAU;AACZ,cAAI;AACF,kBAAM,SAAS,OAAO;AAAA,cACpB,QAAQ;AAAA,cACR;AAAA,YACF,CAAC;AACD,oBAAQ,IAAI,6CAA6C;AAAA,cACvD;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH,SAAS,aAAkB;AACzB,oBAAQ,KAAK,mDAAmD;AAAA,cAC9D;AAAA,cACA;AAAA,cACA,OAAO,aAAa,WAAW,OAAO,WAAW;AAAA,YACnD,CAAC;AAAA,UACH;AAAA,QACF;AAEA,gBAAQ,IAAI,2BAA2B;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAED,cAAM,iBAAiC;AAAA,UACrC;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,QACF;AAEA,YAAI,YAAY;AACd,gBAAM,YAAY,YAAY,cAAc;AAAA,QAC9C;AAAA,MACF,SAAS,OAAY;AACnB,gBAAQ,MAAM,yCAAyC;AAAA,UACrD,OAAO,aAAa,SAAS;AAAA,UAC7B,UAAU,aAAa,YAAY;AAAA,UACnC,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,UACrC,OAAO,OAAO;AAAA,QAChB,CAAC;AACD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAED,UAAM,QAAQ,IAAI,QAAQ;AAAA,EAC5B;AACF;","names":[]}