@microfox/ai-worker 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/README.md +185 -0
- package/dist/chunk-BJTO5JO5.mjs +11 -0
- package/dist/chunk-BJTO5JO5.mjs.map +1 -0
- package/dist/chunk-FQCZSXDI.mjs +83 -0
- package/dist/chunk-FQCZSXDI.mjs.map +1 -0
- package/dist/chunk-WVR4JVWK.mjs +285 -0
- package/dist/chunk-WVR4JVWK.mjs.map +1 -0
- package/dist/chunk-ZYYWZ3PR.mjs +50 -0
- package/dist/chunk-ZYYWZ3PR.mjs.map +1 -0
- package/dist/client.d.mts +64 -0
- package/dist/client.d.ts +64 -0
- package/dist/client.js +108 -0
- package/dist/client.js.map +1 -0
- package/dist/client.mjs +10 -0
- package/dist/client.mjs.map +1 -0
- package/dist/config.d.mts +38 -0
- package/dist/config.d.ts +38 -0
- package/dist/config.js +76 -0
- package/dist/config.js.map +1 -0
- package/dist/config.mjs +12 -0
- package/dist/config.mjs.map +1 -0
- package/dist/handler.d.mts +96 -0
- package/dist/handler.d.ts +96 -0
- package/dist/handler.js +311 -0
- package/dist/handler.js.map +1 -0
- package/dist/handler.mjs +8 -0
- package/dist/handler.mjs.map +1 -0
- package/dist/index.d.mts +236 -0
- package/dist/index.d.ts +236 -0
- package/dist/index.js +734 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +313 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +73 -0
@@ -0,0 +1,96 @@
import { SQSEvent, Context } from 'aws-lambda';
import { ZodType } from 'zod';

/**
 * Generic Lambda handler wrapper for worker agents.
 * Handles SQS events, executes user handlers, and sends webhook callbacks.
 * Job store: MongoDB only. Never uses HTTP/origin URL for job updates.
 */

interface JobStoreUpdate {
    status?: 'queued' | 'running' | 'completed' | 'failed';
    metadata?: Record<string, any>;
    progress?: number;
    progressMessage?: string;
    output?: any;
    error?: {
        message: string;
        stack?: string;
        name?: string;
    };
}
interface JobStore {
    /**
     * Update job in job store.
     * @param update - Update object with status, metadata, progress, output, or error
     */
    update(update: JobStoreUpdate): Promise<void>;
    /**
     * Get current job record from job store.
     * @returns Job record or null if not found
     */
    get(): Promise<{
        jobId: string;
        workerId: string;
        status: 'queued' | 'running' | 'completed' | 'failed';
        input: any;
        output?: any;
        error?: {
            message: string;
            stack?: string;
        };
        metadata?: Record<string, any>;
        createdAt: string;
        updatedAt: string;
        completedAt?: string;
    } | null>;
}
interface WorkerHandlerParams<INPUT, OUTPUT> {
    input: INPUT;
    ctx: {
        jobId: string;
        workerId: string;
        requestId?: string;
        /**
         * Job store interface for updating and retrieving job state.
         * Uses MongoDB directly when configured; never HTTP/origin URL.
         */
        jobStore?: JobStore;
        [key: string]: any;
    };
}
type WorkerHandler<INPUT, OUTPUT> = (params: WorkerHandlerParams<INPUT, OUTPUT>) => Promise<OUTPUT>;
interface SQSMessageBody {
    workerId: string;
    jobId: string;
    input: any;
    context: Record<string, any>;
    webhookUrl?: string;
    /** @deprecated Never use. Job updates use MongoDB only. */
    jobStoreUrl?: string;
    metadata?: Record<string, any>;
    timestamp: string;
}
interface WebhookPayload {
    jobId: string;
    workerId: string;
    status: 'success' | 'error';
    output?: any;
    error?: {
        message: string;
        stack?: string;
        name?: string;
    };
    metadata?: Record<string, any>;
}
/**
 * Creates a Lambda handler function that processes SQS events for workers.
 * Job store: MongoDB only. Never uses HTTP/origin URL for job updates.
 *
 * @param handler - The user's worker handler function
 * @param outputSchema - Optional Zod schema for output validation
 * @returns A Lambda handler function
 */
declare function createLambdaHandler<INPUT, OUTPUT>(handler: WorkerHandler<INPUT, OUTPUT>, outputSchema?: ZodType<OUTPUT>): (event: SQSEvent, context: Context) => Promise<void>;

export { type JobStore, type JobStoreUpdate, type SQSMessageBody, type WebhookPayload, type WorkerHandler, type WorkerHandlerParams, createLambdaHandler };
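
The declaration above defines the worker-side contract: you write a WorkerHandler, optionally pass a Zod output schema, and export the function returned by createLambdaHandler as the Lambda entry point. A minimal usage sketch follows; the worker name, input/output shapes, and the '@microfox/ai-worker' import path are illustrative assumptions, while createLambdaHandler, WorkerHandler, and ctx.jobStore come from the typings shown above.

// Illustrative worker entry point (assumed file, e.g. src/worker.ts); shapes are examples only.
import { z } from 'zod';
import { createLambdaHandler, type WorkerHandler } from '@microfox/ai-worker'; // entry path assumed

// Example input/output shapes for a toy job; not defined by the package.
interface SummarizeInput {
  text: string;
}
const outputSchema = z.object({
  summary: z.string(),
  wordCount: z.number(),
});
type SummarizeOutput = z.infer<typeof outputSchema>;

// The handler receives the parsed SQS message input plus a ctx carrying jobId,
// workerId, requestId, and (when MongoDB is configured) a jobStore for progress updates.
const summarize: WorkerHandler<SummarizeInput, SummarizeOutput> = async ({ input, ctx }) => {
  await ctx.jobStore?.update({ progress: 10, progressMessage: 'starting' });
  const summary = input.text.slice(0, 200); // placeholder for real work
  await ctx.jobStore?.update({ progress: 90, progressMessage: 'finalizing' });
  return { summary, wordCount: summary.split(/\s+/).filter(Boolean).length };
};

// Lambda entry point; the optional Zod schema validates the handler's output.
export const handler = createLambdaHandler(summarize, outputSchema);
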
package/dist/handler.js
ADDED
@@ -0,0 +1,311 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/handler.ts
var handler_exports = {};
__export(handler_exports, {
  createLambdaHandler: () => createLambdaHandler
});
module.exports = __toCommonJS(handler_exports);

// src/mongoJobStore.ts
var import_mongodb = require("mongodb");
var uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
var dbName = process.env.MONGODB_WORKER_DB || process.env.MONGODB_DB || "worker";
var collectionName = process.env.MONGODB_WORKER_JOBS_COLLECTION || "worker_jobs";
var clientPromise = null;
function getClient() {
  if (!uri) {
    throw new Error(
      "MongoDB URI required for job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
    );
  }
  if (!clientPromise) {
    clientPromise = new import_mongodb.MongoClient(uri, {
      maxPoolSize: 10,
      minPoolSize: 0,
      serverSelectionTimeoutMS: 1e4
    }).connect();
  }
  return clientPromise;
}
async function getCollection() {
  const client = await getClient();
  return client.db(dbName).collection(collectionName);
}
function createMongoJobStore(workerId, jobId, input, metadata) {
  return {
    update: async (update) => {
      try {
        const coll = await getCollection();
        const now = (/* @__PURE__ */ new Date()).toISOString();
        const existing = await coll.findOne({ _id: jobId });
        let metadataUpdate = { ...existing?.metadata ?? {} };
        if (update.metadata) {
          Object.assign(metadataUpdate, update.metadata);
        }
        if (update.progress !== void 0 || update.progressMessage !== void 0) {
          metadataUpdate.progress = update.progress;
          metadataUpdate.progressMessage = update.progressMessage;
        }
        const set = {
          updatedAt: now,
          metadata: metadataUpdate
        };
        if (update.status !== void 0) {
          set.status = update.status;
          if (["completed", "failed"].includes(update.status) && !existing?.completedAt) {
            set.completedAt = now;
          }
        }
        if (update.output !== void 0) set.output = update.output;
        if (update.error !== void 0) set.error = update.error;
        if (existing) {
          await coll.updateOne({ _id: jobId }, { $set: set });
        } else {
          const doc = {
            _id: jobId,
            jobId,
            workerId,
            status: update.status ?? "queued",
            input: input ?? {},
            output: update.output,
            error: update.error,
            metadata: metadataUpdate,
            createdAt: now,
            updatedAt: now,
            completedAt: set.completedAt
          };
          if (doc.status === "completed" || doc.status === "failed") {
            doc.completedAt = doc.completedAt ?? now;
          }
          await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });
        }
      } catch (e) {
        console.error("[Worker] MongoDB job store update failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
      }
    },
    get: async () => {
      try {
        const coll = await getCollection();
        const doc = await coll.findOne({ _id: jobId });
        if (!doc) return null;
        const { _id, ...r } = doc;
        return r;
      } catch (e) {
        console.error("[Worker] MongoDB job store get failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
        return null;
      }
    }
  };
}
async function upsertJob(jobId, workerId, input, metadata) {
  const coll = await getCollection();
  const now = (/* @__PURE__ */ new Date()).toISOString();
  await coll.updateOne(
    { _id: jobId },
    {
      $set: {
        _id: jobId,
        jobId,
        workerId,
        status: "queued",
        input: input ?? {},
        metadata: metadata ?? {},
        createdAt: now,
        updatedAt: now
      }
    },
    { upsert: true }
  );
}
function isMongoJobStoreConfigured() {
  return Boolean(uri?.trim());
}

// src/handler.ts
async function sendWebhook(webhookUrl, payload) {
  try {
    const response = await fetch(webhookUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "User-Agent": "ai-router-worker/1.0"
      },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      const errorText = await response.text().catch(() => "");
      console.error("[Worker] Webhook callback failed:", {
        url: webhookUrl,
        status: response.status,
        statusText: response.statusText,
        errorText
      });
    } else {
      console.log("[Worker] Webhook callback successful:", {
        url: webhookUrl,
        status: response.status
      });
    }
  } catch (error) {
    console.error("[Worker] Webhook callback error:", {
      url: webhookUrl,
      error: error?.message || String(error),
      stack: error?.stack
    });
  }
}
function createLambdaHandler(handler, outputSchema) {
  return async (event, lambdaContext) => {
    const promises = event.Records.map(async (record) => {
      let messageBody = null;
      try {
        messageBody = JSON.parse(record.body);
        const { workerId, jobId, input, context, webhookUrl, metadata = {} } = messageBody;
        let jobStore;
        if (isMongoJobStoreConfigured()) {
          await upsertJob(jobId, workerId, input, metadata);
          jobStore = createMongoJobStore(workerId, jobId, input, metadata);
        }
        const handlerContext = {
          jobId,
          workerId,
          requestId: context.requestId || lambdaContext.awsRequestId,
          ...jobStore ? { jobStore } : {},
          ...context
        };
        if (jobStore) {
          try {
            await jobStore.update({ status: "running" });
            console.log("[Worker] Job status updated to running:", {
              jobId,
              workerId
            });
          } catch (error) {
            console.warn("[Worker] Failed to update status to running:", {
              jobId,
              workerId,
              error: error?.message || String(error)
            });
          }
        }
        let output;
        try {
          output = await handler({
            input,
            ctx: handlerContext
          });
          if (outputSchema) {
            output = outputSchema.parse(output);
          }
        } catch (error) {
          const errorPayload = {
            jobId,
            workerId,
            status: "error",
            error: {
              message: error.message || "Unknown error",
              stack: error.stack,
              name: error.name || "Error"
            },
            metadata
          };
          if (jobStore) {
            try {
              await jobStore.update({
                status: "failed",
                error: errorPayload.error
              });
              console.log("[Worker] Job status updated to failed:", {
                jobId,
                workerId
              });
            } catch (updateError) {
              console.warn("[Worker] Failed to update job store on error:", {
                jobId,
                workerId,
                error: updateError?.message || String(updateError)
              });
            }
          }
          if (webhookUrl) {
            await sendWebhook(webhookUrl, errorPayload);
          }
          throw error;
        }
        if (jobStore) {
          try {
            await jobStore.update({
              status: "completed",
              output
            });
            console.log("[Worker] Job status updated to completed:", {
              jobId,
              workerId
            });
          } catch (updateError) {
            console.warn("[Worker] Failed to update job store on success:", {
              jobId,
              workerId,
              error: updateError?.message || String(updateError)
            });
          }
        }
        console.log("[Worker] Job completed:", {
          jobId,
          workerId,
          output
        });
        const successPayload = {
          jobId,
          workerId,
          status: "success",
          output,
          metadata
        };
        if (webhookUrl) {
          await sendWebhook(webhookUrl, successPayload);
        }
      } catch (error) {
        console.error("[Worker] Error processing SQS record:", {
          jobId: messageBody?.jobId ?? "(parse failed)",
          workerId: messageBody?.workerId ?? "(parse failed)",
          error: error?.message || String(error),
          stack: error?.stack
        });
        throw error;
      }
    });
    await Promise.all(promises);
  };
}
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  createLambdaHandler
});
//# sourceMappingURL=handler.js.map
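
The compiled handler above parses each SQS record body into the SQSMessageBody shape and enables the MongoDB job store only when MONGODB_WORKER_URI (or DATABASE_MONGODB_URI / MONGODB_URI) is set; MONGODB_WORKER_DB and MONGODB_WORKER_JOBS_COLLECTION default to worker and worker_jobs. For context, here is a hedged producer-side sketch that enqueues one job in that shape with AWS SDK v3; the queue URL, worker id, and webhook URL are placeholders, not values defined by this package.

// Illustrative producer: enqueue a job message matching SQSMessageBody (assumed setup).
import { randomUUID } from 'node:crypto';
import { SQSClient, SendMessageCommand } from '@aws-sdk/client-sqs';

const sqs = new SQSClient({});

export async function enqueueJob(queueUrl: string, input: unknown): Promise<string> {
  const jobId = randomUUID();
  const message = {
    workerId: 'summarize-worker',                      // placeholder worker id
    jobId,
    input,
    context: {},                                       // spread into the handler's ctx
    webhookUrl: 'https://example.com/webhooks/worker', // optional completion callback
    metadata: {},
    timestamp: new Date().toISOString(),
  };
  await sqs.send(new SendMessageCommand({
    QueueUrl: queueUrl,
    MessageBody: JSON.stringify(message),
  }));
  return jobId;
}
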
package/dist/handler.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/handler.ts","../src/mongoJobStore.ts"],"sourcesContent":["/**\n * Generic Lambda handler wrapper for worker agents.\n * Handles SQS events, executes user handlers, and sends webhook callbacks.\n * Job store: MongoDB only. Never uses HTTP/origin URL for job updates.\n */\n\nimport type { SQSEvent, SQSRecord, Context as LambdaContext } from 'aws-lambda';\nimport type { ZodType } from 'zod';\nimport {\n createMongoJobStore,\n upsertJob,\n isMongoJobStoreConfigured,\n} from './mongoJobStore';\n\nexport interface JobStoreUpdate {\n status?: 'queued' | 'running' | 'completed' | 'failed';\n metadata?: Record<string, any>;\n progress?: number;\n progressMessage?: string;\n output?: any;\n error?: {\n message: string;\n stack?: string;\n name?: string;\n };\n}\n\nexport interface JobStore {\n /**\n * Update job in job store.\n * @param update - Update object with status, metadata, progress, output, or error\n */\n update(update: JobStoreUpdate): Promise<void>;\n /**\n * Get current job record from job store.\n * @returns Job record or null if not found\n */\n get(): Promise<{\n jobId: string;\n workerId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input: any;\n output?: any;\n error?: { message: string; stack?: string };\n metadata?: Record<string, any>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n } | null>;\n}\n\nexport interface WorkerHandlerParams<INPUT, OUTPUT> {\n input: INPUT;\n ctx: {\n jobId: string;\n workerId: string;\n requestId?: string;\n /**\n * Job store interface for updating and retrieving job state.\n * Uses MongoDB directly when configured; never HTTP/origin URL.\n */\n jobStore?: JobStore;\n [key: string]: any;\n };\n}\n\nexport type WorkerHandler<INPUT, OUTPUT> = (\n params: WorkerHandlerParams<INPUT, OUTPUT>\n) => Promise<OUTPUT>;\n\nexport interface SQSMessageBody {\n workerId: string;\n jobId: string;\n input: any;\n context: Record<string, any>;\n webhookUrl?: string;\n /** @deprecated Never use. Job updates use MongoDB only. */\n jobStoreUrl?: string;\n metadata?: Record<string, any>;\n timestamp: string;\n}\n\nexport interface WebhookPayload {\n jobId: string;\n workerId: string;\n status: 'success' | 'error';\n output?: any;\n error?: {\n message: string;\n stack?: string;\n name?: string;\n };\n metadata?: Record<string, any>;\n}\n\n/**\n * Sends a webhook callback to the specified URL.\n */\nasync function sendWebhook(\n webhookUrl: string,\n payload: WebhookPayload\n): Promise<void> {\n try {\n const response = await fetch(webhookUrl, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'User-Agent': 'ai-router-worker/1.0',\n },\n body: JSON.stringify(payload),\n });\n\n if (!response.ok) {\n const errorText = await response.text().catch(() => '');\n console.error('[Worker] Webhook callback failed:', {\n url: webhookUrl,\n status: response.status,\n statusText: response.statusText,\n errorText,\n });\n // Don't throw - webhook failures shouldn't fail the Lambda\n } else {\n console.log('[Worker] Webhook callback successful:', {\n url: webhookUrl,\n status: response.status,\n });\n }\n } catch (error: any) {\n console.error('[Worker] Webhook callback error:', {\n url: webhookUrl,\n error: error?.message || String(error),\n stack: error?.stack,\n });\n // Don't throw - webhook failures shouldn't fail the Lambda\n }\n}\n\n/**\n * Creates a Lambda handler function that processes SQS events for workers.\n * Job store: MongoDB only. 
Never uses HTTP/origin URL for job updates.\n *\n * @param handler - The user's worker handler function\n * @param outputSchema - Optional Zod schema for output validation\n * @returns A Lambda handler function\n */\nexport function createLambdaHandler<INPUT, OUTPUT>(\n handler: WorkerHandler<INPUT, OUTPUT>,\n outputSchema?: ZodType<OUTPUT>\n): (event: SQSEvent, context: LambdaContext) => Promise<void> {\n return async (event: SQSEvent, lambdaContext: LambdaContext) => {\n const promises = event.Records.map(async (record: SQSRecord) => {\n let messageBody: SQSMessageBody | null = null;\n try {\n messageBody = JSON.parse(record.body) as SQSMessageBody;\n\n const { workerId, jobId, input, context, webhookUrl, metadata = {} } =\n messageBody;\n\n let jobStore: JobStore | undefined;\n if (isMongoJobStoreConfigured()) {\n await upsertJob(jobId, workerId, input, metadata);\n jobStore = createMongoJobStore(workerId, jobId, input, metadata);\n }\n\n const handlerContext = {\n jobId,\n workerId,\n requestId: context.requestId || lambdaContext.awsRequestId,\n ...(jobStore ? { jobStore } : {}),\n ...context,\n };\n\n if (jobStore) {\n try {\n await jobStore.update({ status: 'running' });\n console.log('[Worker] Job status updated to running:', {\n jobId,\n workerId,\n });\n } catch (error: any) {\n console.warn('[Worker] Failed to update status to running:', {\n jobId,\n workerId,\n error: error?.message || String(error),\n });\n }\n }\n\n let output: OUTPUT;\n try {\n output = await handler({\n input: input as INPUT,\n ctx: handlerContext,\n });\n\n if (outputSchema) {\n output = outputSchema.parse(output);\n }\n } catch (error: any) {\n const errorPayload: WebhookPayload = {\n jobId,\n workerId,\n status: 'error',\n error: {\n message: error.message || 'Unknown error',\n stack: error.stack,\n name: error.name || 'Error',\n },\n metadata,\n };\n\n if (jobStore) {\n try {\n await jobStore.update({\n status: 'failed',\n error: errorPayload.error,\n });\n console.log('[Worker] Job status updated to failed:', {\n jobId,\n workerId,\n });\n } catch (updateError: any) {\n console.warn('[Worker] Failed to update job store on error:', {\n jobId,\n workerId,\n error: updateError?.message || String(updateError),\n });\n }\n }\n\n if (webhookUrl) {\n await sendWebhook(webhookUrl, errorPayload);\n }\n throw error;\n }\n\n if (jobStore) {\n try {\n await jobStore.update({\n status: 'completed',\n output,\n });\n console.log('[Worker] Job status updated to completed:', {\n jobId,\n workerId,\n });\n } catch (updateError: any) {\n console.warn('[Worker] Failed to update job store on success:', {\n jobId,\n workerId,\n error: updateError?.message || String(updateError),\n });\n }\n }\n\n console.log('[Worker] Job completed:', {\n jobId,\n workerId,\n output,\n });\n\n const successPayload: WebhookPayload = {\n jobId,\n workerId,\n status: 'success',\n output,\n metadata,\n };\n\n if (webhookUrl) {\n await sendWebhook(webhookUrl, successPayload);\n }\n } catch (error: any) {\n console.error('[Worker] Error processing SQS record:', {\n jobId: messageBody?.jobId ?? '(parse failed)',\n workerId: messageBody?.workerId ?? 
'(parse failed)',\n error: error?.message || String(error),\n stack: error?.stack,\n });\n throw error;\n }\n });\n\n await Promise.all(promises);\n };\n}\n","/**\n * MongoDB-backed job store for Lambda workers.\n * Updates jobs directly in MongoDB; never uses HTTP/origin URL.\n *\n * Env: MONGODB_WORKER_URI (or MONGODB_URI), MONGODB_WORKER_DB (or MONGODB_DB),\n * MONGODB_WORKER_JOBS_COLLECTION (default: worker_jobs).\n */\n\nimport { MongoClient, type Collection } from 'mongodb';\nimport type { JobStore, JobStoreUpdate } from './handler';\n\nconst uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;\nconst dbName =\n process.env.MONGODB_WORKER_DB ||\n process.env.MONGODB_DB ||\n 'worker';\nconst collectionName =\n process.env.MONGODB_WORKER_JOBS_COLLECTION || 'worker_jobs';\n\ntype Doc = {\n _id: string;\n jobId: string;\n workerId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input: any;\n output?: any;\n error?: { message: string; stack?: string; name?: string };\n metadata?: Record<string, any>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n};\n\nlet clientPromise: Promise<MongoClient> | null = null;\n\nfunction getClient(): Promise<MongoClient> {\n if (!uri) {\n throw new Error(\n 'MongoDB URI required for job store. Set DATABASE_MONGODB_URI or MONGODB_URI.'\n );\n }\n if (!clientPromise) {\n clientPromise = new MongoClient(uri, {\n maxPoolSize: 10,\n minPoolSize: 0,\n serverSelectionTimeoutMS: 10_000,\n }).connect();\n }\n return clientPromise;\n}\n\nasync function getCollection(): Promise<Collection<Doc>> {\n const client = await getClient();\n return client.db(dbName).collection<Doc>(collectionName);\n}\n\n/**\n * Create a JobStore that reads/writes directly to MongoDB.\n * Caller must ensure the job exists (upsert on first use).\n */\nexport function createMongoJobStore(\n workerId: string,\n jobId: string,\n input: any,\n metadata: Record<string, any>\n): JobStore {\n return {\n update: async (update: JobStoreUpdate): Promise<void> => {\n try {\n const coll = await getCollection();\n const now = new Date().toISOString();\n const existing = await coll.findOne({ _id: jobId });\n\n let metadataUpdate: Record<string, any> = { ...(existing?.metadata ?? {}) };\n if (update.metadata) {\n Object.assign(metadataUpdate, update.metadata);\n }\n if (update.progress !== undefined || update.progressMessage !== undefined) {\n metadataUpdate.progress = update.progress;\n metadataUpdate.progressMessage = update.progressMessage;\n }\n\n const set: Partial<Doc> = {\n updatedAt: now,\n metadata: metadataUpdate,\n };\n if (update.status !== undefined) {\n set.status = update.status;\n if (['completed', 'failed'].includes(update.status) && !existing?.completedAt) {\n set.completedAt = now;\n }\n }\n if (update.output !== undefined) set.output = update.output;\n if (update.error !== undefined) set.error = update.error;\n\n if (existing) {\n await coll.updateOne({ _id: jobId }, { $set: set });\n } else {\n const doc: Doc = {\n _id: jobId,\n jobId,\n workerId,\n status: (update.status as Doc['status']) ?? 'queued',\n input: input ?? {},\n output: update.output,\n error: update.error,\n metadata: metadataUpdate,\n createdAt: now,\n updatedAt: now,\n completedAt: set.completedAt,\n };\n if (doc.status === 'completed' || doc.status === 'failed') {\n doc.completedAt = doc.completedAt ?? 
now;\n }\n await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });\n }\n } catch (e: any) {\n console.error('[Worker] MongoDB job store update failed:', {\n jobId,\n workerId,\n error: e?.message ?? String(e),\n });\n }\n },\n get: async () => {\n try {\n const coll = await getCollection();\n const doc = await coll.findOne({ _id: jobId });\n if (!doc) return null;\n const { _id, ...r } = doc;\n return r as any;\n } catch (e: any) {\n console.error('[Worker] MongoDB job store get failed:', {\n jobId,\n workerId,\n error: e?.message ?? String(e),\n });\n return null;\n }\n },\n };\n}\n\n/**\n * Upsert initial job record in MongoDB (queued).\n * Call this when the Lambda starts processing a message.\n */\nexport async function upsertJob(\n jobId: string,\n workerId: string,\n input: any,\n metadata: Record<string, any>\n): Promise<void> {\n const coll = await getCollection();\n const now = new Date().toISOString();\n await coll.updateOne(\n { _id: jobId },\n {\n $set: {\n _id: jobId,\n jobId,\n workerId,\n status: 'queued',\n input: input ?? {},\n metadata: metadata ?? {},\n createdAt: now,\n updatedAt: now,\n },\n },\n { upsert: true }\n );\n}\n\nexport function isMongoJobStoreConfigured(): boolean {\n return Boolean(uri?.trim());\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACQA,qBAA6C;AAG7C,IAAM,MAAM,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,wBAAwB,QAAQ,IAAI;AAC9F,IAAM,SACJ,QAAQ,IAAI,qBACZ,QAAQ,IAAI,cACZ;AACF,IAAM,iBACJ,QAAQ,IAAI,kCAAkC;AAgBhD,IAAI,gBAA6C;AAEjD,SAAS,YAAkC;AACzC,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,eAAe;AAClB,oBAAgB,IAAI,2BAAY,KAAK;AAAA,MACnC,aAAa;AAAA,MACb,aAAa;AAAA,MACb,0BAA0B;AAAA,IAC5B,CAAC,EAAE,QAAQ;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAe,gBAA0C;AACvD,QAAM,SAAS,MAAM,UAAU;AAC/B,SAAO,OAAO,GAAG,MAAM,EAAE,WAAgB,cAAc;AACzD;AAMO,SAAS,oBACd,UACA,OACA,OACA,UACU;AACV,SAAO;AAAA,IACL,QAAQ,OAAO,WAA0C;AACvD,UAAI;AACF,cAAM,OAAO,MAAM,cAAc;AACjC,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,WAAW,MAAM,KAAK,QAAQ,EAAE,KAAK,MAAM,CAAC;AAElD,YAAI,iBAAsC,EAAE,GAAI,UAAU,YAAY,CAAC,EAAG;AAC1E,YAAI,OAAO,UAAU;AACnB,iBAAO,OAAO,gBAAgB,OAAO,QAAQ;AAAA,QAC/C;AACA,YAAI,OAAO,aAAa,UAAa,OAAO,oBAAoB,QAAW;AACzE,yBAAe,WAAW,OAAO;AACjC,yBAAe,kBAAkB,OAAO;AAAA,QAC1C;AAEA,cAAM,MAAoB;AAAA,UACxB,WAAW;AAAA,UACX,UAAU;AAAA,QACZ;AACA,YAAI,OAAO,WAAW,QAAW;AAC/B,cAAI,SAAS,OAAO;AACpB,cAAI,CAAC,aAAa,QAAQ,EAAE,SAAS,OAAO,MAAM,KAAK,CAAC,UAAU,aAAa;AAC7E,gBAAI,cAAc;AAAA,UACpB;AAAA,QACF;AACA,YAAI,OAAO,WAAW,OAAW,KAAI,SAAS,OAAO;AACrD,YAAI,OAAO,UAAU,OAAW,KAAI,QAAQ,OAAO;AAEnD,YAAI,UAAU;AACZ,gBAAM,KAAK,UAAU,EAAE,KAAK,MAAM,GAAG,EAAE,MAAM,IAAI,CAAC;AAAA,QACpD,OAAO;AACL,gBAAM,MAAW;AAAA,YACf,KAAK;AAAA,YACL;AAAA,YACA;AAAA,YACA,QAAS,OAAO,UAA4B;AAAA,YAC5C,OAAO,SAAS,CAAC;AAAA,YACjB,QAAQ,OAAO;AAAA,YACf,OAAO,OAAO;AAAA,YACd,UAAU;AAAA,YACV,WAAW;AAAA,YACX,WAAW;AAAA,YACX,aAAa,IAAI;AAAA,UACnB;AACA,cAAI,IAAI,WAAW,eAAe,IAAI,WAAW,UAAU;AACzD,gBAAI,cAAc,IAAI,eAAe;AAAA,UACvC;AACA,gBAAM,KAAK,UAAU,EAAE,KAAK,MAAM,GAAG,EAAE,MAAM,IAAI,GAAG,EAAE,QAAQ,KAAK,CAAC;AAAA,QACtE;AAAA,MACF,SAAS,GAAQ;AACf,gBAAQ,MAAM,6CAA6C;AAAA,UACzD;AAAA,UACA;AAAA,UACA,OAAO,GAAG,WAAW,OAAO,CAAC;AAAA,QAC/B,CAAC;AAAA,MACH;AAAA,IACF;AAAA,IACA,KAAK,YAAY;AACf,UAAI;AACF,cAAM,OAAO,MAAM,cAAc;AACjC,cAAM,MAAM,MAAM,KAAK,QAAQ,EAAE,KAAK,MAAM,CAAC;AAC7C,YAAI,CAAC,IAAK,QAAO;AACjB,cAAM,EAAE,KAAK,GAAG,EAAE,IAAI;AACtB,eAAO;AAAA,MACT,SAAS,GAAQ;AACf,gBAAQ,MAAM,0CAA0C;AAAA,UACtD;AAAA,UACA;AAAA,UACA,OAAO,GAAG,WAAW,OAAO,CAAC;AAAA,QAC/B,CAAC;AACD,eAAO;AAAA,MACT;AAAA,IACF;AAAA,EACF;AACF;AAMA,eAAsB,UACpB,OACA,UACA,OACA,UACe;AACf,QAAM,OAAO,MAAM,cAAc;AACjC,QAAM,OAAM,
oBAAI,KAAK,GAAE,YAAY;AACnC,QAAM,KAAK;AAAA,IACT,EAAE,KAAK,MAAM;AAAA,IACb;AAAA,MACE,MAAM;AAAA,QACJ,KAAK;AAAA,QACL;AAAA,QACA;AAAA,QACA,QAAQ;AAAA,QACR,OAAO,SAAS,CAAC;AAAA,QACjB,UAAU,YAAY,CAAC;AAAA,QACvB,WAAW;AAAA,QACX,WAAW;AAAA,MACb;AAAA,IACF;AAAA,IACA,EAAE,QAAQ,KAAK;AAAA,EACjB;AACF;AAEO,SAAS,4BAAqC;AACnD,SAAO,QAAQ,KAAK,KAAK,CAAC;AAC5B;;;AD7EA,eAAe,YACb,YACA,SACe;AACf,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,YAAY;AAAA,MACvC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,QAChB,cAAc;AAAA,MAChB;AAAA,MACA,MAAM,KAAK,UAAU,OAAO;AAAA,IAC9B,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACtD,cAAQ,MAAM,qCAAqC;AAAA,QACjD,KAAK;AAAA,QACL,QAAQ,SAAS;AAAA,QACjB,YAAY,SAAS;AAAA,QACrB;AAAA,MACF,CAAC;AAAA,IAEH,OAAO;AACL,cAAQ,IAAI,yCAAyC;AAAA,QACnD,KAAK;AAAA,QACL,QAAQ,SAAS;AAAA,MACnB,CAAC;AAAA,IACH;AAAA,EACF,SAAS,OAAY;AACnB,YAAQ,MAAM,oCAAoC;AAAA,MAChD,KAAK;AAAA,MACL,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,MACrC,OAAO,OAAO;AAAA,IAChB,CAAC;AAAA,EAEH;AACF;AAUO,SAAS,oBACd,SACA,cAC4D;AAC5D,SAAO,OAAO,OAAiB,kBAAiC;AAC9D,UAAM,WAAW,MAAM,QAAQ,IAAI,OAAO,WAAsB;AAC9D,UAAI,cAAqC;AACzC,UAAI;AACF,sBAAc,KAAK,MAAM,OAAO,IAAI;AAEpC,cAAM,EAAE,UAAU,OAAO,OAAO,SAAS,YAAY,WAAW,CAAC,EAAE,IACjE;AAEF,YAAI;AACJ,YAAI,0BAA0B,GAAG;AAC/B,gBAAM,UAAU,OAAO,UAAU,OAAO,QAAQ;AAChD,qBAAW,oBAAoB,UAAU,OAAO,OAAO,QAAQ;AAAA,QACjE;AAEA,cAAM,iBAAiB;AAAA,UACrB;AAAA,UACA;AAAA,UACA,WAAW,QAAQ,aAAa,cAAc;AAAA,UAC9C,GAAI,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,UAC/B,GAAG;AAAA,QACL;AAEA,YAAI,UAAU;AACZ,cAAI;AACF,kBAAM,SAAS,OAAO,EAAE,QAAQ,UAAU,CAAC;AAC3C,oBAAQ,IAAI,2CAA2C;AAAA,cACrD;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH,SAAS,OAAY;AACnB,oBAAQ,KAAK,gDAAgD;AAAA,cAC3D;AAAA,cACA;AAAA,cACA,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,YACvC,CAAC;AAAA,UACH;AAAA,QACF;AAEA,YAAI;AACJ,YAAI;AACF,mBAAS,MAAM,QAAQ;AAAA,YACrB;AAAA,YACA,KAAK;AAAA,UACP,CAAC;AAED,cAAI,cAAc;AAChB,qBAAS,aAAa,MAAM,MAAM;AAAA,UACpC;AAAA,QACF,SAAS,OAAY;AACnB,gBAAM,eAA+B;AAAA,YACnC;AAAA,YACA;AAAA,YACA,QAAQ;AAAA,YACR,OAAO;AAAA,cACL,SAAS,MAAM,WAAW;AAAA,cAC1B,OAAO,MAAM;AAAA,cACb,MAAM,MAAM,QAAQ;AAAA,YACtB;AAAA,YACA;AAAA,UACF;AAEA,cAAI,UAAU;AACZ,gBAAI;AACF,oBAAM,SAAS,OAAO;AAAA,gBACpB,QAAQ;AAAA,gBACR,OAAO,aAAa;AAAA,cACtB,CAAC;AACD,sBAAQ,IAAI,0CAA0C;AAAA,gBACpD;AAAA,gBACA;AAAA,cACF,CAAC;AAAA,YACH,SAAS,aAAkB;AACzB,sBAAQ,KAAK,iDAAiD;AAAA,gBAC5D;AAAA,gBACA;AAAA,gBACA,OAAO,aAAa,WAAW,OAAO,WAAW;AAAA,cACnD,CAAC;AAAA,YACH;AAAA,UACF;AAEA,cAAI,YAAY;AACd,kBAAM,YAAY,YAAY,YAAY;AAAA,UAC5C;AACA,gBAAM;AAAA,QACR;AAEA,YAAI,UAAU;AACZ,cAAI;AACF,kBAAM,SAAS,OAAO;AAAA,cACpB,QAAQ;AAAA,cACR;AAAA,YACF,CAAC;AACD,oBAAQ,IAAI,6CAA6C;AAAA,cACvD;AAAA,cACA;AAAA,YACF,CAAC;AAAA,UACH,SAAS,aAAkB;AACzB,oBAAQ,KAAK,mDAAmD;AAAA,cAC9D;AAAA,cACA;AAAA,cACA,OAAO,aAAa,WAAW,OAAO,WAAW;AAAA,YACnD,CAAC;AAAA,UACH;AAAA,QACF;AAEA,gBAAQ,IAAI,2BAA2B;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAED,cAAM,iBAAiC;AAAA,UACrC;AAAA,UACA;AAAA,UACA,QAAQ;AAAA,UACR;AAAA,UACA;AAAA,QACF;AAEA,YAAI,YAAY;AACd,gBAAM,YAAY,YAAY,cAAc;AAAA,QAC9C;AAAA,MACF,SAAS,OAAY;AACnB,gBAAQ,MAAM,yCAAyC;AAAA,UACrD,OAAO,aAAa,SAAS;AAAA,UAC7B,UAAU,aAAa,YAAY;AAAA,UACnC,OAAO,OAAO,WAAW,OAAO,KAAK;AAAA,UACrC,OAAO,OAAO;AAAA,QAChB,CAAC;AACD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAED,UAAM,QAAQ,IAAI,QAAQ;AAAA,EAC5B;AACF;","names":[]}
package/dist/handler.mjs
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}