@microfox/ai-worker 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/README.md +185 -0
- package/dist/chunk-BJTO5JO5.mjs +11 -0
- package/dist/chunk-BJTO5JO5.mjs.map +1 -0
- package/dist/chunk-FQCZSXDI.mjs +83 -0
- package/dist/chunk-FQCZSXDI.mjs.map +1 -0
- package/dist/chunk-WVR4JVWK.mjs +285 -0
- package/dist/chunk-WVR4JVWK.mjs.map +1 -0
- package/dist/chunk-ZYYWZ3PR.mjs +50 -0
- package/dist/chunk-ZYYWZ3PR.mjs.map +1 -0
- package/dist/client.d.mts +64 -0
- package/dist/client.d.ts +64 -0
- package/dist/client.js +108 -0
- package/dist/client.js.map +1 -0
- package/dist/client.mjs +10 -0
- package/dist/client.mjs.map +1 -0
- package/dist/config.d.mts +38 -0
- package/dist/config.d.ts +38 -0
- package/dist/config.js +76 -0
- package/dist/config.js.map +1 -0
- package/dist/config.mjs +12 -0
- package/dist/config.mjs.map +1 -0
- package/dist/handler.d.mts +96 -0
- package/dist/handler.d.ts +96 -0
- package/dist/handler.js +311 -0
- package/dist/handler.js.map +1 -0
- package/dist/handler.mjs +8 -0
- package/dist/handler.mjs.map +1 -0
- package/dist/index.d.mts +236 -0
- package/dist/index.d.ts +236 -0
- package/dist/index.js +734 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +313 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +73 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,734 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
// Public API registry: __export installs lazy getters, so the referenced
// functions (defined further down in this bundle) resolve on first access.
var index_exports = {};
__export(index_exports, {
  clearWorkersConfigCache: () => clearWorkersConfigCache,
  createLambdaEntrypoint: () => createLambdaEntrypoint,
  createLambdaHandler: () => createLambdaHandler,
  createWorker: () => createWorker,
  dispatch: () => dispatch,
  dispatchLocal: () => dispatchLocal,
  getWorkersConfig: () => getWorkersConfig,
  resolveQueueUrl: () => resolveQueueUrl
});
module.exports = __toCommonJS(index_exports);
// src/client.ts
|
|
35
|
+
/**
 * Resolve the workers trigger endpoint from the environment.
 *
 * Accepts several env var names (newest first), strips any query/hash and any
 * existing `/workers/trigger` or `/workers/config` suffix, then appends
 * `/workers/trigger` to the remaining base path.
 *
 * @returns {string} Absolute URL of the trigger endpoint.
 * @throws {Error} When no base-URL environment variable is set.
 */
function getWorkersTriggerUrl() {
  const envNames = [
    "WORKER_BASE_URL",
    "NEXT_PUBLIC_WORKER_BASE_URL",
    "WORKERS_TRIGGER_API_URL",
    "NEXT_PUBLIC_WORKERS_TRIGGER_API_URL",
    "WORKERS_CONFIG_API_URL",
    "NEXT_PUBLIC_WORKERS_CONFIG_API_URL"
  ];
  let raw;
  for (const name of envNames) {
    if (process.env[name]) {
      raw = process.env[name];
      break;
    }
  }
  if (!raw) {
    throw new Error(
      "WORKER_BASE_URL (preferred) or NEXT_PUBLIC_WORKER_BASE_URL is required for background workers"
    );
  }
  const url = new URL(raw);
  url.search = "";
  url.hash = "";
  // Remove an existing trigger/config suffix; assigning to `pathname` lets the
  // URL object normalize the result before we read it back.
  const stripped = (url.pathname || "").replace(/\/?workers\/(trigger|config)\/?$/, "");
  url.pathname = stripped;
  const base = url.pathname.replace(/\/+$/, "");
  url.pathname = `${base}/workers/trigger`.replace(/\/+$/, "");
  return url.toString();
}
+
/**
 * Flatten a runtime context object into a plain, JSON-safe payload.
 *
 * Merge order (later wins): requestId, then ctx.metadata keys, then whatever
 * a custom `_serializeContext()` hook returns.
 *
 * @param {object} ctx - Context with optional requestId / metadata / hook.
 * @returns {object} Plain object suitable for JSON.stringify.
 */
function serializeContext(ctx) {
  const out = {};
  if (ctx.requestId) {
    out.requestId = ctx.requestId;
  }
  const meta = ctx.metadata;
  if (meta && typeof meta === "object") {
    Object.assign(out, meta);
  }
  if (typeof ctx._serializeContext === "function") {
    Object.assign(out, ctx._serializeContext());
  }
  return out;
}
|
65
|
+
/**
 * Validate `input` and POST a trigger request for `workerId` to the workers
 * trigger endpoint (resolved via getWorkersTriggerUrl()).
 *
 * @param {string} workerId - Target worker identifier.
 * @param {unknown} input - Raw job input; validated via `inputSchema.parse`.
 * @param {{parse: Function}} inputSchema - Zod-style schema for the input.
 * @param {object} options - Optional `jobId`, `webhookUrl`, `metadata`.
 * @param {object} [ctx] - Caller context, flattened by serializeContext().
 * @returns {Promise<{messageId: string, status: "queued", jobId: string}>}
 * @throws {Error} When schema validation fails or the endpoint returns non-2xx.
 */
async function dispatch(workerId, input, inputSchema, options, ctx) {
  const validatedInput = inputSchema.parse(input);
  // FIX: String.prototype.substr is deprecated — slice(2, 11) keeps the same
  // up-to-9-character base36 random suffix.
  const jobId = options.jobId || `job-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
  const triggerUrl = getWorkersTriggerUrl();
  const serializedContext = ctx ? serializeContext(ctx) : {};
  const messageBody = {
    workerId,
    jobId,
    input: validatedInput,
    context: serializedContext,
    webhookUrl: options.webhookUrl,
    metadata: options.metadata || {},
    timestamp: (/* @__PURE__ */ new Date()).toISOString()
  };
  const headers = {
    "Content-Type": "application/json"
  };
  // Optional shared secret understood by the trigger endpoint.
  const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
  if (triggerKey) {
    headers["x-workers-trigger-key"] = triggerKey;
  }
  const response = await fetch(triggerUrl, {
    method: "POST",
    headers,
    body: JSON.stringify({
      workerId,
      body: messageBody
    })
  });
  if (!response.ok) {
    // Best-effort capture of the response body for the error message.
    const text = await response.text().catch(() => "");
    throw new Error(
      `Failed to trigger worker "${workerId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
    );
  }
  const data = await response.json().catch(() => ({}));
  const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;
  return {
    messageId,
    status: "queued",
    jobId
  };
}
|
108
|
+
/**
 * Run a worker handler in-process (no queue, no HTTP).
 *
 * @param {Function} handler - Worker handler taking `{ input, ctx }`.
 * @param {unknown} input - Job input (assumed already validated).
 * @param {object} [ctx] - Optional handler context; defaults to `{}`.
 * @returns {Promise<unknown>} The handler's result.
 */
async function dispatchLocal(handler, input, ctx) {
  const context = ctx || {};
  return handler({ input, ctx: context });
}
|
111
|
+
|
|
112
|
+
// src/mongoJobStore.ts
var import_mongodb = require("mongodb");
// Connection settings come from the environment; the *_WORKER_* variants take
// precedence over the generic MongoDB variables. `uri` may be undefined —
// callers must check isMongoJobStoreConfigured() first.
var uri = process.env.MONGODB_WORKER_URI || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
var dbName = process.env.MONGODB_WORKER_DB || process.env.MONGODB_DB || "worker";
var collectionName = process.env.MONGODB_WORKER_JOBS_COLLECTION || "worker_jobs";
// Memoized MongoClient connection promise, created lazily by getClient().
var clientPromise = null;
/**
 * Lazily create and memoize a single connected MongoClient promise.
 *
 * @returns {Promise<import("mongodb").MongoClient>} Shared connection promise.
 * @throws {Error} Synchronously, when no MongoDB URI is configured.
 */
function getClient() {
  if (!uri) {
    throw new Error(
      "MongoDB URI required for job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
    );
  }
  if (!clientPromise) {
    const clientOptions = {
      maxPoolSize: 10,
      minPoolSize: 0,
      serverSelectionTimeoutMS: 1e4
    };
    clientPromise = new import_mongodb.MongoClient(uri, clientOptions).connect();
  }
  return clientPromise;
}
|
133
|
+
/**
 * Resolve the worker-jobs collection from the shared client connection.
 *
 * @returns {Promise<import("mongodb").Collection>} The jobs collection.
 */
async function getCollection() {
  const connected = await getClient();
  return connected.db(dbName).collection(collectionName);
}
|
137
|
+
/**
 * Build a MongoDB-backed job store bound to one (workerId, jobId) pair.
 *
 * `update` performs a read-modify-write: it merges incoming metadata over the
 * stored metadata, stamps completedAt on terminal statuses, and either updates
 * the existing document or upserts a fresh one. Failures are logged, never
 * thrown (best-effort persistence). `get` returns the job document without its
 * Mongo `_id`, or null when missing or on error.
 */
function createMongoJobStore(workerId, jobId, input, metadata) {
  return {
    update: async (update) => {
      try {
        const coll = await getCollection();
        const now = (/* @__PURE__ */ new Date()).toISOString();
        // Read the current document so metadata can be merged, not replaced.
        // NOTE(review): read-then-write is not atomic; concurrent updates to
        // the same job may lose metadata keys — acceptable for status tracking.
        const existing = await coll.findOne({ _id: jobId });
        let metadataUpdate = { ...existing?.metadata ?? {} };
        if (update.metadata) {
          Object.assign(metadataUpdate, update.metadata);
        }
        // Progress fields are stored inside metadata; setting either one
        // overwrites both (so a progress update can clear a stale message).
        if (update.progress !== void 0 || update.progressMessage !== void 0) {
          metadataUpdate.progress = update.progress;
          metadataUpdate.progressMessage = update.progressMessage;
        }
        const set = {
          updatedAt: now,
          metadata: metadataUpdate
        };
        if (update.status !== void 0) {
          set.status = update.status;
          // Stamp completion time once, on the first terminal transition.
          if (["completed", "failed"].includes(update.status) && !existing?.completedAt) {
            set.completedAt = now;
          }
        }
        if (update.output !== void 0) set.output = update.output;
        if (update.error !== void 0) set.error = update.error;
        if (existing) {
          await coll.updateOne({ _id: jobId }, { $set: set });
        } else {
          // No prior document: synthesize a full record and upsert it.
          const doc = {
            _id: jobId,
            jobId,
            workerId,
            status: update.status ?? "queued",
            input: input ?? {},
            output: update.output,
            error: update.error,
            metadata: metadataUpdate,
            createdAt: now,
            updatedAt: now,
            completedAt: set.completedAt
          };
          if (doc.status === "completed" || doc.status === "failed") {
            doc.completedAt = doc.completedAt ?? now;
          }
          await coll.updateOne({ _id: jobId }, { $set: doc }, { upsert: true });
        }
      } catch (e) {
        // Persistence is best-effort: log and continue rather than failing the job.
        console.error("[Worker] MongoDB job store update failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
      }
    },
    get: async () => {
      try {
        const coll = await getCollection();
        const doc = await coll.findOne({ _id: jobId });
        if (!doc) return null;
        // Strip the Mongo-internal _id before returning the job record.
        const { _id, ...r } = doc;
        return r;
      } catch (e) {
        console.error("[Worker] MongoDB job store get failed:", {
          jobId,
          workerId,
          error: e?.message ?? String(e)
        });
        return null;
      }
    }
  };
}
|
211
|
+
/**
 * Seed (or refresh) the job document in its initial "queued" state.
 *
 * @param {string} jobId - Job identifier; doubles as the Mongo `_id`.
 * @param {string} workerId - Worker that will process the job.
 * @param {object} [input] - Job input; stored as `{}` when nullish.
 * @param {object} [metadata] - Initial metadata; stored as `{}` when nullish.
 * @returns {Promise<void>}
 */
async function upsertJob(jobId, workerId, input, metadata) {
  const collection = await getCollection();
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
  const seed = {
    _id: jobId,
    jobId,
    workerId,
    status: "queued",
    input: input ?? {},
    metadata: metadata ?? {},
    createdAt: timestamp,
    updatedAt: timestamp
  };
  await collection.updateOne({ _id: jobId }, { $set: seed }, { upsert: true });
}
|
231
|
+
/**
 * Whether a usable (non-blank) MongoDB URI was supplied via the environment.
 *
 * @returns {boolean} True when the Mongo-backed job store can be used.
 */
function isMongoJobStoreConfigured() {
  return !!(uri && uri.trim());
}
|
234
|
+
|
|
235
|
+
// src/handler.ts
|
|
236
|
+
/**
 * POST a completion payload to a caller-supplied webhook.
 *
 * Strictly best-effort: HTTP failures and network errors are logged and
 * swallowed so a broken webhook never fails the job itself.
 *
 * @param {string} webhookUrl - Destination URL.
 * @param {object} payload - JSON-serializable callback payload.
 * @returns {Promise<void>}
 */
async function sendWebhook(webhookUrl, payload) {
  try {
    const response = await fetch(webhookUrl, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "User-Agent": "ai-router-worker/1.0"
      },
      body: JSON.stringify(payload)
    });
    if (response.ok) {
      console.log("[Worker] Webhook callback successful:", {
        url: webhookUrl,
        status: response.status
      });
      return;
    }
    const errorText = await response.text().catch(() => "");
    console.error("[Worker] Webhook callback failed:", {
      url: webhookUrl,
      status: response.status,
      statusText: response.statusText,
      errorText
    });
  } catch (error) {
    console.error("[Worker] Webhook callback error:", {
      url: webhookUrl,
      error: error?.message || String(error),
      stack: error?.stack
    });
  }
}
|
268
|
+
/**
 * Wrap a worker handler as an AWS Lambda SQS event handler.
 *
 * Each SQS record carries a JSON message produced by dispatch(): `{ workerId,
 * jobId, input, context, webhookUrl, metadata }`. Records are processed
 * concurrently; status transitions are persisted to the Mongo job store when
 * configured (best-effort), the optional webhook is notified with a
 * success/error payload, and handler errors are rethrown so SQS retries the
 * message.
 *
 * @param {Function} handler - Worker handler taking `{ input, ctx }`.
 * @param {{parse: Function}} [outputSchema] - Optional Zod-style schema the
 *   handler output is validated against.
 * @returns {(event: object, lambdaContext: object) => Promise<void>}
 */
function createLambdaHandler(handler, outputSchema) {
  return async (event, lambdaContext) => {
    const promises = event.Records.map(async (record) => {
      let messageBody = null;
      try {
        messageBody = JSON.parse(record.body);
        // FIX: default `context` to {} — messages built by hand or by older
        // producers may omit it, and `context.requestId` below would then
        // throw a TypeError before the job even starts.
        const { workerId, jobId, input, context = {}, webhookUrl, metadata = {} } = messageBody;
        let jobStore;
        if (isMongoJobStoreConfigured()) {
          await upsertJob(jobId, workerId, input, metadata);
          jobStore = createMongoJobStore(workerId, jobId, input, metadata);
        }
        const handlerContext = {
          jobId,
          workerId,
          // Prefer the producer's requestId; fall back to Lambda's.
          requestId: context.requestId || lambdaContext.awsRequestId,
          ...jobStore ? { jobStore } : {},
          ...context
        };
        if (jobStore) {
          // Best-effort status update; a store failure must not kill the job.
          try {
            await jobStore.update({ status: "running" });
            console.log("[Worker] Job status updated to running:", {
              jobId,
              workerId
            });
          } catch (error) {
            console.warn("[Worker] Failed to update status to running:", {
              jobId,
              workerId,
              error: error?.message || String(error)
            });
          }
        }
        let output;
        try {
          output = await handler({
            input,
            ctx: handlerContext
          });
          if (outputSchema) {
            output = outputSchema.parse(output);
          }
        } catch (error) {
          const errorPayload = {
            jobId,
            workerId,
            status: "error",
            error: {
              message: error.message || "Unknown error",
              stack: error.stack,
              name: error.name || "Error"
            },
            metadata
          };
          if (jobStore) {
            try {
              await jobStore.update({
                status: "failed",
                error: errorPayload.error
              });
              console.log("[Worker] Job status updated to failed:", {
                jobId,
                workerId
              });
            } catch (updateError) {
              console.warn("[Worker] Failed to update job store on error:", {
                jobId,
                workerId,
                error: updateError?.message || String(updateError)
              });
            }
          }
          if (webhookUrl) {
            await sendWebhook(webhookUrl, errorPayload);
          }
          // Rethrow so SQS marks the message as failed and retries.
          throw error;
        }
        if (jobStore) {
          try {
            await jobStore.update({
              status: "completed",
              output
            });
            console.log("[Worker] Job status updated to completed:", {
              jobId,
              workerId
            });
          } catch (updateError) {
            console.warn("[Worker] Failed to update job store on success:", {
              jobId,
              workerId,
              error: updateError?.message || String(updateError)
            });
          }
        }
        console.log("[Worker] Job completed:", {
          jobId,
          workerId,
          output
        });
        const successPayload = {
          jobId,
          workerId,
          status: "success",
          output,
          metadata
        };
        if (webhookUrl) {
          await sendWebhook(webhookUrl, successPayload);
        }
      } catch (error) {
        // Covers JSON parse failures as well as rethrown handler errors.
        console.error("[Worker] Error processing SQS record:", {
          jobId: messageBody?.jobId ?? "(parse failed)",
          workerId: messageBody?.workerId ?? "(parse failed)",
          error: error?.message || String(error),
          stack: error?.stack
        });
        throw error;
      }
    });
    // Fail the whole batch if any record failed.
    await Promise.all(promises);
  };
}
|
392
|
+
|
|
393
|
+
// src/config.ts
// Module-level cache for the workers config fetched by getWorkersConfig();
// cleared by clearWorkersConfigCache().
var cachedConfig = null;
var cacheExpiry = 0;
// Cache time-to-live: 5 minutes.
var CACHE_TTL_MS = 5 * 60 * 1e3;
|
397
|
+
/**
 * Fetch the workers configuration, serving from the module-level cache while
 * it is still fresh (CACHE_TTL_MS).
 *
 * @param {string} apiUrl - Config endpoint URL.
 * @param {string} [apiKey] - Optional key sent as `x-workers-config-key`.
 * @returns {Promise<object>} The parsed workers config.
 * @throws {Error} When the endpoint responds with a non-2xx status.
 */
async function getWorkersConfig(apiUrl, apiKey) {
  const now = Date.now();
  const cacheIsFresh = Boolean(cachedConfig) && now < cacheExpiry;
  if (cacheIsFresh) {
    return cachedConfig;
  }
  const headers = { "Content-Type": "application/json" };
  if (apiKey) {
    headers["x-workers-config-key"] = apiKey;
  }
  const response = await fetch(apiUrl, { method: "GET", headers });
  if (!response.ok) {
    throw new Error(
      `Failed to fetch workers config: ${response.status} ${response.statusText}`
    );
  }
  const config = await response.json();
  cachedConfig = config;
  cacheExpiry = now + CACHE_TTL_MS;
  return config;
}
|
422
|
+
/**
 * Look up the queue URL registered for a worker in the workers config.
 *
 * @param {string} workerId - Worker identifier to resolve.
 * @param {string} apiUrl - Config endpoint URL (passed to getWorkersConfig).
 * @param {string} [apiKey] - Optional config API key.
 * @returns {Promise<string>} The worker's queue URL.
 * @throws {Error} When the worker id is not present in the config.
 */
async function resolveQueueUrl(workerId, apiUrl, apiKey) {
  const { workers } = await getWorkersConfig(apiUrl, apiKey);
  const entry = workers[workerId];
  if (!entry) {
    const available = Object.keys(workers).join(", ");
    throw new Error(
      `Worker "${workerId}" not found in workers config. Available workers: ${available}`
    );
  }
  return entry.queueUrl;
}
|
432
|
+
/**
 * Drop the cached workers config so the next getWorkersConfig() call
 * refetches from the API.
 */
function clearWorkersConfigCache() {
  cacheExpiry = 0;
  cachedConfig = null;
}
|
436
|
+
|
|
437
|
+
// src/index.ts
|
|
438
|
+
/**
 * Build a worker agent from `{ id, inputSchema, outputSchema, handler }`.
 *
 * The returned agent exposes `dispatch(input, options)` which either runs the
 * handler in-process ("local" mode: explicit `options.mode === "local"`, or
 * "auto" while NODE_ENV=development and WORKERS_LOCAL_MODE !== "false") or
 * forwards to the HTTP trigger via dispatch(). In local mode, job status is
 * tracked through a direct job-store module when one can be dynamically
 * imported, else through an HTTP job-store endpoint derived from the webhook
 * URL or WORKER_JOB_STORE_URL, else not at all.
 */
function createWorker(config) {
  const { id, inputSchema, outputSchema, handler } = config;
  const agent = {
    id,
    handler,
    inputSchema,
    outputSchema,
    async dispatch(input, options) {
      const mode = options.mode ?? "auto";
      const envWantsLocal = process.env.NODE_ENV === "development" && process.env.WORKERS_LOCAL_MODE !== "false";
      const isLocal = mode === "local" || mode === "auto" && envWantsLocal;
      if (isLocal) {
        const parsedInput = inputSchema.parse(input);
        const localJobId = options.jobId || `local-${Date.now()}`;
        let directJobStore = null;
        // Next.js path alias resolved by the host app's bundler, if present.
        const nextJsPathAlias = "@/app/api/workflows/stores/jobStore";
        const explicitPath = process.env.WORKER_JOB_STORE_MODULE_PATH;
        // Try to import a job-store module directly (alias first, then an
        // explicit path via import with a require fallback); null when neither
        // resolves. Import failures are deliberately swallowed.
        const resolveJobStore = async () => {
          try {
            const module2 = await import(nextJsPathAlias);
            if (module2?.updateJob) {
              return { updateJob: module2.updateJob, setJob: module2.setJob };
            }
          } catch {
          }
          if (explicitPath) {
            try {
              const module2 = await import(explicitPath).catch(() => {
                return require(explicitPath);
              });
              if (module2?.updateJob) {
                return { updateJob: module2.updateJob, setJob: module2.setJob };
              }
            } catch {
            }
          }
          return null;
        };
        directJobStore = await resolveJobStore();
        if (directJobStore) {
          console.log("[Worker] Using direct job store in local mode (no HTTP needed)");
        }
        // Derive an HTTP job-store base URL from the webhook URL by stripping
        // a trailing /webhook; fall back to WORKER_JOB_STORE_URL.
        let jobStoreUrl;
        if (options.webhookUrl) {
          try {
            const webhookUrlObj = new URL(options.webhookUrl);
            jobStoreUrl = webhookUrlObj.pathname.replace(/\/webhook$/, "");
            jobStoreUrl = `${webhookUrlObj.origin}${jobStoreUrl}`;
          } catch {
          }
        }
        jobStoreUrl = jobStoreUrl || process.env.WORKER_JOB_STORE_URL;
        // Build a job store facade: direct module calls when available,
        // otherwise HTTP, otherwise undefined (no tracking).
        const createLocalJobStore = (directStore, httpUrl) => {
          if (directStore) {
            return {
              update: async (update) => {
                try {
                  const updatePayload = {};
                  if (update.status !== void 0) {
                    updatePayload.status = update.status;
                  }
                  if (update.metadata !== void 0) {
                    updatePayload.metadata = update.metadata;
                  }
                  if (update.progress !== void 0) {
                    updatePayload.metadata = {
                      ...updatePayload.metadata,
                      progress: update.progress,
                      progressMessage: update.progressMessage
                    };
                  }
                  if (update.output !== void 0) {
                    updatePayload.output = update.output;
                  }
                  if (update.error !== void 0) {
                    updatePayload.error = update.error;
                  }
                  await directStore.updateJob(localJobId, updatePayload);
                  console.log("[Worker] Local job updated (direct DB):", {
                    jobId: localJobId,
                    workerId: id,
                    updates: Object.keys(updatePayload)
                  });
                } catch (error) {
                  console.warn("[Worker] Failed to update local job (direct DB):", {
                    jobId: localJobId,
                    workerId: id,
                    error: error?.message || String(error)
                  });
                }
              },
              get: async () => {
                try {
                  if (directStore) {
                    // Re-import to obtain getJob (the resolved facade only
                    // captured updateJob/setJob).
                    const nextJsPath = "@/app/api/workflows/stores/jobStore";
                    const explicitPath2 = process.env.WORKER_JOB_STORE_MODULE_PATH;
                    for (const importPath of [nextJsPath, explicitPath2].filter(Boolean)) {
                      try {
                        const module2 = await import(importPath);
                        if (module2?.getJob) {
                          return await module2.getJob(localJobId);
                        }
                      } catch {
                      }
                    }
                  }
                  return null;
                } catch (error) {
                  console.warn("[Worker] Failed to get local job (direct DB):", {
                    jobId: localJobId,
                    workerId: id,
                    error: error?.message || String(error)
                  });
                  return null;
                }
              }
            };
          }
          if (!httpUrl) {
            return void 0;
          }
          // HTTP-backed store: POST updates to `${httpUrl}/update`, read jobs
          // from `${httpUrl}/{workerId}/{jobId}`. All failures are logged only.
          return {
            update: async (update) => {
              try {
                const updatePayload = { jobId: localJobId, workerId: id };
                if (update.status !== void 0) {
                  updatePayload.status = update.status;
                }
                if (update.metadata !== void 0) {
                  updatePayload.metadata = update.metadata;
                }
                if (update.progress !== void 0) {
                  updatePayload.metadata = {
                    ...updatePayload.metadata,
                    progress: update.progress,
                    progressMessage: update.progressMessage
                  };
                }
                if (update.output !== void 0) {
                  updatePayload.output = update.output;
                }
                if (update.error !== void 0) {
                  updatePayload.error = update.error;
                }
                const response = await fetch(`${httpUrl}/update`, {
                  method: "POST",
                  headers: { "Content-Type": "application/json" },
                  body: JSON.stringify(updatePayload)
                });
                if (!response.ok) {
                  throw new Error(`Job store update failed: ${response.status} ${response.statusText}`);
                }
                console.log("[Worker] Local job updated (HTTP):", {
                  jobId: localJobId,
                  workerId: id,
                  updates: Object.keys(updatePayload)
                });
              } catch (error) {
                console.warn("[Worker] Failed to update local job (HTTP):", {
                  jobId: localJobId,
                  workerId: id,
                  error: error?.message || String(error)
                });
              }
            },
            get: async () => {
              try {
                const response = await fetch(`${httpUrl}/${id}/${localJobId}`, {
                  method: "GET",
                  headers: { "Content-Type": "application/json" }
                });
                if (!response.ok) {
                  if (response.status === 404) {
                    return null;
                  }
                  throw new Error(`Job store get failed: ${response.status} ${response.statusText}`);
                }
                return await response.json();
              } catch (error) {
                console.warn("[Worker] Failed to get local job (HTTP):", {
                  jobId: localJobId,
                  workerId: id,
                  error: error?.message || String(error)
                });
                return null;
              }
            }
          };
        };
        const jobStore = createLocalJobStore(directJobStore, jobStoreUrl);
        // Seed the initial "queued" record when the direct store supports it.
        if (directJobStore?.setJob) {
          try {
            await directJobStore.setJob(localJobId, {
              jobId: localJobId,
              workerId: id,
              status: "queued",
              input: parsedInput,
              metadata: options.metadata || {}
            });
          } catch (error) {
            console.warn("[Worker] Failed to create initial job record:", {
              jobId: localJobId,
              workerId: id,
              error: error?.message || String(error)
            });
          }
        }
        const handlerContext = {
          jobId: localJobId,
          workerId: id,
          ...jobStore ? { jobStore } : {}
        };
        try {
          if (jobStore) {
            await jobStore.update({ status: "running" });
          }
          const output = await dispatchLocal(handler, parsedInput, handlerContext);
          if (jobStore) {
            await jobStore.update({ status: "completed", output });
          }
          // Mirror the remote flow: notify the webhook with a success payload.
          if (options.webhookUrl) {
            try {
              await fetch(options.webhookUrl, {
                method: "POST",
                headers: { "Content-Type": "application/json" },
                body: JSON.stringify({
                  jobId: localJobId,
                  workerId: id,
                  status: "success",
                  output,
                  metadata: options.metadata
                })
              });
            } catch (error) {
              console.warn("[Worker] Local webhook call failed:", error);
            }
          }
          // Same shape as the remote dispatch() result.
          return {
            messageId: `local-${Date.now()}`,
            status: "queued",
            jobId: localJobId
          };
        } catch (error) {
          if (jobStore) {
            await jobStore.update({
              status: "failed",
              error: {
                message: error.message || "Unknown error",
                stack: error.stack,
                name: error.name || "Error"
              }
            });
          }
          if (options.webhookUrl) {
            try {
              await fetch(options.webhookUrl, {
                method: "POST",
                headers: { "Content-Type": "application/json" },
                body: JSON.stringify({
                  jobId: localJobId,
                  workerId: id,
                  status: "error",
                  error: {
                    message: error.message || "Unknown error",
                    stack: error.stack,
                    name: error.name || "Error"
                  },
                  metadata: options.metadata
                })
              });
            } catch (webhookError) {
              console.warn("[Worker] Local error webhook call failed:", webhookError);
            }
          }
          throw error;
        }
      }
      // Remote mode: enqueue via the HTTP trigger endpoint.
      return dispatch(id, input, inputSchema, options);
    }
  };
  return agent;
}
|
720
|
+
/**
 * Bridge a createWorker() agent into an AWS Lambda SQS entrypoint.
 *
 * @param {{handler: Function, outputSchema?: object}} agent - Worker agent.
 * @returns {Function} Lambda handler produced by createLambdaHandler().
 */
function createLambdaEntrypoint(agent) {
  const { handler, outputSchema } = agent;
  return createLambdaHandler(handler, outputSchema);
}
|
723
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
724
|
+
// `0 && (...)` is dead code at runtime; Node's cjs-module-lexer still parses
// the object literal so these names are available as ESM named imports.
0 && (module.exports = {
  clearWorkersConfigCache,
  createLambdaEntrypoint,
  createLambdaHandler,
  createWorker,
  dispatch,
  dispatchLocal,
  getWorkersConfig,
  resolveQueueUrl
});
//# sourceMappingURL=index.js.map