@microfox/ai-worker 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/README.md +19 -2
- package/dist/chunk-72XGFZCE.mjs +163 -0
- package/dist/chunk-72XGFZCE.mjs.map +1 -0
- package/dist/chunk-7LQNS2SG.mjs +797 -0
- package/dist/chunk-7LQNS2SG.mjs.map +1 -0
- package/dist/chunk-AOXGONGI.mjs +351 -0
- package/dist/chunk-AOXGONGI.mjs.map +1 -0
- package/dist/client-BqSJQ9mZ.d.mts +183 -0
- package/dist/client-BqSJQ9mZ.d.ts +183 -0
- package/dist/client.d.mts +2 -64
- package/dist/client.d.ts +2 -64
- package/dist/client.js +88 -4
- package/dist/client.js.map +1 -1
- package/dist/client.mjs +11 -3
- package/dist/handler.d.mts +113 -14
- package/dist/handler.d.ts +113 -14
- package/dist/handler.js +823 -6
- package/dist/handler.js.map +1 -1
- package/dist/handler.mjs +10 -3
- package/dist/index.d.mts +3 -3
- package/dist/index.d.ts +3 -3
- package/dist/index.js +1059 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +167 -8
- package/dist/index.mjs.map +1 -1
- package/dist/queueJobStore.d.mts +53 -0
- package/dist/queueJobStore.d.ts +53 -0
- package/dist/queueJobStore.js +378 -0
- package/dist/queueJobStore.js.map +1 -0
- package/dist/queueJobStore.mjs +14 -0
- package/dist/queueJobStore.mjs.map +1 -0
- package/package.json +9 -2
- package/dist/chunk-FQCZSXDI.mjs +0 -83
- package/dist/chunk-FQCZSXDI.mjs.map +0 -1
- package/dist/chunk-WVR4JVWK.mjs +0 -285
- package/dist/chunk-WVR4JVWK.mjs.map +0 -1
|
@@ -0,0 +1,378 @@
|
|
|
1
|
+
"use strict";
// --- esbuild-generated CommonJS interop helpers (do not edit by hand) ---
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Installs a lazy, enumerable getter on `target` for every key in `all`,
// emulating live ESM bindings in CJS output.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Produces the final CommonJS module object: marks it `__esModule` and
// re-exposes every export of `mod` on it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/queueJobStore.ts
// Public export surface of the queue job store (compiled from src/queueJobStore.ts).
var queueJobStore_exports = {};
__export(queueJobStore_exports, {
  appendQueueJobStepInStore: () => appendQueueJobStepInStore,
  getQueueJob: () => getQueueJob,
  updateQueueJobStepInStore: () => updateQueueJobStepInStore,
  upsertInitialQueueJob: () => upsertInitialQueueJob
});
module.exports = __toCommonJS(queueJobStore_exports);
// Runtime dependencies: Upstash Redis REST client and the MongoDB driver.
var import_redis = require("@upstash/redis");
var import_mongodb = require("mongodb");
|
|
31
|
+
// --- MongoDB backend configuration (read once at module load) ---
// Connection URI; DATABASE_MONGODB_URI takes precedence over MONGODB_URI.
var mongoUri = process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
// Database name; falls back to "mediamake" when neither env var is set.
var mongoDbName = process.env.DATABASE_MONGODB_DB || process.env.MONGODB_DB || "mediamake";
// Collection holding queue job documents (default: "queue_jobs").
var mongoQueueCollectionName = process.env.MONGODB_QUEUE_JOBS_COLLECTION || "queue_jobs";
// Lazily-created, process-wide shared connection promise (see getMongoClient).
var mongoClientPromise = null;
|
|
35
|
+
/**
 * Returns the shared MongoClient connection promise, creating it on first use.
 * All callers share one client/connection pool per process.
 * @throws {Error} when no MongoDB URI is configured via environment variables.
 */
async function getMongoClient() {
  if (!mongoUri) {
    throw new Error(
      "MongoDB URI required for queue job store. Set DATABASE_MONGODB_URI or MONGODB_URI."
    );
  }
  if (mongoClientPromise === null) {
    // Connect exactly once; later callers await the same in-flight promise.
    const client = new import_mongodb.MongoClient(mongoUri, {
      maxPoolSize: 10,
      minPoolSize: 0,
      serverSelectionTimeoutMS: 1e4
    });
    mongoClientPromise = client.connect();
  }
  return mongoClientPromise;
}
|
|
50
|
+
/** Resolves the MongoDB collection that stores queue job documents. */
async function getMongoQueueCollection() {
  const client = await getMongoClient();
  const db = client.db(mongoDbName);
  return db.collection(mongoQueueCollectionName);
}
|
|
54
|
+
// --- Upstash Redis backend configuration (read once at module load) ---
// REST endpoint and token; WORKER_-prefixed vars take precedence.
var redisUrl = process.env.WORKER_UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_REST_URL || process.env.UPSTASH_REDIS_URL;
var redisToken = process.env.WORKER_UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_REST_TOKEN || process.env.UPSTASH_REDIS_TOKEN;
// Key prefix for per-job Redis hashes, e.g. "worker:queue-jobs:<id>".
var queueKeyPrefix = process.env.WORKER_UPSTASH_REDIS_QUEUE_PREFIX || process.env.UPSTASH_REDIS_QUEUE_PREFIX || "worker:queue-jobs:";
// Default retention for queue-job hashes: 7 days.
var defaultTtlSeconds = 60 * 60 * 24 * 7;
// Parses a TTL env value; returns null when `raw` is unset or not numeric.
// FIX: the previous `parseInt(raw, 10) || defaultTtlSeconds` treated an
// explicit "0" (which means "no expiry" per the `queueJobTtlSeconds > 0`
// guard in saveQueueJobRedis) as falsy and silently replaced it with the
// 7-day default. Only fall back when the value does not parse at all.
function ttlFromEnv(raw) {
  if (typeof raw !== "string") return null;
  const parsed = parseInt(raw, 10);
  return Number.isNaN(parsed) ? null : parsed;
}
// Precedence preserved from the original: when WORKER_QUEUE_JOBS_TTL_SECONDS
// is set (even if malformed) it wins; otherwise WORKER_JOBS_TTL_SECONDS is
// consulted; malformed values fall back to the 7-day default.
var queueJobTtlSeconds = typeof process.env.WORKER_QUEUE_JOBS_TTL_SECONDS === "string" ? ttlFromEnv(process.env.WORKER_QUEUE_JOBS_TTL_SECONDS) ?? defaultTtlSeconds : ttlFromEnv(process.env.WORKER_JOBS_TTL_SECONDS) ?? defaultTtlSeconds;
// Lazily-created, process-wide shared Redis client (see getRedis).
var redisClient = null;
|
|
60
|
+
/**
 * Returns the shared Upstash Redis client, creating it on first use.
 * @throws {Error} when the REST URL or token is not configured.
 */
function getRedis() {
  if (!redisUrl || !redisToken) {
    throw new Error(
      "Upstash Redis configuration missing for queue job store. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN)."
    );
  }
  if (redisClient === null) {
    // One client per process; the REST client is stateless and reusable.
    redisClient = new import_redis.Redis({ url: redisUrl, token: redisToken });
  }
  return redisClient;
}
|
|
74
|
+
/** Builds the Redis key for a queue job id (prefix + id). */
function queueKey(id) {
  return queueKeyPrefix + id;
}
|
|
77
|
+
/**
 * Normalizes a hash field into a steps array. Upstash hgetall may return the
 * value already parsed (an array) or as a raw JSON string; anything else —
 * including malformed JSON or non-array JSON — yields an empty array.
 */
function stepsFromHash(val) {
  if (Array.isArray(val)) {
    return val;
  }
  if (typeof val !== "string") {
    return [];
  }
  let parsed;
  try {
    parsed = JSON.parse(val);
  } catch {
    return [];
  }
  return Array.isArray(parsed) ? parsed : [];
}
|
|
89
|
+
/**
 * Normalizes a hash field into a metadata object. Accepts an already-parsed
 * non-array object, or a JSON string; anything else yields {}.
 * NOTE(review): a JSON string encoding an array (e.g. "[1,2]") parses to an
 * object and is returned as-is, while a raw array input yields {} — this
 * asymmetry is preserved from the original implementation.
 */
function metadataFromHash(val) {
  if (typeof val === "string") {
    let parsed;
    try {
      parsed = JSON.parse(val);
    } catch {
      return {};
    }
    return parsed !== null && typeof parsed === "object" ? parsed : {};
  }
  if (val && typeof val === "object" && !Array.isArray(val)) {
    return val;
  }
  return {};
}
|
|
101
|
+
/**
 * Loads a queue job record from its Redis hash.
 * Returns null when the key is missing or the hash is empty; otherwise fills
 * in defaults for absent fields (status "running", current time for dates).
 */
async function loadQueueJobRedis(queueJobId) {
  const redis = getRedis();
  const key = queueKey(queueJobId);
  const data = await redis.hgetall(key);
  if (!data || typeof data !== "object") return null;
  if (Object.keys(data).length === 0) return null;
  const d = data;
  return {
    // Fall back to the requested id when the hash has no `id` field.
    id: d.id === void 0 ? queueJobId : String(d.id),
    queueId: String(d.queueId ?? ""),
    status: String(d.status ?? "running"),
    steps: stepsFromHash(d.steps),
    metadata: metadataFromHash(d.metadata),
    createdAt: String(d.createdAt ?? new Date().toISOString()),
    updatedAt: String(d.updatedAt ?? new Date().toISOString()),
    completedAt: d.completedAt != null ? String(d.completedAt) : void 0
  };
}
|
|
119
|
+
/**
 * Persists a queue job record as a Redis hash (steps/metadata JSON-encoded)
 * and refreshes its TTL when a positive TTL is configured.
 */
async function saveQueueJobRedis(record) {
  const redis = getRedis();
  const key = queueKey(record.id);
  const now = new Date().toISOString();
  const fields = {
    id: record.id,
    queueId: record.queueId,
    status: record.status,
    steps: JSON.stringify(record.steps || []),
    metadata: JSON.stringify(record.metadata || {}),
    createdAt: record.createdAt || now,
    updatedAt: record.updatedAt || now
  };
  if (record.completedAt) {
    fields.completedAt = record.completedAt;
  }
  await redis.hset(key, fields);
  // TTL of 0 or less means "keep forever" — skip the expire call entirely.
  if (queueJobTtlSeconds > 0) {
    await redis.expire(key, queueJobTtlSeconds);
  }
}
|
|
140
|
+
/**
 * Selects the backing store from WORKER_DATABASE_TYPE (case-insensitive).
 * Anything other than "mongodb" — including unset — means Upstash Redis.
 */
function getStoreType() {
  const configured = (process.env.WORKER_DATABASE_TYPE || "upstash-redis").toLowerCase();
  if (configured === "mongodb") {
    return "mongodb";
  }
  return "upstash-redis";
}
|
|
144
|
+
/** True when MongoDB is selected AND a non-blank URI is configured. */
function preferMongo() {
  if (getStoreType() !== "mongodb") return false;
  return Boolean(mongoUri && mongoUri.trim());
}
|
|
147
|
+
/** True when Redis is selected AND both URL and token are non-blank. */
function preferRedis() {
  if (getStoreType() === "mongodb") return false;
  const url = (redisUrl || "").trim();
  const token = (redisToken || "").trim();
  return url.length > 0 && token.length > 0;
}
|
|
150
|
+
/**
 * Creates (or idempotently re-initializes) a queue job record with its first
 * step, in whichever backend is configured.
 *
 * - Mongo: if the document exists, only its steps (seeded when empty) and
 *   updatedAt are touched; otherwise a full document is upserted.
 * - Redis: same pattern against the per-job hash.
 * - If neither backend is configured, this is silently a no-op.
 *
 * @param options.queueJobId        id of the queue job (also the Mongo _id / Redis key suffix)
 * @param options.queueId           id of the owning queue
 * @param options.firstWorkerId     worker handling the first step
 * @param options.firstWorkerJobId  worker-job id of the first step
 * @param options.metadata          optional metadata stored on first creation only
 */
async function upsertInitialQueueJob(options) {
  const { queueJobId, queueId, firstWorkerId, firstWorkerJobId, metadata } = options;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  if (preferMongo()) {
    const coll = await getMongoQueueCollection();
    const existing = await coll.findOne({ _id: queueJobId });
    if (existing) {
      // Job already recorded: only seed the first step if none exist yet;
      // queueId/metadata/createdAt are deliberately left untouched.
      const steps = existing.steps ?? [];
      if (steps.length === 0) {
        steps.push({
          workerId: firstWorkerId,
          workerJobId: firstWorkerJobId,
          status: "queued"
        });
      }
      await coll.updateOne(
        { _id: queueJobId },
        {
          $set: {
            steps,
            updatedAt: now
          }
        }
      );
    } else {
      // First sighting: write the complete document (upsert guards against a
      // concurrent creator racing between findOne and updateOne).
      const doc = {
        _id: queueJobId,
        id: queueJobId,
        queueId,
        status: "running",
        steps: [
          {
            workerId: firstWorkerId,
            workerJobId: firstWorkerJobId,
            status: "queued"
          }
        ],
        metadata: metadata ?? {},
        createdAt: now,
        updatedAt: now
      };
      await coll.updateOne(
        { _id: queueJobId },
        { $set: doc },
        { upsert: true }
      );
    }
    return;
  }
  if (preferRedis()) {
    const existing = await loadQueueJobRedis(queueJobId);
    if (existing) {
      // Mirror of the Mongo "existing" branch: only seed steps when empty.
      if (!existing.steps || existing.steps.length === 0) {
        existing.steps = [
          {
            workerId: firstWorkerId,
            workerJobId: firstWorkerJobId,
            status: "queued"
          }
        ];
      }
      existing.updatedAt = now;
      await saveQueueJobRedis(existing);
    } else {
      const record = {
        id: queueJobId,
        queueId,
        status: "running",
        steps: [
          {
            workerId: firstWorkerId,
            workerJobId: firstWorkerJobId,
            status: "queued"
          }
        ],
        metadata: metadata ?? {},
        createdAt: now,
        updatedAt: now
      };
      await saveQueueJobRedis(record);
    }
  }
}
|
|
233
|
+
/**
 * Updates one step of a queue job (status, optional input/output/error) and
 * rolls the step status up to the job: "failed" on any step fails the job;
 * "completed" on the LAST step completes the job. Timestamps are write-once
 * (startedAt on first "running", completedAt on first terminal status).
 * Missing job or missing step index is silently a no-op.
 *
 * @param options.queueJobId  id of the queue job to update
 * @param options.stepIndex   zero-based index into the job's steps array
 * @param options.status      "running" | "completed" | "failed"
 * @param options.input       optional step input to record (omit to keep)
 * @param options.output      optional step output to record (omit to keep)
 * @param options.error       optional { message } to record (omit to keep)
 */
async function updateQueueJobStepInStore(options) {
  const { queueJobId, stepIndex, status, input, output, error } = options;
  const now = (/* @__PURE__ */ new Date()).toISOString();
  if (preferMongo()) {
    const coll = await getMongoQueueCollection();
    const existing = await coll.findOne({ _id: queueJobId });
    if (!existing) return;
    const step = existing.steps[stepIndex];
    if (!step) return;
    // Merge the update into a copy of the step; input/output/error are only
    // written when explicitly provided, and timestamps are never overwritten.
    const mergedStep = {
      ...step,
      status,
      ...input !== void 0 && { input },
      ...output !== void 0 && { output },
      ...error !== void 0 && { error },
      startedAt: step.startedAt ?? (status === "running" ? now : step.startedAt),
      completedAt: step.completedAt ?? (status === "completed" || status === "failed" ? now : step.completedAt)
    };
    // setDoc.steps aliases existing.steps; the merged step is spliced in place
    // and the whole array written back in one $set.
    const setDoc = {
      steps: existing.steps,
      updatedAt: now
    };
    setDoc.steps[stepIndex] = mergedStep;
    if (status === "failed") {
      // Any failed step fails the whole job.
      setDoc.status = "failed";
      if (!existing.completedAt) setDoc.completedAt = now;
    } else if (status === "completed" && stepIndex === existing.steps.length - 1) {
      // Only completing the final step completes the job.
      setDoc.status = "completed";
      if (!existing.completedAt) setDoc.completedAt = now;
    }
    await coll.updateOne(
      { _id: queueJobId },
      {
        $set: setDoc
      }
    );
    return;
  }
  if (preferRedis()) {
    const existing = await loadQueueJobRedis(queueJobId);
    if (!existing) {
      return;
    }
    const steps = existing.steps || [];
    const step = steps[stepIndex];
    if (!step) {
      return;
    }
    // Mutate the step in place, mirroring the Mongo merge rules above.
    step.status = status;
    if (input !== void 0) step.input = input;
    if (output !== void 0) step.output = output;
    if (error !== void 0) step.error = error;
    if (status === "running") {
      step.startedAt = step.startedAt ?? now;
    }
    if (status === "completed" || status === "failed") {
      step.completedAt = step.completedAt ?? now;
    }
    existing.steps = steps;
    existing.updatedAt = now;
    if (status === "failed") {
      existing.status = "failed";
      existing.completedAt = existing.completedAt ?? now;
    } else if (status === "completed" && stepIndex === steps.length - 1) {
      existing.status = "completed";
      existing.completedAt = existing.completedAt ?? now;
    }
    await saveQueueJobRedis(existing);
  }
}
|
|
303
|
+
/**
 * Appends a new "queued" step to an existing queue job.
 * Mongo: atomic $push; Redis: read-modify-write of the job hash (no-op when
 * the job does not exist). No-op when neither backend is configured.
 *
 * @param options.queueJobId   id of the queue job to extend
 * @param options.workerId     worker that will run the new step
 * @param options.workerJobId  worker-job id for the new step
 */
async function appendQueueJobStepInStore(options) {
  const { queueJobId, workerId, workerJobId } = options;
  const now = new Date().toISOString();
  const newStep = { workerId, workerJobId, status: "queued" };
  if (preferMongo()) {
    const coll = await getMongoQueueCollection();
    await coll.updateOne(
      { _id: queueJobId },
      { $push: { steps: newStep }, $set: { updatedAt: now } }
    );
    return;
  }
  if (preferRedis()) {
    const existing = await loadQueueJobRedis(queueJobId);
    if (!existing) return;
    existing.steps = [...existing.steps || [], newStep];
    existing.updatedAt = now;
    await saveQueueJobRedis(existing);
  }
}
|
|
337
|
+
/**
 * Loads a queue job by id and projects it to the read shape used for mapping
 * context (id, queueId, status, and per-step workerId/workerJobId/status/output).
 * Returns null when the job is missing or no backend is configured.
 */
async function getQueueJob(queueJobId) {
  // Project a stored step down to the fields callers consume.
  const summarizeStep = (s) => ({
    workerId: s.workerId,
    workerJobId: s.workerJobId,
    status: s.status,
    output: s.output
  });
  if (preferMongo()) {
    const coll = await getMongoQueueCollection();
    const doc = await coll.findOne({ _id: queueJobId });
    if (!doc) return null;
    return {
      id: doc.id ?? queueJobId,
      queueId: doc.queueId,
      status: doc.status,
      steps: (doc.steps ?? []).map(summarizeStep)
    };
  }
  if (preferRedis()) {
    const record = await loadQueueJobRedis(queueJobId);
    if (!record) return null;
    return {
      id: record.id,
      queueId: record.queueId,
      status: record.status,
      steps: (record.steps ?? []).map(summarizeStep)
    };
  }
  return null;
}
|
|
371
|
+
// Annotate the CommonJS export names for ESM import in node:
// (intentionally dead code — `0 &&` never executes; Node statically scans
// this pattern so `import { … } from` resolves named exports from CJS)
0 && (module.exports = {
  appendQueueJobStepInStore,
  getQueueJob,
  updateQueueJobStepInStore,
  upsertInitialQueueJob
});
//# sourceMappingURL=queueJobStore.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/queueJobStore.ts"],"sourcesContent":["/**\n * Queue job store for worker queues (MongoDB or Upstash Redis).\n *\n * Mirrors the worker_jobs pattern but optimized for queues:\n * - MongoDB: collection `queue_jobs` (configurable via MONGODB_QUEUE_JOBS_COLLECTION)\n * - Upstash Redis: JSON blob per queue job with compact step entries\n *\n * This module is runtime-only (used by Lambda workers). Next.js APIs can read\n * the same collections/keys to show queue progress.\n */\n\nimport type { Redis } from '@upstash/redis';\nimport { Redis as UpstashRedis } from '@upstash/redis';\nimport { MongoClient, type Collection } from 'mongodb';\n\ntype QueueJobStep = {\n workerId: string;\n workerJobId: string;\n status: 'queued' | 'running' | 'completed' | 'failed';\n input?: unknown;\n output?: unknown;\n error?: { message: string };\n startedAt?: string;\n completedAt?: string;\n};\n\ntype QueueJobDoc = {\n _id: string; // queueJobId\n id: string;\n queueId: string;\n status: 'running' | 'completed' | 'failed' | 'partial';\n steps: QueueJobStep[];\n metadata?: Record<string, unknown>;\n createdAt: string;\n updatedAt: string;\n completedAt?: string;\n};\n\n\n// === Mongo backend (shares connection pattern with mongoJobStore) ===\n\nconst mongoUri = process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;\nconst mongoDbName =\n process.env.DATABASE_MONGODB_DB ||\n process.env.MONGODB_DB ||\n 'mediamake';\nconst mongoQueueCollectionName =\n process.env.MONGODB_QUEUE_JOBS_COLLECTION || 'queue_jobs';\n\nlet mongoClientPromise: Promise<MongoClient> | null = null;\n\nasync function getMongoClient(): Promise<MongoClient> {\n if (!mongoUri) {\n throw new Error(\n 'MongoDB URI required for queue job store. 
Set DATABASE_MONGODB_URI or MONGODB_URI.'\n );\n }\n if (!mongoClientPromise) {\n mongoClientPromise = new MongoClient(mongoUri, {\n maxPoolSize: 10,\n minPoolSize: 0,\n serverSelectionTimeoutMS: 10_000,\n }).connect();\n }\n return mongoClientPromise;\n}\n\nasync function getMongoQueueCollection(): Promise<Collection<QueueJobDoc>> {\n const client = await getMongoClient();\n return client.db(mongoDbName).collection<QueueJobDoc>(mongoQueueCollectionName);\n}\n\n// === Redis backend (Upstash) ===\n\nconst redisUrl =\n process.env.WORKER_UPSTASH_REDIS_REST_URL ||\n process.env.UPSTASH_REDIS_REST_URL ||\n process.env.UPSTASH_REDIS_URL;\nconst redisToken =\n process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||\n process.env.UPSTASH_REDIS_REST_TOKEN ||\n process.env.UPSTASH_REDIS_TOKEN;\nconst queueKeyPrefix =\n process.env.WORKER_UPSTASH_REDIS_QUEUE_PREFIX ||\n process.env.UPSTASH_REDIS_QUEUE_PREFIX ||\n 'worker:queue-jobs:';\n\nconst defaultTtlSeconds = 60 * 60 * 24 * 7; // 7 days\nconst queueJobTtlSeconds =\n typeof process.env.WORKER_QUEUE_JOBS_TTL_SECONDS === 'string'\n ? parseInt(process.env.WORKER_QUEUE_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds\n : typeof process.env.WORKER_JOBS_TTL_SECONDS === 'string'\n ? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || defaultTtlSeconds\n : defaultTtlSeconds;\n\nlet redisClient: Redis | null = null;\n\nfunction getRedis(): Redis {\n if (!redisUrl || !redisToken) {\n throw new Error(\n 'Upstash Redis configuration missing for queue job store. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN).'\n );\n }\n if (!redisClient) {\n redisClient = new UpstashRedis({\n url: redisUrl,\n token: redisToken,\n });\n }\n return redisClient;\n}\n\nfunction queueKey(id: string): string {\n return `${queueKeyPrefix}${id}`;\n}\n\ntype QueueJobRecord = Omit<QueueJobDoc, '_id'>;\n\n/** Hash values from Upstash hgetall may be auto-parsed (array/object) or raw strings. 
*/\nfunction stepsFromHash(val: unknown): QueueJobStep[] {\n if (Array.isArray(val)) return val as QueueJobStep[];\n if (typeof val === 'string') {\n try {\n const parsed = JSON.parse(val) as QueueJobStep[];\n return Array.isArray(parsed) ? parsed : [];\n } catch {\n return [];\n }\n }\n return [];\n}\n\nfunction metadataFromHash(val: unknown): Record<string, unknown> {\n if (val && typeof val === 'object' && !Array.isArray(val)) return val as Record<string, unknown>;\n if (typeof val === 'string') {\n try {\n const parsed = JSON.parse(val) as Record<string, unknown>;\n return parsed && typeof parsed === 'object' ? parsed : {};\n } catch {\n return {};\n }\n }\n return {};\n}\n\nasync function loadQueueJobRedis(queueJobId: string): Promise<QueueJobRecord | null> {\n const redis = getRedis();\n const key = queueKey(queueJobId);\n const data = await redis.hgetall(key);\n if (!data || typeof data !== 'object' || Object.keys(data).length === 0) return null;\n const d = data as Record<string, unknown>;\n const record: QueueJobRecord = {\n id: (d.id === undefined ? queueJobId : String(d.id)) as string,\n queueId: String(d.queueId ?? ''),\n status: (String(d.status ?? 'running') as QueueJobRecord['status']),\n steps: stepsFromHash(d.steps),\n metadata: metadataFromHash(d.metadata),\n createdAt: String(d.createdAt ?? new Date().toISOString()),\n updatedAt: String(d.updatedAt ?? new Date().toISOString()),\n completedAt: d.completedAt != null ? 
String(d.completedAt) : undefined,\n };\n return record;\n}\n\nasync function saveQueueJobRedis(record: QueueJobRecord): Promise<void> {\n const redis = getRedis();\n const key = queueKey(record.id);\n const now = new Date().toISOString();\n const toSet: Record<string, string> = {\n id: record.id,\n queueId: record.queueId,\n status: record.status,\n steps: JSON.stringify(record.steps || []),\n metadata: JSON.stringify(record.metadata || {}),\n createdAt: record.createdAt || now,\n updatedAt: record.updatedAt || now,\n };\n if (record.completedAt) {\n toSet.completedAt = record.completedAt;\n }\n await redis.hset(key, toSet);\n if (queueJobTtlSeconds > 0) {\n await redis.expire(key, queueJobTtlSeconds);\n }\n}\n\n// === Backend selection ===\n\nfunction getStoreType(): 'mongodb' | 'upstash-redis' {\n const t = (process.env.WORKER_DATABASE_TYPE || 'upstash-redis').toLowerCase();\n return t === 'mongodb' ? 'mongodb' : 'upstash-redis';\n}\n\nfunction preferMongo(): boolean {\n return getStoreType() === 'mongodb' && Boolean(mongoUri?.trim());\n}\n\nfunction preferRedis(): boolean {\n return getStoreType() !== 'mongodb' && Boolean((redisUrl || '').trim() && (redisToken || '').trim());\n}\n\n// === Public API used from handler.ts ===\n\nexport async function upsertInitialQueueJob(options: {\n queueJobId: string;\n queueId: string;\n firstWorkerId: string;\n firstWorkerJobId: string;\n metadata?: Record<string, any>;\n}): Promise<void> {\n const { queueJobId, queueId, firstWorkerId, firstWorkerJobId, metadata } = options;\n const now = new Date().toISOString();\n\n if (preferMongo()) {\n const coll = await getMongoQueueCollection();\n const existing = await coll.findOne({ _id: queueJobId });\n if (existing) {\n const steps = existing.steps ?? 
[];\n if (steps.length === 0) {\n steps.push({\n workerId: firstWorkerId,\n workerJobId: firstWorkerJobId,\n status: 'queued',\n });\n }\n await coll.updateOne(\n { _id: queueJobId },\n {\n $set: {\n steps,\n updatedAt: now,\n },\n }\n );\n } else {\n const doc: QueueJobDoc = {\n _id: queueJobId,\n id: queueJobId,\n queueId,\n status: 'running',\n steps: [\n {\n workerId: firstWorkerId,\n workerJobId: firstWorkerJobId,\n status: 'queued',\n },\n ],\n metadata: metadata ?? {},\n createdAt: now,\n updatedAt: now,\n };\n await coll.updateOne(\n { _id: queueJobId },\n { $set: doc },\n { upsert: true }\n );\n }\n return;\n }\n\n if (preferRedis()) {\n const existing = await loadQueueJobRedis(queueJobId);\n if (existing) {\n // Ensure we have at least one step\n if (!existing.steps || existing.steps.length === 0) {\n existing.steps = [\n {\n workerId: firstWorkerId,\n workerJobId: firstWorkerJobId,\n status: 'queued',\n },\n ];\n }\n existing.updatedAt = now;\n await saveQueueJobRedis(existing);\n } else {\n const record: QueueJobRecord = {\n id: queueJobId,\n queueId,\n status: 'running',\n steps: [\n {\n workerId: firstWorkerId,\n workerJobId: firstWorkerJobId,\n status: 'queued',\n },\n ],\n metadata: metadata ?? 
{},\n createdAt: now,\n updatedAt: now,\n };\n await saveQueueJobRedis(record);\n }\n }\n}\n\nexport async function updateQueueJobStepInStore(options: {\n queueJobId: string;\n queueId?: string;\n stepIndex: number;\n workerId: string;\n workerJobId: string;\n status: 'running' | 'completed' | 'failed';\n input?: unknown;\n output?: unknown;\n error?: { message: string };\n}): Promise<void> {\n const { queueJobId, stepIndex, status, input, output, error } = options;\n const now = new Date().toISOString();\n\n if (preferMongo()) {\n const coll = await getMongoQueueCollection();\n const existing = await coll.findOne({ _id: queueJobId });\n if (!existing) return;\n const step = existing.steps[stepIndex];\n if (!step) return;\n\n const mergedStep: QueueJobStep = {\n ...step,\n status,\n ...(input !== undefined && { input }),\n ...(output !== undefined && { output }),\n ...(error !== undefined && { error }),\n startedAt: step.startedAt ?? (status === 'running' ? now : step.startedAt),\n completedAt:\n step.completedAt ??\n (status === 'completed' || status === 'failed' ? 
now : step.completedAt),\n };\n\n const setDoc: Partial<QueueJobDoc> & { steps: QueueJobStep[] } = {\n steps: existing.steps,\n updatedAt: now,\n };\n setDoc.steps[stepIndex] = mergedStep;\n if (status === 'failed') {\n setDoc.status = 'failed';\n if (!existing.completedAt) setDoc.completedAt = now;\n } else if (status === 'completed' && stepIndex === existing.steps.length - 1) {\n setDoc.status = 'completed';\n if (!existing.completedAt) setDoc.completedAt = now;\n }\n\n await coll.updateOne(\n { _id: queueJobId },\n {\n $set: setDoc,\n }\n );\n return;\n }\n\n if (preferRedis()) {\n const existing = await loadQueueJobRedis(queueJobId);\n if (!existing) {\n // No queue job; nothing to update\n return;\n }\n const steps = existing.steps || [];\n const step = steps[stepIndex];\n if (!step) {\n return;\n }\n step.status = status;\n if (input !== undefined) step.input = input;\n if (output !== undefined) step.output = output;\n if (error !== undefined) step.error = error;\n if (status === 'running') {\n step.startedAt = step.startedAt ?? now;\n }\n if (status === 'completed' || status === 'failed') {\n step.completedAt = step.completedAt ?? now;\n }\n\n existing.steps = steps;\n existing.updatedAt = now;\n if (status === 'failed') {\n existing.status = 'failed';\n existing.completedAt = existing.completedAt ?? now;\n } else if (status === 'completed' && stepIndex === steps.length - 1) {\n existing.status = 'completed';\n existing.completedAt = existing.completedAt ?? 
now;\n }\n await saveQueueJobRedis(existing);\n }\n}\n\nexport async function appendQueueJobStepInStore(options: {\n queueJobId: string;\n queueId?: string;\n workerId: string;\n workerJobId: string;\n}): Promise<void> {\n const { queueJobId, workerId, workerJobId } = options;\n const now = new Date().toISOString();\n\n if (preferMongo()) {\n const coll = await getMongoQueueCollection();\n await coll.updateOne(\n { _id: queueJobId },\n {\n $push: {\n steps: {\n workerId,\n workerJobId,\n status: 'queued',\n } as QueueJobStep,\n },\n $set: { updatedAt: now },\n }\n );\n return;\n }\n\n if (preferRedis()) {\n const existing = await loadQueueJobRedis(queueJobId);\n if (!existing) return;\n const steps = existing.steps || [];\n steps.push({\n workerId,\n workerJobId,\n status: 'queued',\n });\n existing.steps = steps;\n existing.updatedAt = now;\n await saveQueueJobRedis(existing);\n }\n}\n\n/**\n * Load a queue job by ID (for mapping context: previous step outputs).\n * Used by wrapHandlerForQueue when invoking mapInputFromPrev with previousOutputs.\n */\nexport async function getQueueJob(queueJobId: string): Promise<{\n id: string;\n queueId: string;\n status: string;\n steps: Array<{ workerId: string; workerJobId: string; status: string; output?: unknown }>;\n} | null> {\n if (preferMongo()) {\n const coll = await getMongoQueueCollection();\n const doc = await coll.findOne({ _id: queueJobId });\n if (!doc) return null;\n return {\n id: doc.id ?? queueJobId,\n queueId: doc.queueId,\n status: doc.status,\n steps: (doc.steps ?? []).map((s: QueueJobStep) => ({\n workerId: s.workerId,\n workerJobId: s.workerJobId,\n status: s.status,\n output: s.output,\n })),\n };\n }\n if (preferRedis()) {\n const record = await loadQueueJobRedis(queueJobId);\n if (!record) return null;\n return {\n id: record.id,\n queueId: record.queueId,\n status: record.status,\n steps: (record.steps ?? 
[]).map((s) => ({\n workerId: s.workerId,\n workerJobId: s.workerJobId,\n status: s.status,\n output: s.output,\n })),\n };\n }\n return null;\n}\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAYA,mBAAsC;AACtC,qBAA6C;AA4B7C,IAAM,WAAW,QAAQ,IAAI,wBAAwB,QAAQ,IAAI;AACjE,IAAM,cACJ,QAAQ,IAAI,uBACZ,QAAQ,IAAI,cACZ;AACF,IAAM,2BACJ,QAAQ,IAAI,iCAAiC;AAE/C,IAAI,qBAAkD;AAEtD,eAAe,iBAAuC;AACpD,MAAI,CAAC,UAAU;AACb,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,oBAAoB;AACvB,yBAAqB,IAAI,2BAAY,UAAU;AAAA,MAC7C,aAAa;AAAA,MACb,aAAa;AAAA,MACb,0BAA0B;AAAA,IAC5B,CAAC,EAAE,QAAQ;AAAA,EACb;AACA,SAAO;AACT;AAEA,eAAe,0BAA4D;AACzE,QAAM,SAAS,MAAM,eAAe;AACpC,SAAO,OAAO,GAAG,WAAW,EAAE,WAAwB,wBAAwB;AAChF;AAIA,IAAM,WACJ,QAAQ,IAAI,iCACZ,QAAQ,IAAI,0BACZ,QAAQ,IAAI;AACd,IAAM,aACJ,QAAQ,IAAI,mCACZ,QAAQ,IAAI,4BACZ,QAAQ,IAAI;AACd,IAAM,iBACJ,QAAQ,IAAI,qCACZ,QAAQ,IAAI,8BACZ;AAEF,IAAM,oBAAoB,KAAK,KAAK,KAAK;AACzC,IAAM,qBACJ,OAAO,QAAQ,IAAI,kCAAkC,WACjD,SAAS,QAAQ,IAAI,+BAA+B,EAAE,KAAK,oBAC3D,OAAO,QAAQ,IAAI,4BAA4B,WAC7C,SAAS,QAAQ,IAAI,yBAAyB,EAAE,KAAK,oBACrD;AAER,IAAI,cAA4B;AAEhC,SAAS,WAAkB;AACzB,MAAI,CAAC,YAAY,CAAC,YAAY;AAC5B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACA,MAAI,CAAC,aAAa;AAChB,kBAAc,IAAI,aAAAA,MAAa;AAAA,MAC7B,KAAK;AAAA,MACL,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AACA,SAAO;AACT;AAEA,SAAS,SAAS,IAAoB;AACpC,SAAO,GAAG,cAAc,GAAG,EAAE;AAC/B;AAKA,SAAS,cAAc,KAA8B;AACnD,MAAI,MAAM,QAAQ,GAAG,EAAG,QAAO;AAC/B,MAAI,OAAO,QAAQ,UAAU;AAC3B,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,GAAG;AAC7B,aAAO,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC;AAAA,IAC3C,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACA,SAAO,CAAC;AACV;AAEA,SAAS,iBAAiB,KAAuC;AAC/D,MAAI,OAAO,OAAO,QAAQ,YAAY,CAAC,MAAM,QAAQ,GAAG,EAAG,QAAO;AAClE,MAAI,OAAO,QAAQ,UAAU;AAC3B,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,GAAG;AAC7B,aAAO,UAAU,OAAO,WAAW,WAAW,SAAS,CAAC;AAAA,IAC1D,QAAQ;AACN,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACA,SAAO,CAAC;AACV;AAEA,eAAe,kBAAkB,YAAoD;AACnF,QAAM,QAAQ,SAAS;AACvB,QAAM,MAAM,SAAS,UAAU;AAC/B,QAAM,OAAO,MAAM,MAAM,QAAQ,GAAG;AACpC,MAAI,CAAC,QAAQ,OAAO,SAAS,YAAY,OAAO,KAAK
,IAAI,EAAE,WAAW,EAAG,QAAO;AAChF,QAAM,IAAI;AACV,QAAM,SAAyB;AAAA,IAC7B,IAAK,EAAE,OAAO,SAAY,aAAa,OAAO,EAAE,EAAE;AAAA,IAClD,SAAS,OAAO,EAAE,WAAW,EAAE;AAAA,IAC/B,QAAS,OAAO,EAAE,UAAU,SAAS;AAAA,IACrC,OAAO,cAAc,EAAE,KAAK;AAAA,IAC5B,UAAU,iBAAiB,EAAE,QAAQ;AAAA,IACrC,WAAW,OAAO,EAAE,cAAa,oBAAI,KAAK,GAAE,YAAY,CAAC;AAAA,IACzD,WAAW,OAAO,EAAE,cAAa,oBAAI,KAAK,GAAE,YAAY,CAAC;AAAA,IACzD,aAAa,EAAE,eAAe,OAAO,OAAO,EAAE,WAAW,IAAI;AAAA,EAC/D;AACA,SAAO;AACT;AAEA,eAAe,kBAAkB,QAAuC;AACtE,QAAM,QAAQ,SAAS;AACvB,QAAM,MAAM,SAAS,OAAO,EAAE;AAC9B,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAM,QAAgC;AAAA,IACpC,IAAI,OAAO;AAAA,IACX,SAAS,OAAO;AAAA,IAChB,QAAQ,OAAO;AAAA,IACf,OAAO,KAAK,UAAU,OAAO,SAAS,CAAC,CAAC;AAAA,IACxC,UAAU,KAAK,UAAU,OAAO,YAAY,CAAC,CAAC;AAAA,IAC9C,WAAW,OAAO,aAAa;AAAA,IAC/B,WAAW,OAAO,aAAa;AAAA,EACjC;AACA,MAAI,OAAO,aAAa;AACtB,UAAM,cAAc,OAAO;AAAA,EAC7B;AACA,QAAM,MAAM,KAAK,KAAK,KAAK;AAC3B,MAAI,qBAAqB,GAAG;AAC1B,UAAM,MAAM,OAAO,KAAK,kBAAkB;AAAA,EAC5C;AACF;AAIA,SAAS,eAA4C;AACnD,QAAM,KAAK,QAAQ,IAAI,wBAAwB,iBAAiB,YAAY;AAC5E,SAAO,MAAM,YAAY,YAAY;AACvC;AAEA,SAAS,cAAuB;AAC9B,SAAO,aAAa,MAAM,aAAa,QAAQ,UAAU,KAAK,CAAC;AACjE;AAEA,SAAS,cAAuB;AAC9B,SAAO,aAAa,MAAM,aAAa,SAAS,YAAY,IAAI,KAAK,MAAM,cAAc,IAAI,KAAK,CAAC;AACrG;AAIA,eAAsB,sBAAsB,SAM1B;AAChB,QAAM,EAAE,YAAY,SAAS,eAAe,kBAAkB,SAAS,IAAI;AAC3E,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,MAAI,YAAY,GAAG;AACjB,UAAM,OAAO,MAAM,wBAAwB;AAC3C,UAAM,WAAW,MAAM,KAAK,QAAQ,EAAE,KAAK,WAAW,CAAC;AACvD,QAAI,UAAU;AACZ,YAAM,QAAQ,SAAS,SAAS,CAAC;AACjC,UAAI,MAAM,WAAW,GAAG;AACtB,cAAM,KAAK;AAAA,UACT,UAAU;AAAA,UACV,aAAa;AAAA,UACb,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AACA,YAAM,KAAK;AAAA,QACT,EAAE,KAAK,WAAW;AAAA,QAClB;AAAA,UACE,MAAM;AAAA,YACJ;AAAA,YACA,WAAW;AAAA,UACb;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,YAAM,MAAmB;AAAA,QACvB,KAAK;AAAA,QACL,IAAI;AAAA,QACJ;AAAA,QACA,QAAQ;AAAA,QACR,OAAO;AAAA,UACL;AAAA,YACE,UAAU;AAAA,YACV,aAAa;AAAA,YACb,QAAQ;AAAA,UACV;AAAA,QACF;AAAA,QACA,UAAU,YAAY,CAAC;AAAA,QACvB,WAAW;AAAA,QACX,WAAW;AAAA,MACb;AACA,YAAM,KAAK;AAAA,QACT,EAAE,KAAK,WAAW;AAAA,QAClB,EAAE,MAAM,IAAI;AAAA,QACZ,EAAE
,QAAQ,KAAK;AAAA,MACjB;AAAA,IACF;AACA;AAAA,EACF;AAEA,MAAI,YAAY,GAAG;AACjB,UAAM,WAAW,MAAM,kBAAkB,UAAU;AACnD,QAAI,UAAU;AAEZ,UAAI,CAAC,SAAS,SAAS,SAAS,MAAM,WAAW,GAAG;AAClD,iBAAS,QAAQ;AAAA,UACf;AAAA,YACE,UAAU;AAAA,YACV,aAAa;AAAA,YACb,QAAQ;AAAA,UACV;AAAA,QACF;AAAA,MACF;AACA,eAAS,YAAY;AACrB,YAAM,kBAAkB,QAAQ;AAAA,IAClC,OAAO;AACL,YAAM,SAAyB;AAAA,QAC7B,IAAI;AAAA,QACJ;AAAA,QACA,QAAQ;AAAA,QACR,OAAO;AAAA,UACL;AAAA,YACE,UAAU;AAAA,YACV,aAAa;AAAA,YACb,QAAQ;AAAA,UACV;AAAA,QACF;AAAA,QACA,UAAU,YAAY,CAAC;AAAA,QACvB,WAAW;AAAA,QACX,WAAW;AAAA,MACb;AACA,YAAM,kBAAkB,MAAM;AAAA,IAChC;AAAA,EACF;AACF;AAEA,eAAsB,0BAA0B,SAU9B;AAChB,QAAM,EAAE,YAAY,WAAW,QAAQ,OAAO,QAAQ,MAAM,IAAI;AAChE,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,MAAI,YAAY,GAAG;AACjB,UAAM,OAAO,MAAM,wBAAwB;AAC3C,UAAM,WAAW,MAAM,KAAK,QAAQ,EAAE,KAAK,WAAW,CAAC;AACvD,QAAI,CAAC,SAAU;AACf,UAAM,OAAO,SAAS,MAAM,SAAS;AACrC,QAAI,CAAC,KAAM;AAEX,UAAM,aAA2B;AAAA,MAC/B,GAAG;AAAA,MACH;AAAA,MACA,GAAI,UAAU,UAAa,EAAE,MAAM;AAAA,MACnC,GAAI,WAAW,UAAa,EAAE,OAAO;AAAA,MACrC,GAAI,UAAU,UAAa,EAAE,MAAM;AAAA,MACnC,WAAW,KAAK,cAAc,WAAW,YAAY,MAAM,KAAK;AAAA,MAChE,aACE,KAAK,gBACJ,WAAW,eAAe,WAAW,WAAW,MAAM,KAAK;AAAA,IAChE;AAEA,UAAM,SAA2D;AAAA,MAC/D,OAAO,SAAS;AAAA,MAChB,WAAW;AAAA,IACb;AACA,WAAO,MAAM,SAAS,IAAI;AAC1B,QAAI,WAAW,UAAU;AACvB,aAAO,SAAS;AAChB,UAAI,CAAC,SAAS,YAAa,QAAO,cAAc;AAAA,IAClD,WAAW,WAAW,eAAe,cAAc,SAAS,MAAM,SAAS,GAAG;AAC5E,aAAO,SAAS;AAChB,UAAI,CAAC,SAAS,YAAa,QAAO,cAAc;AAAA,IAClD;AAEA,UAAM,KAAK;AAAA,MACT,EAAE,KAAK,WAAW;AAAA,MAClB;AAAA,QACE,MAAM;AAAA,MACR;AAAA,IACF;AACA;AAAA,EACF;AAEA,MAAI,YAAY,GAAG;AACjB,UAAM,WAAW,MAAM,kBAAkB,UAAU;AACnD,QAAI,CAAC,UAAU;AAEb;AAAA,IACF;AACA,UAAM,QAAQ,SAAS,SAAS,CAAC;AACjC,UAAM,OAAO,MAAM,SAAS;AAC5B,QAAI,CAAC,MAAM;AACT;AAAA,IACF;AACA,SAAK,SAAS;AACd,QAAI,UAAU,OAAW,MAAK,QAAQ;AACtC,QAAI,WAAW,OAAW,MAAK,SAAS;AACxC,QAAI,UAAU,OAAW,MAAK,QAAQ;AACtC,QAAI,WAAW,WAAW;AACxB,WAAK,YAAY,KAAK,aAAa;AAAA,IACrC;AACA,QAAI,WAAW,eAAe,WAAW,UAAU;AACjD,WAAK,cAAc,KAAK,eAAe;AAAA,IACzC;AAEA,aAAS,QAAQ;AACjB,aAAS,YAAY;AACrB,QAAI,WAAW,UAAU;AACvB,eAAS,SAAS;AAClB,eAAS,cAAc,SA
AS,eAAe;AAAA,IACjD,WAAW,WAAW,eAAe,cAAc,MAAM,SAAS,GAAG;AACnE,eAAS,SAAS;AAClB,eAAS,cAAc,SAAS,eAAe;AAAA,IACjD;AACA,UAAM,kBAAkB,QAAQ;AAAA,EAClC;AACF;AAEA,eAAsB,0BAA0B,SAK9B;AAChB,QAAM,EAAE,YAAY,UAAU,YAAY,IAAI;AAC9C,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,MAAI,YAAY,GAAG;AACjB,UAAM,OAAO,MAAM,wBAAwB;AAC3C,UAAM,KAAK;AAAA,MACT,EAAE,KAAK,WAAW;AAAA,MAClB;AAAA,QACE,OAAO;AAAA,UACL,OAAO;AAAA,YACL;AAAA,YACA;AAAA,YACA,QAAQ;AAAA,UACV;AAAA,QACF;AAAA,QACA,MAAM,EAAE,WAAW,IAAI;AAAA,MACzB;AAAA,IACF;AACA;AAAA,EACF;AAEA,MAAI,YAAY,GAAG;AACjB,UAAM,WAAW,MAAM,kBAAkB,UAAU;AACnD,QAAI,CAAC,SAAU;AACf,UAAM,QAAQ,SAAS,SAAS,CAAC;AACjC,UAAM,KAAK;AAAA,MACT;AAAA,MACA;AAAA,MACA,QAAQ;AAAA,IACV,CAAC;AACD,aAAS,QAAQ;AACjB,aAAS,YAAY;AACrB,UAAM,kBAAkB,QAAQ;AAAA,EAClC;AACF;AAMA,eAAsB,YAAY,YAKxB;AACR,MAAI,YAAY,GAAG;AACjB,UAAM,OAAO,MAAM,wBAAwB;AAC3C,UAAM,MAAM,MAAM,KAAK,QAAQ,EAAE,KAAK,WAAW,CAAC;AAClD,QAAI,CAAC,IAAK,QAAO;AACjB,WAAO;AAAA,MACL,IAAI,IAAI,MAAM;AAAA,MACd,SAAS,IAAI;AAAA,MACb,QAAQ,IAAI;AAAA,MACZ,QAAQ,IAAI,SAAS,CAAC,GAAG,IAAI,CAAC,OAAqB;AAAA,QACjD,UAAU,EAAE;AAAA,QACZ,aAAa,EAAE;AAAA,QACf,QAAQ,EAAE;AAAA,QACV,QAAQ,EAAE;AAAA,MACZ,EAAE;AAAA,IACJ;AAAA,EACF;AACA,MAAI,YAAY,GAAG;AACjB,UAAM,SAAS,MAAM,kBAAkB,UAAU;AACjD,QAAI,CAAC,OAAQ,QAAO;AACpB,WAAO;AAAA,MACL,IAAI,OAAO;AAAA,MACX,SAAS,OAAO;AAAA,MAChB,QAAQ,OAAO;AAAA,MACf,QAAQ,OAAO,SAAS,CAAC,GAAG,IAAI,CAAC,OAAO;AAAA,QACtC,UAAU,EAAE;AAAA,QACZ,aAAa,EAAE;AAAA,QACf,QAAQ,EAAE;AAAA,QACV,QAAQ,EAAE;AAAA,MACZ,EAAE;AAAA,IACJ;AAAA,EACF;AACA,SAAO;AACT;","names":["UpstashRedis"]}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import {
|
|
2
|
+
appendQueueJobStepInStore,
|
|
3
|
+
getQueueJob,
|
|
4
|
+
updateQueueJobStepInStore,
|
|
5
|
+
upsertInitialQueueJob
|
|
6
|
+
} from "./chunk-AOXGONGI.mjs";
|
|
7
|
+
import "./chunk-BJTO5JO5.mjs";
|
|
8
|
+
export {
|
|
9
|
+
appendQueueJobStepInStore,
|
|
10
|
+
getQueueJob,
|
|
11
|
+
updateQueueJobStepInStore,
|
|
12
|
+
upsertInitialQueueJob
|
|
13
|
+
};
|
|
14
|
+
//# sourceMappingURL=queueJobStore.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@microfox/ai-worker",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.3",
|
|
4
4
|
"description": "Background worker runtime for ai-router - SQS-based async agent execution",
|
|
5
5
|
"main": "./dist/index.js",
|
|
6
6
|
"module": "./dist/index.mjs",
|
|
@@ -39,14 +39,21 @@
|
|
|
39
39
|
"types": "./dist/config.d.ts",
|
|
40
40
|
"import": "./dist/config.mjs",
|
|
41
41
|
"require": "./dist/config.js"
|
|
42
|
+
},
|
|
43
|
+
"./queueJobStore": {
|
|
44
|
+
"types": "./dist/queueJobStore.d.ts",
|
|
45
|
+
"import": "./dist/queueJobStore.mjs",
|
|
46
|
+
"require": "./dist/queueJobStore.js"
|
|
42
47
|
}
|
|
43
48
|
},
|
|
44
49
|
"dependencies": {
|
|
50
|
+
"@aws-sdk/client-sqs": "^3.700.0",
|
|
51
|
+
"@upstash/redis": "^1.29.0",
|
|
45
52
|
"mongodb": "^6.12.0",
|
|
46
53
|
"zod": "^4.1.5"
|
|
47
54
|
},
|
|
48
55
|
"peerDependencies": {
|
|
49
|
-
"@microfox/ai-router": ">=2.1.
|
|
56
|
+
"@microfox/ai-router": ">=2.1.5"
|
|
50
57
|
},
|
|
51
58
|
"devDependencies": {
|
|
52
59
|
"@types/aws-lambda": "^8.10.145",
|
package/dist/chunk-FQCZSXDI.mjs
DELETED
|
@@ -1,83 +0,0 @@
|
|
|
1
|
-
// src/client.ts
|
|
2
|
-
function getWorkersTriggerUrl() {
|
|
3
|
-
const raw = process.env.WORKER_BASE_URL || process.env.NEXT_PUBLIC_WORKER_BASE_URL || process.env.WORKERS_TRIGGER_API_URL || process.env.NEXT_PUBLIC_WORKERS_TRIGGER_API_URL || process.env.WORKERS_CONFIG_API_URL || process.env.NEXT_PUBLIC_WORKERS_CONFIG_API_URL;
|
|
4
|
-
if (!raw) {
|
|
5
|
-
throw new Error(
|
|
6
|
-
"WORKER_BASE_URL (preferred) or NEXT_PUBLIC_WORKER_BASE_URL is required for background workers"
|
|
7
|
-
);
|
|
8
|
-
}
|
|
9
|
-
const url = new URL(raw);
|
|
10
|
-
url.search = "";
|
|
11
|
-
url.hash = "";
|
|
12
|
-
const path = url.pathname || "";
|
|
13
|
-
url.pathname = path.replace(/\/?workers\/(trigger|config)\/?$/, "");
|
|
14
|
-
const basePath = url.pathname.replace(/\/+$/, "");
|
|
15
|
-
url.pathname = `${basePath}/workers/trigger`.replace(/\/+$/, "");
|
|
16
|
-
return url.toString();
|
|
17
|
-
}
|
|
18
|
-
function serializeContext(ctx) {
|
|
19
|
-
const serialized = {};
|
|
20
|
-
if (ctx.requestId) {
|
|
21
|
-
serialized.requestId = ctx.requestId;
|
|
22
|
-
}
|
|
23
|
-
if (ctx.metadata && typeof ctx.metadata === "object") {
|
|
24
|
-
Object.assign(serialized, ctx.metadata);
|
|
25
|
-
}
|
|
26
|
-
if (ctx._serializeContext && typeof ctx._serializeContext === "function") {
|
|
27
|
-
const custom = ctx._serializeContext();
|
|
28
|
-
Object.assign(serialized, custom);
|
|
29
|
-
}
|
|
30
|
-
return serialized;
|
|
31
|
-
}
|
|
32
|
-
async function dispatch(workerId, input, inputSchema, options, ctx) {
|
|
33
|
-
const validatedInput = inputSchema.parse(input);
|
|
34
|
-
const jobId = options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
|
35
|
-
const triggerUrl = getWorkersTriggerUrl();
|
|
36
|
-
const serializedContext = ctx ? serializeContext(ctx) : {};
|
|
37
|
-
const messageBody = {
|
|
38
|
-
workerId,
|
|
39
|
-
jobId,
|
|
40
|
-
input: validatedInput,
|
|
41
|
-
context: serializedContext,
|
|
42
|
-
webhookUrl: options.webhookUrl,
|
|
43
|
-
metadata: options.metadata || {},
|
|
44
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
45
|
-
};
|
|
46
|
-
const headers = {
|
|
47
|
-
"Content-Type": "application/json"
|
|
48
|
-
};
|
|
49
|
-
const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
|
|
50
|
-
if (triggerKey) {
|
|
51
|
-
headers["x-workers-trigger-key"] = triggerKey;
|
|
52
|
-
}
|
|
53
|
-
const response = await fetch(triggerUrl, {
|
|
54
|
-
method: "POST",
|
|
55
|
-
headers,
|
|
56
|
-
body: JSON.stringify({
|
|
57
|
-
workerId,
|
|
58
|
-
body: messageBody
|
|
59
|
-
})
|
|
60
|
-
});
|
|
61
|
-
if (!response.ok) {
|
|
62
|
-
const text = await response.text().catch(() => "");
|
|
63
|
-
throw new Error(
|
|
64
|
-
`Failed to trigger worker "${workerId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
|
|
65
|
-
);
|
|
66
|
-
}
|
|
67
|
-
const data = await response.json().catch(() => ({}));
|
|
68
|
-
const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;
|
|
69
|
-
return {
|
|
70
|
-
messageId,
|
|
71
|
-
status: "queued",
|
|
72
|
-
jobId
|
|
73
|
-
};
|
|
74
|
-
}
|
|
75
|
-
async function dispatchLocal(handler, input, ctx) {
|
|
76
|
-
return handler({ input, ctx: ctx || {} });
|
|
77
|
-
}
|
|
78
|
-
|
|
79
|
-
export {
|
|
80
|
-
dispatch,
|
|
81
|
-
dispatchLocal
|
|
82
|
-
};
|
|
83
|
-
//# sourceMappingURL=chunk-FQCZSXDI.mjs.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/client.ts"],"sourcesContent":["/**\n * Client for dispatching background worker jobs.\n *\n * In production, dispatching happens via the workers HTTP API:\n * POST /workers/trigger -> enqueues message to SQS on the workers service side\n *\n * This avoids requiring AWS credentials in your Next.js app.\n */\n\nimport type { ZodType, z } from 'zod';\n\nexport interface DispatchOptions {\n /**\n * Optional webhook callback URL to notify when the job finishes.\n * Only called when provided. Default: no webhook (use job store / MongoDB only).\n */\n webhookUrl?: string;\n /**\n * Controls how dispatch executes.\n * - \"auto\" (default): local inline execution in development unless WORKERS_LOCAL_MODE=false.\n * - \"local\": force inline execution (no SQS).\n * - \"remote\": force SQS/Lambda dispatch even in development.\n */\n mode?: 'auto' | 'local' | 'remote';\n jobId?: string;\n metadata?: Record<string, any>;\n}\n\nexport interface DispatchResult {\n messageId: string;\n status: 'queued';\n jobId: string;\n}\n\nexport interface SerializedContext {\n requestId?: string;\n userId?: string;\n traceId?: string;\n [key: string]: any;\n}\n\n/**\n * Derives the full /workers/trigger URL from env.\n *\n * Preferred env vars:\n * - WORKER_BASE_URL: base URL of the workers service (e.g. 
https://.../prod)\n * - NEXT_PUBLIC_WORKER_BASE_URL: same, but exposed to the browser\n *\n * Legacy env vars (still supported for backwards compatibility):\n * - WORKERS_TRIGGER_API_URL / NEXT_PUBLIC_WORKERS_TRIGGER_API_URL\n * - WORKERS_CONFIG_API_URL / NEXT_PUBLIC_WORKERS_CONFIG_API_URL\n */\nfunction getWorkersTriggerUrl(): string {\n const raw =\n process.env.WORKER_BASE_URL ||\n process.env.NEXT_PUBLIC_WORKER_BASE_URL ||\n process.env.WORKERS_TRIGGER_API_URL ||\n process.env.NEXT_PUBLIC_WORKERS_TRIGGER_API_URL ||\n process.env.WORKERS_CONFIG_API_URL ||\n process.env.NEXT_PUBLIC_WORKERS_CONFIG_API_URL;\n\n if (!raw) {\n throw new Error(\n 'WORKER_BASE_URL (preferred) or NEXT_PUBLIC_WORKER_BASE_URL is required for background workers'\n );\n }\n\n const url = new URL(raw);\n url.search = '';\n url.hash = '';\n\n const path = url.pathname || '';\n\n // If the user pointed at a specific endpoint, normalize back to the service root.\n url.pathname = path.replace(/\\/?workers\\/(trigger|config)\\/?$/, '');\n\n const basePath = url.pathname.replace(/\\/+$/, '');\n url.pathname = `${basePath}/workers/trigger`.replace(/\\/+$/, '');\n\n return url.toString();\n}\n\n/**\n * Serializes context data for transmission to Lambda.\n * Only serializes safe, JSON-compatible properties.\n */\nfunction serializeContext(ctx: any): SerializedContext {\n const serialized: SerializedContext = {};\n\n if (ctx.requestId) {\n serialized.requestId = ctx.requestId;\n }\n\n // Extract any additional serializable metadata\n if (ctx.metadata && typeof ctx.metadata === 'object') {\n Object.assign(serialized, ctx.metadata);\n }\n\n // Allow custom context serialization via a helper property\n if (ctx._serializeContext && typeof ctx._serializeContext === 'function') {\n const custom = ctx._serializeContext();\n Object.assign(serialized, custom);\n }\n\n return serialized;\n}\n\n/**\n * Dispatches a background worker job to SQS.\n *\n * @param workerId - The ID of the worker to dispatch\n * 
@param input - The input data for the worker (will be validated against inputSchema)\n * @param inputSchema - Zod schema for input validation\n * @param options - Dispatch options including webhook URL\n * @param ctx - Optional context object (only serializable parts will be sent)\n * @returns Promise resolving to dispatch result with messageId and jobId\n */\nexport async function dispatch<INPUT_SCHEMA extends ZodType<any>>(\n workerId: string,\n input: z.input<INPUT_SCHEMA>,\n inputSchema: INPUT_SCHEMA,\n options: DispatchOptions,\n ctx?: any\n): Promise<DispatchResult> {\n // Validate input against schema\n const validatedInput = inputSchema.parse(input);\n\n // Generate job ID if not provided\n const jobId =\n options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n\n // Resolve /workers/trigger endpoint URL\n const triggerUrl = getWorkersTriggerUrl();\n\n // Serialize context (only safe, JSON-compatible parts)\n const serializedContext = ctx ? serializeContext(ctx) : {};\n\n // Job updates use MongoDB only; never pass jobStoreUrl/origin URL.\n const messageBody = {\n workerId,\n jobId,\n input: validatedInput,\n context: serializedContext,\n webhookUrl: options.webhookUrl,\n metadata: options.metadata || {},\n timestamp: new Date().toISOString(),\n };\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (triggerKey) {\n headers['x-workers-trigger-key'] = triggerKey;\n }\n\n const response = await fetch(triggerUrl, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n workerId,\n body: messageBody,\n }),\n });\n\n if (!response.ok) {\n const text = await response.text().catch(() => '');\n throw new Error(\n `Failed to trigger worker \"${workerId}\": ${response.status} ${response.statusText}${text ? ` - ${text}` : ''}`\n );\n }\n\n const data = (await response.json().catch(() => ({}))) as any;\n const messageId = data?.messageId ? 
String(data.messageId) : `trigger-${jobId}`;\n\n return {\n messageId,\n status: 'queued',\n jobId,\n };\n}\n\n/**\n * Local development mode: runs the handler immediately in the same process.\n * This bypasses SQS and Lambda for faster iteration during development.\n *\n * @param handler - The worker handler function\n * @param input - The input data\n * @param ctx - The context object\n * @returns The handler result\n */\nexport async function dispatchLocal<INPUT, OUTPUT>(\n handler: (params: { input: INPUT; ctx: any }) => Promise<OUTPUT>,\n input: INPUT,\n ctx?: any\n): Promise<OUTPUT> {\n return handler({ input, ctx: ctx || {} });\n}\n"],"mappings":";AAoDA,SAAS,uBAA+B;AACtC,QAAM,MACJ,QAAQ,IAAI,mBACZ,QAAQ,IAAI,+BACZ,QAAQ,IAAI,2BACZ,QAAQ,IAAI,uCACZ,QAAQ,IAAI,0BACZ,QAAQ,IAAI;AAEd,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,MAAM,IAAI,IAAI,GAAG;AACvB,MAAI,SAAS;AACb,MAAI,OAAO;AAEX,QAAM,OAAO,IAAI,YAAY;AAG7B,MAAI,WAAW,KAAK,QAAQ,oCAAoC,EAAE;AAElE,QAAM,WAAW,IAAI,SAAS,QAAQ,QAAQ,EAAE;AAChD,MAAI,WAAW,GAAG,QAAQ,mBAAmB,QAAQ,QAAQ,EAAE;AAE/D,SAAO,IAAI,SAAS;AACtB;AAMA,SAAS,iBAAiB,KAA6B;AACrD,QAAM,aAAgC,CAAC;AAEvC,MAAI,IAAI,WAAW;AACjB,eAAW,YAAY,IAAI;AAAA,EAC7B;AAGA,MAAI,IAAI,YAAY,OAAO,IAAI,aAAa,UAAU;AACpD,WAAO,OAAO,YAAY,IAAI,QAAQ;AAAA,EACxC;AAGA,MAAI,IAAI,qBAAqB,OAAO,IAAI,sBAAsB,YAAY;AACxE,UAAM,SAAS,IAAI,kBAAkB;AACrC,WAAO,OAAO,YAAY,MAAM;AAAA,EAClC;AAEA,SAAO;AACT;AAYA,eAAsB,SACpB,UACA,OACA,aACA,SACA,KACyB;AAEzB,QAAM,iBAAiB,YAAY,MAAM,KAAK;AAG9C,QAAM,QACJ,QAAQ,SAAS,OAAO,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAG/E,QAAM,aAAa,qBAAqB;AAGxC,QAAM,oBAAoB,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAGzD,QAAM,cAAc;AAAA,IAClB;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP,SAAS;AAAA,IACT,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ,YAAY,CAAC;AAAA,IAC/B,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AAEA,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AACA,QAAM,aAAa,QAAQ,IAAI;AAC/B,MAAI,YAAY;AACd,YAAQ,uBAAuB,IAAI;AAAA,EACrC;AAEA,QAAM,WAAW,MAAM,MAAM,YAAY;AAAA,IACvC,QAAQ;AAAA,IACR;AAAA,IACA,MAAM,KAA
K,UAAU;AAAA,MACnB;AAAA,MACA,MAAM;AAAA,IACR,CAAC;AAAA,EACH,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ,MAAM,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,IAC9G;AAAA,EACF;AAEA,QAAM,OAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,QAAM,YAAY,MAAM,YAAY,OAAO,KAAK,SAAS,IAAI,WAAW,KAAK;AAE7E,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,EACF;AACF;AAWA,eAAsB,cACpB,SACA,OACA,KACiB;AACjB,SAAO,QAAQ,EAAE,OAAO,KAAK,OAAO,CAAC,EAAE,CAAC;AAC1C;","names":[]}
|