@microfox/ai-worker-cli 1.0.2 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +3287 -32
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +3286 -32
- package/dist/index.js.map +1 -1
- package/package.json +3 -2
package/dist/index.js CHANGED

@@ -1,7 +1,10 @@
 #!/usr/bin/env node
 
 // src/index.ts
-import { Command as
+import { Command as Command4 } from "commander";
+import { readFileSync as readFileSync3 } from "fs";
+import { fileURLToPath } from "url";
+import { dirname as dirname3, join as join4 } from "path";
 
 // src/commands/push.ts
 import { Command } from "commander";
@@ -132,6 +135,59 @@ async function collectEnvUsageForWorkers(workerEntryFiles, projectRoot) {
   buildtimeKeys.delete("node");
   return { runtimeKeys, buildtimeKeys };
 }
+async function collectCalleeWorkerIds(workers, projectRoot) {
+  void projectRoot;
+  const calleeIdsByWorker = /* @__PURE__ */ new Map();
+  const workerIds = new Set(workers.map((w) => w.id));
+  for (const worker of workers) {
+    const calleeIds = /* @__PURE__ */ new Set();
+    const visited = /* @__PURE__ */ new Set();
+    const queue = [worker.filePath];
+    while (queue.length > 0) {
+      const file = queue.pop();
+      const normalized = path.resolve(file);
+      if (visited.has(normalized)) continue;
+      visited.add(normalized);
+      if (!fs.existsSync(normalized) || !fs.statSync(normalized).isFile()) continue;
+      const src = fs.readFileSync(normalized, "utf-8");
+      const re = /(?:ctx\.)?dispatchWorker\s*\(\s*['"]([^'"]+)['"]/g;
+      for (const match of src.matchAll(re)) {
+        if (match[1]) calleeIds.add(match[1]);
+      }
+      const specifiers = extractImportSpecifiers(src);
+      for (const spec of specifiers) {
+        if (!spec || !spec.startsWith(".")) continue;
+        const resolved = tryResolveLocalImport(normalized, spec);
+        if (resolved) queue.push(resolved);
+      }
+    }
+    if (calleeIds.size > 0) {
+      for (const calleeId of calleeIds) {
+        if (!workerIds.has(calleeId)) {
+          console.warn(
+            chalk.yellow(
+              `\u26A0\uFE0F Worker "${worker.id}" calls "${calleeId}" which is not in scanned workers (typo or other service?). Queue URL will not be auto-injected.`
+            )
+          );
+        }
+      }
+      calleeIdsByWorker.set(worker.id, calleeIds);
+    }
+  }
+  return calleeIdsByWorker;
+}
+function sanitizeWorkerIdForEnv(workerId) {
+  return workerId.replace(/-/g, "_").toUpperCase();
+}
+function toCamelCase(id) {
+  return id.split(/[^a-zA-Z0-9]+/).filter(Boolean).map(
+    (part, i) => i === 0 ? part.toLowerCase() : part.charAt(0).toUpperCase() + part.slice(1).toLowerCase()
+  ).join("");
+}
+function toPrefixedCamel(prefix, id) {
+  const camel = toCamelCase(id);
+  return prefix + (camel.charAt(0).toUpperCase() + camel.slice(1));
+}
 function readJsonFile(filePath) {
   try {
     return JSON.parse(fs.readFileSync(filePath, "utf-8"));
@@ -184,6 +240,20 @@ async function collectRuntimeDependenciesForWorkers(workerEntryFiles, projectRoo
   deps.delete("@microfox/ai-worker");
   return deps;
 }
+function getJobStoreType() {
+  const raw = process.env.WORKER_DATABASE_TYPE?.toLowerCase();
+  if (raw === "mongodb" || raw === "upstash-redis") return raw;
+  return "upstash-redis";
+}
+function filterDepsForJobStore(runtimeDeps, jobStoreType) {
+  const filtered = new Set(runtimeDeps);
+  filtered.delete("mongodb");
+  filtered.delete("@upstash/redis");
+  if (jobStoreType === "mongodb") filtered.add("mongodb");
+  else filtered.add("@upstash/redis");
+  if (runtimeDeps.has("mongodb")) filtered.add("mongodb");
+  return filtered;
+}
 function buildDependenciesMap(projectRoot, deps) {
   const projectPkg = readJsonFile(path.join(projectRoot, "package.json")) || {};
   const projectDeps = projectPkg.dependencies || {};
@@ -253,12 +323,151 @@ async function scanWorkers(aiPath = "app/ai") {
   }
   return workers;
 }
-async function
+async function scanQueues(aiPath = "app/ai") {
+  const base = aiPath.replace(/\\/g, "/");
+  const pattern = `${base}/queues/**/*.queue.ts`;
+  const files = await glob(pattern);
+  const queues = [];
+  for (const filePath of files) {
+    try {
+      const content = fs.readFileSync(filePath, "utf-8");
+      const idMatch = content.match(/defineWorkerQueue\s*\(\s*\{[\s\S]*?id:\s*['"]([^'"]+)['"]/);
+      if (!idMatch) {
+        console.warn(chalk.yellow(`\u26A0\uFE0F Skipping ${filePath}: No queue id found in defineWorkerQueue`));
+        continue;
+      }
+      const queueId = idMatch[1];
+      const steps = [];
+      const stepsMatch = content.match(/steps:\s*\[([\s\S]*?)\]/);
+      if (stepsMatch) {
+        const stepsStr = stepsMatch[1];
+        const stepRegex = /\{\s*workerId:\s*['"]([^'"]+)['"](?:,\s*(?:\/\/[^\r\n]*\r?\n\s*)?delaySeconds:\s*(\d+))?(?:,\s*(?:\/\/[^\r\n]*\r?\n\s*)?mapInputFromPrev:\s*['"]([^'"]+)['"])?\s*,?\s*(?:\/\/[^\r\n]*\r?\n\s*)?(?=\s*\})\s*\},?/g;
+        let m;
+        while ((m = stepRegex.exec(stepsStr)) !== null) {
+          steps.push({
+            workerId: m[1],
+            delaySeconds: m[2] ? parseInt(m[2], 10) : void 0,
+            mapInputFromPrev: m[3]
+          });
+        }
+      }
+      let schedule;
+      const contentWithoutLineComments = content.replace(/\/\/[^\n]*/g, "");
+      const scheduleStrMatch = contentWithoutLineComments.match(/schedule:\s*['"]([^'"]+)['"]/);
+      const scheduleObjMatch = contentWithoutLineComments.match(/schedule:\s*(\{[^}]+(?:\{[^}]*\}[^}]*)*\})/);
+      if (scheduleStrMatch) {
+        schedule = scheduleStrMatch[1];
+      } else if (scheduleObjMatch) {
+        try {
+          schedule = new Function("return " + scheduleObjMatch[1])();
+        } catch {
+          schedule = void 0;
+        }
+      }
+      queues.push({ id: queueId, filePath, steps, schedule });
+    } catch (error) {
+      console.error(chalk.red(`\u274C Error processing ${filePath}:`), error);
+    }
+  }
+  return queues;
+}
+function generateQueueRegistry(queues, outputDir, projectRoot) {
+  const generatedDir = path.join(outputDir, "generated");
+  if (!fs.existsSync(generatedDir)) {
+    fs.mkdirSync(generatedDir, { recursive: true });
+  }
+  const relToRoot = path.relative(generatedDir, projectRoot).replace(/\\/g, "/");
+  const queueModulesLines = [];
+  const queueModulesEntries = [];
+  const queuesWithMapping = queues.filter(
+    (q) => q.steps?.some((s) => s.mapInputFromPrev)
+  );
+  for (let i = 0; i < queuesWithMapping.length; i++) {
+    const q = queuesWithMapping[i];
+    const relPath = (relToRoot + "/" + q.filePath.replace(/\\/g, "/")).replace(/\.ts$/, "");
+    const safeId = q.id.replace(/[^a-zA-Z0-9]/g, "");
+    queueModulesLines.push(`const queueModule_${safeId} = require('${relPath}');`);
+    queueModulesEntries.push(` '${q.id}': queueModule_${safeId},`);
+  }
+  const queueModulesBlock = queueModulesLines.length > 0 ? `
+${queueModulesLines.join("\n")}
+const queueModules = {
+${queueModulesEntries.join("\n")}
+};
+` : `
+const queueModules = {};
+`;
+  const registryContent = `/**
+ * Auto-generated queue registry. DO NOT EDIT.
+ * Generated by @microfox/ai-worker-cli from .queue.ts files.
+ */
+${queueModulesBlock}
+
+const QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};
+
+export function getQueueById(queueId) {
+  return QUEUES.find((q) => q.id === queueId);
+}
+
+export function getNextStep(queueId, stepIndex) {
+  const queue = getQueueById(queueId);
+  if (!queue || !queue.steps || stepIndex < 0 || stepIndex >= queue.steps.length - 1) {
+    return undefined;
+  }
+  const step = queue.steps[stepIndex + 1];
+  return step ? { workerId: step.workerId, delaySeconds: step.delaySeconds, mapInputFromPrev: step.mapInputFromPrev } : undefined;
+}
+
+export function invokeMapInput(queueId, stepIndex, initialInput, previousOutputs) {
+  const queue = getQueueById(queueId);
+  const step = queue?.steps?.[stepIndex];
+  const fnName = step?.mapInputFromPrev;
+  if (!fnName) return previousOutputs.length ? previousOutputs[previousOutputs.length - 1].output : initialInput;
+  const mod = queueModules[queueId];
+  if (!mod || typeof mod[fnName] !== 'function') return previousOutputs.length ? previousOutputs[previousOutputs.length - 1].output : initialInput;
+  return mod[fnName](initialInput, previousOutputs);
+}
+`;
+  const registryPath = path.join(generatedDir, "workerQueues.registry.js");
+  fs.writeFileSync(registryPath, registryContent);
+  console.log(chalk.green(`\u2713 Generated queue registry: ${registryPath}`));
+}
+function getWorkersInQueues(queues) {
+  const set = /* @__PURE__ */ new Set();
+  for (const q of queues) {
+    for (const step of q.steps) {
+      set.add(step.workerId);
+    }
+  }
+  return set;
+}
+function mergeQueueCallees(calleeIds, queues, workers) {
+  const merged = new Map(calleeIds);
+  const workerIds = new Set(workers.map((w) => w.id));
+  for (const queue of queues) {
+    for (let i = 0; i < queue.steps.length - 1; i++) {
+      const fromWorkerId = queue.steps[i].workerId;
+      const toWorkerId = queue.steps[i + 1].workerId;
+      if (!workerIds.has(toWorkerId)) continue;
+      let callees = merged.get(fromWorkerId);
+      if (!callees) {
+        callees = /* @__PURE__ */ new Set();
+        merged.set(fromWorkerId, callees);
+      }
+      callees.add(toWorkerId);
+    }
+  }
+  return merged;
+}
+async function generateHandlers(workers, outputDir, queues = []) {
   const handlersDir = path.join(outputDir, "handlers");
-
-
+  const workersSubdir = path.join(handlersDir, "workers");
+  const workersInQueues = getWorkersInQueues(queues);
+  if (fs.existsSync(workersSubdir)) {
+    fs.rmSync(workersSubdir, { recursive: true, force: true });
   }
   fs.mkdirSync(handlersDir, { recursive: true });
+  fs.mkdirSync(workersSubdir, { recursive: true });
   for (const worker of workers) {
     const handlerFile = path.join(handlersDir, worker.handlerPath.replace("handlers/", "") + ".js");
     const handlerDir = path.dirname(handlerFile);
@@ -279,18 +488,88 @@ async function generateHandlers(workers, outputDir) {
     const exportName = exportMatch ? exportMatch[2] : "worker";
     const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
     const workerRef = defaultExport ? "workerModule.default" : `workerModule.${exportName}`;
-    const
+    const inQueue = workersInQueues.has(worker.id);
+    const registryRelPath = path.relative(path.dirname(path.resolve(handlerFile)), path.join(outputDir, "generated", "workerQueues.registry")).split(path.sep).join("/");
+    const registryImportPath = registryRelPath.startsWith(".") ? registryRelPath : "./" + registryRelPath;
+    const handlerCreation = inQueue ? `
+import { createLambdaHandler, wrapHandlerForQueue } from '@microfox/ai-worker/handler';
+import { getQueueJob } from '@microfox/ai-worker/queueJobStore';
+import * as queueRegistry from '${registryImportPath}';
+import * as workerModule from '${relativeImportPath}';
+
+const WORKER_LOG_PREFIX = '[WorkerEntrypoint]';
+
+const workerAgent = ${workerRef};
+if (!workerAgent || typeof workerAgent.handler !== 'function') {
+  throw new Error('Worker module must export a createWorker result (default or named) with .handler');
+}
+
+const queueRuntime = {
+  getNextStep: queueRegistry.getNextStep,
+  invokeMapInput: queueRegistry.invokeMapInput,
+  getQueueJob,
+};
+const wrappedHandler = wrapHandlerForQueue(workerAgent.handler, queueRuntime);
+
+const baseHandler = createLambdaHandler(wrappedHandler, workerAgent.outputSchema);
+
+export const handler = async (event: any, context: any) => {
+  const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;
+  let queueId, queueJobId;
+  try {
+    const first = (event as any)?.Records?.[0];
+    if (first?.body) {
+      const body = typeof first.body === 'string' ? JSON.parse(first.body) : first.body;
+      const qc = body?.input?.__workerQueue ?? body?.metadata?.__workerQueue;
+      if (qc?.id) queueId = qc.id;
+      if (qc?.queueJobId) queueJobId = qc.queueJobId;
+    }
+    console.log(WORKER_LOG_PREFIX, {
+      workerId: workerAgent.id,
+      inQueue: true,
+      ...(queueId && { queueId }),
+      ...(queueJobId && { queueJobId }),
+      records,
+      requestId: (context as any)?.awsRequestId,
+    });
+  } catch {
+    // Best-effort logging only
+  }
+  return baseHandler(event, context);
+};
+
+export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
+` : `
 import { createLambdaHandler } from '@microfox/ai-worker/handler';
 import * as workerModule from '${relativeImportPath}';
 
+const WORKER_LOG_PREFIX = '[WorkerEntrypoint]';
+
 const workerAgent = ${workerRef};
 if (!workerAgent || typeof workerAgent.handler !== 'function') {
   throw new Error('Worker module must export a createWorker result (default or named) with .handler');
 }
 
-
+const baseHandler = createLambdaHandler(workerAgent.handler, workerAgent.outputSchema);
+
+export const handler = async (event: any, context: any) => {
+  const records = Array.isArray((event as any)?.Records) ? (event as any).Records.length : 0;
+  try {
+    console.log(WORKER_LOG_PREFIX, {
+      workerId: workerAgent.id,
+      inQueue: false,
+      records,
+      requestId: (context as any)?.awsRequestId,
+    });
+  } catch {
+    // Best-effort logging only
+  }
+  return baseHandler(event, context);
+};
+
 export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.workerConfig;
 `;
+    const tempEntryContent = handlerCreation;
     fs.writeFileSync(tempEntryFile, tempEntryContent);
     try {
       const fixLazyCachePlugin = {
@@ -361,7 +640,8 @@ export const exportedWorkerConfig = workerModule.workerConfig || workerAgent?.wo
   console.log(chalk.green(`\u2713 Generated ${workers.length} bundled handlers`));
 }
 function generateDocsHandler(outputDir, serviceName, stage, region) {
-  const
+  const apiDir = path.join(outputDir, "handlers", "api");
+  const handlerFile = path.join(apiDir, "docs.js");
   const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
   const handlerDir = path.dirname(handlerFile);
   if (!fs.existsSync(handlerDir)) {
@@ -577,7 +857,8 @@ export const handler = async (
   console.log(chalk.green(`\u2713 Generated docs.json handler`));
 }
 function generateTriggerHandler(outputDir, serviceName) {
-  const
+  const apiDir = path.join(outputDir, "handlers", "api");
+  const handlerFile = path.join(apiDir, "workers-trigger.js");
   const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
   const handlerDir = path.dirname(handlerFile);
   if (!fs.existsSync(handlerDir)) {
@@ -702,12 +983,142 @@ export const handler = async (event: APIGatewayProxyEvent): Promise<APIGatewayPr
   fs.unlinkSync(tempEntryFile);
   console.log(chalk.green(`\u2713 Generated /workers/trigger handler`));
 }
-function
-  const
+function generateQueueHandler(outputDir, queue, serviceName) {
+  const queueFileId = queue.id.replace(/[^a-zA-Z0-9-]/g, "-").replace(/-+/g, "-");
+  const queuesDir = path.join(outputDir, "handlers", "queues");
+  const handlerFile = path.join(queuesDir, `${queueFileId}.js`);
+  const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
+  const handlerDir = path.dirname(handlerFile);
+  if (!fs.existsSync(handlerDir)) {
+    fs.mkdirSync(handlerDir, { recursive: true });
+  }
+  const firstWorkerId = queue.steps[0]?.workerId;
+  if (!firstWorkerId) return;
+  const handlerContent = `/**
+ * Auto-generated queue handler for queue "${queue.id}"
+ * DO NOT EDIT - This file is generated by @microfox/ai-worker-cli
+ * Invoked by schedule (if configured) or HTTP POST /queues/${queue.id}/start (dispatch proxy).
+ */
+
+import { SQSClient, GetQueueUrlCommand, SendMessageCommand } from '@aws-sdk/client-sqs';
+import { upsertInitialQueueJob } from '@microfox/ai-worker/queueJobStore';
+
+const QUEUE_ID = ${JSON.stringify(queue.id)};
+const FIRST_WORKER_ID = ${JSON.stringify(firstWorkerId)};
+const SERVICE_NAME = ${JSON.stringify(serviceName)};
+
+function isHttpEvent(event: any): event is { body?: string; requestContext?: any } {
+  return event && typeof event.requestContext === 'object' && (event.body !== undefined || event.httpMethod === 'POST');
+}
+
+export const handler = async (event: any) => {
+  const stage = process.env.ENVIRONMENT || process.env.STAGE || 'prod';
+  const region = process.env.AWS_REGION || 'us-east-1';
+  const queueName = \`\${SERVICE_NAME}-\${FIRST_WORKER_ID}-\${stage}\`;
+
+  let jobId: string;
+  let initialInput: Record<string, any>;
+  let context: Record<string, any> = {};
+  let metadata: Record<string, any> = {};
+  let webhookUrl: string | undefined;
+
+  if (isHttpEvent(event)) {
+    const apiKey = process.env.WORKERS_TRIGGER_API_KEY;
+    if (apiKey) {
+      const provided = (event.headers && (event.headers['x-workers-trigger-key'] || event.headers['X-Workers-Trigger-Key'])) || '';
+      if (provided !== apiKey) {
+        return { statusCode: 401, headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ error: 'Unauthorized' }) };
+      }
+    }
+    let body: { input?: any; initialInput?: any; jobId?: string; metadata?: any; context?: any; webhookUrl?: string } = {};
+    if (event.body) {
+      try {
+        body = typeof event.body === 'string' ? JSON.parse(event.body) : event.body;
+      } catch (_) {}
+    }
+    jobId = (body.jobId && String(body.jobId).trim()) || 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);
+    const rawInput = body.input != null ? body.input : body.initialInput;
+    initialInput = rawInput != null && typeof rawInput === 'object' ? rawInput : {};
+    context = body.context && typeof body.context === 'object' ? body.context : {};
+    metadata = body.metadata && typeof body.metadata === 'object' ? body.metadata : {};
+    webhookUrl = typeof body.webhookUrl === 'string' ? body.webhookUrl : undefined;
+
+    const response = { statusCode: 200, headers: { 'Content-Type': 'application/json' }, body: '' };
+    try {
+      await upsertInitialQueueJob({ queueJobId: jobId, queueId: QUEUE_ID, firstWorkerId: FIRST_WORKER_ID, firstWorkerJobId: jobId, metadata });
+      await sendFirstMessage(region, queueName, jobId, initialInput, context, metadata, webhookUrl, 'http');
+      response.body = JSON.stringify({ queueId: QUEUE_ID, jobId, status: 'queued' });
+    } catch (err: any) {
+      response.statusCode = 500;
+      response.body = JSON.stringify({ error: err?.message || String(err) });
+    }
+    return response;
+  }
+
+  // Scheduled invocation
+  jobId = 'job-' + Date.now() + '-' + Math.random().toString(36).slice(2, 11);
+  initialInput = {};
+  try {
+    await upsertInitialQueueJob({ queueJobId: jobId, queueId: QUEUE_ID, firstWorkerId: FIRST_WORKER_ID, firstWorkerJobId: jobId, metadata: {} });
+  } catch (_) {}
+  await sendFirstMessage(region, queueName, jobId, initialInput, context, metadata, webhookUrl, 'schedule');
+};
+
+async function sendFirstMessage(
+  region: string,
+  queueName: string,
+  jobId: string,
+  initialInput: Record<string, any>,
+  context: Record<string, any>,
+  metadata: Record<string, any>,
+  webhookUrl?: string,
+  trigger?: 'schedule' | 'http'
+) {
+  const sqs = new SQSClient({ region });
+  const { QueueUrl } = await sqs.send(new GetQueueUrlCommand({ QueueName: queueName }));
+  if (!QueueUrl) {
+    throw new Error('Queue URL not found: ' + queueName);
+  }
+
+  const queueContext = { id: QUEUE_ID, stepIndex: 0, initialInput, queueJobId: jobId };
+  const messageBody = {
+    workerId: FIRST_WORKER_ID,
+    jobId,
+    input: { ...initialInput, __workerQueue: queueContext },
+    context,
+    metadata: { ...metadata, __workerQueue: queueContext },
+    ...(webhookUrl ? { webhookUrl } : {}),
+    timestamp: new Date().toISOString(),
+  };
+
+  await sqs.send(new SendMessageCommand({
+    QueueUrl,
+    MessageBody: JSON.stringify(messageBody),
+  }));
+
+  console.log('[queue] Dispatched first worker', { queueId: QUEUE_ID, jobId, workerId: FIRST_WORKER_ID, trigger: trigger ?? 'unknown' });
+}
+`;
+  fs.writeFileSync(tempEntryFile, handlerContent);
+  esbuild.buildSync({
+    entryPoints: [tempEntryFile],
+    bundle: true,
+    platform: "node",
+    target: "node20",
+    outfile: handlerFile,
+    external: ["aws-sdk", "canvas", "@microfox/puppeteer-sls", "@sparticuz/chromium"],
+    packages: "bundle",
+    logLevel: "error"
+  });
+  fs.unlinkSync(tempEntryFile);
+  console.log(chalk.green(`\u2713 Generated queue handler for ${queue.id}`));
+}
+function generateWorkersConfigHandler(outputDir, workers, serviceName, queues = []) {
+  const apiDir = path.join(outputDir, "handlers", "api");
+  const handlerFile = path.join(apiDir, "workers-config.js");
   const tempEntryFile = handlerFile.replace(".js", ".temp.ts");
   const handlerDir = path.dirname(handlerFile);
-  if (fs.existsSync(handlerDir)
-  } else if (!fs.existsSync(handlerDir)) {
+  if (!fs.existsSync(handlerDir)) {
     fs.mkdirSync(handlerDir, { recursive: true });
   }
   const handlerContent = `/**
@@ -718,8 +1129,9 @@ function generateWorkersConfigHandler(outputDir, workers, serviceName) {
 import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
 import { SQSClient, GetQueueUrlCommand } from '@aws-sdk/client-sqs';
 
-// Worker IDs embedded at build time
+// Worker IDs and queue definitions embedded at build time.
 const WORKER_IDS: string[] = ${JSON.stringify(workers.map((w) => w.id), null, 2)};
+const QUEUES = ${JSON.stringify(queues.map((q) => ({ id: q.id, steps: q.steps, schedule: q.schedule })), null, 2)};
 const SERVICE_NAME = ${JSON.stringify(serviceName)};
 
 export const handler = async (
@@ -790,6 +1202,7 @@ export const handler = async (
       stage,
       region,
       workers,
+      queues: QUEUES,
       ...(debug ? { attemptedQueueNames, errors } : {}),
     }),
   };
@@ -899,7 +1312,7 @@ function processScheduleEvents(scheduleConfig) {
   }
   return events;
 }
-function generateServerlessConfig(workers, stage, region, envVars, serviceName) {
+function generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds = /* @__PURE__ */ new Map(), queues = []) {
   const resources = {
     Resources: {},
     Outputs: {}
@@ -965,7 +1378,7 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
   }
   const functions = {};
   for (const worker of workers) {
-    const functionName =
+    const functionName = toPrefixedCamel("worker", worker.id);
     const events = [
       {
         sqs: {
@@ -988,9 +1401,24 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
     if (worker.workerConfig?.layers?.length) {
       functions[functionName].layers = worker.workerConfig.layers;
     }
+    const callees = calleeIds.get(worker.id);
+    if (callees && callees.size > 0) {
+      const env = {};
+      for (const calleeId of callees) {
+        const calleeWorker = workers.find((w) => w.id === calleeId);
+        if (calleeWorker) {
+          const queueLogicalId = `WorkerQueue${calleeWorker.id.replace(/[^a-zA-Z0-9]/g, "")}${stage}`;
+          const envKey = `WORKER_QUEUE_URL_${sanitizeWorkerIdForEnv(calleeId)}`;
+          env[envKey] = { Ref: queueLogicalId };
+        }
+      }
+      if (Object.keys(env).length > 0) {
+        functions[functionName].environment = env;
+      }
+    }
   }
   functions["getDocs"] = {
-    handler: "handlers/docs.handler",
+    handler: "handlers/api/docs.handler",
     events: [
       {
         http: {
@@ -1002,7 +1430,7 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
     ]
   };
   functions["triggerWorker"] = {
-    handler: "handlers/workers-trigger.handler",
+    handler: "handlers/api/workers-trigger.handler",
     events: [
       {
         http: {
@@ -1014,7 +1442,7 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
     ]
   };
   functions["workersConfig"] = {
-    handler: "handlers/workers-config.handler",
+    handler: "handlers/api/workers-config.handler",
     events: [
       {
         http: {
@@ -1025,8 +1453,30 @@ function generateServerlessConfig(workers, stage, region, envVars, serviceName)
       }
     ]
   };
+  for (const queue of queues) {
+    const queueFileId = queue.id.replace(/[^a-zA-Z0-9-]/g, "-").replace(/-+/g, "-");
+    const fnName = toPrefixedCamel("queue", queue.id);
+    const events = [
+      {
+        http: {
+          path: `queues/${queueFileId}/start`,
+          method: "POST",
+          cors: true
+        }
+      }
+    ];
+    if (queue.schedule) {
+      events.push(...processScheduleEvents(queue.schedule));
+    }
+    functions[fnName] = {
+      handler: `handlers/queues/${queueFileId}.handler`,
+      timeout: 60,
+      memorySize: 128,
+      events
+    };
+  }
   const safeEnvVars = {};
-  const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+  const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "UPSTASH_", "WORKER_", "WORKERS_", "WORKFLOW_", "REMOTION_", "QUEUE_JOB_", "DEBUG_WORKER_QUEUES"];
   for (const [key, value] of Object.entries(envVars)) {
     if (allowedPrefixes.some((prefix) => key.startsWith(prefix))) {
       safeEnvVars[key] = value;
@@ -1179,8 +1629,10 @@ async function build2(args) {
     workers.map((w) => w.filePath),
     process.cwd()
   );
-  const
-  const
+  const jobStoreType = getJobStoreType();
+  const filteredDeps = filterDepsForJobStore(runtimeDeps, jobStoreType);
+  const dependencies = buildDependenciesMap(process.cwd(), filteredDeps);
+  const packageJson2 = {
     name: "ai-router-workers",
     version: "1.0.0",
     description: "Auto-generated serverless workers",
@@ -1197,7 +1649,7 @@ async function build2(args) {
   };
   fs.writeFileSync(
     path.join(serverlessDir, "package.json"),
-    JSON.stringify(
+    JSON.stringify(packageJson2, null, 2)
   );
   const envVars = loadEnvVars();
   const workerEntryFiles = workers.map((w) => w.filePath);
@@ -1238,8 +1690,13 @@ async function build2(args) {
       console.warn(chalk.yellow("\u26A0\uFE0F Failed to parse microfox.json, using default service name"));
     }
   }
+  const queues = await scanQueues(aiPath);
+  if (queues.length > 0) {
+    console.log(chalk.blue(`\u2139\uFE0F Found ${queues.length} queue(s): ${queues.map((q) => q.id).join(", ")}`));
+    generateQueueRegistry(queues, serverlessDir, process.cwd());
+  }
   ora("Generating handlers...").start().succeed("Generated handlers");
-  await generateHandlers(workers, serverlessDir);
+  await generateHandlers(workers, serverlessDir, queues);
   const extractSpinner = ora("Extracting worker configs from bundled handlers...").start();
   for (const worker of workers) {
     try {
@@ -1254,7 +1711,8 @@ async function build2(args) {
             console.log(chalk.gray(` \u2713 ${worker.id}: found ${module.exportedWorkerConfig.layers.length} layer(s)`));
           }
         } else {
-
+          worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+          console.log(chalk.gray(` \u2139 ${worker.id}: using default config (exportedWorkerConfig not in bundle)`));
         }
       } catch (importError) {
         console.log(chalk.gray(` \u2139 ${worker.id}: extracting config from source (import failed: ${importError?.message?.slice(0, 50) || "runtime error"}...)`));
@@ -1275,28 +1733,40 @@ async function build2(args) {
         }
       }
     } catch (fallbackError) {
-
+      worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+      console.log(chalk.gray(` \u2139 ${worker.id}: using default config (fallback extraction failed)`));
     }
   }
   } else {
-
+    worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+    console.warn(chalk.yellow(` \u26A0 ${worker.id}: handler file not found: ${handlerFile}, using defaults`));
+  }
+  if (!worker.workerConfig) {
+    worker.workerConfig = { timeout: 300, memorySize: 512 };
+    console.log(chalk.gray(` \u2139 ${worker.id}: using default config`));
   }
   } catch (error) {
-
+    worker.workerConfig = worker.workerConfig ?? { timeout: 300, memorySize: 512 };
+    console.warn(chalk.yellow(` \u26A0 ${worker.id}: failed to extract config: ${error?.message || error}, using defaults`));
   }
   }
   extractSpinner.succeed("Extracted configs");
-  generateWorkersConfigHandler(serverlessDir, workers, serviceName);
+  generateWorkersConfigHandler(serverlessDir, workers, serviceName, queues);
   generateDocsHandler(serverlessDir, serviceName, stage, region);
   generateTriggerHandler(serverlessDir, serviceName);
-
+  for (const queue of queues) {
+    generateQueueHandler(serverlessDir, queue, serviceName);
+  }
+  let calleeIds = await collectCalleeWorkerIds(workers, process.cwd());
+  calleeIds = mergeQueueCallees(calleeIds, queues, workers);
+  const config = generateServerlessConfig(workers, stage, region, envVars, serviceName, calleeIds, queues);
   const envStage = fs.existsSync(microfoxJsonPath) ? "prod" : stage;
   const safeEnvVars = {
     ENVIRONMENT: envStage,
     STAGE: envStage,
     NODE_ENV: envStage
   };
-  const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "WORKERS_", "REMOTION_"];
+  const allowedPrefixes = ["OPENAI_", "ANTHROPIC_", "DATABASE_", "MONGODB_", "REDIS_", "UPSTASH_", "WORKER_", "WORKERS_", "WORKFLOW_", "REMOTION_", "QUEUE_JOB_", "DEBUG_WORKER_QUEUES"];
   for (const [key, value] of Object.entries(envVars)) {
     if (key.startsWith("AWS_")) continue;
     if (allowedPrefixes.some((prefix) => key.startsWith(prefix)) || referencedEnvKeys.has(key)) {
@@ -1370,10 +1840,2794 @@ var pushCommand = new Command().name("push").description("Build and deploy backg
   await deploy(options);
 });
 
+// src/commands/new.ts
+import { Command as Command2 } from "commander";
+import * as fs2 from "fs";
+import * as path2 from "path";
+import chalk2 from "chalk";
+import ora2 from "ora";
+import prompts from "prompts";
+var WORKER_DIR_DEFAULT = "app/ai/workers";
+var QUEUES_DIR_DEFAULT = "app/ai/queues";
+function scaffoldWorker(projectRoot, id, options) {
+  const dir = path2.resolve(projectRoot, options.dir || WORKER_DIR_DEFAULT);
+  if (!fs2.existsSync(dir)) {
+    fs2.mkdirSync(dir, { recursive: true });
+  }
+  const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, "-");
+  const filePath = path2.join(dir, `${fileSafeId}.worker.ts`);
+  const timeout = Number(options.timeout || "300") || 300;
+  const memorySize = Number(options.memory || "512") || 512;
+  const scheduleLine = options.schedule ? ` schedule: '${options.schedule}',
+` : "";
+  const contents = `import { createWorker, type WorkerConfig } from '@microfox/ai-worker';
+import { z } from 'zod';
+import type { WorkerHandlerParams } from '@microfox/ai-worker/handler';
+
+const InputSchema = z.object({
+  // TODO: define input fields
+});
+
+const OutputSchema = z.object({
+  // TODO: define output fields
+});
+
+type Input = z.infer<typeof InputSchema>;
+type Output = z.infer<typeof OutputSchema>;
+
+export const workerConfig: WorkerConfig = {
+  timeout: ${timeout},
+  memorySize: ${memorySize},
+${scheduleLine}};
+
+export default createWorker<typeof InputSchema, Output>({
+  id: '${id}',
+  inputSchema: InputSchema,
+  outputSchema: OutputSchema,
+  async handler({ input, ctx }: WorkerHandlerParams<Input, Output>) {
+    const { jobId, workerId, jobStore, dispatchWorker, logger } = ctx;
+    logger.info('start', { jobId, workerId });
+
+    await jobStore?.update({ status: 'running' });
+
+    // TODO: implement your business logic here
+    const result: Output = {} as any;
+
+    await jobStore?.update({ status: 'completed', output: result });
+    return result;
+  },
+});
+`;
+  fs2.writeFileSync(filePath, contents, "utf-8");
+  return path2.relative(projectRoot, filePath);
+}
+function scaffoldQueue(projectRoot, id, options) {
+  const dir = path2.resolve(projectRoot, options.dir || QUEUES_DIR_DEFAULT);
+  if (!fs2.existsSync(dir)) {
+    fs2.mkdirSync(dir, { recursive: true });
+  }
+  const fileSafeId = id.trim().replace(/[^a-zA-Z0-9_-]+/g, "-");
+  const filePath = path2.join(dir, `${fileSafeId}.queue.ts`);
+  const contents = `import { defineWorkerQueue } from '@microfox/ai-worker/queue';
+
+/**
+ * Worker queue: ${id}
+ * Steps run in sequence. Each step's output can be mapped to the next step's input.
+ */
+export default defineWorkerQueue({
+  id: '${id}',
+  steps: [
+    { workerId: 'first-worker' },
+    // Add more steps: { workerId: 'second-worker' }, { workerId: 'third-worker', delaySeconds: 10 }
+  ],
+  // Optional: run on a schedule (CLI will generate a queue-starter Lambda)
+  // schedule: 'cron(0 3 * * ? *)',
+});
+`;
+  fs2.writeFileSync(filePath, contents, "utf-8");
+  return path2.relative(projectRoot, filePath);
+}
+var newCommand = new Command2().name("new").description("Scaffold a new worker or queue (interactive: choose type, then enter id)").argument("[id]", "Worker or queue ID (optional; will prompt if omitted)").option("--type <worker|queue>", "Scaffold type (skips interactive prompt)").option("--dir <path>", "Directory for the output file (workers: app/ai/workers, queues: app/ai/queues)", "").option("--schedule <expression>", 'Optional schedule (workers only; e.g. "cron(0 3 * * ? *)")').option("--timeout <seconds>", "Lambda timeout in seconds (workers only)", "300").option("--memory <mb>", "Lambda memory in MB (workers only)", "512").action(
+  async (idArg, options) => {
+    const projectRoot = process.cwd();
+    let type;
+    let id;
+    if (options.type === "worker" || options.type === "queue") {
+      type = options.type;
+      id = (idArg ?? "").trim();
+      if (!id) {
+        const res = await prompts({
+          type: "text",
+          name: "id",
+          message: `Enter ${type} ID:`,
+          validate: (v) => v.trim() ? true : "ID is required"
+        });
+        if (typeof res.id !== "string") {
+          process.exitCode = 1;
+          return;
+        }
+        id = res.id.trim();
+      }
+    } else {
+      const typeRes = await prompts({
+        type: "select",
+        name: "type",
+        message: "What do you want to create?",
+        choices: [
+          { title: "Worker", value: "worker", description: "A single background worker (.worker.ts)" },
+          { title: "Queue", value: "queue", description: "A multi-step worker queue (.queue.ts)" }
+        ]
+      });
+      if (typeRes.type === void 0) {
+        process.exitCode = 1;
+        return;
+      }
+      type = typeRes.type;
+      id = (idArg ?? "").trim();
+      if (!id) {
+        const idRes = await prompts({
+          type: "text",
+          name: "id",
+          message: `Enter ${type} ID:`,
+          validate: (v) => v.trim() ? true : "ID is required"
+        });
+        if (typeof idRes.id !== "string") {
+          process.exitCode = 1;
+          return;
+        }
+        id = idRes.id.trim();
+      }
+    }
+    const spinner = ora2(`Scaffolding ${type}...`).start();
+    try {
+      const dirOpt = options.dir ? { dir: options.dir } : {};
+      if (type === "worker") {
+        const relativePath = scaffoldWorker(projectRoot, id, {
+          ...dirOpt,
+          schedule: options.schedule,
+          timeout: options.timeout,
+          memory: options.memory
+        });
+        spinner.succeed(
+          `Created worker: ${chalk2.cyan(relativePath)}
+Next: run ${chalk2.yellow("npx ai-worker push")} to build & deploy.`
+        );
+      } else {
+        const relativePath = scaffoldQueue(projectRoot, id, dirOpt);
+        spinner.succeed(
+          `Created queue: ${chalk2.cyan(relativePath)}
+Edit steps (workerId) to match your workers, then run ${chalk2.yellow("npx ai-worker push")} to build & deploy.`
+        );
+      }
+    } catch (error) {
+      const err = error;
+      spinner.fail(`Failed to scaffold ${type}`);
+      console.error(chalk2.red(err?.stack || err?.message || String(error)));
+      process.exitCode = 1;
+    }
+  }
+);
+
+// src/commands/boilerplate.ts
+import { Command as Command3 } from "commander";
+import * as fs3 from "fs";
+import * as path3 from "path";
+import chalk3 from "chalk";
+import ora3 from "ora";
+var TEMPLATES = {
+  "stores/jobStore.ts": `/**
+ * Job store for tracking worker job status and results.
+ *
+ * Always uses MongoDB. Workers run on AWS Lambda and update jobs via the API;
+ * in-memory storage is not shared across processes, so a persistent store is required.
+ *
+ * Configure via \`microfox.config.ts\` -> \`workflowSettings.jobStore\` or env:
+ * - WORKER_DATABASE_TYPE: 'mongodb' | 'upstash-redis' (default: upstash-redis)
+ * - DATABASE_MONGODB_URI or MONGODB_URI (required for mongodb)
+ * - DATABASE_MONGODB_DB or MONGODB_DB; MONGODB_WORKER_JOBS_COLLECTION (default: worker_jobs)
+ * - WORKER_UPSTASH_REDIS_* / WORKER_JOBS_TTL_SECONDS for Redis
+ *
+ * Job record structure:
+ * {
+ *   jobId: string,
+ *   workerId: string,
+ *   status: 'queued' | 'running' | 'completed' | 'failed',
+ *   input: any,
+ *   output?: any,
+ *   error?: { message: string, stack?: string },
+ *   metadata?: Record<string, any>,
+ *   createdAt: string,
+ *   updatedAt: string,
+ *   completedAt?: string
+ * }
+ */
+
+export interface InternalJobEntry {
+  jobId: string;
+  workerId: string;
+}
+
+export interface JobRecord {
+  jobId: string;
+  workerId: string;
+  status: 'queued' | 'running' | 'completed' | 'failed';
+  input: any;
+  output?: any;
+  error?: {
+    message: string;
+    stack?: string;
+  };
+  metadata?: Record<string, any>;
+  internalJobs?: InternalJobEntry[];
+  createdAt: string;
+  updatedAt: string;
+  completedAt?: string;
+}
+
+// Storage adapter interface
+interface JobStoreAdapter {
+  setJob(jobId: string, data: Partial<JobRecord>): Promise<void>;
+  getJob(jobId: string): Promise<JobRecord | null>;
+  updateJob(jobId: string, data: Partial<JobRecord>): Promise<void>;
+  appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void>;
+  listJobsByWorker(workerId: string): Promise<JobRecord[]>;
+}
+
+// Job store can use MongoDB or Upstash Redis (workers run on Lambda; no in-memory fallback).
+function getStorageAdapter(): JobStoreAdapter {
+  try {
+    // Prefer workflowSettings.jobStore.type from microfox.config.ts; env fallback: WORKER_DATABASE_TYPE
+    let jobStoreType: string | undefined;
+    try {
+      const config = require('@/microfox.config').StudioConfig as {
+        workflowSettings?: { jobStore?: { type?: string } };
+      };
+      jobStoreType = config?.workflowSettings?.jobStore?.type;
+    } catch {
+      // Config missing or not resolvable; fall back to env
+    }
+    jobStoreType = jobStoreType || process.env.WORKER_DATABASE_TYPE || 'upstash-redis';
+    const normalized = jobStoreType.toLowerCase();
+
+    if (normalized === 'upstash-redis' || normalized === 'redis') {
+      const { redisJobStore } = require('./redisAdapter');
+      console.log('[JobStore] Ready (Upstash Redis)');
+      return redisJobStore;
+    }
+
+    const { mongoJobStore } = require('./mongoAdapter');
+    console.log('[JobStore] Ready (MongoDB)');
+    return mongoJobStore;
+  } catch (error: any) {
+    const msg = error?.message || String(error);
+    console.error('[JobStore] Job store adapter required (workers run on Lambda).', { error: msg });
+    throw new Error(
+      'Job store requires a persistent backend. Set workflowSettings.jobStore.type or WORKER_DATABASE_TYPE to "mongodb" or "upstash-redis", and set the corresponding connection settings. ' +
+      \`Details: \${msg}\`
+    );
+  }
+}
+
+// Lazy-loaded storage adapter
+let storageAdapter: JobStoreAdapter | null = null;
+function getAdapter(): JobStoreAdapter {
+  if (!storageAdapter) {
+    storageAdapter = getStorageAdapter();
+  }
+  return storageAdapter;
+}
+
+/**
+ * Store a job record.
+ */
+export async function setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+  try {
+    const adapter = getAdapter();
+    await adapter.setJob(jobId, data);
+  } catch (error: any) {
+    console.error('[JobStore] Error setting job:', {
+      jobId,
+      error: error?.message || String(error),
+      stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+    });
+    throw error;
+  }
+}
+
+/**
+ * Get a job record.
+ */
+export async function getJob(jobId: string): Promise<JobRecord | null> {
+  try {
+    const adapter = getAdapter();
+    return await adapter.getJob(jobId);
+  } catch (error: any) {
+    console.error('[JobStore] Error getting job:', {
+      jobId,
+      error: error?.message || String(error),
+      stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+    });
+    throw error;
+  }
+}
+
+/**
+ * Update a job record.
+ */
+export async function updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+  try {
+    const adapter = getAdapter();
+    await adapter.updateJob(jobId, data);
+  } catch (error: any) {
+    console.error('[JobStore] Error updating job:', {
+      jobId,
+      updates: Object.keys(data),
+      error: error?.message || String(error),
+      stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+    });
+    throw error;
+  }
+}
+
+/**
+ * Append an internal (child) job to a parent job's internalJobs list.
+ * Used when a worker dispatches another worker (ctx.dispatchWorker).
+ */
+export async function appendInternalJob(
+  parentJobId: string,
+  entry: InternalJobEntry
+): Promise<void> {
+  try {
+    const adapter = getAdapter();
+    await adapter.appendInternalJob(parentJobId, entry);
+  } catch (error: any) {
+    console.error('[JobStore] Error appending internal job:', {
+      parentJobId,
+      entry,
+      error: error?.message || String(error),
+      stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+    });
+    throw error;
+  }
+}
+
+/**
+ * List jobs by worker ID.
+ */
+export async function listJobsByWorker(workerId: string): Promise<JobRecord[]> {
+  try {
+    const adapter = getAdapter();
+    return await adapter.listJobsByWorker(workerId);
+  } catch (error: any) {
+    console.error('[JobStore] Error listing jobs by worker:', {
+      workerId,
+      error: error?.message || String(error),
+      stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
+    });
+    throw error;
+  }
+}
+`,
+  "stores/mongoAdapter.ts": `/**
+ * MongoDB adapter for job store.
+ *
+ * Provides persistent storage for worker job state using MongoDB.
+ *
+ * Configuration (from microfox.config.ts or env vars):
+ * - workflowSettings.jobStore.mongodb.uri or DATABASE_MONGODB_URI/MONGODB_URI: MongoDB connection string
+ * - workflowSettings.jobStore.mongodb.db or DATABASE_MONGODB_DB/MONGODB_DB: Database name (default: 'ai_router')
+ *
+ * Collection name: config -> workflowSettings.jobStore.mongodb.workerJobsCollection
+ * (default: 'worker_jobs'). Env: MONGODB_WORKER_JOBS_COLLECTION then DATABASE_MONGODB_WORKER_JOBS_COLLECTION.
+ */
+
+import { MongoClient, type Db, type Collection } from 'mongodb';
+import type { JobRecord, InternalJobEntry } from './jobStore';
+
+declare global {
+  // eslint-disable-next-line no-var
+  var __workflowMongoClientPromise: Promise<MongoClient> | undefined;
+}
+
+function getMongoUri(): string {
+  // Try to get from config first, fallback to env vars
+  let uri: string | undefined;
+  try {
+    const config = require('@/microfox.config').StudioConfig as {
+      workflowSettings?: { jobStore?: { mongodb?: { uri?: string } } };
+    };
+    uri = config?.workflowSettings?.jobStore?.mongodb?.uri;
+  } catch (error) {
+    // Config not available, use env vars
+  }
+
+  uri = uri || process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI;
+
+  if (!uri) {
+    throw new Error(
+      'Missing MongoDB connection string. Set workflowSettings.jobStore.mongodb.uri in microfox.config.ts or environment variable DATABASE_MONGODB_URI or MONGODB_URI.'
+    );
+  }
+  return uri;
+}
+
+function getMongoDbName(): string {
+  // Try to get from config first, fallback to env vars
+  let dbName: string | undefined;
+  try {
+    const config = require('@/microfox.config').StudioConfig as {
+      workflowSettings?: { jobStore?: { mongodb?: { db?: string } } };
+    };
+    dbName = config?.workflowSettings?.jobStore?.mongodb?.db;
+  } catch (error) {
+    // Config not available, use env vars
+  }
+
+  return dbName || process.env.DATABASE_MONGODB_DB || process.env.MONGODB_DB || 'ai_router';
+}
+
+function getWorkerJobsCollection(): string {
+  let collection: string | undefined;
+  try {
+    const config = require('@/microfox.config').StudioConfig as {
+      workflowSettings?: { jobStore?: { mongodb?: { workerJobsCollection?: string } } };
+    };
+    collection = config?.workflowSettings?.jobStore?.mongodb?.workerJobsCollection;
+  } catch {
+    // Config not available
+  }
+  return (
+    collection ||
+    process.env.MONGODB_WORKER_JOBS_COLLECTION ||
+    process.env.DATABASE_MONGODB_WORKER_JOBS_COLLECTION ||
+    'worker_jobs'
+  );
+}
+
+async function getMongoClient(): Promise<MongoClient> {
+  const uri = getMongoUri();
+
+  // Reuse a single client across hot reloads / lambda invocations when possible.
+  if (!globalThis.__workflowMongoClientPromise) {
+    const client = new MongoClient(uri, {
+      // Keep defaults conservative; works on both local dev and Lambda.
+      maxPoolSize: 10,
+      minPoolSize: 0,
+      serverSelectionTimeoutMS: 10_000,
+    });
+    globalThis.__workflowMongoClientPromise = client.connect();
+  }
+
+  return globalThis.__workflowMongoClientPromise;
+}
+
+async function getMongoDb(): Promise<Db> {
+  const client = await getMongoClient();
+  return client.db(getMongoDbName());
+}
+
+/** Export for queue job store (shared MongoDB connection). */
+export async function getWorkflowDb(): Promise<Db> {
+  return getMongoDb();
+}
+
+async function getCollection(): Promise<Collection<JobRecord & { _id: string }>> {
+  const db = await getMongoDb();
+  return db.collection<JobRecord & { _id: string }>(getWorkerJobsCollection());
+}
+
+/**
+ * MongoDB storage adapter for job store.
+ */
+export const mongoJobStore = {
+  async setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+    const now = new Date().toISOString();
+    const collection = await getCollection();
+
+    const existing = await collection.findOne({ _id: jobId });
+
+    const record: JobRecord = {
+      jobId,
+      workerId: data.workerId || existing?.workerId || '',
+      status: data.status || existing?.status || 'queued',
+      input: data.input !== undefined ? data.input : existing?.input || {},
+      output: data.output !== undefined ? data.output : existing?.output,
+      error: data.error !== undefined ? data.error : existing?.error,
+      metadata: { ...existing?.metadata, ...data.metadata },
+      createdAt: existing?.createdAt || now,
+      updatedAt: now,
+      completedAt: data.completedAt || existing?.completedAt,
+    };
+
+    // Set completedAt if status changed to completed/failed
+    if (data.status && ['completed', 'failed'].includes(data.status) && !record.completedAt) {
+      record.completedAt = now;
+    }
+
+    await collection.updateOne(
+      { _id: jobId },
+      {
+        $set: {
+          ...record,
+          _id: jobId,
+        },
+      },
+      { upsert: true }
+    );
+  },
+
+  async getJob(jobId: string): Promise<JobRecord | null> {
+    const collection = await getCollection();
+    const doc = await collection.findOne({ _id: jobId });
+
+    if (!doc) {
+      return null;
+    }
+
+    // Convert MongoDB document to JobRecord (remove _id, use jobId)
+    const { _id, ...record } = doc;
+    return record as JobRecord;
+  },
+
+  async updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
+    const collection = await getCollection();
+    const existing = await collection.findOne({ _id: jobId });
+
+    if (!existing) {
+      throw new Error(\`Job \${jobId} not found\`);
+    }
+
+    const now = new Date().toISOString();
+    const update: any = {
+      $set: {
+        updatedAt: now,
+      },
+    };
+
+    if (data.status !== undefined) {
+      update.$set.status = data.status;
+      if (['completed', 'failed'].includes(data.status) && !existing.completedAt) {
+        update.$set.completedAt = now;
+      }
+    }
+    if (data.output !== undefined) {
+      update.$set.output = data.output;
+    }
+    if (data.error !== undefined) {
|
|
2397
|
+
update.$set.error = data.error;
|
|
2398
|
+
}
|
|
2399
|
+
if (data.metadata !== undefined) {
|
|
2400
|
+
update.$set.metadata = { ...existing.metadata, ...data.metadata };
|
|
2401
|
+
}
|
|
2402
|
+
|
|
2403
|
+
await collection.updateOne({ _id: jobId }, update);
|
|
2404
|
+
},
|
|
2405
|
+
|
|
2406
|
+
async appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void> {
|
|
2407
|
+
const collection = await getCollection();
|
|
2408
|
+
const now = new Date().toISOString();
|
|
2409
|
+
await collection.updateOne(
|
|
2410
|
+
{ _id: parentJobId },
|
|
2411
|
+
{
|
|
2412
|
+
$push: { internalJobs: entry },
|
|
2413
|
+
$set: { updatedAt: now },
|
|
2414
|
+
}
|
|
2415
|
+
);
|
|
2416
|
+
},
|
|
2417
|
+
|
|
2418
|
+
async listJobsByWorker(workerId: string): Promise<JobRecord[]> {
|
|
2419
|
+
const collection = await getCollection();
|
|
2420
|
+
const docs = await collection
|
|
2421
|
+
.find({ workerId })
|
|
2422
|
+
.sort({ createdAt: -1 })
|
|
2423
|
+
.toArray();
|
|
2424
|
+
|
|
2425
|
+
return docs.map((doc) => {
|
|
2426
|
+
const { _id, ...record } = doc;
|
|
2427
|
+
return record as JobRecord;
|
|
2428
|
+
});
|
|
2429
|
+
},
|
|
2430
|
+
};
|
|
2431
|
+
`,
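
The mongoAdapter template above exposes mongoJobStore (setJob / getJob / updateJob / appendInternalJob / listJobsByWorker) plus getWorkflowDb for sharing the connection with the queue store. A minimal usage sketch, assuming the generated stores/ files sit together under the workflows API directory and that MONGODB_URI (or the config equivalent) is set; the worker id and payload here are made up:

import { mongoJobStore } from './stores/mongoAdapter';

async function trackJob(): Promise<void> {
  const jobId = `job-${Date.now()}`;

  // setJob upserts; unspecified fields fall back to 'queued' status and an empty input.
  await mongoJobStore.setJob(jobId, {
    workerId: 'send-report', // hypothetical worker id
    status: 'queued',
    input: { recipient: 'ops@example.com' },
  });

  // updateJob merges metadata and stamps completedAt once status becomes completed/failed.
  await mongoJobStore.updateJob(jobId, { status: 'completed', output: { delivered: true } });

  const job = await mongoJobStore.getJob(jobId);
  console.log(job?.status, job?.completedAt);
}

trackJob().catch(console.error);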
|
|
2432
|
+
"stores/redisAdapter.ts": `/**
|
|
2433
|
+
* Upstash Redis adapter for workflow/worker job store.
|
|
2434
|
+
*
|
|
2435
|
+
* Uses a hash-per-job model with key-level TTL for fast lookups by jobId.
|
|
2436
|
+
*
|
|
2437
|
+
* Configuration (from microfox.config.ts or env vars):
|
|
2438
|
+
* - workflowSettings.jobStore.redis; env: WORKER_UPSTASH_REDIS_REST_URL, WORKER_UPSTASH_REDIS_REST_TOKEN,
|
|
2439
|
+
* WORKER_UPSTASH_REDIS_JOBS_PREFIX (default: worker:jobs:), WORKER_JOBS_TTL_SECONDS
|
|
2440
|
+
*/
|
|
2441
|
+
|
|
2442
|
+
import { Redis } from '@upstash/redis';
|
|
2443
|
+
import type { JobRecord, InternalJobEntry } from './jobStore';
|
|
2444
|
+
|
|
2445
|
+
let redisClient: Redis | null = null;
|
|
2446
|
+
let redisUrl: string | undefined;
|
|
2447
|
+
let redisToken: string | undefined;
|
|
2448
|
+
let jobKeyPrefix: string = 'worker:jobs:';
|
|
2449
|
+
const defaultTtlSeconds = 60 * 60 * 24 * 7; // 7 days
|
|
2450
|
+
|
|
2451
|
+
function loadConfig() {
|
|
2452
|
+
try {
|
|
2453
|
+
// Prefer config from microfox.config.ts if present
|
|
2454
|
+
const config = require('@/microfox.config').StudioConfig as {
|
|
2455
|
+
workflowSettings?: {
|
|
2456
|
+
jobStore?: {
|
|
2457
|
+
redis?: {
|
|
2458
|
+
url?: string;
|
|
2459
|
+
token?: string;
|
|
2460
|
+
keyPrefix?: string;
|
|
2461
|
+
ttlSeconds?: number;
|
|
2462
|
+
};
|
|
2463
|
+
};
|
|
2464
|
+
};
|
|
2465
|
+
};
|
|
2466
|
+
const redisCfg = config?.workflowSettings?.jobStore?.redis;
|
|
2467
|
+
redisUrl = redisCfg?.url || redisUrl;
|
|
2468
|
+
redisToken = redisCfg?.token || redisToken;
|
|
2469
|
+
if (redisCfg?.keyPrefix) {
|
|
2470
|
+
jobKeyPrefix = redisCfg.keyPrefix;
|
|
2471
|
+
}
|
|
2472
|
+
} catch {
|
|
2473
|
+
// Config optional; fall back to env vars
|
|
2474
|
+
}
|
|
2475
|
+
|
|
2476
|
+
redisUrl =
|
|
2477
|
+
redisUrl ||
|
|
2478
|
+
process.env.WORKER_UPSTASH_REDIS_REST_URL ||
|
|
2479
|
+
process.env.UPSTASH_REDIS_REST_URL ||
|
|
2480
|
+
process.env.UPSTASH_REDIS_URL;
|
|
2481
|
+
redisToken =
|
|
2482
|
+
redisToken ||
|
|
2483
|
+
process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||
|
|
2484
|
+
process.env.UPSTASH_REDIS_REST_TOKEN ||
|
|
2485
|
+
process.env.UPSTASH_REDIS_TOKEN;
|
|
2486
|
+
jobKeyPrefix =
|
|
2487
|
+
jobKeyPrefix ||
|
|
2488
|
+
process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX ||
|
|
2489
|
+
process.env.UPSTASH_REDIS_KEY_PREFIX ||
|
|
2490
|
+
'worker:jobs:';
|
|
2491
|
+
}
|
|
2492
|
+
|
|
2493
|
+
function getRedis(): Redis {
|
|
2494
|
+
if (!redisClient) {
|
|
2495
|
+
loadConfig();
|
|
2496
|
+
if (!redisUrl || !redisToken) {
|
|
2497
|
+
throw new Error(
|
|
2498
|
+
'Missing Upstash Redis configuration. Set workflowSettings.jobStore.redis in microfox.config.ts or WORKER_UPSTASH_REDIS_REST_URL / WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL / UPSTASH_REDIS_REST_TOKEN).'
|
|
2499
|
+
);
|
|
2500
|
+
}
|
|
2501
|
+
redisClient = new Redis({
|
|
2502
|
+
url: redisUrl,
|
|
2503
|
+
token: redisToken,
|
|
2504
|
+
});
|
|
2505
|
+
}
|
|
2506
|
+
return redisClient;
|
|
2507
|
+
}
|
|
2508
|
+
|
|
2509
|
+
function jobKey(jobId: string): string {
|
|
2510
|
+
return \`\${jobKeyPrefix}\${jobId}\`;
|
|
2511
|
+
}
|
|
2512
|
+
|
|
2513
|
+
/** Separate LIST key for internal job refs; each RPUSH is atomic so no race when appending multiple. */
|
|
2514
|
+
function internalListKey(jobId: string): string {
|
|
2515
|
+
return \`\${jobKeyPrefix}\${jobId}:internal\`;
|
|
2516
|
+
}
|
|
2517
|
+
|
|
2518
|
+
function workerIndexKey(workerId: string): string {
|
|
2519
|
+
// Secondary index: worker -> set of jobIds
|
|
2520
|
+
return \`\${jobKeyPrefix}by-worker:\${workerId}\`;
|
|
2521
|
+
}
|
|
2522
|
+
|
|
2523
|
+
function getJobTtlSeconds(): number {
|
|
2524
|
+
const raw =
|
|
2525
|
+
process.env.WORKER_JOBS_TTL_SECONDS || process.env.WORKFLOW_JOBS_TTL_SECONDS;
|
|
2526
|
+
if (!raw) return defaultTtlSeconds;
|
|
2527
|
+
const n = parseInt(raw, 10);
|
|
2528
|
+
return Number.isFinite(n) && n > 0 ? n : defaultTtlSeconds;
|
|
2529
|
+
}
|
|
2530
|
+
|
|
2531
|
+
async function loadJob(jobId: string): Promise<JobRecord | null> {
|
|
2532
|
+
const redis = getRedis();
|
|
2533
|
+
const key = jobKey(jobId);
|
|
2534
|
+
const data = await redis.hgetall<Record<string, string>>(key);
|
|
2535
|
+
if (!data || Object.keys(data).length === 0) return null;
|
|
2536
|
+
|
|
2537
|
+
const parseJson = <T>(val?: string | null): T | undefined => {
|
|
2538
|
+
if (!val) return undefined;
|
|
2539
|
+
try {
|
|
2540
|
+
return JSON.parse(val) as T;
|
|
2541
|
+
} catch {
|
|
2542
|
+
return undefined;
|
|
2543
|
+
}
|
|
2544
|
+
};
|
|
2545
|
+
|
|
2546
|
+
// Prefer atomic list key for internal jobs; fallback to hash field for old records
|
|
2547
|
+
const listKey = internalListKey(jobId);
|
|
2548
|
+
const listItems = (await redis.lrange(listKey, 0, -1)) ?? [];
|
|
2549
|
+
let internalJobs: InternalJobEntry[] | undefined;
|
|
2550
|
+
if (listItems.length > 0) {
|
|
2551
|
+
internalJobs = listItems
|
|
2552
|
+
.map((s) => {
|
|
2553
|
+
try {
|
|
2554
|
+
return JSON.parse(s) as InternalJobEntry;
|
|
2555
|
+
} catch {
|
|
2556
|
+
return null;
|
|
2557
|
+
}
|
|
2558
|
+
})
|
|
2559
|
+
.filter((e): e is InternalJobEntry => e != null);
|
|
2560
|
+
} else {
|
|
2561
|
+
internalJobs = parseJson<InternalJobEntry[]>(data.internalJobs);
|
|
2562
|
+
}
|
|
2563
|
+
|
|
2564
|
+
const record: JobRecord = {
|
|
2565
|
+
jobId: data.jobId,
|
|
2566
|
+
workerId: data.workerId,
|
|
2567
|
+
status: (data.status as JobRecord['status']) || 'queued',
|
|
2568
|
+
input: parseJson<any>(data.input) ?? {},
|
|
2569
|
+
output: parseJson<any>(data.output),
|
|
2570
|
+
error: parseJson<any>(data.error),
|
|
2571
|
+
metadata: parseJson<Record<string, any>>(data.metadata) ?? {},
|
|
2572
|
+
internalJobs,
|
|
2573
|
+
createdAt: data.createdAt,
|
|
2574
|
+
updatedAt: data.updatedAt,
|
|
2575
|
+
completedAt: data.completedAt,
|
|
2576
|
+
};
|
|
2577
|
+
|
|
2578
|
+
return record;
|
|
2579
|
+
}
|
|
2580
|
+
|
|
2581
|
+
export const redisJobStore = {
|
|
2582
|
+
async setJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
|
|
2583
|
+
const redis = getRedis();
|
|
2584
|
+
const key = jobKey(jobId);
|
|
2585
|
+
const now = new Date().toISOString();
|
|
2586
|
+
|
|
2587
|
+
const existing = await loadJob(jobId);
|
|
2588
|
+
|
|
2589
|
+
const record: JobRecord = {
|
|
2590
|
+
jobId,
|
|
2591
|
+
workerId: data.workerId || existing?.workerId || '',
|
|
2592
|
+
status: data.status || existing?.status || 'queued',
|
|
2593
|
+
input: data.input !== undefined ? data.input : existing?.input || {},
|
|
2594
|
+
output: data.output !== undefined ? data.output : existing?.output,
|
|
2595
|
+
error: data.error !== undefined ? data.error : existing?.error,
|
|
2596
|
+
metadata: { ...(existing?.metadata || {}), ...(data.metadata || {}) },
|
|
2597
|
+
internalJobs: existing?.internalJobs,
|
|
2598
|
+
createdAt: existing?.createdAt || now,
|
|
2599
|
+
updatedAt: now,
|
|
2600
|
+
completedAt: data.completedAt || existing?.completedAt,
|
|
2601
|
+
};
|
|
2602
|
+
|
|
2603
|
+
if (data.status && ['completed', 'failed'].includes(data.status) && !record.completedAt) {
|
|
2604
|
+
record.completedAt = now;
|
|
2605
|
+
}
|
|
2606
|
+
|
|
2607
|
+
const toSet: Record<string, string> = {
|
|
2608
|
+
jobId: record.jobId,
|
|
2609
|
+
workerId: record.workerId,
|
|
2610
|
+
status: record.status,
|
|
2611
|
+
input: JSON.stringify(record.input ?? {}),
|
|
2612
|
+
metadata: JSON.stringify(record.metadata ?? {}),
|
|
2613
|
+
createdAt: record.createdAt,
|
|
2614
|
+
updatedAt: record.updatedAt,
|
|
2615
|
+
};
|
|
2616
|
+
if (record.output !== undefined) {
|
|
2617
|
+
toSet.output = JSON.stringify(record.output);
|
|
2618
|
+
}
|
|
2619
|
+
if (record.error !== undefined) {
|
|
2620
|
+
toSet.error = JSON.stringify(record.error);
|
|
2621
|
+
}
|
|
2622
|
+
if (record.internalJobs) {
|
|
2623
|
+
toSet.internalJobs = JSON.stringify(record.internalJobs);
|
|
2624
|
+
}
|
|
2625
|
+
if (record.completedAt) {
|
|
2626
|
+
toSet.completedAt = record.completedAt;
|
|
2627
|
+
}
|
|
2628
|
+
|
|
2629
|
+
await redis.hset(key, toSet);
|
|
2630
|
+
const ttl = getJobTtlSeconds();
|
|
2631
|
+
if (ttl > 0) {
|
|
2632
|
+
await redis.expire(key, ttl);
|
|
2633
|
+
}
|
|
2634
|
+
|
|
2635
|
+
// Maintain secondary index per worker
|
|
2636
|
+
if (record.workerId) {
|
|
2637
|
+
await redis.sadd(workerIndexKey(record.workerId), jobId);
|
|
2638
|
+
}
|
|
2639
|
+
},
|
|
2640
|
+
|
|
2641
|
+
async getJob(jobId: string): Promise<JobRecord | null> {
|
|
2642
|
+
return loadJob(jobId);
|
|
2643
|
+
},
|
|
2644
|
+
|
|
2645
|
+
async updateJob(jobId: string, data: Partial<JobRecord>): Promise<void> {
|
|
2646
|
+
const redis = getRedis();
|
|
2647
|
+
const key = jobKey(jobId);
|
|
2648
|
+
const existing = await loadJob(jobId);
|
|
2649
|
+
if (!existing) {
|
|
2650
|
+
throw new Error(\`Job \${jobId} not found\`);
|
|
2651
|
+
}
|
|
2652
|
+
|
|
2653
|
+
const now = new Date().toISOString();
|
|
2654
|
+
const update: Partial<JobRecord> = {
|
|
2655
|
+
updatedAt: now,
|
|
2656
|
+
};
|
|
2657
|
+
|
|
2658
|
+
if (data.status !== undefined) {
|
|
2659
|
+
update.status = data.status;
|
|
2660
|
+
if (['completed', 'failed'].includes(data.status) && !existing.completedAt) {
|
|
2661
|
+
update.completedAt = now;
|
|
2662
|
+
}
|
|
2663
|
+
}
|
|
2664
|
+
if (data.output !== undefined) {
|
|
2665
|
+
update.output = data.output;
|
|
2666
|
+
}
|
|
2667
|
+
if (data.error !== undefined) {
|
|
2668
|
+
update.error = data.error;
|
|
2669
|
+
}
|
|
2670
|
+
if (data.metadata !== undefined) {
|
|
2671
|
+
update.metadata = { ...(existing.metadata || {}), ...data.metadata };
|
|
2672
|
+
}
|
|
2673
|
+
|
|
2674
|
+
const toSet: Record<string, string> = {
|
|
2675
|
+
updatedAt: now,
|
|
2676
|
+
};
|
|
2677
|
+
if (update.status !== undefined) {
|
|
2678
|
+
toSet.status = update.status;
|
|
2679
|
+
}
|
|
2680
|
+
if (update.output !== undefined) {
|
|
2681
|
+
toSet.output = JSON.stringify(update.output);
|
|
2682
|
+
}
|
|
2683
|
+
if (update.error !== undefined) {
|
|
2684
|
+
toSet.error = JSON.stringify(update.error);
|
|
2685
|
+
}
|
|
2686
|
+
if (update.metadata !== undefined) {
|
|
2687
|
+
toSet.metadata = JSON.stringify(update.metadata);
|
|
2688
|
+
}
|
|
2689
|
+
if (update.completedAt) {
|
|
2690
|
+
toSet.completedAt = update.completedAt;
|
|
2691
|
+
}
|
|
2692
|
+
|
|
2693
|
+
await redis.hset(key, toSet);
|
|
2694
|
+
const ttl = getJobTtlSeconds();
|
|
2695
|
+
if (ttl > 0) {
|
|
2696
|
+
await redis.expire(key, ttl);
|
|
2697
|
+
}
|
|
2698
|
+
},
|
|
2699
|
+
|
|
2700
|
+
async appendInternalJob(parentJobId: string, entry: InternalJobEntry): Promise<void> {
|
|
2701
|
+
const redis = getRedis();
|
|
2702
|
+
const listKey = internalListKey(parentJobId);
|
|
2703
|
+
await redis.rpush(listKey, JSON.stringify(entry));
|
|
2704
|
+
const mainKey = jobKey(parentJobId);
|
|
2705
|
+
await redis.hset(mainKey, { updatedAt: new Date().toISOString() });
|
|
2706
|
+
const ttl = getJobTtlSeconds();
|
|
2707
|
+
if (ttl > 0) {
|
|
2708
|
+
await redis.expire(listKey, ttl);
|
|
2709
|
+
await redis.expire(mainKey, ttl);
|
|
2710
|
+
}
|
|
2711
|
+
},
|
|
2712
|
+
|
|
2713
|
+
async listJobsByWorker(workerId: string): Promise<JobRecord[]> {
|
|
2714
|
+
const redis = getRedis();
|
|
2715
|
+
const indexKey = workerIndexKey(workerId);
|
|
2716
|
+
const jobIds = (await redis.smembers(indexKey)) ?? [];
|
|
2717
|
+
const jobs: JobRecord[] = [];
|
|
2718
|
+
for (const jobId of jobIds) {
|
|
2719
|
+
const job = await loadJob(jobId);
|
|
2720
|
+
if (job) {
|
|
2721
|
+
jobs.push(job);
|
|
2722
|
+
}
|
|
2723
|
+
}
|
|
2724
|
+
// Most recent first
|
|
2725
|
+
jobs.sort((a, b) => b.createdAt.localeCompare(a.createdAt));
|
|
2726
|
+
return jobs;
|
|
2727
|
+
},
|
|
2728
|
+
};
|
|
2729
|
+
`,
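
For the Redis adapter, each job lives in a hash keyed by the configured prefix, internal job entries go to a separate list so appends stay atomic, and a per-worker set backs listJobsByWorker; key TTL comes from WORKER_JOBS_TTL_SECONDS (7 days by default). A small sketch of the resulting key layout and store calls, assuming Upstash credentials are configured and using made-up ids:

// Key layout produced by redisJobStore (prefix 'worker:jobs:' is the default):
//   worker:jobs:<jobId>           hash: jobId, workerId, status, input, metadata, timestamps...
//   worker:jobs:<jobId>:internal  list of JSON-encoded InternalJobEntry items
//   worker:jobs:by-worker:<id>    set of jobIds used by listJobsByWorker()
import { redisJobStore } from './stores/redisAdapter';

async function example(): Promise<void> {
  await redisJobStore.setJob('job-123', {
    workerId: 'scrape-site', // hypothetical worker id
    status: 'running',
    input: { url: 'https://example.com' },
  });

  await redisJobStore.updateJob('job-123', { status: 'completed', output: { pages: 3 } });

  // All jobs dispatched for this worker, most recent first.
  console.log(await redisJobStore.listJobsByWorker('scrape-site'));
}

example().catch(console.error);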
|
|
2730
|
+
"stores/queueJobStore.ts": `/**
|
|
2731
|
+
* Queue job store for tracking multi-step queue execution.
|
|
2732
|
+
*
|
|
2733
|
+
* Stores a single record per queue run with steps array containing:
|
|
2734
|
+
* - workerId, workerJobId (worker_job id), status, input, output, startedAt, completedAt, error
|
|
2735
|
+
*
|
|
2736
|
+
* Uses MongoDB or Upstash Redis (same backend as worker_jobs), based on WORKER_DATABASE_TYPE.
|
|
2737
|
+
* Collection/key prefix: queue_jobs / worker:queue-jobs:
|
|
2738
|
+
*/
|
|
2739
|
+
|
|
2740
|
+
import type { Collection } from 'mongodb';
|
|
2741
|
+
import { Redis } from '@upstash/redis';
|
|
2742
|
+
import { getWorkflowDb } from './mongoAdapter';
|
|
2743
|
+
|
|
2744
|
+
export interface QueueJobStep {
|
|
2745
|
+
workerId: string;
|
|
2746
|
+
workerJobId: string;
|
|
2747
|
+
status: 'queued' | 'running' | 'completed' | 'failed';
|
|
2748
|
+
input?: unknown;
|
|
2749
|
+
output?: unknown;
|
|
2750
|
+
error?: { message: string };
|
|
2751
|
+
startedAt?: string;
|
|
2752
|
+
completedAt?: string;
|
|
2753
|
+
}
|
|
2754
|
+
|
|
2755
|
+
export interface QueueJobRecord {
|
|
2756
|
+
id: string;
|
|
2757
|
+
queueId: string;
|
|
2758
|
+
status: 'running' | 'completed' | 'failed' | 'partial';
|
|
2759
|
+
steps: QueueJobStep[];
|
|
2760
|
+
metadata?: Record<string, unknown>;
|
|
2761
|
+
createdAt: string;
|
|
2762
|
+
updatedAt: string;
|
|
2763
|
+
completedAt?: string;
|
|
2764
|
+
}
|
|
2765
|
+
|
|
2766
|
+
// === Backend selection ===
|
|
2767
|
+
|
|
2768
|
+
function getStoreType(): 'mongodb' | 'upstash-redis' {
|
|
2769
|
+
const t = (process.env.WORKER_DATABASE_TYPE || 'upstash-redis').toLowerCase();
|
|
2770
|
+
return t === 'mongodb' ? 'mongodb' : 'upstash-redis';
|
|
2771
|
+
}
|
|
2772
|
+
|
|
2773
|
+
function preferMongo(): boolean {
|
|
2774
|
+
return getStoreType() === 'mongodb';
|
|
2775
|
+
}
|
|
2776
|
+
|
|
2777
|
+
function preferRedis(): boolean {
|
|
2778
|
+
return getStoreType() !== 'mongodb';
|
|
2779
|
+
}
|
|
2780
|
+
|
|
2781
|
+
// === MongoDB backend ===
|
|
2782
|
+
|
|
2783
|
+
function getQueueJobsCollectionName(): string {
|
|
2784
|
+
return process.env.MONGODB_QUEUE_JOBS_COLLECTION || 'queue_jobs';
|
|
2785
|
+
}
|
|
2786
|
+
|
|
2787
|
+
async function getCollection(): Promise<Collection<QueueJobRecord & { _id: string }>> {
|
|
2788
|
+
const db = await getWorkflowDb();
|
|
2789
|
+
return db.collection<QueueJobRecord & { _id: string }>(getQueueJobsCollectionName());
|
|
2790
|
+
}
|
|
2791
|
+
|
|
2792
|
+
// === Redis backend ===
|
|
2793
|
+
|
|
2794
|
+
const redisUrl =
|
|
2795
|
+
process.env.WORKER_UPSTASH_REDIS_REST_URL ||
|
|
2796
|
+
process.env.UPSTASH_REDIS_REST_URL ||
|
|
2797
|
+
process.env.UPSTASH_REDIS_URL;
|
|
2798
|
+
const redisToken =
|
|
2799
|
+
process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||
|
|
2800
|
+
process.env.UPSTASH_REDIS_REST_TOKEN ||
|
|
2801
|
+
process.env.UPSTASH_REDIS_TOKEN;
|
|
2802
|
+
const queueKeyPrefix =
|
|
2803
|
+
process.env.WORKER_UPSTASH_REDIS_QUEUE_PREFIX ||
|
|
2804
|
+
process.env.UPSTASH_REDIS_QUEUE_PREFIX ||
|
|
2805
|
+
'worker:queue-jobs:';
|
|
2806
|
+
|
|
2807
|
+
let redisClient: Redis | null = null;
|
|
2808
|
+
|
|
2809
|
+
function getRedis(): Redis {
|
|
2810
|
+
if (!redisUrl || !redisToken) {
|
|
2811
|
+
throw new Error(
|
|
2812
|
+
'Upstash Redis configuration missing for queue job store. Set WORKER_UPSTASH_REDIS_REST_URL and WORKER_UPSTASH_REDIS_REST_TOKEN (or UPSTASH_REDIS_REST_URL/UPSTASH_REDIS_REST_TOKEN).'
|
|
2813
|
+
);
|
|
2814
|
+
}
|
|
2815
|
+
if (!redisClient) {
|
|
2816
|
+
redisClient = new Redis({
|
|
2817
|
+
url: redisUrl,
|
|
2818
|
+
token: redisToken,
|
|
2819
|
+
});
|
|
2820
|
+
}
|
|
2821
|
+
return redisClient;
|
|
2822
|
+
}
|
|
2823
|
+
|
|
2824
|
+
function queueKey(id: string): string {
|
|
2825
|
+
return \`\${queueKeyPrefix}\${id}\`;
|
|
2826
|
+
}
|
|
2827
|
+
|
|
2828
|
+
/** Hash values from Upstash hgetall may be auto-parsed (array/object) or raw strings. */
|
|
2829
|
+
function stepsFromHash(val: unknown): QueueJobStep[] {
|
|
2830
|
+
if (Array.isArray(val)) return val as QueueJobStep[];
|
|
2831
|
+
if (typeof val === 'string') {
|
|
2832
|
+
try {
|
|
2833
|
+
const parsed = JSON.parse(val) as QueueJobStep[];
|
|
2834
|
+
return Array.isArray(parsed) ? parsed : [];
|
|
2835
|
+
} catch {
|
|
2836
|
+
return [];
|
|
2837
|
+
}
|
|
2838
|
+
}
|
|
2839
|
+
return [];
|
|
2840
|
+
}
|
|
2841
|
+
|
|
2842
|
+
function metadataFromHash(val: unknown): Record<string, unknown> {
|
|
2843
|
+
if (val && typeof val === 'object' && !Array.isArray(val)) return val as Record<string, unknown>;
|
|
2844
|
+
if (typeof val === 'string') {
|
|
2845
|
+
try {
|
|
2846
|
+
const parsed = JSON.parse(val) as Record<string, unknown>;
|
|
2847
|
+
return parsed && typeof parsed === 'object' ? parsed : {};
|
|
2848
|
+
} catch {
|
|
2849
|
+
return {};
|
|
2850
|
+
}
|
|
2851
|
+
}
|
|
2852
|
+
return {};
|
|
2853
|
+
}
|
|
2854
|
+
|
|
2855
|
+
async function loadQueueJobRedis(queueJobId: string): Promise<QueueJobRecord | null> {
|
|
2856
|
+
const redis = getRedis();
|
|
2857
|
+
const key = queueKey(queueJobId);
|
|
2858
|
+
const data = await redis.hgetall(key);
|
|
2859
|
+
if (!data || typeof data !== 'object' || Object.keys(data).length === 0) return null;
|
|
2860
|
+
const record: QueueJobRecord = {
|
|
2861
|
+
id: (data as Record<string, unknown>).id === undefined ? queueJobId : String((data as Record<string, unknown>).id),
|
|
2862
|
+
queueId: String((data as Record<string, unknown>).queueId ?? ''),
|
|
2863
|
+
status: (String((data as Record<string, unknown>).status ?? 'running') as QueueJobRecord['status']),
|
|
2864
|
+
steps: stepsFromHash((data as Record<string, unknown>).steps),
|
|
2865
|
+
metadata: metadataFromHash((data as Record<string, unknown>).metadata),
|
|
2866
|
+
createdAt: String((data as Record<string, unknown>).createdAt ?? new Date().toISOString()),
|
|
2867
|
+
updatedAt: String((data as Record<string, unknown>).updatedAt ?? new Date().toISOString()),
|
|
2868
|
+
completedAt: (data as Record<string, unknown>).completedAt != null ? String((data as Record<string, unknown>).completedAt) : undefined,
|
|
2869
|
+
};
|
|
2870
|
+
return record;
|
|
2871
|
+
}
|
|
2872
|
+
|
|
2873
|
+
export async function createQueueJob(
|
|
2874
|
+
id: string,
|
|
2875
|
+
queueId: string,
|
|
2876
|
+
firstStep: { workerId: string; workerJobId: string },
|
|
2877
|
+
metadata?: Record<string, unknown>
|
|
2878
|
+
): Promise<void> {
|
|
2879
|
+
const now = new Date().toISOString();
|
|
2880
|
+
const record: QueueJobRecord = {
|
|
2881
|
+
id,
|
|
2882
|
+
queueId,
|
|
2883
|
+
status: 'running',
|
|
2884
|
+
steps: [
|
|
2885
|
+
{
|
|
2886
|
+
workerId: firstStep.workerId,
|
|
2887
|
+
workerJobId: firstStep.workerJobId,
|
|
2888
|
+
status: 'queued',
|
|
2889
|
+
},
|
|
2890
|
+
],
|
|
2891
|
+
metadata: metadata ?? {},
|
|
2892
|
+
createdAt: now,
|
|
2893
|
+
updatedAt: now,
|
|
2894
|
+
};
|
|
2895
|
+
|
|
2896
|
+
if (preferRedis()) {
|
|
2897
|
+
const redis = getRedis();
|
|
2898
|
+
const key = queueKey(id);
|
|
2899
|
+
const toSet: Record<string, string> = {
|
|
2900
|
+
id: record.id,
|
|
2901
|
+
queueId: record.queueId,
|
|
2902
|
+
status: record.status,
|
|
2903
|
+
steps: JSON.stringify(record.steps),
|
|
2904
|
+
metadata: JSON.stringify(record.metadata || {}),
|
|
2905
|
+
createdAt: record.createdAt,
|
|
2906
|
+
updatedAt: record.updatedAt,
|
|
2907
|
+
};
|
|
2908
|
+
await redis.hset(key, toSet);
|
|
2909
|
+
const ttlSeconds =
|
|
2910
|
+
typeof process.env.WORKER_QUEUE_JOBS_TTL_SECONDS === 'string'
|
|
2911
|
+
? parseInt(process.env.WORKER_QUEUE_JOBS_TTL_SECONDS, 10) || 60 * 60 * 24 * 7
|
|
2912
|
+
: typeof process.env.WORKER_JOBS_TTL_SECONDS === 'string'
|
|
2913
|
+
? parseInt(process.env.WORKER_JOBS_TTL_SECONDS, 10) || 60 * 60 * 24 * 7
|
|
2914
|
+
: 60 * 60 * 24 * 7; // 7 days default
|
|
2915
|
+
if (ttlSeconds > 0) {
|
|
2916
|
+
await redis.expire(key, ttlSeconds);
|
|
2917
|
+
}
|
|
2918
|
+
return;
|
|
2919
|
+
}
|
|
2920
|
+
|
|
2921
|
+
const collection = await getCollection();
|
|
2922
|
+
await collection.updateOne(
|
|
2923
|
+
{ _id: id },
|
|
2924
|
+
{ $set: { ...record, _id: id } },
|
|
2925
|
+
{ upsert: true }
|
|
2926
|
+
);
|
|
2927
|
+
}
|
|
2928
|
+
|
|
2929
|
+
export async function updateQueueStep(
|
|
2930
|
+
queueJobId: string,
|
|
2931
|
+
stepIndex: number,
|
|
2932
|
+
update: {
|
|
2933
|
+
status?: 'queued' | 'running' | 'completed' | 'failed';
|
|
2934
|
+
input?: unknown;
|
|
2935
|
+
output?: unknown;
|
|
2936
|
+
error?: { message: string };
|
|
2937
|
+
startedAt?: string;
|
|
2938
|
+
completedAt?: string;
|
|
2939
|
+
}
|
|
2940
|
+
): Promise<void> {
|
|
2941
|
+
const collection = await getCollection();
|
|
2942
|
+
const now = new Date().toISOString();
|
|
2943
|
+
const setKey = \`steps.\${stepIndex}\`;
|
|
2944
|
+
const existing = await collection.findOne({ _id: queueJobId });
|
|
2945
|
+
if (!existing) {
|
|
2946
|
+
throw new Error(\`Queue job \${queueJobId} not found\`);
|
|
2947
|
+
}
|
|
2948
|
+
const step = existing.steps[stepIndex];
|
|
2949
|
+
if (!step) {
|
|
2950
|
+
throw new Error(\`Queue job \${queueJobId} has no step at index \${stepIndex}\`);
|
|
2951
|
+
}
|
|
2952
|
+
const mergedStep: QueueJobStep = {
|
|
2953
|
+
...step,
|
|
2954
|
+
...(update.status !== undefined && { status: update.status }),
|
|
2955
|
+
...(update.input !== undefined && { input: update.input }),
|
|
2956
|
+
...(update.output !== undefined && { output: update.output }),
|
|
2957
|
+
...(update.error !== undefined && { error: update.error }),
|
|
2958
|
+
startedAt: update.startedAt ?? (update.status === 'running' ? now : step.startedAt),
|
|
2959
|
+
completedAt:
|
|
2960
|
+
update.completedAt ??
|
|
2961
|
+
(['completed', 'failed'].includes(update.status ?? '') ? now : step.completedAt),
|
|
2962
|
+
};
|
|
2963
|
+
const updateDoc: any = {
|
|
2964
|
+
$set: {
|
|
2965
|
+
[setKey]: mergedStep,
|
|
2966
|
+
updatedAt: now,
|
|
2967
|
+
},
|
|
2968
|
+
};
|
|
2969
|
+
if (update.status === 'failed') {
|
|
2970
|
+
updateDoc.$set.status = 'failed';
|
|
2971
|
+
if (!existing.completedAt) updateDoc.$set.completedAt = now;
|
|
2972
|
+
} else if (update.status === 'completed' && stepIndex === existing.steps.length - 1) {
|
|
2973
|
+
updateDoc.$set.status = 'completed';
|
|
2974
|
+
if (!existing.completedAt) updateDoc.$set.completedAt = now;
|
|
2975
|
+
}
|
|
2976
|
+
await collection.updateOne({ _id: queueJobId }, updateDoc);
|
|
2977
|
+
}
|
|
2978
|
+
|
|
2979
|
+
export async function appendQueueStep(
|
|
2980
|
+
queueJobId: string,
|
|
2981
|
+
step: { workerId: string; workerJobId: string }
|
|
2982
|
+
): Promise<void> {
|
|
2983
|
+
const collection = await getCollection();
|
|
2984
|
+
const now = new Date().toISOString();
|
|
2985
|
+
await collection.updateOne(
|
|
2986
|
+
{ _id: queueJobId },
|
|
2987
|
+
{
|
|
2988
|
+
$push: {
|
|
2989
|
+
steps: {
|
|
2990
|
+
workerId: step.workerId,
|
|
2991
|
+
workerJobId: step.workerJobId,
|
|
2992
|
+
status: 'queued',
|
|
2993
|
+
},
|
|
2994
|
+
},
|
|
2995
|
+
$set: { updatedAt: now },
|
|
2996
|
+
}
|
|
2997
|
+
);
|
|
2998
|
+
}
|
|
2999
|
+
|
|
3000
|
+
/**
|
|
3001
|
+
* Update queue job overall status (e.g. from webhook when queue run completes).
|
|
3002
|
+
*/
|
|
3003
|
+
export async function updateQueueJob(
|
|
3004
|
+
queueJobId: string,
|
|
3005
|
+
update: { status?: QueueJobRecord['status']; completedAt?: string }
|
|
3006
|
+
): Promise<void> {
|
|
3007
|
+
const now = new Date().toISOString();
|
|
3008
|
+
if (preferRedis()) {
|
|
3009
|
+
const redis = getRedis();
|
|
3010
|
+
const key = queueKey(queueJobId);
|
|
3011
|
+
const existing = await loadQueueJobRedis(queueJobId);
|
|
3012
|
+
if (!existing) throw new Error(\`Queue job \${queueJobId} not found\`);
|
|
3013
|
+
const toSet: Record<string, string> = {
|
|
3014
|
+
status: update.status ?? existing.status,
|
|
3015
|
+
updatedAt: now,
|
|
3016
|
+
};
|
|
3017
|
+
if (update.completedAt !== undefined) toSet.completedAt = update.completedAt;
|
|
3018
|
+
await redis.hset(key, toSet);
|
|
3019
|
+
return;
|
|
3020
|
+
}
|
|
3021
|
+
const collection = await getCollection();
|
|
3022
|
+
const setDoc: Record<string, string> = { updatedAt: now };
|
|
3023
|
+
if (update.status !== undefined) setDoc.status = update.status;
|
|
3024
|
+
if (update.completedAt !== undefined) setDoc.completedAt = update.completedAt;
|
|
3025
|
+
await collection.updateOne({ _id: queueJobId }, { $set: setDoc });
|
|
3026
|
+
}
|
|
3027
|
+
|
|
3028
|
+
export async function getQueueJob(queueJobId: string): Promise<QueueJobRecord | null> {
|
|
3029
|
+
if (preferRedis()) {
|
|
3030
|
+
return loadQueueJobRedis(queueJobId);
|
|
3031
|
+
}
|
|
3032
|
+
const collection = await getCollection();
|
|
3033
|
+
const doc = await collection.findOne({ _id: queueJobId });
|
|
3034
|
+
if (!doc) return null;
|
|
3035
|
+
const { _id, ...record } = doc;
|
|
3036
|
+
return { ...record, id: _id };
|
|
3037
|
+
}
|
|
3038
|
+
|
|
3039
|
+
export async function listQueueJobs(
|
|
3040
|
+
queueId?: string,
|
|
3041
|
+
limit = 50
|
|
3042
|
+
): Promise<QueueJobRecord[]> {
|
|
3043
|
+
if (preferRedis()) {
|
|
3044
|
+
// Redis: scan for keys matching prefix, then load each
|
|
3045
|
+
// Note: This is less efficient than MongoDB queries, but acceptable for small datasets
|
|
3046
|
+
const redis = getRedis();
|
|
3047
|
+
const pattern = queueKey('*');
|
|
3048
|
+
const keys: string[] = [];
|
|
3049
|
+
let cursor: number = 0;
|
|
3050
|
+
do {
|
|
3051
|
+
const result = await redis.scan(cursor, { match: pattern, count: 100 });
|
|
3052
|
+
cursor = typeof result[0] === 'number' ? result[0] : parseInt(String(result[0]), 10);
|
|
3053
|
+
keys.push(...(result[1] || []));
|
|
3054
|
+
} while (cursor !== 0);
|
|
3055
|
+
|
|
3056
|
+
const jobs = await Promise.all(
|
|
3057
|
+
keys.map((key) => {
|
|
3058
|
+
const id = key.replace(queueKeyPrefix, '');
|
|
3059
|
+
return loadQueueJobRedis(id);
|
|
3060
|
+
})
|
|
3061
|
+
);
|
|
3062
|
+
const valid = jobs.filter((j): j is QueueJobRecord => j !== null);
|
|
3063
|
+
const filtered = queueId ? valid.filter((j) => j.queueId === queueId) : valid;
|
|
3064
|
+
return filtered
|
|
3065
|
+
.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime())
|
|
3066
|
+
.slice(0, limit);
|
|
3067
|
+
}
|
|
3068
|
+
const collection = await getCollection();
|
|
3069
|
+
const filter = queueId ? { queueId } : {};
|
|
3070
|
+
const docs = await collection
|
|
3071
|
+
.find(filter)
|
|
3072
|
+
.sort({ createdAt: -1 })
|
|
3073
|
+
.limit(limit)
|
|
3074
|
+
.toArray();
|
|
3075
|
+
return docs.map((doc) => {
|
|
3076
|
+
const { _id, ...record } = doc;
|
|
3077
|
+
return { ...record, id: _id };
|
|
3078
|
+
});
|
|
3079
|
+
}
|
|
3080
|
+
`,
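
queueJobStore keeps one record per queue run with a steps array. Note that, as generated, updateQueueStep and appendQueueStep write through the MongoDB collection, so this sketch assumes WORKER_DATABASE_TYPE=mongodb; queue, worker, and job ids are invented:

import {
  createQueueJob,
  updateQueueStep,
  appendQueueStep,
  updateQueueJob,
  getQueueJob,
} from './stores/queueJobStore';

async function runQueue(): Promise<void> {
  const queueJobId = `queue-${Date.now()}`;

  // One record per run; the first step starts out 'queued'.
  await createQueueJob(queueJobId, 'daily-report', {
    workerId: 'collect-data',
    workerJobId: 'job-abc',
  });

  // Step 0 runs and completes; startedAt/completedAt are stamped from the status transitions.
  await updateQueueStep(queueJobId, 0, { status: 'running' });
  await updateQueueStep(queueJobId, 0, { status: 'completed', output: { rows: 42 } });

  // The next worker in the queue gets its own step.
  await appendQueueStep(queueJobId, { workerId: 'send-report', workerJobId: 'job-def' });

  // Typically a webhook marks the whole run complete.
  await updateQueueJob(queueJobId, { status: 'completed', completedAt: new Date().toISOString() });

  console.log(await getQueueJob(queueJobId));
}

runQueue().catch(console.error);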
|
|
3081
|
+
"registry/workers.ts": `/**
|
|
3082
|
+
* Worker registry system.
|
|
3083
|
+
*
|
|
3084
|
+
* Uses only the GET /workers/config API as the source of truth.
|
|
3085
|
+
* No directory scanning, no dynamic imports, no .worker.ts loading.
|
|
3086
|
+
*
|
|
3087
|
+
* - getWorker(workerId): returns a synthetic WorkerAgent that dispatches via POST /workers/trigger
|
|
3088
|
+
* - listWorkers(): returns worker IDs from the config API response
|
|
3089
|
+
* - getQueueRegistry(): returns QueueRegistry from config (for dispatchQueue)
|
|
3090
|
+
*/
|
|
3091
|
+
|
|
3092
|
+
import type { WorkerAgent, WorkerQueueRegistry } from '@microfox/ai-worker';
|
|
3093
|
+
|
|
3094
|
+
/** Queue step config (matches WorkerQueueStep from @microfox/ai-worker). */
|
|
3095
|
+
export interface QueueStepConfig {
|
|
3096
|
+
workerId: string;
|
|
3097
|
+
delaySeconds?: number;
|
|
3098
|
+
mapInputFromPrev?: string;
|
|
3099
|
+
}
|
|
3100
|
+
|
|
3101
|
+
/** Queue config from workers/config API (matches WorkerQueueConfig structure). */
|
|
3102
|
+
export interface QueueConfig {
|
|
3103
|
+
id: string;
|
|
3104
|
+
steps: QueueStepConfig[];
|
|
3105
|
+
schedule?: string | { rate: string; enabled?: boolean; input?: Record<string, any> };
|
|
3106
|
+
}
|
|
3107
|
+
|
|
3108
|
+
export interface WorkersConfig {
|
|
3109
|
+
version?: string;
|
|
3110
|
+
stage?: string;
|
|
3111
|
+
region?: string;
|
|
3112
|
+
workers: Record<string, { queueUrl: string; region: string }>;
|
|
3113
|
+
queues?: QueueConfig[];
|
|
3114
|
+
}
|
|
3115
|
+
|
|
3116
|
+
let configCache: WorkersConfig | null = null;
|
|
3117
|
+
|
|
3118
|
+
function getConfigBaseUrl(): string {
|
|
3119
|
+
const raw =
|
|
3120
|
+
process.env.WORKERS_CONFIG_API_URL ||
|
|
3121
|
+
process.env.WORKER_BASE_URL;
|
|
3122
|
+
if (!raw?.trim()) {
|
|
3123
|
+
throw new Error(
|
|
3124
|
+
'WORKERS_CONFIG_API_URL or WORKER_BASE_URL is required for the worker registry. ' +
|
|
3125
|
+
'Set it to the base URL of your workers service (e.g. https://xxx.execute-api.us-east-1.amazonaws.com/prod).'
|
|
3126
|
+
);
|
|
3127
|
+
}
|
|
3128
|
+
const base = raw.trim().replace(/\\/+$/, '');
|
|
3129
|
+
if (base.endsWith('/workers/config')) {
|
|
3130
|
+
return base.replace(/\\/workers\\/config\\/?$/, '');
|
|
3131
|
+
}
|
|
3132
|
+
return base;
|
|
3133
|
+
}
|
|
3134
|
+
|
|
3135
|
+
function getConfigUrl(): string {
|
|
3136
|
+
const base = getConfigBaseUrl();
|
|
3137
|
+
return \`\${base}/workers/config\`;
|
|
3138
|
+
}
|
|
3139
|
+
|
|
3140
|
+
function getTriggerUrl(): string {
|
|
3141
|
+
const base = getConfigBaseUrl();
|
|
3142
|
+
return \`\${base}/workers/trigger\`;
|
|
3143
|
+
}
|
|
3144
|
+
|
|
3145
|
+
/**
|
|
3146
|
+
* Fetch and cache workers config from GET /workers/config.
|
|
3147
|
+
*/
|
|
3148
|
+
export async function fetchWorkersConfig(): Promise<WorkersConfig> {
|
|
3149
|
+
if (configCache) {
|
|
3150
|
+
return configCache;
|
|
3151
|
+
}
|
|
3152
|
+
const configUrl = getConfigUrl();
|
|
3153
|
+
const headers: Record<string, string> = { 'Content-Type': 'application/json' };
|
|
3154
|
+
const apiKey = process.env.WORKERS_CONFIG_API_KEY;
|
|
3155
|
+
if (apiKey) {
|
|
3156
|
+
headers['x-workers-config-key'] = apiKey;
|
|
3157
|
+
}
|
|
3158
|
+
const res = await fetch(configUrl, { method: 'GET', headers });
|
|
3159
|
+
if (!res.ok) {
|
|
3160
|
+
throw new Error(
|
|
3161
|
+
\`[WorkerRegistry] GET \${configUrl} failed: \${res.status} \${res.statusText}\`
|
|
3162
|
+
);
|
|
3163
|
+
}
|
|
3164
|
+
const data = (await res.json()) as WorkersConfig;
|
|
3165
|
+
if (!data?.workers || typeof data.workers !== 'object') {
|
|
3166
|
+
throw new Error(
|
|
3167
|
+
'[WorkerRegistry] Invalid config: expected { workers: { [id]: { queueUrl, region } } }'
|
|
3168
|
+
);
|
|
3169
|
+
}
|
|
3170
|
+
configCache = data;
|
|
3171
|
+
const workerIds = Object.keys(data.workers);
|
|
3172
|
+
const queueIds = data.queues?.map((q) => q.id) ?? [];
|
|
3173
|
+
console.log('[WorkerRegistry] Config loaded', { workers: workerIds.length, queues: queueIds });
|
|
3174
|
+
return data;
|
|
3175
|
+
}
|
|
3176
|
+
|
|
3177
|
+
/**
|
|
3178
|
+
* Build a synthetic WorkerAgent that dispatches via POST /workers/trigger.
|
|
3179
|
+
* Matches the trigger API contract used by @microfox/ai-worker.
|
|
3180
|
+
*/
|
|
3181
|
+
function createSyntheticAgent(workerId: string): WorkerAgent<any, any> {
|
|
3182
|
+
return {
|
|
3183
|
+
id: workerId,
|
|
3184
|
+
dispatch: async (input: any, options: any) => {
|
|
3185
|
+
const jobId =
|
|
3186
|
+
options?.jobId ||
|
|
3187
|
+
\`job-\${Date.now()}-\${Math.random().toString(36).slice(2, 11)}\`;
|
|
3188
|
+
const webhookUrl = options?.webhookUrl;
|
|
3189
|
+
const metadata = options?.metadata ?? {};
|
|
3190
|
+
const triggerUrl = getTriggerUrl();
|
|
3191
|
+
const messageBody = {
|
|
3192
|
+
workerId,
|
|
3193
|
+
jobId,
|
|
3194
|
+
input: input ?? {},
|
|
3195
|
+
context: {},
|
|
3196
|
+
webhookUrl: webhookUrl ?? undefined,
|
|
3197
|
+
metadata,
|
|
3198
|
+
timestamp: new Date().toISOString(),
|
|
3199
|
+
};
|
|
3200
|
+
const headers: Record<string, string> = {
|
|
3201
|
+
'Content-Type': 'application/json',
|
|
3202
|
+
};
|
|
3203
|
+
const key = process.env.WORKERS_TRIGGER_API_KEY;
|
|
3204
|
+
if (key) {
|
|
3205
|
+
headers['x-workers-trigger-key'] = key;
|
|
3206
|
+
}
|
|
3207
|
+
const response = await fetch(triggerUrl, {
|
|
3208
|
+
method: 'POST',
|
|
3209
|
+
headers,
|
|
3210
|
+
body: JSON.stringify({ workerId, body: messageBody }),
|
|
3211
|
+
});
|
|
3212
|
+
if (!response.ok) {
|
|
3213
|
+
const text = await response.text().catch(() => '');
|
|
3214
|
+
throw new Error(
|
|
3215
|
+
\`Failed to trigger worker "\${workerId}": \${response.status} \${response.statusText}\${text ? \` - \${text}\` : ''}\`
|
|
3216
|
+
);
|
|
3217
|
+
}
|
|
3218
|
+
const data = (await response.json().catch(() => ({}))) as any;
|
|
3219
|
+
const messageId = data?.messageId ? String(data.messageId) : \`trigger-\${jobId}\`;
|
|
3220
|
+
return { messageId, status: 'queued' as const, jobId };
|
|
3221
|
+
},
|
|
3222
|
+
} as WorkerAgent<any, any>;
|
|
3223
|
+
}
|
|
3224
|
+
|
|
3225
|
+
/**
|
|
3226
|
+
* List worker IDs from the config API.
|
|
3227
|
+
*/
|
|
3228
|
+
export async function listWorkers(): Promise<string[]> {
|
|
3229
|
+
const config = await fetchWorkersConfig();
|
|
3230
|
+
return Object.keys(config.workers);
|
|
3231
|
+
}
|
|
3232
|
+
|
|
3233
|
+
/**
|
|
3234
|
+
* Get a worker by ID. Returns a synthetic WorkerAgent that dispatches via
|
|
3235
|
+
* POST /workers/trigger. Returns null if the worker is not in the config.
|
|
3236
|
+
*/
|
|
3237
|
+
export async function getWorker(
|
|
3238
|
+
workerId: string
|
|
3239
|
+
): Promise<WorkerAgent<any, any> | null> {
|
|
3240
|
+
const config = await fetchWorkersConfig();
|
|
3241
|
+
if (!(workerId in config.workers)) {
|
|
3242
|
+
return null;
|
|
3243
|
+
}
|
|
3244
|
+
return createSyntheticAgent(workerId);
|
|
3245
|
+
}
|
|
3246
|
+
|
|
3247
|
+
/** Webpack require.context \u2013 auto-discovers app/ai/queues/*.queue.ts (Next.js). */
|
|
3248
|
+
function getQueueModuleContext(): { keys(): string[]; (key: string): unknown } | null {
|
|
3249
|
+
try {
|
|
3250
|
+
if (typeof require === 'undefined') return null;
|
|
3251
|
+
const ctx = (require as unknown as { context: (dir: string, sub: boolean, re: RegExp) => { keys(): string[]; (k: string): unknown } }).context(
|
|
3252
|
+
'@/app/ai/queues',
|
|
3253
|
+
false,
|
|
3254
|
+
/\\.queue\\.ts$/
|
|
3255
|
+
);
|
|
3256
|
+
return ctx;
|
|
3257
|
+
} catch {
|
|
3258
|
+
return null;
|
|
3259
|
+
}
|
|
3260
|
+
}
|
|
3261
|
+
|
|
3262
|
+
/**
|
|
3263
|
+
* Auto-discover queue modules from app/ai/queues/*.queue.ts (no per-queue registration).
|
|
3264
|
+
* Uses require.context when available (Next.js/webpack).
|
|
3265
|
+
*/
|
|
3266
|
+
function buildQueueModules(): Record<string, Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>> {
|
|
3267
|
+
const ctx = getQueueModuleContext();
|
|
3268
|
+
if (!ctx) return {};
|
|
3269
|
+
const out: Record<string, Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>> = {};
|
|
3270
|
+
for (const key of ctx.keys()) {
|
|
3271
|
+
const mod = ctx(key) as { default?: { id?: string }; [k: string]: unknown };
|
|
3272
|
+
const id = mod?.default?.id;
|
|
3273
|
+
if (id && typeof id === 'string') {
|
|
3274
|
+
out[id] = mod as Record<string, (initial: unknown, prevOutputs: unknown[]) => unknown>;
|
|
3275
|
+
}
|
|
3276
|
+
}
|
|
3277
|
+
return out;
|
|
3278
|
+
}
|
|
3279
|
+
|
|
3280
|
+
const queueModules = buildQueueModules();
|
|
3281
|
+
|
|
3282
|
+
/**
|
|
3283
|
+
* Returns a registry compatible with dispatchQueue. Queue definitions come from
|
|
3284
|
+
* GET /workers/config; mapInputFromPrev is resolved from app/ai/queues/*.queue.ts
|
|
3285
|
+
* automatically (no manual registration per queue).
|
|
3286
|
+
*/
|
|
3287
|
+
export async function getQueueRegistry(): Promise<WorkerQueueRegistry> {
|
|
3288
|
+
const config = await fetchWorkersConfig();
|
|
3289
|
+
const queues: QueueConfig[] = config.queues ?? [];
|
|
3290
|
+
|
|
3291
|
+
const registry = {
|
|
3292
|
+
getQueueById(queueId: string) {
|
|
3293
|
+
return queues.find((q) => q.id === queueId);
|
|
3294
|
+
},
|
|
3295
|
+
invokeMapInput(
|
|
3296
|
+
queueId: string,
|
|
3297
|
+
stepIndex: number,
|
|
3298
|
+
initialInput: unknown,
|
|
3299
|
+
previousOutputs: Array<{ stepIndex: number; workerId: string; output: unknown }>
|
|
3300
|
+
): unknown {
|
|
3301
|
+
const queue = queues.find((q) => q.id === queueId);
|
|
3302
|
+
const step = queue?.steps?.[stepIndex];
|
|
3303
|
+
const fnName = step?.mapInputFromPrev;
|
|
3304
|
+
if (!fnName) {
|
|
3305
|
+
return previousOutputs.length > 0 ? previousOutputs[previousOutputs.length - 1].output : initialInput;
|
|
3306
|
+
}
|
|
3307
|
+
const mod = queueModules[queueId];
|
|
3308
|
+
if (!mod || typeof mod[fnName] !== 'function') {
|
|
3309
|
+
return previousOutputs.length > 0 ? previousOutputs[previousOutputs.length - 1].output : initialInput;
|
|
3310
|
+
}
|
|
3311
|
+
return mod[fnName](initialInput, previousOutputs);
|
|
3312
|
+
},
|
|
3313
|
+
};
|
|
3314
|
+
return registry as WorkerQueueRegistry;
|
|
3315
|
+
}
|
|
3316
|
+
|
|
3317
|
+
/**
|
|
3318
|
+
* Clear the in-memory config cache (e.g. for tests or refresh).
|
|
3319
|
+
*/
|
|
3320
|
+
export function clearConfigCache(): void {
|
|
3321
|
+
configCache = null;
|
|
3322
|
+
}
|
|
3323
|
+
`,
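
The registry turns every id returned by GET /workers/config into a synthetic agent whose dispatch() POSTs { workerId, body } to <base>/workers/trigger. A usage sketch, assuming WORKERS_CONFIG_API_URL (or WORKER_BASE_URL) points at the deployed workers service; the worker id and input are placeholders:

import { getWorker, listWorkers } from './registry/workers';

async function dispatchExample(): Promise<void> {
  console.log(await listWorkers()); // ids straight from GET /workers/config

  const worker = await getWorker('hello-worker'); // hypothetical worker id
  if (!worker) throw new Error('worker not present in /workers/config');

  // Returns { messageId, status: 'queued', jobId }; jobId is generated when not supplied.
  const result = await worker.dispatch(
    { name: 'Ada' },
    { metadata: { source: 'example' } }
  );
  console.log(result.jobId, result.status);
}

dispatchExample().catch(console.error);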
|
|
3324
|
+
"workers/[...slug]/route.ts": `import { NextRequest, NextResponse } from 'next/server';
|
|
3325
|
+
|
|
3326
|
+
/**
|
|
3327
|
+
* Worker execution endpoint.
|
|
3328
|
+
*
|
|
3329
|
+
* POST /api/workflows/workers/:workerId - Execute a worker
|
|
3330
|
+
* GET /api/workflows/workers/:workerId/:jobId - Get worker job status
|
|
3331
|
+
* POST /api/workflows/workers/:workerId/webhook - Webhook callback for completion notifications
|
|
3332
|
+
*
|
|
3333
|
+
* This endpoint allows workers to be called like workflows, enabling
|
|
3334
|
+
* them to be used in orchestration.
|
|
3335
|
+
*
|
|
3336
|
+
* Workers are auto-discovered from app/ai directory (any .worker.ts files) or
|
|
3337
|
+
* can be imported and registered manually via registerWorker().
|
|
3338
|
+
*/
|
|
3339
|
+
|
|
3340
|
+
// Worker auto-discovery is implemented in ../registry/workers
|
|
3341
|
+
// - Create worker registry module: app/api/workflows/registry/workers.ts
|
|
3342
|
+
// - Scan app/ai/**/*.worker.ts files at startup or lazily on first access
|
|
3343
|
+
// - Use glob pattern: 'app/ai/**/*.worker.ts'
|
|
3344
|
+
// - Extract worker ID from file: const worker = await import(filePath); worker.id
|
|
3345
|
+
// - Cache workers in memory or persistent store
|
|
3346
|
+
// - Support hot-reload in development
|
|
3347
|
+
// - Export: scanWorkers(), getWorker(workerId), listWorkers()
|
|
3348
|
+
|
|
3349
|
+
/**
|
|
3350
|
+
* Get a worker by ID.
|
|
3351
|
+
*/
|
|
3352
|
+
async function getWorkerById(workerId: string): Promise<any | null> {
|
|
3353
|
+
const workersModule = await import('../../registry/workers') as { getWorker: (workerId: string) => Promise<any | null> };
|
|
3354
|
+
return await workersModule.getWorker(workerId);
|
|
3355
|
+
}
|
|
3356
|
+
|
|
3357
|
+
export async function POST(
|
|
3358
|
+
req: NextRequest,
|
|
3359
|
+
{ params }: { params: Promise<{ slug: string[] }> }
|
|
3360
|
+
) {
|
|
3361
|
+
let slug: string[] = [];
|
|
3362
|
+
try {
|
|
3363
|
+
const { slug: slugParam } = await params;
|
|
3364
|
+
slug = slugParam || [];
|
|
3365
|
+
const [workerId, action] = slug;
|
|
3366
|
+
|
|
3367
|
+
// Handle webhook endpoint
|
|
3368
|
+
if (action === 'webhook') {
|
|
3369
|
+
return handleWebhook(req, workerId);
|
|
3370
|
+
}
|
|
3371
|
+
|
|
3372
|
+
// Handle job store update endpoint (POST /api/workflows/workers/:workerId/update)
|
|
3373
|
+
if (action === 'update') {
|
|
3374
|
+
return handleJobUpdate(req, workerId);
|
|
3375
|
+
}
|
|
3376
|
+
|
|
3377
|
+
// Create job record (POST /api/workflows/workers/:workerId/job) \u2013 used before polling in the trigger-only flow
|
|
3378
|
+
if (action === 'job') {
|
|
3379
|
+
return handleCreateJob(req, workerId);
|
|
3380
|
+
}
|
|
3381
|
+
|
|
3382
|
+
if (!workerId) {
|
|
3383
|
+
return NextResponse.json(
|
|
3384
|
+
{ error: 'Worker ID is required' },
|
|
3385
|
+
{ status: 400 }
|
|
3386
|
+
);
|
|
3387
|
+
}
|
|
3388
|
+
|
|
3389
|
+
let body;
|
|
3390
|
+
try {
|
|
3391
|
+
body = await req.json();
|
|
3392
|
+
} catch (parseError: any) {
|
|
3393
|
+
console.error('[Worker] Failed to parse request body:', {
|
|
3394
|
+
workerId,
|
|
3395
|
+
error: parseError?.message || String(parseError),
|
|
3396
|
+
});
|
|
3397
|
+
return NextResponse.json(
|
|
3398
|
+
{ error: 'Invalid JSON in request body' },
|
|
3399
|
+
{ status: 400 }
|
|
3400
|
+
);
|
|
3401
|
+
}
|
|
3402
|
+
|
|
3403
|
+
const { input, await: shouldAwait = false, jobId: providedJobId } = body;
|
|
3404
|
+
|
|
3405
|
+
console.log('[Worker] Dispatching worker:', {
|
|
3406
|
+
workerId,
|
|
3407
|
+
shouldAwait,
|
|
3408
|
+
hasInput: !!input,
|
|
3409
|
+
});
|
|
3410
|
+
|
|
3411
|
+
// Get the worker using registry system
|
|
3412
|
+
let worker;
|
|
3413
|
+
try {
|
|
3414
|
+
worker = await getWorkerById(workerId);
|
|
3415
|
+
} catch (getWorkerError: any) {
|
|
3416
|
+
console.error('[Worker] Error getting worker:', {
|
|
3417
|
+
workerId,
|
|
3418
|
+
error: getWorkerError?.message || String(getWorkerError),
|
|
3419
|
+
});
|
|
3420
|
+
return NextResponse.json(
|
|
3421
|
+
{ error: \`Failed to get worker: \${getWorkerError?.message || String(getWorkerError)}\` },
|
|
3422
|
+
{ status: 500 }
|
|
3423
|
+
);
|
|
3424
|
+
}
|
|
3425
|
+
|
|
3426
|
+
if (!worker) {
|
|
3427
|
+
console.warn('[Worker] Worker not found:', {
|
|
3428
|
+
workerId,
|
|
3429
|
+
});
|
|
3430
|
+
return NextResponse.json(
|
|
3431
|
+
{ error: \`Worker "\${workerId}" not found. Make sure it's exported from a .worker.ts file.\` },
|
|
3432
|
+
{ status: 404 }
|
|
3433
|
+
);
|
|
3434
|
+
}
|
|
3435
|
+
|
|
3436
|
+
// Webhook optional. Job updates use MongoDB only; never pass jobStoreUrl.
|
|
3437
|
+
const webhookBase = process.env.WORKFLOW_WEBHOOK_BASE_URL;
|
|
3438
|
+
const webhookUrl =
|
|
3439
|
+
shouldAwait && typeof webhookBase === 'string' && webhookBase
|
|
3440
|
+
? \`\${webhookBase.replace(/\\/+$/, '')}/api/workflows/workers/\${workerId}/webhook\`
|
|
3441
|
+
: undefined;
|
|
3442
|
+
|
|
3443
|
+
// Use a single jobId end-to-end (Next job store + SQS/Lambda job store).
|
|
3444
|
+
// If caller provides jobId, respect it; otherwise generate one.
|
|
3445
|
+
const jobId =
|
|
3446
|
+
(typeof providedJobId === 'string' && providedJobId.trim()
|
|
3447
|
+
? providedJobId.trim()
|
|
3448
|
+
: \`job-\${Date.now()}-\${Math.random().toString(36).slice(2, 11)}\`);
|
|
3449
|
+
|
|
3450
|
+
// Store initial job record
|
|
3451
|
+
const { setJob } = await import('../../stores/jobStore');
|
|
3452
|
+
try {
|
|
3453
|
+
await setJob(jobId, {
|
|
3454
|
+
jobId,
|
|
3455
|
+
workerId,
|
|
3456
|
+
status: 'queued',
|
|
3457
|
+
input: input || {},
|
|
3458
|
+
metadata: { source: 'workflow-orchestration' },
|
|
3459
|
+
});
|
|
3460
|
+
console.log('[Worker] Initial job record created:', {
|
|
3461
|
+
jobId,
|
|
3462
|
+
workerId,
|
|
3463
|
+
});
|
|
3464
|
+
} catch (setJobError: any) {
|
|
3465
|
+
console.error('[Worker] Failed to create initial job record:', {
|
|
3466
|
+
jobId,
|
|
3467
|
+
workerId,
|
|
3468
|
+
error: setJobError?.message || String(setJobError),
|
|
3469
|
+
});
|
|
3470
|
+
// Continue even if job store fails - worker dispatch can still proceed
|
|
3471
|
+
}
|
|
3472
|
+
|
|
3473
|
+
// Dispatch the worker. Job updates use MongoDB only; webhook only if configured.
|
|
3474
|
+
let dispatchResult;
|
|
3475
|
+
try {
|
|
3476
|
+
dispatchResult = await worker.dispatch(input || {}, {
|
|
3477
|
+
mode: 'auto',
|
|
3478
|
+
jobId,
|
|
3479
|
+
...(webhookUrl ? { webhookUrl } : {}),
|
|
3480
|
+
metadata: { source: 'workflow-orchestration' },
|
|
3481
|
+
});
|
|
3482
|
+
console.log('[Worker] Worker dispatched successfully:', {
|
|
3483
|
+
jobId: dispatchResult.jobId,
|
|
3484
|
+
workerId,
|
|
3485
|
+
messageId: dispatchResult.messageId,
|
|
3486
|
+
});
|
|
3487
|
+
} catch (dispatchError: any) {
|
|
3488
|
+
console.error('[Worker] Failed to dispatch worker:', {
|
|
3489
|
+
workerId,
|
|
3490
|
+
error: dispatchError?.message || String(dispatchError),
|
|
3491
|
+
stack: process.env.NODE_ENV === 'development' ? dispatchError?.stack : undefined,
|
|
3492
|
+
});
|
|
3493
|
+
throw new Error(\`Failed to dispatch worker: \${dispatchError?.message || String(dispatchError)}\`);
|
|
3494
|
+
}
|
|
3495
|
+
|
|
3496
|
+
const finalJobId = dispatchResult.jobId || jobId;
|
|
3497
|
+
|
|
3498
|
+
if (shouldAwait) {
|
|
3499
|
+
// For await mode, return job info and let caller poll status
|
|
3500
|
+
// The webhook handler will update the job when complete
|
|
3501
|
+
// For Vercel workflow: Use polling with setTimeout/setInterval
|
|
3502
|
+
// Workers are fire-and-forget only
|
|
3503
|
+
return NextResponse.json(
|
|
3504
|
+
{
|
|
3505
|
+
jobId: finalJobId,
|
|
3506
|
+
status: 'queued',
|
|
3507
|
+
message: 'Worker job queued. Use GET /api/workflows/workers/:workerId/:jobId to check status, or wait for webhook.',
|
|
3508
|
+
},
|
|
3509
|
+
{ status: 200 }
|
|
3510
|
+
);
|
|
3511
|
+
}
|
|
3512
|
+
|
|
3513
|
+
return NextResponse.json(
|
|
3514
|
+
{
|
|
3515
|
+
jobId: finalJobId,
|
|
3516
|
+
status: dispatchResult.status || 'queued',
|
|
3517
|
+
},
|
|
3518
|
+
{ status: 200 }
|
|
3519
|
+
);
|
|
3520
|
+
} catch (error: any) {
|
|
3521
|
+
console.error('[Worker] Error in POST handler:', {
|
|
3522
|
+
workerId: slug[0],
|
|
3523
|
+
error: error?.message || String(error),
|
|
3524
|
+
stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
|
|
3525
|
+
});
|
|
3526
|
+
return NextResponse.json(
|
|
3527
|
+
{
|
|
3528
|
+
error: error?.message || String(error),
|
|
3529
|
+
stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
|
|
3530
|
+
},
|
|
3531
|
+
{ status: 500 }
|
|
3532
|
+
);
|
|
3533
|
+
}
|
|
3534
|
+
}
|
|
3535
|
+
|
|
3536
|
+
export async function GET(
|
|
3537
|
+
req: NextRequest,
|
|
3538
|
+
{ params }: { params: Promise<{ slug: string[] }> }
|
|
3539
|
+
) {
|
|
3540
|
+
let slug: string[] = [];
|
|
3541
|
+
try {
|
|
3542
|
+
const { slug: slugParam } = await params;
|
|
3543
|
+
slug = slugParam || [];
|
|
3544
|
+
const [workerId, jobId] = slug;
|
|
3545
|
+
|
|
3546
|
+
if (!workerId || !jobId) {
|
|
3547
|
+
return NextResponse.json(
|
|
3548
|
+
{ error: 'Worker ID and job ID are required' },
|
|
3549
|
+
{ status: 400 }
|
|
3550
|
+
);
|
|
3551
|
+
}
|
|
3552
|
+
|
|
3553
|
+
console.log('[Worker] Getting job status:', {
|
|
3554
|
+
jobId,
|
|
3555
|
+
workerId,
|
|
3556
|
+
});
|
|
3557
|
+
|
|
3558
|
+
// Get job status from job store
|
|
3559
|
+
const { getJob } = await import('../../stores/jobStore');
|
|
3560
|
+
let job;
|
|
3561
|
+
try {
|
|
3562
|
+
job = await getJob(jobId);
|
|
3563
|
+
} catch (getJobError: any) {
|
|
3564
|
+
console.error('[Worker] Error getting job from store:', {
|
|
3565
|
+
jobId,
|
|
3566
|
+
workerId,
|
|
3567
|
+
error: getJobError?.message || String(getJobError),
|
|
3568
|
+
});
|
|
3569
|
+
return NextResponse.json(
|
|
3570
|
+
{ error: \`Failed to get job: \${getJobError?.message || String(getJobError)}\` },
|
|
3571
|
+
{ status: 500 }
|
|
3572
|
+
);
|
|
3573
|
+
}
|
|
3574
|
+
|
|
3575
|
+
if (!job) {
|
|
3576
|
+
console.warn('[Worker] Job not found:', {
|
|
3577
|
+
jobId,
|
|
3578
|
+
workerId,
|
|
3579
|
+
});
|
|
3580
|
+
return NextResponse.json(
|
|
3581
|
+
{ error: \`Job "\${jobId}" not found\` },
|
|
3582
|
+
{ status: 404 }
|
|
3583
|
+
);
|
|
3584
|
+
}
|
|
3585
|
+
|
|
3586
|
+
console.log('[Worker] Job status retrieved:', {
|
|
3587
|
+
jobId,
|
|
3588
|
+
workerId,
|
|
3589
|
+
status: job.status,
|
|
3590
|
+
});
|
|
3591
|
+
|
|
3592
|
+
return NextResponse.json(
|
|
3593
|
+
{
|
|
3594
|
+
jobId: job.jobId,
|
|
3595
|
+
workerId: job.workerId,
|
|
3596
|
+
status: job.status,
|
|
3597
|
+
output: job.output,
|
|
3598
|
+
error: job.error,
|
|
3599
|
+
metadata: job.metadata,
|
|
3600
|
+
createdAt: job.createdAt,
|
|
3601
|
+
updatedAt: job.updatedAt,
|
|
3602
|
+
completedAt: job.completedAt,
|
|
3603
|
+
},
|
|
3604
|
+
{ status: 200 }
|
|
3605
|
+
);
|
|
3606
|
+
} catch (error: any) {
|
|
3607
|
+
console.error('[Worker] Error in GET handler:', {
|
|
3608
|
+
workerId: slug[0],
|
|
3609
|
+
jobId: slug[1],
|
|
3610
|
+
error: error?.message || String(error),
|
|
3611
|
+
stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
|
|
3612
|
+
});
|
|
3613
|
+
return NextResponse.json(
|
|
3614
|
+
{
|
|
3615
|
+
error: error?.message || String(error),
|
|
3616
|
+
stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
|
|
3617
|
+
},
|
|
3618
|
+
{ status: 500 }
|
|
3619
|
+
);
|
|
3620
|
+
}
|
|
3621
|
+
}
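
Together, the POST and GET handlers above give a trigger-then-poll loop over plain HTTP. A client-side sketch, with the base URL and worker id as placeholders and assuming the route is mounted at app/api/workflows/workers/[...slug]:

const BASE = 'http://localhost:3000'; // placeholder

async function triggerAndPoll(): Promise<void> {
  // POST /api/workflows/workers/:workerId -> { jobId, status: 'queued' }
  const res = await fetch(`${BASE}/api/workflows/workers/hello-worker`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ input: { name: 'Ada' }, await: true }),
  });
  const { jobId } = (await res.json()) as { jobId: string };

  // GET /api/workflows/workers/:workerId/:jobId until the webhook or /update marks it done.
  for (;;) {
    const statusRes = await fetch(`${BASE}/api/workflows/workers/hello-worker/${jobId}`);
    const job = (await statusRes.json()) as { status: string; output?: unknown };
    if (job.status === 'completed' || job.status === 'failed') {
      console.log(job.status, job.output);
      return;
    }
    await new Promise((resolve) => setTimeout(resolve, 2000));
  }
}

triggerAndPoll().catch(console.error);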
|
|
3622
|
+
|
|
3623
|
+
/**
|
|
3624
|
+
* Create job record before polling (trigger-only flow).
|
|
3625
|
+
* POST /api/workflows/workers/:workerId/job
|
|
3626
|
+
* Body: { jobId, input }
|
|
3627
|
+
*/
|
|
3628
|
+
async function handleCreateJob(req: NextRequest, workerId: string) {
|
|
3629
|
+
try {
|
|
3630
|
+
if (!workerId) {
|
|
3631
|
+
return NextResponse.json({ error: 'Worker ID is required' }, { status: 400 });
|
|
3632
|
+
}
|
|
3633
|
+
const body = await req.json();
|
|
3634
|
+
const { jobId, input } = body;
|
|
3635
|
+
if (!jobId) {
|
|
3636
|
+
return NextResponse.json({ error: 'jobId is required in request body' }, { status: 400 });
|
|
3637
|
+
}
|
|
3638
|
+
const { setJob } = await import('../../stores/jobStore');
|
|
3639
|
+
await setJob(jobId, {
|
|
3640
|
+
jobId,
|
|
3641
|
+
workerId,
|
|
3642
|
+
status: 'queued',
|
|
3643
|
+
input: input ?? {},
|
|
3644
|
+
metadata: { source: 'workflow-orchestration' },
|
|
3645
|
+
});
|
|
3646
|
+
console.log('[Worker] Job created:', { jobId, workerId });
|
|
3647
|
+
return NextResponse.json({ message: 'Job created', jobId, workerId }, { status: 200 });
|
|
3648
|
+
} catch (error: any) {
|
|
3649
|
+
console.error('[Worker] Error creating job:', { workerId, error: error?.message || String(error) });
|
|
3650
|
+
return NextResponse.json(
|
|
3651
|
+
{ error: error?.message || String(error) },
|
|
3652
|
+
{ status: 500 }
|
|
3653
|
+
);
|
|
3654
|
+
}
|
|
3655
|
+
}
|
|
3656
|
+
|
|
3657
|
+
/**
|
|
3658
|
+
* Handle job store update from worker context.
|
|
3659
|
+
* POST /api/workflows/workers/:workerId/update
|
|
3660
|
+
*/
|
|
3661
|
+
async function handleJobUpdate(req: NextRequest, workerId: string) {
|
|
3662
|
+
try {
|
|
3663
|
+
if (!workerId) {
|
|
3664
|
+
return NextResponse.json(
|
|
3665
|
+
{ error: 'Worker ID is required' },
|
|
3666
|
+
{ status: 400 }
|
|
3667
|
+
);
|
|
3668
|
+
}
|
|
3669
|
+
|
|
3670
|
+
const body = await req.json();
|
|
3671
|
+
const { jobId, status, metadata, output, error } = body;
|
|
3672
|
+
|
|
3673
|
+
if (!jobId) {
|
|
3674
|
+
return NextResponse.json(
|
|
3675
|
+
{ error: 'jobId is required in request body' },
|
|
3676
|
+
{ status: 400 }
|
|
3677
|
+
);
|
|
3678
|
+
}
|
|
3679
|
+
|
|
3680
|
+
const { updateJob, setJob, getJob } = await import('../../stores/jobStore');
|
|
3681
|
+
const existing = await getJob(jobId);
|
|
3682
|
+
|
|
3683
|
+
// Upsert: create job if missing (e.g. workflow triggered via /workers/trigger directly)
|
|
3684
|
+
if (!existing) {
|
|
3685
|
+
await setJob(jobId, {
|
|
3686
|
+
jobId,
|
|
3687
|
+
workerId,
|
|
3688
|
+
status: status ?? 'queued',
|
|
3689
|
+
input: {},
|
|
3690
|
+
metadata: metadata ?? {},
|
|
3691
|
+
output,
|
|
3692
|
+
error,
|
|
3693
|
+
});
|
|
3694
|
+
return NextResponse.json(
|
|
3695
|
+
{ message: 'Job created and updated successfully', jobId, workerId },
|
|
3696
|
+
{ status: 200 }
|
|
3697
|
+
);
|
|
3698
|
+
}
|
|
3699
|
+
|
|
3700
|
+
const updateData: any = {};
|
|
3701
|
+
if (status !== undefined) updateData.status = status;
|
|
3702
|
+
if (metadata !== undefined) updateData.metadata = { ...existing.metadata, ...metadata };
|
|
3703
|
+
if (output !== undefined) updateData.output = output;
|
|
3704
|
+
if (error !== undefined) updateData.error = error;
|
|
3705
|
+
|
|
3706
|
+
await updateJob(jobId, updateData);
|
|
3707
|
+
|
|
3708
|
+
console.log('[Worker] Job updated:', { jobId, workerId, updates: Object.keys(updateData) });
|
|
3709
|
+
|
|
3710
|
+
return NextResponse.json(
|
|
3711
|
+
{ message: 'Job updated successfully', jobId, workerId },
|
|
3712
|
+
{ status: 200 }
|
|
3713
|
+
);
|
|
3714
|
+
} catch (error: any) {
|
|
3715
|
+
console.error('[Worker] Error updating job:', {
|
|
3716
|
+
workerId,
|
|
3717
|
+
error: error?.message || String(error),
|
|
3718
|
+
stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
|
|
3719
|
+
});
|
|
3720
|
+
return NextResponse.json(
|
|
3721
|
+
{ error: error?.message || String(error) },
|
|
3722
|
+
{ status: 500 }
|
|
3723
|
+
);
|
|
3724
|
+
}
|
|
3725
|
+
}
|
|
3726
|
+
|
|
3727
|
+
/**
|
|
3728
|
+
* Handle webhook callback for worker completion.
|
|
3729
|
+
* POST /api/workflows/workers/:workerId/webhook
|
|
3730
|
+
*
|
|
3731
|
+
* This endpoint receives completion notifications from workers.
|
|
3732
|
+
* It updates the job store with the final status before returning.
|
|
3733
|
+
* Webhook is only called if webhookUrl was provided during dispatch.
|
|
3734
|
+
*/
|
|
3735
|
+
async function handleWebhook(req: NextRequest, workerId: string) {
|
|
3736
|
+
try {
|
|
3737
|
+
if (!workerId) {
|
|
3738
|
+
return NextResponse.json(
|
|
3739
|
+
{ error: 'Worker ID is required' },
|
|
3740
|
+
{ status: 400 }
|
|
3741
|
+
);
|
|
3742
|
+
}
|
|
3743
|
+
|
|
3744
|
+
const body = await req.json();
|
|
3745
|
+
const { jobId, status, output, error, metadata } = body;
|
|
3746
|
+
|
|
3747
|
+
if (!jobId) {
|
|
3748
|
+
return NextResponse.json(
|
|
3749
|
+
{ error: 'jobId is required in webhook payload' },
|
|
3750
|
+
{ status: 400 }
|
|
3751
|
+
);
|
|
3752
|
+
}
|
|
3753
|
+
|
|
3754
|
+
// Update job store with completion status (before any further processing)
|
|
3755
|
+
const { updateJob } = await import('../../stores/jobStore');
|
|
3756
|
+
|
|
3757
|
+
const jobStatus = status === 'success' ? 'completed' : 'failed';
|
|
3758
|
+
|
|
3759
|
+
try {
|
|
3760
|
+
// Update job with completion status
|
|
3761
|
+
await updateJob(jobId, {
|
|
3762
|
+
jobId,
|
|
3763
|
+
workerId,
|
|
3764
|
+
status: jobStatus,
|
|
3765
|
+
output,
|
|
3766
|
+
error,
|
|
3767
|
+
completedAt: new Date().toISOString(),
|
|
3768
|
+
metadata: metadata || {},
|
|
3769
|
+
});
|
|
3770
|
+
|
|
3771
|
+
console.log('[Worker] Webhook received and job updated:', {
|
|
3772
|
+
jobId,
|
|
3773
|
+
workerId,
|
|
3774
|
+
status: jobStatus,
|
|
3775
|
+
});
|
|
3776
|
+
} catch (updateError: any) {
|
|
3777
|
+
console.error('[Worker] Failed to update job store from webhook:', {
|
|
3778
|
+
jobId,
|
|
3779
|
+
workerId,
|
|
3780
|
+
error: updateError?.message || String(updateError),
|
|
3781
|
+
stack: process.env.NODE_ENV === 'development' ? updateError?.stack : undefined,
|
|
3782
|
+
});
|
|
3783
|
+
// Continue even if job store update fails - webhook was received
|
|
3784
|
+
}
|
|
3785
|
+
|
|
3786
|
+
return NextResponse.json(
|
|
3787
|
+
{ message: 'Webhook received', jobId, workerId, status: jobStatus },
|
|
3788
|
+
{ status: 200 }
|
|
3789
|
+
);
|
|
3790
|
+
} catch (error: any) {
|
|
3791
|
+
console.error('[Worker] Error handling webhook:', {
|
|
3792
|
+
workerId,
|
|
3793
|
+
error: error?.message || String(error),
|
|
3794
|
+
stack: process.env.NODE_ENV === 'development' ? error?.stack : undefined,
|
|
3795
|
+
});
|
|
3796
|
+
return NextResponse.json(
|
|
3797
|
+
{ error: error?.message || String(error) },
|
|
3798
|
+
{ status: 500 }
|
|
3799
|
+
);
|
|
3800
|
+
}
|
|
3801
|
+
}
|
|
3802
|
+
`,
|
|
3803
|
+
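The handlers above give each deployed worker a small REST surface: job creation, job updates, and a completion webhook, all backed by the shared jobStore. A minimal sketch of how an external caller might drive the create/update flow (the base URL, worker id report-builder, and job id are illustrative; paths and payload shapes follow the handlers above):

const base = 'http://localhost:3000'; // assumed dev server

// Create the job record before the worker starts reporting progress
// (POST /api/workflows/workers/:workerId/job).
await fetch(`${base}/api/workflows/workers/report-builder/job`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ jobId: 'job-123', input: { topic: 'weekly summary' } }),
});

// Report progress; the update handler upserts the job if it does not exist yet
// (POST /api/workflows/workers/:workerId/update).
await fetch(`${base}/api/workflows/workers/report-builder/update`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ jobId: 'job-123', status: 'running', metadata: { step: 1 } }),
});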
"queues/[...slug]/route.ts": `import { NextRequest, NextResponse } from 'next/server';
|
|
3804
|
+
import { dispatchQueue } from '@microfox/ai-worker';
|
|
3805
|
+
import { getQueueRegistry } from '../../registry/workers';
|
|
3806
|
+
import {
|
|
3807
|
+
getQueueJob,
|
|
3808
|
+
listQueueJobs,
|
|
3809
|
+
updateQueueJob,
|
|
3810
|
+
updateQueueStep,
|
|
3811
|
+
appendQueueStep,
|
|
3812
|
+
} from '../../stores/queueJobStore';
|
|
3813
|
+
|
|
3814
|
+
export const dynamic = 'force-dynamic';
|
|
3815
|
+
|
|
3816
|
+
const LOG = '[Queues]';
|
|
3817
|
+
|
|
3818
|
+
/**
|
|
3819
|
+
* Queue execution endpoint (mirrors workers route structure).
|
|
3820
|
+
*
|
|
3821
|
+
* POST /api/workflows/queues/:queueId - Trigger a queue (no registry import needed in app code)
|
|
3822
|
+
* GET /api/workflows/queues/:queueId/:jobId - Get queue job status
|
|
3823
|
+
* GET /api/workflows/queues - List queue jobs (query: queueId?, limit?)
|
|
3824
|
+
* POST /api/workflows/queues/:queueId/update - Update queue job step (for Lambda/callers)
|
|
3825
|
+
* POST /api/workflows/queues/:queueId/webhook - Webhook for queue completion
|
|
3826
|
+
*
|
|
3827
|
+
* Callers can trigger a queue with a simple POST; registry is resolved inside this route.
|
|
3828
|
+
*/
|
|
3829
|
+
async function getRegistry() {
|
|
3830
|
+
return getQueueRegistry();
|
|
3831
|
+
}
|
|
3832
|
+
|
|
3833
|
+
export async function POST(
|
|
3834
|
+
req: NextRequest,
|
|
3835
|
+
{ params }: { params: Promise<{ slug: string[] }> }
|
|
3836
|
+
) {
|
|
3837
|
+
let slug: string[] = [];
|
|
3838
|
+
try {
|
|
3839
|
+
const { slug: slugParam } = await params;
|
|
3840
|
+
slug = slugParam ?? [];
|
|
3841
|
+
const [queueId, action] = slug;
|
|
3842
|
+
|
|
3843
|
+
if (action === 'update') {
|
|
3844
|
+
return handleQueueJobUpdate(req, queueId);
|
|
3845
|
+
}
|
|
3846
|
+
if (action === 'webhook') {
|
|
3847
|
+
return handleQueueWebhook(req, queueId);
|
|
3848
|
+
}
|
|
3849
|
+
|
|
3850
|
+
if (!queueId) {
|
|
3851
|
+
return NextResponse.json(
|
|
3852
|
+
{ error: 'Queue ID is required. Use POST /api/workflows/queues/:queueId to trigger a queue.' },
|
|
3853
|
+
{ status: 400 }
|
|
3854
|
+
);
|
|
3855
|
+
}
|
|
3856
|
+
|
|
3857
|
+
let body: { input?: unknown; metadata?: Record<string, unknown>; jobId?: string } = {};
|
|
3858
|
+
try {
|
|
3859
|
+
body = await req.json();
|
|
3860
|
+
} catch {
|
|
3861
|
+
body = {};
|
|
3862
|
+
}
|
|
3863
|
+
const { input = {}, metadata, jobId: providedJobId } = body;
|
|
3864
|
+
|
|
3865
|
+
const registry = await getRegistry();
|
|
3866
|
+
const queue = registry.getQueueById(queueId);
|
|
3867
|
+
if (!queue) {
|
|
3868
|
+
console.warn(\`\${LOG} Queue not found: \${queueId}\`);
|
|
3869
|
+
return NextResponse.json(
|
|
3870
|
+
{ error: \`Queue "\${queueId}" not found. Ensure workers are deployed and config is available.\` },
|
|
3871
|
+
{ status: 404 }
|
|
3872
|
+
);
|
|
3873
|
+
}
|
|
3874
|
+
|
|
3875
|
+
const result = await dispatchQueue(queueId, input as Record<string, unknown>, {
|
|
3876
|
+
registry,
|
|
3877
|
+
metadata: metadata ?? { source: 'queues-api' },
|
|
3878
|
+
...(typeof providedJobId === 'string' && providedJobId.trim() ? { jobId: providedJobId.trim() } : {}),
|
|
3879
|
+
});
|
|
3880
|
+
|
|
3881
|
+
console.log(\`\${LOG} Queue triggered\`, {
|
|
3882
|
+
queueId: result.queueId,
|
|
3883
|
+
jobId: result.jobId,
|
|
3884
|
+
messageId: result.messageId,
|
|
3885
|
+
});
|
|
3886
|
+
|
|
3887
|
+
return NextResponse.json(
|
|
3888
|
+
{
|
|
3889
|
+
jobId: result.jobId,
|
|
3890
|
+
status: result.status,
|
|
3891
|
+
messageId: result.messageId,
|
|
3892
|
+
queueId: result.queueId,
|
|
3893
|
+
queueJobUrl: \`/api/workflows/queues/\${queueId}/\${result.jobId}\`,
|
|
3894
|
+
},
|
|
3895
|
+
{ status: 200 }
|
|
3896
|
+
);
|
|
3897
|
+
} catch (error: unknown) {
|
|
3898
|
+
const err = error instanceof Error ? error : new Error(String(error));
|
|
3899
|
+
console.error(\`\${LOG} POST error:\`, err.message, err.stack);
|
|
3900
|
+
return NextResponse.json(
|
|
3901
|
+
{ error: err.message },
|
|
3902
|
+
{ status: 500 }
|
|
3903
|
+
);
|
|
3904
|
+
}
|
|
3905
|
+
}
|
|
3906
|
+
|
|
3907
|
+
export async function GET(
|
|
3908
|
+
req: NextRequest,
|
|
3909
|
+
{ params }: { params: Promise<{ slug: string[] }> }
|
|
3910
|
+
) {
|
|
3911
|
+
let slug: string[] = [];
|
|
3912
|
+
try {
|
|
3913
|
+
const { slug: slugParam } = await params;
|
|
3914
|
+
slug = slugParam ?? [];
|
|
3915
|
+
const [queueId, jobId] = slug;
|
|
3916
|
+
|
|
3917
|
+
// List: GET /api/workflows/queues or GET /api/workflows/queues?queueId=...&limit=...
|
|
3918
|
+
if (slug.length === 0 || (slug.length === 1 && !jobId)) {
|
|
3919
|
+
const { searchParams } = new URL(req.url);
|
|
3920
|
+
const filterQueueId = searchParams.get('queueId') ?? (slug[0] || undefined);
|
|
3921
|
+
const limit = Math.min(
|
|
3922
|
+
100,
|
|
3923
|
+
Math.max(1, parseInt(searchParams.get('limit') ?? '50', 10) || 50)
|
|
3924
|
+
);
|
|
3925
|
+
const jobs = await listQueueJobs(filterQueueId, limit);
|
|
3926
|
+
return NextResponse.json({ jobs });
|
|
3927
|
+
}
|
|
3928
|
+
|
|
3929
|
+
// Get one: GET /api/workflows/queues/:queueId/:jobId
|
|
3930
|
+
if (!queueId || !jobId) {
|
|
3931
|
+
return NextResponse.json(
|
|
3932
|
+
{ error: 'Queue ID and job ID are required for GET. Use GET /api/workflows/queues/:queueId/:jobId' },
|
|
3933
|
+
{ status: 400 }
|
|
3934
|
+
);
|
|
3935
|
+
}
|
|
3936
|
+
|
|
3937
|
+
const job = await getQueueJob(jobId);
|
|
3938
|
+
if (!job) {
|
|
3939
|
+
return NextResponse.json({ error: 'Queue job not found' }, { status: 404 });
|
|
3940
|
+
}
|
|
3941
|
+
if (job.queueId !== queueId) {
|
|
3942
|
+
return NextResponse.json({ error: 'Queue job does not belong to this queue' }, { status: 400 });
|
|
3943
|
+
}
|
|
3944
|
+
|
|
3945
|
+
return NextResponse.json(job);
|
|
3946
|
+
} catch (error: unknown) {
|
|
3947
|
+
const err = error instanceof Error ? error : new Error(String(error));
|
|
3948
|
+
console.error(\`\${LOG} GET error:\`, err.message);
|
|
3949
|
+
return NextResponse.json(
|
|
3950
|
+
{ error: err.message },
|
|
3951
|
+
{ status: 500 }
|
|
3952
|
+
);
|
|
3953
|
+
}
|
|
3954
|
+
}
|
|
3955
|
+
|
|
3956
|
+
async function handleQueueJobUpdate(req: NextRequest, queueId: string) {
|
|
3957
|
+
if (!queueId) {
|
|
3958
|
+
return NextResponse.json({ error: 'Queue ID is required' }, { status: 400 });
|
|
3959
|
+
}
|
|
3960
|
+
const body = await req.json();
|
|
3961
|
+
const { queueJobId, jobId, action, stepIndex, workerJobId, workerId, output, error, input } = body;
|
|
3962
|
+
const id = queueJobId ?? jobId;
|
|
3963
|
+
if (!id) {
|
|
3964
|
+
return NextResponse.json(
|
|
3965
|
+
{ error: 'queueJobId or jobId is required in request body' },
|
|
3966
|
+
{ status: 400 }
|
|
3967
|
+
);
|
|
3968
|
+
}
|
|
3969
|
+
|
|
3970
|
+
if (action === 'append') {
|
|
3971
|
+
if (!workerId || !workerJobId) {
|
|
3972
|
+
return NextResponse.json(
|
|
3973
|
+
{ error: 'append requires workerId and workerJobId' },
|
|
3974
|
+
{ status: 400 }
|
|
3975
|
+
);
|
|
3976
|
+
}
|
|
3977
|
+
await appendQueueStep(id, { workerId, workerJobId });
|
|
3978
|
+
console.log(\`\${LOG} Step appended\`, { queueJobId: id, workerId, workerJobId });
|
|
3979
|
+
return NextResponse.json({ ok: true, action: 'append' });
|
|
3980
|
+
}
|
|
3981
|
+
|
|
3982
|
+
if (action === 'start') {
|
|
3983
|
+
if (typeof stepIndex !== 'number' || !workerJobId) {
|
|
3984
|
+
return NextResponse.json(
|
|
3985
|
+
{ error: 'start requires stepIndex and workerJobId' },
|
|
3986
|
+
{ status: 400 }
|
|
3987
|
+
);
|
|
3988
|
+
}
|
|
3989
|
+
await updateQueueStep(id, stepIndex, {
|
|
3990
|
+
status: 'running',
|
|
3991
|
+
startedAt: new Date().toISOString(),
|
|
3992
|
+
...(input !== undefined && { input }),
|
|
3993
|
+
});
|
|
3994
|
+
console.log(\`\${LOG} Step started\`, { queueJobId: id, stepIndex, workerJobId });
|
|
3995
|
+
return NextResponse.json({ ok: true, action: 'start' });
|
|
3996
|
+
}
|
|
3997
|
+
|
|
3998
|
+
if (action === 'complete') {
|
|
3999
|
+
if (typeof stepIndex !== 'number' || !workerJobId) {
|
|
4000
|
+
return NextResponse.json(
|
|
4001
|
+
{ error: 'complete requires stepIndex and workerJobId' },
|
|
4002
|
+
{ status: 400 }
|
|
4003
|
+
);
|
|
4004
|
+
}
|
|
4005
|
+
await updateQueueStep(id, stepIndex, {
|
|
4006
|
+
status: 'completed',
|
|
4007
|
+
output,
|
|
4008
|
+
completedAt: new Date().toISOString(),
|
|
4009
|
+
});
|
|
4010
|
+
console.log(\`\${LOG} Step completed\`, { queueJobId: id, stepIndex, workerJobId });
|
|
4011
|
+
return NextResponse.json({ ok: true, action: 'complete' });
|
|
4012
|
+
}
|
|
4013
|
+
|
|
4014
|
+
if (action === 'fail') {
|
|
4015
|
+
if (typeof stepIndex !== 'number' || !workerJobId) {
|
|
4016
|
+
return NextResponse.json(
|
|
4017
|
+
{ error: 'fail requires stepIndex and workerJobId' },
|
|
4018
|
+
{ status: 400 }
|
|
4019
|
+
);
|
|
4020
|
+
}
|
|
4021
|
+
await updateQueueStep(id, stepIndex, {
|
|
4022
|
+
status: 'failed',
|
|
4023
|
+
error: error ?? { message: 'Unknown error' },
|
|
4024
|
+
completedAt: new Date().toISOString(),
|
|
4025
|
+
});
|
|
4026
|
+
console.log(\`\${LOG} Step failed\`, { queueJobId: id, stepIndex, workerJobId });
|
|
4027
|
+
return NextResponse.json({ ok: true, action: 'fail' });
|
|
4028
|
+
}
|
|
4029
|
+
|
|
4030
|
+
return NextResponse.json(
|
|
4031
|
+
{ error: \`Unknown action: \${action}. Use start|complete|fail|append\` },
|
|
4032
|
+
{ status: 400 }
|
|
4033
|
+
);
|
|
4034
|
+
}
|
|
4035
|
+
|
|
4036
|
+
/**
|
|
4037
|
+
* Handle webhook callback for queue completion.
|
|
4038
|
+
* POST /api/workflows/queues/:queueId/webhook
|
|
4039
|
+
*
|
|
4040
|
+
* When a webhook URL is provided at dispatch time, the worker/runtime calls this
|
|
4041
|
+
* instead of updating the job store directly. This handler updates the queue job
|
|
4042
|
+
* store with the final status (same outcome as when no webhook: store reflects completion).
|
|
4043
|
+
*/
|
|
4044
|
+
async function handleQueueWebhook(req: NextRequest, queueId: string) {
|
|
4045
|
+
try {
|
|
4046
|
+
if (!queueId) {
|
|
4047
|
+
return NextResponse.json({ error: 'Queue ID is required' }, { status: 400 });
|
|
4048
|
+
}
|
|
4049
|
+
|
|
4050
|
+
const body = await req.json();
|
|
4051
|
+
const { queueJobId, jobId, status, output, error, metadata } = body;
|
|
4052
|
+
const id = queueJobId ?? jobId;
|
|
4053
|
+
if (!id) {
|
|
4054
|
+
return NextResponse.json(
|
|
4055
|
+
{ error: 'queueJobId or jobId is required in webhook payload' },
|
|
4056
|
+
{ status: 400 }
|
|
4057
|
+
);
|
|
4058
|
+
}
|
|
4059
|
+
|
|
4060
|
+
const jobStatus = status === 'success' ? 'completed' : 'failed';
|
|
4061
|
+
|
|
4062
|
+
try {
|
|
4063
|
+
await updateQueueJob(id, {
|
|
4064
|
+
status: jobStatus,
|
|
4065
|
+
completedAt: new Date().toISOString(),
|
|
4066
|
+
});
|
|
4067
|
+
console.log(\`\${LOG} Webhook received and queue job updated:\`, {
|
|
4068
|
+
queueJobId: id,
|
|
4069
|
+
queueId,
|
|
4070
|
+
status: jobStatus,
|
|
4071
|
+
});
|
|
4072
|
+
} catch (updateError: unknown) {
|
|
4073
|
+
const err = updateError instanceof Error ? updateError : new Error(String(updateError));
|
|
4074
|
+
console.error(\`\${LOG} Failed to update queue job from webhook:\`, {
|
|
4075
|
+
queueJobId: id,
|
|
4076
|
+
queueId,
|
|
4077
|
+
error: err.message,
|
|
4078
|
+
});
|
|
4079
|
+
// Still return 200 so the caller does not retry; store update can be retried elsewhere if needed
|
|
4080
|
+
}
|
|
4081
|
+
|
|
4082
|
+
return NextResponse.json(
|
|
4083
|
+
{ message: 'Webhook received', queueId, queueJobId: id, status: jobStatus },
|
|
4084
|
+
{ status: 200 }
|
|
4085
|
+
);
|
|
4086
|
+
} catch (error: unknown) {
|
|
4087
|
+
const err = error instanceof Error ? error : new Error(String(error));
|
|
4088
|
+
console.error(\`\${LOG} Error handling queue webhook:\`, { queueId, error: err.message });
|
|
4089
|
+
return NextResponse.json(
|
|
4090
|
+
{ error: err.message },
|
|
4091
|
+
{ status: 500 }
|
|
4092
|
+
);
|
|
4093
|
+
}
|
|
4094
|
+
}
|
|
4095
|
+
`,
|
|
4096
|
+
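The queues route mirrors the workers route: a POST to /api/workflows/queues/:queueId dispatches the queue through the registry and returns a job id plus a polling URL. A minimal sketch of triggering and polling a queue (queue id, input, and base URL are illustrative; the paths and response fields follow the route above):

const base = 'http://localhost:3000'; // assumed dev server

// Trigger the queue; the route resolves the registry internally.
const res = await fetch(`${base}/api/workflows/queues/daily-digest`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ input: { date: '2024-01-01' } }),
});
const { jobId, queueJobUrl } = await res.json();

// Poll the queue job until it reaches completed, failed, or partial.
const statusRes = await fetch(`${base}${queueJobUrl}`);
const queueJob = await statusRes.json();
console.log(jobId, queueJob.status, queueJob.steps);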
"../../../hooks/useWorkflowJob.ts": `'use client';
|
|
4097
|
+
|
|
4098
|
+
import { useCallback, useEffect, useRef, useState } from 'react';
|
|
4099
|
+
|
|
4100
|
+
export type WorkflowJobStatus =
|
|
4101
|
+
| 'idle'
|
|
4102
|
+
| 'queued'
|
|
4103
|
+
| 'running'
|
|
4104
|
+
| 'completed'
|
|
4105
|
+
| 'failed'
|
|
4106
|
+
| 'partial';
|
|
4107
|
+
|
|
4108
|
+
export interface WorkerJobResult {
|
|
4109
|
+
jobId: string;
|
|
4110
|
+
workerId: string;
|
|
4111
|
+
status: string;
|
|
4112
|
+
output?: unknown;
|
|
4113
|
+
error?: { message: string; stack?: string };
|
|
4114
|
+
metadata?: Record<string, unknown>;
|
|
4115
|
+
createdAt?: string;
|
|
4116
|
+
updatedAt?: string;
|
|
4117
|
+
completedAt?: string;
|
|
4118
|
+
}
|
|
4119
|
+
|
|
4120
|
+
export interface QueueJobStep {
|
|
4121
|
+
workerId: string;
|
|
4122
|
+
workerJobId: string;
|
|
4123
|
+
status: string;
|
|
4124
|
+
input?: unknown;
|
|
4125
|
+
output?: unknown;
|
|
4126
|
+
error?: { message: string };
|
|
4127
|
+
startedAt?: string;
|
|
4128
|
+
completedAt?: string;
|
|
4129
|
+
}
|
|
4130
|
+
|
|
4131
|
+
export interface QueueJobResult {
|
|
4132
|
+
id: string;
|
|
4133
|
+
queueId: string;
|
|
4134
|
+
status: string;
|
|
4135
|
+
steps: QueueJobStep[];
|
|
4136
|
+
metadata?: Record<string, unknown>;
|
|
4137
|
+
createdAt: string;
|
|
4138
|
+
updatedAt: string;
|
|
4139
|
+
completedAt?: string;
|
|
4140
|
+
}
|
|
4141
|
+
|
|
4142
|
+
export type WorkflowJobOutput = WorkerJobResult | QueueJobResult;
|
|
4143
|
+
|
|
4144
|
+
export interface UseWorkflowJobBaseOptions {
|
|
4145
|
+
/** Base URL for API calls (default: '' for relative, or set window.location.origin) */
|
|
4146
|
+
baseUrl?: string;
|
|
4147
|
+
/** Poll interval in ms (default: 2000) */
|
|
4148
|
+
pollIntervalMs?: number;
|
|
4149
|
+
/** Stop polling after this many ms (default: 300000 = 5 min) */
|
|
4150
|
+
pollTimeoutMs?: number;
|
|
4151
|
+
/** Start polling automatically after trigger (default: true) */
|
|
4152
|
+
autoPoll?: boolean;
|
|
4153
|
+
/** Called when job reaches completed (or queue: completed/partial) */
|
|
4154
|
+
onComplete?: (result: WorkflowJobOutput) => void;
|
|
4155
|
+
/** Called when job fails or trigger/poll errors */
|
|
4156
|
+
onError?: (error: Error) => void;
|
|
4157
|
+
/** If false, trigger is a no-op and auto-poll is skipped (default: true) */
|
|
4158
|
+
enabled?: boolean;
|
|
4159
|
+
}
|
|
4160
|
+
|
|
4161
|
+
export interface UseWorkflowJobWorkerOptions extends UseWorkflowJobBaseOptions {
|
|
4162
|
+
type: 'worker';
|
|
4163
|
+
workerId: string;
|
|
4164
|
+
}
|
|
4165
|
+
|
|
4166
|
+
export interface UseWorkflowJobQueueOptions extends UseWorkflowJobBaseOptions {
|
|
4167
|
+
type: 'queue';
|
|
4168
|
+
queueId: string;
|
|
4169
|
+
/** Optional metadata for queue trigger */
|
|
4170
|
+
metadata?: Record<string, unknown>;
|
|
4171
|
+
}
|
|
4172
|
+
|
|
4173
|
+
export type UseWorkflowJobOptions =
|
|
4174
|
+
| UseWorkflowJobWorkerOptions
|
|
4175
|
+
| UseWorkflowJobQueueOptions;
|
|
4176
|
+
|
|
4177
|
+
const TERMINAL_STATUSES = ['completed', 'failed', 'partial'];
|
|
4178
|
+
|
|
4179
|
+
function getBaseUrl(baseUrl?: string): string {
|
|
4180
|
+
if (baseUrl !== undefined && baseUrl !== '') return baseUrl;
|
|
4181
|
+
if (typeof window !== 'undefined') return window.location.origin;
|
|
4182
|
+
return '';
|
|
4183
|
+
}
|
|
4184
|
+
|
|
4185
|
+
export interface UseWorkflowJobReturn {
|
|
4186
|
+
/** Trigger the worker or queue. Pass input for the job. */
|
|
4187
|
+
trigger: (input?: Record<string, unknown>) => Promise<void>;
|
|
4188
|
+
/** Current job/queue job id (after trigger) */
|
|
4189
|
+
jobId: string | null;
|
|
4190
|
+
/** Current status: idle | queued | running | completed | failed | partial */
|
|
4191
|
+
status: WorkflowJobStatus;
|
|
4192
|
+
/** Last job output (worker or queue job object) */
|
|
4193
|
+
output: WorkflowJobOutput | null;
|
|
4194
|
+
/** Error from trigger or from job failure */
|
|
4195
|
+
error: Error | null;
|
|
4196
|
+
/** True while the trigger request is in flight */
|
|
4197
|
+
loading: boolean;
|
|
4198
|
+
/** True while polling for job status */
|
|
4199
|
+
polling: boolean;
|
|
4200
|
+
/** Reset state so you can trigger again */
|
|
4201
|
+
reset: () => void;
|
|
4202
|
+
}
|
|
4203
|
+
|
|
4204
|
+
export function useWorkflowJob(
|
|
4205
|
+
options: UseWorkflowJobWorkerOptions
|
|
4206
|
+
): UseWorkflowJobReturn & { output: WorkerJobResult | null };
|
|
4207
|
+
export function useWorkflowJob(
|
|
4208
|
+
options: UseWorkflowJobQueueOptions
|
|
4209
|
+
): UseWorkflowJobReturn & { output: QueueJobResult | null };
|
|
4210
|
+
export function useWorkflowJob(
|
|
4211
|
+
options: UseWorkflowJobOptions
|
|
4212
|
+
): UseWorkflowJobReturn {
|
|
4213
|
+
const {
|
|
4214
|
+
baseUrl: baseUrlOpt,
|
|
4215
|
+
pollIntervalMs = 2000,
|
|
4216
|
+
pollTimeoutMs = 300_000,
|
|
4217
|
+
autoPoll = true,
|
|
4218
|
+
onComplete,
|
|
4219
|
+
onError,
|
|
4220
|
+
enabled = true,
|
|
4221
|
+
} = options;
|
|
4222
|
+
|
|
4223
|
+
const baseUrl = getBaseUrl(baseUrlOpt);
|
|
4224
|
+
const prefix = baseUrl ? baseUrl.replace(/\\/+$/, '') : '';
|
|
4225
|
+
const api = (path: string) => \`\${prefix}/api/workflows\${path}\`;
|
|
4226
|
+
|
|
4227
|
+
const [jobId, setJobId] = useState<string | null>(null);
|
|
4228
|
+
const [status, setStatus] = useState<WorkflowJobStatus>('idle');
|
|
4229
|
+
const [output, setOutput] = useState<WorkflowJobOutput | null>(null);
|
|
4230
|
+
const [error, setError] = useState<Error | null>(null);
|
|
4231
|
+
const [loading, setLoading] = useState(false);
|
|
4232
|
+
const [polling, setPolling] = useState(false);
|
|
4233
|
+
|
|
4234
|
+
const intervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
|
|
4235
|
+
const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
|
4236
|
+
const mountedRef = useRef(true);
|
|
4237
|
+
|
|
4238
|
+
const clearPolling = useCallback(() => {
|
|
4239
|
+
if (intervalRef.current) {
|
|
4240
|
+
clearInterval(intervalRef.current);
|
|
4241
|
+
intervalRef.current = null;
|
|
4242
|
+
}
|
|
4243
|
+
if (timeoutRef.current) {
|
|
4244
|
+
clearTimeout(timeoutRef.current);
|
|
4245
|
+
timeoutRef.current = null;
|
|
4246
|
+
}
|
|
4247
|
+
setPolling(false);
|
|
4248
|
+
}, []);
|
|
4249
|
+
|
|
4250
|
+
const reset = useCallback(() => {
|
|
4251
|
+
clearPolling();
|
|
4252
|
+
setJobId(null);
|
|
4253
|
+
setStatus('idle');
|
|
4254
|
+
setOutput(null);
|
|
4255
|
+
setError(null);
|
|
4256
|
+
setLoading(false);
|
|
4257
|
+
setPolling(false);
|
|
4258
|
+
}, [clearPolling]);
|
|
4259
|
+
|
|
4260
|
+
const trigger = useCallback(
|
|
4261
|
+
async (input?: Record<string, unknown>) => {
|
|
4262
|
+
if (!enabled) return;
|
|
4263
|
+
|
|
4264
|
+
setError(null);
|
|
4265
|
+
setOutput(null);
|
|
4266
|
+
setLoading(true);
|
|
4267
|
+
|
|
4268
|
+
try {
|
|
4269
|
+
if (options.type === 'worker') {
|
|
4270
|
+
const res = await fetch(api(\`/workers/\${options.workerId}\`), {
|
|
4271
|
+
method: 'POST',
|
|
4272
|
+
headers: { 'Content-Type': 'application/json' },
|
|
4273
|
+
body: JSON.stringify({ input: input ?? {}, await: false }),
|
|
4274
|
+
});
|
|
4275
|
+
const data = await res.json();
|
|
4276
|
+
if (!res.ok) throw new Error(data?.error ?? \`HTTP \${res.status}\`);
|
|
4277
|
+
const id = data.jobId ?? null;
|
|
4278
|
+
if (!id) throw new Error('No jobId in response');
|
|
4279
|
+
setJobId(id);
|
|
4280
|
+
setStatus('queued');
|
|
4281
|
+
setLoading(false);
|
|
4282
|
+
|
|
4283
|
+
if (autoPoll) {
|
|
4284
|
+
setPolling(true);
|
|
4285
|
+
const deadline = Date.now() + pollTimeoutMs;
|
|
4286
|
+
const poll = async () => {
|
|
4287
|
+
if (!mountedRef.current) return;
|
|
4288
|
+
try {
|
|
4289
|
+
const r = await fetch(
|
|
4290
|
+
api(\`/workers/\${options.workerId}/\${id}\`)
|
|
4291
|
+
);
|
|
4292
|
+
const job = await r.json();
|
|
4293
|
+
if (!r.ok) {
|
|
4294
|
+
if (Date.now() >= deadline) {
|
|
4295
|
+
clearPolling();
|
|
4296
|
+
const err = new Error('Poll timeout');
|
|
4297
|
+
setError(err);
|
|
4298
|
+
setStatus('failed');
|
|
4299
|
+
onError?.(err);
|
|
4300
|
+
}
|
|
4301
|
+
return;
|
|
4302
|
+
}
|
|
4303
|
+
setStatus((job.status as WorkflowJobStatus) ?? 'running');
|
|
4304
|
+
setOutput(job as WorkerJobResult);
|
|
4305
|
+
if (job.status === 'completed') {
|
|
4306
|
+
clearPolling();
|
|
4307
|
+
onComplete?.(job as WorkerJobResult);
|
|
4308
|
+
} else if (job.status === 'failed') {
|
|
4309
|
+
clearPolling();
|
|
4310
|
+
const err = new Error(
|
|
4311
|
+
job?.error?.message ?? 'Job failed'
|
|
4312
|
+
);
|
|
4313
|
+
setError(err);
|
|
4314
|
+
setStatus('failed');
|
|
4315
|
+
onError?.(err);
|
|
4316
|
+
} else if (Date.now() >= deadline) {
|
|
4317
|
+
clearPolling();
|
|
4318
|
+
const err = new Error('Poll timeout');
|
|
4319
|
+
setError(err);
|
|
4320
|
+
onError?.(err);
|
|
4321
|
+
}
|
|
4322
|
+
} catch (e) {
|
|
4323
|
+
if (mountedRef.current) {
|
|
4324
|
+
clearPolling();
|
|
4325
|
+
const err = e instanceof Error ? e : new Error(String(e));
|
|
4326
|
+
setError(err);
|
|
4327
|
+
setStatus('failed');
|
|
4328
|
+
onError?.(err);
|
|
4329
|
+
}
|
|
4330
|
+
}
|
|
4331
|
+
};
|
|
4332
|
+
await poll();
|
|
4333
|
+
intervalRef.current = setInterval(poll, pollIntervalMs);
|
|
4334
|
+
timeoutRef.current = setTimeout(() => {
|
|
4335
|
+
clearPolling();
|
|
4336
|
+
setError(new Error('Poll timeout'));
|
|
4337
|
+
setStatus('failed');
|
|
4338
|
+
}, pollTimeoutMs);
|
|
4339
|
+
}
|
|
4340
|
+
} else {
|
|
4341
|
+
const body: Record<string, unknown> = {
|
|
4342
|
+
input: input ?? {},
|
|
4343
|
+
};
|
|
4344
|
+
if (options.metadata) body.metadata = options.metadata;
|
|
4345
|
+
const res = await fetch(api(\`/queues/\${options.queueId}\`), {
|
|
4346
|
+
method: 'POST',
|
|
4347
|
+
headers: { 'Content-Type': 'application/json' },
|
|
4348
|
+
body: JSON.stringify(body),
|
|
4349
|
+
});
|
|
4350
|
+
const data = await res.json();
|
|
4351
|
+
if (!res.ok) throw new Error(data?.error ?? \`HTTP \${res.status}\`);
|
|
4352
|
+
const id = data.jobId ?? null;
|
|
4353
|
+
if (!id) throw new Error('No jobId in response');
|
|
4354
|
+
setJobId(id);
|
|
4355
|
+
setStatus('queued');
|
|
4356
|
+
setLoading(false);
|
|
4357
|
+
|
|
4358
|
+
if (autoPoll) {
|
|
4359
|
+
setPolling(true);
|
|
4360
|
+
const deadline = Date.now() + pollTimeoutMs;
|
|
4361
|
+
const poll = async () => {
|
|
4362
|
+
if (!mountedRef.current) return;
|
|
4363
|
+
try {
|
|
4364
|
+
const r = await fetch(
|
|
4365
|
+
api(\`/queues/\${options.queueId}/\${id}\`)
|
|
4366
|
+
);
|
|
4367
|
+
const job = await r.json();
|
|
4368
|
+
if (!r.ok) {
|
|
4369
|
+
if (Date.now() >= deadline) {
|
|
4370
|
+
clearPolling();
|
|
4371
|
+
setError(new Error('Poll timeout'));
|
|
4372
|
+
setStatus('failed');
|
|
4373
|
+
}
|
|
4374
|
+
return;
|
|
4375
|
+
}
|
|
4376
|
+
const st = (job.status as string) ?? 'running';
|
|
4377
|
+
setStatus(st as WorkflowJobStatus);
|
|
4378
|
+
setOutput(job as QueueJobResult);
|
|
4379
|
+
if (TERMINAL_STATUSES.includes(st)) {
|
|
4380
|
+
clearPolling();
|
|
4381
|
+
onComplete?.(job as QueueJobResult);
|
|
4382
|
+
if (st === 'failed') {
|
|
4383
|
+
setError(new Error('Queue job failed'));
|
|
4384
|
+
onError?.(new Error('Queue job failed'));
|
|
4385
|
+
}
|
|
4386
|
+
} else if (Date.now() >= deadline) {
|
|
4387
|
+
clearPolling();
|
|
4388
|
+
setError(new Error('Poll timeout'));
|
|
4389
|
+
setStatus('failed');
|
|
4390
|
+
}
|
|
4391
|
+
} catch (e) {
|
|
4392
|
+
if (mountedRef.current) {
|
|
4393
|
+
clearPolling();
|
|
4394
|
+
const err = e instanceof Error ? e : new Error(String(e));
|
|
4395
|
+
setError(err);
|
|
4396
|
+
setStatus('failed');
|
|
4397
|
+
onError?.(err);
|
|
4398
|
+
}
|
|
4399
|
+
}
|
|
4400
|
+
};
|
|
4401
|
+
await poll();
|
|
4402
|
+
intervalRef.current = setInterval(poll, pollIntervalMs);
|
|
4403
|
+
timeoutRef.current = setTimeout(() => {
|
|
4404
|
+
clearPolling();
|
|
4405
|
+
setError(new Error('Poll timeout'));
|
|
4406
|
+
setStatus('failed');
|
|
4407
|
+
}, pollTimeoutMs);
|
|
4408
|
+
}
|
|
4409
|
+
}
|
|
4410
|
+
} catch (e) {
|
|
4411
|
+
const err = e instanceof Error ? e : new Error(String(e));
|
|
4412
|
+
setError(err);
|
|
4413
|
+
setStatus('failed');
|
|
4414
|
+
setLoading(false);
|
|
4415
|
+
onError?.(err);
|
|
4416
|
+
}
|
|
4417
|
+
},
|
|
4418
|
+
[
|
|
4419
|
+
enabled,
|
|
4420
|
+
options,
|
|
4421
|
+
api,
|
|
4422
|
+
autoPoll,
|
|
4423
|
+
pollIntervalMs,
|
|
4424
|
+
pollTimeoutMs,
|
|
4425
|
+
onComplete,
|
|
4426
|
+
onError,
|
|
4427
|
+
clearPolling,
|
|
4428
|
+
]
|
|
4429
|
+
);
|
|
4430
|
+
|
|
4431
|
+
useEffect(() => {
|
|
4432
|
+
mountedRef.current = true;
|
|
4433
|
+
return () => {
|
|
4434
|
+
mountedRef.current = false;
|
|
4435
|
+
clearPolling();
|
|
4436
|
+
};
|
|
4437
|
+
}, [clearPolling]);
|
|
4438
|
+
|
|
4439
|
+
return {
|
|
4440
|
+
trigger,
|
|
4441
|
+
jobId,
|
|
4442
|
+
status,
|
|
4443
|
+
output,
|
|
4444
|
+
error,
|
|
4445
|
+
loading,
|
|
4446
|
+
polling,
|
|
4447
|
+
reset,
|
|
4448
|
+
};
|
|
4449
|
+
}
|
|
4450
|
+
`
|
|
4451
|
+
};
|
|
4452
|
+
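The generated useWorkflowJob hook wraps this trigger-and-poll loop for client components. A minimal usage sketch (the worker id, input, and the '@/hooks' import alias are assumptions; the hook API matches the template above):

'use client';

import { useWorkflowJob } from '@/hooks/useWorkflowJob';

export function RunWorkerButton() {
  const { trigger, status, output, error, loading, polling } = useWorkflowJob({
    type: 'worker',
    workerId: 'report-builder',
    onComplete: (job) => console.log('worker finished', job),
  });

  return (
    <div>
      <button disabled={loading || polling} onClick={() => trigger({ topic: 'weekly summary' })}>
        Run worker
      </button>
      <p>status: {status}</p>
      {error ? <p>error: {error.message}</p> : null}
      {output ? <pre>{JSON.stringify(output, null, 2)}</pre> : null}
    </div>
  );
}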
+var WORKFLOW_SETTINGS_SNIPPET = ` // Workflow + worker runtime configuration (job store, etc.)
+  workflowSettings: {
+    jobStore: {
+      // 'mongodb' | 'upstash-redis'
+      type:
+        (process.env.WORKER_DATABASE_TYPE as
+          | 'mongodb'
+          | 'upstash-redis') || 'upstash-redis',
+      mongodb: {
+        uri: process.env.DATABASE_MONGODB_URI || process.env.MONGODB_URI,
+        db:
+          process.env.DATABASE_MONGODB_DB ||
+          process.env.MONGODB_DB ||
+          'ai_router',
+        workerJobsCollection:
+          process.env.MONGODB_WORKER_JOBS_COLLECTION || 'worker_jobs',
+        workflowStatusCollection:
+          process.env.MONGODB_WORKFLOW_STATUS_COLLECTION || 'workflow_status',
+      },
+      redis: {
+        url:
+          process.env.WORKER_UPSTASH_REDIS_REST_URL ||
+          process.env.UPSTASH_REDIS_REST_URL,
+        token:
+          process.env.WORKER_UPSTASH_REDIS_REST_TOKEN ||
+          process.env.UPSTASH_REDIS_REST_TOKEN,
+        keyPrefix:
+          process.env.WORKER_UPSTASH_REDIS_JOBS_PREFIX ||
+          'worker:jobs:',
+        ttlSeconds:
+          Number(process.env.WORKER_JOBS_TTL_SECONDS ?? 60 * 60 * 24 * 7),
+      },
+    },
+  },`;
+function writeFile(filePath, content, force) {
+  if (fs3.existsSync(filePath) && !force) {
+    return false;
+  }
+  const dir = path3.dirname(filePath);
+  if (!fs3.existsSync(dir)) {
+    fs3.mkdirSync(dir, { recursive: true });
+  }
+  fs3.writeFileSync(filePath, content, "utf-8");
+  return true;
+}
+function mergeMicrofoxConfig(configPath, force) {
+  if (!fs3.existsSync(configPath)) {
+    const content2 = `export const StudioConfig = {
+  appName: 'My App',
+  projectInfo: {
+    framework: 'next-js',
+  },
+  studioSettings: {
+    protection: {
+      enabled: false,
+    },
+    database: {
+      type: 'local',
+    },
+  },
+${WORKFLOW_SETTINGS_SNIPPET}
+};
+`;
+    fs3.writeFileSync(configPath, content2, "utf-8");
+    return true;
+  }
+  const content = fs3.readFileSync(configPath, "utf-8");
+  if (content.includes("workflowSettings")) {
+    if (!force) {
+      return false;
+    }
+    return false;
+  }
+  const lines = content.split("\n");
+  let insertIndex = -1;
+  let braceCount = 0;
+  let inStudioConfig = false;
+  for (let i = 0; i < lines.length; i++) {
+    const line = lines[i];
+    if (line.includes("StudioConfig") && line.includes("=")) {
+      inStudioConfig = true;
+    }
+    if (inStudioConfig) {
+      const openBraces = (line.match(/{/g) || []).length;
+      const closeBraces = (line.match(/}/g) || []).length;
+      braceCount += openBraces - closeBraces;
+      if (braceCount === 0 && closeBraces > 0 && insertIndex === -1) {
+        insertIndex = i;
+        break;
+      }
+    }
+  }
+  if (insertIndex === -1) {
+    const lastBrace = content.lastIndexOf("}");
+    if (lastBrace !== -1) {
+      const before = content.slice(0, lastBrace);
+      const after = content.slice(lastBrace);
+      const newContent = before + ",\n" + WORKFLOW_SETTINGS_SNIPPET + "\n" + after;
+      fs3.writeFileSync(configPath, newContent, "utf-8");
+      return true;
+    }
+    return false;
+  }
+  const indent = lines[insertIndex].match(/^(\s*)/)?.[1] || " ";
+  const workflowLines = WORKFLOW_SETTINGS_SNIPPET.split("\n").map((l, idx) => {
+    if (idx === 0) return indent + l;
+    return indent + l;
+  });
+  lines.splice(insertIndex, 0, ...workflowLines);
+  fs3.writeFileSync(configPath, lines.join("\n"), "utf-8");
+  return true;
+}
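mergeMicrofoxConfig either scaffolds a fresh microfox.config.ts or splices WORKFLOW_SETTINGS_SNIPPET into an existing StudioConfig just before its closing brace (located by brace counting); files that already contain workflowSettings are left untouched. A sketch of the merged result for a hypothetical minimal config (the appName and the abbreviated jobStore body are illustrative):

export const StudioConfig = {
  appName: 'My App',
  // Workflow + worker runtime configuration (job store, etc.)
  workflowSettings: {
    jobStore: {
      // 'mongodb' | 'upstash-redis'
      type: (process.env.WORKER_DATABASE_TYPE as 'mongodb' | 'upstash-redis') || 'upstash-redis',
      // ...mongodb and redis connection settings as in WORKFLOW_SETTINGS_SNIPPET above...
    },
  },
};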
+var boilerplateCommand = new Command3().name("boilerplate").description("Create or update worker boilerplate files (job store, API routes, config)").option("--force", "Overwrite existing files", false).option("--app-dir <path>", "App directory path (default: app)", "app").option("--skip-config", "Skip microfox.config.ts updates", false).action((options) => {
+  const spinner = ora3("Creating boilerplate files...").start();
+  try {
+    const projectRoot = process.cwd();
+    const appDir = options.appDir || "app";
+    const apiDir = path3.join(appDir, "api", "workflows");
+    const force = options.force || false;
+    const skipConfig = options.skipConfig || false;
+    const filesCreated = [];
+    const filesSkipped = [];
+    for (const [relativePath, template] of Object.entries(TEMPLATES)) {
+      const filePath = path3.normalize(path3.join(projectRoot, apiDir, relativePath));
+      const written = writeFile(filePath, template, force);
+      if (written) {
+        filesCreated.push(path3.relative(projectRoot, filePath));
+      } else {
+        filesSkipped.push(path3.relative(projectRoot, filePath));
+      }
+    }
+    let configUpdated = false;
+    if (!skipConfig) {
+      const configPath = path3.join(projectRoot, "microfox.config.ts");
+      configUpdated = mergeMicrofoxConfig(configPath, force);
+      if (configUpdated) {
+        filesCreated.push("microfox.config.ts");
+      } else if (fs3.existsSync(configPath)) {
+        filesSkipped.push("microfox.config.ts");
+      }
+    }
+    spinner.succeed("Boilerplate files created");
+    if (filesCreated.length > 0) {
+      console.log(chalk3.green("\n\u2713 Created files:"));
+      filesCreated.forEach((f) => console.log(chalk3.gray(` - ${f}`)));
+    }
+    if (filesSkipped.length > 0) {
+      console.log(chalk3.yellow("\n\u26A0 Skipped existing files (use --force to overwrite):"));
+      filesSkipped.forEach((f) => console.log(chalk3.gray(` - ${f}`)));
+    }
+    console.log(
+      chalk3.blue(
+        `
+\u{1F4DA} Next steps:
+1. Configure your job store in microfox.config.ts (workflowSettings.jobStore)
+2. Set environment variables (MONGODB_URI or UPSTASH_REDIS_*)
+3. Create your first worker: ${chalk3.yellow("npx ai-worker new <worker-id>")}
+4. Deploy workers: ${chalk3.yellow("npx ai-worker push")}
+5. Use ${chalk3.yellow("hooks/useWorkflowJob.ts")} in client components to trigger and poll workers/queues`
+      )
+    );
+  } catch (error) {
+    spinner.fail("Failed to create boilerplate files");
+    console.error(chalk3.red(error?.stack || error?.message || String(error)));
+    process.exitCode = 1;
+  }
+});
+
 // src/index.ts
-var
-
+var __filename = fileURLToPath(import.meta.url);
+var __dirname = dirname3(__filename);
+var packageJsonPath = join4(__dirname, "..", "package.json");
+var packageJson = JSON.parse(readFileSync3(packageJsonPath, "utf-8"));
+var version = packageJson.version || "1.0.0";
+var program = new Command4();
+program.name("ai-worker").description("CLI tooling for deploying ai-router background workers").version(version);
 program.addCommand(pushCommand);
+program.addCommand(newCommand);
+program.addCommand(boilerplateCommand);
 program.parse(process.argv);
 var aiWorkerCli = program;
 export {