@microfox/ai-worker 1.0.2 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/README.md +1 -2
- package/dist/{chunk-BJPQY2NJ.mjs → chunk-72XGFZCE.mjs} +61 -84
- package/dist/chunk-72XGFZCE.mjs.map +1 -0
- package/dist/{chunk-4WU5ZCHS.mjs → chunk-7LQNS2SG.mjs} +78 -334
- package/dist/chunk-7LQNS2SG.mjs.map +1 -0
- package/dist/chunk-AOXGONGI.mjs +351 -0
- package/dist/chunk-AOXGONGI.mjs.map +1 -0
- package/dist/{client-D25XR0V8.d.mts → client-BqSJQ9mZ.d.mts} +34 -18
- package/dist/{client-D25XR0V8.d.ts → client-BqSJQ9mZ.d.ts} +34 -18
- package/dist/client.d.mts +1 -1
- package/dist/client.d.ts +1 -1
- package/dist/client.js +62 -83
- package/dist/client.js.map +1 -1
- package/dist/client.mjs +5 -1
- package/dist/handler.d.mts +32 -2
- package/dist/handler.d.ts +32 -2
- package/dist/handler.js +73 -24
- package/dist/handler.js.map +1 -1
- package/dist/handler.mjs +4 -1
- package/dist/index.d.mts +3 -3
- package/dist/index.d.ts +3 -3
- package/dist/index.js +136 -107
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +10 -2
- package/dist/index.mjs.map +1 -1
- package/dist/queueJobStore.d.mts +53 -0
- package/dist/queueJobStore.d.ts +53 -0
- package/dist/queueJobStore.js +378 -0
- package/dist/queueJobStore.js.map +1 -0
- package/dist/queueJobStore.mjs +14 -0
- package/dist/queueJobStore.mjs.map +1 -0
- package/package.json +7 -2
- package/dist/chunk-4WU5ZCHS.mjs.map +0 -1
- package/dist/chunk-BJPQY2NJ.mjs.map +0 -1
package/CHANGELOG.md
CHANGED
package/README.md
CHANGED
@@ -103,8 +103,7 @@ npx @microfox/ai-worker-cli@latest push
 ### Environment Variables
 
 **Required for Next.js:**
-- `WORKER_BASE_URL` - Base URL of your workers service (server-side). We append `/workers/trigger` and `/workers/config` internally when needed (e.g. `https://.../prod`).
-- `NEXT_PUBLIC_WORKER_BASE_URL` - Same as `WORKER_BASE_URL`, but exposed to the browser (use this if you call `dispatch()` from client-side code).
+- `WORKER_BASE_URL` - Base URL of your workers service (server-side only). We append `/workers/trigger` and `/workers/config` internally when needed (e.g. `https://.../prod`). For client-side, use `useWorkflowJob` which calls your app's `/api/workflows/*` routes.
 - `WORKERS_TRIGGER_API_KEY` - Optional API key for trigger authentication (sent as `x-workers-trigger-key`)
 
 **Required for Lambda (set via deploy script):**
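For context on the README change above: dispatching now reads `WORKER_BASE_URL` on the server only. The sketch below shows the intended server-side call from a Next.js route handler; it is illustrative only — the worker id `echo` comes from the package's own JSDoc examples, and the root import path is an assumption (this diff shows compiled chunks, not the export map). Client-side code would instead go through the app's `/api/workflows/*` routes via `useWorkflowJob`, per the updated bullet.

```ts
// Minimal sketch (assumed import path; 'echo' is an example worker id).
// Server-side env: WORKER_BASE_URL=https://.../prod   (never NEXT_PUBLIC_*)
// Optional:        WORKERS_TRIGGER_API_KEY=...        (sent as x-workers-trigger-key)
import { dispatchWorker } from '@microfox/ai-worker';

export async function POST(req: Request) {
  const { text } = await req.json();
  // POSTs to `${WORKER_BASE_URL}/workers/trigger` and returns a queued-job handle.
  const { jobId, messageId, status } = await dispatchWorker('echo', { text });
  return Response.json({ jobId, messageId, status });
}
```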
package/dist/{chunk-BJPQY2NJ.mjs → chunk-72XGFZCE.mjs}
CHANGED
@@ -1,9 +1,9 @@
 // src/client.ts
 function getWorkersTriggerUrl() {
-  const raw = process.env.WORKER_BASE_URL || process.env.
+  const raw = process.env.WORKER_BASE_URL || process.env.WORKERS_TRIGGER_API_URL || process.env.WORKERS_CONFIG_API_URL;
   if (!raw) {
     throw new Error(
-      "WORKER_BASE_URL
+      "WORKER_BASE_URL is required for background workers. Set it server-side only."
     );
   }
   const url = new URL(raw);
@@ -15,6 +15,23 @@ function getWorkersTriggerUrl() {
   url.pathname = `${basePath}/workers/trigger`.replace(/\/+$/, "");
   return url.toString();
 }
+function getQueueStartUrl(queueId) {
+  const raw = process.env.WORKER_BASE_URL || process.env.WORKERS_TRIGGER_API_URL || process.env.WORKERS_CONFIG_API_URL;
+  if (!raw) {
+    throw new Error(
+      "WORKER_BASE_URL is required for background workers. Set it server-side only."
+    );
+  }
+  const url = new URL(raw);
+  url.search = "";
+  url.hash = "";
+  const path = url.pathname || "";
+  url.pathname = path.replace(/\/?workers\/(trigger|config)\/?$/, "");
+  const basePath = url.pathname.replace(/\/+$/, "");
+  const safeSegment = encodeURIComponent(queueId);
+  url.pathname = `${basePath}/queues/${safeSegment}/start`.replace(/\/+$/, "");
+  return url.toString();
+}
 function serializeContext(ctx) {
   const serialized = {};
   if (ctx.requestId) {
@@ -72,115 +89,75 @@ async function dispatch(workerId, input, inputSchema, options, ctx) {
     jobId
   };
 }
-async function
-  return handler({ input, ctx: ctx || {} });
-}
-async function dispatchQueue(queueId, initialInput, options = {}, ctx) {
-  const registry = options.registry;
-  if (!registry?.getQueueById) {
-    throw new Error(
-      "dispatchQueue requires options.registry with getQueueById. Use getQueueRegistry() from your workflows registry (e.g. app/api/workflows/registry/workers) and pass { registry: await getQueueRegistry() }."
-    );
-  }
-  const { getQueueById, invokeMapInput } = registry;
-  const queue = getQueueById(queueId);
-  if (!queue) {
-    throw new Error(`Worker queue "${queueId}" not found in registry`);
-  }
-  if (!queue.steps || queue.steps.length === 0) {
-    throw new Error(`Worker queue "${queueId}" has no steps defined`);
-  }
-  const stepIndex = 0;
-  const firstStep = queue.steps[stepIndex];
-  const firstWorkerId = firstStep.workerId;
-  if (!firstWorkerId) {
-    throw new Error(
-      `Worker queue "${queueId}" has an invalid first step (missing workerId)`
-    );
-  }
-  let firstInput = initialInput;
-  if (firstStep.mapInputFromPrev && typeof invokeMapInput === "function") {
-    firstInput = await invokeMapInput(
-      queueId,
-      stepIndex,
-      void 0,
-      initialInput
-    );
-  }
+async function dispatchWorker(workerId, input, options = {}, ctx) {
   const jobId = options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
-  const queueContext = {
-    id: queueId,
-    stepIndex,
-    initialInput,
-    queueJobId: jobId
-  };
-  if (options.onCreateQueueJob) {
-    try {
-      await options.onCreateQueueJob({
-        queueJobId: jobId,
-        queueId,
-        firstStep: { workerId: firstWorkerId, workerJobId: jobId },
-        metadata: options.metadata
-      });
-    } catch (err) {
-      console.warn("[dispatchQueue] onCreateQueueJob failed:", err?.message ?? err);
-    }
-  }
-  const normalizedFirstInput = firstInput !== null && typeof firstInput === "object" ? firstInput : { value: firstInput };
-  const inputWithQueue = {
-    ...normalizedFirstInput,
-    __workerQueue: queueContext
-  };
-  const metadataWithQueue = {
-    ...options.metadata || {},
-    __workerQueue: queueContext
-  };
   const triggerUrl = getWorkersTriggerUrl();
   const serializedContext = ctx ? serializeContext(ctx) : {};
   const messageBody = {
-    workerId
+    workerId,
     jobId,
-    input:
+    input: input ?? {},
     context: serializedContext,
     webhookUrl: options.webhookUrl,
-    metadata:
+    metadata: options.metadata || {},
     timestamp: (/* @__PURE__ */ new Date()).toISOString()
   };
-  const headers = {
-    "Content-Type": "application/json"
-  };
+  const headers = { "Content-Type": "application/json" };
   const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
-  if (triggerKey) {
-    headers["x-workers-trigger-key"] = triggerKey;
-  }
+  if (triggerKey) headers["x-workers-trigger-key"] = triggerKey;
   const response = await fetch(triggerUrl, {
+    method: "POST",
+    headers,
+    body: JSON.stringify({ workerId, body: messageBody })
+  });
+  if (!response.ok) {
+    const text = await response.text().catch(() => "");
+    throw new Error(
+      `Failed to trigger worker "${workerId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
+    );
+  }
+  const data = await response.json().catch(() => ({}));
+  const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;
+  return { messageId, status: "queued", jobId };
+}
+async function dispatchLocal(handler, input, ctx) {
+  return handler({ input, ctx: ctx || {} });
+}
+async function dispatchQueue(queueId, initialInput, options = {}, _ctx) {
+  const jobId = options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+  const queueStartUrl = getQueueStartUrl(queueId);
+  const normalizedInput = initialInput !== null && typeof initialInput === "object" ? initialInput : { value: initialInput };
+  const headers = { "Content-Type": "application/json" };
+  const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;
+  if (triggerKey) headers["x-workers-trigger-key"] = triggerKey;
+  const response = await fetch(queueStartUrl, {
     method: "POST",
     headers,
     body: JSON.stringify({
-
-
+      input: normalizedInput,
+      initialInput: normalizedInput,
+      metadata: options.metadata ?? {},
+      jobId,
+      ...options.webhookUrl ? { webhookUrl: options.webhookUrl } : {}
     })
   });
   if (!response.ok) {
     const text = await response.text().catch(() => "");
     throw new Error(
-      `Failed to
+      `Failed to start queue "${queueId}": ${response.status} ${response.statusText}${text ? ` - ${text}` : ""}`
     );
   }
   const data = await response.json().catch(() => ({}));
-  const messageId = data?.messageId
-  return {
-    queueId,
-    messageId,
-    status: "queued",
-    jobId
-  };
+  const messageId = data?.messageId ?? data?.jobId ?? `queue-${jobId}`;
+  return { queueId, messageId, status: "queued", jobId };
 }
 
 export {
   getWorkersTriggerUrl,
+  getQueueStartUrl,
   dispatch,
+  dispatchWorker,
   dispatchLocal,
   dispatchQueue
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-72XGFZCE.mjs.map
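To make the path normalization in the new `getQueueStartUrl` concrete, here is an illustrative trace; the expected results are worked out by hand from the regexes in the hunks above, and the `@microfox/ai-worker/client` subpath is an assumption based on the `dist/client.*` files in this release.

```ts
// Illustration only: expected outputs derived by hand from the helpers shown above.
import { getWorkersTriggerUrl, getQueueStartUrl } from '@microfox/ai-worker/client';

process.env.WORKER_BASE_URL = 'https://example.com/prod';
getWorkersTriggerUrl();              // 'https://example.com/prod/workers/trigger'
getQueueStartUrl('video-pipeline');  // 'https://example.com/prod/queues/video-pipeline/start'

// A legacy value that points at a specific endpoint is normalized back to the service root
// (query string and hash are dropped, a trailing /workers/trigger or /workers/config is stripped):
process.env.WORKER_BASE_URL = 'https://example.com/prod/workers/config?stage=live';
getQueueStartUrl('video-pipeline');  // 'https://example.com/prod/queues/video-pipeline/start'
```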
package/dist/chunk-72XGFZCE.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/client.ts"],"sourcesContent":["/**\n * Client for dispatching background worker jobs.\n *\n * In production, dispatching happens via the workers HTTP API:\n * POST /workers/trigger -> enqueues message to SQS on the workers service side\n *\n * This avoids requiring AWS credentials in your Next.js app.\n */\n\nimport type { ZodType, z } from 'zod';\nimport type { WorkerQueueConfig } from './queue.js';\n\nexport interface WorkerQueueRegistry {\n getQueueById(queueId: string): WorkerQueueConfig | undefined;\n /** (initialInput, previousOutputs) for best DX: derive next input from original request and all prior step outputs. */\n invokeMapInput?: (\n queueId: string,\n stepIndex: number,\n initialInput: unknown,\n previousOutputs: Array<{ stepIndex: number; workerId: string; output: unknown }>\n ) => Promise<unknown> | unknown;\n}\n\nexport interface DispatchOptions {\n /**\n * Optional webhook callback URL to notify when the job finishes.\n * Only called when provided. Default: no webhook (use job store / MongoDB only).\n */\n webhookUrl?: string;\n /**\n * Controls how dispatch executes.\n * - \"auto\" (default): local inline execution in development unless WORKERS_LOCAL_MODE=false.\n * - \"local\": force inline execution (no SQS).\n * - \"remote\": force SQS/Lambda dispatch even in development.\n */\n mode?: 'auto' | 'local' | 'remote';\n jobId?: string;\n metadata?: Record<string, any>;\n /**\n * In-memory queue registry for dispatchQueue. Required when using dispatchQueue.\n * Pass a registry that imports from your .queue.ts definitions (works on Vercel/serverless).\n */\n registry?: WorkerQueueRegistry;\n /**\n * Optional callback to create a queue job record before dispatching.\n * Called with queueJobId (= first worker's jobId), queueId, and firstStep.\n */\n onCreateQueueJob?: (params: {\n queueJobId: string;\n queueId: string;\n firstStep: { workerId: string; workerJobId: string };\n metadata?: Record<string, unknown>;\n }) => Promise<void>;\n}\n\nexport interface DispatchResult {\n messageId: string;\n status: 'queued';\n jobId: string;\n}\n\nexport interface DispatchQueueResult extends DispatchResult {\n queueId: string;\n}\n\nexport interface SerializedContext {\n requestId?: string;\n userId?: string;\n traceId?: string;\n [key: string]: any;\n}\n\n/**\n * Derives the full /workers/trigger URL from env.\n * Exported for use by local dispatchWorker (worker-to-worker in dev).\n * Server-side only; clients should use useWorkflowJob with your app's /api/workflows routes.\n *\n * Env vars:\n * - WORKER_BASE_URL: base URL of the workers service (e.g. https://.../prod)\n * - WORKERS_TRIGGER_API_URL / WORKERS_CONFIG_API_URL: legacy, still supported\n */\nexport function getWorkersTriggerUrl(): string {\n const raw =\n process.env.WORKER_BASE_URL ||\n process.env.WORKERS_TRIGGER_API_URL ||\n process.env.WORKERS_CONFIG_API_URL;\n\n if (!raw) {\n throw new Error(\n 'WORKER_BASE_URL is required for background workers. Set it server-side only.'\n );\n }\n\n const url = new URL(raw);\n url.search = '';\n url.hash = '';\n\n const path = url.pathname || '';\n\n // If the user pointed at a specific endpoint, normalize back to the service root.\n url.pathname = path.replace(/\\/?workers\\/(trigger|config)\\/?$/, '');\n\n const basePath = url.pathname.replace(/\\/+$/, '');\n url.pathname = `${basePath}/workers/trigger`.replace(/\\/+$/, '');\n\n return url.toString();\n}\n\n/**\n * URL for the queue start endpoint (dispatch proxy). 
Use this so queue starts\n * go through the queue handler Lambda for easier debugging (one log stream per queue).\n */\nexport function getQueueStartUrl(queueId: string): string {\n const raw =\n process.env.WORKER_BASE_URL ||\n process.env.WORKERS_TRIGGER_API_URL ||\n process.env.WORKERS_CONFIG_API_URL;\n\n if (!raw) {\n throw new Error(\n 'WORKER_BASE_URL is required for background workers. Set it server-side only.'\n );\n }\n\n const url = new URL(raw);\n url.search = '';\n url.hash = '';\n\n const path = url.pathname || '';\n url.pathname = path.replace(/\\/?workers\\/(trigger|config)\\/?$/, '');\n const basePath = url.pathname.replace(/\\/+$/, '');\n const safeSegment = encodeURIComponent(queueId);\n url.pathname = `${basePath}/queues/${safeSegment}/start`.replace(/\\/+$/, '');\n\n return url.toString();\n}\n\n/**\n * Serializes context data for transmission to Lambda.\n * Only serializes safe, JSON-compatible properties.\n */\nfunction serializeContext(ctx: any): SerializedContext {\n const serialized: SerializedContext = {};\n\n if (ctx.requestId) {\n serialized.requestId = ctx.requestId;\n }\n\n // Extract any additional serializable metadata\n if (ctx.metadata && typeof ctx.metadata === 'object') {\n Object.assign(serialized, ctx.metadata);\n }\n\n // Allow custom context serialization via a helper property\n if (ctx._serializeContext && typeof ctx._serializeContext === 'function') {\n const custom = ctx._serializeContext();\n Object.assign(serialized, custom);\n }\n\n return serialized;\n}\n\n\n/**\n * Dispatches a background worker job to SQS.\n *\n * @param workerId - The ID of the worker to dispatch\n * @param input - The input data for the worker (will be validated against inputSchema)\n * @param inputSchema - Zod schema for input validation\n * @param options - Dispatch options including webhook URL\n * @param ctx - Optional context object (only serializable parts will be sent)\n * @returns Promise resolving to dispatch result with messageId and jobId\n */\nexport async function dispatch<INPUT_SCHEMA extends ZodType<any>>(\n workerId: string,\n input: z.input<INPUT_SCHEMA>,\n inputSchema: INPUT_SCHEMA,\n options: DispatchOptions,\n ctx?: any\n): Promise<DispatchResult> {\n // Validate input against schema\n const validatedInput = inputSchema.parse(input);\n\n // Generate job ID if not provided\n const jobId =\n options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n\n // Resolve /workers/trigger endpoint URL\n const triggerUrl = getWorkersTriggerUrl();\n\n // Serialize context (only safe, JSON-compatible parts)\n const serializedContext = ctx ? serializeContext(ctx) : {};\n\n // Job updates use MongoDB only; never pass jobStoreUrl/origin URL.\n const messageBody = {\n workerId,\n jobId,\n input: validatedInput,\n context: serializedContext,\n webhookUrl: options.webhookUrl,\n metadata: options.metadata || {},\n timestamp: new Date().toISOString(),\n };\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n };\n const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (triggerKey) {\n headers['x-workers-trigger-key'] = triggerKey;\n }\n\n const response = await fetch(triggerUrl, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n workerId,\n body: messageBody,\n }),\n });\n\n if (!response.ok) {\n const text = await response.text().catch(() => '');\n throw new Error(\n `Failed to trigger worker \"${workerId}\": ${response.status} ${response.statusText}${text ? 
` - ${text}` : ''}`\n );\n }\n\n const data = (await response.json().catch(() => ({}))) as any;\n const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;\n\n return {\n messageId,\n status: 'queued',\n jobId,\n };\n}\n\n/**\n * Dispatch a worker by ID without importing the worker module.\n * Sends to the workers trigger API (WORKER_BASE_URL). No input schema validation at call site.\n *\n * @param workerId - The worker ID (e.g. 'echo', 'data-processor')\n * @param input - Input payload (object or undefined)\n * @param options - Optional jobId, webhookUrl, metadata\n * @param ctx - Optional context (serializable parts sent in the request)\n * @returns Promise resolving to { messageId, status: 'queued', jobId }\n */\nexport async function dispatchWorker(\n workerId: string,\n input?: Record<string, unknown>,\n options: DispatchOptions = {},\n ctx?: any\n): Promise<DispatchResult> {\n const jobId =\n options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n const triggerUrl = getWorkersTriggerUrl();\n const serializedContext = ctx ? serializeContext(ctx) : {};\n const messageBody = {\n workerId,\n jobId,\n input: input ?? {},\n context: serializedContext,\n webhookUrl: options.webhookUrl,\n metadata: options.metadata || {},\n timestamp: new Date().toISOString(),\n };\n const headers: Record<string, string> = { 'Content-Type': 'application/json' };\n const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (triggerKey) headers['x-workers-trigger-key'] = triggerKey;\n const response = await fetch(triggerUrl, {\n method: 'POST',\n headers,\n body: JSON.stringify({ workerId, body: messageBody }),\n });\n if (!response.ok) {\n const text = await response.text().catch(() => '');\n throw new Error(\n `Failed to trigger worker \"${workerId}\": ${response.status} ${response.statusText}${text ? ` - ${text}` : ''}`\n );\n }\n const data = (await response.json().catch(() => ({}))) as any;\n const messageId = data?.messageId ? String(data.messageId) : `trigger-${jobId}`;\n return { messageId, status: 'queued', jobId };\n}\n\n/**\n * Local development mode: runs the handler immediately in the same process.\n * This bypasses SQS and Lambda for faster iteration during development.\n *\n * @param handler - The worker handler function\n * @param input - The input data\n * @param ctx - The context object\n * @returns The handler result\n */\nexport async function dispatchLocal<INPUT, OUTPUT>(\n handler: (params: { input: INPUT; ctx: any }) => Promise<OUTPUT>,\n input: INPUT,\n ctx?: any\n): Promise<OUTPUT> {\n return handler({ input, ctx: ctx || {} });\n}\n\n/**\n * Dispatches a queue by ID. POSTs to the queue-start API; the queue-start handler creates the queue job.\n * Pass the first worker's input directly (no registry required).\n */\nexport async function dispatchQueue<InitialInput = any>(\n queueId: string,\n initialInput?: InitialInput,\n options: DispatchOptions = {},\n _ctx?: any\n): Promise<DispatchQueueResult> {\n const jobId =\n options.jobId || `job-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;\n const queueStartUrl = getQueueStartUrl(queueId);\n const normalizedInput =\n initialInput !== null && typeof initialInput === 'object'\n ? 
(initialInput as Record<string, unknown>)\n : { value: initialInput };\n const headers: Record<string, string> = { 'Content-Type': 'application/json' };\n const triggerKey = process.env.WORKERS_TRIGGER_API_KEY;\n if (triggerKey) headers['x-workers-trigger-key'] = triggerKey;\n const response = await fetch(queueStartUrl, {\n method: 'POST',\n headers,\n body: JSON.stringify({\n input: normalizedInput,\n initialInput: normalizedInput,\n metadata: options.metadata ?? {},\n jobId,\n ...(options.webhookUrl ? { webhookUrl: options.webhookUrl } : {}),\n }),\n });\n if (!response.ok) {\n const text = await response.text().catch(() => '');\n throw new Error(\n `Failed to start queue \"${queueId}\": ${response.status} ${response.statusText}${text ? ` - ${text}` : ''}`\n );\n }\n const data = (await response.json().catch(() => ({}))) as any;\n const messageId = data?.messageId ?? data?.jobId ?? `queue-${jobId}`;\n return { queueId, messageId, status: 'queued', jobId };\n}\n\n"],"mappings":";AAiFO,SAAS,uBAA+B;AAC7C,QAAM,MACJ,QAAQ,IAAI,mBACZ,QAAQ,IAAI,2BACZ,QAAQ,IAAI;AAEd,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,MAAM,IAAI,IAAI,GAAG;AACvB,MAAI,SAAS;AACb,MAAI,OAAO;AAEX,QAAM,OAAO,IAAI,YAAY;AAG7B,MAAI,WAAW,KAAK,QAAQ,oCAAoC,EAAE;AAElE,QAAM,WAAW,IAAI,SAAS,QAAQ,QAAQ,EAAE;AAChD,MAAI,WAAW,GAAG,QAAQ,mBAAmB,QAAQ,QAAQ,EAAE;AAE/D,SAAO,IAAI,SAAS;AACtB;AAMO,SAAS,iBAAiB,SAAyB;AACxD,QAAM,MACJ,QAAQ,IAAI,mBACZ,QAAQ,IAAI,2BACZ,QAAQ,IAAI;AAEd,MAAI,CAAC,KAAK;AACR,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,MAAM,IAAI,IAAI,GAAG;AACvB,MAAI,SAAS;AACb,MAAI,OAAO;AAEX,QAAM,OAAO,IAAI,YAAY;AAC7B,MAAI,WAAW,KAAK,QAAQ,oCAAoC,EAAE;AAClE,QAAM,WAAW,IAAI,SAAS,QAAQ,QAAQ,EAAE;AAChD,QAAM,cAAc,mBAAmB,OAAO;AAC9C,MAAI,WAAW,GAAG,QAAQ,WAAW,WAAW,SAAS,QAAQ,QAAQ,EAAE;AAE3E,SAAO,IAAI,SAAS;AACtB;AAMA,SAAS,iBAAiB,KAA6B;AACrD,QAAM,aAAgC,CAAC;AAEvC,MAAI,IAAI,WAAW;AACjB,eAAW,YAAY,IAAI;AAAA,EAC7B;AAGA,MAAI,IAAI,YAAY,OAAO,IAAI,aAAa,UAAU;AACpD,WAAO,OAAO,YAAY,IAAI,QAAQ;AAAA,EACxC;AAGA,MAAI,IAAI,qBAAqB,OAAO,IAAI,sBAAsB,YAAY;AACxE,UAAM,SAAS,IAAI,kBAAkB;AACrC,WAAO,OAAO,YAAY,MAAM;AAAA,EAClC;AAEA,SAAO;AACT;AAaA,eAAsB,SACpB,UACA,OACA,aACA,SACA,KACyB;AAEzB,QAAM,iBAAiB,YAAY,MAAM,KAAK;AAG9C,QAAM,QACJ,QAAQ,SAAS,OAAO,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAG/E,QAAM,aAAa,qBAAqB;AAGxC,QAAM,oBAAoB,MAAM,iBAAiB,GAAG,IAAI,CAAC;AAGzD,QAAM,cAAc;AAAA,IAClB;AAAA,IACA;AAAA,IACA,OAAO;AAAA,IACP,SAAS;AAAA,IACT,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ,YAAY,CAAC;AAAA,IAC/B,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AAEA,QAAM,UAAkC;AAAA,IACtC,gBAAgB;AAAA,EAClB;AACA,QAAM,aAAa,QAAQ,IAAI;AAC/B,MAAI,YAAY;AACd,YAAQ,uBAAuB,IAAI;AAAA,EACrC;AAEA,QAAM,WAAW,MAAM,MAAM,YAAY;AAAA,IACvC,QAAQ;AAAA,IACR;AAAA,IACA,MAAM,KAAK,UAAU;AAAA,MACnB;AAAA,MACA,MAAM;AAAA,IACR,CAAC;AAAA,EACH,CAAC;AAED,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ,MAAM,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,IAC9G;AAAA,EACF;AAEA,QAAM,OAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,QAAM,YAAY,MAAM,YAAY,OAAO,KAAK,SAAS,IAAI,WAAW,KAAK;AAE7E,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,EACF;AACF;AAYA,eAAsB,eACpB,UACA,OACA,UAA2B,CAAC,GAC5B,KACyB;AACzB,QAAM,QACJ,QAAQ,SAAS,OAAO,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAC/E,QAAM,aAAa,qBAAqB;AACxC,QAAM,oBAAoB,MAAM,iBAAiB,GAAG,IAAI,CAAC;AACzD,QAAM,cAAc;AAAA,IAClB;AAAA,IACA;AAAA,IACA,OAAO,SAAS,CAAC;AAAA,IACjB,SAAS;AAAA,IACT,YAAY,QAAQ;AAAA,IACpB,UAAU,QAAQ,YAAY,CAAC;AAAA,IAC/B,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AACA,QAAM,UAAkC,EAAE,gB
AAgB,mBAAmB;AAC7E,QAAM,aAAa,QAAQ,IAAI;AAC/B,MAAI,WAAY,SAAQ,uBAAuB,IAAI;AACnD,QAAM,WAAW,MAAM,MAAM,YAAY;AAAA,IACvC,QAAQ;AAAA,IACR;AAAA,IACA,MAAM,KAAK,UAAU,EAAE,UAAU,MAAM,YAAY,CAAC;AAAA,EACtD,CAAC;AACD,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,UAAM,IAAI;AAAA,MACR,6BAA6B,QAAQ,MAAM,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,IAC9G;AAAA,EACF;AACA,QAAM,OAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,QAAM,YAAY,MAAM,YAAY,OAAO,KAAK,SAAS,IAAI,WAAW,KAAK;AAC7E,SAAO,EAAE,WAAW,QAAQ,UAAU,MAAM;AAC9C;AAWA,eAAsB,cACpB,SACA,OACA,KACiB;AACjB,SAAO,QAAQ,EAAE,OAAO,KAAK,OAAO,CAAC,EAAE,CAAC;AAC1C;AAMA,eAAsB,cACpB,SACA,cACA,UAA2B,CAAC,GAC5B,MAC8B;AAC9B,QAAM,QACJ,QAAQ,SAAS,OAAO,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAC/E,QAAM,gBAAgB,iBAAiB,OAAO;AAC9C,QAAM,kBACJ,iBAAiB,QAAQ,OAAO,iBAAiB,WAC5C,eACD,EAAE,OAAO,aAAa;AAC5B,QAAM,UAAkC,EAAE,gBAAgB,mBAAmB;AAC7E,QAAM,aAAa,QAAQ,IAAI;AAC/B,MAAI,WAAY,SAAQ,uBAAuB,IAAI;AACnD,QAAM,WAAW,MAAM,MAAM,eAAe;AAAA,IAC1C,QAAQ;AAAA,IACR;AAAA,IACA,MAAM,KAAK,UAAU;AAAA,MACnB,OAAO;AAAA,MACP,cAAc;AAAA,MACd,UAAU,QAAQ,YAAY,CAAC;AAAA,MAC/B;AAAA,MACA,GAAI,QAAQ,aAAa,EAAE,YAAY,QAAQ,WAAW,IAAI,CAAC;AAAA,IACjE,CAAC;AAAA,EACH,CAAC;AACD,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,OAAO,MAAM,SAAS,KAAK,EAAE,MAAM,MAAM,EAAE;AACjD,UAAM,IAAI;AAAA,MACR,0BAA0B,OAAO,MAAM,SAAS,MAAM,IAAI,SAAS,UAAU,GAAG,OAAO,MAAM,IAAI,KAAK,EAAE;AAAA,IAC1G;AAAA,EACF;AACA,QAAM,OAAQ,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACpD,QAAM,YAAY,MAAM,aAAa,MAAM,SAAS,SAAS,KAAK;AAClE,SAAO,EAAE,SAAS,WAAW,QAAQ,UAAU,MAAM;AACvD;","names":[]}
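Since the source embedded in the map above documents the new registry-free `dispatchQueue`, a short usage sketch may help; the queue id `video-pipeline`, the payload, and the webhook URL are hypothetical, and the import path is the same assumed `client` subpath as above.

```ts
// Hypothetical queue id and payload; the return shape comes from DispatchQueueResult above.
import { dispatchQueue } from '@microfox/ai-worker/client';

async function startPipeline(videoUrl: string) {
  // POSTs to `${WORKER_BASE_URL}/queues/video-pipeline/start`; unlike 1.0.2, no
  // options.registry / getQueueRegistry() is needed.
  const result = await dispatchQueue('video-pipeline', { videoUrl }, {
    metadata: { source: 'api' },
    webhookUrl: 'https://app.example.com/api/workflows/webhook', // optional, hypothetical
  });
  return result; // { queueId, messageId, status: 'queued', jobId }
}
```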