@upstash/workflow 0.2.11 → 0.2.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/astro.d.mts CHANGED
@@ -1,6 +1,6 @@
  import { APIContext, APIRoute } from 'astro';
- import { e as WorkflowContext, k as PublicServeOptions, t as InvokableWorkflow } from './types-DS9q8FyV.mjs';
- import { s as serveManyBase } from './serve-many-Fuovl7gl.mjs';
+ import { e as WorkflowContext, k as PublicServeOptions, t as InvokableWorkflow } from './types-D1W0VOpy.mjs';
+ import { s as serveManyBase } from './serve-many-DLguU9iR.mjs';
  import '@upstash/qstash';
  import 'zod';
  import 'ai';
package/astro.d.ts CHANGED
@@ -1,6 +1,6 @@
  import { APIContext, APIRoute } from 'astro';
- import { e as WorkflowContext, k as PublicServeOptions, t as InvokableWorkflow } from './types-DS9q8FyV.js';
- import { s as serveManyBase } from './serve-many-DNnLsDIp.js';
+ import { e as WorkflowContext, k as PublicServeOptions, t as InvokableWorkflow } from './types-D1W0VOpy.js';
+ import { s as serveManyBase } from './serve-many-BdMq5rFX.js';
  import '@upstash/qstash';
  import 'zod';
  import 'ai';
package/astro.js CHANGED
@@ -134,7 +134,7 @@ var formatWorkflowError = (error) => {
  message: error.message
  } : {
  error: "Error",
- message: "An error occured while executing workflow."
+ message: `An error occured while executing workflow: '${typeof error === "string" ? error : JSON.stringify(error)}'`
  };
  };
 
@@ -2034,6 +2034,9 @@ var WorkflowApi = class extends BaseWorkflowApi {
  }
  };
 
+ // src/agents/index.ts
+ var import_openai3 = require("@ai-sdk/openai");
+
  // src/agents/adapters.ts
  var import_openai2 = require("@ai-sdk/openai");
  var import_ai = require("ai");
@@ -2053,46 +2056,49 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, config) => {
- const { baseURL, apiKey } = config ?? {};
- return (0, import_openai2.createOpenAI)({
- baseURL,
- apiKey,
- compatibility: "strict",
- fetch: async (input, init) => {
- try {
- const headers = init?.headers ? Object.fromEntries(new Headers(init.headers).entries()) : {};
- const body = init?.body ? JSON.parse(init.body) : void 0;
- const agentName = headers[AGENT_NAME_HEADER];
- const stepName = agentName ? `Call Agent ${agentName}` : "Call Agent";
- const responseInfo = await context.call(stepName, {
- url: input.toString(),
- method: init?.method,
- headers,
- body
- });
- const responseHeaders = new Headers(
- Object.entries(responseInfo.header).reduce(
- (acc, [key, values]) => {
- acc[key] = values.join(", ");
- return acc;
- },
- {}
- )
- );
- return new Response(JSON.stringify(responseInfo.body), {
- status: responseInfo.status,
- headers: responseHeaders
- });
- } catch (error) {
- if (error instanceof Error && error.name === "WorkflowAbort") {
- throw error;
- } else {
- console.error("Error in fetch implementation:", error);
- throw error;
- }
- }
+ var fetchWithContextCall = async (context, ...params) => {
+ const [input, init] = params;
+ try {
+ const headers = init?.headers ? Object.fromEntries(new Headers(init.headers).entries()) : {};
+ const body = init?.body ? JSON.parse(init.body) : void 0;
+ const agentName = headers[AGENT_NAME_HEADER];
+ const stepName = agentName ? `Call Agent ${agentName}` : "Call Agent";
+ const responseInfo = await context.call(stepName, {
+ url: input.toString(),
+ method: init?.method,
+ headers,
+ body
+ });
+ const responseHeaders = new Headers(
+ Object.entries(responseInfo.header).reduce(
+ (acc, [key, values]) => {
+ acc[key] = values.join(", ");
+ return acc;
+ },
+ {}
+ )
+ );
+ return new Response(JSON.stringify(responseInfo.body), {
+ status: responseInfo.status,
+ headers: responseHeaders
+ });
+ } catch (error) {
+ if (error instanceof Error && error.name === "WorkflowAbort") {
+ throw error;
+ } else {
+ console.error("Error in fetch implementation:", error);
+ throw error;
  }
+ }
+ };
+ var createWorkflowModel = ({
+ context,
+ provider,
+ providerParams
+ }) => {
+ return provider({
+ fetch: (...params) => fetchWithContextCall(context, ...params),
+ ...providerParams
  });
  };
  var wrapTools = ({
@@ -2332,9 +2338,14 @@ var WorkflowAgents = class {
  openai(...params) {
  const [model, settings] = params;
  const { baseURL, apiKey, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
- return openai2(model, otherSettings);
+ const openaiModel = this.AISDKModel({
+ context: this.context,
+ provider: import_openai3.createOpenAI,
+ providerParams: { baseURL, apiKey, compatibility: "strict" }
+ });
+ return openaiModel(model, otherSettings);
  }
+ AISDKModel = createWorkflowModel;
  };
 
  // src/context/context.ts
@@ -3057,6 +3068,7 @@ var processOptions = (options) => {
  retries: DEFAULT_RETRIES,
  useJSONContent: false,
  disableTelemetry: false,
+ onError: console.error,
  ...options
  };
  };
@@ -3106,7 +3118,8 @@ var serveBase = (routeFunction, telemetry2, options) => {
  retries,
  useJSONContent,
  disableTelemetry,
- flowControl
+ flowControl,
+ onError
  } = processOptions(options);
  telemetry2 = disableTelemetry ? void 0 : telemetry2;
  const debug = WorkflowLogger.getLogger(verbose);
@@ -3235,8 +3248,19 @@ var serveBase = (routeFunction, telemetry2, options) => {
  try {
  return await handler(request);
  } catch (error) {
- console.error(error);
- return new Response(JSON.stringify(formatWorkflowError(error)), {
+ const formattedError = formatWorkflowError(error);
+ try {
+ onError?.(error);
+ } catch (onErrorError) {
+ const formattedOnErrorError = formatWorkflowError(onErrorError);
+ const errorMessage = `Error while running onError callback: '${formattedOnErrorError.message}'.
+ Original error: '${formattedError.message}'`;
+ console.error(errorMessage);
+ return new Response(errorMessage, {
+ status: 500
+ });
+ }
+ return new Response(JSON.stringify(formattedError), {
  status: 500
  });
  }
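Note: the hunks above add an `onError` serve option (defaulting to `console.error`) that `serveBase` now calls before building the 500 response, and the handler keeps responding even if the callback itself throws. Below is a minimal usage sketch, assuming the Astro entry point `@upstash/workflow/astro` and the `{ POST }` shape its adapter exports; the monitoring helper is a hypothetical placeholder.

```ts
// Usage sketch based on the hunks above; reportToMonitoring is hypothetical.
import { serve } from "@upstash/workflow/astro";

// hypothetical stand-in for your own error reporting
const reportToMonitoring = (error: unknown) => {
  console.warn("workflow failed:", error);
};

export const { POST } = serve(
  async (context) => {
    await context.run("step-1", async () => {
      // workflow step body
    });
  },
  {
    // New in 0.2.12: invoked with the thrown error before the 500 response
    // is built; defaults to console.error (see processOptions above).
    onError: (error) => reportToMonitoring(error),
  }
);
```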
package/astro.mjs CHANGED
@@ -2,7 +2,7 @@ import {
  SDK_TELEMETRY,
  serveBase,
  serveManyBase
- } from "./chunk-WQAJ2RSZ.mjs";
+ } from "./chunk-4GTHIL7S.mjs";
 
  // platforms/astro.ts
  var telemetry = {
@@ -85,7 +85,7 @@ var formatWorkflowError = (error) => {
  message: error.message
  } : {
  error: "Error",
- message: "An error occured while executing workflow."
+ message: `An error occured while executing workflow: '${typeof error === "string" ? error : JSON.stringify(error)}'`
  };
  };
 
@@ -1185,46 +1185,49 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, config) => {
- const { baseURL, apiKey } = config ?? {};
- return createOpenAI({
- baseURL,
- apiKey,
- compatibility: "strict",
- fetch: async (input, init) => {
- try {
- const headers = init?.headers ? Object.fromEntries(new Headers(init.headers).entries()) : {};
- const body = init?.body ? JSON.parse(init.body) : void 0;
- const agentName = headers[AGENT_NAME_HEADER];
- const stepName = agentName ? `Call Agent ${agentName}` : "Call Agent";
- const responseInfo = await context.call(stepName, {
- url: input.toString(),
- method: init?.method,
- headers,
- body
- });
- const responseHeaders = new Headers(
- Object.entries(responseInfo.header).reduce(
- (acc, [key, values]) => {
- acc[key] = values.join(", ");
- return acc;
- },
- {}
- )
- );
- return new Response(JSON.stringify(responseInfo.body), {
- status: responseInfo.status,
- headers: responseHeaders
- });
- } catch (error) {
- if (error instanceof Error && error.name === "WorkflowAbort") {
- throw error;
- } else {
- console.error("Error in fetch implementation:", error);
- throw error;
- }
- }
+ var fetchWithContextCall = async (context, ...params) => {
+ const [input, init] = params;
+ try {
+ const headers = init?.headers ? Object.fromEntries(new Headers(init.headers).entries()) : {};
+ const body = init?.body ? JSON.parse(init.body) : void 0;
+ const agentName = headers[AGENT_NAME_HEADER];
+ const stepName = agentName ? `Call Agent ${agentName}` : "Call Agent";
+ const responseInfo = await context.call(stepName, {
+ url: input.toString(),
+ method: init?.method,
+ headers,
+ body
+ });
+ const responseHeaders = new Headers(
+ Object.entries(responseInfo.header).reduce(
+ (acc, [key, values]) => {
+ acc[key] = values.join(", ");
+ return acc;
+ },
+ {}
+ )
+ );
+ return new Response(JSON.stringify(responseInfo.body), {
+ status: responseInfo.status,
+ headers: responseHeaders
+ });
+ } catch (error) {
+ if (error instanceof Error && error.name === "WorkflowAbort") {
+ throw error;
+ } else {
+ console.error("Error in fetch implementation:", error);
+ throw error;
  }
+ }
+ };
+ var createWorkflowModel = ({
+ context,
+ provider,
+ providerParams
+ }) => {
+ return provider({
+ fetch: (...params) => fetchWithContextCall(context, ...params),
+ ...providerParams
  });
  };
  var wrapTools = ({
@@ -2169,6 +2172,9 @@ var WorkflowApi = class extends BaseWorkflowApi {
  }
  };
 
+ // src/agents/index.ts
+ import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
+
  // src/agents/agent.ts
  import { z } from "zod";
  import { generateText, tool as tool2, ToolExecutionError } from "ai";
@@ -2374,9 +2380,14 @@ var WorkflowAgents = class {
  openai(...params) {
  const [model, settings] = params;
  const { baseURL, apiKey, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
- return openai2(model, otherSettings);
+ const openaiModel = this.AISDKModel({
+ context: this.context,
+ provider: createOpenAI2,
+ providerParams: { baseURL, apiKey, compatibility: "strict" }
+ });
+ return openaiModel(model, otherSettings);
  }
+ AISDKModel = createWorkflowModel;
  };
 
  // src/context/context.ts
@@ -3099,6 +3110,7 @@ var processOptions = (options) => {
  retries: DEFAULT_RETRIES,
  useJSONContent: false,
  disableTelemetry: false,
+ onError: console.error,
  ...options
  };
  };
@@ -3148,7 +3160,8 @@ var serveBase = (routeFunction, telemetry, options) => {
  retries,
  useJSONContent,
  disableTelemetry,
- flowControl
+ flowControl,
+ onError
  } = processOptions(options);
  telemetry = disableTelemetry ? void 0 : telemetry;
  const debug = WorkflowLogger.getLogger(verbose);
@@ -3277,8 +3290,19 @@ var serveBase = (routeFunction, telemetry, options) => {
  try {
  return await handler(request);
  } catch (error) {
- console.error(error);
- return new Response(JSON.stringify(formatWorkflowError(error)), {
+ const formattedError = formatWorkflowError(error);
+ try {
+ onError?.(error);
+ } catch (onErrorError) {
+ const formattedOnErrorError = formatWorkflowError(onErrorError);
+ const errorMessage = `Error while running onError callback: '${formattedOnErrorError.message}'.
+ Original error: '${formattedError.message}'`;
+ console.error(errorMessage);
+ return new Response(errorMessage, {
+ status: 500
+ });
+ }
+ return new Response(JSON.stringify(formattedError), {
  status: 500
  });
  }
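The adapter refactor above replaces `createWorkflowOpenAI` with a provider-agnostic `createWorkflowModel` (exposed on `WorkflowAgents` as `AISDKModel`), while the public `context.agents.openai(...)` call keeps its `{ baseURL, apiKey }` settings and still routes every model request through `context.call` as a "Call Agent ..." step. A sketch of that call path, assuming the agents API shape from the Upstash Workflow docs (`agents.agent`, `agents.task`); the surrounding task wiring is illustrative only.

```ts
// Inside a workflow route function; `context` is the WorkflowContext.
// The openai() settings below map to providerParams in the new adapter.
const model = context.agents.openai("gpt-4o-mini", {
  baseURL: process.env.OPENAI_BASE_URL, // optional override, forwarded to createOpenAI
  apiKey: process.env.OPENAI_API_KEY,
});

const summarizer = context.agents.agent({
  model,
  name: "summarizer",
  maxSteps: 2,
  tools: {},
  background: "You summarize the text you are given.",
});

// Each LLM request the agent makes becomes a durable "Call Agent summarizer"
// step, because the model's fetch is now fetchWithContextCall(context, ...).
const { text } = await context.agents
  .task({ agent: summarizer, prompt: "Summarize the 0.2.12 changes." })
  .run();
```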
package/cloudflare.d.mts CHANGED
@@ -1,5 +1,5 @@
- import { R as RouteFunction, k as PublicServeOptions, t as InvokableWorkflow } from './types-DS9q8FyV.mjs';
- import { s as serveManyBase } from './serve-many-Fuovl7gl.mjs';
+ import { R as RouteFunction, k as PublicServeOptions, t as InvokableWorkflow } from './types-D1W0VOpy.mjs';
+ import { s as serveManyBase } from './serve-many-DLguU9iR.mjs';
  import '@upstash/qstash';
  import 'zod';
  import 'ai';
package/cloudflare.d.ts CHANGED
@@ -1,5 +1,5 @@
- import { R as RouteFunction, k as PublicServeOptions, t as InvokableWorkflow } from './types-DS9q8FyV.js';
- import { s as serveManyBase } from './serve-many-DNnLsDIp.js';
+ import { R as RouteFunction, k as PublicServeOptions, t as InvokableWorkflow } from './types-D1W0VOpy.js';
+ import { s as serveManyBase } from './serve-many-BdMq5rFX.js';
  import '@upstash/qstash';
  import 'zod';
  import 'ai';
package/cloudflare.js CHANGED
@@ -134,7 +134,7 @@ var formatWorkflowError = (error) => {
  message: error.message
  } : {
  error: "Error",
- message: "An error occured while executing workflow."
+ message: `An error occured while executing workflow: '${typeof error === "string" ? error : JSON.stringify(error)}'`
  };
  };
 
@@ -2034,6 +2034,9 @@ var WorkflowApi = class extends BaseWorkflowApi {
  }
  };
 
+ // src/agents/index.ts
+ var import_openai3 = require("@ai-sdk/openai");
+
  // src/agents/adapters.ts
  var import_openai2 = require("@ai-sdk/openai");
  var import_ai = require("ai");
@@ -2053,46 +2056,49 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, config) => {
- const { baseURL, apiKey } = config ?? {};
- return (0, import_openai2.createOpenAI)({
- baseURL,
- apiKey,
- compatibility: "strict",
- fetch: async (input, init) => {
- try {
- const headers = init?.headers ? Object.fromEntries(new Headers(init.headers).entries()) : {};
- const body = init?.body ? JSON.parse(init.body) : void 0;
- const agentName = headers[AGENT_NAME_HEADER];
- const stepName = agentName ? `Call Agent ${agentName}` : "Call Agent";
- const responseInfo = await context.call(stepName, {
- url: input.toString(),
- method: init?.method,
- headers,
- body
- });
- const responseHeaders = new Headers(
- Object.entries(responseInfo.header).reduce(
- (acc, [key, values]) => {
- acc[key] = values.join(", ");
- return acc;
- },
- {}
- )
- );
- return new Response(JSON.stringify(responseInfo.body), {
- status: responseInfo.status,
- headers: responseHeaders
- });
- } catch (error) {
- if (error instanceof Error && error.name === "WorkflowAbort") {
- throw error;
- } else {
- console.error("Error in fetch implementation:", error);
- throw error;
- }
- }
+ var fetchWithContextCall = async (context, ...params) => {
+ const [input, init] = params;
+ try {
+ const headers = init?.headers ? Object.fromEntries(new Headers(init.headers).entries()) : {};
+ const body = init?.body ? JSON.parse(init.body) : void 0;
+ const agentName = headers[AGENT_NAME_HEADER];
+ const stepName = agentName ? `Call Agent ${agentName}` : "Call Agent";
+ const responseInfo = await context.call(stepName, {
+ url: input.toString(),
+ method: init?.method,
+ headers,
+ body
+ });
+ const responseHeaders = new Headers(
+ Object.entries(responseInfo.header).reduce(
+ (acc, [key, values]) => {
+ acc[key] = values.join(", ");
+ return acc;
+ },
+ {}
+ )
+ );
+ return new Response(JSON.stringify(responseInfo.body), {
+ status: responseInfo.status,
+ headers: responseHeaders
+ });
+ } catch (error) {
+ if (error instanceof Error && error.name === "WorkflowAbort") {
+ throw error;
+ } else {
+ console.error("Error in fetch implementation:", error);
+ throw error;
  }
+ }
+ };
+ var createWorkflowModel = ({
+ context,
+ provider,
+ providerParams
+ }) => {
+ return provider({
+ fetch: (...params) => fetchWithContextCall(context, ...params),
+ ...providerParams
  });
  };
  var wrapTools = ({
@@ -2332,9 +2338,14 @@ var WorkflowAgents = class {
  openai(...params) {
  const [model, settings] = params;
  const { baseURL, apiKey, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
- return openai2(model, otherSettings);
+ const openaiModel = this.AISDKModel({
+ context: this.context,
+ provider: import_openai3.createOpenAI,
+ providerParams: { baseURL, apiKey, compatibility: "strict" }
+ });
+ return openaiModel(model, otherSettings);
  }
+ AISDKModel = createWorkflowModel;
  };
 
  // src/context/context.ts
@@ -3057,6 +3068,7 @@ var processOptions = (options) => {
  retries: DEFAULT_RETRIES,
  useJSONContent: false,
  disableTelemetry: false,
+ onError: console.error,
  ...options
  };
  };
@@ -3106,7 +3118,8 @@ var serveBase = (routeFunction, telemetry2, options) => {
  retries,
  useJSONContent,
  disableTelemetry,
- flowControl
+ flowControl,
+ onError
  } = processOptions(options);
  telemetry2 = disableTelemetry ? void 0 : telemetry2;
  const debug = WorkflowLogger.getLogger(verbose);
@@ -3235,8 +3248,19 @@ var serveBase = (routeFunction, telemetry2, options) => {
  try {
  return await handler(request);
  } catch (error) {
- console.error(error);
- return new Response(JSON.stringify(formatWorkflowError(error)), {
+ const formattedError = formatWorkflowError(error);
+ try {
+ onError?.(error);
+ } catch (onErrorError) {
+ const formattedOnErrorError = formatWorkflowError(onErrorError);
+ const errorMessage = `Error while running onError callback: '${formattedOnErrorError.message}'.
+ Original error: '${formattedError.message}'`;
+ console.error(errorMessage);
+ return new Response(errorMessage, {
+ status: 500
+ });
+ }
+ return new Response(JSON.stringify(formattedError), {
  status: 500
  });
  }
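For reference, the widened `formatWorkflowError` in these hunks now embeds the thrown value when it is not an `Error`. A small standalone sketch of the behavior; the non-Error branch is taken verbatim from the diff, while the `error: error.name` field in the Error branch is assumed from the surrounding (unchanged) code.

```ts
// Mirrors the formatWorkflowError change in this release; only the
// non-Error branch appears verbatim in the diff above.
const formatWorkflowError = (error: unknown) =>
  error instanceof Error
    ? { error: error.name, message: error.message }
    : {
        error: "Error",
        message: `An error occured while executing workflow: '${
          typeof error === "string" ? error : JSON.stringify(error)
        }'`,
      };

console.log(formatWorkflowError("boom"));
// -> message: "An error occured while executing workflow: 'boom'"
console.log(formatWorkflowError({ code: 42 }));
// -> message: "An error occured while executing workflow: '{\"code\":42}'"
```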
package/cloudflare.mjs CHANGED
@@ -2,7 +2,7 @@ import {
  SDK_TELEMETRY,
  serveBase,
  serveManyBase
- } from "./chunk-WQAJ2RSZ.mjs";
+ } from "./chunk-4GTHIL7S.mjs";
 
  // platforms/cloudflare.ts
  var getArgs = (args) => {
package/express.d.mts CHANGED
@@ -1,7 +1,7 @@
  import * as express_serve_static_core from 'express-serve-static-core';
- import { R as RouteFunction, W as WorkflowServeOptions, t as InvokableWorkflow } from './types-DS9q8FyV.mjs';
+ import { R as RouteFunction, W as WorkflowServeOptions, t as InvokableWorkflow } from './types-D1W0VOpy.mjs';
  import { Router } from 'express';
- import { s as serveManyBase } from './serve-many-Fuovl7gl.mjs';
+ import { s as serveManyBase } from './serve-many-DLguU9iR.mjs';
  import '@upstash/qstash';
  import 'zod';
  import 'ai';
package/express.d.ts CHANGED
@@ -1,7 +1,7 @@
  import * as express_serve_static_core from 'express-serve-static-core';
- import { R as RouteFunction, W as WorkflowServeOptions, t as InvokableWorkflow } from './types-DS9q8FyV.js';
+ import { R as RouteFunction, W as WorkflowServeOptions, t as InvokableWorkflow } from './types-D1W0VOpy.js';
  import { Router } from 'express';
- import { s as serveManyBase } from './serve-many-DNnLsDIp.js';
+ import { s as serveManyBase } from './serve-many-BdMq5rFX.js';
  import '@upstash/qstash';
  import 'zod';
  import 'ai';