@upstash/workflow 0.2.5 → 0.2.6

This diff shows the published contents of the two package versions as they appear in their public registry. It is provided for informational purposes only.
package/astro.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { APIContext, APIRoute } from 'astro';
- import { b as WorkflowContext, j as PublicServeOptions } from './types-CalpUeFX.mjs';
+ import { b as WorkflowContext, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/astro.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { APIContext, APIRoute } from 'astro';
- import { b as WorkflowContext, j as PublicServeOptions } from './types-CalpUeFX.js';
+ import { b as WorkflowContext, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/astro.js CHANGED
@@ -1574,11 +1574,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1638,8 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1911,8 +1916,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2667,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
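Across all of the platform bundles in this diff, the compiled `OpenAIAPI.call` gains the same behavior: when the new optional `baseURL` setting is present, the step is routed through QStash's `custom` provider (`custom({ baseUrl: baseURL, token })`) instead of the `openai` provider. A minimal sketch of calling an OpenAI-compatible endpoint this way; the helper name, gateway URL, env variable, and model name are illustrative and not part of the package:

```ts
import { WorkflowContext } from "@upstash/workflow";

// Hypothetical helper used inside an existing workflow route.
export const callGateway = async (context: WorkflowContext) => {
  const response = await context.api.openai.call("call an OpenAI-compatible gateway", {
    token: process.env.LLM_TOKEN!,              // placeholder env variable
    baseURL: "https://llm-gateway.example.com", // placeholder; omit to call api.openai.com as before
    operation: "chat.completions.create",
    body: {
      model: "gpt-4o-mini",                     // placeholder model name
      messages: [{ role: "user", content: "Hello!" }],
    },
  });
  return response.body;
};
```

Leaving `baseURL` undefined keeps the 0.2.5 behavior of calling OpenAI with `token` and optional `organization`.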
package/astro.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/astro.ts
  function serve(routeFunction, options) {
@@ -1584,14 +1584,16 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  };
 
  // src/context/api/openai.ts
- import { openai } from "@upstash/qstash";
+ import { custom, openai } from "@upstash/qstash";
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? openai({ token, organization }) : custom({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: openai({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1651,8 +1653,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return createOpenAI({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1924,8 +1929,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2680,7 +2687,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/cloudflare.d.mts CHANGED
@@ -1,5 +1,6 @@
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/cloudflare.d.ts CHANGED
@@ -1,5 +1,6 @@
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/cloudflare.js CHANGED
@@ -1574,11 +1574,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1638,8 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1911,8 +1916,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2667,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/cloudflare.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/cloudflare.ts
  var getArgs = (args) => {
package/express.d.mts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, W as WorkflowServeOptions } from './types-Cuqlx2Cr.mjs';
  import { Router } from 'express';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/express.d.ts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions } from './types-CalpUeFX.js';
+ import { R as RouteFunction, W as WorkflowServeOptions } from './types-Cuqlx2Cr.js';
  import { Router } from 'express';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/express.js CHANGED
@@ -25246,11 +25246,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -25310,8 +25312,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -25583,8 +25588,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -26339,7 +26346,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/express.mjs CHANGED
@@ -4,7 +4,7 @@ import {
  __require,
  __toESM,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // node_modules/depd/index.js
  var require_depd = __commonJS({
package/h3.d.mts CHANGED
@@ -1,6 +1,7 @@
  import * as h3 from 'h3';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/h3.d.ts CHANGED
@@ -1,6 +1,7 @@
  import * as h3 from 'h3';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/h3.js CHANGED
@@ -1886,11 +1886,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1950,8 +1952,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -2223,8 +2228,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2979,7 +2986,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/h3.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // node_modules/defu/dist/defu.mjs
  function isPlainObject(value) {
package/hono.d.mts CHANGED
@@ -1,7 +1,8 @@
  import { Context } from 'hono';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import { Variables } from 'hono/types';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/hono.d.ts CHANGED
@@ -1,7 +1,8 @@
  import { Context } from 'hono';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import { Variables } from 'hono/types';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/hono.js CHANGED
@@ -1574,11 +1574,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1638,8 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1911,8 +1916,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2667,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/hono.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/hono.ts
  var serve = (routeFunction, options) => {
package/index.d.mts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-CalpUeFX.mjs';
- export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, W as WorkflowServeOptions, E as ExclusiveValidationOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-Cuqlx2Cr.mjs';
+ export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-Cuqlx2Cr.mjs';
  import { Client as Client$1, QstashError } from '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
@@ -12,7 +13,7 @@ import '@ai-sdk/openai';
  * @param options - Options including the client, onFinish callback, and initialPayloadParser.
  * @returns An async method that consumes incoming requests and runs the workflow.
  */
- declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent">) => {
+ declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>) => {
  handler: (request: TRequest) => Promise<TResponse>;
  };
 
@@ -191,4 +192,4 @@ declare class WorkflowAbort extends Error {
  constructor(stepName: string, stepInfo?: Step, cancelWorkflow?: boolean);
  }
 
- export { Client, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
+ export { Client, ExclusiveValidationOptions, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
package/index.d.ts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-CalpUeFX.js';
- export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-CalpUeFX.js';
+ import { R as RouteFunction, W as WorkflowServeOptions, E as ExclusiveValidationOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-Cuqlx2Cr.js';
+ export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-Cuqlx2Cr.js';
  import { Client as Client$1, QstashError } from '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
@@ -12,7 +13,7 @@ import '@ai-sdk/openai';
  * @param options - Options including the client, onFinish callback, and initialPayloadParser.
  * @returns An async method that consumes incoming requests and runs the workflow.
  */
- declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent">) => {
+ declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>) => {
  handler: (request: TRequest) => Promise<TResponse>;
  };
 
@@ -191,4 +192,4 @@ declare class WorkflowAbort extends Error {
  constructor(stepName: string, stepInfo?: Step, cancelWorkflow?: boolean);
  }
 
- export { Client, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
+ export { Client, ExclusiveValidationOptions, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
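The updated `serve` signature drops `schema` and `initialPayloadParser` from the plain options and re-adds them through `ExclusiveValidationOptions`, and the compiled `processOptions` hunks show `schema.parse` being applied to the JSON-parsed request body. A minimal sketch of the zod path, with a hypothetical payload shape (the field names are illustrative only):

```ts
import { z } from "zod";
import { serve } from "@upstash/workflow";

// Hypothetical initial payload for illustration.
const payloadSchema = z.object({ orderId: z.string(), amount: z.number() });

export const { handler } = serve<z.infer<typeof payloadSchema>>(
  async (context) => {
    // The payload has already been run through payloadSchema.parse
    // by the default initialPayloadParser.
    const { orderId, amount } = context.requestPayload;
    await context.run("log-order", () => console.log(orderId, amount));
  },
  { schema: payloadSchema }
);
```

A custom `initialPayloadParser` can still be supplied instead of `schema`.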
package/index.js CHANGED
@@ -1587,11 +1587,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1651,8 +1653,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1924,8 +1929,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2680,7 +2687,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
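In the compiled `WorkflowAgents.openai` above, `baseURL` and `apiKey` are now peeled off the settings and handed to `createWorkflowOpenAI`, so agent models can target OpenAI-compatible endpoints. A sketch under the assumption that the surrounding agents API (`context.agents.agent`, `context.agents.task`) is used as in previous releases; the gateway URL, env variable, and model name are placeholders:

```ts
import { serve } from "@upstash/workflow";

export const { handler } = serve(async (context) => {
  // baseURL/apiKey go to createWorkflowOpenAI; any remaining settings are
  // forwarded to the model call as before.
  const model = context.agents.openai("gpt-4o-mini", {
    baseURL: "https://llm-gateway.example.com/v1", // placeholder endpoint
    apiKey: process.env.GATEWAY_API_KEY,           // placeholder env variable
  });

  const assistant = context.agents.agent({
    model,
    name: "assistant",
    background: "You answer questions briefly.",
    tools: {},
    maxSteps: 1,
  });

  const { text } = await context.agents.task({ agent: assistant, prompt: "Say hello." }).run();
  console.log(text);
});
```

Omitting both fields keeps the default OpenAI provider configuration.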
package/index.mjs CHANGED
@@ -9,7 +9,7 @@ import {
  makeNotifyRequest,
  serve,
  triggerFirstInvocation
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // src/client/index.ts
  import { Client as QStashClient } from "@upstash/qstash";
package/nextjs.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { NextApiHandler } from 'next';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/nextjs.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { NextApiHandler } from 'next';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/nextjs.js CHANGED
@@ -1575,11 +1575,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1639,8 +1641,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1912,8 +1917,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2668,7 +2675,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/nextjs.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/nextjs.ts
  var serve = (routeFunction, options) => {
package/package.json CHANGED
@@ -1 +1 @@
- {"name":"@upstash/workflow","version":"v0.2.5","description":"Durable, Reliable and Performant Serverless Functions","main":"./index.js","module":"./index.mjs","types":"./index.d.ts","files":["./*"],"exports":{".":{"import":"./index.mjs","require":"./index.js"},"./dist/nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./h3":{"import":"./h3.mjs","require":"./h3.js"},"./svelte":{"import":"./svelte.mjs","require":"./svelte.js"},"./solidjs":{"import":"./solidjs.mjs","require":"./solidjs.js"},"./workflow":{"import":"./workflow.mjs","require":"./workflow.js"},"./hono":{"import":"./hono.mjs","require":"./hono.js"},"./cloudflare":{"import":"./cloudflare.mjs","require":"./cloudflare.js"},"./astro":{"import":"./astro.mjs","require":"./astro.js"},"./express":{"import":"./express.mjs","require":"./express.js"}},"scripts":{"build":"tsup && cp README.md ./dist/ && cp package.json ./dist/ && cp LICENSE ./dist/","test":"bun test src","fmt":"prettier --write .","lint":"tsc && eslint \"{src,platforms}/**/*.{js,ts,tsx}\" --quiet --fix","check-exports":"bun run build && cd dist && attw -P"},"repository":{"type":"git","url":"git+https://github.com/upstash/workflow-ts.git"},"keywords":["upstash","qstash","workflow","serverless"],"author":"Cahid Arda Oz","license":"MIT","bugs":{"url":"https://github.com/upstash/workflow-ts/issues"},"homepage":"https://github.com/upstash/workflow-ts#readme","devDependencies":{"@commitlint/cli":"^19.5.0","@commitlint/config-conventional":"^19.5.0","@eslint/js":"^9.11.1","@solidjs/start":"^1.0.8","@sveltejs/kit":"^2.6.1","@types/bun":"^1.1.10","@types/express":"^5.0.0","astro":"^4.16.7","eslint":"^9.11.1","eslint-plugin-unicorn":"^55.0.0","express":"^4.21.1","globals":"^15.10.0","h3":"^1.12.0","hono":"^4.6.3","husky":"^9.1.6","next":"^14.2.14","prettier":"3.3.3","tsup":"^8.3.0","typescript":"^5.7.2","typescript-eslint":"^8.18.0"},"dependencies":{"@ai-sdk/openai":"^1.0.15","@upstash/qstash":"^2.7.20","ai":"^4.0.30","zod":"^3.24.1"},"directories":{"example":"examples"}}
+ {"name":"@upstash/workflow","version":"v0.2.6","description":"Durable, Reliable and Performant Serverless Functions","main":"./index.js","module":"./index.mjs","types":"./index.d.ts","files":["./*"],"exports":{".":{"import":"./index.mjs","require":"./index.js"},"./dist/nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./h3":{"import":"./h3.mjs","require":"./h3.js"},"./svelte":{"import":"./svelte.mjs","require":"./svelte.js"},"./solidjs":{"import":"./solidjs.mjs","require":"./solidjs.js"},"./workflow":{"import":"./workflow.mjs","require":"./workflow.js"},"./hono":{"import":"./hono.mjs","require":"./hono.js"},"./cloudflare":{"import":"./cloudflare.mjs","require":"./cloudflare.js"},"./astro":{"import":"./astro.mjs","require":"./astro.js"},"./express":{"import":"./express.mjs","require":"./express.js"}},"scripts":{"build":"tsup && cp README.md ./dist/ && cp package.json ./dist/ && cp LICENSE ./dist/","test":"bun test src","fmt":"prettier --write .","lint":"tsc && eslint \"{src,platforms}/**/*.{js,ts,tsx}\" --quiet --fix","check-exports":"bun run build && cd dist && attw -P"},"repository":{"type":"git","url":"git+https://github.com/upstash/workflow-ts.git"},"keywords":["upstash","qstash","workflow","serverless"],"author":"Cahid Arda Oz","license":"MIT","bugs":{"url":"https://github.com/upstash/workflow-ts/issues"},"homepage":"https://github.com/upstash/workflow-ts#readme","devDependencies":{"@commitlint/cli":"^19.5.0","@commitlint/config-conventional":"^19.5.0","@eslint/js":"^9.11.1","@solidjs/start":"^1.0.8","@sveltejs/kit":"^2.6.1","@types/bun":"^1.1.10","@types/express":"^5.0.0","astro":"^4.16.7","eslint":"^9.11.1","eslint-plugin-unicorn":"^55.0.0","express":"^4.21.1","globals":"^15.10.0","h3":"^1.12.0","hono":"^4.6.3","husky":"^9.1.6","next":"^14.2.14","prettier":"3.3.3","tsup":"^8.3.0","typescript":"^5.7.2","typescript-eslint":"^8.18.0"},"dependencies":{"@ai-sdk/openai":"^1.0.15","@upstash/qstash":"^2.7.20","ai":"^4.0.30","zod":"^3.24.1"},"directories":{"example":"examples"}}
package/solidjs.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { APIEvent } from '@solidjs/start/server';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/solidjs.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { APIEvent } from '@solidjs/start/server';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/solidjs.js CHANGED
@@ -1574,11 +1574,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1638,8 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1911,8 +1916,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2667,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/solidjs.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/solidjs.ts
  var serve = (routeFunction, options) => {
package/svelte.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { RequestHandler } from '@sveltejs/kit';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/svelte.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { RequestHandler } from '@sveltejs/kit';
- import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/svelte.js CHANGED
@@ -1574,11 +1574,13 @@ var AnthropicAPI = class extends BaseWorkflowApi {
  var import_qstash6 = require("@upstash/qstash");
  var OpenAIAPI = class extends BaseWorkflowApi {
  async call(stepName, settings) {
- const { token, organization, operation, ...parameters } = settings;
+ const { token, organization, operation, baseURL, ...parameters } = settings;
+ const useOpenAI = baseURL === void 0;
+ const provider = useOpenAI ? (0, import_qstash6.openai)({ token, organization }) : (0, import_qstash6.custom)({ baseUrl: baseURL, token });
  return await this.callApi(stepName, {
  api: {
  name: "llm",
- provider: (0, import_qstash6.openai)({ token, organization })
+ provider
  },
  ...parameters
  });
@@ -1638,8 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
+ baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1911,8 +1916,10 @@ var WorkflowAgents = class {
  * creates an openai model for agents
  */
  openai(...params) {
- const openai2 = createWorkflowOpenAI(this.context);
- return openai2(...params);
+ const [model, settings] = params;
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
+ return openai2(model, otherSettings);
  }
  };
 
@@ -2667,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/svelte.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-42MM2EPQ.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/svelte.ts
  var serve = (routeFunction, options) => {
@@ -1,4 +1,5 @@
  import { PublishRequest, Client, Receiver, HTTPMethods as HTTPMethods$1 } from '@upstash/qstash';
+ import { z } from 'zod';
  import * as ai from 'ai';
  import { CoreTool, generateText } from 'ai';
  import * as _ai_sdk_openai from '@ai-sdk/openai';
@@ -349,6 +350,7 @@ declare class OpenAIAPI extends BaseWorkflowApi {
  call<TResult = ChatCompletion, TBody = CreateChatCompletion>(stepName: string, settings: ApiCallSettings<TBody, {
  token: string;
  organization?: string;
+ baseURL?: string;
  operation: "chat.completions.create";
  }>): Promise<CallResponse<TResult>>;
  }
@@ -391,45 +393,16 @@ declare class WorkflowApi extends BaseWorkflowApi {
  }
 
  /**
- * An Agent which utilizes the model and tools available to it
- * to achieve a given task
+ * creates an AI SDK openai client with a custom
+ * fetch implementation which uses context.call.
  *
- * @param name Name of the agent
- * @param background Background of the agent
- * @param model LLM model to use
- * @param tools tools available to the agent
- * @param maxSteps number of times the agent can call the LLM at most. If
- * the agent abruptly stops execution after calling tools, you may need
- * to increase maxSteps
- * @param temparature temparature used when calling the LLM
+ * @param context workflow context
+ * @returns ai sdk openai
  */
- declare class Agent {
- readonly name: AgentParameters["name"];
- readonly tools: AgentParameters["tools"];
- readonly maxSteps: AgentParameters["maxSteps"];
- readonly background: AgentParameters["background"];
- readonly model: AgentParameters["model"];
- readonly temparature: AgentParameters["temparature"];
- private readonly context;
- constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
- /**
- * Trigger the agent by passing a prompt
- *
- * @param prompt task to assign to the agent
- * @returns Response as `{ text: string }`
- */
- call({ prompt }: {
- prompt: string;
- }): Promise<{
- text: string;
- }>;
- /**
- * Convert the agent to a tool which can be used by other agents.
- *
- * @returns the agent as a tool
- */
- asTool(): AISDKTool;
- }
+ declare const createWorkflowOpenAI: (context: WorkflowContext, config?: {
+ baseURL?: string;
+ apiKey?: string;
+ }) => _ai_sdk_openai.OpenAIProvider;
 
  type AISDKTool = CoreTool;
  type LangchainTool = {
@@ -501,15 +474,53 @@ type MultiAgentTaskParams = TaskParams & {
  */
  background?: string;
  };
+ type ModelParams = Parameters<ReturnType<typeof createWorkflowOpenAI>>;
+ type CustomModelSettings = ModelParams["1"] & {
+ baseURL?: string;
+ apiKey?: string;
+ };
+ type CustomModelParams = [ModelParams[0], CustomModelSettings?];
 
  /**
- * creates an AI SDK openai client with a custom
- * fetch implementation which uses context.call.
+ * An Agent which utilizes the model and tools available to it
+ * to achieve a given task
  *
- * @param context workflow context
- * @returns ai sdk openai
+ * @param name Name of the agent
+ * @param background Background of the agent
+ * @param model LLM model to use
+ * @param tools tools available to the agent
+ * @param maxSteps number of times the agent can call the LLM at most. If
+ * the agent abruptly stops execution after calling tools, you may need
+ * to increase maxSteps
+ * @param temparature temparature used when calling the LLM
  */
- declare const createWorkflowOpenAI: (context: WorkflowContext) => _ai_sdk_openai.OpenAIProvider;
+ declare class Agent {
+ readonly name: AgentParameters["name"];
+ readonly tools: AgentParameters["tools"];
+ readonly maxSteps: AgentParameters["maxSteps"];
+ readonly background: AgentParameters["background"];
+ readonly model: AgentParameters["model"];
+ readonly temparature: AgentParameters["temparature"];
+ private readonly context;
+ constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
+ call({ prompt }: {
+ prompt: string;
+ }): Promise<{
+ text: string;
+ }>;
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
+ asTool(): AISDKTool;
+ }
 
  /**
  * An Agent Task
@@ -621,7 +632,7 @@ declare class WorkflowAgents {
  /**
  * creates an openai model for agents
  */
- openai(...params: Parameters<ReturnType<typeof createWorkflowOpenAI>>): ai.LanguageModelV1;
+ openai(...params: CustomModelParams): ai.LanguageModelV1;
  }
 
  /**
@@ -1026,7 +1037,7 @@ type StepFunction<TResult> = AsyncStepFunction<TResult> | SyncStepFunction<TResu
  type ParallelCallState = "first" | "partial" | "discard" | "last";
  type RouteFunction<TInitialPayload> = (context: WorkflowContext<TInitialPayload>) => Promise<void>;
  type FinishCondition = "success" | "duplicate-step" | "fromCallback" | "auth-fail" | "failure-callback" | "workflow-already-ended";
- type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = {
+ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = ValidationOptions<TInitialPayload> & {
  /**
  * QStash client
  */
@@ -1038,10 +1049,6 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * @returns response
  */
  onStepFinish?: (workflowRunId: string, finishCondition: FinishCondition) => TResponse;
- /**
- * Function to parse the initial payload passed by the user
- */
- initialPayloadParser?: (initialPayload: string) => TInitialPayload;
  /**
  * Url of the endpoint where the workflow is set up.
  *
@@ -1122,6 +1129,17 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * Set `disableTelemetry` to disable this behavior.
  */
  disableTelemetry?: boolean;
+ } & ValidationOptions<TInitialPayload>;
+ type ValidationOptions<TInitialPayload> = {
+ schema?: z.ZodType<TInitialPayload>;
+ initialPayloadParser?: (initialPayload: string) => TInitialPayload;
+ };
+ type ExclusiveValidationOptions<TInitialPayload> = {
+ schema?: ValidationOptions<TInitialPayload>["schema"];
+ initialPayloadParser?: never;
+ } | {
+ schema?: never;
+ initialPayloadParser?: ValidationOptions<TInitialPayload>["initialPayloadParser"];
  };
  type Telemetry = {
  /**
@@ -1137,7 +1155,7 @@ type Telemetry = {
  */
  runtime?: string;
  };
- type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent">;
+ type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>;
  /**
  * Payload passed as body in failureFunction
  */
@@ -1295,4 +1313,4 @@ type HeaderParams = {
  callTimeout?: never;
  });
 
- export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };
+ export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type ExclusiveValidationOptions as E, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };
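`ExclusiveValidationOptions` above is a two-branch union that types the unused option as `never`, so `schema` and `initialPayloadParser` cannot be combined. A small type-level illustration (the payload shape is hypothetical):

```ts
import { z } from "zod";
import { serve } from "@upstash/workflow";

const schema = z.object({ id: z.string() });

// Either a zod schema...
serve<{ id: string }>(async () => {}, { schema });

// ...or a custom parser, but not both:
serve<{ id: string }>(async () => {}, {
  initialPayloadParser: (raw) => JSON.parse(raw),
});

// Passing `schema` and `initialPayloadParser` together fails to type-check,
// because each branch of ExclusiveValidationOptions marks the other field as `never`.
```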
@@ -1,4 +1,5 @@
  import { PublishRequest, Client, Receiver, HTTPMethods as HTTPMethods$1 } from '@upstash/qstash';
+ import { z } from 'zod';
  import * as ai from 'ai';
  import { CoreTool, generateText } from 'ai';
  import * as _ai_sdk_openai from '@ai-sdk/openai';
@@ -349,6 +350,7 @@ declare class OpenAIAPI extends BaseWorkflowApi {
  call<TResult = ChatCompletion, TBody = CreateChatCompletion>(stepName: string, settings: ApiCallSettings<TBody, {
  token: string;
  organization?: string;
+ baseURL?: string;
  operation: "chat.completions.create";
  }>): Promise<CallResponse<TResult>>;
  }
@@ -391,45 +393,16 @@ declare class WorkflowApi extends BaseWorkflowApi {
  }
 
  /**
- * An Agent which utilizes the model and tools available to it
- * to achieve a given task
+ * creates an AI SDK openai client with a custom
+ * fetch implementation which uses context.call.
  *
- * @param name Name of the agent
- * @param background Background of the agent
- * @param model LLM model to use
- * @param tools tools available to the agent
- * @param maxSteps number of times the agent can call the LLM at most. If
- * the agent abruptly stops execution after calling tools, you may need
- * to increase maxSteps
- * @param temparature temparature used when calling the LLM
+ * @param context workflow context
+ * @returns ai sdk openai
  */
- declare class Agent {
- readonly name: AgentParameters["name"];
- readonly tools: AgentParameters["tools"];
- readonly maxSteps: AgentParameters["maxSteps"];
- readonly background: AgentParameters["background"];
- readonly model: AgentParameters["model"];
- readonly temparature: AgentParameters["temparature"];
- private readonly context;
- constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
- /**
- * Trigger the agent by passing a prompt
- *
- * @param prompt task to assign to the agent
- * @returns Response as `{ text: string }`
- */
- call({ prompt }: {
- prompt: string;
- }): Promise<{
- text: string;
- }>;
- /**
- * Convert the agent to a tool which can be used by other agents.
- *
- * @returns the agent as a tool
- */
- asTool(): AISDKTool;
- }
+ declare const createWorkflowOpenAI: (context: WorkflowContext, config?: {
+ baseURL?: string;
+ apiKey?: string;
+ }) => _ai_sdk_openai.OpenAIProvider;
 
  type AISDKTool = CoreTool;
  type LangchainTool = {
@@ -501,15 +474,53 @@ type MultiAgentTaskParams = TaskParams & {
  */
  background?: string;
  };
+ type ModelParams = Parameters<ReturnType<typeof createWorkflowOpenAI>>;
+ type CustomModelSettings = ModelParams["1"] & {
+ baseURL?: string;
+ apiKey?: string;
+ };
+ type CustomModelParams = [ModelParams[0], CustomModelSettings?];
 
  /**
- * creates an AI SDK openai client with a custom
- * fetch implementation which uses context.call.
+ * An Agent which utilizes the model and tools available to it
+ * to achieve a given task
  *
- * @param context workflow context
- * @returns ai sdk openai
+ * @param name Name of the agent
+ * @param background Background of the agent
+ * @param model LLM model to use
+ * @param tools tools available to the agent
+ * @param maxSteps number of times the agent can call the LLM at most. If
+ * the agent abruptly stops execution after calling tools, you may need
+ * to increase maxSteps
+ * @param temparature temparature used when calling the LLM
  */
- declare const createWorkflowOpenAI: (context: WorkflowContext) => _ai_sdk_openai.OpenAIProvider;
+ declare class Agent {
+ readonly name: AgentParameters["name"];
+ readonly tools: AgentParameters["tools"];
+ readonly maxSteps: AgentParameters["maxSteps"];
+ readonly background: AgentParameters["background"];
+ readonly model: AgentParameters["model"];
+ readonly temparature: AgentParameters["temparature"];
+ private readonly context;
+ constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
+ call({ prompt }: {
+ prompt: string;
+ }): Promise<{
+ text: string;
+ }>;
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
+ asTool(): AISDKTool;
+ }
 
  /**
  * An Agent Task
@@ -621,7 +632,7 @@ declare class WorkflowAgents {
  /**
  * creates an openai model for agents
  */
- openai(...params: Parameters<ReturnType<typeof createWorkflowOpenAI>>): ai.LanguageModelV1;
+ openai(...params: CustomModelParams): ai.LanguageModelV1;
  }
 
  /**
@@ -1026,7 +1037,7 @@ type StepFunction<TResult> = AsyncStepFunction<TResult> | SyncStepFunction<TResu
  type ParallelCallState = "first" | "partial" | "discard" | "last";
  type RouteFunction<TInitialPayload> = (context: WorkflowContext<TInitialPayload>) => Promise<void>;
  type FinishCondition = "success" | "duplicate-step" | "fromCallback" | "auth-fail" | "failure-callback" | "workflow-already-ended";
- type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = {
+ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = ValidationOptions<TInitialPayload> & {
  /**
  * QStash client
  */
@@ -1038,10 +1049,6 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * @returns response
  */
  onStepFinish?: (workflowRunId: string, finishCondition: FinishCondition) => TResponse;
- /**
- * Function to parse the initial payload passed by the user
- */
- initialPayloadParser?: (initialPayload: string) => TInitialPayload;
  /**
  * Url of the endpoint where the workflow is set up.
  *
@@ -1122,6 +1129,17 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * Set `disableTelemetry` to disable this behavior.
  */
  disableTelemetry?: boolean;
+ } & ValidationOptions<TInitialPayload>;
+ type ValidationOptions<TInitialPayload> = {
+ schema?: z.ZodType<TInitialPayload>;
+ initialPayloadParser?: (initialPayload: string) => TInitialPayload;
+ };
+ type ExclusiveValidationOptions<TInitialPayload> = {
+ schema?: ValidationOptions<TInitialPayload>["schema"];
+ initialPayloadParser?: never;
+ } | {
+ schema?: never;
+ initialPayloadParser?: ValidationOptions<TInitialPayload>["initialPayloadParser"];
  };
  type Telemetry = {
  /**
@@ -1137,7 +1155,7 @@ type Telemetry = {
  */
  runtime?: string;
  };
- type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent">;
+ type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>;
  /**
  * Payload passed as body in failureFunction
  */
@@ -1295,4 +1313,4 @@ type HeaderParams = {
  callTimeout?: never;
  });
 
- export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };
+ export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type ExclusiveValidationOptions as E, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };