@upstash/workflow 0.2.6-canary.0 → 0.2.6

This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
package/astro.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { APIContext, APIRoute } from 'astro';
- import { b as WorkflowContext, j as PublicServeOptions } from './types-DKLbLp0d.mjs';
+ import { b as WorkflowContext, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/astro.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { APIContext, APIRoute } from 'astro';
- import { b as WorkflowContext, j as PublicServeOptions } from './types-DKLbLp0d.js';
+ import { b as WorkflowContext, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/astro.js CHANGED
@@ -1640,9 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1915,8 +1917,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2672,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
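
Note on the adapter change above: `createWorkflowOpenAI` now takes a config object, so the agents' `openai(...)` model factory forwards an `apiKey` in addition to the existing `baseURL`. A minimal sketch of how a route might pass these settings, assuming the Next.js `serve` wrapper; the model name, environment variable, and endpoint are illustrative:

import { serve } from "@upstash/workflow/nextjs";

export const { POST } = serve(async (context) => {
  // `apiKey` is newly accepted alongside `baseURL`; both are forwarded
  // through createWorkflowOpenAI to the AI SDK's createOpenAI.
  const model = context.agents.openai("gpt-4o-mini", {
    apiKey: process.env.OPENAI_API_KEY, // illustrative env var
    baseURL: "https://api.openai.com/v1", // optional custom endpoint
  });
  // ...pass `model` to an agent as before
});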
package/astro.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/astro.ts
  function serve(routeFunction, options) {
@@ -1653,9 +1653,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return createOpenAI({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1928,8 +1930,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2685,7 +2687,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/cloudflare.d.mts CHANGED
@@ -1,5 +1,6 @@
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/cloudflare.d.ts CHANGED
@@ -1,5 +1,6 @@
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/cloudflare.js CHANGED
@@ -1640,9 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1915,8 +1917,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2672,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/cloudflare.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/cloudflare.ts
  var getArgs = (args) => {
package/express.d.mts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, W as WorkflowServeOptions } from './types-Cuqlx2Cr.mjs';
  import { Router } from 'express';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/express.d.ts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, W as WorkflowServeOptions } from './types-Cuqlx2Cr.js';
  import { Router } from 'express';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/express.js CHANGED
@@ -25312,9 +25312,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -25587,8 +25589,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -26344,7 +26346,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/express.mjs CHANGED
@@ -4,7 +4,7 @@ import {
  __require,
  __toESM,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // node_modules/depd/index.js
  var require_depd = __commonJS({
package/h3.d.mts CHANGED
@@ -1,6 +1,7 @@
  import * as h3 from 'h3';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/h3.d.ts CHANGED
@@ -1,6 +1,7 @@
  import * as h3 from 'h3';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/h3.js CHANGED
@@ -1952,9 +1952,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -2227,8 +2229,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2984,7 +2986,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/h3.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // node_modules/defu/dist/defu.mjs
  function isPlainObject(value) {
package/hono.d.mts CHANGED
@@ -1,7 +1,8 @@
  import { Context } from 'hono';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import { Variables } from 'hono/types';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/hono.d.ts CHANGED
@@ -1,7 +1,8 @@
  import { Context } from 'hono';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import { Variables } from 'hono/types';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/hono.js CHANGED
@@ -1640,9 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1915,8 +1917,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2672,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/hono.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/hono.ts
  var serve = (routeFunction, options) => {
package/index.d.mts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-DKLbLp0d.mjs';
- export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, W as WorkflowServeOptions, E as ExclusiveValidationOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-Cuqlx2Cr.mjs';
+ export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-Cuqlx2Cr.mjs';
  import { Client as Client$1, QstashError } from '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
@@ -12,7 +13,7 @@ import '@ai-sdk/openai';
  * @param options - Options including the client, onFinish callback, and initialPayloadParser.
  * @returns An async method that consumes incoming requests and runs the workflow.
  */
- declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent">) => {
+ declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>) => {
  handler: (request: TRequest) => Promise<TResponse>;
  };
 
@@ -191,4 +192,4 @@ declare class WorkflowAbort extends Error {
  constructor(stepName: string, stepInfo?: Step, cancelWorkflow?: boolean);
  }
 
- export { Client, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
+ export { Client, ExclusiveValidationOptions, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
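
The reshaped `serve` signature above adds payload validation: callers may supply either a zod `schema` or an `initialPayloadParser`, and `ExclusiveValidationOptions` prevents combining the two. A minimal sketch of the schema path, assuming a Next.js route; the payload shape is illustrative:

import { z } from "zod";
import { serve } from "@upstash/workflow/nextjs";

// Illustrative schema; payloads that fail validation throw from
// schema.parse inside the SDK's processOptions.
const payloadSchema = z.object({
  userId: z.string(),
  plan: z.enum(["free", "pro"]),
});

export const { POST } = serve<z.infer<typeof payloadSchema>>(
  async (context) => {
    // requestPayload is parsed and validated against payloadSchema
    const { userId, plan } = context.requestPayload;
    await context.run("log-user", () => console.log(userId, plan));
  },
  { schema: payloadSchema } // mutually exclusive with initialPayloadParser
);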
package/index.d.ts CHANGED
@@ -1,6 +1,7 @@
- import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-DKLbLp0d.js';
- export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, W as WorkflowServeOptions, E as ExclusiveValidationOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-Cuqlx2Cr.js';
+ export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-Cuqlx2Cr.js';
  import { Client as Client$1, QstashError } from '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
@@ -12,7 +13,7 @@ import '@ai-sdk/openai';
  * @param options - Options including the client, onFinish callback, and initialPayloadParser.
  * @returns An async method that consumes incoming requests and runs the workflow.
  */
- declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent">) => {
+ declare const serve: <TInitialPayload = unknown, TRequest extends Request = Request, TResponse extends Response = Response>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>) => {
  handler: (request: TRequest) => Promise<TResponse>;
  };
 
@@ -191,4 +192,4 @@ declare class WorkflowAbort extends Error {
  constructor(stepName: string, stepInfo?: Step, cancelWorkflow?: boolean);
  }
 
- export { Client, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
+ export { Client, ExclusiveValidationOptions, NotifyResponse, RouteFunction, Step, Waiter, WorkflowAbort, WorkflowError, WorkflowServeOptions, serve };
package/index.js CHANGED
@@ -1653,9 +1653,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1928,8 +1930,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2685,7 +2687,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/index.mjs CHANGED
@@ -9,7 +9,7 @@ import {
  makeNotifyRequest,
  serve,
  triggerFirstInvocation
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // src/client/index.ts
  import { Client as QStashClient } from "@upstash/qstash";
package/nextjs.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { NextApiHandler } from 'next';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/nextjs.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { NextApiHandler } from 'next';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/nextjs.js CHANGED
@@ -1641,9 +1641,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1916,8 +1918,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2673,7 +2675,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/nextjs.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/nextjs.ts
  var serve = (routeFunction, options) => {
package/package.json CHANGED
@@ -1 +1 @@
- {"name":"@upstash/workflow","version":"v0.2.6-canary.0","description":"Durable, Reliable and Performant Serverless Functions","main":"./index.js","module":"./index.mjs","types":"./index.d.ts","files":["./*"],"exports":{".":{"import":"./index.mjs","require":"./index.js"},"./dist/nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./h3":{"import":"./h3.mjs","require":"./h3.js"},"./svelte":{"import":"./svelte.mjs","require":"./svelte.js"},"./solidjs":{"import":"./solidjs.mjs","require":"./solidjs.js"},"./workflow":{"import":"./workflow.mjs","require":"./workflow.js"},"./hono":{"import":"./hono.mjs","require":"./hono.js"},"./cloudflare":{"import":"./cloudflare.mjs","require":"./cloudflare.js"},"./astro":{"import":"./astro.mjs","require":"./astro.js"},"./express":{"import":"./express.mjs","require":"./express.js"}},"scripts":{"build":"tsup && cp README.md ./dist/ && cp package.json ./dist/ && cp LICENSE ./dist/","test":"bun test src","fmt":"prettier --write .","lint":"tsc && eslint \"{src,platforms}/**/*.{js,ts,tsx}\" --quiet --fix","check-exports":"bun run build && cd dist && attw -P"},"repository":{"type":"git","url":"git+https://github.com/upstash/workflow-ts.git"},"keywords":["upstash","qstash","workflow","serverless"],"author":"Cahid Arda Oz","license":"MIT","bugs":{"url":"https://github.com/upstash/workflow-ts/issues"},"homepage":"https://github.com/upstash/workflow-ts#readme","devDependencies":{"@commitlint/cli":"^19.5.0","@commitlint/config-conventional":"^19.5.0","@eslint/js":"^9.11.1","@solidjs/start":"^1.0.8","@sveltejs/kit":"^2.6.1","@types/bun":"^1.1.10","@types/express":"^5.0.0","astro":"^4.16.7","eslint":"^9.11.1","eslint-plugin-unicorn":"^55.0.0","express":"^4.21.1","globals":"^15.10.0","h3":"^1.12.0","hono":"^4.6.3","husky":"^9.1.6","next":"^14.2.14","prettier":"3.3.3","tsup":"^8.3.0","typescript":"^5.7.2","typescript-eslint":"^8.18.0"},"dependencies":{"@ai-sdk/openai":"^1.0.15","@upstash/qstash":"^2.7.20","ai":"^4.0.30","zod":"^3.24.1"},"directories":{"example":"examples"}}
+ {"name":"@upstash/workflow","version":"v0.2.6","description":"Durable, Reliable and Performant Serverless Functions","main":"./index.js","module":"./index.mjs","types":"./index.d.ts","files":["./*"],"exports":{".":{"import":"./index.mjs","require":"./index.js"},"./dist/nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./nextjs":{"import":"./nextjs.mjs","require":"./nextjs.js"},"./h3":{"import":"./h3.mjs","require":"./h3.js"},"./svelte":{"import":"./svelte.mjs","require":"./svelte.js"},"./solidjs":{"import":"./solidjs.mjs","require":"./solidjs.js"},"./workflow":{"import":"./workflow.mjs","require":"./workflow.js"},"./hono":{"import":"./hono.mjs","require":"./hono.js"},"./cloudflare":{"import":"./cloudflare.mjs","require":"./cloudflare.js"},"./astro":{"import":"./astro.mjs","require":"./astro.js"},"./express":{"import":"./express.mjs","require":"./express.js"}},"scripts":{"build":"tsup && cp README.md ./dist/ && cp package.json ./dist/ && cp LICENSE ./dist/","test":"bun test src","fmt":"prettier --write .","lint":"tsc && eslint \"{src,platforms}/**/*.{js,ts,tsx}\" --quiet --fix","check-exports":"bun run build && cd dist && attw -P"},"repository":{"type":"git","url":"git+https://github.com/upstash/workflow-ts.git"},"keywords":["upstash","qstash","workflow","serverless"],"author":"Cahid Arda Oz","license":"MIT","bugs":{"url":"https://github.com/upstash/workflow-ts/issues"},"homepage":"https://github.com/upstash/workflow-ts#readme","devDependencies":{"@commitlint/cli":"^19.5.0","@commitlint/config-conventional":"^19.5.0","@eslint/js":"^9.11.1","@solidjs/start":"^1.0.8","@sveltejs/kit":"^2.6.1","@types/bun":"^1.1.10","@types/express":"^5.0.0","astro":"^4.16.7","eslint":"^9.11.1","eslint-plugin-unicorn":"^55.0.0","express":"^4.21.1","globals":"^15.10.0","h3":"^1.12.0","hono":"^4.6.3","husky":"^9.1.6","next":"^14.2.14","prettier":"3.3.3","tsup":"^8.3.0","typescript":"^5.7.2","typescript-eslint":"^8.18.0"},"dependencies":{"@ai-sdk/openai":"^1.0.15","@upstash/qstash":"^2.7.20","ai":"^4.0.30","zod":"^3.24.1"},"directories":{"example":"examples"}}
package/solidjs.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { APIEvent } from '@solidjs/start/server';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/solidjs.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { APIEvent } from '@solidjs/start/server';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/solidjs.js CHANGED
@@ -1640,9 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1915,8 +1917,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2672,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/solidjs.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/solidjs.ts
  var serve = (routeFunction, options) => {
package/svelte.d.mts CHANGED
@@ -1,6 +1,7 @@
  import { RequestHandler } from '@sveltejs/kit';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.mjs';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/svelte.d.ts CHANGED
@@ -1,6 +1,7 @@
  import { RequestHandler } from '@sveltejs/kit';
- import { R as RouteFunction, j as PublicServeOptions } from './types-DKLbLp0d.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-Cuqlx2Cr.js';
  import '@upstash/qstash';
+ import 'zod';
  import 'ai';
  import '@ai-sdk/openai';
 
package/svelte.js CHANGED
@@ -1640,9 +1640,11 @@ you need from that agent.
  `;
 
  // src/agents/adapters.ts
- var createWorkflowOpenAI = (context, baseURL) => {
+ var createWorkflowOpenAI = (context, config) => {
+ const { baseURL, apiKey } = config ?? {};
  return (0, import_openai2.createOpenAI)({
  baseURL,
+ apiKey,
  compatibility: "strict",
  fetch: async (input, init) => {
  try {
@@ -1915,8 +1917,8 @@ var WorkflowAgents = class {
  */
  openai(...params) {
  const [model, settings] = params;
- const { baseURL, ...otherSettings } = settings ?? {};
- const openai2 = createWorkflowOpenAI(this.context, baseURL);
+ const { baseURL, apiKey, ...otherSettings } = settings ?? {};
+ const openai2 = createWorkflowOpenAI(this.context, { baseURL, apiKey });
  return openai2(model, otherSettings);
  }
  };
@@ -2672,7 +2674,8 @@ var processOptions = (options) => {
  return void 0;
  }
  try {
- return JSON.parse(initialRequest);
+ const parsed = JSON.parse(initialRequest);
+ return options?.schema ? options.schema.parse(parsed) : parsed;
  } catch (error) {
  if (error instanceof SyntaxError) {
  return initialRequest;
package/svelte.mjs CHANGED
@@ -1,7 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-XGCTFI4M.mjs";
+ } from "./chunk-OLNSY3BB.mjs";
 
  // platforms/svelte.ts
  var serve = (routeFunction, options) => {
@@ -1,4 +1,5 @@
  import { PublishRequest, Client, Receiver, HTTPMethods as HTTPMethods$1 } from '@upstash/qstash';
+ import { z } from 'zod';
  import * as ai from 'ai';
  import { CoreTool, generateText } from 'ai';
  import * as _ai_sdk_openai from '@ai-sdk/openai';
@@ -392,45 +393,16 @@ declare class WorkflowApi extends BaseWorkflowApi {
  }
 
  /**
- * An Agent which utilizes the model and tools available to it
- * to achieve a given task
+ * creates an AI SDK openai client with a custom
+ * fetch implementation which uses context.call.
  *
- * @param name Name of the agent
- * @param background Background of the agent
- * @param model LLM model to use
- * @param tools tools available to the agent
- * @param maxSteps number of times the agent can call the LLM at most. If
- * the agent abruptly stops execution after calling tools, you may need
- * to increase maxSteps
- * @param temparature temparature used when calling the LLM
+ * @param context workflow context
+ * @returns ai sdk openai
  */
- declare class Agent {
- readonly name: AgentParameters["name"];
- readonly tools: AgentParameters["tools"];
- readonly maxSteps: AgentParameters["maxSteps"];
- readonly background: AgentParameters["background"];
- readonly model: AgentParameters["model"];
- readonly temparature: AgentParameters["temparature"];
- private readonly context;
- constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
- /**
- * Trigger the agent by passing a prompt
- *
- * @param prompt task to assign to the agent
- * @returns Response as `{ text: string }`
- */
- call({ prompt }: {
- prompt: string;
- }): Promise<{
- text: string;
- }>;
- /**
- * Convert the agent to a tool which can be used by other agents.
- *
- * @returns the agent as a tool
- */
- asTool(): AISDKTool;
- }
+ declare const createWorkflowOpenAI: (context: WorkflowContext, config?: {
+ baseURL?: string;
+ apiKey?: string;
+ }) => _ai_sdk_openai.OpenAIProvider;
 
  type AISDKTool = CoreTool;
  type LangchainTool = {
@@ -502,20 +474,53 @@ type MultiAgentTaskParams = TaskParams & {
  */
  background?: string;
  };
-
  type ModelParams = Parameters<ReturnType<typeof createWorkflowOpenAI>>;
- type ModelSettingsWithBaseURL = ModelParams["1"] & {
+ type CustomModelSettings = ModelParams["1"] & {
  baseURL?: string;
+ apiKey?: string;
  };
- type ModelParamsWithBaseURL = [ModelParams[0], ModelSettingsWithBaseURL?];
+ type CustomModelParams = [ModelParams[0], CustomModelSettings?];
+
  /**
- * creates an AI SDK openai client with a custom
- * fetch implementation which uses context.call.
+ * An Agent which utilizes the model and tools available to it
+ * to achieve a given task
  *
- * @param context workflow context
- * @returns ai sdk openai
+ * @param name Name of the agent
+ * @param background Background of the agent
+ * @param model LLM model to use
+ * @param tools tools available to the agent
+ * @param maxSteps number of times the agent can call the LLM at most. If
+ * the agent abruptly stops execution after calling tools, you may need
+ * to increase maxSteps
+ * @param temparature temparature used when calling the LLM
  */
- declare const createWorkflowOpenAI: (context: WorkflowContext, baseURL?: string) => _ai_sdk_openai.OpenAIProvider;
+ declare class Agent {
+ readonly name: AgentParameters["name"];
+ readonly tools: AgentParameters["tools"];
+ readonly maxSteps: AgentParameters["maxSteps"];
+ readonly background: AgentParameters["background"];
+ readonly model: AgentParameters["model"];
+ readonly temparature: AgentParameters["temparature"];
+ private readonly context;
+ constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
+ call({ prompt }: {
+ prompt: string;
+ }): Promise<{
+ text: string;
+ }>;
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
+ asTool(): AISDKTool;
+ }
 
  /**
  * An Agent Task
@@ -627,7 +632,7 @@ declare class WorkflowAgents {
  /**
  * creates an openai model for agents
  */
- openai(...params: ModelParamsWithBaseURL): ai.LanguageModelV1;
+ openai(...params: CustomModelParams): ai.LanguageModelV1;
  }
 
  /**
@@ -1032,7 +1037,7 @@ type StepFunction<TResult> = AsyncStepFunction<TResult> | SyncStepFunction<TResu
  type ParallelCallState = "first" | "partial" | "discard" | "last";
  type RouteFunction<TInitialPayload> = (context: WorkflowContext<TInitialPayload>) => Promise<void>;
  type FinishCondition = "success" | "duplicate-step" | "fromCallback" | "auth-fail" | "failure-callback" | "workflow-already-ended";
- type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = {
+ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = ValidationOptions<TInitialPayload> & {
  /**
  * QStash client
  */
@@ -1044,10 +1049,6 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * @returns response
  */
  onStepFinish?: (workflowRunId: string, finishCondition: FinishCondition) => TResponse;
- /**
- * Function to parse the initial payload passed by the user
- */
- initialPayloadParser?: (initialPayload: string) => TInitialPayload;
  /**
  * Url of the endpoint where the workflow is set up.
  *
@@ -1128,6 +1129,17 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * Set `disableTelemetry` to disable this behavior.
  */
  disableTelemetry?: boolean;
+ } & ValidationOptions<TInitialPayload>;
+ type ValidationOptions<TInitialPayload> = {
+ schema?: z.ZodType<TInitialPayload>;
+ initialPayloadParser?: (initialPayload: string) => TInitialPayload;
+ };
+ type ExclusiveValidationOptions<TInitialPayload> = {
+ schema?: ValidationOptions<TInitialPayload>["schema"];
+ initialPayloadParser?: never;
+ } | {
+ schema?: never;
+ initialPayloadParser?: ValidationOptions<TInitialPayload>["initialPayloadParser"];
  };
  type Telemetry = {
  /**
@@ -1143,7 +1155,7 @@ type Telemetry = {
  */
  runtime?: string;
  };
- type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent">;
+ type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>;
  /**
  * Payload passed as body in failureFunction
  */
@@ -1301,4 +1313,4 @@ type HeaderParams = {
  callTimeout?: never;
  });
 
- export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };
+ export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type ExclusiveValidationOptions as E, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };
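
As the `ExclusiveValidationOptions` union above encodes, `schema` and `initialPayloadParser` cannot be passed together; routes that need custom parsing keep using the parser on its own. A hedged sketch of that alternative, with an illustrative payload type:

import { serve } from "@upstash/workflow/nextjs";

type InitialPayload = { orderId: number }; // illustrative

export const { POST } = serve<InitialPayload>(
  async (context) => {
    await context.run("handle-order", () => context.requestPayload.orderId);
  },
  {
    // Allowed on its own; adding `schema` here as well would be rejected
    // by the ExclusiveValidationOptions type.
    initialPayloadParser: (raw) => ({ orderId: Number(raw) }),
  }
);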
@@ -1,4 +1,5 @@
  import { PublishRequest, Client, Receiver, HTTPMethods as HTTPMethods$1 } from '@upstash/qstash';
+ import { z } from 'zod';
  import * as ai from 'ai';
  import { CoreTool, generateText } from 'ai';
  import * as _ai_sdk_openai from '@ai-sdk/openai';
@@ -392,45 +393,16 @@ declare class WorkflowApi extends BaseWorkflowApi {
  }
 
  /**
- * An Agent which utilizes the model and tools available to it
- * to achieve a given task
+ * creates an AI SDK openai client with a custom
+ * fetch implementation which uses context.call.
  *
- * @param name Name of the agent
- * @param background Background of the agent
- * @param model LLM model to use
- * @param tools tools available to the agent
- * @param maxSteps number of times the agent can call the LLM at most. If
- * the agent abruptly stops execution after calling tools, you may need
- * to increase maxSteps
- * @param temparature temparature used when calling the LLM
+ * @param context workflow context
+ * @returns ai sdk openai
  */
- declare class Agent {
- readonly name: AgentParameters["name"];
- readonly tools: AgentParameters["tools"];
- readonly maxSteps: AgentParameters["maxSteps"];
- readonly background: AgentParameters["background"];
- readonly model: AgentParameters["model"];
- readonly temparature: AgentParameters["temparature"];
- private readonly context;
- constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
- /**
- * Trigger the agent by passing a prompt
- *
- * @param prompt task to assign to the agent
- * @returns Response as `{ text: string }`
- */
- call({ prompt }: {
- prompt: string;
- }): Promise<{
- text: string;
- }>;
- /**
- * Convert the agent to a tool which can be used by other agents.
- *
- * @returns the agent as a tool
- */
- asTool(): AISDKTool;
- }
+ declare const createWorkflowOpenAI: (context: WorkflowContext, config?: {
+ baseURL?: string;
+ apiKey?: string;
+ }) => _ai_sdk_openai.OpenAIProvider;
 
  type AISDKTool = CoreTool;
  type LangchainTool = {
@@ -502,20 +474,53 @@ type MultiAgentTaskParams = TaskParams & {
  */
  background?: string;
  };
-
  type ModelParams = Parameters<ReturnType<typeof createWorkflowOpenAI>>;
- type ModelSettingsWithBaseURL = ModelParams["1"] & {
+ type CustomModelSettings = ModelParams["1"] & {
  baseURL?: string;
+ apiKey?: string;
  };
- type ModelParamsWithBaseURL = [ModelParams[0], ModelSettingsWithBaseURL?];
+ type CustomModelParams = [ModelParams[0], CustomModelSettings?];
+
  /**
- * creates an AI SDK openai client with a custom
- * fetch implementation which uses context.call.
+ * An Agent which utilizes the model and tools available to it
+ * to achieve a given task
  *
- * @param context workflow context
- * @returns ai sdk openai
+ * @param name Name of the agent
+ * @param background Background of the agent
+ * @param model LLM model to use
+ * @param tools tools available to the agent
+ * @param maxSteps number of times the agent can call the LLM at most. If
+ * the agent abruptly stops execution after calling tools, you may need
+ * to increase maxSteps
+ * @param temparature temparature used when calling the LLM
  */
- declare const createWorkflowOpenAI: (context: WorkflowContext, baseURL?: string) => _ai_sdk_openai.OpenAIProvider;
+ declare class Agent {
+ readonly name: AgentParameters["name"];
+ readonly tools: AgentParameters["tools"];
+ readonly maxSteps: AgentParameters["maxSteps"];
+ readonly background: AgentParameters["background"];
+ readonly model: AgentParameters["model"];
+ readonly temparature: AgentParameters["temparature"];
+ private readonly context;
+ constructor({ tools, maxSteps, background, name, model, temparature }: AgentParameters, context: WorkflowContext);
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
+ call({ prompt }: {
+ prompt: string;
+ }): Promise<{
+ text: string;
+ }>;
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
+ asTool(): AISDKTool;
+ }
 
  /**
  * An Agent Task
@@ -627,7 +632,7 @@ declare class WorkflowAgents {
  /**
  * creates an openai model for agents
  */
- openai(...params: ModelParamsWithBaseURL): ai.LanguageModelV1;
+ openai(...params: CustomModelParams): ai.LanguageModelV1;
  }
 
  /**
@@ -1032,7 +1037,7 @@ type StepFunction<TResult> = AsyncStepFunction<TResult> | SyncStepFunction<TResu
  type ParallelCallState = "first" | "partial" | "discard" | "last";
  type RouteFunction<TInitialPayload> = (context: WorkflowContext<TInitialPayload>) => Promise<void>;
  type FinishCondition = "success" | "duplicate-step" | "fromCallback" | "auth-fail" | "failure-callback" | "workflow-already-ended";
- type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = {
+ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload = unknown> = ValidationOptions<TInitialPayload> & {
  /**
  * QStash client
  */
@@ -1044,10 +1049,6 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * @returns response
  */
  onStepFinish?: (workflowRunId: string, finishCondition: FinishCondition) => TResponse;
- /**
- * Function to parse the initial payload passed by the user
- */
- initialPayloadParser?: (initialPayload: string) => TInitialPayload;
  /**
  * Url of the endpoint where the workflow is set up.
  *
@@ -1128,6 +1129,17 @@ type WorkflowServeOptions<TResponse extends Response = Response, TInitialPayload
  * Set `disableTelemetry` to disable this behavior.
  */
  disableTelemetry?: boolean;
+ } & ValidationOptions<TInitialPayload>;
+ type ValidationOptions<TInitialPayload> = {
+ schema?: z.ZodType<TInitialPayload>;
+ initialPayloadParser?: (initialPayload: string) => TInitialPayload;
+ };
+ type ExclusiveValidationOptions<TInitialPayload> = {
+ schema?: ValidationOptions<TInitialPayload>["schema"];
+ initialPayloadParser?: never;
+ } | {
+ schema?: never;
+ initialPayloadParser?: ValidationOptions<TInitialPayload>["initialPayloadParser"];
  };
  type Telemetry = {
  /**
@@ -1143,7 +1155,7 @@ type Telemetry = {
  */
  runtime?: string;
  };
- type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent">;
+ type PublicServeOptions<TInitialPayload = unknown, TResponse extends Response = Response> = Omit<WorkflowServeOptions<TResponse, TInitialPayload>, "onStepFinish" | "useJSONContent" | "schema" | "initialPayloadParser"> & ExclusiveValidationOptions<TInitialPayload>;
  /**
  * Payload passed as body in failureFunction
  */
@@ -1301,4 +1313,4 @@ type HeaderParams = {
  callTimeout?: never;
  });
 
- export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };
+ export { type AsyncStepFunction as A, type CallResponse as C, type Duration as D, type ExclusiveValidationOptions as E, type FinishCondition as F, type HeaderParams as H, type LogLevel as L, type NotifyResponse as N, type ParallelCallState as P, type RouteFunction as R, type Step as S, type Telemetry as T, type WorkflowServeOptions as W, type Waiter as a, WorkflowContext as b, type WorkflowClient as c, type WorkflowReceiver as d, StepTypes as e, type StepType as f, type RawStep as g, type SyncStepFunction as h, type StepFunction as i, type PublicServeOptions as j, type FailureFunctionPayload as k, type RequiredExceptFields as l, type WaitRequest as m, type WaitStepResponse as n, type NotifyStepResponse as o, type WaitEventOptions as p, type CallSettings as q, type WorkflowLoggerOptions as r, WorkflowLogger as s };