@upstash/workflow 0.2.5-agents → 0.2.5-agents-3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cloudflare.js CHANGED
@@ -826,29 +826,16 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
  await debug?.log("SUBMIT", "SUBMIT_CLEANUP", {
  deletedWorkflowRunId: workflowContext.workflowRunId
  });
- try {
- await workflowContext.qstashClient.http.request({
- path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
- method: "DELETE",
- parseResponseAsJson: false
- });
- await debug?.log(
- "SUBMIT",
- "SUBMIT_CLEANUP",
- `workflow run ${workflowContext.workflowRunId} deleted.`
- );
- return { deleted: true };
- } catch (error) {
- if (error instanceof import_qstash3.QstashError && error.status === 404) {
- await debug?.log("WARN", "SUBMIT_CLEANUP", {
- message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
- name: error.name,
- errorMessage: error.message
- });
- return { deleted: false };
- }
- throw error;
- }
+ await workflowContext.qstashClient.http.request({
+ path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
+ method: "DELETE",
+ parseResponseAsJson: false
+ });
+ await debug?.log(
+ "SUBMIT",
+ "SUBMIT_CLEANUP",
+ `workflow run ${workflowContext.workflowRunId} deleted.`
+ );
  };
  var recreateUserHeaders = (headers) => {
  const filteredHeaders = new Headers();
@@ -1635,7 +1622,22 @@ var WorkflowApi = class extends BaseWorkflowApi {
  // src/agents/adapters.ts
  var import_openai2 = require("@ai-sdk/openai");
  var import_ai = require("ai");
+
+ // src/agents/constants.ts
  var AGENT_NAME_HEADER = "upstash-agent-name";
+ var MANAGER_AGENT_PROMPT = `You are an agent orchestrating other AI Agents.
+
+ These other agents have tools available to them.
+
+ Given a prompt, utilize these agents to address requests.
+
+ Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
+
+ Avoid calling the same agent twice in one turn. Instead, prefer to call it once but provide everything
+ you need from that agent.
+ `;
+
+ // src/agents/adapters.ts
  var createWorkflowOpenAI = (context) => {
  return (0, import_openai2.createOpenAI)({
  compatibility: "strict",
@@ -1702,30 +1704,49 @@ var convertLangchainTool = (langchainTool) => {
  return (0, import_ai.tool)({
  description: langchainTool.description,
  parameters: langchainTool.schema,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- execute: async (param) => langchainTool.invoke(param)
+ execute: async (...param) => langchainTool.invoke(...param)
  });
  };

  // src/agents/agent.ts
  var import_zod = require("zod");
  var import_ai2 = require("ai");
+
+ // src/serve/utils.ts
+ var isDisabledWorkflowContext = (context) => {
+ return "disabled" in context;
+ };
+
+ // src/agents/agent.ts
  var Agent = class {
  name;
  tools;
  maxSteps;
  background;
  model;
- constructor({ tools, maxSteps, background, name, model }) {
+ temparature;
+ context;
+ constructor({ tools, maxSteps, background, name, model, temparature = 0.1 }, context) {
  this.name = name;
  this.tools = tools ?? {};
  this.maxSteps = maxSteps;
  this.background = background;
  this.model = model;
+ this.temparature = temparature;
+ this.context = context;
  }
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
  async call({ prompt }) {
  try {
- return await (0, import_ai2.generateText)({
+ if (isDisabledWorkflowContext(this.context)) {
+ await this.context.sleep("abort", 0);
+ }
+ const result = await (0, import_ai2.generateText)({
  model: this.model,
  tools: this.tools,
  maxSteps: this.maxSteps,
@@ -1733,8 +1754,10 @@ var Agent = class {
  prompt,
  headers: {
  [AGENT_NAME_HEADER]: this.name
- }
+ },
+ temperature: this.temparature
  });
+ return { text: result.text };
  } catch (error) {
  if (error instanceof import_ai2.ToolExecutionError) {
  if (error.cause instanceof Error && error.cause.name === "WorkflowAbort") {
@@ -1749,6 +1772,11 @@ var Agent = class {
  }
  }
  }
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
  asTool() {
  const toolDescriptions = Object.values(this.tools).map((tool3) => tool3.description).join("\n");
  return (0, import_ai2.tool)({
@@ -1760,27 +1788,37 @@ var Agent = class {
  });
  }
  };
- var MANAGER_AGENT_PROMPT = `You are an AI agent who orchestrates other AI Agents.
- These other agents have tools available to them.
- Given a prompt, utilize these agents to address requests.
- Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
- `;
  var ManagerAgent = class extends Agent {
  agents;
+ /**
+ * A manager agent which coordinates agents available to it to achieve a
+ * given task
+ *
+ * @param name Name of the agent
+ * @param background Background of the agent. If not passed, default will be used.
+ * @param model LLM model to use
+ * @param agents: List of agents available to the agent
+ * @param maxSteps number of times the manager agent can call the LLM at most.
+ * If the agent abruptly stops execution after calling other agents, you may
+ * need to increase maxSteps
+ */
  constructor({
- maxSteps,
- background = MANAGER_AGENT_PROMPT,
  agents,
+ background = MANAGER_AGENT_PROMPT,
  model,
+ maxSteps,
  name = "manager llm"
- }) {
- super({
- background,
- maxSteps,
- tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
- name,
- model
- });
+ }, context) {
+ super(
+ {
+ background,
+ maxSteps,
+ tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
+ name,
+ model
+ },
+ context
+ );
  this.agents = agents;
  }
  };
@@ -1796,25 +1834,32 @@ var Task = class {
  this.context = context;
  this.taskParameters = taskParameters;
  }
+ /**
+ * Run the agents to complete the task
+ *
+ * @returns Result of the task as { text: string }
+ */
  async run() {
  const { prompt, ...otherParams } = this.taskParameters;
- const safePrompt = await this.context.run("Get Prompt", () => prompt);
  if ("agent" in otherParams) {
  const agent = otherParams.agent;
  const result = await agent.call({
- prompt: safePrompt
+ prompt
  });
  return { text: result.text };
  } else {
  const { agents, maxSteps, model, background } = otherParams;
- const managerAgent = new ManagerAgent({
- model,
- maxSteps,
- agents,
- name: "Manager LLM",
- background
- });
- const result = await managerAgent.call({ prompt: safePrompt });
+ const managerAgent = new ManagerAgent(
+ {
+ model,
+ maxSteps,
+ agents,
+ name: "Manager LLM",
+ background
+ },
+ this.context
+ );
+ const result = await managerAgent.call({ prompt });
  return { text: result.text };
  }
  }
@@ -1826,16 +1871,45 @@ var WorkflowAgents = class {
  constructor({ context }) {
  this.context = context;
  }
+ /**
+ * Defines an agent
+ *
+ * ```ts
+ * const researcherAgent = context.agents.agent({
+ * model,
+ * name: 'academic',
+ * maxSteps: 2,
+ * tools: {
+ * wikiTool: new WikipediaQueryRun({
+ * topKResults: 1,
+ * maxDocContentLength: 500,
+ * })
+ * },
+ * background:
+ * 'You are researcher agent with access to Wikipedia. ' +
+ * 'Utilize Wikipedia as much as possible for correct information',
+ * });
+ * ```
+ *
+ * @param params agent parameters
+ * @returns
+ */
  agent(params) {
  const wrappedTools = wrapTools({ context: this.context, tools: params.tools });
- return new Agent({
- ...params,
- tools: wrappedTools
- });
+ return new Agent(
+ {
+ ...params,
+ tools: wrappedTools
+ },
+ this.context
+ );
  }
  task(taskParameters) {
  return new Task({ context: this.context, taskParameters });
  }
+ /**
+ * creates an openai model for agents
+ */
  openai(...params) {
  const openai2 = createWorkflowOpenAI(this.context);
  return openai2(...params);
@@ -2312,6 +2386,7 @@ function decodeBase64(base64) {
  var import_qstash8 = require("@upstash/qstash");
  var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
  static disabledMessage = "disabled-qstash-worklfow-run";
+ disabled = true;
  /**
  * overwrite the WorkflowContext.addStep method to always raise WorkflowAbort
  * error in order to stop the execution whenever we encounter a step.
@@ -2436,7 +2511,6 @@ var checkIfLastOneIsDuplicate = async (steps, debug) => {
  if (step.stepId === lastStepId && step.targetStep === lastTargetStepId) {
  const message = `Upstash Workflow: The step '${step.stepName}' with id '${step.stepId}' has run twice during workflow execution. Rest of the workflow will continue running as usual.`;
  await debug?.log("WARN", "RESPONSE_DEFAULT", message);
- console.log(steps);
  console.warn(message);
  return true;
  }
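
Taken together, the cloudflare.js hunks change the agents surface in this prerelease: `MANAGER_AGENT_PROMPT` moves into `src/agents/constants.ts`, each `Agent` now receives the workflow context as a second constructor argument plus a `temparature` option (spelled as in the package, forwarded to `generateText` as `temperature`), and `Task.run` passes the prompt through directly instead of wrapping it in a "Get Prompt" step. Below is a minimal sketch of how the updated `context.agents` surface would be consumed; the model id, tool, prompts, and route wiring are illustrative assumptions, not taken from the package.

```ts
import { z } from "zod";
import { tool } from "ai";
import type { WorkflowContext } from "@upstash/workflow";

// Hypothetical route function; it would be passed to one of the platform
// `serve` helpers shipped by this package (cloudflare, express, h3, ...).
export const agentRoute = async (context: WorkflowContext) => {
  // Provider bound to the workflow context via createWorkflowOpenAI (see hunks above);
  // the model id is an assumption.
  const model = context.agents.openai("gpt-4o-mini");

  const researcher = context.agents.agent({
    model,
    name: "researcher",
    maxSteps: 2,
    background: "You answer questions concisely.",
    tools: {
      // Plain `ai` tool; wrapTools wraps its execute call into a workflow step.
      echo: tool({
        description: "Echoes the input back",
        parameters: z.object({ input: z.string() }),
        execute: async ({ input }) => input,
      }),
    },
  });

  // Task.run now forwards `prompt` directly; the "Get Prompt" step was removed.
  const { text } = await context.agents
    .task({ agent: researcher, prompt: "Summarize the 0.2.5-agents-3 changes." })
    .run();

  await context.run("use result", () => console.log(text));
};
```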
package/cloudflare.mjs CHANGED
@@ -1,8 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-RFX5YRRT.mjs";
- import "./chunk-PU5J4TNC.mjs";
+ } from "./chunk-42MM2EPQ.mjs";

  // platforms/cloudflare.ts
  var getArgs = (args) => {
package/express.d.mts CHANGED
@@ -1,9 +1,8 @@
- import { R as RouteFunction, W as WorkflowServeOptions } from './types-BEyIoCRe.mjs';
+ import { R as RouteFunction, W as WorkflowServeOptions } from './types-CalpUeFX.mjs';
  import { Router } from 'express';
  import '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  declare function serve<TInitialPayload = unknown>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<globalThis.Response, TInitialPayload>, "onStepFinish">): Router;
package/express.d.ts CHANGED
@@ -1,9 +1,8 @@
- import { R as RouteFunction, W as WorkflowServeOptions } from './types-BEyIoCRe.js';
+ import { R as RouteFunction, W as WorkflowServeOptions } from './types-CalpUeFX.js';
  import { Router } from 'express';
  import '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  declare function serve<TInitialPayload = unknown>(routeFunction: RouteFunction<TInitialPayload>, options?: Omit<WorkflowServeOptions<globalThis.Response, TInitialPayload>, "onStepFinish">): Router;
package/express.js CHANGED
@@ -24498,29 +24498,16 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
  await debug?.log("SUBMIT", "SUBMIT_CLEANUP", {
  deletedWorkflowRunId: workflowContext.workflowRunId
  });
- try {
- await workflowContext.qstashClient.http.request({
- path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
- method: "DELETE",
- parseResponseAsJson: false
- });
- await debug?.log(
- "SUBMIT",
- "SUBMIT_CLEANUP",
- `workflow run ${workflowContext.workflowRunId} deleted.`
- );
- return { deleted: true };
- } catch (error) {
- if (error instanceof import_qstash3.QstashError && error.status === 404) {
- await debug?.log("WARN", "SUBMIT_CLEANUP", {
- message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
- name: error.name,
- errorMessage: error.message
- });
- return { deleted: false };
- }
- throw error;
- }
+ await workflowContext.qstashClient.http.request({
+ path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
+ method: "DELETE",
+ parseResponseAsJson: false
+ });
+ await debug?.log(
+ "SUBMIT",
+ "SUBMIT_CLEANUP",
+ `workflow run ${workflowContext.workflowRunId} deleted.`
+ );
  };
  var recreateUserHeaders = (headers) => {
  const filteredHeaders = new Headers();
@@ -25307,7 +25294,22 @@ var WorkflowApi = class extends BaseWorkflowApi {
  // src/agents/adapters.ts
  var import_openai2 = require("@ai-sdk/openai");
  var import_ai = require("ai");
+
+ // src/agents/constants.ts
  var AGENT_NAME_HEADER = "upstash-agent-name";
+ var MANAGER_AGENT_PROMPT = `You are an agent orchestrating other AI Agents.
+
+ These other agents have tools available to them.
+
+ Given a prompt, utilize these agents to address requests.
+
+ Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
+
+ Avoid calling the same agent twice in one turn. Instead, prefer to call it once but provide everything
+ you need from that agent.
+ `;
+
+ // src/agents/adapters.ts
  var createWorkflowOpenAI = (context) => {
  return (0, import_openai2.createOpenAI)({
  compatibility: "strict",
@@ -25374,30 +25376,49 @@ var convertLangchainTool = (langchainTool) => {
  return (0, import_ai.tool)({
  description: langchainTool.description,
  parameters: langchainTool.schema,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- execute: async (param) => langchainTool.invoke(param)
+ execute: async (...param) => langchainTool.invoke(...param)
  });
  };

  // src/agents/agent.ts
  var import_zod = require("zod");
  var import_ai2 = require("ai");
+
+ // src/serve/utils.ts
+ var isDisabledWorkflowContext = (context) => {
+ return "disabled" in context;
+ };
+
+ // src/agents/agent.ts
  var Agent = class {
  name;
  tools;
  maxSteps;
  background;
  model;
- constructor({ tools, maxSteps, background, name, model }) {
+ temparature;
+ context;
+ constructor({ tools, maxSteps, background, name, model, temparature = 0.1 }, context) {
  this.name = name;
  this.tools = tools ?? {};
  this.maxSteps = maxSteps;
  this.background = background;
  this.model = model;
+ this.temparature = temparature;
+ this.context = context;
  }
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
  async call({ prompt }) {
  try {
- return await (0, import_ai2.generateText)({
+ if (isDisabledWorkflowContext(this.context)) {
+ await this.context.sleep("abort", 0);
+ }
+ const result = await (0, import_ai2.generateText)({
  model: this.model,
  tools: this.tools,
  maxSteps: this.maxSteps,
@@ -25405,8 +25426,10 @@ var Agent = class {
  prompt,
  headers: {
  [AGENT_NAME_HEADER]: this.name
- }
+ },
+ temperature: this.temparature
  });
+ return { text: result.text };
  } catch (error) {
  if (error instanceof import_ai2.ToolExecutionError) {
  if (error.cause instanceof Error && error.cause.name === "WorkflowAbort") {
@@ -25421,6 +25444,11 @@ var Agent = class {
  }
  }
  }
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
  asTool() {
  const toolDescriptions = Object.values(this.tools).map((tool3) => tool3.description).join("\n");
  return (0, import_ai2.tool)({
@@ -25432,27 +25460,37 @@ var Agent = class {
  });
  }
  };
- var MANAGER_AGENT_PROMPT = `You are an AI agent who orchestrates other AI Agents.
- These other agents have tools available to them.
- Given a prompt, utilize these agents to address requests.
- Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
- `;
  var ManagerAgent = class extends Agent {
  agents;
+ /**
+ * A manager agent which coordinates agents available to it to achieve a
+ * given task
+ *
+ * @param name Name of the agent
+ * @param background Background of the agent. If not passed, default will be used.
+ * @param model LLM model to use
+ * @param agents: List of agents available to the agent
+ * @param maxSteps number of times the manager agent can call the LLM at most.
+ * If the agent abruptly stops execution after calling other agents, you may
+ * need to increase maxSteps
+ */
  constructor({
- maxSteps,
- background = MANAGER_AGENT_PROMPT,
  agents,
+ background = MANAGER_AGENT_PROMPT,
  model,
+ maxSteps,
  name = "manager llm"
- }) {
- super({
- background,
- maxSteps,
- tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
- name,
- model
- });
+ }, context) {
+ super(
+ {
+ background,
+ maxSteps,
+ tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
+ name,
+ model
+ },
+ context
+ );
  this.agents = agents;
  }
  };
@@ -25468,25 +25506,32 @@ var Task = class {
  this.context = context;
  this.taskParameters = taskParameters;
  }
+ /**
+ * Run the agents to complete the task
+ *
+ * @returns Result of the task as { text: string }
+ */
  async run() {
  const { prompt, ...otherParams } = this.taskParameters;
- const safePrompt = await this.context.run("Get Prompt", () => prompt);
  if ("agent" in otherParams) {
  const agent = otherParams.agent;
  const result = await agent.call({
- prompt: safePrompt
+ prompt
  });
  return { text: result.text };
  } else {
  const { agents, maxSteps, model, background } = otherParams;
- const managerAgent = new ManagerAgent({
- model,
- maxSteps,
- agents,
- name: "Manager LLM",
- background
- });
- const result = await managerAgent.call({ prompt: safePrompt });
+ const managerAgent = new ManagerAgent(
+ {
+ model,
+ maxSteps,
+ agents,
+ name: "Manager LLM",
+ background
+ },
+ this.context
+ );
+ const result = await managerAgent.call({ prompt });
  return { text: result.text };
  }
  }
@@ -25498,16 +25543,45 @@ var WorkflowAgents = class {
  constructor({ context }) {
  this.context = context;
  }
+ /**
+ * Defines an agent
+ *
+ * ```ts
+ * const researcherAgent = context.agents.agent({
+ * model,
+ * name: 'academic',
+ * maxSteps: 2,
+ * tools: {
+ * wikiTool: new WikipediaQueryRun({
+ * topKResults: 1,
+ * maxDocContentLength: 500,
+ * })
+ * },
+ * background:
+ * 'You are researcher agent with access to Wikipedia. ' +
+ * 'Utilize Wikipedia as much as possible for correct information',
+ * });
+ * ```
+ *
+ * @param params agent parameters
+ * @returns
+ */
  agent(params) {
  const wrappedTools = wrapTools({ context: this.context, tools: params.tools });
- return new Agent({
- ...params,
- tools: wrappedTools
- });
+ return new Agent(
+ {
+ ...params,
+ tools: wrappedTools
+ },
+ this.context
+ );
  }
  task(taskParameters) {
  return new Task({ context: this.context, taskParameters });
  }
+ /**
+ * creates an openai model for agents
+ */
  openai(...params) {
  const openai2 = createWorkflowOpenAI(this.context);
  return openai2(...params);
@@ -25984,6 +26058,7 @@ function decodeBase64(base64) {
  var import_qstash8 = require("@upstash/qstash");
  var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
  static disabledMessage = "disabled-qstash-worklfow-run";
+ disabled = true;
  /**
  * overwrite the WorkflowContext.addStep method to always raise WorkflowAbort
  * error in order to stop the execution whenever we encounter a step.
@@ -26108,7 +26183,6 @@ var checkIfLastOneIsDuplicate = async (steps, debug) => {
  if (step.stepId === lastStepId && step.targetStep === lastTargetStepId) {
  const message = `Upstash Workflow: The step '${step.stepName}' with id '${step.stepId}' has run twice during workflow execution. Rest of the workflow will continue running as usual.`;
  await debug?.log("WARN", "RESPONSE_DEFAULT", message);
- console.log(steps);
  console.warn(message);
  return true;
  }
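
The express.js hunks mirror the cloudflare.js changes above. For the multi-agent path specifically, `Task.run` now builds its internal `ManagerAgent` with `this.context` as the second constructor argument. A sketch of that path under the same assumptions as the earlier example (agent names, backgrounds, prompt, and model id are all hypothetical):

```ts
import type { WorkflowContext } from "@upstash/workflow";

// Hypothetical multi-agent route function for one of the platform `serve` helpers.
export const multiAgentRoute = async (context: WorkflowContext) => {
  const model = context.agents.openai("gpt-4o-mini"); // model id is an assumption

  const mathematician = context.agents.agent({
    model,
    name: "mathematician",
    maxSteps: 2,
    background: "You solve math questions step by step.",
    tools: {},
  });

  const reviewer = context.agents.agent({
    model,
    name: "reviewer",
    maxSteps: 2,
    background: "You check answers for mistakes and summarize them.",
    tools: {},
  });

  // Without an `agent` key, Task.run constructs a ManagerAgent internally and now
  // passes this.context through to it (see the Task and ManagerAgent hunks above).
  const { text } = await context.agents
    .task({
      model,
      maxSteps: 3,
      agents: [mathematician, reviewer],
      prompt: "Compute 12 * 13 and have the reviewer verify the result.",
    })
    .run();

  await context.run("use result", () => console.log(text));
};
```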
package/express.mjs CHANGED
@@ -1,12 +1,10 @@
  import {
  SDK_TELEMETRY,
- serveBase
- } from "./chunk-RFX5YRRT.mjs";
- import {
  __commonJS,
  __require,
- __toESM
- } from "./chunk-PU5J4TNC.mjs";
+ __toESM,
+ serveBase
+ } from "./chunk-42MM2EPQ.mjs";

  // node_modules/depd/index.js
  var require_depd = __commonJS({
package/h3.d.mts CHANGED
@@ -1,9 +1,8 @@
  import * as h3 from 'h3';
- import { R as RouteFunction, j as PublicServeOptions } from './types-BEyIoCRe.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
  import '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  declare const serve: <TInitialPayload = unknown>(routeFunction: RouteFunction<TInitialPayload>, options?: PublicServeOptions<TInitialPayload>) => {
  handler: h3.EventHandler<h3.EventHandlerRequest, Promise<Response | {
package/h3.d.ts CHANGED
@@ -1,9 +1,8 @@
  import * as h3 from 'h3';
- import { R as RouteFunction, j as PublicServeOptions } from './types-BEyIoCRe.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
  import '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  declare const serve: <TInitialPayload = unknown>(routeFunction: RouteFunction<TInitialPayload>, options?: PublicServeOptions<TInitialPayload>) => {
  handler: h3.EventHandler<h3.EventHandlerRequest, Promise<Response | {