@upstash/workflow 0.2.5-agents → 0.2.5-agents-3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
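
For orientation, a minimal sketch of how the agents API shown in the compiled sources below is typically called from a workflow route. The Next.js `serve` wrapper, the model id, the background text, and the prompt are illustrative assumptions and are not part of this diff; `context.agents.agent(...)`, `context.agents.openai(...)`, `context.agents.task(...).run()`, and the `{ text }` return shape come from the code in the hunks below.

```ts
import { serve } from "@upstash/workflow/nextjs"; // route adapter is an assumption

export const { POST } = serve(async (context) => {
  // Model created through the workflow-aware OpenAI adapter (createWorkflowOpenAI below);
  // the model id is an assumption.
  const model = context.agents.openai("gpt-4o");

  // WorkflowAgents.agent() wraps the tools and, per this diff, now also passes `context`
  // into the Agent constructor.
  const researcher = context.agents.agent({
    model,
    name: "academic",
    maxSteps: 2,
    background: "You are a researcher agent.", // illustrative background
    tools: {},
  });

  // Task.run() returns { text }, per the compiled Task class below.
  const { text } = await context.agents
    .task({ agent: researcher, prompt: "Summarize the history of Wikipedia." })
    .run();

  await context.run("log result", () => console.log(text));
});
```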
package/h3.js CHANGED
@@ -1138,29 +1138,16 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
  await debug?.log("SUBMIT", "SUBMIT_CLEANUP", {
  deletedWorkflowRunId: workflowContext.workflowRunId
  });
- try {
- await workflowContext.qstashClient.http.request({
- path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
- method: "DELETE",
- parseResponseAsJson: false
- });
- await debug?.log(
- "SUBMIT",
- "SUBMIT_CLEANUP",
- `workflow run ${workflowContext.workflowRunId} deleted.`
- );
- return { deleted: true };
- } catch (error) {
- if (error instanceof import_qstash3.QstashError && error.status === 404) {
- await debug?.log("WARN", "SUBMIT_CLEANUP", {
- message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
- name: error.name,
- errorMessage: error.message
- });
- return { deleted: false };
- }
- throw error;
- }
+ await workflowContext.qstashClient.http.request({
+ path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
+ method: "DELETE",
+ parseResponseAsJson: false
+ });
+ await debug?.log(
+ "SUBMIT",
+ "SUBMIT_CLEANUP",
+ `workflow run ${workflowContext.workflowRunId} deleted.`
+ );
  };
  var recreateUserHeaders = (headers) => {
  const filteredHeaders = new Headers();
@@ -1947,7 +1934,22 @@ var WorkflowApi = class extends BaseWorkflowApi {
  // src/agents/adapters.ts
  var import_openai2 = require("@ai-sdk/openai");
  var import_ai = require("ai");
+
+ // src/agents/constants.ts
  var AGENT_NAME_HEADER = "upstash-agent-name";
+ var MANAGER_AGENT_PROMPT = `You are an agent orchestrating other AI Agents.
+
+ These other agents have tools available to them.
+
+ Given a prompt, utilize these agents to address requests.
+
+ Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
+
+ Avoid calling the same agent twice in one turn. Instead, prefer to call it once but provide everything
+ you need from that agent.
+ `;
+
+ // src/agents/adapters.ts
  var createWorkflowOpenAI = (context) => {
  return (0, import_openai2.createOpenAI)({
  compatibility: "strict",
@@ -2014,30 +2016,49 @@ var convertLangchainTool = (langchainTool) => {
  return (0, import_ai.tool)({
  description: langchainTool.description,
  parameters: langchainTool.schema,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- execute: async (param) => langchainTool.invoke(param)
+ execute: async (...param) => langchainTool.invoke(...param)
  });
  };

  // src/agents/agent.ts
  var import_zod = require("zod");
  var import_ai2 = require("ai");
+
+ // src/serve/utils.ts
+ var isDisabledWorkflowContext = (context) => {
+ return "disabled" in context;
+ };
+
+ // src/agents/agent.ts
  var Agent = class {
  name;
  tools;
  maxSteps;
  background;
  model;
- constructor({ tools, maxSteps, background, name, model }) {
+ temparature;
+ context;
+ constructor({ tools, maxSteps, background, name, model, temparature = 0.1 }, context) {
  this.name = name;
  this.tools = tools ?? {};
  this.maxSteps = maxSteps;
  this.background = background;
  this.model = model;
+ this.temparature = temparature;
+ this.context = context;
  }
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
  async call({ prompt }) {
  try {
- return await (0, import_ai2.generateText)({
+ if (isDisabledWorkflowContext(this.context)) {
+ await this.context.sleep("abort", 0);
+ }
+ const result = await (0, import_ai2.generateText)({
  model: this.model,
  tools: this.tools,
  maxSteps: this.maxSteps,
@@ -2045,8 +2066,10 @@ var Agent = class {
  prompt,
  headers: {
  [AGENT_NAME_HEADER]: this.name
- }
+ },
+ temperature: this.temparature
  });
+ return { text: result.text };
  } catch (error) {
  if (error instanceof import_ai2.ToolExecutionError) {
  if (error.cause instanceof Error && error.cause.name === "WorkflowAbort") {
@@ -2061,6 +2084,11 @@ var Agent = class {
  }
  }
  }
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
  asTool() {
  const toolDescriptions = Object.values(this.tools).map((tool3) => tool3.description).join("\n");
  return (0, import_ai2.tool)({
@@ -2072,27 +2100,37 @@ var Agent = class {
  });
  }
  };
- var MANAGER_AGENT_PROMPT = `You are an AI agent who orchestrates other AI Agents.
- These other agents have tools available to them.
- Given a prompt, utilize these agents to address requests.
- Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
- `;
  var ManagerAgent = class extends Agent {
  agents;
+ /**
+ * A manager agent which coordinates agents available to it to achieve a
+ * given task
+ *
+ * @param name Name of the agent
+ * @param background Background of the agent. If not passed, default will be used.
+ * @param model LLM model to use
+ * @param agents: List of agents available to the agent
+ * @param maxSteps number of times the manager agent can call the LLM at most.
+ * If the agent abruptly stops execution after calling other agents, you may
+ * need to increase maxSteps
+ */
  constructor({
- maxSteps,
- background = MANAGER_AGENT_PROMPT,
  agents,
+ background = MANAGER_AGENT_PROMPT,
  model,
+ maxSteps,
  name = "manager llm"
- }) {
- super({
- background,
- maxSteps,
- tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
- name,
- model
- });
+ }, context) {
+ super(
+ {
+ background,
+ maxSteps,
+ tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
+ name,
+ model
+ },
+ context
+ );
  this.agents = agents;
  }
  };
@@ -2108,25 +2146,32 @@ var Task = class {
  this.context = context;
  this.taskParameters = taskParameters;
  }
+ /**
+ * Run the agents to complete the task
+ *
+ * @returns Result of the task as { text: string }
+ */
  async run() {
  const { prompt, ...otherParams } = this.taskParameters;
- const safePrompt = await this.context.run("Get Prompt", () => prompt);
  if ("agent" in otherParams) {
  const agent = otherParams.agent;
  const result = await agent.call({
- prompt: safePrompt
+ prompt
  });
  return { text: result.text };
  } else {
  const { agents, maxSteps, model, background } = otherParams;
- const managerAgent = new ManagerAgent({
- model,
- maxSteps,
- agents,
- name: "Manager LLM",
- background
- });
- const result = await managerAgent.call({ prompt: safePrompt });
+ const managerAgent = new ManagerAgent(
+ {
+ model,
+ maxSteps,
+ agents,
+ name: "Manager LLM",
+ background
+ },
+ this.context
+ );
+ const result = await managerAgent.call({ prompt });
  return { text: result.text };
  }
  }
@@ -2138,16 +2183,45 @@ var WorkflowAgents = class {
  constructor({ context }) {
  this.context = context;
  }
+ /**
+ * Defines an agent
+ *
+ * ```ts
+ * const researcherAgent = context.agents.agent({
+ * model,
+ * name: 'academic',
+ * maxSteps: 2,
+ * tools: {
+ * wikiTool: new WikipediaQueryRun({
+ * topKResults: 1,
+ * maxDocContentLength: 500,
+ * })
+ * },
+ * background:
+ * 'You are researcher agent with access to Wikipedia. ' +
+ * 'Utilize Wikipedia as much as possible for correct information',
+ * });
+ * ```
+ *
+ * @param params agent parameters
+ * @returns
+ */
  agent(params) {
  const wrappedTools = wrapTools({ context: this.context, tools: params.tools });
- return new Agent({
- ...params,
- tools: wrappedTools
- });
+ return new Agent(
+ {
+ ...params,
+ tools: wrappedTools
+ },
+ this.context
+ );
  }
  task(taskParameters) {
  return new Task({ context: this.context, taskParameters });
  }
+ /**
+ * creates an openai model for agents
+ */
  openai(...params) {
  const openai2 = createWorkflowOpenAI(this.context);
  return openai2(...params);
@@ -2624,6 +2698,7 @@ function decodeBase64(base64) {
  var import_qstash8 = require("@upstash/qstash");
  var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
  static disabledMessage = "disabled-qstash-worklfow-run";
+ disabled = true;
  /**
  * overwrite the WorkflowContext.addStep method to always raise WorkflowAbort
  * error in order to stop the execution whenever we encounter a step.
@@ -2748,7 +2823,6 @@ var checkIfLastOneIsDuplicate = async (steps, debug) => {
  if (step.stepId === lastStepId && step.targetStep === lastTargetStepId) {
  const message = `Upstash Workflow: The step '${step.stepName}' with id '${step.stepId}' has run twice during workflow execution. Rest of the workflow will continue running as usual.`;
  await debug?.log("WARN", "RESPONSE_DEFAULT", message);
- console.log(steps);
  console.warn(message);
  return true;
  }
package/h3.mjs CHANGED
@@ -1,8 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-RFX5YRRT.mjs";
- import "./chunk-PU5J4TNC.mjs";
+ } from "./chunk-42MM2EPQ.mjs";

  // node_modules/defu/dist/defu.mjs
  function isPlainObject(value) {
package/hono.d.mts CHANGED
@@ -1,10 +1,9 @@
  import { Context } from 'hono';
- import { R as RouteFunction, j as PublicServeOptions } from './types-BEyIoCRe.mjs';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.mjs';
  import { Variables } from 'hono/types';
  import '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  type WorkflowBindings = {
  QSTASH_TOKEN: string;
package/hono.d.ts CHANGED
@@ -1,10 +1,9 @@
  import { Context } from 'hono';
- import { R as RouteFunction, j as PublicServeOptions } from './types-BEyIoCRe.js';
+ import { R as RouteFunction, j as PublicServeOptions } from './types-CalpUeFX.js';
  import { Variables } from 'hono/types';
  import '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  type WorkflowBindings = {
  QSTASH_TOKEN: string;
package/hono.js CHANGED
@@ -826,29 +826,16 @@ var triggerWorkflowDelete = async (workflowContext, debug, cancel = false) => {
  await debug?.log("SUBMIT", "SUBMIT_CLEANUP", {
  deletedWorkflowRunId: workflowContext.workflowRunId
  });
- try {
- await workflowContext.qstashClient.http.request({
- path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
- method: "DELETE",
- parseResponseAsJson: false
- });
- await debug?.log(
- "SUBMIT",
- "SUBMIT_CLEANUP",
- `workflow run ${workflowContext.workflowRunId} deleted.`
- );
- return { deleted: true };
- } catch (error) {
- if (error instanceof import_qstash3.QstashError && error.status === 404) {
- await debug?.log("WARN", "SUBMIT_CLEANUP", {
- message: `Failed to remove workflow run ${workflowContext.workflowRunId} as it doesn't exist.`,
- name: error.name,
- errorMessage: error.message
- });
- return { deleted: false };
- }
- throw error;
- }
+ await workflowContext.qstashClient.http.request({
+ path: ["v2", "workflows", "runs", `${workflowContext.workflowRunId}?cancel=${cancel}`],
+ method: "DELETE",
+ parseResponseAsJson: false
+ });
+ await debug?.log(
+ "SUBMIT",
+ "SUBMIT_CLEANUP",
+ `workflow run ${workflowContext.workflowRunId} deleted.`
+ );
  };
  var recreateUserHeaders = (headers) => {
  const filteredHeaders = new Headers();
@@ -1635,7 +1622,22 @@ var WorkflowApi = class extends BaseWorkflowApi {
  // src/agents/adapters.ts
  var import_openai2 = require("@ai-sdk/openai");
  var import_ai = require("ai");
+
+ // src/agents/constants.ts
  var AGENT_NAME_HEADER = "upstash-agent-name";
+ var MANAGER_AGENT_PROMPT = `You are an agent orchestrating other AI Agents.
+
+ These other agents have tools available to them.
+
+ Given a prompt, utilize these agents to address requests.
+
+ Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
+
+ Avoid calling the same agent twice in one turn. Instead, prefer to call it once but provide everything
+ you need from that agent.
+ `;
+
+ // src/agents/adapters.ts
  var createWorkflowOpenAI = (context) => {
  return (0, import_openai2.createOpenAI)({
  compatibility: "strict",
@@ -1702,30 +1704,49 @@ var convertLangchainTool = (langchainTool) => {
  return (0, import_ai.tool)({
  description: langchainTool.description,
  parameters: langchainTool.schema,
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- execute: async (param) => langchainTool.invoke(param)
+ execute: async (...param) => langchainTool.invoke(...param)
  });
  };

  // src/agents/agent.ts
  var import_zod = require("zod");
  var import_ai2 = require("ai");
+
+ // src/serve/utils.ts
+ var isDisabledWorkflowContext = (context) => {
+ return "disabled" in context;
+ };
+
+ // src/agents/agent.ts
  var Agent = class {
  name;
  tools;
  maxSteps;
  background;
  model;
- constructor({ tools, maxSteps, background, name, model }) {
+ temparature;
+ context;
+ constructor({ tools, maxSteps, background, name, model, temparature = 0.1 }, context) {
  this.name = name;
  this.tools = tools ?? {};
  this.maxSteps = maxSteps;
  this.background = background;
  this.model = model;
+ this.temparature = temparature;
+ this.context = context;
  }
+ /**
+ * Trigger the agent by passing a prompt
+ *
+ * @param prompt task to assign to the agent
+ * @returns Response as `{ text: string }`
+ */
  async call({ prompt }) {
  try {
- return await (0, import_ai2.generateText)({
+ if (isDisabledWorkflowContext(this.context)) {
+ await this.context.sleep("abort", 0);
+ }
+ const result = await (0, import_ai2.generateText)({
  model: this.model,
  tools: this.tools,
  maxSteps: this.maxSteps,
@@ -1733,8 +1754,10 @@ var Agent = class {
  prompt,
  headers: {
  [AGENT_NAME_HEADER]: this.name
- }
+ },
+ temperature: this.temparature
  });
+ return { text: result.text };
  } catch (error) {
  if (error instanceof import_ai2.ToolExecutionError) {
  if (error.cause instanceof Error && error.cause.name === "WorkflowAbort") {
@@ -1749,6 +1772,11 @@ var Agent = class {
  }
  }
  }
+ /**
+ * Convert the agent to a tool which can be used by other agents.
+ *
+ * @returns the agent as a tool
+ */
  asTool() {
  const toolDescriptions = Object.values(this.tools).map((tool3) => tool3.description).join("\n");
  return (0, import_ai2.tool)({
@@ -1760,27 +1788,37 @@ var Agent = class {
  });
  }
  };
- var MANAGER_AGENT_PROMPT = `You are an AI agent who orchestrates other AI Agents.
- These other agents have tools available to them.
- Given a prompt, utilize these agents to address requests.
- Don't always call all the agents provided to you at the same time. You can call one and use it's response to call another.
- `;
  var ManagerAgent = class extends Agent {
  agents;
+ /**
+ * A manager agent which coordinates agents available to it to achieve a
+ * given task
+ *
+ * @param name Name of the agent
+ * @param background Background of the agent. If not passed, default will be used.
+ * @param model LLM model to use
+ * @param agents: List of agents available to the agent
+ * @param maxSteps number of times the manager agent can call the LLM at most.
+ * If the agent abruptly stops execution after calling other agents, you may
+ * need to increase maxSteps
+ */
  constructor({
- maxSteps,
- background = MANAGER_AGENT_PROMPT,
  agents,
+ background = MANAGER_AGENT_PROMPT,
  model,
+ maxSteps,
  name = "manager llm"
- }) {
- super({
- background,
- maxSteps,
- tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
- name,
- model
- });
+ }, context) {
+ super(
+ {
+ background,
+ maxSteps,
+ tools: Object.fromEntries(agents.map((agent) => [agent.name, agent.asTool()])),
+ name,
+ model
+ },
+ context
+ );
  this.agents = agents;
  }
  };
@@ -1796,25 +1834,32 @@ var Task = class {
  this.context = context;
  this.taskParameters = taskParameters;
  }
+ /**
+ * Run the agents to complete the task
+ *
+ * @returns Result of the task as { text: string }
+ */
  async run() {
  const { prompt, ...otherParams } = this.taskParameters;
- const safePrompt = await this.context.run("Get Prompt", () => prompt);
  if ("agent" in otherParams) {
  const agent = otherParams.agent;
  const result = await agent.call({
- prompt: safePrompt
+ prompt
  });
  return { text: result.text };
  } else {
  const { agents, maxSteps, model, background } = otherParams;
- const managerAgent = new ManagerAgent({
- model,
- maxSteps,
- agents,
- name: "Manager LLM",
- background
- });
- const result = await managerAgent.call({ prompt: safePrompt });
+ const managerAgent = new ManagerAgent(
+ {
+ model,
+ maxSteps,
+ agents,
+ name: "Manager LLM",
+ background
+ },
+ this.context
+ );
+ const result = await managerAgent.call({ prompt });
  return { text: result.text };
  }
  }
@@ -1826,16 +1871,45 @@ var WorkflowAgents = class {
  constructor({ context }) {
  this.context = context;
  }
+ /**
+ * Defines an agent
+ *
+ * ```ts
+ * const researcherAgent = context.agents.agent({
+ * model,
+ * name: 'academic',
+ * maxSteps: 2,
+ * tools: {
+ * wikiTool: new WikipediaQueryRun({
+ * topKResults: 1,
+ * maxDocContentLength: 500,
+ * })
+ * },
+ * background:
+ * 'You are researcher agent with access to Wikipedia. ' +
+ * 'Utilize Wikipedia as much as possible for correct information',
+ * });
+ * ```
+ *
+ * @param params agent parameters
+ * @returns
+ */
  agent(params) {
  const wrappedTools = wrapTools({ context: this.context, tools: params.tools });
- return new Agent({
- ...params,
- tools: wrappedTools
- });
+ return new Agent(
+ {
+ ...params,
+ tools: wrappedTools
+ },
+ this.context
+ );
  }
  task(taskParameters) {
  return new Task({ context: this.context, taskParameters });
  }
+ /**
+ * creates an openai model for agents
+ */
  openai(...params) {
  const openai2 = createWorkflowOpenAI(this.context);
  return openai2(...params);
@@ -2312,6 +2386,7 @@ function decodeBase64(base64) {
  var import_qstash8 = require("@upstash/qstash");
  var DisabledWorkflowContext = class _DisabledWorkflowContext extends WorkflowContext {
  static disabledMessage = "disabled-qstash-worklfow-run";
+ disabled = true;
  /**
  * overwrite the WorkflowContext.addStep method to always raise WorkflowAbort
  * error in order to stop the execution whenever we encounter a step.
@@ -2436,7 +2511,6 @@ var checkIfLastOneIsDuplicate = async (steps, debug) => {
  if (step.stepId === lastStepId && step.targetStep === lastTargetStepId) {
  const message = `Upstash Workflow: The step '${step.stepName}' with id '${step.stepId}' has run twice during workflow execution. Rest of the workflow will continue running as usual.`;
  await debug?.log("WARN", "RESPONSE_DEFAULT", message);
- console.log(steps);
  console.warn(message);
  return true;
  }
package/hono.mjs CHANGED
@@ -1,8 +1,7 @@
  import {
  SDK_TELEMETRY,
  serveBase
- } from "./chunk-RFX5YRRT.mjs";
- import "./chunk-PU5J4TNC.mjs";
+ } from "./chunk-42MM2EPQ.mjs";

  // platforms/hono.ts
  var serve = (routeFunction, options) => {
package/index.d.mts CHANGED
@@ -1,9 +1,8 @@
- import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-BEyIoCRe.mjs';
- export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-BEyIoCRe.mjs';
+ import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-CalpUeFX.mjs';
+ export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-CalpUeFX.mjs';
  import { Client as Client$1, QstashError } from '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  /**
  * Creates an async method that handles incoming requests and runs the provided
package/index.d.ts CHANGED
@@ -1,9 +1,8 @@
- import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-BEyIoCRe.js';
- export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-BEyIoCRe.js';
+ import { R as RouteFunction, W as WorkflowServeOptions, N as NotifyResponse, a as Waiter, S as Step } from './types-CalpUeFX.js';
+ export { A as AsyncStepFunction, C as CallResponse, q as CallSettings, D as Duration, k as FailureFunctionPayload, F as FinishCondition, H as HeaderParams, L as LogLevel, o as NotifyStepResponse, P as ParallelCallState, j as PublicServeOptions, g as RawStep, l as RequiredExceptFields, i as StepFunction, f as StepType, e as StepTypes, h as SyncStepFunction, T as Telemetry, p as WaitEventOptions, m as WaitRequest, n as WaitStepResponse, c as WorkflowClient, b as WorkflowContext, s as WorkflowLogger, r as WorkflowLoggerOptions, d as WorkflowReceiver } from './types-CalpUeFX.js';
  import { Client as Client$1, QstashError } from '@upstash/qstash';
  import 'ai';
  import '@ai-sdk/openai';
- import 'langchain/tools';

  /**
  * Creates an async method that handles incoming requests and runs the provided