@aigne/example-workflow-handoff 1.1.0 → 1.2.0

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -90,12 +90,12 @@ The following example demonstrates how to build a handoff workflow:
 
 ```typescript
 import assert from "node:assert";
-import { AIAgent, ChatModelOpenAI, ExecutionEngine } from "@aigne/core-next";
+import { AIAgent, OpenAIChatModel, ExecutionEngine } from "@aigne/core";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new ChatModelOpenAI({
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -118,7 +118,7 @@ const agentB = AIAgent.from({
 
 const engine = new ExecutionEngine({ model });
 
-const userAgent = await engine.run(agentA);
+const userAgent = engine.call(agentA);
 
 const result1 = await userAgent.call("transfer to agent b");
 console.log(result1);
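
In short, the README example now imports `OpenAIChatModel` from `@aigne/core` (previously `ChatModelOpenAI` from `@aigne/core-next`) and obtains the user agent via `engine.call(agentA)` instead of `await engine.run(agentA)`. A minimal sketch of the updated snippet, assembled only from the hunks above; the real `agentA`/`agentB` definitions are untouched by this diff, so a placeholder agent stands in for them here:

```typescript
import assert from "node:assert";
import { AIAgent, ExecutionEngine, OpenAIChatModel } from "@aigne/core";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

// Renamed from ChatModelOpenAI (@aigne/core-next) to OpenAIChatModel (@aigne/core)
const model = new OpenAIChatModel({
  apiKey: OPENAI_API_KEY,
});

// Placeholder for the agentA defined earlier in the README (its handoff tools are elided)
const agentA = AIAgent.from({
  instructions: "You are agent A.", // placeholder text, not the README's actual prompt
  outputKey: "A",
});

const engine = new ExecutionEngine({ model });

// engine.call(agent) replaces await engine.run(agent) and returns the user-facing agent
const userAgent = engine.call(agentA);

const result1 = await userAgent.call("transfer to agent b");
console.log(result1);
```
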
package/index.ts CHANGED
@@ -4,17 +4,17 @@ import assert from "node:assert";
 import {
   AIAgent,
   type Agent,
-  ChatModelOpenAI,
   ExecutionEngine,
   FunctionAgent,
+  OpenAIChatModel,
   runChatLoopInTerminal,
-} from "@aigne/core-next";
+} from "@aigne/core";
 import { z } from "zod";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new ChatModelOpenAI({
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -110,7 +110,7 @@ tell them a crazy caveat and execute their order.
 `,
   tools: [transfer_back_to_triage, execute_order_tool],
   outputKey: "sales",
-  enableHistory: true,
+  memory: true,
 });
 
 const issuesAndRepairs = AIAgent.from({
@@ -127,7 +127,7 @@ Follow the following routine with the user:
 `,
   tools: [transfer_back_to_triage, execute_refund_tool, look_up_item_tool],
   outputKey: "issuesAndRepairs",
-  enableHistory: true,
+  memory: true,
 });
 
 // Assume this is a human agent
@@ -140,7 +140,7 @@ Only transfer to another agent if user explicitly asks for it.
 `,
   tools: [transfer_back_to_triage, transfer_to_sales_agent, transfer_to_issues_and_repairs],
   outputKey: "human",
-  enableHistory: true,
+  memory: true,
 });
 
 const triage = AIAgent.from({
@@ -153,12 +153,12 @@ But make your questions subtle and natural.
 `,
   tools: [transfer_to_issues_and_repairs, transfer_to_sales_agent, transfer_to_human_manager],
   outputKey: "triage",
-  enableHistory: true,
+  memory: true,
 });
 
 const engine = new ExecutionEngine({ model });
 
-const userAgent = await engine.run(triage);
+const userAgent = engine.call(triage);
 
 await runChatLoopInTerminal(userAgent, {
   welcome: `Hello, I'm a customer service bot for ACME Inc. How can I help you today?`,
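
Two patterns recur in index.ts: every `AIAgent.from(...)` call switches from `enableHistory: true` to `memory: true`, and the user agent is created with `engine.call(triage)` rather than `await engine.run(triage)`. A minimal sketch of one agent under the new options, assuming the surrounding definitions from the file; the instructions text and the tools list are shortened placeholders:

```typescript
import assert from "node:assert";
import { AIAgent, ExecutionEngine, OpenAIChatModel, runChatLoopInTerminal } from "@aigne/core";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

const model = new OpenAIChatModel({ apiKey: OPENAI_API_KEY });

const triage = AIAgent.from({
  instructions: "Triage the user's request and hand off to the right agent.", // placeholder
  outputKey: "triage",
  tools: [], // the real file passes the transfer_* FunctionAgents here
  memory: true, // replaces the former enableHistory: true flag
});

const engine = new ExecutionEngine({ model });

// engine.call(agent) now yields the user-facing agent directly (no await engine.run)
const userAgent = engine.call(triage);

await runChatLoopInTerminal(userAgent, {
  welcome: "Hello, I'm a customer service bot for ACME Inc. How can I help you today?",
});
```
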
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/example-workflow-handoff",
-  "version": "1.1.0",
+  "version": "1.2.0",
   "description": "A demonstration of using AIGNE Framework to build a handoff workflow",
   "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
   "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-handoff",
@@ -16,10 +16,12 @@
     "README.md"
   ],
   "dependencies": {
+    "openai": "^4.89.0",
     "zod": "^3.24.2",
-    "@aigne/core-next": "^1.1.0"
+    "@aigne/core": "^1.3.0"
   },
   "scripts": {
-    "start": "npx -y bun run index.ts"
+    "start": "npx -y bun run index.ts",
+    "lint": "tsc --noEmit"
   }
 }
package/usages.ts CHANGED
@@ -1,10 +1,10 @@
 import assert from "node:assert";
-import { AIAgent, ChatModelOpenAI, ExecutionEngine } from "@aigne/core-next";
+import { AIAgent, ExecutionEngine, OpenAIChatModel } from "@aigne/core";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new ChatModelOpenAI({
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -27,7 +27,7 @@ const agentB = AIAgent.from({
 
 const engine = new ExecutionEngine({ model });
 
-const userAgent = await engine.run(agentA);
+const userAgent = engine.call(agentA);
 
 const result1 = await userAgent.call("transfer to agent b");
 console.log(result1);