@aigne/example-workflow-handoff 1.1.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -3
- package/index.ts +9 -14
- package/package.json +5 -3
- package/usages.ts +4 -3
package/README.md
CHANGED

````diff
@@ -90,12 +90,13 @@ The following example demonstrates how to build a handoff workflow:
 
 ```typescript
 import assert from "node:assert";
-import { AIAgent,
+import { AIAgent, ExecutionEngine } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -118,7 +119,7 @@ const agentB = AIAgent.from({
 
 const engine = new ExecutionEngine({ model });
 
-const userAgent =
+const userAgent = engine.call(agentA);
 
 const result1 = await userAgent.call("transfer to agent b");
 console.log(result1);
````
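Assembled from the added lines, the updated README snippet reads roughly as below. This is a minimal sketch: `agentA` stands in for the README's handoff agents, whose definitions (and transfer tools) fall outside the changed hunks, and the `instructions` field is an assumption drawn from the AIGNE `AIAgent.from` API rather than from this diff.

```typescript
import assert from "node:assert";
import { AIAgent, ExecutionEngine } from "@aigne/core";
// The chat model now lives under a dedicated subpath export instead of the package root.
import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

const model = new OpenAIChatModel({
  apiKey: OPENAI_API_KEY,
});

// Placeholder for the README's agentA (the real definition, with its transfer tool,
// is outside the changed hunks); `instructions` is assumed, not shown in this diff.
const agentA = AIAgent.from({
  instructions: "You are agent A. Hand off to agent B when the user asks for it.",
});

const engine = new ExecutionEngine({ model });

// engine.call(agent) returns a user-facing agent that can then be called with a prompt.
const userAgent = engine.call(agentA);

const result1 = await userAgent.call("transfer to agent b");
console.log(result1);
```

As the index.ts diff below makes explicit, the substance of the change is the import migration: 1.1.0 pulled everything from a single `@aigne/core-next` entry point, while 1.2.x imports from `@aigne/core` and loads the OpenAI chat model from its own subpath.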
package/index.ts
CHANGED

```diff
@@ -1,20 +1,15 @@
 #!/usr/bin/env npx -y bun
 
 import assert from "node:assert";
-import {
-
-
-  ChatModelOpenAI,
-  ExecutionEngine,
-  FunctionAgent,
-  runChatLoopInTerminal,
-} from "@aigne/core-next";
+import { AIAgent, type Agent, ExecutionEngine, FunctionAgent } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
+import { runChatLoopInTerminal } from "@aigne/core/utils/run-chat-loop.js";
 import { z } from "zod";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -110,7 +105,7 @@ tell them a crazy caveat and execute their order.
 `,
   tools: [transfer_back_to_triage, execute_order_tool],
   outputKey: "sales",
-
+  memory: true,
 });
 
 const issuesAndRepairs = AIAgent.from({
@@ -127,7 +122,7 @@ Follow the following routine with the user:
 `,
   tools: [transfer_back_to_triage, execute_refund_tool, look_up_item_tool],
   outputKey: "issuesAndRepairs",
-
+  memory: true,
 });
 
 // Assume this is a human agent
@@ -140,7 +135,7 @@ Only transfer to another agent if user explicitly asks for it.
 `,
   tools: [transfer_back_to_triage, transfer_to_sales_agent, transfer_to_issues_and_repairs],
   outputKey: "human",
-
+  memory: true,
 });
 
 const triage = AIAgent.from({
@@ -153,12 +148,12 @@ But make your questions subtle and natural.
 `,
   tools: [transfer_to_issues_and_repairs, transfer_to_sales_agent, transfer_to_human_manager],
   outputKey: "triage",
-
+  memory: true,
 });
 
 const engine = new ExecutionEngine({ model });
 
-const userAgent =
+const userAgent = engine.call(triage);
 
 await runChatLoopInTerminal(userAgent, {
   welcome: `Hello, I'm a customer service bot for ACME Inc. How can I help you today?`,
```
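Taken together, the index.ts changes do two things: imports move from the single `@aigne/core-next` entry point to `@aigne/core` plus subpath exports for the model and the chat-loop helper, and each of the four handoff agents (sales, issues-and-repairs, human, triage) gains `memory: true`. The sketch below shows one such agent wired into the terminal loop. It is a reduction, not the full example: the `tools: [...]` arrays and full instruction prompts are omitted, and the `instructions` field name is assumed from the AIAgent API, since these hunks only show the closing of those template literals.

```typescript
import assert from "node:assert";
import { AIAgent, ExecutionEngine } from "@aigne/core";
import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
import { runChatLoopInTerminal } from "@aigne/core/utils/run-chat-loop.js";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

const model = new OpenAIChatModel({
  apiKey: OPENAI_API_KEY,
});

// One of the four handoff agents, reduced to the options visible in the diff.
// The real triage agent also carries transfer_* tools (defined elsewhere in index.ts).
const triage = AIAgent.from({
  instructions: "Gather information and route the customer to the right department.",
  outputKey: "triage",
  memory: true, // option added to all four agents in this release
});

const engine = new ExecutionEngine({ model });

// engine.call(triage) produces the user-facing agent driven by the terminal chat loop.
const userAgent = engine.call(triage);

await runChatLoopInTerminal(userAgent, {
  welcome: "Hello, I'm a customer service bot for ACME Inc. How can I help you today?",
});
```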
package/package.json
CHANGED

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/example-workflow-handoff",
-  "version": "1.1
+  "version": "1.2.1",
   "description": "A demonstration of using AIGNE Framework to build a handoff workflow",
   "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
   "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-handoff",
@@ -16,10 +16,12 @@
     "README.md"
   ],
   "dependencies": {
+    "openai": "^4.89.1",
     "zod": "^3.24.2",
-    "@aigne/core
+    "@aigne/core": "^1.5.0"
   },
   "scripts": {
-    "start": "npx -y bun run index.ts"
+    "start": "npx -y bun run index.ts",
+    "lint": "tsc --noEmit"
   }
 }
```
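For quick reference, the manifest fields touched by this release read as follows after the change; this is a reconstruction limited to the hunks above, with the untouched fields (description, author, homepage, files, and so on) omitted:

```json
{
  "name": "@aigne/example-workflow-handoff",
  "version": "1.2.1",
  "dependencies": {
    "openai": "^4.89.1",
    "zod": "^3.24.2",
    "@aigne/core": "^1.5.0"
  },
  "scripts": {
    "start": "npx -y bun run index.ts",
    "lint": "tsc --noEmit"
  }
}
```

In short, `openai` becomes a direct dependency, `@aigne/core` is pinned at `^1.5.0` in place of the previous `@aigne/core…` dependency line (truncated in the diff), and a new `lint` script type-checks the example with `tsc --noEmit`.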
package/usages.ts
CHANGED

```diff
@@ -1,10 +1,11 @@
 import assert from "node:assert";
-import { AIAgent,
+import { AIAgent, ExecutionEngine } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -27,7 +28,7 @@ const agentB = AIAgent.from({
 
 const engine = new ExecutionEngine({ model });
 
-const userAgent =
+const userAgent = engine.call(agentA);
 
 const result1 = await userAgent.call("transfer to agent b");
 console.log(result1);
```