@aigne/example-workflow-reflection 1.3.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/index.ts +3 -9
- package/package.json +5 -4
- package/usages.ts +11 -2
package/README.md
CHANGED
```diff
@@ -24,6 +24,7 @@ class reviewer processing
 ## Prerequisites
 
 - [Node.js](https://nodejs.org) and npm installed on your machine
+- [Bun](https://bun.sh) installed on your machine
 - [OpenAI API key](https://platform.openai.com/api-keys) used to interact with OpenAI API
 - [Pnpm](https://pnpm.io) [Optional] if you want to run the example from source code
 
```
package/index.ts
CHANGED
```diff
@@ -1,17 +1,11 @@
 #!/usr/bin/env npx -y bun
 
-import …
+import { runChatLoopInTerminal } from "@aigne/cli/utils/run-chat-loop.js";
 import { AIAgent, ExecutionEngine, UserAgent, UserInputTopic, UserOutputTopic } from "@aigne/core";
-import { …
-import { runChatLoopInTerminal } from "@aigne/core/utils/run-chat-loop.js";
+import { loadModel } from "@aigne/core/loader/index.js";
 import { z } from "zod";
 
-const …
-assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
-
-const model = new OpenAIChatModel({
-  apiKey: OPENAI_API_KEY,
-});
+const model = await loadModel();
 
 const coder = AIAgent.from({
   subscribeTopic: [UserInputTopic, "rewrite_request"],
```
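The substantive change in this hunk is how the chat model is obtained: the hand-built `OpenAIChatModel` guarded by an `OPENAI_API_KEY` assertion is replaced by the core loader, and the terminal chat-loop helper moves from `@aigne/core` to `@aigne/cli`. A minimal before/after sketch, using only the calls visible in the hunk (several removed lines are truncated in this view, so the "before" half is kept as comments rather than reconstructed exactly):

```ts
// Before (1.3.0), as far as the visible removed lines show:
//   assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
//   const model = new OpenAIChatModel({ apiKey: OPENAI_API_KEY });

// After (1.5.0): the model is resolved by the loader instead of being
// hard-wired to OpenAI inside the example itself.
import { loadModel } from "@aigne/core/loader/index.js";

const model = await loadModel();
```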
package/package.json
CHANGED
```diff
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/example-workflow-reflection",
-  "version": "1.3.0",
+  "version": "1.5.0",
   "description": "A demonstration of using AIGNE Framework to build a reflection workflow",
   "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
   "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-reflection",
@@ -16,12 +16,13 @@
     "README.md"
   ],
   "dependencies": {
-    "openai": "^4.…
+    "openai": "^4.94.0",
     "zod": "^3.24.2",
-    "@aigne/…
+    "@aigne/cli": "^1.3.0",
+    "@aigne/core": "^1.8.0"
   },
   "scripts": {
-    "start": "…
+    "start": "bun run index.ts",
     "lint": "tsc --noEmit"
   }
 }
```
package/usages.ts
CHANGED
```diff
@@ -1,5 +1,11 @@
 import assert from "node:assert";
-import { …
+import {
+  AIAgent,
+  ExecutionEngine,
+  UserInputTopic,
+  UserOutputTopic,
+  createPublishMessage,
+} from "@aigne/core";
 import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 import { z } from "zod";
 
@@ -69,7 +75,10 @@ Please review the code. If previous feedback was provided, see if it was address
 });
 
 const engine = new ExecutionEngine({ model, agents: [coder, reviewer] });
-engine.publish(…
+engine.publish(
+  UserInputTopic,
+  createPublishMessage("Write a function to find the sum of all even numbers in a list."),
+);
 
 const { message } = await engine.subscribe(UserOutputTopic);
 console.log(message);
```