@aigne/example-workflow-reflection 1.9.1 → 1.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -3
- package/index.ts +19 -15
- package/package.json +6 -5
- package/usages.ts +1 -1
package/README.md
CHANGED
@@ -1,6 +1,6 @@
 # Workflow Reflection Demo
 
-This is a demonstration of using [AIGNE Framework](https://github.com/AIGNE-io/aigne-framework) to build a reflection workflow.
+This is a demonstration of using [AIGNE Framework](https://github.com/AIGNE-io/aigne-framework) to build a reflection workflow. The example now supports both one-shot and interactive chat modes, along with customizable model settings and pipeline input/output.
 
 ```mermaid
 flowchart LR
@@ -34,7 +34,14 @@ class reviewer processing
 ```bash
 export OPENAI_API_KEY=YOUR_OPENAI_API_KEY # Set your OpenAI API key
 
-
+# Run in one-shot mode (default)
+npx -y @aigne/example-workflow-reflection
+
+# Run in interactive chat mode
+npx -y @aigne/example-workflow-reflection --chat
+
+# Use pipeline input
+echo "Write a function to validate email addresses" | npx -y @aigne/example-workflow-reflection
 ```
 
 ## Installation
@@ -64,7 +71,41 @@ OPENAI_API_KEY="" # Set your OpenAI API key here
 ### Run the Example
 
 ```bash
-pnpm start
+pnpm start # Run in one-shot mode (default)
+
+# Run in interactive chat mode
+pnpm start -- --chat
+
+# Use pipeline input
+echo "Write a function to validate email addresses" | pnpm start
+```
+
+### Run Options
+
+The example supports the following command-line parameters:
+
+| Parameter | Description | Default |
+|-----------|-------------|---------|
+| `--chat` | Run in interactive chat mode | Disabled (one-shot mode) |
+| `--model <provider[:model]>` | AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini' | openai |
+| `--temperature <value>` | Temperature for model generation | Provider default |
+| `--top-p <value>` | Top-p sampling value | Provider default |
+| `--presence-penalty <value>` | Presence penalty value | Provider default |
+| `--frequency-penalty <value>` | Frequency penalty value | Provider default |
+| `--log-level <level>` | Set logging level (ERROR, WARN, INFO, DEBUG, TRACE) | INFO |
+| `--input`, `-i <input>` | Specify input directly | None |
+
+#### Examples
+
+```bash
+# Run in chat mode (interactive)
+pnpm start -- --chat
+
+# Set logging level
+pnpm start -- --log-level DEBUG
+
+# Use pipeline input
+echo "Write a function to validate email addresses" | pnpm start
 ```
 
 ## Example
package/index.ts
CHANGED
@@ -1,12 +1,9 @@
 #!/usr/bin/env bunwrapper
 
-import {
-import { AIAgent,
-import { loadModel } from "@aigne/core/loader/index.js";
+import { runWithAIGNE } from "@aigne/cli/utils/run-with-aigne.js";
+import { AIAgent, UserAgent, UserInputTopic, UserOutputTopic } from "@aigne/core";
 import { z } from "zod";
 
-const model = await loadModel();
-
 const coder = AIAgent.from({
   name: "coder",
   subscribeTopic: [UserInputTopic, "rewrite_request"],
@@ -67,15 +64,22 @@ Please review the code. If previous feedback was provided, see if it was address
   includeInputInOutput: true,
 });
 
-
+await runWithAIGNE(
+  (aigne) => {
+    aigne.addAgent(coder, reviewer);
 
-const userAgent = UserAgent.from({
-
-
-
-});
+    const userAgent = UserAgent.from({
+      context: aigne.newContext(),
+      publishTopic: UserInputTopic,
+      subscribeTopic: UserOutputTopic,
+    });
 
-
-
-
-
+    return userAgent;
+  },
+  {
+    chatLoopOptions: {
+      welcome: `Hello, I'm a coder with a reviewer. I can help you write code and get it reviewed.`,
+      defaultQuestion: "Write a function to find the sum of all even numbers in a list.",
+    },
+  },
+);
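Read together, the added lines replace the old `loadModel()` bootstrap with the CLI's `runWithAIGNE` runner, which wires the existing coder and reviewer agents into an interactive chat loop. The following is a rough reassembly of the new entry point built only from the lines added in this diff; the coder and reviewer `AIAgent.from({...})` definitions are elided because they are unchanged, so this is a sketch rather than the complete file.

```ts
#!/usr/bin/env bunwrapper
import { runWithAIGNE } from "@aigne/cli/utils/run-with-aigne.js";
import { AIAgent, UserAgent, UserInputTopic, UserOutputTopic } from "@aigne/core";

// ...coder and reviewer AIAgent.from({...}) definitions, unchanged in this diff...

await runWithAIGNE(
  (aigne) => {
    // Register both agents so they can exchange messages over their topics.
    aigne.addAgent(coder, reviewer);

    // The user agent bridges CLI input/output to the workflow's pub/sub topics.
    const userAgent = UserAgent.from({
      context: aigne.newContext(),
      publishTopic: UserInputTopic,
      subscribeTopic: UserOutputTopic,
    });

    return userAgent;
  },
  {
    // Options for the interactive chat mode enabled by --chat.
    chatLoopOptions: {
      welcome: `Hello, I'm a coder with a reviewer. I can help you write code and get it reviewed.`,
      defaultQuestion: "Write a function to find the sum of all even numbers in a list.",
    },
  },
);
```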
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/example-workflow-reflection",
-  "version": "1.9.1",
+  "version": "1.10.0",
   "description": "A demonstration of using AIGNE Framework to build a reflection workflow",
   "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
   "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-reflection",
@@ -16,13 +16,14 @@
     "README.md"
   ],
   "dependencies": {
-    "openai": "^4.97.0",
     "zod": "^3.24.4",
-    "@aigne/cli": "^1.
-    "@aigne/core": "^1.
+    "@aigne/cli": "^1.10.0",
+    "@aigne/core": "^1.16.0",
+    "@aigne/openai": "^0.1.0"
   },
   "devDependencies": {
-    "@
+    "@types/bun": "^1.2.9",
+    "@aigne/test-utils": "^0.3.0"
   },
   "scripts": {
     "start": "bun run index.ts",
package/usages.ts
CHANGED
@@ -1,6 +1,6 @@
 import assert from "node:assert";
 import { AIAgent, AIGNE, UserInputTopic, UserOutputTopic } from "@aigne/core";
-import { OpenAIChatModel } from "@aigne/
+import { OpenAIChatModel } from "@aigne/openai";
 import { z } from "zod";
 
 const { OPENAI_API_KEY } = process.env;
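With the standalone `openai` SDK dropped from dependencies, `usages.ts` now pulls `OpenAIChatModel` from the dedicated `@aigne/openai` package. A minimal sketch of how the imported pieces fit together is below; the constructor option names (`apiKey`, `model`) and the `new AIGNE({ model })` shape are assumptions based on typical AIGNE Framework usage, not confirmed by this diff.

```ts
import assert from "node:assert";
import { AIGNE } from "@aigne/core";
import { OpenAIChatModel } from "@aigne/openai";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

// Assumed constructor options; check the @aigne/openai docs for the exact shape.
const model = new OpenAIChatModel({ apiKey: OPENAI_API_KEY, model: "gpt-4o-mini" });

// The reflection workflow in usages.ts would hand this model to AIGNE.
const aigne = new AIGNE({ model });
```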