@aigne/example-workflow-reflection 1.2.0 → 1.3.0

package/README.md CHANGED
@@ -71,13 +71,8 @@ The following example demonstrates how to build a reflection workflow:
 
 ```typescript
 import assert from "node:assert";
-import {
-  AIAgent,
-  OpenAIChatModel,
-  ExecutionEngine,
-  UserInputTopic,
-  UserOutputTopic,
-} from "@aigne/core";
+import { AIAgent, ExecutionEngine, UserInputTopic, UserOutputTopic } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 import { z } from "zod";
 
 const { OPENAI_API_KEY } = process.env;
package/index.ts CHANGED
@@ -1,15 +1,9 @@
 #!/usr/bin/env npx -y bun
 
 import assert from "node:assert";
-import {
-  AIAgent,
-  ExecutionEngine,
-  OpenAIChatModel,
-  UserAgent,
-  UserInputTopic,
-  UserOutputTopic,
-  runChatLoopInTerminal,
-} from "@aigne/core";
+import { AIAgent, ExecutionEngine, UserAgent, UserInputTopic, UserOutputTopic } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
+import { runChatLoopInTerminal } from "@aigne/core/utils/run-chat-loop.js";
 import { z } from "zod";
 
 const { OPENAI_API_KEY } = process.env;
@@ -80,7 +74,7 @@ Please review the code. If previous feedback was provided, see if it was address
 const engine = new ExecutionEngine({ model, agents: [coder, reviewer] });
 
 const userAgent = UserAgent.from({
-  context: engine,
+  context: engine.newContext(),
   publishTopic: UserInputTopic,
   subscribeTopic: UserOutputTopic,
 });
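
For context, here is a minimal sketch of how the 1.3.0 entry point wires these pieces together, assembled only from the hunks above. The `model`, `coder`, and `reviewer` values are placeholders for definitions that this diff does not show, so they are declared rather than constructed:

```typescript
import { AIAgent, ExecutionEngine, UserAgent, UserInputTopic, UserOutputTopic } from "@aigne/core";
import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";

// Placeholders: index.ts builds these with project-specific options not shown in this diff.
declare const model: OpenAIChatModel;
declare const coder: AIAgent;
declare const reviewer: AIAgent;

const engine = new ExecutionEngine({ model, agents: [coder, reviewer] });

// 1.2.0 passed the engine itself as the user agent's context;
// 1.3.0 obtains a dedicated context from the engine instead.
const userAgent = UserAgent.from({
  context: engine.newContext(),
  publishTopic: UserInputTopic,
  subscribeTopic: UserOutputTopic,
});
```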
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@aigne/example-workflow-reflection",
-  "version": "1.2.0",
+  "version": "1.3.0",
   "description": "A demonstration of using AIGNE Framework to build a reflection workflow",
   "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
   "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-reflection",
@@ -16,9 +16,9 @@
     "README.md"
   ],
   "dependencies": {
-    "openai": "^4.89.0",
+    "openai": "^4.91.1",
     "zod": "^3.24.2",
-    "@aigne/core": "^1.3.0"
+    "@aigne/core": "^1.6.0"
   },
   "scripts": {
     "start": "npx -y bun run index.ts",
package/usages.ts CHANGED
@@ -1,11 +1,6 @@
 import assert from "node:assert";
-import {
-  AIAgent,
-  ExecutionEngine,
-  OpenAIChatModel,
-  UserInputTopic,
-  UserOutputTopic,
-} from "@aigne/core";
+import { AIAgent, ExecutionEngine, UserInputTopic, UserOutputTopic } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 import { z } from "zod";
 
 const { OPENAI_API_KEY } = process.env;