@aigne/example-workflow-sequential 1.8.0 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -74,7 +74,7 @@ The following example demonstrates how to build a sequential workflow:
74
74
 
75
75
  ```typescript
76
76
  import assert from "node:assert";
77
- import { AIAgent, ExecutionEngine, sequential } from "@aigne/core";
77
+ import { AIAgent, AIGNE, TeamAgent, ProcessMode } from "@aigne/core";
78
78
  import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
79
79
 
80
80
  const { OPENAI_API_KEY } = process.env;
@@ -126,9 +126,15 @@ Draft copy:
126
126
  outputKey: "content",
127
127
  });
128
128
 
129
- const engine = new ExecutionEngine({ model });
129
+ const aigne = new AIGNE({ model });
130
130
 
131
- const result = await engine.call(sequential(conceptExtractor, writer, formatProof), {
131
+ // Create a TeamAgent to handle the sequential workflow
132
+ const teamAgent = TeamAgent.from({
133
+ skills: [conceptExtractor, writer, formatProof],
134
+ mode: ProcessMode.sequential // default value, can be omitted
135
+ });
136
+
137
+ const result = await aigne.invoke(teamAgent, {
132
138
  product: "AIGNE is a No-code Generative AI Apps Engine",
133
139
  });
134
140
 
package/index.test.ts CHANGED
@@ -1,5 +1,11 @@
1
- import { test } from "bun:test";
1
+ import { expect, test } from "bun:test";
2
+ import { runExampleTest } from "@aigne/test-utils/run-example-test.js";
2
3
 
3
- test("should successfully execute the workflow-sequential", () => import("./index.js"), {
4
- timeout: 60000,
5
- });
4
+ test(
5
+ "should successfully run the workflow-sequential",
6
+ async () => {
7
+ const { code } = await runExampleTest();
8
+ expect(code).toBe(0);
9
+ },
10
+ { timeout: 600000 },
11
+ );
package/index.ts CHANGED
@@ -1,7 +1,7 @@
1
1
  #!/usr/bin/env bunwrapper
2
2
 
3
3
  import { runChatLoopInTerminal } from "@aigne/cli/utils/run-chat-loop.js";
4
- import { AIAgent, ExecutionEngine, sequential } from "@aigne/core";
4
+ import { AIAgent, AIGNE, ProcessMode, TeamAgent } from "@aigne/core";
5
5
  import { loadModel } from "@aigne/core/loader/index.js";
6
6
 
7
7
  const model = await loadModel();
@@ -48,9 +48,14 @@ Draft copy:
48
48
  outputKey: "content",
49
49
  });
50
50
 
51
- const engine = new ExecutionEngine({ model });
51
+ const aigne = new AIGNE({ model });
52
52
 
53
- const userAgent = engine.call(sequential(conceptExtractor, writer, formatProof));
53
+ const userAgent = aigne.invoke(
54
+ TeamAgent.from({
55
+ skills: [conceptExtractor, writer, formatProof],
56
+ mode: ProcessMode.sequential,
57
+ }),
58
+ );
54
59
 
55
60
  await runChatLoopInTerminal(userAgent, {
56
61
  welcome: `Hello, I'm a marketing assistant. I can help you with product descriptions, marketing copy, and editing.`,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aigne/example-workflow-sequential",
3
- "version": "1.8.0",
3
+ "version": "1.10.0",
4
4
  "description": "A demonstration of using AIGNE Framework to build a sequential workflow",
5
5
  "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
6
6
  "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-sequential",
@@ -18,8 +18,11 @@
18
18
  "dependencies": {
19
19
  "openai": "^4.94.0",
20
20
  "zod": "^3.24.2",
21
- "@aigne/cli": "^1.7.0",
22
- "@aigne/core": "^1.11.0"
21
+ "@aigne/cli": "^1.8.1",
22
+ "@aigne/core": "^1.13.0"
23
+ },
24
+ "devDependencies": {
25
+ "@aigne/test-utils": "^0.1.0"
23
26
  },
24
27
  "scripts": {
25
28
  "start": "bun run index.ts",
package/usages.ts CHANGED
@@ -1,5 +1,5 @@
1
1
  import assert from "node:assert";
2
- import { AIAgent, ExecutionEngine, sequential } from "@aigne/core";
2
+ import { AIAgent, AIGNE, ProcessMode, TeamAgent } from "@aigne/core";
3
3
  import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
4
4
 
5
5
  const { OPENAI_API_KEY } = process.env;
@@ -51,11 +51,17 @@ Draft copy:
51
51
  outputKey: "content",
52
52
  });
53
53
 
54
- const engine = new ExecutionEngine({ model });
54
+ const aigne = new AIGNE({ model });
55
55
 
56
- const result = await engine.call(sequential(conceptExtractor, writer, formatProof), {
57
- product: "AIGNE is a No-code Generative AI Apps Engine",
58
- });
56
+ const result = await aigne.invoke(
57
+ TeamAgent.from({
58
+ skills: [conceptExtractor, writer, formatProof],
59
+ mode: ProcessMode.sequential,
60
+ }),
61
+ {
62
+ product: "AIGNE is a No-code Generative AI Apps Engine",
63
+ },
64
+ );
59
65
 
60
66
  console.log(result);
61
67