@aigne/example-workflow-concurrency 1.7.0 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -3
- package/index.test.ts +10 -4
- package/index.ts +8 -3
- package/package.json +6 -3
- package/usages.ts +12 -6
package/README.md
CHANGED

@@ -76,7 +76,7 @@ The following example demonstrates how to build a concurrency workflow:
 
 ```typescript
 import assert from "node:assert";
-import { AIAgent,
+import { AIAgent, AIGNE, TeamAgent, ProcessMode } from "@aigne/core";
 import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 
 const { OPENAI_API_KEY } = process.env;

@@ -104,9 +104,15 @@ Product description:
   outputKey: "audience",
 });
 
-const
+const aigne = new AIGNE({ model });
 
-
+// Create a TeamAgent to handle the parallel workflow
+const teamAgent = TeamAgent.from({
+  skills: [featureExtractor, audienceAnalyzer],
+  mode: ProcessMode.parallel
+});
+
+const result = await aigne.invoke(teamAgent, {
   product: "AIGNE is a No-code Generative AI Apps Engine",
 });
 
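Assembled from the hunks above, the updated README example now routes the two analyst agents through a parallel `TeamAgent`. The sketch below is a reconstruction: the `featureExtractor` and `audienceAnalyzer` definitions sit between the two hunks and are not part of this diff, so their instructions, the assertion message, and the `features` output key are illustrative assumptions.

```typescript
import assert from "node:assert";
import { AIAgent, AIGNE, TeamAgent, ProcessMode } from "@aigne/core";
import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "OPENAI_API_KEY is required"); // assumed check; exact wording is not in this diff

const model = new OpenAIChatModel({ apiKey: OPENAI_API_KEY });

// Placeholder agent definitions — the real prompts live between the two hunks shown above.
const featureExtractor = AIAgent.from({
  instructions: `Extract the key features of the product.\nProduct description:\n{{product}}`,
  outputKey: "features", // assumed; only "audience" appears in this diff
});

const audienceAnalyzer = AIAgent.from({
  instructions: `Identify the target audience for the product.\nProduct description:\n{{product}}`,
  outputKey: "audience",
});

const aigne = new AIGNE({ model });

// Create a TeamAgent that runs both skills concurrently.
const teamAgent = TeamAgent.from({
  skills: [featureExtractor, audienceAnalyzer],
  mode: ProcessMode.parallel,
});

const result = await aigne.invoke(teamAgent, {
  product: "AIGNE is a No-code Generative AI Apps Engine",
});

console.log(result); // merged output keyed by each skill's outputKey
```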
package/index.test.ts
CHANGED

@@ -1,5 +1,11 @@
-import { test } from "bun:test";
+import { expect, test } from "bun:test";
+import { runExampleTest } from "@aigne/test-utils/run-example-test.js";
 
-test(
-
-
+test(
+  "should successfully run the workflow-concurrency",
+  async () => {
+    const { code } = await runExampleTest();
+    expect(code).toBe(0);
+  },
+  { timeout: 600000 },
+);
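The rewritten test now shells out to the example via `runExampleTest` from `@aigne/test-utils` and asserts a zero exit code, with a generous 600000 ms timeout for the model round trip. The helper's source is not part of this diff; a minimal sketch of the pattern it presumably follows, using Bun's process API:

```typescript
// Hypothetical stand-in for @aigne/test-utils/run-example-test.js — not the real implementation.
export async function runExampleTestSketch(): Promise<{ code: number }> {
  // Spawn the example exactly as `bun run index.ts` would, inheriting its output streams.
  const proc = Bun.spawn(["bun", "run", "index.ts"], {
    stdout: "inherit",
    stderr: "inherit",
  });
  const code = await proc.exited; // resolves with the child's exit code
  return { code };
}
```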
package/index.ts
CHANGED

@@ -1,7 +1,7 @@
 #!/usr/bin/env bunwrapper
 
 import { runChatLoopInTerminal } from "@aigne/cli/utils/run-chat-loop.js";
-import { AIAgent,
+import { AIAgent, AIGNE, ProcessMode, TeamAgent } from "@aigne/core";
 import { loadModel } from "@aigne/core/loader/index.js";
 
 const model = await loadModel();

@@ -24,9 +24,14 @@ Product description:
   outputKey: "audience",
 });
 
-const
+const aigne = new AIGNE({ model });
 
-const userAgent =
+const userAgent = aigne.invoke(
+  TeamAgent.from({
+    skills: [featureExtractor, audienceAnalyzer],
+    mode: ProcessMode.parallel,
+  }),
+);
 
 await runChatLoopInTerminal(userAgent, {
   welcome: `Hello, I'm a product analyst and market researcher. I can help you with extracting features and identifying target audience.`,
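Two invocation styles appear in this release. index.ts calls `aigne.invoke(...)` with only the team agent and no `await`, and the returned user agent drives `runChatLoopInTerminal`; usages.ts (later in this diff) awaits `aigne.invoke(agent, input)` for a one-shot run. Side by side, as taken from the new code:

```typescript
import { AIGNE, ProcessMode, TeamAgent } from "@aigne/core";
// `model`, `featureExtractor`, and `audienceAnalyzer` are defined as in the hunks above.

const aigne = new AIGNE({ model });
const team = TeamAgent.from({
  skills: [featureExtractor, audienceAnalyzer],
  mode: ProcessMode.parallel,
});

// index.ts: no input and no await — the result is a user agent for the terminal chat loop.
const userAgent = aigne.invoke(team);

// usages.ts: pass the input and await the merged result of both skills.
const result = await aigne.invoke(team, {
  product: "AIGNE is a No-code Generative AI Apps Engine",
});
```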
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@aigne/example-workflow-concurrency",
-  "version": "1.
+  "version": "1.8.0",
   "description": "A demonstration of using AIGNE Framework to build a concurrency workflow",
   "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
   "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-concurrency",

@@ -18,8 +18,11 @@
   "dependencies": {
     "openai": "^4.94.0",
     "zod": "^3.24.2",
-    "@aigne/
-    "@aigne/
+    "@aigne/core": "^1.12.0",
+    "@aigne/cli": "^1.8.0"
+  },
+  "devDependencies": {
+    "@aigne/test-utils": "^0.1.0"
   },
   "scripts": {
     "start": "bun run index.ts",
package/usages.ts
CHANGED

@@ -1,5 +1,5 @@
 import assert from "node:assert";
-import { AIAgent,
+import { AIAgent, AIGNE, ProcessMode, TeamAgent } from "@aigne/core";
 import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 
 const { OPENAI_API_KEY } = process.env;

@@ -27,11 +27,17 @@ Product description:
   outputKey: "audience",
 });
 
-const
-
-const result = await
-
-
+const aigne = new AIGNE({ model });
+
+const result = await aigne.invoke(
+  TeamAgent.from({
+    skills: [featureExtractor, audienceAnalyzer],
+    mode: ProcessMode.parallel,
+  }),
+  {
+    product: "AIGNE is a No-code Generative AI Apps Engine",
+  },
+);
 
 console.log(result);
 
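usages.ts mirrors the README example: both analyst agents are wrapped in a parallel `TeamAgent` and invoked through an `AIGNE` instance with a single `product` input. The logged result merges each skill's output under its `outputKey`; only `audience` is visible in this diff, so the `features` key below is an assumption about the other agent:

```typescript
// Illustrative shape of `result` only — the actual text comes from the model and will vary.
const result = {
  features: "…key features extracted by featureExtractor…", // assumed outputKey, not shown in this diff
  audience: "…target audiences identified by audienceAnalyzer…", // outputKey shown in this diff
};
console.log(result);
```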