@aigne/example-workflow-concurrency 1.1.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -6
- package/index.ts +5 -9
- package/package.json +5 -3
- package/usages.ts +6 -6
package/README.md (CHANGED)

@@ -74,12 +74,13 @@ The following example demonstrates how to build a concurrency workflow:
 
 ```typescript
 import assert from "node:assert";
-import { AIAgent,
+import { AIAgent, ExecutionEngine, parallel } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -103,10 +104,9 @@ Product description:
 
 const engine = new ExecutionEngine({ model });
 
-const result = await engine.
-
-
-);
+const result = await engine.call(parallel(featureExtractor, audienceAnalyzer), {
+  product: "AIGNE is a No-code Generative AI Apps Engine",
+});
 
 console.log(result);
 
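Taken together, the two README hunks show the updated one-shot usage of the concurrency workflow: an `OpenAIChatModel` is built from the new `@aigne/core` model entry point, and `engine.call` runs two agents through `parallel(...)` against a single input. The sketch below reconstructs roughly how the full example reads after this change; the `featureExtractor` and `audienceAnalyzer` definitions fall in a part of the README the diff does not show, so their construction via `AIAgent.from`, the `outputKey` names, and the `{{product}}` placeholder are assumptions rather than the package's exact code.

```ts
import assert from "node:assert";
import { AIAgent, ExecutionEngine, parallel } from "@aigne/core";
import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

const model = new OpenAIChatModel({
  apiKey: OPENAI_API_KEY,
});

// The two agents are defined in a part of the README the diff does not show;
// AIAgent.from, the outputKey names, and the {{product}} placeholder are assumptions.
const featureExtractor = AIAgent.from({
  instructions: `Extract the key features of the product.

Product description:
{{product}}`,
  outputKey: "features",
});

const audienceAnalyzer = AIAgent.from({
  instructions: `Identify the target audience for the product.

Product description:
{{product}}`,
  outputKey: "audience",
});

const engine = new ExecutionEngine({ model });

// parallel(...) runs both agents concurrently over the same input object.
const result = await engine.call(parallel(featureExtractor, audienceAnalyzer), {
  product: "AIGNE is a No-code Generative AI Apps Engine",
});

console.log(result);
```

If `parallel` merges each agent's output under its `outputKey`, the logged `result` would carry both a features summary and an audience analysis for the given product description.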
package/index.ts (CHANGED)

@@ -1,18 +1,14 @@
 #!/usr/bin/env npx -y bun
 
 import assert from "node:assert";
-import {
-
-
-  ExecutionEngine,
-  parallel,
-  runChatLoopInTerminal,
-} from "@aigne/core-next";
+import { AIAgent, ExecutionEngine, parallel } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
+import { runChatLoopInTerminal } from "@aigne/core/utils/run-chat-loop.js";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -36,7 +32,7 @@ Product description:
 
 const engine = new ExecutionEngine({ model });
 
-const userAgent =
+const userAgent = engine.call(parallel(featureExtractor, audienceAnalyzer));
 
 await runChatLoopInTerminal(userAgent, {
   welcome: `Hello, I'm a product analyst and market researcher. I can help you with extracting features and identifying target audience.`,
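index.ts wires the same parallel workflow into an interactive session instead of a one-shot call: `engine.call` is invoked without an input object and the returned agent is handed to `runChatLoopInTerminal`. A minimal sketch under the same assumptions as above; only the imports, model setup, the `userAgent` line, and the welcome message come from the diff, while the agent definitions are hypothetical.

```ts
#!/usr/bin/env npx -y bun

import assert from "node:assert";
import { AIAgent, ExecutionEngine, parallel } from "@aigne/core";
import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
import { runChatLoopInTerminal } from "@aigne/core/utils/run-chat-loop.js";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

const model = new OpenAIChatModel({ apiKey: OPENAI_API_KEY });

// Hypothetical agent definitions -- the diff does not show how index.ts builds them.
const featureExtractor = AIAgent.from({
  instructions: "Extract the key features of the product:\n\n{{product}}",
  outputKey: "features",
});
const audienceAnalyzer = AIAgent.from({
  instructions: "Identify the target audience for the product:\n\n{{product}}",
  outputKey: "audience",
});

const engine = new ExecutionEngine({ model });

// Called without an input object, engine.call yields an agent that can be
// driven turn by turn rather than returning a final result.
const userAgent = engine.call(parallel(featureExtractor, audienceAnalyzer));

await runChatLoopInTerminal(userAgent, {
  welcome: `Hello, I'm a product analyst and market researcher. I can help you with extracting features and identifying target audience.`,
});
```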
package/package.json (CHANGED)

@@ -1,6 +1,6 @@
 {
   "name": "@aigne/example-workflow-concurrency",
-  "version": "1.1.0",
+  "version": "1.2.1",
   "description": "A demonstration of using AIGNE Framework to build a concurrency workflow",
   "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
   "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-concurrency",
@@ -16,10 +16,12 @@
     "README.md"
   ],
   "dependencies": {
+    "openai": "^4.89.1",
     "zod": "^3.24.2",
-    "@aigne/core
+    "@aigne/core": "^1.5.0"
   },
   "scripts": {
-    "start": "npx -y bun run index.ts"
+    "start": "npx -y bun run index.ts",
+    "lint": "tsc --noEmit"
   }
 }
package/usages.ts (CHANGED)

@@ -1,10 +1,11 @@
 import assert from "node:assert";
-import { AIAgent,
+import { AIAgent, ExecutionEngine, parallel } from "@aigne/core";
+import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
 
 const { OPENAI_API_KEY } = process.env;
 assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
 
-const model = new
+const model = new OpenAIChatModel({
   apiKey: OPENAI_API_KEY,
 });
 
@@ -28,10 +29,9 @@ Product description:
 
 const engine = new ExecutionEngine({ model });
 
-const result = await engine.
-
-
-);
+const result = await engine.call(parallel(featureExtractor, audienceAnalyzer), {
+  product: "AIGNE is a No-code Generative AI Apps Engine",
+});
 
 console.log(result);
 