@aigne/example-workflow-concurrency 1.9.2 → 1.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/README.md +44 -3
  2. package/index.ts +12 -17
  3. package/package.json +6 -6
  4. package/usages.ts +1 -1
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # Workflow Concurrency Demo
2
2
 
3
- This is a demonstration of using [AIGNE Framework](https://github.com/AIGNE-io/aigne-framework) to build a concurrency workflow.
3
+ This is a demonstration of using [AIGNE Framework](https://github.com/AIGNE-io/aigne-framework) to build a concurrency workflow. The example now supports both one-shot and interactive chat modes, along with customizable model settings and pipeline input/output.
4
4
 
5
5
  ```mermaid
6
6
  flowchart LR
@@ -37,7 +37,14 @@ class aggregator processing
37
37
  ```bash
38
38
  export OPENAI_API_KEY=YOUR_OPENAI_API_KEY # Set your OpenAI API key
39
39
 
40
- npx -y @aigne/example-workflow-concurrency # Run the example
40
+ # Run in one-shot mode (default)
41
+ npx -y @aigne/example-workflow-concurrency
42
+
43
+ # Run in interactive chat mode
44
+ npx -y @aigne/example-workflow-concurrency --chat
45
+
46
+ # Use pipeline input
47
+ echo "Analyze product: Smart home assistant with voice control and AI learning capabilities" | npx -y @aigne/example-workflow-concurrency
41
48
  ```
42
49
 
43
50
  ## Installation
@@ -67,7 +74,41 @@ OPENAI_API_KEY="" # Set your OpenAI API key here
67
74
  ### Run the Example
68
75
 
69
76
  ```bash
70
- pnpm start
77
+ pnpm start # Run in one-shot mode (default)
78
+
79
+ # Run in interactive chat mode
80
+ pnpm start -- --chat
81
+
82
+ # Use pipeline input
83
+ echo "Analyze product: Smart home assistant with voice control and AI learning capabilities" | pnpm start
84
+ ```
85
+
86
+ ### Run Options
87
+
88
+ The example supports the following command-line parameters:
89
+
90
+ | Parameter | Description | Default |
91
+ |-----------|-------------|---------|
92
+ | `--chat` | Run in interactive chat mode | Disabled (one-shot mode) |
93
+ | `--model <provider[:model]>` | AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini' | openai |
94
+ | `--temperature <value>` | Temperature for model generation | Provider default |
95
+ | `--top-p <value>` | Top-p sampling value | Provider default |
96
+ | `--presence-penalty <value>` | Presence penalty value | Provider default |
97
+ | `--frequency-penalty <value>` | Frequency penalty value | Provider default |
98
+ | `--log-level <level>` | Set logging level (ERROR, WARN, INFO, DEBUG, TRACE) | INFO |
99
+ | `--input`, `-i <input>` | Specify input directly | None |
100
+
101
+ #### Examples
102
+
103
+ ```bash
104
+ # Run in chat mode (interactive)
105
+ pnpm start -- --chat
106
+
107
+ # Set logging level
108
+ pnpm start -- --log-level DEBUG
109
+
110
+ # Use pipeline input
111
+ echo "Analyze product: Smart home assistant with voice control and AI learning capabilities" | pnpm start
71
112
  ```
72
113
 
73
114
  ## Example
package/index.ts CHANGED
@@ -1,10 +1,7 @@
1
1
  #!/usr/bin/env bunwrapper
2
2
 
3
- import { runChatLoopInTerminal } from "@aigne/cli/utils/run-chat-loop.js";
4
- import { AIAgent, AIGNE, ProcessMode, TeamAgent } from "@aigne/core";
5
- import { loadModel } from "@aigne/core/loader/index.js";
6
-
7
- const model = await loadModel();
3
+ import { runWithAIGNE } from "@aigne/cli/utils/run-with-aigne.js";
4
+ import { AIAgent, ProcessMode, TeamAgent } from "@aigne/core";
8
5
 
9
6
  const featureExtractor = AIAgent.from({
10
7
  instructions: `\
@@ -24,17 +21,15 @@ Product description:
24
21
  outputKey: "audience",
25
22
  });
26
23
 
27
- const aigne = new AIGNE({ model });
28
-
29
- const userAgent = aigne.invoke(
30
- TeamAgent.from({
31
- skills: [featureExtractor, audienceAnalyzer],
32
- mode: ProcessMode.parallel,
33
- }),
34
- );
24
+ const agent = TeamAgent.from({
25
+ skills: [featureExtractor, audienceAnalyzer],
26
+ mode: ProcessMode.parallel,
27
+ });
35
28
 
36
- await runChatLoopInTerminal(userAgent, {
37
- welcome: `Hello, I'm a product analyst and market researcher. I can help you with extracting features and identifying target audience.`,
38
- defaultQuestion: "AIGNE is a No-code Generative AI Apps Engine",
39
- inputKey: "product",
29
+ await runWithAIGNE(agent, {
30
+ chatLoopOptions: {
31
+ welcome: `Hello, I'm a product analyst and market researcher. I can help you with extracting features and identifying target audience.`,
32
+ defaultQuestion: "AIGNE is a No-code Generative AI Apps Engine",
33
+ inputKey: "product",
34
+ },
40
35
  });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aigne/example-workflow-concurrency",
3
- "version": "1.9.2",
3
+ "version": "1.10.1",
4
4
  "description": "A demonstration of using AIGNE Framework to build a concurrency workflow",
5
5
  "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
6
6
  "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-concurrency",
@@ -16,13 +16,13 @@
16
16
  "README.md"
17
17
  ],
18
18
  "dependencies": {
19
- "openai": "^4.97.0",
20
- "zod": "^3.24.4",
21
- "@aigne/cli": "^1.9.1",
22
- "@aigne/core": "^1.15.0"
19
+ "@aigne/cli": "^1.10.1",
20
+ "@aigne/openai": "^0.2.0",
21
+ "@aigne/core": "^1.17.0"
23
22
  },
24
23
  "devDependencies": {
25
- "@aigne/test-utils": "^0.2.0"
24
+ "@types/bun": "^1.2.9",
25
+ "@aigne/test-utils": "^0.3.1"
26
26
  },
27
27
  "scripts": {
28
28
  "start": "bun run index.ts",
package/usages.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  import assert from "node:assert";
2
2
  import { AIAgent, AIGNE, ProcessMode, TeamAgent } from "@aigne/core";
3
- import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
3
+ import { OpenAIChatModel } from "@aigne/openai";
4
4
 
5
5
  const { OPENAI_API_KEY } = process.env;
6
6
  assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");