@aigne/example-workflow-handoff 1.4.0 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +8 -6
  2. package/index.ts +3 -9
  3. package/package.json +5 -5
package/README.md CHANGED
@@ -43,15 +43,17 @@ end
  ## Prerequisites

  - [Node.js](https://nodejs.org) and npm installed on your machine
- - [OpenAI API key](https://platform.openai.com/api-keys) used to interact with OpenAI API
- - [Pnpm](https://pnpm.io) [Optional] if you want to run the example from source code
+ - An [OpenAI API key](https://platform.openai.com/api-keys) for interacting with OpenAI's services
+ - Optional dependencies (if running the example from source code):
+   - [Bun](https://bun.sh) for running unit tests & examples
+   - [Pnpm](https://pnpm.io) for package management

- ## Try without Installation
+ ## Quick Start (No Installation Required)

  ```bash
- export OPENAI_API_KEY=YOUR_OPENAI_API_KEY # setup your OpenAI API key
+ export OPENAI_API_KEY=YOUR_OPENAI_API_KEY # Set your OpenAI API key

- npx -y @aigne/example-workflow-handoff # run the example
+ npx -y @aigne/example-workflow-handoff # Run the example
  ```

  ## Installation
@@ -75,7 +77,7 @@ pnpm install
  Setup your OpenAI API key in the `.env.local` file:

  ```bash
- OPENAI_API_KEY="" # setup your OpenAI API key here
+ OPENAI_API_KEY="" # Set your OpenAI API key here
  ```

  ### Run the Example
package/index.ts CHANGED
@@ -1,17 +1,11 @@
- #!/usr/bin/env npx -y bun
+ #!/usr/bin/env bunwrapper

- import assert from "node:assert";
  import { runChatLoopInTerminal } from "@aigne/cli/utils/run-chat-loop.js";
  import { AIAgent, type Agent, ExecutionEngine, FunctionAgent } from "@aigne/core";
- import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
+ import { loadModel } from "@aigne/core/loader/index.js";
  import { z } from "zod";

- const { OPENAI_API_KEY } = process.env;
- assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
-
- const model = new OpenAIChatModel({
-   apiKey: OPENAI_API_KEY,
- });
+ const model = await loadModel();

  const execute_order_tool = FunctionAgent.from({
    name: "execute_order",
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@aigne/example-workflow-handoff",
-   "version": "1.4.0",
+   "version": "1.6.1",
    "description": "A demonstration of using AIGNE Framework to build a handoff workflow",
    "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
    "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/workflow-handoff",
@@ -16,13 +16,13 @@
      "README.md"
    ],
    "dependencies": {
-     "openai": "^4.93.0",
+     "openai": "^4.94.0",
      "zod": "^3.24.2",
-     "@aigne/cli": "^1.2.0",
-     "@aigne/core": "^1.7.0"
+     "@aigne/cli": "^1.5.1",
+     "@aigne/core": "^1.10.0"
    },
    "scripts": {
-     "start": "npx -y bun run index.ts",
+     "start": "bun run index.ts",
      "lint": "tsc --noEmit"
    }
  }