@aigne/cli 1.16.0 → 1.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +23 -0
- package/README.md +4 -4
- package/README.zh.md +4 -4
- package/dist/cli.js +1 -1
- package/dist/commands/run.js +10 -3
- package/dist/commands/serve.js +4 -3
- package/dist/commands/test.js +3 -2
- package/dist/constants.d.ts +2 -4
- package/dist/constants.js +51 -10
- package/dist/utils/run-with-aigne.js +5 -3
- package/package.json +13 -11
- package/templates/default/.env.local.example +18 -0
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,28 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## [1.17.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.16.0...cli-v1.17.0) (2025-07-01)
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
### Features
|
|
7
|
+
|
|
8
|
+
* **cli:** support HTTPS_PROXY and named path param ([#196](https://github.com/AIGNE-io/aigne-framework/issues/196)) ([04e684e](https://github.com/AIGNE-io/aigne-framework/commit/04e684ee26bc2d79924b0e3cb541cd07a7191804))
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
### Dependencies
|
|
12
|
+
|
|
13
|
+
* The following workspace dependencies were updated
|
|
14
|
+
* dependencies
|
|
15
|
+
* @aigne/agent-library bumped to 1.17.4
|
|
16
|
+
* @aigne/anthropic bumped to 0.4.0
|
|
17
|
+
* @aigne/bedrock bumped to 0.4.0
|
|
18
|
+
* @aigne/core bumped to 1.27.0
|
|
19
|
+
* @aigne/deepseek bumped to 0.3.11
|
|
20
|
+
* @aigne/gemini bumped to 0.3.11
|
|
21
|
+
* @aigne/ollama bumped to 0.3.11
|
|
22
|
+
* @aigne/open-router bumped to 0.3.11
|
|
23
|
+
* @aigne/openai bumped to 0.5.0
|
|
24
|
+
* @aigne/xai bumped to 0.3.11
|
|
25
|
+
|
|
3
26
|
## [1.16.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.15.0...cli-v1.16.0) (2025-06-30)
|
|
4
27
|
|
|
5
28
|
|
package/README.md
CHANGED
|
@@ -55,13 +55,13 @@ aigne --help
|
|
|
55
55
|
aigne create [path]
|
|
56
56
|
|
|
57
57
|
# Run an agent
|
|
58
|
-
aigne run
|
|
58
|
+
aigne run --path xxx
|
|
59
59
|
|
|
60
60
|
# Run tests
|
|
61
|
-
aigne test
|
|
61
|
+
aigne test --path xxx
|
|
62
62
|
|
|
63
63
|
# Start MCP server
|
|
64
|
-
aigne serve
|
|
64
|
+
aigne serve --path xxx --mcp
|
|
65
65
|
|
|
66
66
|
# Start observability server
|
|
67
67
|
aigne observe [option]
|
|
@@ -133,7 +133,7 @@ aigne serve --mcp
|
|
|
133
133
|
aigne serve --mcp --port 3001
|
|
134
134
|
|
|
135
135
|
# Start MCP server for agents at specified path
|
|
136
|
-
aigne serve path/to/agents --mcp
|
|
136
|
+
aigne serve --path path/to/agents --mcp
|
|
137
137
|
```
|
|
138
138
|
|
|
139
139
|
## Serve Command (observability)
|
package/README.zh.md
CHANGED
|
@@ -55,13 +55,13 @@ aigne --help
|
|
|
55
55
|
aigne create [path]
|
|
56
56
|
|
|
57
57
|
# 运行代理
|
|
58
|
-
aigne run
|
|
58
|
+
aigne run --path xxx
|
|
59
59
|
|
|
60
60
|
# 运行测试
|
|
61
|
-
aigne test
|
|
61
|
+
aigne test --path xxx
|
|
62
62
|
|
|
63
63
|
# 启动 MCP 服务器
|
|
64
|
-
aigne serve
|
|
64
|
+
aigne serve --path xxx --mcp
|
|
65
65
|
|
|
66
66
|
# 启动 observability 服务器
|
|
67
67
|
aigne observe [option]
|
|
@@ -133,7 +133,7 @@ aigne serve --mcp
|
|
|
133
133
|
aigne serve --mcp --port 3001
|
|
134
134
|
|
|
135
135
|
# 为指定路径的代理启动 MCP 服务器
|
|
136
|
-
aigne serve path/to/agents --mcp
|
|
136
|
+
aigne serve --path path/to/agents --mcp
|
|
137
137
|
```
|
|
138
138
|
|
|
139
139
|
## 服务命令 (observability)
|
package/dist/cli.js
CHANGED
package/dist/commands/run.js
CHANGED
|
@@ -7,6 +7,7 @@ import { loadModel } from "@aigne/core/loader/index.js";
|
|
|
7
7
|
import { logger } from "@aigne/core/utils/logger.js";
|
|
8
8
|
import { isNonNullable } from "@aigne/core/utils/type-utils.js";
|
|
9
9
|
import { Listr, PRESET_TIMER } from "@aigne/listr2";
|
|
10
|
+
import { config } from "dotenv-flow";
|
|
10
11
|
import { availableMemories, availableModels } from "../constants.js";
|
|
11
12
|
import { isV1Package, toAIGNEPackage } from "../utils/agent-v1.js";
|
|
12
13
|
import { downloadAndExtract } from "../utils/download.js";
|
|
@@ -43,7 +44,12 @@ export function createRunCommand() {
|
|
|
43
44
|
{
|
|
44
45
|
title: "Initialize AIGNE",
|
|
45
46
|
task: async (ctx) => {
|
|
46
|
-
|
|
47
|
+
// Load env files in the aigne directory
|
|
48
|
+
config({ path: dir, silent: true });
|
|
49
|
+
const aigne = await loadAIGNE(dir, {
|
|
50
|
+
...options,
|
|
51
|
+
model: options.model || process.env.MODEL,
|
|
52
|
+
});
|
|
47
53
|
ctx.aigne = aigne;
|
|
48
54
|
},
|
|
49
55
|
},
|
|
@@ -91,10 +97,11 @@ export function createRunCommand() {
|
|
|
91
97
|
.showSuggestionAfterError(true);
|
|
92
98
|
}
|
|
93
99
|
async function loadAIGNE(path, options) {
|
|
100
|
+
const models = availableModels();
|
|
94
101
|
const model = options.model
|
|
95
|
-
? await loadModel(
|
|
102
|
+
? await loadModel(models, parseModelOption(options.model))
|
|
96
103
|
: undefined;
|
|
97
|
-
return await AIGNE.load(path, { models
|
|
104
|
+
return await AIGNE.load(path, { models, memories: availableMemories, model });
|
|
98
105
|
}
|
|
99
106
|
async function downloadPackage(url, cacheDir) {
|
|
100
107
|
await rm(cacheDir, { recursive: true, force: true });
|
package/dist/commands/serve.js
CHANGED
|
@@ -16,16 +16,17 @@ const DEFAULT_PORT = () => tryOrThrow(() => {
|
|
|
16
16
|
export function createServeCommand() {
|
|
17
17
|
return new Command("serve")
|
|
18
18
|
.description("Serve the agents in the specified directory as a MCP server")
|
|
19
|
-
.
|
|
19
|
+
.option("--url, --path <path_or_url>", "Path to the agents directory or URL to aigne project", ".")
|
|
20
20
|
.option("--mcp", "Serve the agents as a MCP server")
|
|
21
21
|
.option("--host <host>", "Host to run the MCP server on, use 0.0.0.0 to publicly expose the server", "localhost")
|
|
22
22
|
.option("--port <port>", "Port to run the MCP server on", (s) => Number.parseInt(s))
|
|
23
23
|
.option("--pathname <pathname>", "Pathname to the service", "/mcp")
|
|
24
|
-
.action(async (
|
|
24
|
+
.action(async (options) => {
|
|
25
|
+
const { path } = options;
|
|
25
26
|
const absolutePath = isAbsolute(path) ? path : resolve(process.cwd(), path);
|
|
26
27
|
const port = options.port || DEFAULT_PORT();
|
|
27
28
|
const aigne = await AIGNE.load(absolutePath, {
|
|
28
|
-
models: availableModels,
|
|
29
|
+
models: availableModels(),
|
|
29
30
|
memories: availableMemories,
|
|
30
31
|
});
|
|
31
32
|
if (options.mcp)
|
package/dist/commands/test.js
CHANGED
|
@@ -4,8 +4,9 @@ import { Command } from "commander";
|
|
|
4
4
|
export function createTestCommand() {
|
|
5
5
|
return new Command("test")
|
|
6
6
|
.description("Run tests in the specified agents directory")
|
|
7
|
-
.
|
|
8
|
-
.action(async (
|
|
7
|
+
.option("--url, --path <path_or_url>", "Path to the agents directory or URL to aigne project", ".")
|
|
8
|
+
.action(async (options) => {
|
|
9
|
+
const { path } = options;
|
|
9
10
|
const absolutePath = isAbsolute(path) ? path : resolve(process.cwd(), path);
|
|
10
11
|
spawnSync("node", ["--test"], { cwd: absolutePath, stdio: "inherit" });
|
|
11
12
|
})
|
package/dist/constants.d.ts
CHANGED
|
@@ -1,7 +1,5 @@
|
|
|
1
1
|
import { DefaultMemory } from "@aigne/agent-library/default-memory/index.js";
|
|
2
|
-
import {
|
|
3
|
-
import { BedrockChatModel } from "@aigne/bedrock";
|
|
4
|
-
import { OpenAIChatModel } from "@aigne/openai";
|
|
2
|
+
import type { LoadableModel } from "@aigne/core/loader/index.js";
|
|
5
3
|
export declare const AIGNE_CLI_VERSION: string;
|
|
6
|
-
export declare
|
|
4
|
+
export declare function availableModels(): LoadableModel[];
|
|
7
5
|
export declare const availableMemories: (typeof DefaultMemory)[];
|
package/dist/constants.js
CHANGED
|
@@ -7,16 +7,57 @@ import { OllamaChatModel } from "@aigne/ollama";
|
|
|
7
7
|
import { OpenRouterChatModel } from "@aigne/open-router";
|
|
8
8
|
import { OpenAIChatModel } from "@aigne/openai";
|
|
9
9
|
import { XAIChatModel } from "@aigne/xai";
|
|
10
|
+
import { NodeHttpHandler, streamCollector } from "@smithy/node-http-handler";
|
|
11
|
+
import { HttpsProxyAgent } from "https-proxy-agent";
|
|
10
12
|
import pkg from "../package.json" with { type: "json" };
|
|
11
13
|
export const AIGNE_CLI_VERSION = pkg.version;
|
|
12
|
-
export
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
14
|
+
export function availableModels() {
|
|
15
|
+
const proxy = ["HTTPS_PROXY", "https_proxy", "HTTP_PROXY", "http_proxy", "ALL_PROXY", "all_proxy"]
|
|
16
|
+
.map((i) => process.env[i])
|
|
17
|
+
.filter(Boolean)[0];
|
|
18
|
+
const httpAgent = proxy ? new HttpsProxyAgent(proxy) : undefined;
|
|
19
|
+
return [
|
|
20
|
+
{
|
|
21
|
+
name: OpenAIChatModel.name,
|
|
22
|
+
create: (params) => new OpenAIChatModel({ ...params, clientOptions: { httpAgent } }),
|
|
23
|
+
},
|
|
24
|
+
{
|
|
25
|
+
name: AnthropicChatModel.name,
|
|
26
|
+
create: (params) => new AnthropicChatModel({ ...params, clientOptions: { httpAgent } }),
|
|
27
|
+
},
|
|
28
|
+
{
|
|
29
|
+
name: BedrockChatModel.name,
|
|
30
|
+
create: (params) => new BedrockChatModel({
|
|
31
|
+
...params,
|
|
32
|
+
clientOptions: {
|
|
33
|
+
requestHandler: NodeHttpHandler.create({
|
|
34
|
+
httpAgent,
|
|
35
|
+
httpsAgent: httpAgent,
|
|
36
|
+
}),
|
|
37
|
+
streamCollector,
|
|
38
|
+
},
|
|
39
|
+
}),
|
|
40
|
+
},
|
|
41
|
+
{
|
|
42
|
+
name: DeepSeekChatModel.name,
|
|
43
|
+
create: (params) => new DeepSeekChatModel({ ...params, clientOptions: { httpAgent } }),
|
|
44
|
+
},
|
|
45
|
+
{
|
|
46
|
+
name: GeminiChatModel.name,
|
|
47
|
+
create: (params) => new GeminiChatModel({ ...params, clientOptions: { httpAgent } }),
|
|
48
|
+
},
|
|
49
|
+
{
|
|
50
|
+
name: OllamaChatModel.name,
|
|
51
|
+
create: (params) => new OllamaChatModel({ ...params, clientOptions: { httpAgent } }),
|
|
52
|
+
},
|
|
53
|
+
{
|
|
54
|
+
name: OpenRouterChatModel.name,
|
|
55
|
+
create: (params) => new OpenRouterChatModel({ ...params, clientOptions: { httpAgent } }),
|
|
56
|
+
},
|
|
57
|
+
{
|
|
58
|
+
name: XAIChatModel.name,
|
|
59
|
+
create: (params) => new XAIChatModel({ ...params, clientOptions: { httpAgent } }),
|
|
60
|
+
},
|
|
61
|
+
];
|
|
62
|
+
}
|
|
22
63
|
export const availableMemories = [DefaultMemory];
|
|
@@ -21,7 +21,9 @@ export const createRunAIGNECommand = (name = "run") => new Command(name)
|
|
|
21
21
|
.allowExcessArguments(true)
|
|
22
22
|
.description("Run agent with AIGNE in terminal")
|
|
23
23
|
.option("--chat", "Run chat loop in terminal", false)
|
|
24
|
-
.option("--model <provider[:model]>", `AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini'. Available providers: ${availableModels
|
|
24
|
+
.option("--model <provider[:model]>", `AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini'. Available providers: ${availableModels()
|
|
25
|
+
.map((i) => i.name.toLowerCase().replace(/ChatModel$/i, ""))
|
|
26
|
+
.join(", ")} (default: openai)`)
|
|
25
27
|
.option("--temperature <temperature>", "Temperature for the model (controls randomness, higher values produce more random outputs). Range: 0.0-2.0", customZodError("--temperature", (s) => z.coerce.number().min(0).max(2).parse(s)))
|
|
26
28
|
.option("--top-p <top-p>", "Top P (nucleus sampling) parameter for the model (controls diversity). Range: 0.0-1.0", customZodError("--top-p", (s) => z.coerce.number().min(0).max(1).parse(s)))
|
|
27
29
|
.option("--presence-penalty <presence-penalty>", "Presence penalty for the model (penalizes repeating the same tokens). Range: -2.0 to 2.0", customZodError("--presence-penalty", (s) => z.coerce.number().min(-2).max(2).parse(s)))
|
|
@@ -89,7 +91,7 @@ export async function parseAgentInputByCommander(agent, options = {}) {
|
|
|
89
91
|
return input;
|
|
90
92
|
}
|
|
91
93
|
export const parseModelOption = (model) => {
|
|
92
|
-
const { provider, name } = model?.match(/(?<provider>[^:]+)(:(?<name>(\S+)))?/)?.groups ?? {};
|
|
94
|
+
const { provider, name } = (model || process.env.MODEL)?.match(/(?<provider>[^:]+)(:(?<name>(\S+)))?/)?.groups ?? {};
|
|
93
95
|
return { provider, name };
|
|
94
96
|
};
|
|
95
97
|
export async function runWithAIGNE(agentCreator, { argv = process.argv, chatLoopOptions, modelOptions, outputKey, } = {}) {
|
|
@@ -100,7 +102,7 @@ export async function runWithAIGNE(agentCreator, { argv = process.argv, chatLoop
|
|
|
100
102
|
if (options.logLevel) {
|
|
101
103
|
logger.level = options.logLevel;
|
|
102
104
|
}
|
|
103
|
-
const model = await loadModel(availableModels, {
|
|
105
|
+
const model = await loadModel(availableModels(), {
|
|
104
106
|
...parseModelOption(options.model),
|
|
105
107
|
temperature: options.temperature,
|
|
106
108
|
topP: options.topP,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aigne/cli",
|
|
3
|
-
"version": "1.16.0",
|
|
3
|
+
"version": "1.17.0",
|
|
4
4
|
"description": "cli for AIGNE framework",
|
|
5
5
|
"publishConfig": {
|
|
6
6
|
"access": "public"
|
|
@@ -38,6 +38,7 @@
|
|
|
38
38
|
"@aigne/listr2": "^1.0.10",
|
|
39
39
|
"@aigne/marked-terminal": "^7.3.1",
|
|
40
40
|
"@modelcontextprotocol/sdk": "^1.11.0",
|
|
41
|
+
"@smithy/node-http-handler": "^4.0.6",
|
|
41
42
|
"chalk": "^5.4.1",
|
|
42
43
|
"commander": "^13.1.0",
|
|
43
44
|
"detect-port": "^2.1.0",
|
|
@@ -45,6 +46,7 @@
|
|
|
45
46
|
"express": "^5.1.0",
|
|
46
47
|
"glob": "^11.0.2",
|
|
47
48
|
"gradient-string": "^3.0.0",
|
|
49
|
+
"https-proxy-agent": "^7.0.6",
|
|
48
50
|
"inquirer": "^12.6.0",
|
|
49
51
|
"marked": "^15.0.11",
|
|
50
52
|
"prettier": "^3.5.3",
|
|
@@ -53,17 +55,17 @@
|
|
|
53
55
|
"wrap-ansi": "^9.0.0",
|
|
54
56
|
"yaml": "^2.7.1",
|
|
55
57
|
"zod": "^3.24.4",
|
|
56
|
-
"@aigne/agent-library": "^1.17.
|
|
57
|
-
"@aigne/
|
|
58
|
-
"@aigne/
|
|
59
|
-
"@aigne/core": "^1.
|
|
60
|
-
"@aigne/
|
|
58
|
+
"@aigne/agent-library": "^1.17.4",
|
|
59
|
+
"@aigne/bedrock": "^0.4.0",
|
|
60
|
+
"@aigne/anthropic": "^0.4.0",
|
|
61
|
+
"@aigne/core": "^1.27.0",
|
|
62
|
+
"@aigne/gemini": "^0.3.11",
|
|
63
|
+
"@aigne/ollama": "^0.3.11",
|
|
61
64
|
"@aigne/observability": "^0.3.0",
|
|
62
|
-
"@aigne/
|
|
63
|
-
"@aigne/
|
|
64
|
-
"@aigne/openai": "^0.
|
|
65
|
-
"@aigne/xai": "^0.3.
|
|
66
|
-
"@aigne/open-router": "^0.3.10"
|
|
65
|
+
"@aigne/deepseek": "^0.3.11",
|
|
66
|
+
"@aigne/open-router": "^0.3.11",
|
|
67
|
+
"@aigne/openai": "^0.5.0",
|
|
68
|
+
"@aigne/xai": "^0.3.11"
|
|
67
69
|
},
|
|
68
70
|
"devDependencies": {
|
|
69
71
|
"@types/archiver": "^6.0.3",
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# Change the name of this file to .env.local and fill in the following values
|
|
2
|
+
|
|
3
|
+
# Use this for OpenAI models
|
|
4
|
+
MODEL="openai:gpt-4.1"
|
|
5
|
+
OPENAI_API_KEY=""
|
|
6
|
+
|
|
7
|
+
# Use this for Anthropic models
|
|
8
|
+
# MODEL="anthropic:claude-3-7-sonnet-latest"
|
|
9
|
+
# ANTHROPIC_API_KEY="" # Your Anthropic API key
|
|
10
|
+
|
|
11
|
+
# Use this for AWS Bedrock models
|
|
12
|
+
# AWS_ACCESS_KEY_ID=""
|
|
13
|
+
# AWS_SECRET_ACCESS_KEY=""
|
|
14
|
+
# AWS_REGION=us-west-2
|
|
15
|
+
# MODEL=Bedrock:us.amazon.nova-premier-v1:0
|
|
16
|
+
|
|
17
|
+
# Setup proxy if needed
|
|
18
|
+
# HTTPS_PROXY=http://localhost:7890
|