@aigne/cli 1.15.0 → 1.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,53 @@
1
1
  # Changelog
2
2
 
3
+ ## [1.17.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.16.0...cli-v1.17.0) (2025-07-01)
4
+
5
+
6
+ ### Features
7
+
8
+ * **cli:** support HTTPS_PROXY and named path param ([#196](https://github.com/AIGNE-io/aigne-framework/issues/196)) ([04e684e](https://github.com/AIGNE-io/aigne-framework/commit/04e684ee26bc2d79924b0e3cb541cd07a7191804))
9
+
10
+
11
+ ### Dependencies
12
+
13
+ * The following workspace dependencies were updated
14
+ * dependencies
15
+ * @aigne/agent-library bumped to 1.17.4
16
+ * @aigne/anthropic bumped to 0.4.0
17
+ * @aigne/bedrock bumped to 0.4.0
18
+ * @aigne/core bumped to 1.27.0
19
+ * @aigne/deepseek bumped to 0.3.11
20
+ * @aigne/gemini bumped to 0.3.11
21
+ * @aigne/ollama bumped to 0.3.11
22
+ * @aigne/open-router bumped to 0.3.11
23
+ * @aigne/openai bumped to 0.5.0
24
+ * @aigne/xai bumped to 0.3.11
25
+
26
+ ## [1.16.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.15.0...cli-v1.16.0) (2025-06-30)
27
+
28
+
29
+ ### Features
30
+
31
+ * **cli:** auto-load dotenv files for AIGNE CLI ([#192](https://github.com/AIGNE-io/aigne-framework/issues/192)) ([56d5632](https://github.com/AIGNE-io/aigne-framework/commit/56d5632ba427a1cf39235bcd1c30df3bc60643f6))
32
+ * **ux:** polish tracing ux and update docs ([#193](https://github.com/AIGNE-io/aigne-framework/issues/193)) ([f80b63e](https://github.com/AIGNE-io/aigne-framework/commit/f80b63ecb1cfb00daa9b68330026da839d33f8a2))
33
+
34
+
35
+ ### Dependencies
36
+
37
+ * The following workspace dependencies were updated
38
+ * dependencies
39
+ * @aigne/agent-library bumped to 1.17.3
40
+ * @aigne/anthropic bumped to 0.3.10
41
+ * @aigne/bedrock bumped to 0.3.10
42
+ * @aigne/core bumped to 1.26.0
43
+ * @aigne/deepseek bumped to 0.3.10
44
+ * @aigne/gemini bumped to 0.3.10
45
+ * @aigne/observability bumped to 0.3.0
46
+ * @aigne/ollama bumped to 0.3.10
47
+ * @aigne/open-router bumped to 0.3.10
48
+ * @aigne/openai bumped to 0.4.3
49
+ * @aigne/xai bumped to 0.3.10
50
+
3
51
  ## [1.15.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.14.1...cli-v1.15.0) (2025-06-29)
4
52
 
5
53
 
@@ -29,7 +77,7 @@
29
77
 
30
78
  ### Bug Fixes
31
79
 
32
- * agine cli not found package ([#185](https://github.com/AIGNE-io/aigne-framework/issues/185)) ([5d98b61](https://github.com/AIGNE-io/aigne-framework/commit/5d98b6158f1e43e049a3a51a69bab88092bf1c92))
80
+ * aigne cli not found package ([#185](https://github.com/AIGNE-io/aigne-framework/issues/185)) ([5d98b61](https://github.com/AIGNE-io/aigne-framework/commit/5d98b6158f1e43e049a3a51a69bab88092bf1c92))
33
81
 
34
82
 
35
83
  ### Dependencies
package/README.md CHANGED
@@ -55,16 +55,16 @@ aigne --help
55
55
  aigne create [path]
56
56
 
57
57
  # Run an agent
58
- aigne run [path]
58
+ aigne run --path xxx
59
59
 
60
60
  # Run tests
61
- aigne test [path]
61
+ aigne test --path xxx
62
62
 
63
63
  # Start MCP server
64
- aigne serve [path] --mcp
64
+ aigne serve --path xxx --mcp
65
65
 
66
66
  # Start observability server
67
- aigne observability [option]
67
+ aigne observe [option]
68
68
  ```
69
69
 
70
70
  ## Create Command
@@ -133,7 +133,7 @@ aigne serve --mcp
133
133
  aigne serve --mcp --port 3001
134
134
 
135
135
  # Start MCP server for agents at specified path
136
- aigne serve path/to/agents --mcp
136
+ aigne serve --path path/to/agents --mcp
137
137
  ```
138
138
 
139
139
  ## Serve Command (observability)
@@ -142,10 +142,10 @@ Start the service for monitoring data
142
142
 
143
143
  ```bash
144
144
  # Start observability server on default port 7890
145
- aigne observability
145
+ aigne observe
146
146
 
147
147
  # Start observability server on specified port
148
- aigne observability --port 3001
148
+ aigne observe --port 3001
149
149
  ```
150
150
 
151
151
  ## License
package/README.zh.md CHANGED
@@ -55,16 +55,16 @@ aigne --help
55
55
  aigne create [path]
56
56
 
57
57
  # 运行代理
58
- aigne run [path]
58
+ aigne run --path xxx
59
59
 
60
60
  # 运行测试
61
- aigne test [path]
61
+ aigne test --path xxx
62
62
 
63
63
  # 启动 MCP 服务器
64
- aigne serve [path] --mcp
64
+ aigne serve --path xxx --mcp
65
65
 
66
66
  # 启动 observability 服务器
67
- aigne observability [option]
67
+ aigne observe [option]
68
68
  ```
69
69
 
70
70
  ## 创建命令 (create)
@@ -133,7 +133,7 @@ aigne serve --mcp
133
133
  aigne serve --mcp --port 3001
134
134
 
135
135
  # 为指定路径的代理启动 MCP 服务器
136
- aigne serve path/to/agents --mcp
136
+ aigne serve --path path/to/agents --mcp
137
137
  ```
138
138
 
139
139
  ## 服务命令 (observability)
@@ -142,10 +142,10 @@ aigne serve path/to/agents --mcp
142
142
 
143
143
  ```bash
144
144
  # 在默认端口 7890 启动 Observability 服务器
145
- aigne observability
145
+ aigne observe
146
146
 
147
147
  # 在指定端口启动 Observability 服务器
148
- aigne observability --port 3001
148
+ aigne observe --port 3001
149
149
  ```
150
150
 
151
151
  ## 协议
package/dist/cli.js CHANGED
@@ -1,6 +1,8 @@
1
1
  #!/usr/bin/env node
2
+ import { config } from "dotenv-flow";
2
3
  import PrettyError from "pretty-error";
3
4
  import { createAIGNECommand } from "./commands/aigne.js";
5
+ config({ silent: true });
4
6
  createAIGNECommand()
5
7
  .parseAsync()
6
8
  .catch((error) => {
@@ -61,7 +61,7 @@ export function createCreateCommand() {
61
61
  const destination = join(path, file);
62
62
  await cp(source, destination, { recursive: true, force: true });
63
63
  }
64
- console.log("\n✅ Aigne project created successfully!");
64
+ console.log("\n✅ AIGNE project created successfully!");
65
65
  console.log(`\nTo use your new agent, run:\n cd ${relative(process.cwd(), path)} && aigne run`);
66
66
  })
67
67
  .showHelpAfterError(true)
@@ -13,10 +13,10 @@ const DEFAULT_PORT = () => tryOrThrow(() => {
13
13
  return port;
14
14
  }, (error) => new Error(`parse PORT error ${error.message}`));
15
15
  export function createObservabilityCommand() {
16
- return new Command("observability")
16
+ return new Command("observe")
17
17
  .description("Start the observability server")
18
- .option("--host <host>", "Host to run the MCP server on, use 0.0.0.0 to publicly expose the server", "localhost")
19
- .option("--port <port>", "Port to run the MCP server on", (s) => Number.parseInt(s))
18
+ .option("--host <host>", "Host to run the observability server on, use 0.0.0.0 to publicly expose the server", "localhost")
19
+ .option("--port <port>", "Port to run the observability server on", (s) => Number.parseInt(s))
20
20
  .action(async (options) => {
21
21
  const port = await detectPort(options.port || DEFAULT_PORT());
22
22
  const dbUrl = getObservabilityDbPath();
@@ -7,6 +7,7 @@ import { loadModel } from "@aigne/core/loader/index.js";
7
7
  import { logger } from "@aigne/core/utils/logger.js";
8
8
  import { isNonNullable } from "@aigne/core/utils/type-utils.js";
9
9
  import { Listr, PRESET_TIMER } from "@aigne/listr2";
10
+ import { config } from "dotenv-flow";
10
11
  import { availableMemories, availableModels } from "../constants.js";
11
12
  import { isV1Package, toAIGNEPackage } from "../utils/agent-v1.js";
12
13
  import { downloadAndExtract } from "../utils/download.js";
@@ -43,7 +44,12 @@ export function createRunCommand() {
43
44
  {
44
45
  title: "Initialize AIGNE",
45
46
  task: async (ctx) => {
46
- const aigne = await loadAIGNE(dir, options);
47
+ // Load env files in the aigne directory
48
+ config({ path: dir, silent: true });
49
+ const aigne = await loadAIGNE(dir, {
50
+ ...options,
51
+ model: options.model || process.env.MODEL,
52
+ });
47
53
  ctx.aigne = aigne;
48
54
  },
49
55
  },
@@ -91,10 +97,11 @@ export function createRunCommand() {
91
97
  .showSuggestionAfterError(true);
92
98
  }
93
99
  async function loadAIGNE(path, options) {
100
+ const models = availableModels();
94
101
  const model = options.model
95
- ? await loadModel(availableModels, parseModelOption(options.model))
102
+ ? await loadModel(models, parseModelOption(options.model))
96
103
  : undefined;
97
- return await AIGNE.load(path, { models: availableModels, memories: availableMemories, model });
104
+ return await AIGNE.load(path, { models, memories: availableMemories, model });
98
105
  }
99
106
  async function downloadPackage(url, cacheDir) {
100
107
  await rm(cacheDir, { recursive: true, force: true });
@@ -16,16 +16,17 @@ const DEFAULT_PORT = () => tryOrThrow(() => {
16
16
  export function createServeCommand() {
17
17
  return new Command("serve")
18
18
  .description("Serve the agents in the specified directory as a MCP server")
19
- .argument("[path]", "Path to the agents directory", ".")
19
+ .option("--url, --path <path_or_url>", "Path to the agents directory or URL to aigne project", ".")
20
20
  .option("--mcp", "Serve the agents as a MCP server")
21
21
  .option("--host <host>", "Host to run the MCP server on, use 0.0.0.0 to publicly expose the server", "localhost")
22
22
  .option("--port <port>", "Port to run the MCP server on", (s) => Number.parseInt(s))
23
23
  .option("--pathname <pathname>", "Pathname to the service", "/mcp")
24
- .action(async (path, options) => {
24
+ .action(async (options) => {
25
+ const { path } = options;
25
26
  const absolutePath = isAbsolute(path) ? path : resolve(process.cwd(), path);
26
27
  const port = options.port || DEFAULT_PORT();
27
28
  const aigne = await AIGNE.load(absolutePath, {
28
- models: availableModels,
29
+ models: availableModels(),
29
30
  memories: availableMemories,
30
31
  });
31
32
  if (options.mcp)
@@ -4,8 +4,9 @@ import { Command } from "commander";
4
4
  export function createTestCommand() {
5
5
  return new Command("test")
6
6
  .description("Run tests in the specified agents directory")
7
- .argument("[path]", "Path to the agents directory", ".")
8
- .action(async (path) => {
7
+ .option("--url, --path <path_or_url>", "Path to the agents directory or URL to aigne project", ".")
8
+ .action(async (options) => {
9
+ const { path } = options;
9
10
  const absolutePath = isAbsolute(path) ? path : resolve(process.cwd(), path);
10
11
  spawnSync("node", ["--test"], { cwd: absolutePath, stdio: "inherit" });
11
12
  })
@@ -1,7 +1,5 @@
1
1
  import { DefaultMemory } from "@aigne/agent-library/default-memory/index.js";
2
- import { AnthropicChatModel } from "@aigne/anthropic";
3
- import { BedrockChatModel } from "@aigne/bedrock";
4
- import { OpenAIChatModel } from "@aigne/openai";
2
+ import type { LoadableModel } from "@aigne/core/loader/index.js";
5
3
  export declare const AIGNE_CLI_VERSION: string;
6
- export declare const availableModels: (typeof OpenAIChatModel | typeof AnthropicChatModel | typeof BedrockChatModel)[];
4
+ export declare function availableModels(): LoadableModel[];
7
5
  export declare const availableMemories: (typeof DefaultMemory)[];
package/dist/constants.js CHANGED
@@ -7,16 +7,57 @@ import { OllamaChatModel } from "@aigne/ollama";
7
7
  import { OpenRouterChatModel } from "@aigne/open-router";
8
8
  import { OpenAIChatModel } from "@aigne/openai";
9
9
  import { XAIChatModel } from "@aigne/xai";
10
+ import { NodeHttpHandler, streamCollector } from "@smithy/node-http-handler";
11
+ import { HttpsProxyAgent } from "https-proxy-agent";
10
12
  import pkg from "../package.json" with { type: "json" };
11
13
  export const AIGNE_CLI_VERSION = pkg.version;
12
- export const availableModels = [
13
- OpenAIChatModel,
14
- AnthropicChatModel,
15
- BedrockChatModel,
16
- DeepSeekChatModel,
17
- GeminiChatModel,
18
- OllamaChatModel,
19
- OpenRouterChatModel,
20
- XAIChatModel,
21
- ];
14
+ export function availableModels() {
15
+ const proxy = ["HTTPS_PROXY", "https_proxy", "HTTP_PROXY", "http_proxy", "ALL_PROXY", "all_proxy"]
16
+ .map((i) => process.env[i])
17
+ .filter(Boolean)[0];
18
+ const httpAgent = proxy ? new HttpsProxyAgent(proxy) : undefined;
19
+ return [
20
+ {
21
+ name: OpenAIChatModel.name,
22
+ create: (params) => new OpenAIChatModel({ ...params, clientOptions: { httpAgent } }),
23
+ },
24
+ {
25
+ name: AnthropicChatModel.name,
26
+ create: (params) => new AnthropicChatModel({ ...params, clientOptions: { httpAgent } }),
27
+ },
28
+ {
29
+ name: BedrockChatModel.name,
30
+ create: (params) => new BedrockChatModel({
31
+ ...params,
32
+ clientOptions: {
33
+ requestHandler: NodeHttpHandler.create({
34
+ httpAgent,
35
+ httpsAgent: httpAgent,
36
+ }),
37
+ streamCollector,
38
+ },
39
+ }),
40
+ },
41
+ {
42
+ name: DeepSeekChatModel.name,
43
+ create: (params) => new DeepSeekChatModel({ ...params, clientOptions: { httpAgent } }),
44
+ },
45
+ {
46
+ name: GeminiChatModel.name,
47
+ create: (params) => new GeminiChatModel({ ...params, clientOptions: { httpAgent } }),
48
+ },
49
+ {
50
+ name: OllamaChatModel.name,
51
+ create: (params) => new OllamaChatModel({ ...params, clientOptions: { httpAgent } }),
52
+ },
53
+ {
54
+ name: OpenRouterChatModel.name,
55
+ create: (params) => new OpenRouterChatModel({ ...params, clientOptions: { httpAgent } }),
56
+ },
57
+ {
58
+ name: XAIChatModel.name,
59
+ create: (params) => new XAIChatModel({ ...params, clientOptions: { httpAgent } }),
60
+ },
61
+ ];
62
+ }
22
63
  export const availableMemories = [DefaultMemory];
@@ -20,7 +20,7 @@ export async function toAIGNEPackage(src, dst) {
20
20
  agents: [],
21
21
  };
22
22
  for (const agent of definition.agents) {
23
- const { content } = await assistantToAigneV2(agent, definition);
23
+ const { content } = await assistantToAIGNEV2(agent, definition);
24
24
  const filename = getAgentFilename(agent);
25
25
  await writeFile(join(dst, filename), content);
26
26
  aigne.agents.push(filename);
@@ -42,7 +42,7 @@ async function loadAgentV1Package(path) {
42
42
  }
43
43
  return definition;
44
44
  }
45
- function assistantToAigneV2(agent, project) {
45
+ function assistantToAIGNEV2(agent, project) {
46
46
  const converter = AGENT_MAP[agent.type];
47
47
  if (!converter)
48
48
  throw new Error(`Unsupported agent type: ${agent.type}`);
@@ -21,7 +21,9 @@ export const createRunAIGNECommand = (name = "run") => new Command(name)
21
21
  .allowExcessArguments(true)
22
22
  .description("Run agent with AIGNE in terminal")
23
23
  .option("--chat", "Run chat loop in terminal", false)
24
- .option("--model <provider[:model]>", `AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini'. Available providers: ${availableModels.map((i) => i.name.toLowerCase().replace(/ChatModel$/i, "")).join(", ")} (default: openai)`)
24
+ .option("--model <provider[:model]>", `AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini'. Available providers: ${availableModels()
25
+ .map((i) => i.name.toLowerCase().replace(/ChatModel$/i, ""))
26
+ .join(", ")} (default: openai)`)
25
27
  .option("--temperature <temperature>", "Temperature for the model (controls randomness, higher values produce more random outputs). Range: 0.0-2.0", customZodError("--temperature", (s) => z.coerce.number().min(0).max(2).parse(s)))
26
28
  .option("--top-p <top-p>", "Top P (nucleus sampling) parameter for the model (controls diversity). Range: 0.0-1.0", customZodError("--top-p", (s) => z.coerce.number().min(0).max(1).parse(s)))
27
29
  .option("--presence-penalty <presence-penalty>", "Presence penalty for the model (penalizes repeating the same tokens). Range: -2.0 to 2.0", customZodError("--presence-penalty", (s) => z.coerce.number().min(-2).max(2).parse(s)))
@@ -89,7 +91,7 @@ export async function parseAgentInputByCommander(agent, options = {}) {
89
91
  return input;
90
92
  }
91
93
  export const parseModelOption = (model) => {
92
- const { provider, name } = model?.match(/(?<provider>[^:]+)(:(?<name>(\S+)))?/)?.groups ?? {};
94
+ const { provider, name } = (model || process.env.MODEL)?.match(/(?<provider>[^:]+)(:(?<name>(\S+)))?/)?.groups ?? {};
93
95
  return { provider, name };
94
96
  };
95
97
  export async function runWithAIGNE(agentCreator, { argv = process.argv, chatLoopOptions, modelOptions, outputKey, } = {}) {
@@ -100,7 +102,7 @@ export async function runWithAIGNE(agentCreator, { argv = process.argv, chatLoop
100
102
  if (options.logLevel) {
101
103
  logger.level = options.logLevel;
102
104
  }
103
- const model = await loadModel(availableModels, {
105
+ const model = await loadModel(availableModels(), {
104
106
  ...parseModelOption(options.model),
105
107
  temperature: options.temperature,
106
108
  topP: options.topP,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aigne/cli",
3
- "version": "1.15.0",
3
+ "version": "1.17.0",
4
4
  "description": "cli for AIGNE framework",
5
5
  "publishConfig": {
6
6
  "access": "public"
@@ -38,11 +38,15 @@
38
38
  "@aigne/listr2": "^1.0.10",
39
39
  "@aigne/marked-terminal": "^7.3.1",
40
40
  "@modelcontextprotocol/sdk": "^1.11.0",
41
+ "@smithy/node-http-handler": "^4.0.6",
41
42
  "chalk": "^5.4.1",
42
43
  "commander": "^13.1.0",
44
+ "detect-port": "^2.1.0",
45
+ "dotenv-flow": "^4.1.0",
43
46
  "express": "^5.1.0",
44
47
  "glob": "^11.0.2",
45
48
  "gradient-string": "^3.0.0",
49
+ "https-proxy-agent": "^7.0.6",
46
50
  "inquirer": "^12.6.0",
47
51
  "marked": "^15.0.11",
48
52
  "prettier": "^3.5.3",
@@ -51,18 +55,17 @@
51
55
  "wrap-ansi": "^9.0.0",
52
56
  "yaml": "^2.7.1",
53
57
  "zod": "^3.24.4",
54
- "detect-port": "^2.1.0",
55
- "@aigne/agent-library": "^1.17.2",
56
- "@aigne/anthropic": "^0.3.9",
57
- "@aigne/deepseek": "^0.3.9",
58
- "@aigne/gemini": "^0.3.9",
59
- "@aigne/bedrock": "^0.3.9",
60
- "@aigne/ollama": "^0.3.9",
61
- "@aigne/core": "^1.25.0",
62
- "@aigne/observability": "^0.2.0",
63
- "@aigne/open-router": "^0.3.9",
64
- "@aigne/openai": "^0.4.2",
65
- "@aigne/xai": "^0.3.9"
58
+ "@aigne/agent-library": "^1.17.4",
59
+ "@aigne/bedrock": "^0.4.0",
60
+ "@aigne/anthropic": "^0.4.0",
61
+ "@aigne/core": "^1.27.0",
62
+ "@aigne/gemini": "^0.3.11",
63
+ "@aigne/ollama": "^0.3.11",
64
+ "@aigne/observability": "^0.3.0",
65
+ "@aigne/deepseek": "^0.3.11",
66
+ "@aigne/open-router": "^0.3.11",
67
+ "@aigne/openai": "^0.5.0",
68
+ "@aigne/xai": "^0.3.11"
66
69
  },
67
70
  "devDependencies": {
68
71
  "@types/archiver": "^6.0.3",
@@ -0,0 +1,18 @@
1
+ # Change the name of this file to .env.local and fill in the following values
2
+
3
+ # Use this for OpenAI models
4
+ MODEL="openai:gpt-4.1"
5
+ OPENAI_API_KEY=""
6
+
7
+ # Use this for Anthropic models
8
+ # MODEL="anthropic:claude-3-7-sonnet-latest"
9
+ # ANTHROPIC_API_KEY="" # Your Anthropic API key
10
+
11
+ # Use this for AWS Bedrock models
12
+ # AWS_ACCESS_KEY_ID=""
13
+ # AWS_SECRET_ACCESS_KEY=""
14
+ # AWS_REGION=us-west-2
15
+ # MODEL=Bedrock:us.amazon.nova-premier-v1:0
16
+
17
+ # Setup proxy if needed
18
+ # HTTPS_PROXY=http://localhost:7890
@@ -1,6 +1,6 @@
1
- # Aigne Default Template
1
+ # AIGNE Default Template
2
2
 
3
- This is the default project template for the Aigne framework, providing a basic chat agent and JavaScript code execution functionality.
3
+ This is the default project template for the AIGNE framework, providing a basic chat agent and JavaScript code execution functionality.
4
4
 
5
5
  ## Template Structure
6
6