@aigne/cli 1.29.0 → 1.30.0

This diff shows the published contents of two publicly available package versions from a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,32 @@
  # Changelog

+ ## [1.30.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.29.0...cli-v1.30.0) (2025-08-01)
+
+
+ ### Features
+
+ * **cli:** add `--model` option for aigne applications ([#302](https://github.com/AIGNE-io/aigne-framework/issues/302)) ([5d63743](https://github.com/AIGNE-io/aigne-framework/commit/5d63743b8a47be64fd49245983f4f2f9da3197a0))
+ * **cli:** add `upgrade` command for aigne app ([#299](https://github.com/AIGNE-io/aigne-framework/issues/299)) ([1bf461a](https://github.com/AIGNE-io/aigne-framework/commit/1bf461ab644b2d810ef81cd3092475496dfc7ddc))
+ * support google model and skip check mode when connected to Hub ([#300](https://github.com/AIGNE-io/aigne-framework/issues/300)) ([e992c0f](https://github.com/AIGNE-io/aigne-framework/commit/e992c0f3335a7c512fa807d5b8ad10c9c3bf2351))
+
+
+ ### Bug Fixes
+
+ * **cli:** indicator not responsive in macos terminal ([#304](https://github.com/AIGNE-io/aigne-framework/issues/304)) ([336f75b](https://github.com/AIGNE-io/aigne-framework/commit/336f75b8a7dfaf28d78e9a4cfcb4ac8c6a29c469))
+
+
+ ### Dependencies
+
+ * The following workspace dependencies were updated
+   * dependencies
+     * @aigne/agent-library bumped to 1.21.9
+     * @aigne/agentic-memory bumped to 1.0.9
+     * @aigne/aigne-hub bumped to 0.4.0
+     * @aigne/openai bumped to 0.10.9
+     * @aigne/core bumped to 1.42.0
+     * @aigne/default-memory bumped to 1.0.9
+     * @aigne/observability-api bumped to 0.9.0
+
  ## [1.29.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.28.0...cli-v1.29.0) (2025-07-31)


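The new CLI surface described in the Features list above can be exercised roughly as follows (a hedged sketch based only on the changelog entries and the `--model` option and `upgrade` command definitions further down in this diff; `<app>` and `<agent>` are placeholders for an installed AIGNE application and one of its CLI agents, not literal names):

  aigne <app> <agent> --model openai:gpt-4.1    # override the model for this run, e.g. openai:gpt-4.1 or google:gemini-2.5-flash
  aigne <app> upgrade                           # re-download the application when a newer version has been published
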
@@ -1,3 +1,4 @@
+ import chalk from "chalk";
  import yargs from "yargs";
  import { AIGNE_CLI_VERSION } from "../constants.js";
  import { asciiLogo } from "../utils/ascii-logo.js";
@@ -10,6 +11,7 @@ import { createServeMCPCommand } from "./serve-mcp.js";
  import { createTestCommand } from "./test.js";
  export function createAIGNECommand(options) {
  console.log(asciiLogo);
+ console.log(`${chalk.grey("TIPS:")} use ${chalk.greenBright("aigne observe")} to start the observability server.\n`);
  return yargs()
  .scriptName("aigne")
  .usage("CLI for AIGNE framework")
@@ -7,13 +7,16 @@ export declare function invokeCLIAgentFromDir(options: {
  input: Message & {
  input?: string[];
  format?: "yaml" | "json";
+ model?: string;
  };
  }): Promise<void>;
- export declare function loadApplication({ name, dir, }: {
+ export declare function loadApplication({ name, dir, forceUpgrade, }: {
  name: string;
  dir?: string;
+ forceUpgrade?: boolean;
  }): Promise<{
  aigne: AIGNE;
  dir: string;
  version: string;
+ isCache?: boolean;
  }>;
@@ -1,16 +1,16 @@
  import assert from "node:assert";
- import { spawnSync } from "node:child_process";
+ import { spawn } from "node:child_process";
  import { readFile, stat, writeFile } from "node:fs/promises";
  import { homedir } from "node:os";
  import { extname, join } from "node:path";
  import { isatty } from "node:tty";
+ import { loadModel } from "@aigne/aigne-hub";
  import { AIAgent, AIGNE, readAllString } from "@aigne/core";
  import { pick } from "@aigne/core/utils/type-utils.js";
  import { Listr, PRESET_TIMER } from "@aigne/listr2";
  import { joinURL } from "ufo";
  import { parse } from "yaml";
  import { ZodObject, ZodString } from "zod";
- import { availableModels } from "../constants.js";
  import { downloadAndExtract } from "../utils/download.js";
  import { loadAIGNE } from "../utils/load-aigne.js";
  import { runAgentWithAIGNE, stdinHasData } from "../utils/run-with-aigne.js";
@@ -29,8 +29,14 @@ export function createAppCommands() {
  describe: app.describe,
  aliases: app.aliases,
  builder: async (yargs) => {
- const { aigne, dir, version } = await loadApplication({ name: app.name });
- yargs.command(serveMcpCommandModule({ name: app.name, dir }));
+ const { aigne, dir, version, isCache } = await loadApplication({ name: app.name });
+ yargs
+ .option("model", {
+ type: "string",
+ description: "Model to use for the application, example: openai:gpt-4.1 or google:gemini-2.5-flash",
+ })
+ .command(serveMcpCommandModule({ name: app.name, dir }))
+ .command(upgradeCommandModule({ name: app.name, dir, isLatest: !isCache, version }));
  for (const agent of aigne.cli?.agents ?? []) {
  yargs.command(agentCommandModule({ dir, agent }));
  }
@@ -64,6 +70,20 @@ const serveMcpCommandModule = ({ name, dir, }) => ({
  await serveMCPServerFromDir({ ...options, dir });
  },
  });
+ const upgradeCommandModule = ({ name, dir, isLatest, version, }) => ({
+ command: "upgrade",
+ describe: `Upgrade ${name} to the latest version`,
+ handler: async () => {
+ if (!isLatest) {
+ const result = await loadApplication({ name, dir, forceUpgrade: true });
+ if (version !== result.version) {
+ console.log(`\n✅ Upgraded ${name} to version ${version}`);
+ return;
+ }
+ }
+ console.log(`\n✅ ${name} is already at the latest version (${version})`);
+ },
+ });
  const agentCommandModule = ({ dir, agent, }) => {
  const inputSchema = agent.inputSchema instanceof ZodObject ? agent.inputSchema.shape : {};
  return {
@@ -99,7 +119,7 @@ const agentCommandModule = ({ dir, agent, }) => {
  };
  };
  export async function invokeCLIAgentFromDir(options) {
- const aigne = await loadAIGNE(options.dir);
+ const aigne = await loadAIGNE(options.dir, { model: options.input.model });
  try {
  const agent = aigne.cli.agents[options.agent];
  assert(agent, `Agent ${options.agent} not found in ${options.dir}`);
@@ -158,27 +178,28 @@ async function readFileAsInput(value, { format } = {}) {
  }
  return value;
  }
- export async function loadApplication({ name, dir, }) {
+ export async function loadApplication({ name, dir, forceUpgrade = false, }) {
  name = `@aigne/${name}`;
  dir ??= join(homedir(), ".aigne", "registry.npmjs.org", name);
- const check = await isInstallationAvailable(dir);
+ const check = forceUpgrade ? undefined : await isInstallationAvailable(dir);
  if (check?.available) {
  return {
- aigne: await AIGNE.load(dir, { models: availableModels() }),
+ aigne: await AIGNE.load(dir, { loadModel }),
  dir,
  version: check.version,
+ isCache: true,
  };
  }
  const result = await new Listr([
  {
- title: "Fetching application metadata",
+ title: `Fetching ${name} metadata`,
  task: async (ctx) => {
  const info = await getNpmTgzInfo(name);
  Object.assign(ctx, info);
  },
  },
  {
- title: "Downloading application",
+ title: `Downloading ${name}`,
  skip: (ctx) => ctx.version === check?.version,
  task: async (ctx) => {
  await downloadAndExtract(ctx.url, dir, { strip: 1 });
@@ -199,7 +220,7 @@ export async function loadApplication({ name, dir, }) {
  },
  }).run();
  return {
- aigne: await AIGNE.load(dir, { models: availableModels() }),
+ aigne: await AIGNE.load(dir, { loadModel }),
  dir,
  version: result.version,
  };
@@ -219,14 +240,22 @@ async function isInstallationAvailable(dir, { cacheTimeMs = NPM_PACKAGE_CACHE_TI
  return { version, available };
  }
  async function installDependencies(dir) {
- const { stderr, status } = spawnSync("npm", ["install", "--omit", "dev"], {
- cwd: dir,
- stdio: "pipe",
+ await new Promise((resolve, reject) => {
+ const child = spawn("npm", ["install", "--omit", "dev"], { cwd: dir, stdio: "pipe" });
+ let stderr = "";
+ child.stderr.on("data", (data) => {
+ stderr += data.toString();
+ });
+ child.on("error", (error) => reject(error));
+ child.on("exit", (code) => {
+ if (code === 0)
+ resolve();
+ else {
+ console.error(stderr);
+ reject(new Error(`npm install failed with code ${code}`));
+ }
+ });
  });
- if (status !== 0) {
- console.error(stderr.toString());
- throw new Error(`Failed to install dependencies in ${dir}`);
- }
  await writeFile(join(dir, ".aigne-cli.json"), JSON.stringify({ installedAt: Date.now() }, null, 2));
  }
  async function getNpmTgzInfo(name) {
@@ -2,5 +2,12 @@ import type { CommandModule } from "yargs";
  interface ConnectOptions {
  url?: string;
  }
+ interface StatusInfo {
+ host: string;
+ apiUrl: string;
+ apiKey: string;
+ }
+ export declare function getConnectionStatus(): Promise<StatusInfo[]>;
+ export declare function displayStatus(statusList: StatusInfo[]): Promise<void>;
  export declare function createConnectCommand(): CommandModule<unknown, ConnectOptions>;
  export {};
@@ -4,7 +4,11 @@ import chalk from "chalk";
  import { parse } from "yaml";
  import { getUserInfo } from "../utils/aigne-hub-user.js";
  import { AIGNE_ENV_FILE, connectToAIGNEHub } from "../utils/load-aigne.js";
- async function getConnectionStatus() {
+ const formatNumber = (balance) => {
+ const balanceNum = String(balance).split(".")[0];
+ return chalk.yellow((balanceNum || "").replace(/\B(?=(\d{3})+(?!\d))/g, ","));
+ };
+ export async function getConnectionStatus() {
  if (!existsSync(AIGNE_ENV_FILE)) {
  return [];
  }
@@ -25,7 +29,7 @@ async function getConnectionStatus() {
  return [];
  }
  }
- async function displayStatus(statusList) {
+ export async function displayStatus(statusList) {
  if (statusList.length === 0) {
  console.log(chalk.yellow("No AIGNE Hub connections found."));
  console.log("Use 'aigne connect <url>' to connect to a hub.");
@@ -46,11 +50,16 @@ async function displayStatus(statusList) {
  console.log(` Status: ${statusText}`);
  if (userInfo) {
  console.log(` User: ${userInfo?.user.fullName}`);
- console.log(` Email: ${userInfo?.user.email}`);
+ console.log(` User DID: ${userInfo?.user.did}`);
+ if (userInfo?.user.email) {
+ console.log(` Email: ${userInfo?.user.email}`);
+ }
  if (userInfo?.creditBalance) {
- console.log(` Plan: ${userInfo?.creditBalance?.balance}/${userInfo?.creditBalance?.total}`);
+ const balance = formatNumber(userInfo?.creditBalance?.balance);
+ const total = formatNumber(userInfo?.creditBalance?.total);
+ console.log(` Plan: ${balance} / ${total}`);
  }
- console.log(` Billing URL: ${userInfo?.paymentLink}`);
+ console.log(` Billing URL: ${userInfo?.paymentLink ? chalk.green(userInfo.paymentLink) : chalk.red("N/A")}`);
  }
  console.log("");
  }
@@ -1,5 +1,3 @@
- import type { LoadableModel } from "@aigne/core/loader/index.js";
  import { DefaultMemory } from "@aigne/default-memory";
  export declare const AIGNE_CLI_VERSION: any;
- export declare function availableModels(): LoadableModel[];
  export declare const availableMemories: (typeof DefaultMemory)[];
package/dist/constants.js CHANGED
@@ -1,82 +1,6 @@
  import { createRequire } from "node:module";
  import { AgenticMemory } from "@aigne/agentic-memory";
- import { AIGNEHubChatModel } from "@aigne/aigne-hub";
- import { AnthropicChatModel } from "@aigne/anthropic";
- import { BedrockChatModel } from "@aigne/bedrock";
- import { DeepSeekChatModel } from "@aigne/deepseek";
  import { DefaultMemory } from "@aigne/default-memory";
- import { GeminiChatModel } from "@aigne/gemini";
- import { OllamaChatModel } from "@aigne/ollama";
- import { OpenRouterChatModel } from "@aigne/open-router";
- import { OpenAIChatModel } from "@aigne/openai";
- import { XAIChatModel } from "@aigne/xai";
- import { NodeHttpHandler, streamCollector } from "@smithy/node-http-handler";
- import { HttpsProxyAgent } from "https-proxy-agent";
  const require = createRequire(import.meta.url);
  export const AIGNE_CLI_VERSION = require("../package.json").version;
- export function availableModels() {
- const proxy = ["HTTPS_PROXY", "https_proxy", "HTTP_PROXY", "http_proxy", "ALL_PROXY", "all_proxy"]
- .map((i) => process.env[i])
- .filter(Boolean)[0];
- const httpAgent = proxy ? new HttpsProxyAgent(proxy) : undefined;
- const clientOptions = {
- fetchOptions: {
- // @ts-ignore
- agent: httpAgent,
- },
- };
- return [
- {
- name: OpenAIChatModel.name,
- apiKeyEnvName: "OPENAI_API_KEY",
- create: (params) => new OpenAIChatModel({ ...params, clientOptions }),
- },
- {
- name: AnthropicChatModel.name,
- apiKeyEnvName: "ANTHROPIC_API_KEY",
- create: (params) => new AnthropicChatModel({ ...params, clientOptions }),
- },
- {
- name: BedrockChatModel.name,
- apiKeyEnvName: "AWS_ACCESS_KEY_ID",
- create: (params) => new BedrockChatModel({
- ...params,
- clientOptions: {
- requestHandler: NodeHttpHandler.create({ httpAgent, httpsAgent: httpAgent }),
- streamCollector,
- },
- }),
- },
- {
- name: DeepSeekChatModel.name,
- apiKeyEnvName: "DEEPSEEK_API_KEY",
- create: (params) => new DeepSeekChatModel({ ...params, clientOptions }),
- },
- {
- name: GeminiChatModel.name,
- apiKeyEnvName: "GEMINI_API_KEY",
- create: (params) => new GeminiChatModel({ ...params, clientOptions }),
- },
- {
- name: OllamaChatModel.name,
- apiKeyEnvName: "OLLAMA_API_KEY",
- create: (params) => new OllamaChatModel({ ...params, clientOptions }),
- },
- {
- name: OpenRouterChatModel.name,
- apiKeyEnvName: "OPEN_ROUTER_API_KEY",
- create: (params) => new OpenRouterChatModel({ ...params, clientOptions }),
- },
- {
- name: XAIChatModel.name,
- apiKeyEnvName: "XAI_API_KEY",
- create: (params) => new XAIChatModel({ ...params, clientOptions }),
- },
- {
- name: AIGNEHubChatModel.name,
- apiKeyEnvName: "AIGNE_HUB_API_KEY",
- create: (params) => new AIGNEHubChatModel({ ...params, clientOptions }),
- },
- ];
- }
  export const availableMemories = [DefaultMemory, AgenticMemory];
@@ -1,5 +1,5 @@
+ import { availableModels } from "@aigne/aigne-hub";
  import { AIGNE } from "@aigne/core";
- import type { LoadableModel } from "@aigne/core/loader/index.js";
  import inquirer from "inquirer";
  import { type RunAIGNECommandOptions } from "./run-with-aigne.js";
  export declare const decrypt: (m: string, s: string, i: string) => string;
@@ -40,7 +40,7 @@ interface CreateConnectOptions {
  }) => Promise<FetchResult>;
  }
  export declare function createConnect({ connectUrl, openPage, fetchInterval, retry, source, connectAction, wrapSpinner, closeOnSuccess, intervalFetchConfig, }: CreateConnectOptions): Promise<FetchResult>;
- export declare const formatModelName: (models: LoadableModel[], model: string, inquirerPrompt: typeof inquirer.prompt) => Promise<string>;
+ export declare const formatModelName: (models: ReturnType<typeof availableModels>, model: string, inquirerPrompt: typeof inquirer.prompt) => Promise<string>;
  export declare function connectToAIGNEHub(url: string): Promise<{
  accessKey: string;
  url: string;
@@ -48,7 +48,11 @@ export declare function connectToAIGNEHub(url: string): Promise<{
  accessKey: undefined;
  url: undefined;
  }>;
- export declare function loadAIGNE(path: string, options?: RunOptions, actionOptions?: {
+ export declare const checkConnectionStatus: (host: string) => Promise<{
+ apiKey: any;
+ url: string;
+ }>;
+ export declare function loadAIGNE(path: string, options?: Pick<RunOptions, "model">, actionOptions?: {
  inquirerPromptFn?: (prompt: {
  type: string;
  name: string;
@@ -2,8 +2,9 @@ import { existsSync, mkdirSync } from "node:fs";
  import { readFile, writeFile } from "node:fs/promises";
  import { homedir } from "node:os";
  import { join } from "node:path";
+ import { availableModels, findModel, loadModel } from "@aigne/aigne-hub";
  import { AIGNE } from "@aigne/core";
- import { loadAIGNEFile, loadModel } from "@aigne/core/loader/index.js";
+ import { loadAIGNEFile } from "@aigne/core/loader/index.js";
  import { logger } from "@aigne/core/utils/logger.js";
  import { AesCrypter } from "@ocap/mcrypto/lib/crypter/aes-legacy.js";
  import crypto from "crypto";
@@ -12,7 +13,7 @@ import open from "open";
  import pWaitFor from "p-wait-for";
  import { joinURL, withQuery } from "ufo";
  import { parse, stringify } from "yaml";
- import { availableMemories, availableModels } from "../constants.js";
+ import { availableMemories } from "../constants.js";
  import { parseModelOption } from "./run-with-aigne.js";
  const aes = new AesCrypter();
  export const decrypt = (m, s, i) => aes.decrypt(m, crypto.pbkdf2Sync(i, s, 256, 32, "sha512").toString("hex"));
@@ -104,10 +105,11 @@ export const formatModelName = async (models, model, inquirerPrompt) => {
  if (providerName.includes(AGENT_HUB_PROVIDER)) {
  return model;
  }
- const m = models.find((m) => m.name.toLowerCase().includes(providerName.toLowerCase()));
+ const m = findModel(models, providerName);
  if (!m)
  throw new Error(`Unsupported model: ${provider} ${name}`);
- if (m.apiKeyEnvName && process.env[m.apiKeyEnvName]) {
+ const apiKeyEnvName = Array.isArray(m.apiKeyEnvName) ? m.apiKeyEnvName : [m.apiKeyEnvName];
+ if (apiKeyEnvName.some((name) => name && process.env[name])) {
  return model;
  }
  if (TEST_ENV) {
@@ -123,7 +125,7 @@ export const formatModelName = async (models, model, inquirerPrompt) => {
  value: true,
  },
  {
- name: `Exit and bring my owner API Key by set ${m.apiKeyEnvName}`,
+ name: `Exit and bring my owner API Key by set ${apiKeyEnvName.join(", ")}`,
  value: false,
  },
  ],
@@ -175,42 +177,48 @@ export async function connectToAIGNEHub(url) {
  return { accessKey: undefined, url: undefined };
  }
  }
+ export const checkConnectionStatus = async (host) => {
+ // aigne-hub access token
+ if (!existsSync(AIGNE_ENV_FILE)) {
+ throw new Error("AIGNE_HUB_API_KEY file not found, need to login first");
+ }
+ const data = await readFile(AIGNE_ENV_FILE, "utf8");
+ if (!data.includes("AIGNE_HUB_API_KEY")) {
+ throw new Error("AIGNE_HUB_API_KEY key not found, need to login first");
+ }
+ const envs = parse(data);
+ if (!envs[host]) {
+ throw new Error("AIGNE_HUB_API_KEY host not found, need to login first");
+ }
+ const env = envs[host];
+ if (!env.AIGNE_HUB_API_KEY) {
+ throw new Error("AIGNE_HUB_API_KEY key not found, need to login first");
+ }
+ return {
+ apiKey: env.AIGNE_HUB_API_KEY,
+ url: joinURL(env.AIGNE_HUB_API_URL),
+ };
+ };
+ const mockInquirerPrompt = (() => Promise.resolve({ useAigneHub: true }));
  export async function loadAIGNE(path, options, actionOptions) {
  const models = availableModels();
  const AIGNE_HUB_URL = process.env.AIGNE_HUB_API_URL || DEFAULT_URL;
  const connectUrl = joinURL(new URL(AIGNE_HUB_URL).origin, WELLKNOWN_SERVICE_PATH_PREFIX);
  const inquirerPrompt = (actionOptions?.inquirerPromptFn ??
  inquirer.prompt);
+ const { host } = new URL(AIGNE_HUB_URL);
  const { aigne } = await loadAIGNEFile(path).catch(() => ({ aigne: null }));
- let accessKeyOptions = {};
- const modelName = await formatModelName(models, options?.model || `${aigne?.model?.provider ?? ""}:${aigne?.model?.name ?? ""}`, inquirerPrompt);
+ const result = await checkConnectionStatus(host).catch(() => null);
+ const alreadyConnected = Boolean(result?.apiKey);
+ const modelName = await formatModelName(models, options?.model || `${aigne?.model?.provider ?? ""}:${aigne?.model?.name ?? ""}`, alreadyConnected ? mockInquirerPrompt : inquirerPrompt);
+ let credential = {};
  if (TEST_ENV && !actionOptions?.runTest) {
- const model = await loadModel(models, parseModelOption(modelName), undefined, accessKeyOptions);
- return await AIGNE.load(path, { models, memories: availableMemories, model });
+ const model = await loadModel(parseModelOption(modelName));
+ return await AIGNE.load(path, { loadModel, memories: availableMemories, model });
  }
  if ((modelName.toLocaleLowerCase() || "").includes(AGENT_HUB_PROVIDER)) {
- const { host } = new URL(AIGNE_HUB_URL);
  try {
- // aigne-hub access token
- if (!existsSync(AIGNE_ENV_FILE)) {
- throw new Error("AIGNE_HUB_API_KEY file not found, need to login first");
- }
- const data = await readFile(AIGNE_ENV_FILE, "utf8");
- if (!data.includes("AIGNE_HUB_API_KEY")) {
- throw new Error("AIGNE_HUB_API_KEY key not found, need to login first");
- }
- const envs = parse(data);
- if (!envs[host]) {
- throw new Error("AIGNE_HUB_API_KEY host not found, need to login first");
- }
- const env = envs[host];
- if (!env.AIGNE_HUB_API_KEY) {
- throw new Error("AIGNE_HUB_API_KEY key not found, need to login first");
- }
- accessKeyOptions = {
- accessKey: env.AIGNE_HUB_API_KEY,
- url: joinURL(env.AIGNE_HUB_API_URL),
- };
+ credential = await checkConnectionStatus(host);
  }
  catch (error) {
  if (error instanceof Error && error.message.includes("login first")) {
@@ -232,10 +240,10 @@ export async function loadAIGNE(path, options, actionOptions) {
  console.warn("The AIGNE Hub connection has been cancelled");
  process.exit(0);
  }
- accessKeyOptions = await connectToAIGNEHub(connectUrl);
+ credential = await connectToAIGNEHub(connectUrl);
  }
  }
  }
- const model = await loadModel(models, parseModelOption(modelName), undefined, accessKeyOptions);
- return await AIGNE.load(path, { models, memories: availableMemories, model });
+ const model = await loadModel(parseModelOption(modelName), undefined, credential);
+ return await AIGNE.load(path, { loadModel, memories: availableMemories, model });
  }
@@ -4,8 +4,8 @@ import { dirname, isAbsolute, join } from "node:path";
  import { isatty } from "node:tty";
  import { promisify } from "node:util";
  import { exists } from "@aigne/agent-library/utils/fs.js";
+ import { availableModels, loadModel } from "@aigne/aigne-hub";
  import { AIAgent, AIGNE, DEFAULT_OUTPUT_KEY, readAllString, UserAgent, } from "@aigne/core";
- import { loadModel } from "@aigne/core/loader/index.js";
  import { getLevelFromEnv, LogLevel, logger } from "@aigne/core/utils/logger.js";
  import { flat, isEmpty, tryOrThrow } from "@aigne/core/utils/type-utils.js";
  import chalk from "chalk";
@@ -13,7 +13,6 @@ import { parse } from "yaml";
  import yargs from "yargs";
  import { hideBin } from "yargs/helpers";
  import { ZodError, ZodObject, z } from "zod";
- import { availableModels } from "../constants.js";
  import { TerminalTracer } from "../tracer/terminal.js";
  import { DEFAULT_CHAT_INPUT_KEY, runChatLoopInTerminal, } from "./run-chat-loop.js";
  export const createRunAIGNECommand = (yargs) => yargs
@@ -24,7 +23,12 @@ export const createRunAIGNECommand = (yargs) => yargs
  })
  .option("model", {
  describe: `AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini'. Available providers: ${availableModels()
- .map((i) => i.name.toLowerCase().replace(/ChatModel$/i, ""))
+ .map((i) => {
+ if (typeof i.name === "string") {
+ return i.name.toLowerCase().replace(/ChatModel$/i, "");
+ }
+ return i.name.map((n) => n.toLowerCase().replace(/ChatModel$/i, ""));
+ })
  .join(", ")} (default: openai)`,
  type: "string",
  })
@@ -129,7 +133,7 @@ export async function runWithAIGNE(agentCreator, { argv = process.argv, chatLoop
  if (options.logLevel) {
  logger.level = options.logLevel;
  }
- const model = await loadModel(availableModels(), {
+ const model = await loadModel({
  ...parseModelOption(options.model),
  temperature: options.temperature,
  topP: options.topP,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aigne/cli",
- "version": "1.29.0",
+ "version": "1.30.0",
  "description": "cli for AIGNE framework",
  "publishConfig": {
  "access": "public"
@@ -70,20 +70,13 @@
  "yaml": "^2.8.0",
  "yargs": "^18.0.0",
  "zod": "^3.25.67",
- "@aigne/agent-library": "^1.21.8",
- "@aigne/aigne-hub": "^0.3.2",
- "@aigne/anthropic": "^0.10.4",
- "@aigne/bedrock": "^0.8.8",
- "@aigne/core": "^1.41.0",
- "@aigne/deepseek": "^0.7.8",
- "@aigne/agentic-memory": "^1.0.8",
- "@aigne/default-memory": "^1.0.8",
- "@aigne/gemini": "^0.8.8",
- "@aigne/observability-api": "^0.8.2",
- "@aigne/ollama": "^0.7.8",
- "@aigne/open-router": "^0.7.8",
- "@aigne/openai": "^0.10.8",
- "@aigne/xai": "^0.7.8"
+ "@aigne/agent-library": "^1.21.9",
+ "@aigne/agentic-memory": "^1.0.9",
+ "@aigne/aigne-hub": "^0.4.0",
+ "@aigne/openai": "^0.10.9",
+ "@aigne/default-memory": "^1.0.9",
+ "@aigne/core": "^1.42.0",
+ "@aigne/observability-api": "^0.9.0"
  },
  "devDependencies": {
  "@types/archiver": "^6.0.3",