jinzd-ai-cli 0.4.34 → 0.4.36

@@ -7,7 +7,7 @@ import {
   ProviderNotFoundError,
   RateLimitError,
   schemaToJsonSchema
-} from "./chunk-XBSFS4Z3.js";
+} from "./chunk-3G3R4NFG.js";
 import {
   APP_NAME,
   CONFIG_DIR_NAME,
@@ -20,7 +20,7 @@ import {
   MCP_TOOL_PREFIX,
   PLUGINS_DIR_NAME,
   VERSION
-} from "./chunk-YFXZ275D.js";
+} from "./chunk-NCPGLZIQ.js";
 
 // src/config/config-manager.ts
 import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
@@ -2100,13 +2100,16 @@ var OllamaProvider = class extends OpenAICompatibleProvider {
   defaultBaseUrl = "http://localhost:11434/v1";
   defaultTimeout = 12e4;
   // local inference can be slow; default is 2 minutes
-  /** Dynamic model list, fetched from Ollama during initialize */
+  /** Dynamic model list */
   dynamicModels = [];
+  /** Whether a connection to Ollama has ever succeeded */
+  connected = false;
+  /** Base URL (without /v1), used in error messages */
+  ollamaHost = "http://localhost:11434";
   info = {
     id: "ollama",
     displayName: "Ollama (Local)",
     defaultModel: "",
-    // set dynamically
     apiKeyEnvVar: "",
     requiresApiKey: false,
     baseUrl: this.defaultBaseUrl,
@@ -2114,81 +2117,65 @@ var OllamaProvider = class extends OpenAICompatibleProvider {
   };
   async initialize(apiKey, options) {
     const baseUrl = options?.baseUrl ?? this.defaultBaseUrl;
-    const ollamaHost = baseUrl.replace(/\/v1\/?$/, "");
-    this.dynamicModels = await this.fetchModels(ollamaHost);
-    if (this.dynamicModels.length === 0) {
-      throw new Error("Ollama is running but no models are installed. Run `ollama pull <model>` to install one.");
-    }
-    const preferred = ["llama3.1", "llama3", "qwen2.5", "qwen2", "deepseek-r1", "mistral", "gemma2"];
-    const defaultModel = this.dynamicModels.find(
-      (m) => preferred.some((p) => m.id.startsWith(p))
-    )?.id ?? this.dynamicModels[0].id;
-    Object.assign(this.info, {
-      defaultModel,
-      baseUrl,
-      models: this.dynamicModels
-    });
+    this.ollamaHost = baseUrl.replace(/\/v1\/?$/, "");
     await super.initialize(apiKey || "ollama", { ...options, baseUrl });
+    await this.tryConnect();
   }
   /**
-   * Fetch the local model list from Ollama's /api/tags.
-   * Throws if Ollama is not running.
+   * Try to refresh the Ollama model list via the OpenAI client's models.list().
+   * Returns true on success, false on failure (does not throw).
    */
-  async fetchModels(ollamaHost) {
-    const url = `${ollamaHost}/api/tags`;
-    let response;
+  async tryConnect() {
     try {
-      response = await fetch(url, { signal: AbortSignal.timeout(5e3) });
-    } catch {
-      throw new Error(
-        `Cannot connect to Ollama at ${ollamaHost}. Make sure Ollama is running (https://ollama.com).`
-      );
-    }
-    if (!response.ok) {
-      throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
-    }
-    const data = await response.json();
-    if (!data.models || data.models.length === 0) {
-      return [];
-    }
-    return data.models.map((m) => {
-      const paramSize = m.details?.parameter_size ?? "";
-      const sizeGB = (m.size / 1e9).toFixed(1);
-      return {
-        id: m.name,
-        displayName: `${m.name} (${paramSize || sizeGB + "GB"})`,
-        contextWindow: this.estimateContextWindow(m.name),
+      const response = await this.client.models.list();
+      const modelIds = response.data.map((m) => m.id);
+      if (modelIds.length === 0) {
+        this.connected = false;
+        return false;
+      }
+      this.dynamicModels = modelIds.map((id) => ({
+        id,
+        displayName: id,
+        contextWindow: this.estimateContextWindow(id),
         supportsStreaming: true,
         supportsThinking: false
-      };
-    });
+      }));
+      const preferred = ["llama3.1", "llama3", "qwen2.5", "qwen3", "qwen2", "deepseek-r1", "mistral", "gemma2", "gemma3"];
+      const defaultModel = this.dynamicModels.find(
+        (m) => preferred.some((p) => m.id.startsWith(p))
+      )?.id ?? this.dynamicModels[0].id;
+      Object.assign(this.info, {
+        defaultModel,
+        models: this.dynamicModels
+      });
+      this.connected = true;
+      return true;
+    } catch {
+      this.connected = false;
+      return false;
+    }
   }
-  /** Estimate the context window from the model name (Ollama model metadata does not include it) */
+  /** Estimate the context window from the model name */
   estimateContextWindow(modelName) {
     const name = modelName.toLowerCase();
     if (name.includes("llama3") || name.includes("llama-3")) return 131072;
     if (name.includes("qwen2.5") || name.includes("qwen3")) return 131072;
     if (name.includes("qwen2") || name.includes("qwen-2")) return 32768;
     if (name.includes("deepseek")) return 65536;
-    if (name.includes("gemma2") || name.includes("gemma-2")) return 8192;
+    if (name.includes("gemma2") || name.includes("gemma-2") || name.includes("gemma3")) return 8192;
     if (name.includes("mistral")) return 32768;
     if (name.includes("phi")) return 16384;
     if (name.includes("codellama") || name.includes("code-llama")) return 16384;
     return 8192;
   }
-  /** Dynamically return the models currently installed in Ollama */
+  /** Refresh the model list on demand */
   async listModels() {
+    await this.tryConnect();
     return this.dynamicModels.length > 0 ? this.dynamicModels : this.info.models;
   }
   /** Ollama needs no API key validation; just check that the service is reachable */
   async validateApiKey() {
-    try {
-      const ollamaHost = this.info.baseUrl.replace(/\/v1\/?$/, "");
-      await fetch(`${ollamaHost}/api/tags`, { signal: AbortSignal.timeout(5e3) });
-      return true;
-    } catch {
-      return false;
-    }
+    return this.tryConnect();
   }
 };
 
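Net effect of this hunk: `initialize()` no longer throws when Ollama is unreachable or has no models pulled. Model discovery moves out of a hand-rolled `fetch` against Ollama's native `/api/tags` and into `tryConnect()`, which reuses the inherited OpenAI-compatible client (`this.client.models.list()`, i.e. `GET /v1/models`) and records the outcome in `connected` instead of raising. Below is a minimal standalone sketch of that lazy-connect pattern, assuming the `openai` npm client; the `LazyOllama` class and field names are illustrative, not part of this package:

```ts
import OpenAI from "openai";

// Sketch of the tryConnect() pattern from the hunk above (illustrative names).
class LazyOllama {
  client = new OpenAI({
    baseURL: "http://localhost:11434/v1",
    apiKey: "ollama", // Ollama ignores the key; the client just requires one
  });
  connected = false;
  modelIds: string[] = [];

  /** Refresh the model list; report failure instead of throwing. */
  async tryConnect(): Promise<boolean> {
    try {
      const page = await this.client.models.list(); // GET /v1/models
      this.modelIds = page.data.map((m) => m.id);
      this.connected = this.modelIds.length > 0;
    } catch {
      this.connected = false; // daemon down, wrong host, etc.
    }
    return this.connected;
  }
}

// Startup no longer aborts when the daemon is down; callers simply
// re-check on demand, as the patched listModels() now does.
const ollama = new LazyOllama();
if (!(await ollama.tryConnect())) {
  console.error("Ollama not reachable; start it with `ollama serve`.");
}
```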
@@ -9,7 +9,7 @@ import {
   SUBAGENT_DEFAULT_MAX_ROUNDS,
   SUBAGENT_MAX_ROUNDS_LIMIT,
   runTestsTool
-} from "./chunk-YFXZ275D.js";
+} from "./chunk-NCPGLZIQ.js";
 
 // src/tools/builtin/bash.ts
 import { execSync } from "child_process";
@@ -6,7 +6,7 @@ import { platform } from "os";
 import chalk from "chalk";
 
 // src/core/constants.ts
-var VERSION = "0.4.34";
+var VERSION = "0.4.36";
 var APP_NAME = "ai-cli";
 var CONFIG_DIR_NAME = ".aicli";
 var CONFIG_FILE_NAME = "config.json";
@@ -8,7 +8,7 @@ import { platform } from "os";
 import chalk from "chalk";
 
 // src/core/constants.ts
-var VERSION = "0.4.34";
+var VERSION = "0.4.36";
 var APP_NAME = "ai-cli";
 var CONFIG_DIR_NAME = ".aicli";
 var CONFIG_FILE_NAME = "config.json";
@@ -387,7 +387,7 @@ ${content}`);
   }
 }
 async function runTaskMode(config, providers, configManager, topic) {
-  const { TaskOrchestrator } = await import("./task-orchestrator-7BBSI7N6.js");
+  const { TaskOrchestrator } = await import("./task-orchestrator-6AVTEKH4.js");
   const orchestrator = new TaskOrchestrator(config, providers, configManager);
   let interrupted = false;
   const onSigint = () => {
package/dist/index.js CHANGED
@@ -24,7 +24,7 @@ import {
   saveDevState,
   sessionHasMeaningfulContent,
   setupProxy
-} from "./chunk-G4YXWCA2.js";
+} from "./chunk-2WCIKGEH.js";
 import {
   ToolExecutor,
   ToolRegistry,
@@ -37,7 +37,7 @@ import {
   spawnAgentContext,
   theme,
   undoStack
-} from "./chunk-XBSFS4Z3.js";
+} from "./chunk-3G3R4NFG.js";
 import {
   fileCheckpoints
 } from "./chunk-4BKXL7SM.js";
@@ -61,7 +61,7 @@ import {
   SKILLS_DIR_NAME,
   VERSION,
   buildUserIdentityPrompt
-} from "./chunk-YFXZ275D.js";
+} from "./chunk-NCPGLZIQ.js";
 
 // src/index.ts
 import { program } from "commander";
@@ -978,6 +978,16 @@ function createDefaultCommands() {
     ctx.renderer.renderError(`Provider '${targetId}' is not configured. Run: ai-cli config`);
     return;
   }
+  const targetProvider = ctx.providers.get(targetId);
+  if (targetProvider.info.models.length === 0) {
+    const models = await targetProvider.listModels();
+    if (models.length === 0) {
+      ctx.renderer.renderError(
+        `Provider '${targetId}' has no available models. ` + (targetId === "ollama" ? "Make sure Ollama is running (`ollama serve`) and has models (`ollama pull <model>`)." : "Check provider configuration.")
+      );
+      return;
+    }
+  }
   if (targetId === ctx.getCurrentProvider()) {
     ctx.renderer.printInfo(`Already using provider: ${targetId}`);
     return;
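This guard is the flip side of the provider change above: since `initialize()` no longer throws, `/provider ollama` can now be invoked while Ollama is down, so the command retries discovery once via `listModels()` (which re-runs `tryConnect()`) and refuses the switch only if the list is still empty. The same check in isolation might look like the sketch below; the `Provider` interface is a minimal shape inferred from the diff, not the package's real type:

```ts
// Minimal provider shape inferred from the diff (illustrative).
interface Provider {
  info: { models: { id: string }[] };
  listModels(): Promise<{ id: string }[]>;
}

// Allow a switch only if the provider has models, refreshing once when the
// cached info.models snapshot is empty (e.g. Ollama started after the CLI).
async function canSwitchTo(provider: Provider): Promise<boolean> {
  if (provider.info.models.length > 0) return true;
  const models = await provider.listModels();
  return models.length > 0;
}
```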
@@ -1003,8 +1013,9 @@
   let targetModel;
   if (!args[0]) {
     const provider = ctx.providers.get(ctx.getCurrentProvider());
+    const models = await provider.listModels();
     const currentModel = ctx.getCurrentModel();
-    const items = provider.info.models.map((m) => ({
+    const items = models.map((m) => ({
       value: m.id,
       label: m.id,
       hint: [
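`/model` gets the matching fix: the picker is built from `await provider.listModels()` rather than the `info.models` snapshot taken at startup, so a model pulled after the CLI launched shows up without a restart. Roughly, with the same inferred `Provider` shape as above:

```ts
// Build picker items from a live query instead of the startup snapshot.
async function modelPickerItems(provider: Provider) {
  return (await provider.listModels()).map((m) => ({ value: m.id, label: m.id }));
}
```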
@@ -2087,7 +2098,7 @@ ${hint}` : "")
   usage: "/test [command|filter]",
   async execute(args, ctx) {
     try {
-      const { executeTests } = await import("./run-tests-G3MMJS6Y.js");
+      const { executeTests } = await import("./run-tests-2PI4DIQM.js");
       const argStr = args.join(" ").trim();
       let testArgs = {};
       if (argStr) {
@@ -5458,7 +5469,7 @@ program.command("web").description("Start Web UI server with browser-based chat
     console.error("Error: Invalid port number. Must be between 1 and 65535.");
     process.exit(1);
   }
-  const { startWebServer } = await import("./server-4EPEVN6P.js");
+  const { startWebServer } = await import("./server-HDJXB7RH.js");
   await startWebServer({ port, host: options.host });
 });
 program.command("user [action] [username]").description("Manage Web UI users (list | create <name> | delete <name> | reset-password <name> | migrate <name>)").action(async (action, username) => {
@@ -5691,7 +5702,7 @@ program.command("hub [topic]").description("Start multi-agent hub (discuss / bra
     }),
     config.get("customProviders")
   );
-  const { startHub } = await import("./hub-WISCPSXJ.js");
+  const { startHub } = await import("./hub-EEZCFZHN.js");
   await startHub(
     {
       topic: topic ?? "",
@@ -2,7 +2,7 @@
 import {
   executeTests,
   runTestsTool
-} from "./chunk-YFXZ275D.js";
+} from "./chunk-NCPGLZIQ.js";
 export {
   executeTests,
   runTestsTool
@@ -1,7 +1,7 @@
 import {
   executeTests,
   runTestsTool
-} from "./chunk-3T6UI6JT.js";
+} from "./chunk-7E7OF7CV.js";
 export {
   executeTests,
   runTestsTool
@@ -15,7 +15,7 @@ import {
   hadPreviousWriteToolCalls,
   loadDevState,
   setupProxy
-} from "./chunk-G4YXWCA2.js";
+} from "./chunk-2WCIKGEH.js";
 import {
   AuthManager
 } from "./chunk-BYNY5JPB.js";
@@ -33,7 +33,7 @@ import {
   spawnAgentContext,
   truncateOutput,
   undoStack
-} from "./chunk-XBSFS4Z3.js";
+} from "./chunk-3G3R4NFG.js";
 import "./chunk-4BKXL7SM.js";
 import {
   AGENTIC_BEHAVIOR_GUIDELINE,
@@ -52,7 +52,7 @@ import {
   SKILLS_DIR_NAME,
   VERSION,
   buildUserIdentityPrompt
-} from "./chunk-YFXZ275D.js";
+} from "./chunk-NCPGLZIQ.js";
 
 // src/web/server.ts
 import express from "express";
@@ -1606,7 +1606,7 @@ ${undoResults.map((r) => ` \u2022 ${r}`).join("\n")}` });
   case "test": {
     this.send({ type: "info", message: "\u{1F9EA} Running tests..." });
     try {
-      const { executeTests } = await import("./run-tests-G3MMJS6Y.js");
+      const { executeTests } = await import("./run-tests-2PI4DIQM.js");
       const argStr = args.join(" ").trim();
       let testArgs = {};
       if (argStr) {
@@ -4,11 +4,11 @@ import {
   getDangerLevel,
   googleSearchContext,
   truncateOutput
-} from "./chunk-XBSFS4Z3.js";
+} from "./chunk-3G3R4NFG.js";
 import "./chunk-4BKXL7SM.js";
 import {
   SUBAGENT_ALLOWED_TOOLS
-} from "./chunk-YFXZ275D.js";
+} from "./chunk-NCPGLZIQ.js";
 
 // src/hub/task-orchestrator.ts
 import { createInterface } from "readline";
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "jinzd-ai-cli",
-  "version": "0.4.34",
+  "version": "0.4.36",
   "description": "Cross-platform REPL-style AI CLI with multi-provider support",
   "type": "module",
   "main": "./dist/index.js",