@arrislink/axon 1.1.1 → 1.1.3

package/README.md CHANGED
@@ -161,8 +161,8 @@ ax plan # Uses configured provider
 
  **Provider Priority:**
  1. **CLI Mode** - Uses OpenCode CLI (inherits full OMO capabilities)
- 2. **Direct Mode** - Reads OMO config, calls APIs directly
- 3. **Fallback Mode** - Uses `ANTHROPIC_API_KEY` environment variable
+ 2. **Direct Mode** - Reads OMO config and resolves **Antigravity** tokens automatically
+ 3. **Fallback Mode** - Uses environment variables such as `ANTHROPIC_API_KEY`
 
  ### Environment Variables
 
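The three-tier order above maps directly onto the mode probe in `dist/index.js` (see the `AxonLLMClient` hunks below). A minimal sketch of that order; `hasOMOConfig()` exists in the bundle, while the other two probes here are illustrative stand-ins:

```ts
// Stand-ins for the real probes in dist/index.js; only hasOMOConfig() is
// confirmed by the bundle, the other two are illustrative assumptions.
const hasOpenCodeCli = (): boolean => false;        // e.g. probe `opencode` on PATH
const hasOMOConfig = (): boolean => false;          // e.g. any OMO config file found
const hasUsableProviderKey = (): boolean => false;  // primary provider has a resolvable key

function detectMode(): "cli" | "direct" | "fallback" {
  if (hasOpenCodeCli()) return "cli";                            // 1. CLI Mode
  if (hasOMOConfig() && hasUsableProviderKey()) return "direct"; // 2. Direct Mode
  return "fallback";                                             // 3. env-var Fallback
}
```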
@@ -216,8 +216,8 @@ graph TD
 
  subgraph "LLM Layer"
  Orch --> LLMInt[Unified LLM Interface]
- LLMInt --> OMO[OhMyOpenCode Registry]
- OMO --> Providers[Providers: Anthropic, OpenAI, Antigrav, etc.]
+ LLMInt --> OMO[OMO Config & Antigravity Auth]
+ OMO --> Providers[Providers: Anthropic, Google Gemini, OpenAI, etc.]
  end
  ```
 
package/README.zh-CN.md CHANGED
@@ -156,12 +156,12 @@ omo config set-provider antigravity
 
  # Axon auto-detects and uses the OMO config
  ax plan # Uses the configured provider
- ```
 
- **Provider priority:**
- 1. **CLI mode** - Uses the OpenCode CLI (inherits all OMO capabilities)
- 2. **Direct mode** - Reads the OMO config and calls APIs directly
- 3. **Fallback mode** - Uses the `ANTHROPIC_API_KEY` environment variable
+ **Provider priority:**
+ 1. **CLI mode** - Uses the OpenCode CLI (inherits full OMO capabilities)
+ 2. **Direct mode** - Reads the OMO config and automatically resolves **Antigravity** refresh tokens
+ 3. **Fallback mode** - Uses environment variables such as `ANTHROPIC_API_KEY`
+ ```
 
  ### Environment Variables
 
@@ -215,8 +215,8 @@ graph TD
 
  subgraph "LLM Layer"
  Orch --> LLMInt[Unified LLM Interface]
- LLMInt --> OMO[OhMyOpenCode Registry]
- OMO --> Providers[Platforms: Anthropic, OpenAI, Antigrav, etc.]
+ LLMInt --> OMO[OMO Config & Antigravity Auth]
+ OMO --> Providers[Providers: Anthropic, Google Gemini, OpenAI, etc.]
  end
  ```
 
package/dist/index.js CHANGED
@@ -2427,9 +2427,15 @@ var init_defaults = __esm(() => {
  MODEL_PRICING = {
  "claude-sonnet-4-20250514": { input: 3, output: 15 },
  "claude-opus-4-5-20251101": { input: 15, output: 75 },
+ "claude-opus-4-6": { input: 15, output: 75 },
  "gemini-2.0-flash-exp": { input: 0.5, output: 0.5 },
+ "gemini-3-pro": { input: 1.25, output: 3.75 },
+ "gemini-3-flash": { input: 0.1, output: 0.4 },
  "gpt-4-turbo": { input: 10, output: 30 },
- "gpt-4o": { input: 5, output: 15 }
+ "gpt-4o": { input: 5, output: 15 },
+ "gpt-5.3-codex": { input: 10, output: 30 },
+ "gpt-5-nano": { input: 0.5, output: 1.5 },
+ "glm-4.7-free": { input: 0, output: 0 }
  };
  });
 
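For reference, a hedged sketch of how these pricing entries are likely consumed, assuming the `input`/`output` rates are USD per million tokens (the bundle does not state the unit; `calculateCost` in a later hunk does the real lookup):

```ts
type Pricing = { input: number; output: number };
const MODEL_PRICING: Record<string, Pricing> = {
  "gpt-4o": { input: 5, output: 15 },
  "glm-4.7-free": { input: 0, output: 0 },
};

// Assumed per-million-token rates; unknown models are treated as free here.
function estimateCost(
  model: string,
  usage: { input_tokens: number; output_tokens: number }
): number {
  const p = MODEL_PRICING[model];
  if (!p) return 0;
  return (usage.input_tokens / 1e6) * p.input + (usage.output_tokens / 1e6) * p.output;
}

estimateCost("gpt-4o", { input_tokens: 10_000, output_tokens: 2_000 }); // => 0.08
```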
@@ -16362,11 +16368,21 @@ class AnthropicClient {
  body: JSON.stringify(body)
  });
  if (!response.ok) {
- const errorData = await response.json().catch(() => ({}));
- throw new APIError(`LLM API \u8C03\u7528\u5931\u8D25 (${response.status}): ${errorData.error?.message || response.statusText}`, response.status);
+ const errorText = await response.text().catch(() => "");
+ let errorData = {};
+ try {
+ errorData = JSON.parse(errorText);
+ } catch {}
+ throw new APIError(`LLM API \u8C03\u7528\u5931\u8D25 (${response.status}): ${errorData.error?.message || errorText || response.statusText}`, response.status);
+ }
+ const responseText = await response.text();
+ let data;
+ try {
+ data = JSON.parse(responseText);
+ } catch (err) {
+ throw new APIError(`LLM API \u54CD\u5E94\u89E3\u6790\u5931\u8D25 (\u65E0\u6548\u7684 JSON): ${responseText.substring(0, 100)}...`, 500);
  }
- const data = await response.json();
- const textContent = data.content.find((c) => c.type === "text");
+ const textContent = data.content?.find((c) => c.type === "text");
  const content = textContent?.text || "";
  return {
  content,
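The pattern introduced in this hunk is worth isolating: the body is read as text exactly once and then parsed, so a proxy returning an HTML error page no longer produces an opaque parse failure. A standalone sketch of the same idea, with the error type simplified to `Error`:

```ts
// Read-once, parse-later: keeps the raw body available for diagnostics.
async function parseJsonBody<T>(response: Response): Promise<T> {
  const raw = await response.text();
  try {
    return JSON.parse(raw) as T;
  } catch {
    // Surface a short prefix of the raw body instead of losing it.
    throw new Error(`Invalid JSON response: ${raw.slice(0, 100)}...`);
  }
}
```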
@@ -16398,120 +16414,6 @@ var init_anthropic = __esm(() => {
  init_errors();
  });
 
- // src/core/llm/opencode-client.ts
- class OpenCodeLLMClient {
- agent;
- command;
- constructor(agent = "sisyphus", command = ["opencode"]) {
- this.agent = agent;
- this.command = command;
- }
- async chat(messages, options) {
- const iterator = this.streamChat(messages, options);
- let result = await iterator.next();
- while (!result.done) {
- result = await iterator.next();
- }
- return result.value;
- }
- async* streamChat(messages, options) {
- const prompt = this.formatMessages(messages);
- const args = [...this.command, "run", "--agent", this.agent, "--format", "json"];
- if (options?.model) {
- args.push("--model", options.model);
- }
- const proc = Bun.spawn(args, {
- stdin: new Blob([prompt]),
- stdout: "pipe",
- stderr: "pipe"
- });
- let fullResponse = "";
- let metadata = {
- model: "unknown",
- tokens: { input: 0, output: 0 },
- cost: 0
- };
- const decoder = new TextDecoder;
- const reader = proc.stdout.getReader();
- let buffer = "";
- try {
- while (true) {
- const { done, value } = await reader.read();
- if (done)
- break;
- buffer += decoder.decode(value, { stream: true });
- const lines = buffer.split(`
- `);
- buffer = lines.pop() || "";
- for (const line of lines) {
- if (!line.trim())
- continue;
- try {
- const event = JSON.parse(line);
- if (event.type === "config" && event.part?.model) {
- metadata.model = event.part.model;
- } else if (event.model && metadata.model === "unknown") {
- metadata.model = event.model;
- }
- if (event.type === "text" && event.part?.text) {
- const text = event.part.text;
- fullResponse += text;
- yield text;
- } else if (event.type === "content" && event.part?.content) {
- const text = event.part.content;
- fullResponse += text;
- yield text;
- } else if (event.type === "step_finish") {
- if (event.part?.snapshot) {}
- if (event.part?.tokens) {
- metadata.tokens.input = event.part.tokens.input || 0;
- metadata.tokens.output = event.part.tokens.output || 0;
- }
- if (event.part?.cost) {
- metadata.cost = event.part.cost;
- }
- }
- } catch (e) {}
- }
- }
- } finally {
- reader.releaseLock();
- proc.kill();
- }
- const exitCode = await proc.exited;
- const stderr = await new Response(proc.stderr).text();
- if (exitCode !== 0) {
- throw new Error(`OpenCode CLI Error: ${stderr || "Unknown error"}`);
- }
- if (fullResponse.trim() === "" && stderr.trim().length > 0) {
- if (stderr.includes("Error") || stderr.includes("NotFound") || stderr.includes("|")) {
- throw new Error(`OpenCode CLI Silent Crash: ${stderr.split(`
- `)[0]}`);
- }
- }
- if (fullResponse.trim() === "") {
- throw new Error("OpenCode CLI returned an empty response");
- }
- return {
- content: fullResponse,
- model: metadata.model,
- tokens: metadata.tokens,
- cost: metadata.cost
- };
- }
- async complete(prompt, options) {
- const result = await this.chat([{ role: "user", content: prompt }], options);
- return result.content;
- }
- formatMessages(messages) {
- return messages.map((m) => `<${m.role}>
- ${m.content}
- </${m.role}>`).join(`
-
- `);
- }
- }
-
  // src/core/llm/omo-config-reader.ts
  var exports_omo_config_reader = {};
  __export(exports_omo_config_reader, {
@@ -16521,6 +16423,18 @@ __export(exports_omo_config_reader, {
  });
  import { existsSync as existsSync4, readFileSync as readFileSync2 } from "fs";
  import { homedir as homedir2 } from "os";
+ function resolveProviderType(modelString) {
+ const [prefix] = (modelString || "").split("/");
+ const typeMap = {
+ opencode: "antigravity",
+ anthropic: "anthropic",
+ google: "google",
+ openai: "openai",
+ mistral: "mistral",
+ deepseek: "deepseek"
+ };
+ return typeMap[prefix] || prefix;
+ }
  function getOMOConfigPaths() {
  const home = homedir2();
  return [
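Since OMO agent entries encode models as `<prefix>/<model>`, the new helper behaves like this (mapping restated from the hunk above so the sketch is self-contained):

```ts
// Same mapping as the new resolveProviderType helper, restated for illustration.
const typeMap: Record<string, string> = {
  opencode: "antigravity", anthropic: "anthropic", google: "google",
  openai: "openai", mistral: "mistral", deepseek: "deepseek",
};
const resolveProviderType = (model: string): string =>
  typeMap[model.split("/")[0]] ?? model.split("/")[0];

resolveProviderType("opencode/claude-sonnet-4-20250514"); // => "antigravity"
resolveProviderType("google/gemini-3-flash");             // => "google"
resolveProviderType("zhipu/glm-4.7-free");                // => "zhipu" (unmapped prefixes pass through)
```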
@@ -16538,8 +16452,10 @@ class OMOConfigReader {
  configSource = "";
  defaultProvider;
  fallbackChain = [];
+ antigravityToken;
  constructor() {
  this.loadConfig();
+ this.loadAntigravityToken();
  }
  loadConfig() {
  const paths = getOMOConfigPaths();
@@ -16550,7 +16466,7 @@ class OMOConfigReader {
  if (path.endsWith(".yaml") || path.endsWith(".yml")) {
  this.loadYamlConfig(content);
  } else if (path.endsWith(".json")) {
- this.loadJsonConfig(content);
+ this.loadJsonConfig(content, path);
  }
  if (this.providers.length > 0) {
  this.configSource = path;
@@ -16561,6 +16477,45 @@ class OMOConfigReader {
  }
  }
  }
+ if (this.providers.length > 0 && !this.configSource.endsWith("opencode.json")) {
+ this.mergeOpenCodeProviders();
+ }
+ }
+ loadAntigravityToken() {
+ try {
+ const accountsPath = `${homedir2()}/.config/opencode/antigravity-accounts.json`;
+ if (existsSync4(accountsPath)) {
+ const accounts = JSON.parse(readFileSync2(accountsPath, "utf-8"));
+ if (accounts.accounts?.length > 0) {
+ const activeIdx = accounts.activeIndex ?? 0;
+ const account = accounts.accounts.find((a) => a.enabled !== false) || accounts.accounts[activeIdx];
+ if (account) {
+ this.antigravityToken = account.token || account.refreshToken;
+ }
+ }
+ }
+ } catch {}
+ }
+ mergeOpenCodeProviders() {
+ const opencodePath = `${homedir2()}/.config/opencode/opencode.json`;
+ if (!existsSync4(opencodePath))
+ return;
+ try {
+ const content = readFileSync2(opencodePath, "utf-8");
+ const config = JSON.parse(content);
+ if (config.provider) {
+ for (const [name, details] of Object.entries(config.provider)) {
+ if (!this.providers.some((p) => p.name === name)) {
+ this.providers.push({
+ name,
+ type: name,
+ models: Object.keys(details.models || {}),
+ endpoint: details.endpoint
+ });
+ }
+ }
+ }
+ } catch {}
  }
  loadYamlConfig(content) {
  const config = $parse(content);
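`loadAntigravityToken` implies the following account-file shape; only these fields are read, anything else is ignored. A type sketch (field names taken from the code, optionality inferred):

```ts
// ~/.config/opencode/antigravity-accounts.json as read by loadAntigravityToken.
interface AntigravityAccounts {
  activeIndex?: number;    // falls back to 0 via `?? 0`
  accounts: Array<{
    enabled?: boolean;     // first account with enabled !== false wins
    token?: string;        // preferred...
    refreshToken?: string; // ...over refreshToken
  }>;
}
```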
@@ -16570,15 +16525,15 @@ class OMOConfigReader {
  this.fallbackChain = config.fallback_chain || [];
  }
  }
- loadJsonConfig(content) {
+ loadJsonConfig(content, _filePath) {
  const config = JSON.parse(content);
  if (config.agents) {
  this.providers = Object.entries(config.agents).map(([name, agent]) => {
- const [providerType] = (agent.model || "").split("/");
+ const resolvedType = resolveProviderType(agent.model || "");
  return {
  name,
  models: [agent.model || "unknown"],
- type: providerType || "unknown",
+ type: resolvedType,
  endpoint: undefined,
  api_key: undefined
  };
@@ -16612,11 +16567,14 @@ class OMOConfigReader {
  if (p)
  return p;
  }
- const priority2 = ["antigravity", "anthropic", "openai", "google", "sisyphus"];
- for (const name of priority2) {
- const p = this.getProvider(name);
- if (p)
- return p;
+ const priority2 = ["antigravity", "anthropic", "openai", "google"];
+ for (const target of priority2) {
+ const byName = this.getProvider(target);
+ if (byName)
+ return byName;
+ const byType = this.providers.find((p) => p.type === target);
+ if (byType)
+ return byType;
  }
  return this.providers[0] || null;
  }
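The loop now matches each priority entry by provider name first and by provider `type` second, so an agent entry with an arbitrary name still resolves. A self-contained illustration with invented data:

```ts
type Provider = { name: string; type: string; models: string[] };
const providers: Provider[] = [
  { name: "build", type: "anthropic", models: ["anthropic/claude-sonnet-4-20250514"] },
  { name: "plan", type: "google", models: ["google/gemini-3-flash"] },
];
const priority = ["antigravity", "anthropic", "openai", "google"];

// Name match first, then type match, mirroring the two-pass lookup above.
const primary =
  priority
    .map((t) => providers.find((p) => p.name === t) ?? providers.find((p) => p.type === t))
    .find((p) => p !== undefined) ?? providers[0] ?? null;
// => the "build" entry, matched via its type "anthropic"
```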
@@ -16633,20 +16591,155 @@ class OMOConfigReader {
  getConfigSource() {
  return this.configSource;
  }
+ hasAntigravityAuth() {
+ return !!this.antigravityToken;
+ }
+ getAntigravityToken() {
+ return this.antigravityToken;
+ }
  getProviderApiKey(provider) {
  if (provider.api_key) {
  const match = provider.api_key.match(/^\$\{(\w+)\}$/);
  return match ? process.env[match[1]] : provider.api_key;
  }
  const type = provider.type || provider.name;
- const envVar = `${type.toUpperCase()}_API_KEY`;
- return process.env[envVar];
+ const envMappings = {
+ anthropic: ["ANTHROPIC_API_KEY"],
+ openai: ["OPENAI_API_KEY"],
+ google: ["GOOGLE_API_KEY", "GEMINI_API_KEY"],
+ antigravity: ["ANTIGRAVITY_API_KEY"],
+ deepseek: ["DEEPSEEK_API_KEY"]
+ };
+ const envVars = envMappings[type] || [`${type.toUpperCase()}_API_KEY`];
+ for (const envVar of envVars) {
+ const envKey = process.env[envVar];
+ if (envKey)
+ return envKey;
+ }
+ if (this.antigravityToken) {
+ return this.antigravityToken;
+ }
+ return;
  }
  }
  var init_omo_config_reader = __esm(() => {
  init_dist();
  });
 
+ // src/core/llm/opencode-client.ts
+ class OpenCodeLLMClient {
+ agent;
+ command;
+ constructor(agent = "sisyphus", command = ["opencode"]) {
+ this.agent = agent;
+ this.command = command;
+ }
+ async chat(messages, options) {
+ const iterator = this.streamChat(messages, options);
+ let result = await iterator.next();
+ while (!result.done) {
+ result = await iterator.next();
+ }
+ return result.value;
+ }
+ async* streamChat(messages, options) {
+ const prompt = this.formatMessages(messages);
+ const args = [...this.command, "run", "--agent", this.agent, "--format", "json"];
+ if (options?.model) {
+ args.push("--model", options.model);
+ }
+ const proc = Bun.spawn(args, {
+ stdin: new Blob([prompt]),
+ stdout: "pipe",
+ stderr: "pipe"
+ });
+ let fullResponse = "";
+ let metadata = {
+ model: "unknown",
+ tokens: { input: 0, output: 0 },
+ cost: 0
+ };
+ const decoder = new TextDecoder;
+ const reader = proc.stdout.getReader();
+ let buffer = "";
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done)
+ break;
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split(`
+ `);
+ buffer = lines.pop() || "";
+ for (const line of lines) {
+ if (!line.trim())
+ continue;
+ try {
+ const event = JSON.parse(line);
+ if (event.type === "config" && event.part?.model) {
+ metadata.model = event.part.model;
+ } else if (event.model && metadata.model === "unknown") {
+ metadata.model = event.model;
+ }
+ if (event.type === "text" && event.part?.text) {
+ const text = event.part.text;
+ fullResponse += text;
+ yield text;
+ } else if (event.type === "content" && event.part?.content) {
+ const text = event.part.content;
+ fullResponse += text;
+ yield text;
+ } else if (event.type === "step_finish") {
+ if (event.part?.snapshot) {}
+ if (event.part?.tokens) {
+ metadata.tokens.input = event.part.tokens.input || 0;
+ metadata.tokens.output = event.part.tokens.output || 0;
+ }
+ if (event.part?.cost) {
+ metadata.cost = event.part.cost;
+ }
+ }
+ } catch (e) {}
+ }
+ }
+ } finally {
+ reader.releaseLock();
+ proc.kill();
+ }
+ const exitCode = await proc.exited;
+ const stderr = await new Response(proc.stderr).text();
+ if (exitCode !== 0) {
+ throw new Error(`OpenCode CLI Error: ${stderr || "Unknown error"}`);
+ }
+ if (fullResponse.trim() === "" && stderr.trim().length > 0) {
+ if (stderr.includes("Error") || stderr.includes("NotFound") || stderr.includes("|")) {
+ throw new Error(`OpenCode CLI Silent Crash: ${stderr.split(`
+ `)[0]}`);
+ }
+ }
+ if (fullResponse.trim() === "") {
+ throw new Error("OpenCode CLI returned an empty response");
+ }
+ return {
+ content: fullResponse,
+ model: metadata.model,
+ tokens: metadata.tokens,
+ cost: metadata.cost
+ };
+ }
+ async complete(prompt, options) {
+ const result = await this.chat([{ role: "user", content: prompt }], options);
+ return result.content;
+ }
+ formatMessages(messages) {
+ return messages.map((m) => `<${m.role}>
+ ${m.content}
+ </${m.role}>`).join(`
+
+ `);
+ }
+ }
+
  // src/core/llm/unified-client.ts
  class UnifiedLLMClient {
  omoConfig;
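Pulled out of the hunk above, the key-resolution order is: explicit `api_key` (with `${ENV_VAR}` interpolation), then the mapped environment variables, then the Antigravity token. A condensed sketch of the same logic:

```ts
// Condensed restatement of getProviderApiKey's resolution order.
function resolveApiKey(
  provider: { api_key?: string; type?: string; name: string },
  antigravityToken?: string
): string | undefined {
  if (provider.api_key) {
    // "${ANTHROPIC_API_KEY}" style values are interpolated from the environment.
    const match = provider.api_key.match(/^\$\{(\w+)\}$/);
    return match ? process.env[match[1]] : provider.api_key;
  }
  const type = provider.type || provider.name;
  const envMappings: Record<string, string[]> = {
    google: ["GOOGLE_API_KEY", "GEMINI_API_KEY"], // first set variable wins
  };
  for (const envVar of envMappings[type] || [`${type.toUpperCase()}_API_KEY`]) {
    if (process.env[envVar]) return process.env[envVar];
  }
  return antigravityToken; // last resort; may be undefined
}
```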
@@ -16658,11 +16751,20 @@ class UnifiedLLMClient {
  if (!provider) {
  throw new Error("\u672A\u627E\u5230\u53EF\u7528\u7684 LLM Provider");
  }
- switch (provider.name) {
+ const providerType = provider.type || provider.name;
+ switch (providerType) {
  case "anthropic":
- case "antigravity":
  return this.chatAnthropic(provider, messages, options);
+ case "antigravity":
+ return this.chatAntigravity(provider, messages, options);
+ case "google":
+ return this.chatGoogle(provider, messages, options);
+ case "openai":
+ return this.chatOpenAI(provider, messages, options);
+ case "deepseek":
+ return this.chatOpenAI(provider, messages, options, "https://api.deepseek.com/v1");
  default:
+ console.warn(`\uD83E\uDDE0 Axon: \u672A\u77E5 provider type '${providerType}'\uFF0C\u4F7F\u7528 Anthropic \u517C\u5BB9\u6A21\u5F0F`);
  return this.chatAnthropic(provider, messages, options);
  }
  }
@@ -16670,17 +16772,39 @@ class UnifiedLLMClient {
  const result = await this.chat([{ role: "user", content: prompt }], options);
  return result.content;
  }
+ cleanModelName(model) {
+ return model.replace(/^[^/]+\//, "");
+ }
  async chatAnthropic(provider, messages, options) {
  const apiKey = this.omoConfig.getProviderApiKey(provider) || process.env["ANTHROPIC_API_KEY"];
  if (!apiKey) {
  throw new Error(`\u672A\u627E\u5230 ${provider.name} \u7684 API \u5BC6\u94A5`);
  }
+ const model = this.cleanModelName(options?.model || provider.models?.[0] || "claude-sonnet-4-20250514");
  const client = new AnthropicClient(apiKey, {
- model: options?.model || provider.models?.[0] || "claude-sonnet-4-20250514",
+ model,
  provider: "anthropic",
  temperature: options?.temperature ?? 0.7,
  max_tokens: options?.maxTokens || 8000
- }, provider.endpoint);
+ }, provider.endpoint || "https://api.anthropic.com/v1");
+ return this.executeAnthropicChat(client, model, messages, options);
+ }
+ async chatAntigravity(provider, messages, options) {
+ const apiKey = this.omoConfig.getProviderApiKey(provider);
+ if (!apiKey) {
+ throw new Error(`\u672A\u627E\u5230 ${provider.name} \u7684 API \u5BC6\u94A5 (Antigravity token \u6216\u73AF\u5883\u53D8\u91CF\u5747\u672A\u8BBE\u7F6E)`);
+ }
+ const rawModel = options?.model || provider.models?.[0] || "claude-sonnet-4-20250514";
+ const displayModel = this.cleanModelName(rawModel);
+ const client = new AnthropicClient(apiKey, {
+ model: rawModel,
+ provider: "anthropic",
+ temperature: options?.temperature ?? 0.7,
+ max_tokens: options?.maxTokens || 8000
+ }, provider.endpoint || "https://api.antigravity.ai/v1");
+ return this.executeAnthropicChat(client, displayModel, messages, options);
+ }
+ async executeAnthropicChat(client, model, messages, options) {
  const systemMessage = messages.find((m) => m.role === "system");
  const chatMessages = messages.filter((m) => m.role !== "system").map((m) => ({
  role: m.role,
@@ -16691,12 +16815,120 @@ class UnifiedLLMClient {
  });
  return {
  content: response.content,
- model: options?.model || provider.models?.[0] || "unknown",
+ model,
  tokens: {
  input: response.usage.input_tokens,
  output: response.usage.output_tokens
  },
- cost: this.calculateCost(options?.model || provider.models?.[0] || "unknown", response.usage)
+ cost: this.calculateCost(model, response.usage)
+ };
+ }
+ async chatGoogle(provider, messages, options) {
+ const apiKey = this.omoConfig.getProviderApiKey(provider);
+ if (!apiKey) {
+ throw new Error(`\u672A\u627E\u5230 ${provider.name} \u7684 API \u5BC6\u94A5`);
+ }
+ const model = this.cleanModelName(options?.model || provider.models?.[0] || "gemini-2.0-flash");
+ const isAntigravityAuth = this.omoConfig.hasAntigravityAuth() && apiKey === this.omoConfig.getAntigravityToken();
+ if (isAntigravityAuth) {
+ return this.chatAntigravity({
+ ...provider,
+ type: "antigravity",
+ endpoint: provider.endpoint || "https://api.opencode.ai/v1"
+ }, messages, options);
+ }
+ const systemMessage = messages.find((m) => m.role === "system");
+ const chatMessages = messages.filter((m) => m.role !== "system").map((m) => ({
+ role: m.role === "assistant" ? "model" : "user",
+ parts: [{ text: m.content }]
+ }));
+ const endpoint = provider.endpoint || "https://generativelanguage.googleapis.com/v1beta";
+ const url = `${endpoint}/models/${model}:generateContent?key=${apiKey}`;
+ const body = {
+ contents: chatMessages,
+ generationConfig: {
+ temperature: options?.temperature ?? 0.7,
+ maxOutputTokens: options?.maxTokens || 8000
+ }
+ };
+ if (systemMessage) {
+ body.systemInstruction = { parts: [{ text: systemMessage.content }] };
+ }
+ const response = await fetch(url, {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify(body)
+ });
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(`Google API \u8C03\u7528\u5931\u8D25 (${response.status}): ${errorData.error?.message || response.statusText}`);
+ }
+ const data = await response.json();
+ const content = data.candidates?.[0]?.content?.parts?.[0]?.text || "";
+ const usageMetadata = data.usageMetadata || {};
+ return {
+ content,
+ model,
+ tokens: {
+ input: usageMetadata.promptTokenCount || 0,
+ output: usageMetadata.candidatesTokenCount || 0
+ },
+ cost: this.calculateCost(model, {
+ input_tokens: usageMetadata.promptTokenCount || 0,
+ output_tokens: usageMetadata.candidatesTokenCount || 0
+ })
+ };
+ }
+ async chatOpenAI(provider, messages, options, defaultEndpoint = "https://api.openai.com/v1") {
+ const apiKey = this.omoConfig.getProviderApiKey(provider);
+ if (!apiKey) {
+ throw new Error(`\u672A\u627E\u5230 ${provider.name} \u7684 API \u5BC6\u94A5`);
+ }
+ const isAntigravityAuth = this.omoConfig.hasAntigravityAuth() && apiKey === this.omoConfig.getAntigravityToken();
+ if (isAntigravityAuth) {
+ return this.chatAntigravity({
+ ...provider,
+ type: "antigravity"
+ }, messages, options);
+ }
+ const model = this.cleanModelName(options?.model || provider.models?.[0] || "gpt-4o");
+ const endpoint = provider.endpoint || defaultEndpoint;
+ const url = `${endpoint}/chat/completions`;
+ const openaiMessages = messages.map((m) => ({
+ role: m.role,
+ content: m.content
+ }));
+ const response = await fetch(url, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Authorization: `Bearer ${apiKey}`
+ },
+ body: JSON.stringify({
+ model,
+ messages: openaiMessages,
+ temperature: options?.temperature ?? 0.7,
+ max_tokens: options?.maxTokens || 8000
+ })
+ });
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({}));
+ throw new Error(`OpenAI API \u8C03\u7528\u5931\u8D25 (${response.status}): ${errorData.error?.message || response.statusText}`);
+ }
+ const data = await response.json();
+ const content = data.choices?.[0]?.message?.content || "";
+ const usage = data.usage || {};
+ return {
+ content,
+ model,
+ tokens: {
+ input: usage.prompt_tokens || 0,
+ output: usage.completion_tokens || 0
+ },
+ cost: this.calculateCost(model, {
+ input_tokens: usage.prompt_tokens || 0,
+ output_tokens: usage.completion_tokens || 0
+ })
  };
  }
  calculateCost(model, usage) {
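`chatGoogle` translates Anthropic-style messages into the Gemini `generateContent` shape; the essential mapping, extracted from the hunk above as a standalone sketch:

```ts
// Anthropic-style roles -> Gemini "contents": "assistant" becomes "model",
// everything else becomes "user", and the system prompt is carried separately
// as systemInstruction.
type ChatMessage = { role: "system" | "user" | "assistant"; content: string };

function toGeminiRequest(messages: ChatMessage[]) {
  const system = messages.find((m) => m.role === "system");
  return {
    contents: messages
      .filter((m) => m.role !== "system")
      .map((m) => ({
        role: m.role === "assistant" ? "model" : "user",
        parts: [{ text: m.content }],
      })),
    ...(system ? { systemInstruction: { parts: [{ text: system.content }] } } : {}),
  };
}
```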
@@ -16746,7 +16978,10 @@ class AxonLLMClient {
  }
  } catch {}
  if (hasOMOConfig() && this.omoConfig.hasProviders()) {
- return "direct";
+ const primary = this.omoConfig.getPrimaryProvider();
+ if (primary && this.omoConfig.getProviderApiKey(primary)) {
+ return "direct";
+ }
  }
  return "fallback";
  }
@@ -16759,18 +16994,32 @@ class AxonLLMClient {
  this.unifiedClient = new UnifiedLLMClient(this.omoConfig);
  break;
  case "fallback":
- const apiKey = process.env["ANTHROPIC_API_KEY"];
- if (apiKey) {
- this.anthropicClient = new AnthropicClient(apiKey, {
- model: "claude-3-5-sonnet-20240620",
- provider: "anthropic",
- temperature: 0.7,
- max_tokens: 4000
- });
- }
+ this.initFallbackClient();
  break;
  }
  }
+ initFallbackClient() {
+ const envKeys = [
+ { key: "ANTHROPIC_API_KEY", model: "claude-sonnet-4-20250514", provider: "anthropic" },
+ { key: "OPENAI_API_KEY", model: "gpt-4o", provider: "openai" },
+ { key: "GOOGLE_API_KEY", model: "gemini-2.0-flash", provider: "google" }
+ ];
+ for (const { key, model, provider } of envKeys) {
+ const apiKey = process.env[key];
+ if (apiKey) {
+ this.anthropicClient = new AnthropicClient(apiKey, {
+ model,
+ provider,
+ temperature: 0.7,
+ max_tokens: 4000
+ });
+ return;
+ }
+ }
+ if (this.mode === "fallback" && this.omoConfig.hasProviders() && this.omoConfig.hasAntigravityAuth()) {
+ this.unifiedClient = new UnifiedLLMClient(this.omoConfig);
+ }
+ }
  async chat(messages, options) {
  try {
  if (this.mode === "cli" && this.openCodeClient) {
@@ -16783,20 +17032,55 @@ class AxonLLMClient {
  if (this.anthropicClient) {
  return await this.chatAnthropicFallback(messages, options);
  }
- throw new APIError("\u672A\u627E\u5230\u6709\u6548\u7684 LLM \u914D\u7F6E\u6216 API \u5BC6\u94A5", 401);
+ if (this.unifiedClient) {
+ return await this.unifiedClient.chat(messages, options);
+ }
+ const diagInfo = this.getDiagnosticInfo();
+ throw new APIError(`\u672A\u627E\u5230\u6709\u6548\u7684 LLM \u914D\u7F6E\u6216 API \u5BC6\u94A5 (${diagInfo})`, 401);
  }
  throw new Error(`\u672A\u652F\u6301\u7684 LLM \u6A21\u5F0F: ${this.mode}`);
  } catch (error) {
- if (this.mode !== "fallback") {
- console.warn(`\uD83E\uDDE0 Axon: ${this.mode} \u6A21\u5F0F\u8C03\u7528\u5931\u8D25\u6216\u54CD\u5E94\u4E3A\u7A7A\uFF0C\u5C1D\u8BD5\u56DE\u9000...`);
- if (process.env["DEBUG"])
- console.error(error);
- this.mode = "fallback";
- this.initClient();
- return await this.chat(messages, options);
- }
- throw error;
+ return this.handleChatError(error, messages, options);
+ }
+ }
+ async handleChatError(error, messages, options) {
+ if (this.mode === "cli") {
+ console.warn("\uD83E\uDDE0 Axon: CLI \u6A21\u5F0F\u8C03\u7528\u5931\u8D25\uFF0C\u5C1D\u8BD5 Direct \u6A21\u5F0F...");
+ if (process.env["DEBUG"])
+ console.error(error);
+ if (this.omoConfig.hasProviders()) {
+ const primary = this.omoConfig.getPrimaryProvider();
+ if (primary && this.omoConfig.getProviderApiKey(primary)) {
+ this.mode = "direct";
+ this.initClient();
+ return await this.chat(messages, options);
+ }
+ }
+ console.warn("\uD83E\uDDE0 Axon: Direct \u6A21\u5F0F\u65E0\u53EF\u7528 Provider\uFF0C\u5C1D\u8BD5 Fallback \u6A21\u5F0F...");
+ this.mode = "fallback";
+ this.initClient();
+ return await this.chat(messages, options);
+ }
+ if (this.mode === "direct" || this.mode === "fallback" && this.unifiedClient) {
+ console.warn("\uD83E\uDDE0 Axon: Direct/Proxy \u6A21\u5F0F\u8C03\u7528\u5931\u8D25\uFF0C\u5C1D\u8BD5\u73AF\u5883\u53D8\u91CF Fallback...");
+ if (process.env["DEBUG"])
+ console.error(error);
+ this.unifiedClient = undefined;
+ this.mode = "fallback";
+ this.initClient();
+ return await this.chat(messages, options);
  }
+ throw error;
+ }
+ getDiagnosticInfo() {
+ return [
+ `\u914D\u7F6E\u6587\u4EF6: ${this.omoConfig.getConfigSource() || "\u672A\u627E\u5230"}`,
+ `Providers: ${this.omoConfig.getAllProviders().length}`,
+ `Antigravity Token: ${this.omoConfig.hasAntigravityAuth() ? "\u5DF2\u627E\u5230" : "\u672A\u627E\u5230"}`,
+ `ANTHROPIC_API_KEY: ${process.env["ANTHROPIC_API_KEY"] ? "\u5DF2\u8BBE\u7F6E" : "\u672A\u8BBE\u7F6E"}`,
+ `OPENAI_API_KEY: ${process.env["OPENAI_API_KEY"] ? "\u5DF2\u8BBE\u7F6E" : "\u672A\u8BBE\u7F6E"}`,
+ `GOOGLE_API_KEY: ${process.env["GOOGLE_API_KEY"] ? "\u5DF2\u8BBE\u7F6E" : "\u672A\u8BBE\u7F6E"}`
+ ].join(", ");
  }
  async chatAnthropicFallback(messages, options) {
  if (!this.anthropicClient)
@@ -16813,7 +17097,7 @@ class AxonLLMClient {
  });
  return {
  content: response.content,
- model: options?.model || "claude-3-5-sonnet-20240620",
+ model: options?.model || "claude-sonnet-4-20250514",
  tokens: {
  input: response.usage.input_tokens,
  output: response.usage.output_tokens
@@ -16845,10 +17129,10 @@ class AxonLLMClient {
  }
  }
  var init_llm = __esm(() => {
+ init_errors();
  init_anthropic();
- init_unified_client();
  init_omo_config_reader();
- init_errors();
+ init_unified_client();
  });
 
  // node_modules/underscore/underscore-node-f.cjs
@@ -54081,12 +54365,19 @@ var doctorCommand = new Command("doctor").description(t("Diagnose environment is
  let apiKey = process.env[envVar];
  let source = "environment";
  if (!apiKey) {
- const provider = omoReader.getProvider(name);
+ let provider = omoReader.getProvider(name);
+ if (!provider) {
+ provider = omoReader.getAllProviders().find((p) => p.type === name) || null;
+ }
  if (provider) {
  apiKey = omoReader.getProviderApiKey(provider);
- source = "OMO config (~/.omo/providers.yaml)";
+ source = `OMO config (${omoReader.getConfigSource()})`;
  }
  }
+ if (!apiKey && omoReader.hasAntigravityAuth()) {
+ apiKey = omoReader.getAntigravityToken();
+ source = "Antigravity token (~/.config/opencode/antigravity-accounts.json)";
+ }
  if (apiKey) {
  if (options.checkKeys) {
  spinner.start(`\u9A8C\u8BC1 ${name} API \u5BC6\u94A5 (${source})...`);
@@ -54287,7 +54578,7 @@ configCommand.command("show").description(t("Show current Axon running mode", "\
  console.log(source_default.cyan(" bunx oh-my-opencode install"));
  }
  });
- configCommand.command("test").description(t("Test Provider connection", "\u6D4B\u8BD5 Provider \u8FDE\u63A5")).option("-p, --provider <name>", t("Specify Provider to test", "\u6307\u5B9A Provider \u6D4B\u8BD5")).option("-m, --model <model>", t("Specify model for testing", "\u6307\u5B9A\u6D4B\u8BD5\u4F7F\u7528\u7684\u6A21\u578B")).action(async (options) => {
+ configCommand.command("test").description(t("Test Provider connection", "\u6D4B\u8BD5 Provider \u8FDE\u63A5")).option("-p, --provider <name>", t("Specify Provider to test", "\u6307\u5B9A Provider \u6D4B\u8BD5")).option("-m, --model <model>", t("Specify model for testing", "\u6307\u5B9A\u6D4B\u8BD5\u4F7F\u7528\u7684\u6A21\u578B")).option("--mode <mode>", t("Force specific LLM mode (cli, direct, fallback)", "\u5F3A\u5236\u4F7F\u7528\u7279\u5B9A LLM \u6A21\u5F0F (cli, direct, fallback)")).action(async (options) => {
  const spinner2 = ora("\u6B63\u5728\u521D\u59CB\u5316 LLM \u5BA2\u6237\u7AEF...").start();
  try {
  const omo = new OMOConfigReader;
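With the new `--mode` flag, a connection test can pin the transport rather than relying on auto-detection, e.g. `ax config test -p anthropic --mode direct` (flag names as registered above; the `ax config test` invocation path is inferred from the command registration in this bundle).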
@@ -54307,8 +54598,9 @@ configCommand.command("test").description(t("Test Provider connection", "\u6D4B\
  }
  }
  const model = options.model || primary?.models?.[0];
- spinner2.text = `\u6D4B\u8BD5\u8FDE\u63A5: ${source_default.cyan(providerName)}${model ? ` (\u6A21\u578B: ${source_default.cyan(model)})` : ""}...`;
- const client = new AxonLLMClient;
+ const mode = options.mode;
+ spinner2.text = `\u6D4B\u8BD5\u8FDE\u63A5: ${source_default.cyan(providerName)}${model ? ` (\u6A21\u578B: ${source_default.cyan(model)})` : ""}${mode ? ` [\u6A21\u5F0F: ${source_default.cyan(mode)}]` : ""}...`;
+ const client = new AxonLLMClient(mode);
  const start = Date.now();
  const response = await client.chat([{ role: "user", content: 'Say "OK" if you can hear me.' }], {
  model,
@@ -54508,7 +54800,8 @@ var VERSION = pkg.version;
  var program2 = new Command;
  program2.name("ax").description(`${source_default.green("\uD83E\uDDE0")} ${source_default.bold("Axon")} - AI-Powered Development Operating System (v${VERSION})
 
- ${source_default.dim("\u4ECE\u9700\u6C42\u5230\u4EE3\u7801\uFF0C\u8BA9 AI \u6210\u4E3A\u4F60\u7684\u5F00\u53D1\u4F19\u4F34\uFF0C\u800C\u975E\u5DE5\u5177\u3002")}`).version(VERSION, "-v, --version", "\u663E\u793A\u7248\u672C\u53F7").helpOption("-h, --help", "\u663E\u793A\u5E2E\u52A9\u4FE1\u606F");
+ From requirements to code, let AI be your development partner, not a tool.
+ \u4ECE\u9700\u6C42\u5230\u4EE3\u7801\uFF0C\u8BA9 AI \u6210\u4E3A\u4F60\u7684\u5F00\u53D1\u4F19\u4F34\uFF0C\u800C\u975E\u5DE5\u5177\u3002`).version(VERSION, "-v, --version", "Show version").helpOption("-h, --help", "Show help information");
  program2.addCommand(initCommand);
  program2.addCommand(specCommand);
  program2.addCommand(planCommand);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@arrislink/axon",
- "version": "1.1.1",
+ "version": "1.1.3",
  "description": "AI-Powered Development Operating System with unified LLM provider support",
  "type": "module",
  "main": "dist/index.js",