@arrislink/axon 1.1.2 → 1.2.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +81 -64
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -2388,20 +2388,14 @@ var init_defaults = __esm(() => {
  },
  agents: {
  sisyphus: {
- model: "claude-sonnet-4-20250514",
- provider: "anthropic",
  temperature: 0.7,
  max_tokens: 8000
  },
  oracle: {
- model: "claude-sonnet-4-20250514",
- provider: "anthropic",
  temperature: 0.3,
  max_tokens: 4000
  },
  background: {
- model: "gemini-2.0-flash-exp",
- provider: "google",
  temperature: 0.5,
  max_tokens: 4000
  }
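
Note: 1.2.0 drops the pinned model/provider from the built-in agent presets; only sampling defaults remain, and the concrete model is now resolved at call time from the agent name (see the UnifiedLLMClient hunk below). A minimal sketch of the resulting presets, where the enclosing `defaults` object name is an assumption, not taken from the bundle:

    // Sketch of the 1.2.0 agent presets implied by this hunk.
    const defaults = {
      agents: {
        sisyphus: { temperature: 0.7, max_tokens: 8000 },
        oracle: { temperature: 0.3, max_tokens: 4000 },
        background: { temperature: 0.5, max_tokens: 4000 }
      }
    };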
@@ -16368,11 +16362,21 @@ class AnthropicClient {
  body: JSON.stringify(body)
  });
  if (!response.ok) {
- const errorData = await response.json().catch(() => ({}));
- throw new APIError(`LLM API \u8C03\u7528\u5931\u8D25 (${response.status}): ${errorData.error?.message || response.statusText}`, response.status);
+ const errorText = await response.text().catch(() => "");
+ let errorData = {};
+ try {
+ errorData = JSON.parse(errorText);
+ } catch {}
+ throw new APIError(`LLM API \u8C03\u7528\u5931\u8D25 (${response.status}): ${errorData.error?.message || errorText || response.statusText}`, response.status);
+ }
+ const responseText = await response.text();
+ let data;
+ try {
+ data = JSON.parse(responseText);
+ } catch (err) {
+ throw new APIError(`LLM API \u54CD\u5E94\u89E3\u6790\u5931\u8D25 (\u65E0\u6548\u7684 JSON): ${responseText.substring(0, 100)}...`, 500);
  }
- const data = await response.json();
- const textContent = data.content.find((c) => c.type === "text");
+ const textContent = data.content?.find((c) => c.type === "text");
  const content = textContent?.text || "";
  return {
  content,
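
Note: the client now reads the HTTP body once as text and parses JSON defensively on both the error and success paths, so a non-JSON body (an HTML gateway page, a truncated response) surfaces in the thrown message instead of failing opaquely inside `response.json()`. A standalone sketch of the pattern, with illustrative names:

    // Read-text-then-parse: a non-JSON body still yields a useful error.
    async function parseJsonBody(response: Response): Promise<unknown> {
      const text = await response.text().catch(() => "");
      try {
        return JSON.parse(text);
      } catch {
        throw new Error(`invalid JSON (first 100 chars): ${text.substring(0, 100)}`);
      }
    }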
@@ -16634,14 +16638,16 @@ class OpenCodeLLMClient {
  }
  async* streamChat(messages, options) {
  const prompt = this.formatMessages(messages);
- const args = [...this.command, "run", "--agent", this.agent, "--format", "json"];
+ const agent = options?.agent || this.agent;
+ const args = [...this.command, "run", "--agent", agent, "--format", "json"];
  if (options?.model) {
  args.push("--model", options.model);
  }
  const proc = Bun.spawn(args, {
  stdin: new Blob([prompt]),
  stdout: "pipe",
- stderr: "pipe"
+ stderr: "pipe",
+ env: process.env
  });
  let fullResponse = "";
  let metadata = {
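
Note: streamChat now accepts a per-call agent override via `options.agent` when building the `opencode run` arguments, and forwards `process.env` to the spawned process so the child sees the caller's PATH and API keys. A hedged usage sketch (the chunk type is not shown in this diff):

    // Per-call agent override, inferred from the new `options?.agent` branch.
    for await (const chunk of client.streamChat(messages, { agent: "oracle" })) {
      process.stdout.write(String(chunk));
    }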
@@ -16737,25 +16743,39 @@ class UnifiedLLMClient {
  this.omoConfig = omoConfig || new OMOConfigReader;
  }
  async chat(messages, options) {
- const provider = this.omoConfig.getPrimaryProvider();
+ let provider = null;
+ let modelOverride = options?.model;
+ if (options?.agent) {
+ const agentInfo = this.omoConfig.getProvider(options.agent);
+ if (agentInfo) {
+ provider = agentInfo;
+ if (!modelOverride && agentInfo.models?.[0]) {
+ modelOverride = agentInfo.models[0];
+ }
+ }
+ }
+ if (!provider) {
+ provider = this.omoConfig.getPrimaryProvider();
+ }
  if (!provider) {
  throw new Error("\u672A\u627E\u5230\u53EF\u7528\u7684 LLM Provider");
  }
  const providerType = provider.type || provider.name;
+ const mergedOptions = { ...options, model: modelOverride };
  switch (providerType) {
  case "anthropic":
- return this.chatAnthropic(provider, messages, options);
+ return this.chatAnthropic(provider, messages, mergedOptions);
  case "antigravity":
- return this.chatAntigravity(provider, messages, options);
+ return this.chatAntigravity(provider, messages, mergedOptions);
  case "google":
- return this.chatGoogle(provider, messages, options);
+ return this.chatGoogle(provider, messages, mergedOptions);
  case "openai":
- return this.chatOpenAI(provider, messages, options);
+ return this.chatOpenAI(provider, messages, mergedOptions);
  case "deepseek":
- return this.chatOpenAI(provider, messages, options, "https://api.deepseek.com/v1");
+ return this.chatOpenAI(provider, messages, mergedOptions, "https://api.deepseek.com/v1");
  default:
  console.warn(`\uD83E\uDDE0 Axon: \u672A\u77E5 provider type '${providerType}'\uFF0C\u4F7F\u7528 Anthropic \u517C\u5BB9\u6A21\u5F0F`);
- return this.chatAnthropic(provider, messages, options);
+ return this.chatAnthropic(provider, messages, mergedOptions);
  }
  }
  async complete(prompt, options) {
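
Note: chat now resolves the provider in two steps: if `options.agent` is set and the OMO config has an entry for it, that agent's provider wins and its first listed model becomes the model override (unless the caller passed one); otherwise the primary provider is used. The resolved model is merged into the options before the provider-type dispatch. Roughly, as a sketch using the reader methods named in the diff:

    // Resolution order implied by this hunk:
    //   1. options.agent -> omoConfig.getProvider(agent), model from agentInfo.models[0]
    //   2. fallback      -> omoConfig.getPrimaryProvider()
    //   3. neither       -> throw ("no usable LLM provider")
    const provider =
      (options?.agent && omoConfig.getProvider(options.agent)) ||
      omoConfig.getPrimaryProvider();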
@@ -16763,8 +16783,7 @@ class UnifiedLLMClient {
  return result.content;
  }
  cleanModelName(model) {
- const parts = model.split("/");
- return parts.length > 1 ? parts.slice(1).join("/") : model;
+ return model.replace(/^[^/]+\//, "");
  }
  async chatAnthropic(provider, messages, options) {
  const apiKey = this.omoConfig.getProviderApiKey(provider) || process.env["ANTHROPIC_API_KEY"];
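
Note: cleanModelName is reduced from split/join to a single regex; both versions strip only the leading `provider/` prefix and keep any later slashes, so the behavior is unchanged. Worked examples:

    "anthropic/claude-sonnet-4".replace(/^[^/]+\//, ""); // -> "claude-sonnet-4"
    "openrouter/meta/llama-3".replace(/^[^/]+\//, "");   // -> "meta/llama-3" (later slashes kept)
    "claude-sonnet-4".replace(/^[^/]+\//, "");           // -> unchanged, no prefix to strip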
@@ -16792,7 +16811,7 @@ class UnifiedLLMClient {
  provider: "anthropic",
  temperature: options?.temperature ?? 0.7,
  max_tokens: options?.maxTokens || 8000
- }, provider.endpoint || "https://api.opencode.ai/v1");
+ }, provider.endpoint || "https://api.antigravity.ai/v1");
  return this.executeAnthropicChat(client, displayModel, messages, options);
  }
  async executeAnthropicChat(client, model, messages, options) {
@@ -16954,20 +16973,20 @@ class AxonLLMClient {
  }
  detectedCommand = ["opencode"];
  detectMode() {
- try {
- const proc = Bun.spawnSync(["opencode", "--version"]);
- if (proc.success) {
- this.detectedCommand = ["opencode"];
- return "cli";
- }
- } catch {}
- try {
- const proc = Bun.spawnSync(["bunx", "opencode", "--version"]);
- if (proc.success) {
- this.detectedCommand = ["bunx", "opencode"];
- return "cli";
- }
- } catch {}
+ const forcedMode = process.env["AXON_LLM_MODE"];
+ if (forcedMode === "cli" || forcedMode === "direct" || forcedMode === "fallback") {
+ return forcedMode;
+ }
+ const opencodePath = Bun.which("opencode");
+ if (opencodePath) {
+ this.detectedCommand = [opencodePath];
+ return "cli";
+ }
+ const bunxOpencode = Bun.spawnSync(["bunx", "opencode", "--version"]);
+ if (bunxOpencode.success) {
+ this.detectedCommand = ["bunx", "opencode"];
+ return "cli";
+ }
  if (hasOMOConfig() && this.omoConfig.hasProviders()) {
  const primary = this.omoConfig.getPrimaryProvider();
  if (primary && this.omoConfig.getProviderApiKey(primary)) {
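
Note: detectMode now honors an AXON_LLM_MODE environment override (`cli`, `direct`, or `fallback`) before probing, and locates the `opencode` binary with `Bun.which` instead of spawning `opencode --version`; only the `bunx` path still needs a spawnSync probe. Detection precedence as a sketch:

    // Precedence implied by this hunk:
    //   AXON_LLM_MODE > `opencode` on PATH (Bun.which) > `bunx opencode`
    //   > OMO-config providers > fallback
    // Force a mode for one process (shell form; the exact command is up to you):
    //   AXON_LLM_MODE=direct <your command>
    process.env["AXON_LLM_MODE"] = "direct";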
@@ -17007,14 +17026,24 @@ class AxonLLMClient {
  return;
  }
  }
- if (this.omoConfig.hasProviders() && this.omoConfig.hasAntigravityAuth()) {
+ if (this.mode === "fallback" && this.omoConfig.hasProviders() && this.omoConfig.hasAntigravityAuth()) {
  this.unifiedClient = new UnifiedLLMClient(this.omoConfig);
  }
  }
  async chat(messages, options) {
  try {
  if (this.mode === "cli" && this.openCodeClient) {
- return await this.openCodeClient.chat(messages, options);
+ const chatOptions = { ...options };
+ if (!chatOptions.agent && chatOptions.model) {
+ const isDefaultModel = [
+ "claude-sonnet-4-20250514",
+ "gemini-2.0-flash-exp",
+ "gpt-4o"
+ ].includes(chatOptions.model);
+ if (isDefaultModel)
+ chatOptions.model = undefined;
+ }
+ return await this.openCodeClient.chat(messages, chatOptions);
  }
  if (this.mode === "direct" && this.unifiedClient) {
  return await this.unifiedClient.chat(messages, options);
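
Note: in CLI mode chat now drops `options.model` when it matches one of the old hard-coded defaults, so a stale model pinned by a 1.1.x config can no longer shadow the agent configured in opencode; an explicitly chosen non-default model still passes through. For example (call shape taken from the diff, model ids illustrative):

    // Old default from a 1.1.x config: stripped; opencode's agent model applies.
    await client.chat(messages, { model: "claude-sonnet-4-20250514" });
    // Non-default model (hypothetical id): forwarded unchanged.
    await client.chat(messages, { model: "my-custom-model" });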
@@ -17036,26 +17065,17 @@ class AxonLLMClient {
  }
  async handleChatError(error, messages, options) {
  if (this.mode === "cli") {
- console.warn("\uD83E\uDDE0 Axon: CLI \u6A21\u5F0F\u8C03\u7528\u5931\u8D25\uFF0C\u5C1D\u8BD5 Direct \u6A21\u5F0F...");
- if (process.env["DEBUG"])
- console.error(error);
- if (this.omoConfig.hasProviders()) {
- const primary = this.omoConfig.getPrimaryProvider();
- if (primary && this.omoConfig.getProviderApiKey(primary)) {
- this.mode = "direct";
- this.initClient();
- return await this.chat(messages, options);
- }
- }
- console.warn("\uD83E\uDDE0 Axon: Direct \u6A21\u5F0F\u65E0\u53EF\u7528 Provider\uFF0C\u5C1D\u8BD5 Fallback \u6A21\u5F0F...");
- this.mode = "fallback";
- this.initClient();
- return await this.chat(messages, options);
- }
- if (this.mode === "direct") {
- console.warn("\uD83E\uDDE0 Axon: Direct \u6A21\u5F0F\u8C03\u7528\u5931\u8D25\uFF0C\u5C1D\u8BD5 Fallback \u6A21\u5F0F...");
+ const errMsg = error instanceof Error ? error.message : String(error);
+ throw new APIError(`CLI \u6A21\u5F0F\u8C03\u7528\u5931\u8D25: ${errMsg.split(`
+ `)[0]}`, 500);
+ }
+ if (this.mode === "direct" || this.mode === "fallback" && this.unifiedClient) {
+ const errMsg = error instanceof Error ? error.message : String(error);
+ console.warn(`\uD83E\uDDE0 Axon: Direct/Proxy \u6A21\u5F0F\u8C03\u7528\u5931\u8D25 (${errMsg.split(`
+ `)[0]})\uFF0C\u5C1D\u8BD5\u73AF\u5883\u53D8\u91CF Fallback...`);
  if (process.env["DEBUG"])
  console.error(error);
+ this.unifiedClient = undefined;
  this.mode = "fallback";
  this.initClient();
  return await this.chat(messages, options);
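
Note: error handling is restructured: a CLI-mode failure now throws an APIError immediately (carrying only the first line of the underlying message) instead of cascading through Direct and Fallback, while a Direct/Proxy failure discards the unified client and retries once in the environment-variable fallback mode. Caller-visible behavior, sketched:

    // CLI mode now fails fast instead of silently switching providers.
    try {
      await client.chat(messages);
    } catch (err) {
      // e.g. "CLI \u6A21\u5F0F\u8C03\u7528\u5931\u8D25: <first line of the cause>"
      console.error(err instanceof Error ? err.message : String(err));
    }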
@@ -45845,17 +45865,14 @@ function getGraphStats(graph) {
  // src/core/beads/generator.ts
  class BeadsGenerator {
  llm;
- config;
- constructor(config) {
- this.config = config;
+ constructor(_config) {
  this.llm = new AxonLLMClient;
  }
  async generateFromSpec(specContent) {
  const prompt = this.buildPrompt(specContent);
  const response = await this.llm.chat([{ role: "user", content: prompt }], {
- model: this.config.agents.sisyphus.model,
- temperature: 0.7,
- maxTokens: 8000
+ agent: "sisyphus",
+ temperature: 0.7
  });
  const beadsData = this.parseResponse(response.content);
  const graph = {
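
Note: BeadsGenerator no longer reads a pinned model from its config; it passes `agent: "sisyphus"` and lets AxonLLMClient resolve model, provider, and token limits from the agent preset, keeping the constructor parameter only for call-site compatibility (hence `_config`). Usage sketch, with the config argument shape assumed:

    // `config` is still accepted but ignored; model selection now follows
    // the "sisyphus" agent preset resolved by the client.
    const generator = new BeadsGenerator(config);
    const graph = await generator.generateFromSpec(specContent);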
@@ -52367,8 +52384,8 @@ var coerce = {
  var NEVER = INVALID;
  // src/core/config/schema.ts
  var AgentConfigSchema = exports_external.object({
- model: exports_external.string(),
- provider: exports_external.enum(["anthropic", "openai", "google"]),
+ model: exports_external.string().optional(),
+ provider: exports_external.enum(["anthropic", "openai", "google"]).optional(),
  temperature: exports_external.number().min(0).max(2).optional(),
  max_tokens: exports_external.number().positive().optional()
  });
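
Note: AgentConfigSchema now marks model and provider optional, matching the trimmed defaults above, so an agent entry carrying only sampling parameters validates. For instance (zod-style, mirroring the schema in this hunk):

    // Valid under 1.2.0 -- no model/provider required:
    AgentConfigSchema.parse({ temperature: 0.7, max_tokens: 8000 });
    // Still valid when pinned explicitly:
    AgentConfigSchema.parse({ provider: "anthropic", model: "claude-sonnet-4-20250514" });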
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@arrislink/axon",
- "version": "1.1.2",
+ "version": "1.2.0",
  "description": "AI-Powered Development Operating System with unified LLM provider support",
  "type": "module",
  "main": "dist/index.js",