@arrislink/axon 1.1.3 → 1.2.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (2)
  1. package/dist/index.js +61 -54
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -2388,20 +2388,14 @@ var init_defaults = __esm(() => {
    },
    agents: {
      sisyphus: {
-       model: "claude-sonnet-4-20250514",
-       provider: "anthropic",
        temperature: 0.7,
        max_tokens: 8000
      },
      oracle: {
-       model: "claude-sonnet-4-20250514",
-       provider: "anthropic",
        temperature: 0.3,
        max_tokens: 4000
      },
      background: {
-       model: "gemini-2.0-flash-exp",
-       provider: "google",
        temperature: 0.5,
        max_tokens: 4000
      }
@@ -16644,14 +16638,16 @@ class OpenCodeLLMClient {
  }
  async* streamChat(messages, options) {
    const prompt = this.formatMessages(messages);
-   const args = [...this.command, "run", "--agent", this.agent, "--format", "json"];
+   const agent = options?.agent || this.agent;
+   const args = [...this.command, "run", "--agent", agent, "--format", "json"];
    if (options?.model) {
      args.push("--model", options.model);
    }
    const proc = Bun.spawn(args, {
      stdin: new Blob([prompt]),
      stdout: "pipe",
-     stderr: "pipe"
+     stderr: "pipe",
+     env: process.env
    });
    let fullResponse = "";
    let metadata = {
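
streamChat now honors a per-call agent and explicitly forwards the parent environment to the spawned opencode process. A minimal usage sketch; the message shape follows the chat calls elsewhere in this diff, while OpenCodeLLMClient's constructor arguments and streaming chunk format are assumptions:

    // Per-call override: spawns `opencode run --agent oracle --format json`
    // instead of using the agent fixed at construction time.
    const client = new OpenCodeLLMClient();
    for await (const chunk of client.streamChat(
      [{ role: "user", content: "Review this spec" }],
      { agent: "oracle" }
    )) {
      process.stdout.write(String(chunk));
    }
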
@@ -16747,25 +16743,39 @@ class UnifiedLLMClient {
    this.omoConfig = omoConfig || new OMOConfigReader;
  }
  async chat(messages, options) {
-   const provider = this.omoConfig.getPrimaryProvider();
+   let provider = null;
+   let modelOverride = options?.model;
+   if (options?.agent) {
+     const agentInfo = this.omoConfig.getProvider(options.agent);
+     if (agentInfo) {
+       provider = agentInfo;
+       if (!modelOverride && agentInfo.models?.[0]) {
+         modelOverride = agentInfo.models[0];
+       }
+     }
+   }
+   if (!provider) {
+     provider = this.omoConfig.getPrimaryProvider();
+   }
    if (!provider) {
      throw new Error("未找到可用的 LLM Provider");
    }
    const providerType = provider.type || provider.name;
+   const mergedOptions = { ...options, model: modelOverride };
    switch (providerType) {
      case "anthropic":
-       return this.chatAnthropic(provider, messages, options);
+       return this.chatAnthropic(provider, messages, mergedOptions);
      case "antigravity":
-       return this.chatAntigravity(provider, messages, options);
+       return this.chatAntigravity(provider, messages, mergedOptions);
      case "google":
-       return this.chatGoogle(provider, messages, options);
+       return this.chatGoogle(provider, messages, mergedOptions);
      case "openai":
-       return this.chatOpenAI(provider, messages, options);
+       return this.chatOpenAI(provider, messages, mergedOptions);
      case "deepseek":
-       return this.chatOpenAI(provider, messages, options, "https://api.deepseek.com/v1");
+       return this.chatOpenAI(provider, messages, mergedOptions, "https://api.deepseek.com/v1");
      default:
        console.warn(`🧠 Axon: 未知 provider type '${providerType}'，使用 Anthropic 兼容模式`);
-       return this.chatAnthropic(provider, messages, options);
+       return this.chatAnthropic(provider, messages, mergedOptions);
    }
  }
  async complete(prompt, options) {
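
The resolution order this hunk introduces: an explicit agent maps to its configured provider (and, absent an explicit model, to that provider's first listed model); otherwise the primary provider is used. A standalone sketch of the same logic, with the return shapes of OMOConfigReader's getProvider/getPrimaryProvider assumed from this hunk:

    function resolveProviderAndModel(omoConfig, options) {
      // 1. An agent-specific provider wins when options.agent is set.
      let provider = options?.agent ? omoConfig.getProvider(options.agent) ?? null : null;
      // 2. Without an explicit model, fall back to that provider's first listed model.
      const model = options?.model ?? provider?.models?.[0];
      // 3. Otherwise use the primary provider from the OMO config.
      if (!provider) provider = omoConfig.getPrimaryProvider();
      return { provider, model };
    }
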
@@ -16963,20 +16973,20 @@ class AxonLLMClient {
  }
  detectedCommand = ["opencode"];
  detectMode() {
-   try {
-     const proc = Bun.spawnSync(["opencode", "--version"]);
-     if (proc.success) {
-       this.detectedCommand = ["opencode"];
-       return "cli";
-     }
-   } catch {}
-   try {
-     const proc = Bun.spawnSync(["bunx", "opencode", "--version"]);
-     if (proc.success) {
-       this.detectedCommand = ["bunx", "opencode"];
-       return "cli";
-     }
-   } catch {}
+   const forcedMode = process.env["AXON_LLM_MODE"];
+   if (forcedMode === "cli" || forcedMode === "direct" || forcedMode === "fallback") {
+     return forcedMode;
+   }
+   const opencodePath = Bun.which("opencode");
+   if (opencodePath) {
+     this.detectedCommand = [opencodePath];
+     return "cli";
+   }
+   const bunxOpencode = Bun.spawnSync(["bunx", "opencode", "--version"]);
+   if (bunxOpencode.success) {
+     this.detectedCommand = ["bunx", "opencode"];
+     return "cli";
+   }
    if (hasOMOConfig() && this.omoConfig.hasProviders()) {
      const primary = this.omoConfig.getPrimaryProvider();
      if (primary && this.omoConfig.getProviderApiKey(primary)) {
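
Mode detection is now overridable via the AXON_LLM_MODE environment variable, and PATH lookup uses Bun.which instead of spawning `opencode --version`. A sketch of forcing the mode, assuming detectMode runs during construction as the mode field usage elsewhere in this diff suggests:

    // Force Direct mode regardless of whether the opencode CLI is installed:
    process.env["AXON_LLM_MODE"] = "direct";
    const llm = new AxonLLMClient();
    // detectMode() returns "direct" without probing for opencode or bunx.
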
@@ -17023,7 +17033,17 @@ class AxonLLMClient {
  async chat(messages, options) {
    try {
      if (this.mode === "cli" && this.openCodeClient) {
-       return await this.openCodeClient.chat(messages, options);
+       const chatOptions = { ...options };
+       if (!chatOptions.agent && chatOptions.model) {
+         const isDefaultModel = [
+           "claude-sonnet-4-20250514",
+           "gemini-2.0-flash-exp",
+           "gpt-4o"
+         ].includes(chatOptions.model);
+         if (isDefaultModel)
+           chatOptions.model = undefined;
+       }
+       return await this.openCodeClient.chat(messages, chatOptions);
      }
      if (this.mode === "direct" && this.unifiedClient) {
        return await this.unifiedClient.chat(messages, options);
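
In CLI mode, the three models that 1.1.3 injected as config defaults are now stripped when no agent is named, so the opencode agent's own model selection wins; any other explicitly chosen model still reaches the CLI as --model. Illustrative calls (the model string in the first call is a hypothetical example):

    const llm = new AxonLLMClient();
    const messages = [{ role: "user", content: "hello" }];
    await llm.chat(messages, { model: "my-custom-model" }); // forwarded as --model
    await llm.chat(messages, { model: "gpt-4o" });          // stripped: baked-in default, CLI decides
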
@@ -17045,24 +17065,14 @@ class AxonLLMClient {
  }
  async handleChatError(error, messages, options) {
    if (this.mode === "cli") {
-     console.warn("🧠 Axon: CLI 模式调用失败，尝试 Direct 模式...");
-     if (process.env["DEBUG"])
-       console.error(error);
-     if (this.omoConfig.hasProviders()) {
-       const primary = this.omoConfig.getPrimaryProvider();
-       if (primary && this.omoConfig.getProviderApiKey(primary)) {
-         this.mode = "direct";
-         this.initClient();
-         return await this.chat(messages, options);
-       }
-     }
-     console.warn("🧠 Axon: Direct 模式无可用 Provider，尝试 Fallback 模式...");
-     this.mode = "fallback";
-     this.initClient();
-     return await this.chat(messages, options);
+     const errMsg = error instanceof Error ? error.message : String(error);
+     throw new APIError(`CLI 模式调用失败: ${errMsg.split(`
+`)[0]}`, 500);
    }
    if (this.mode === "direct" || this.mode === "fallback" && this.unifiedClient) {
-     console.warn("🧠 Axon: Direct/Proxy 模式调用失败，尝试环境变量 Fallback...");
+     const errMsg = error instanceof Error ? error.message : String(error);
+     console.warn(`🧠 Axon: Direct/Proxy 模式调用失败 (${errMsg.split(`
+`)[0]})，尝试环境变量 Fallback...`);
      if (process.env["DEBUG"])
        console.error(error);
      this.unifiedClient = undefined;
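
CLI-mode failures now fail fast with an APIError carrying the first line of the underlying message, rather than silently cascading through Direct and Fallback modes. Callers that relied on that cascade should handle the error themselves (a sketch; APIError is the package-internal error class seen in this hunk):

    const llm = new AxonLLMClient();
    try {
      await llm.chat([{ role: "user", content: "plan the release" }], { agent: "sisyphus" });
    } catch (err) {
      // In CLI mode, err.message looks like:
      // "CLI 模式调用失败: <first line of the CLI error>"
      console.error(err instanceof Error ? err.message : err);
    }
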
@@ -45855,17 +45865,14 @@ function getGraphStats(graph) {
  // src/core/beads/generator.ts
  class BeadsGenerator {
    llm;
-   config;
-   constructor(config) {
-     this.config = config;
+   constructor(_config) {
      this.llm = new AxonLLMClient;
    }
    async generateFromSpec(specContent) {
      const prompt = this.buildPrompt(specContent);
      const response = await this.llm.chat([{ role: "user", content: prompt }], {
-       model: this.config.agents.sisyphus.model,
-       temperature: 0.7,
-       maxTokens: 8000
+       agent: "sisyphus",
+       temperature: 0.7
      });
      const beadsData = this.parseResponse(response.content);
      const graph = {
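
BeadsGenerator no longer reads a model out of its config; it names the agent and lets AxonLLMClient resolve provider and model (per the UnifiedLLMClient hunk above). A minimal usage sketch, where `config` and `specContent` stand in for caller-supplied values:

    // The config argument is now unused (hence `_config`); "sisyphus" is
    // resolved against the agents configuration at call time.
    const generator = new BeadsGenerator(config);
    const graph = await generator.generateFromSpec(specContent);
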
@@ -52377,8 +52384,8 @@ var coerce = {
  var NEVER = INVALID;
  // src/core/config/schema.ts
  var AgentConfigSchema = exports_external.object({
-   model: exports_external.string(),
-   provider: exports_external.enum(["anthropic", "openai", "google"]),
+   model: exports_external.string().optional(),
+   provider: exports_external.enum(["anthropic", "openai", "google"]).optional(),
    temperature: exports_external.number().min(0).max(2).optional(),
    max_tokens: exports_external.number().positive().optional()
  });
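
exports_external here appears to be the bundled zod namespace. With model and provider optional, an agents entry holding only sampling parameters, like the new defaults at the top of this diff, now validates. An equivalent standalone schema, assuming zod:

    import { z } from "zod";

    const AgentConfigSchema = z.object({
      model: z.string().optional(),
      provider: z.enum(["anthropic", "openai", "google"]).optional(),
      temperature: z.number().min(0).max(2).optional(),
      max_tokens: z.number().positive().optional(),
    });

    // Passes in 1.2.0; rejected by the 1.1.3 schema, where model and provider were required:
    AgentConfigSchema.parse({ temperature: 0.7, max_tokens: 8000 });
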
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@arrislink/axon",
-   "version": "1.1.3",
+   "version": "1.2.0",
    "description": "AI-Powered Development Operating System with unified LLM provider support",
    "type": "module",
    "main": "dist/index.js",