legion-llm 0.3.3 → 0.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 7faf26458139d4c0e585e5c30e42602e85402e20abcbe2cd73ef8449ae17f947
4
- data.tar.gz: 73fecb93dbfd407891e64a4278c21c9e9f7fcc2af8313660de4e075a3195bbc7
3
+ metadata.gz: bd0b530095616abc383dcd06473a6c435753f021458c76c981da1d4e98583a5f
4
+ data.tar.gz: 21d8645355c14d591891c3484ca90957e99b0cb376b115eb61dd61f3e0721800
5
5
  SHA512:
6
- metadata.gz: 067c7e99927b675df13517a6ca5aa12b494fdedd8e277e7a42ae060e7597033597fbd0f88f595d5368937ba4a00aa7f6e2e02c14e56d01763f4253eb1cd3f421
7
- data.tar.gz: 9dc754975461db838b49d1f9826d54a80c7bdf106fa44fa6fe3d3693eefe70a1d503ed5025e551a458b367ab53580c67b7c9cf1fb9ee01e69cd5c2394181f150
6
+ metadata.gz: 53f3b6bd09f86625986e6f9d5c53f665e000e71d78dc5db36d599f1b5e5d7267d40ca1a2fe1e9f2b48cc54fb7ab6d272108869a02b327ff9669f978b83280e71
7
+ data.tar.gz: 381d707e3bdb75a1cf87d82404dc842140f98fa4bb5091e5a837685235327684b800de977efcd24857bb0ab8ab5bc75d42fdc22364aa4b024d6adf8f27cdab65
data/CHANGELOG.md CHANGED
@@ -1,5 +1,14 @@
1
1
  # Legion LLM Changelog
2
2
 
3
+ ## [0.3.4] - 2026-03-18
4
+
5
+ ### Added
6
+ - Auto-configure LLM providers from environment variables (`AWS_BEARER_TOKEN_BEDROCK`, `ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, `CODEX_API_KEY`, `GEMINI_API_KEY`)
7
+ - `ANTHROPIC_MODEL` env var sets default model for Anthropic and Bedrock providers
8
+ - Import Claude CLI config from `~/.claude/settings.json` and `~/.claude.json`
9
+ - Auto-detect Ollama via local port probe (no env var needed)
10
+ - Auto-enable providers when credentials are found in environment
11
+
3
12
  ## [0.3.3] - 2026-03-17
4
13
 
5
14
  ### Added
data/CLAUDE.md CHANGED
@@ -284,7 +284,7 @@ In-memory signal consumer with pluggable handlers. Adjusts effective priorities
284
284
  | `lib/legion/llm/embeddings.rb` | Embeddings module: generate, generate_batch, default_model |
285
285
  | `lib/legion/llm/shadow_eval.rb` | Shadow evaluation: enabled?, should_sample?, evaluate, compare |
286
286
  | `lib/legion/llm/structured_output.rb` | JSON schema enforcement with native response_format and prompt fallback |
287
- | `lib/legion/llm/version.rb` | Version constant (0.3.2) |
287
+ | `lib/legion/llm/version.rb` | Version constant (0.3.4) |
288
288
  | `lib/legion/llm/quality_checker.rb` | QualityChecker module with QualityResult struct |
289
289
  | `lib/legion/llm/escalation_history.rb` | EscalationHistory mixin: `escalation_history`, `escalated?`, `final_resolution`, `escalation_chain` |
290
290
  | `lib/legion/llm/router/escalation_chain.rb` | EscalationChain value object |
data/lib/legion/llm/claude_config_loader.rb ADDED
@@ -0,0 +1,59 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Legion
4
+ module LLM
5
+ module ClaudeConfigLoader
6
+ CLAUDE_SETTINGS = File.expand_path('~/.claude/settings.json')
7
+ CLAUDE_CONFIG = File.expand_path('~/.claude.json')
8
+
9
+ module_function
10
+
11
+ def load
12
+ config = read_json(CLAUDE_SETTINGS).merge(read_json(CLAUDE_CONFIG))
13
+ return if config.empty?
14
+
15
+ apply_claude_config(config)
16
+ end
17
+
18
+ def read_json(path)
19
+ return {} unless File.exist?(path)
20
+
21
+ require 'json'
22
+ ::JSON.parse(File.read(path), symbolize_names: true)
23
+ rescue StandardError
24
+ {}
25
+ end
26
+
27
+ def apply_claude_config(config)
28
+ apply_api_keys(config)
29
+ apply_model_preference(config)
30
+ end
31
+
32
+ def apply_api_keys(config)
33
+ llm = Legion::LLM.settings
34
+ providers = llm[:providers]
35
+
36
+ if config[:anthropicApiKey] && providers.dig(:anthropic, :api_key).nil?
37
+ providers[:anthropic][:api_key] = config[:anthropicApiKey]
38
+ Legion::Logging.debug 'Imported Anthropic API key from Claude CLI config'
39
+ end
40
+
41
+ return unless config[:openaiApiKey] && providers.dig(:openai, :api_key).nil?
42
+
43
+ providers[:openai][:api_key] = config[:openaiApiKey]
44
+ Legion::Logging.debug 'Imported OpenAI API key from Claude CLI config'
45
+ end
46
+
47
+ def apply_model_preference(config)
48
+ return unless config[:preferredModel] || config[:model]
49
+
50
+ model = config[:preferredModel] || config[:model]
51
+ llm = Legion::LLM.settings
52
+ return if llm[:default_model]
53
+
54
+ llm[:default_model] = model
55
+ Legion::Logging.debug "Imported model preference from Claude CLI config: #{model}"
56
+ end
57
+ end
58
+ end
59
+ end
@@ -4,6 +4,7 @@ module Legion
4
4
  module LLM
5
5
  module Providers
6
6
  def configure_providers
7
+ auto_enable_from_resolved_credentials
7
8
  settings[:providers].each do |provider, config|
8
9
  next unless config[:enabled]
9
10
 
@@ -11,6 +12,37 @@ module Legion
11
12
  end
12
13
  end
13
14
 
15
+ def auto_enable_from_resolved_credentials
16
+ settings[:providers].each do |provider, config|
17
+ next if config[:enabled]
18
+
19
+ has_creds = case provider
20
+ when :bedrock
21
+ config[:bearer_token] || (config[:api_key] && config[:secret_key])
22
+ when :ollama
23
+ ollama_running?(config)
24
+ else
25
+ config[:api_key]
26
+ end
27
+ next unless has_creds
28
+
29
+ config[:enabled] = true
30
+ Legion::Logging.info "Auto-enabled #{provider} provider (credentials found)"
31
+ end
32
+ end
33
+
34
+ def ollama_running?(config)
35
+ require 'socket'
36
+ url = config[:base_url] || 'http://localhost:11434'
37
+ host_part = url.gsub(%r{^https?://}, '').split(':')
38
+ addr = host_part[0]
39
+ port = (host_part[1] || '11434').to_i
40
+ Socket.tcp(addr, port, connect_timeout: 1).close
41
+ true
42
+ rescue StandardError
43
+ false
44
+ end
45
+
14
46
  def apply_provider_config(provider, config)
15
47
  case provider
16
48
  when :bedrock
@@ -4,10 +4,11 @@ module Legion
4
4
  module LLM
5
5
  module Settings
6
6
  def self.default
7
+ model_override = ENV.fetch('ANTHROPIC_MODEL', nil)
7
8
  {
8
9
  enabled: true,
9
10
  connected: false,
10
- default_model: nil,
11
+ default_model: model_override,
11
12
  default_provider: nil,
12
13
  providers: providers,
13
14
  routing: routing_defaults,
@@ -68,23 +69,23 @@ module Legion
68
69
  api_key: nil,
69
70
  secret_key: nil,
70
71
  session_token: nil,
71
- bearer_token: nil,
72
+ bearer_token: 'env://AWS_BEARER_TOKEN_BEDROCK',
72
73
  region: 'us-east-2'
73
74
  },
74
75
  anthropic: {
75
76
  enabled: false,
76
77
  default_model: 'claude-sonnet-4-6',
77
- api_key: nil
78
+ api_key: 'env://ANTHROPIC_API_KEY'
78
79
  },
79
80
  openai: {
80
81
  enabled: false,
81
82
  default_model: 'gpt-4o',
82
- api_key: nil
83
+ api_key: ['env://OPENAI_API_KEY', 'env://CODEX_API_KEY']
83
84
  },
84
85
  gemini: {
85
86
  enabled: false,
86
87
  default_model: 'gemini-2.0-flash',
87
- api_key: nil
88
+ api_key: 'env://GEMINI_API_KEY'
88
89
  },
89
90
  ollama: {
90
91
  enabled: false,
data/lib/legion/llm/version.rb CHANGED
@@ -2,6 +2,6 @@
2
2
 
3
3
  module Legion
4
4
  module LLM
5
- VERSION = '0.3.3'
5
+ VERSION = '0.3.4'
6
6
  end
7
7
  end
data/lib/legion/llm.rb CHANGED
@@ -19,6 +19,9 @@ module Legion
19
19
  def start
20
20
  Legion::Logging.debug 'Legion::LLM is running start'
21
21
 
22
+ require 'legion/llm/claude_config_loader'
23
+ ClaudeConfigLoader.load
24
+
22
25
  configure_providers
23
26
  run_discovery
24
27
  set_defaults
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: legion-llm
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.3
4
+ version: 0.3.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Esity
@@ -89,6 +89,7 @@ files:
89
89
  - legion-llm.gemspec
90
90
  - lib/legion/llm.rb
91
91
  - lib/legion/llm/bedrock_bearer_auth.rb
92
+ - lib/legion/llm/claude_config_loader.rb
92
93
  - lib/legion/llm/compressor.rb
93
94
  - lib/legion/llm/discovery/ollama.rb
94
95
  - lib/legion/llm/discovery/system.rb