legion-llm 0.3.7 → 0.3.8

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: b06b6f10d21c6c4d03c73646fbecc2112e61e47e1dd82059076c61a721efb1aa
4
- data.tar.gz: 541a1a1de0a108e95b5e2c204ec579a1b0a5f77e935bad64d7668aef9ae3322d
3
+ metadata.gz: b7bbfc4cc4f4d111b0407d6adc2cd070a53f139db1997dd035f423aeb8a156a6
4
+ data.tar.gz: 4424d53100199b49da45209cd3b240eb45a6c2a1298f142f22a1b0174bca585d
5
5
  SHA512:
6
- metadata.gz: 06ab55cec8a23d4be70ea3851fd4a7717686c4e02f7b4ca2f479e2353f79b14cacc770343a81ac54f492a05de3b5228aebb7c2e464203e434eba53e8b4144694
7
- data.tar.gz: 93623de5b0baa0bb5390678daac043fc6c08111f965886b03b87f5e769aa6e5f267a713c1c20d93d061341a6ceca8d88fcbd0dfa431d4ce84c2caf5768b19609
6
+ metadata.gz: d9be18e398e238a82ebefe863f7eb6e29c8de94892044d9767116febe7087478f7e817d051a9dbd25fb2752062c40b341dc54ba8b55c22d0b02570e89d367e6a
7
+ data.tar.gz: 56e717c13a756e6f7320956faf1ea3ad6f18ce26788319a4153f5bf97fe843c939cb8a618aba838a1e5a79bfb9c0d6f6f2ff8f9121f8f3564970cd7b4e0dae4e
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # Legion LLM Changelog
2
2
 
3
+ ## [0.3.8] - 2026-03-20
4
+
5
+ ### Added
6
+ - Azure AI Foundry provider: `api_base`, `api_key`, `auth_token` settings
7
+ - `configure_azure` wires RubyLLM for Azure OpenAI endpoints (api-key or bearer token auth)
8
+ - Azure added to auto-detection priority chain (position 5, between Gemini and Ollama)
9
+ - Credentials support `vault://` and `env://` resolver URIs via settings secret resolver
10
+
3
11
  ## [0.3.7] - 2026-03-19
4
12
 
5
13
  ### Added
data/CLAUDE.md CHANGED
@@ -400,12 +400,10 @@ bundle exec rubocop # 52 files, 0 offenses
400
400
 
401
401
  ## Design Documents
402
402
 
403
- - `docs/plans/2026-03-14-llm-dynamic-routing-design.md` — Full design (approved)
404
- - `docs/plans/2026-03-14-llm-dynamic-routing-implementation.md` — Implementation plan
405
- - `docs/plans/2026-03-15-ollama-discovery-design.md` — Ollama discovery design (approved)
406
- - `docs/plans/2026-03-15-ollama-discovery-implementation.md` — Discovery implementation plan
407
- - `docs/plans/2026-03-16-llm-escalation-design.md` — Model escalation design (approved)
408
- - `docs/plans/2026-03-16-llm-escalation-implementation.md` — Escalation implementation plan
403
+ - `docs/work/completed/2026-03-14-llm-dynamic-routing-design.md` — Full design (approved)
404
+ - `docs/work/completed/2026-03-14-llm-dynamic-routing-implementation.md` — Implementation plan
405
+ - `docs/work/completed/2026-03-16-llm-escalation-design.md` — Model escalation design (approved)
406
+ - `docs/work/completed/2026-03-16-llm-escalation-implementation.md` — Escalation implementation plan
409
407
 
410
408
  ## Future (Not Yet Built)
411
409
 
@@ -19,6 +19,8 @@ module Legion
19
19
  has_creds = case provider
20
20
  when :bedrock
21
21
  config[:bearer_token] || (config[:api_key] && config[:secret_key])
22
+ when :azure
23
+ config[:api_base] && (config[:api_key] || config[:auth_token])
22
24
  when :ollama
23
25
  ollama_running?(config)
24
26
  else
@@ -53,6 +55,8 @@ module Legion
53
55
  configure_openai(config)
54
56
  when :gemini
55
57
  configure_gemini(config)
58
+ when :azure
59
+ configure_azure(config)
56
60
  when :ollama
57
61
  configure_ollama(config)
58
62
  else
@@ -109,6 +113,20 @@ module Legion
109
113
  Legion::Logging.info 'Configured Gemini provider'
110
114
  end
111
115
 
116
+ def configure_azure(config)
117
+ api_base = config[:api_base]
118
+ api_key = config[:api_key]
119
+ auth_token = config[:auth_token]
120
+ return unless api_base && (api_key || auth_token)
121
+
122
+ RubyLLM.configure do |c|
123
+ c.azure_api_base = api_base
124
+ c.azure_api_key = api_key if api_key
125
+ c.azure_ai_auth_token = auth_token if auth_token
126
+ end
127
+ Legion::Logging.info "Configured Azure AI Foundry provider (#{api_base})"
128
+ end
129
+
112
130
  def configure_ollama(config)
113
131
  RubyLLM.configure do |c|
114
132
  c.ollama_api_base = config[:base_url] if config[:base_url]
@@ -95,6 +95,13 @@ module Legion
95
95
  default_model: 'gemini-2.0-flash',
96
96
  api_key: 'env://GEMINI_API_KEY'
97
97
  },
98
+ azure: {
99
+ enabled: false,
100
+ default_model: nil,
101
+ api_base: nil,
102
+ api_key: nil,
103
+ auth_token: nil
104
+ },
98
105
  ollama: {
99
106
  enabled: false,
100
107
  default_model: 'llama3',
@@ -2,6 +2,6 @@
2
2
 
3
3
  module Legion
4
4
  module LLM
5
- VERSION = '0.3.7'
5
+ VERSION = '0.3.8'
6
6
  end
7
7
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: legion-llm
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.7
4
+ version: 0.3.8
5
5
  platform: ruby
6
6
  authors:
7
7
  - Esity