lex-agentic-learning 0.1.9 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 8f2bd98de70d5e11f07c18043483a22d4325508c7e2601fd2d1c3a3bdb9726bd
4
- data.tar.gz: e1f9d5d06972b5fe01b932d973050d701aa82663bca6ff76709769df476ea80b
3
+ metadata.gz: 98b0f0afb82cb17c9db706827d06ce2f27e9a69c590170b720bfa986bcb5529d
4
+ data.tar.gz: d985458466141a279fe4c0b79aaeccf168b15246f6863e2ff7181bf90d88bb31
5
5
  SHA512:
6
- metadata.gz: d5858c88e16679558b7bf3d2433f3d88810a85f5a41cb7cf37991ccc22195569f65fffad058dbeb2aeaf359b89855dd3ac3330759da49e36c18c489afc08b713
7
- data.tar.gz: 55abe2021cd74cbd3885d75a2ff321febe189d369e802ea910eced3d86ac9398adf807414880f006ab23b0a5b869f81a304093fdc10a05da8dedc6352136611a
6
+ metadata.gz: 3bf9d04bee2e5d19aba000ec08d043caa79fd704eea2414adcb89277a88e655784933bc5a358415a3d5a7319b3cc044bee04cc3fd6d2ab6fb74b4f932c3eac1b
7
+ data.tar.gz: 92193dbf3ffb4efeb3ec9c52751888a3798764652f75bee7b610e8b91e743fc27f286cbbef65eff134fe1adce4c478c6022fe6c650118cfa331f1160b736e122
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # Changelog
2
2
 
3
+ ## [0.1.11] - 2026-05-07
4
+ ### Fixed
5
+ - Curiosity self-inquiry now uses native `Legion::LLM.chat` response hashes directly and avoids obsolete legacy LLM fallback calls after native chat succeeds.
6
+
7
+ ## [0.1.10] - 2026-04-28
8
+ ### Fixed
9
+ - Curiosity self-inquiry now prefers the current `Legion::LLM.ask` router before legacy helper fallbacks, preventing old `lex(:llm, :complete)` failures from hiding native LLM availability.
10
+
3
11
  ## [0.1.9] - 2026-04-28
4
12
  ### Fixed
5
13
  - Curiosity self-inquiry now uses the current `Legion::LLM.ask` API before falling back to legacy `Legion::LLM.complete`, preserving compatibility with the LLM routing uplift.
data/README.md CHANGED
@@ -5,7 +5,7 @@ Domain consolidation gem for learning, adaptation, and knowledge acquisition. Bu
5
5
  ## Overview
6
6
 
7
7
  **Gem**: `lex-agentic-learning`
8
- **Version**: 0.1.9
8
+ **Version**: 0.1.11
9
9
  **Namespace**: `Legion::Extensions::Agentic::Learning`
10
10
 
11
11
  ## Sub-Modules
@@ -136,20 +136,15 @@ module Legion
136
136
  def query_llm_for_wonder(question, domain)
137
137
  prompt = build_self_inquiry_prompt(question, domain)
138
138
 
139
- # Try via Lex helper (primary path inside Legion runtime)
140
- if respond_to?(:lex, true)
141
- result = lex(:llm, :complete, prompt: prompt, max_tokens: 300)
142
- text = extract_llm_text(result)
143
- return text if text && !text.empty?
144
- end
145
-
146
- # Direct LLM fallback for current legion-llm; complete is kept for older installs.
147
- if defined?(Legion::LLM) && Legion::LLM.respond_to?(:ask)
148
- result = Legion::LLM.ask(message: prompt) # rubocop:disable Legion/HelperMigration/DirectLlm
149
- text = extract_llm_text(result)
150
- return text if text && !text.empty?
151
- elsif defined?(Legion::LLM) && Legion::LLM.respond_to?(:complete)
152
- result = Legion::LLM.complete(prompt: prompt, max_tokens: 300)
139
+ if defined?(Legion::LLM) && Legion::LLM.respond_to?(:chat)
140
+ result = Legion::LLM.chat( # rubocop:disable Legion/HelperMigration/DirectLlm
141
+ message: prompt,
142
+ caller: {
143
+ extension: 'lex-agentic-learning',
144
+ operation: 'curiosity',
145
+ phase: 'self_inquiry'
146
+ }
147
+ )
153
148
  text = extract_llm_text(result)
154
149
  return text if text && !text.empty?
155
150
  end
@@ -162,9 +157,11 @@ module Legion
162
157
 
163
158
  def extract_llm_text(result)
164
159
  return result.strip if result.is_a?(String)
160
+ return result.content.to_s.strip if result.respond_to?(:content)
165
161
  return nil unless result.is_a?(Hash)
166
162
 
167
- text = result[:response] || result[:content] || result[:text] || result[:completion]
163
+ text = result[:response] || result[:content] || result[:text] || result[:completion] ||
164
+ result.dig(:message, :content) || result.dig('message', 'content')
168
165
  text&.to_s&.strip
169
166
  end
170
167
 
@@ -4,7 +4,7 @@ module Legion
4
4
  module Extensions
5
5
  module Agentic
6
6
  module Learning
7
- VERSION = '0.1.9'
7
+ VERSION = '0.1.11'
8
8
  end
9
9
  end
10
10
  end
@@ -163,12 +163,12 @@ RSpec.describe Legion::Extensions::Agentic::Learning::Curiosity::Runners::Curios
163
163
  allow(client).to receive(:respond_to?).with(:lex, true).and_return(false)
164
164
  end
165
165
 
166
- it 'uses Legion::LLM.ask response hashes for current legion-llm' do
166
+ it 'uses Legion::LLM.chat response hashes for current legion-llm' do
167
167
  llm = Module.new do
168
- def self.ask(message:)
168
+ def self.chat(message:, **)
169
169
  raise 'missing prompt' if message.to_s.empty?
170
170
 
171
- { response: ' useful insight ' }
171
+ { content: ' useful insight ' }
172
172
  end
173
173
  end
174
174
  stub_const('Legion::LLM', llm)
@@ -176,17 +176,27 @@ RSpec.describe Legion::Extensions::Agentic::Learning::Curiosity::Runners::Curios
176
176
  expect(client.send(:query_llm_for_wonder, 'why?', :curiosity)).to eq('useful insight')
177
177
  end
178
178
 
179
- it 'keeps legacy Legion::LLM.complete fallback for older installs' do
179
+ it 'does not call legacy LLM paths when native chat succeeds' do
180
+ allow(client).to receive(:respond_to?).with(:lex, true).and_return(true)
181
+ expect(client).not_to receive(:lex)
182
+
180
183
  llm = Module.new do
181
- def self.complete(prompt:, max_tokens:)
182
- raise 'missing prompt' if prompt.to_s.empty? || max_tokens != 300
184
+ def self.chat(message:, **)
185
+ raise 'missing prompt' if message.to_s.empty?
183
186
 
184
- { content: 'legacy insight' }
187
+ { content: 'chat survived' }
185
188
  end
186
189
  end
187
190
  stub_const('Legion::LLM', llm)
188
191
 
189
- expect(client.send(:query_llm_for_wonder, 'why?', :curiosity)).to eq('legacy insight')
192
+ expect(client.send(:query_llm_for_wonder, 'why?', :curiosity)).to eq('chat survived')
193
+ end
194
+
195
+ it 'returns nil when native chat is unavailable' do
196
+ llm = Module.new
197
+ stub_const('Legion::LLM', llm)
198
+
199
+ expect(client.send(:query_llm_for_wonder, 'why?', :curiosity)).to be_nil
190
200
  end
191
201
  end
192
202
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: lex-agentic-learning
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.9
4
+ version: 0.1.11
5
5
  platform: ruby
6
6
  authors:
7
7
  - Esity