lex-agentic-learning 0.1.8 → 0.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 9ca1a0df917cf9f0e83e16a2c8630312a21bf5047e56629ca6f513cb3d6c617c
4
- data.tar.gz: a17e63a44b99f90d8f4a378152a0d99b2fba6a07d75defebdb2a3074622ef031
3
+ metadata.gz: a4da6d50340dafa221b40f6c63e1af34678d45c2729be6d3b2e67701ea6ee19a
4
+ data.tar.gz: f485edff0dacee93438320daf14fb9171b5685a51afeba146e4c70f1fe10e0cb
5
5
  SHA512:
6
- metadata.gz: eaae4ee70f41226f954c3b91f4af8ad6567bb71846eebf2b6b5d4e76a4960f2989b63508d23429e52563f74a3d7063a3ad36f458494fc576cea3ba5bab992fb1
7
- data.tar.gz: 5b2254a369bf1111c2129c614a1fbc0b7ec5cf8a86192cb9255ec48f96c6e696b7c8b9fd9676d232d15c33729adf0c8f42330ad1d4c21f4f0254f8d6d2716aaf
6
+ metadata.gz: c5cc7fb0ac840ea9b1f3f0914f26c395a36ca2a6f8cbc3e03dbc33ab13c0140bd5b5dba73e22fb7879e7fe7f5b6f82c4d2202c594cbcd781c1a6a292ea1f0fe1
7
+ data.tar.gz: fb6446f01f7d2b0659b98e51971248c2f2d913df1c952cc0e4aa083973e3302aa205e0e5132e1ac47f6cc7723db6db76b55720f2a6e1a0e0d6bea36cc3075e74
data/CHANGELOG.md CHANGED
@@ -1,5 +1,13 @@
1
1
  # Changelog
2
2
 
3
+ ## [0.1.10] - 2026-04-28
4
+ ### Fixed
5
+ - Curiosity self-inquiry now prefers the current `Legion::LLM.ask` router before legacy helper fallbacks, so failures in the legacy `lex(:llm, :complete)` path no longer mask native LLM availability.
6
+
7
+ ## [0.1.9] - 2026-04-28
8
+ ### Fixed
9
+ - Curiosity self-inquiry now uses the current `Legion::LLM.ask` API before falling back to legacy `Legion::LLM.complete`, preserving compatibility with the LLM routing uplift.
10
+
3
11
  ## [0.1.8] - 2026-04-22
4
12
  ### Added
5
13
  - 3 new decay actors: Curiosity::Decay (300s), EpistemicCuriosity::Decay (300s), Habit::Decay (300s)
data/README.md CHANGED
@@ -5,7 +5,7 @@ Domain consolidation gem for learning, adaptation, and knowledge acquisition. Bu
5
5
  ## Overview
6
6
 
7
7
  **Gem**: `lex-agentic-learning`
8
- **Version**: 0.1.8
8
+ **Version**: 0.1.10
9
9
  **Namespace**: `Legion::Extensions::Agentic::Learning`
10
10
 
11
11
  ## Sub-Modules
@@ -136,17 +136,21 @@ module Legion
136
136
  def query_llm_for_wonder(question, domain)
137
137
  prompt = build_self_inquiry_prompt(question, domain)
138
138
 
139
- # Try via Lex helper (primary path inside Legion runtime)
139
+ # Prefer the current legion-llm router; legacy helper paths are fallbacks only.
140
+ if defined?(Legion::LLM) && Legion::LLM.respond_to?(:ask)
141
+ result = Legion::LLM.ask(message: prompt) # rubocop:disable Legion/HelperMigration/DirectLlm
142
+ text = extract_llm_text(result)
143
+ return text if text && !text.empty?
144
+ end
145
+
140
146
  if respond_to?(:lex, true)
141
- result = lex(:llm, :complete, prompt: prompt, max_tokens: 300)
142
- text = result[:content] || result[:text] || result[:completion] if result.is_a?(Hash)
147
+ text = query_legacy_lex_llm(prompt)
143
148
  return text if text && !text.empty?
144
149
  end
145
150
 
146
- # Direct LLM gateway fallback
147
151
  if defined?(Legion::LLM) && Legion::LLM.respond_to?(:complete)
148
152
  result = Legion::LLM.complete(prompt: prompt, max_tokens: 300)
149
- text = result[:content] || result[:text] || result[:completion] if result.is_a?(Hash)
153
+ text = extract_llm_text(result)
150
154
  return text if text && !text.empty?
151
155
  end
152
156
 
@@ -156,6 +160,22 @@ module Legion
156
160
  nil
157
161
  end
158
162
 
163
+ def query_legacy_lex_llm(prompt)
164
+ result = lex(:llm, :complete, prompt: prompt, max_tokens: 300)
165
+ extract_llm_text(result)
166
+ rescue StandardError => e
167
+ log.warn "[curiosity:self_inquiry] legacy lex LLM query failed: #{e.class}: #{e.message}"
168
+ nil
169
+ end
170
+
171
+ def extract_llm_text(result)
172
+ return result.strip if result.is_a?(String)
173
+ return nil unless result.is_a?(Hash)
174
+
175
+ text = result[:response] || result[:content] || result[:text] || result[:completion]
176
+ text&.to_s&.strip
177
+ end
178
+
159
179
  def build_self_inquiry_prompt(question, domain)
160
180
  <<~PROMPT.strip
161
181
  You are GAIA, a thinking, curious AI. You are reflecting on a knowledge gap you detected in yourself.
@@ -4,7 +4,7 @@ module Legion
4
4
  module Extensions
5
5
  module Agentic
6
6
  module Learning
7
- VERSION = '0.1.8'
7
+ VERSION = '0.1.10'
8
8
  end
9
9
  end
10
10
  end
@@ -156,4 +156,53 @@ RSpec.describe Legion::Extensions::Agentic::Learning::Curiosity::Runners::Curios
156
156
  expect(result[:pruned]).to eq(1)
157
157
  end
158
158
  end
159
+
160
+ describe '#query_llm_for_wonder' do
161
+ before do
162
+ allow(client).to receive(:respond_to?).and_call_original
163
+ allow(client).to receive(:respond_to?).with(:lex, true).and_return(false)
164
+ end
165
+
166
+ it 'uses Legion::LLM.ask response hashes for current legion-llm' do
167
+ llm = Module.new do
168
+ def self.ask(message:)
169
+ raise 'missing prompt' if message.to_s.empty?
170
+
171
+ { response: ' useful insight ' }
172
+ end
173
+ end
174
+ stub_const('Legion::LLM', llm)
175
+
176
+ expect(client.send(:query_llm_for_wonder, 'why?', :curiosity)).to eq('useful insight')
177
+ end
178
+
179
+ it 'does not let legacy lex complete failures hide current Legion::LLM.ask' do
180
+ allow(client).to receive(:respond_to?).with(:lex, true).and_return(true)
181
+ allow(client).to receive(:lex).and_raise(StandardError, 'legacy unavailable')
182
+
183
+ llm = Module.new do
184
+ def self.ask(message:)
185
+ raise 'missing prompt' if message.to_s.empty?
186
+
187
+ { response: 'ask survived' }
188
+ end
189
+ end
190
+ stub_const('Legion::LLM', llm)
191
+
192
+ expect(client.send(:query_llm_for_wonder, 'why?', :curiosity)).to eq('ask survived')
193
+ end
194
+
195
+ it 'keeps legacy Legion::LLM.complete fallback for older installs' do
196
+ llm = Module.new do
197
+ def self.complete(prompt:, max_tokens:)
198
+ raise 'missing prompt' if prompt.to_s.empty? || max_tokens != 300
199
+
200
+ { content: 'legacy insight' }
201
+ end
202
+ end
203
+ stub_const('Legion::LLM', llm)
204
+
205
+ expect(client.send(:query_llm_for_wonder, 'why?', :curiosity)).to eq('legacy insight')
206
+ end
207
+ end
159
208
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: lex-agentic-learning
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.8
4
+ version: 0.1.10
5
5
  platform: ruby
6
6
  authors:
7
7
  - Esity