lex-agentic-language 0.1.10 → 0.1.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 2e44972f87bca2dccf617db64c48e8e3b99d2174bb45f1d21cd71a72a2489a59
4
- data.tar.gz: 1e9fb62235879f874ef6c2ca07b6f58dc104fcc5fcb3d64591dcdf5d6477484f
3
+ metadata.gz: 79cfb7e1534a2010a54b836636c9a5406fefda03bb97cb700f131f1a92a6e77c
4
+ data.tar.gz: 026cd8c1a5831eed7555d231471595bb2d3ab5ba832d3a98c90646c01baa54f4
5
5
  SHA512:
6
- metadata.gz: aed85b23f9a856e2a26ac20bfdacd601c0ad09c8394f38bedb8195d28852fab393ee170b29d176109c83d6ab7cc426883270621c0bc629a7cab6244f94d38cc5
7
- data.tar.gz: e57dbce2e2254462c9940b57b7dcac4b349e8edb3dad723e4c4d3c06bcc0dc6882cf6a4e52dfdb75bed12fdde5bd755f18c6e447c242fbd5d3fe4dcc42b5917b
6
+ metadata.gz: 347a89a7300d788e8b76b1882c52ab280ad99c8ed1c38981cc7de53071dbf905678b086847d9923b7fa9242a08189e78adc658a7f553aace3b0669712d466119
7
+ data.tar.gz: 3b15a51fe618f5f478c48182688803a626558eca1d20a66093b5cdf384e3dbc53e41a96f61a3c4360711273cda056154cc997ac62b84a6a3de5e986519a9a0b4
data/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
1
1
  # Changelog
2
2
 
3
+ ## [0.1.11] - 2026-05-07
4
+ ### Fixed
5
+ - Narrator LLM enhancement now sends native chat message payloads with system/user roles while preserving legacy session fallback for older test doubles.
6
+ - LLM narration state maps reflection `unacted_count` into `pending_adaptations`.
7
+
3
8
  ## [0.1.10] - 2026-04-27
4
9
  ### Fixed
5
10
  - Narrator LLM enhancement now skips empty idle narration and rate-limits provider failure warnings without warning-level backtrace floods. Fixes #7
@@ -51,16 +51,39 @@ module Legion
51
51
  caller: { extension: 'lex-agentic-language', mode: :narrator }
52
52
  )
53
53
  content = response&.message&.dig(:content)
54
- ::Struct.new(:content).new(content) if content
55
54
  else
56
- chat = Legion::LLM.chat # rubocop:disable Legion/HelperMigration/DirectLlm
57
- chat.with_instructions(SYSTEM_PROMPT)
58
- chat.ask(prompt)
55
+ response = Legion::LLM.chat( # rubocop:disable Legion/HelperMigration/DirectLlm
56
+ message: [
57
+ { role: 'system', content: SYSTEM_PROMPT },
58
+ { role: 'user', content: prompt }
59
+ ],
60
+ caller: { extension: 'lex-agentic-language', mode: :narrator }
61
+ )
62
+ content = extract_response_content(response, prompt)
59
63
  end
64
+ ::Struct.new(:content).new(content) if content
60
65
  end
61
66
 
62
67
  private_class_method :llm_ask
63
68
 
69
+ def extract_response_content(response, prompt = nil)
70
+ return response.strip if response.is_a?(String)
71
+ return response.content if response.respond_to?(:content)
72
+
73
+ if response.respond_to?(:ask)
74
+ response.with_instructions(SYSTEM_PROMPT) if response.respond_to?(:with_instructions)
75
+ asked = response.ask(prompt)
76
+ return extract_response_content(asked)
77
+ end
78
+ return nil unless response.is_a?(Hash)
79
+
80
+ response[:content] || response['content'] ||
81
+ response.dig(:message, :content) || response.dig('message', 'content') ||
82
+ response[:response] || response['response']
83
+ end
84
+
85
+ private_class_method :extract_response_content
86
+
64
87
  def pipeline_available?
65
88
  !!(defined?(Legion::LLM::Pipeline::GaiaCaller) &&
66
89
  Legion::LLM.respond_to?(:pipeline_enabled?) &&
@@ -187,7 +187,7 @@ module Legion
187
187
  r = cognitive_state[:reflection] || {}
188
188
  {
189
189
  health: r[:health] || 1.0,
190
- pending_adaptations: r[:pending_adaptations] || 0
190
+ pending_adaptations: r[:pending_adaptations] || r[:unacted_count] || 0
191
191
  }
192
192
  end
193
193
 
@@ -4,7 +4,7 @@ module Legion
4
4
  module Extensions
5
5
  module Agentic
6
6
  module Language
7
- VERSION = '0.1.10'
7
+ VERSION = '0.1.11'
8
8
  end
9
9
  end
10
10
  end
@@ -49,27 +49,32 @@ RSpec.describe Legion::Extensions::Agentic::Language::Narrator::Helpers::LlmEnha
49
49
 
50
50
  context 'when LLM returns a response' do
51
51
  it 'returns the response content as a string' do
52
- response_double = double('response', content: 'I feel alert and curious about what lies ahead.')
53
- chat_double = double('chat')
54
- allow(chat_double).to receive(:with_instructions)
55
- allow(chat_double).to receive(:ask).and_return(response_double)
56
52
  llm_double = double('Legion::LLM', started?: true)
57
- allow(llm_double).to receive(:chat).and_return(chat_double)
53
+ allow(llm_double).to receive(:chat).and_return(content: 'I feel alert and curious about what lies ahead.')
58
54
  stub_const('Legion::LLM', llm_double)
59
55
 
60
56
  result = described_class.narrate(sections_data: sections_data)
61
57
  expect(result).to be_a(String)
62
58
  expect(result).to eq('I feel alert and curious about what lies ahead.')
63
59
  end
60
+
61
+ it 'sends system instructions and user prompt in the native chat message payload' do
62
+ llm_double = double('Legion::LLM', started?: true)
63
+ allow(llm_double).to receive(:chat).and_return(content: 'native narrative')
64
+ stub_const('Legion::LLM', llm_double)
65
+
66
+ described_class.narrate(sections_data: sections_data)
67
+
68
+ expect(llm_double).to have_received(:chat)
69
+ .with(hash_including(message: array_including(hash_including(role: 'system'),
70
+ hash_including(role: 'user'))))
71
+ end
64
72
  end
65
73
 
66
74
  context 'when LLM returns nil response' do
67
75
  it 'returns nil' do
68
- chat_double = double('chat')
69
- allow(chat_double).to receive(:with_instructions)
70
- allow(chat_double).to receive(:ask).and_return(nil)
71
76
  llm_double = double('Legion::LLM', started?: true)
72
- allow(llm_double).to receive(:chat).and_return(chat_double)
77
+ allow(llm_double).to receive(:chat).and_return(nil)
73
78
  stub_const('Legion::LLM', llm_double)
74
79
 
75
80
  result = described_class.narrate(sections_data: sections_data)
@@ -104,12 +109,8 @@ RSpec.describe Legion::Extensions::Agentic::Language::Narrator::Helpers::LlmEnha
104
109
 
105
110
  context 'with empty sections_data' do
106
111
  it 'does not raise and returns a string when LLM responds' do
107
- response_double = double('response', content: 'Everything seems quiet.')
108
- chat_double = double('chat')
109
- allow(chat_double).to receive(:with_instructions)
110
- allow(chat_double).to receive(:ask).and_return(response_double)
111
112
  llm_double = double('Legion::LLM', started?: true)
112
- allow(llm_double).to receive(:chat).and_return(chat_double)
113
+ allow(llm_double).to receive(:chat).and_return(content: 'Everything seems quiet.')
113
114
  stub_const('Legion::LLM', llm_double)
114
115
 
115
116
  result = described_class.narrate(sections_data: {})
@@ -33,6 +33,15 @@ RSpec.describe Legion::Extensions::Agentic::Language::Narrator::Runners::Narrato
33
33
  end
34
34
  end
35
35
 
36
+ describe 'reflection data for LLM narration' do
37
+ it 'maps reflection unacted_count to pending_adaptations' do
38
+ data = client.send(:llm_reflection_data, reflection: { health: 0.7, unacted_count: 3 })
39
+
40
+ expect(data[:health]).to eq(0.7)
41
+ expect(data[:pending_adaptations]).to eq(3)
42
+ end
43
+ end
44
+
36
45
  describe '#recent_entries' do
37
46
  before do
38
47
  5.times { |i| client.narrate(tick_results: { emotional_evaluation: { valence: i * 0.2 } }, cognitive_state: {}) }
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: lex-agentic-language
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.10
4
+ version: 0.1.11
5
5
  platform: ruby
6
6
  authors:
7
7
  - Esity