lex-cognitive-chunking 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: c64d452ab027646fd43adc461bf72e57c4adc49fec07d02dc3765c3ff056ac9a
4
+ data.tar.gz: 47e6316149c761d5c13456c84d8e39013d6b5eac586cc5fe6eec297487867aca
5
+ SHA512:
6
+ metadata.gz: efcc6bf5d344b740c61ae0b138bcbbd1bcd9bdeceb839ebef63fe89b3be321ff33b9d2a11a4e45e5ddcafaa2db7890eaba5b421162cce8722a6673d10359036c
7
+ data.tar.gz: 319265447be25637b1961dac45b9f66e1133415c06ba42c3aca5ddb53f77dead0e5d9373f13ccc0d214e12d690b855180823dbb5fecc72c085ca2faa8fe1adc2
data/Gemfile ADDED
@@ -0,0 +1,15 @@
1
# frozen_string_literal: true

source 'https://rubygems.org'

# Runtime dependencies are declared in the gemspec.
gemspec

group :test do
  gem 'rake'
  gem 'rspec', '~> 3.13'
  gem 'rspec_junit_formatter'
  gem 'rubocop', '~> 1.75', require: false
  gem 'rubocop-rspec', require: false
  gem 'simplecov'
end

# NOTE(review): path dependency assumes a sibling checkout of legion-gaia
# (../../legion-gaia) — confirm this layout exists in CI environments.
gem 'legion-gaia', path: '../../legion-gaia'
data/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Esity
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,42 @@
1
+ # lex-cognitive-chunking
2
+
3
+ LEX extension for LegionIO implementing George Miller's 7+/-2 cognitive chunking principle. Groups individual information items into meaningful chunks and models working memory capacity constraints.
4
+
5
+ ## What It Does
6
+
7
+ Raw information items are pooled and then grouped into named chunks. Chunks can be merged hierarchically (items -> concepts -> schemas) to reduce working memory footprint. A bounded working memory holds up to 7 chunks (capacity varies +/- 2); loading beyond capacity triggers an overloaded state. Recall strength decays over time; reinforcement on access keeps important chunks alive.
8
+
9
+ ## Usage
10
+
11
+ ```ruby
12
+ client = Legion::Extensions::CognitiveChunking::Client.new
13
+
14
+ r1 = client.add_item(content: 'e4 e5', domain: :chess)
15
+ r2 = client.add_item(content: 'Nf3 Nc6', domain: :chess)
16
+ r3 = client.add_item(content: 'Bb5', domain: :chess)
17
+
18
+ chunk = client.create_chunk(
19
+ label: 'Ruy Lopez opening',
20
+ item_ids: [r1[:item_id], r2[:item_id], r3[:item_id]]
21
+ )
22
+
23
+ client.load_to_working_memory(chunk_id: chunk[:chunk_id])
24
+ client.working_memory_status
25
+ # => { success: true, size: 1, capacity: 7, load: 0.1428571429, label: :empty, overloaded: false }
26
+
27
+ client.decay_all # call periodically for recall decay
28
+ client.reinforce_chunk(chunk_id: chunk[:chunk_id]) # boost on access
29
+ client.chunking_report
30
+ ```
31
+
32
+ ## Development
33
+
34
+ ```bash
35
+ bundle install
36
+ bundle exec rspec
37
+ bundle exec rubocop
38
+ ```
39
+
40
+ ## License
41
+
42
+ MIT
@@ -0,0 +1,29 @@
1
# frozen_string_literal: true

require_relative 'lib/legion/extensions/cognitive_chunking/version'

# Gem packaging metadata for the lex-cognitive-chunking LEX extension.
Gem::Specification.new do |spec|
  spec.name = 'lex-cognitive-chunking'
  spec.version = Legion::Extensions::CognitiveChunking::VERSION
  spec.authors = ['Esity']
  spec.email = ['matthewdiverson@gmail.com']

  spec.summary = 'LEX Cognitive Chunking'
  spec.description = "George Miller's 7+/-2 principle: groups information items into hierarchical chunks to model working memory capacity"
  spec.homepage = 'https://github.com/LegionIO/lex-cognitive-chunking'
  spec.license = 'MIT'
  spec.required_ruby_version = '>= 3.4'

  spec.metadata['homepage_uri'] = spec.homepage
  spec.metadata['source_code_uri'] = 'https://github.com/LegionIO/lex-cognitive-chunking'
  spec.metadata['documentation_uri'] = 'https://github.com/LegionIO/lex-cognitive-chunking'
  # NOTE(review): changelog_uri points at the repository root — confirm a
  # CHANGELOG exists there, or point this at a real changelog file.
  spec.metadata['changelog_uri'] = 'https://github.com/LegionIO/lex-cognitive-chunking'
  spec.metadata['bug_tracker_uri'] = 'https://github.com/LegionIO/lex-cognitive-chunking/issues'
  spec.metadata['rubygems_mfa_required'] = 'true'

  # Package lib/ and spec/ plus the top-level project files.
  spec.files = Dir.chdir(File.expand_path(__dir__)) do
    Dir.glob('{lib,spec}/**/*') + %w[lex-cognitive-chunking.gemspec Gemfile LICENSE README.md]
  end
  spec.require_paths = ['lib']
  spec.add_development_dependency 'legion-gaia'
end
@@ -0,0 +1,17 @@
1
# frozen_string_literal: true

require 'legion/extensions/cognitive_chunking/runners/cognitive_chunking'

module Legion
  module Extensions
    module CognitiveChunking
      # Standalone entry point for using the extension outside a full Legion
      # runtime: mixes in all runner methods and eagerly builds the engine
      # that the runners' memoized +chunking_engine+ accessor will reuse.
      class Client
        include Runners::CognitiveChunking

        # Accepts (and ignores) arbitrary keyword options so callers can pass
        # a shared construction signature used by other LEX clients.
        def initialize(**)
          # Pre-seeds the ivar read by Runners::CognitiveChunking#chunking_engine.
          @chunking_engine = Helpers::ChunkingEngine.new
        end
      end
    end
  end
end
@@ -0,0 +1,88 @@
1
# frozen_string_literal: true

module Legion
  module Extensions
    module CognitiveChunking
      module Helpers
        # A named grouping of information items — the "chunk" of George
        # Miller's 7+/-2 model. Tracks coherence, recall strength and access
        # statistics, and may reference sub-chunks to form a hierarchy
        # (items -> concepts -> schemas).
        class Chunk
          include Constants

          attr_reader :id, :label, :item_ids, :sub_chunk_ids, :coherence, :recall_strength,
                      :access_count, :created_at

          # label    — human-readable name for the grouping.
          # item_ids — ids of the InformationItems initially grouped here
          #            (copied defensively so callers can reuse their array).
          def initialize(label:, item_ids: [])
            @id = ::SecureRandom.uuid
            @label = label
            @item_ids = item_ids.dup
            @sub_chunk_ids = []
            @coherence = DEFAULT_COHERENCE
            @recall_strength = 0.8 # fresh chunks start with strong recall
            @access_count = 0
            @created_at = Time.now.utc
          end

          # Adds an item id; duplicates are silently ignored.
          def add_item!(item_id:)
            return if @item_ids.include?(item_id)

            @item_ids << item_id
          end

          # Removes an item id (no-op when absent).
          def remove_item!(item_id:)
            @item_ids.delete(item_id)
          end

          # Records a child chunk id; duplicates are silently ignored.
          def add_sub_chunk!(chunk_id:)
            return if @sub_chunk_ids.include?(chunk_id)

            @sub_chunk_ids << chunk_id
          end

          # Boosts coherence and recall on access; both saturate at 1.0.
          def reinforce!
            @access_count += 1
            @coherence = (@coherence + COHERENCE_BOOST).round(10).clamp(0.0, 1.0)
            @recall_strength = (@recall_strength + RECALL_BOOST).round(10).clamp(0.0, 1.0)
          end

          # One decay tick: recall and coherence fade, floored at 0.0.
          def decay!
            @recall_strength = (@recall_strength - RECALL_DECAY).round(10).clamp(0.0, 1.0)
            @coherence = (@coherence - COHERENCE_DECAY).round(10).clamp(0.0, 1.0)
          end

          # Number of directly held items.
          def size
            @item_ids.size
          end

          # True when this chunk was built by merging other chunks.
          def hierarchical?
            !@sub_chunk_ids.empty?
          end

          # Qualitative coherence bucket (see Constants::COHERENCE_LABELS).
          def coherence_label
            bucket_label(COHERENCE_LABELS, @coherence, :unchunked)
          end

          # Qualitative recall bucket (see Constants::RECALL_LABELS).
          def recall_label
            bucket_label(RECALL_LABELS, @recall_strength, :forgotten)
          end

          # Qualitative size bucket (see Constants::CHUNK_SIZE_LABELS).
          def size_label
            bucket_label(CHUNK_SIZE_LABELS, size, :micro)
          end

          # Plain-hash snapshot of the chunk's state (collections are copied).
          def to_h
            {
              id: @id,
              label: @label,
              item_ids: @item_ids.dup,
              sub_chunk_ids: @sub_chunk_ids.dup,
              coherence: @coherence.round(10),
              recall_strength: @recall_strength.round(10),
              access_count: @access_count,
              created_at: @created_at,
              size: size,
              hierarchical: hierarchical?,
              coherence_label: coherence_label,
              recall_label: recall_label,
              size_label: size_label
            }
          end

          private

          # First label whose range covers +value+; +fallback+ when none match.
          # Tables are scanned in insertion order, which is significant.
          def bucket_label(table, value, fallback)
            match = table.find { |range, _| range.cover?(value) }
            match ? match.last : fallback
          end
        end
      end
    end
  end
end
@@ -0,0 +1,143 @@
1
# frozen_string_literal: true

module Legion
  module Extensions
    module CognitiveChunking
      module Helpers
        # Core state machine for cognitive chunking: pools raw information
        # items, groups them into (possibly hierarchical) chunks, and models
        # a bounded working memory of chunk ids.
        #
        # Fix over the previous revision: +load_to_working_memory+ now honours
        # CAPACITY_VARIANCE (Miller's "+/- 2"), accepting up to
        # WORKING_MEMORY_CAPACITY + CAPACITY_VARIANCE chunks. Previously the
        # variance constant was unused and loading was rejected at exactly the
        # nominal capacity, which made +working_memory_overloaded?+ (size >
        # capacity) unreachable — contradicting the documented behaviour that
        # loading beyond capacity triggers an overloaded state.
        class ChunkingEngine
          include Constants

          attr_reader :items, :chunks, :working_memory

          def initialize
            @items = {}           # item_id => InformationItem
            @chunks = {}          # chunk_id => Chunk
            @working_memory = []  # ordered chunk ids currently "in mind"
          end

          # Registers a raw information item. Fails with :capacity_exceeded
          # once MAX_ITEMS is reached.
          def add_item(content:, domain: :general)
            return { success: false, error: :capacity_exceeded } if @items.size >= MAX_ITEMS

            item = InformationItem.new(content: content, domain: domain)
            @items[item.id] = item
            { success: true, item_id: item.id, item: item.to_h }
          end

          # Groups known items under a labelled chunk. Unknown ids are
          # silently dropped; fails when none of the given ids are known.
          def create_chunk(label:, item_ids:)
            return { success: false, error: :capacity_exceeded } if @chunks.size >= MAX_CHUNKS
            return { success: false, error: :empty_item_ids } if item_ids.empty?

            valid_ids = item_ids.select { |id| @items.key?(id) }
            return { success: false, error: :no_valid_items } if valid_ids.empty?

            chunk = Chunk.new(label: label, item_ids: valid_ids)
            @chunks[chunk.id] = chunk
            # valid_ids were filtered against @items above, so lookups are safe.
            valid_ids.each { |id| @items[id].assign_to_chunk!(chunk_id: chunk.id) }

            { success: true, chunk_id: chunk.id, chunk: chunk.to_h }
          end

          # Merges two or more existing chunks into a hierarchical parent
          # chunk holding the union of their items. Source chunks remain.
          def merge_chunks(chunk_ids:, label:)
            return { success: false, error: :capacity_exceeded } if @chunks.size >= MAX_CHUNKS
            return { success: false, error: :insufficient_chunks } if chunk_ids.size < 2

            valid_chunk_ids = chunk_ids.select { |id| @chunks.key?(id) }
            return { success: false, error: :no_valid_chunks } if valid_chunk_ids.size < 2

            merged_item_ids = valid_chunk_ids.flat_map { |id| @chunks[id].item_ids }.uniq
            parent = Chunk.new(label: label, item_ids: merged_item_ids)
            valid_chunk_ids.each { |id| parent.add_sub_chunk!(chunk_id: id) }
            @chunks[parent.id] = parent

            { success: true, chunk_id: parent.id, chunk: parent.to_h, merged_from: valid_chunk_ids }
          end

          # Loads a chunk into working memory and reinforces it. Accepts up
          # to WORKING_MEMORY_CAPACITY + CAPACITY_VARIANCE chunks (7 + 2);
          # beyond the nominal capacity the memory reports itself overloaded.
          def load_to_working_memory(chunk_id:)
            return { success: false, error: :chunk_not_found } unless @chunks.key?(chunk_id)
            return { success: false, error: :already_loaded } if @working_memory.include?(chunk_id)
            if @working_memory.size >= WORKING_MEMORY_CAPACITY + CAPACITY_VARIANCE
              return { success: false, error: :capacity_exceeded }
            end

            @working_memory << chunk_id
            @chunks[chunk_id].reinforce!
            { success: true, chunk_id: chunk_id, working_memory_size: @working_memory.size }
          end

          # Removes a chunk from working memory (does not touch the chunk).
          def unload_from_working_memory(chunk_id:)
            return { success: false, error: :not_in_working_memory } unless @working_memory.include?(chunk_id)

            @working_memory.delete(chunk_id)
            { success: true, chunk_id: chunk_id, working_memory_size: @working_memory.size }
          end

          # Occupancy as a fraction of the nominal capacity (may exceed 1.0
          # when the +/- variance headroom is in use).
          def working_memory_load
            return 0.0 if WORKING_MEMORY_CAPACITY.zero?

            (@working_memory.size.to_f / WORKING_MEMORY_CAPACITY).round(10)
          end

          # True once more than the nominal 7 chunks are held.
          def working_memory_overloaded?
            @working_memory.size > WORKING_MEMORY_CAPACITY
          end

          # Applies one decay tick to every chunk (call periodically).
          def decay_all!
            @chunks.each_value(&:decay!)
            { success: true, chunks_decayed: @chunks.size }
          end

          # Boosts a chunk's coherence/recall as if it were accessed.
          def reinforce_chunk(chunk_id:)
            return { success: false, error: :chunk_not_found } unless @chunks.key?(chunk_id)

            @chunks[chunk_id].reinforce!
            { success: true, chunk_id: chunk_id, chunk: @chunks[chunk_id].to_h }
          end

          # Up to +limit+ chunk snapshots ordered by descending recall.
          def strongest_chunks(limit: 10)
            @chunks.values
                   .sort_by { |c| -c.recall_strength }
                   .first(limit)
                   .map(&:to_h)
          end

          # Snapshots of items not yet assigned to any chunk.
          def unchunked_items
            @items.values.reject(&:chunked?).map(&:to_h)
          end

          # Fraction of pooled items that have been chunked (0.0 when empty).
          def chunking_efficiency
            return 0.0 if @items.empty?

            chunked_count = @items.values.count(&:chunked?)
            (chunked_count.to_f / @items.size).round(10)
          end

          # Aggregate report: pool sizes, efficiency, working-memory state
          # (with qualitative load label) and the five strongest chunks.
          def chunking_report
            wm_load = working_memory_load
            capacity_label = CAPACITY_LABELS.find { |range, _| range.cover?(wm_load) }&.last || :empty

            {
              total_items: @items.size,
              total_chunks: @chunks.size,
              unchunked_items: unchunked_items.size,
              chunking_efficiency: chunking_efficiency,
              working_memory: {
                current: @working_memory.size,
                capacity: WORKING_MEMORY_CAPACITY,
                load: wm_load,
                label: capacity_label,
                chunk_ids: @working_memory.dup
              },
              strongest_chunks: strongest_chunks(limit: 5)
            }
          end

          # Full plain-hash snapshot of the engine's state.
          def to_h
            {
              items: @items.transform_values(&:to_h),
              chunks: @chunks.transform_values(&:to_h),
              working_memory: @working_memory.dup
            }
          end
        end
      end
    end
  end
end
@@ -0,0 +1,52 @@
1
# frozen_string_literal: true

module Legion
  module Extensions
    module CognitiveChunking
      module Helpers
        # Tunable limits and classification tables for the chunking engine.
        #
        # NOTE: each *_LABELS hash is scanned in insertion order via
        # Enumerable#find, and the beginless/endless ranges overlap at their
        # boundaries (e.g. both (3...5) and (..3) cover 3) — the first match
        # wins, so do NOT reorder the entries.
        module Constants
          MAX_ITEMS = 500                # hard cap on pooled information items
          MAX_CHUNKS = 200               # hard cap on stored chunks
          WORKING_MEMORY_CAPACITY = 7 # Miller's magic number
          CAPACITY_VARIANCE = 2 # +/- 2
          DEFAULT_COHERENCE = 0.5        # starting coherence of a new chunk
          COHERENCE_BOOST = 0.08         # coherence gain per reinforcement
          COHERENCE_DECAY = 0.03         # coherence loss per decay tick
          RECALL_DECAY = 0.02            # recall loss per decay tick
          RECALL_BOOST = 0.1             # recall gain per reinforcement

          # Item count -> qualitative chunk size.
          CHUNK_SIZE_LABELS = {
            (7..) => :large,
            (5...7) => :medium,
            (3...5) => :small,
            (..3) => :micro
          }.freeze

          # Coherence score (0..1) -> qualitative label.
          COHERENCE_LABELS = {
            (0.8..) => :tightly_chunked,
            (0.6...0.8) => :well_chunked,
            (0.4...0.6) => :loosely_chunked,
            (0.2...0.4) => :weakly_chunked,
            (..0.2) => :unchunked
          }.freeze

          # Recall strength (0..1) -> qualitative label.
          RECALL_LABELS = {
            (0.8..) => :instant,
            (0.6...0.8) => :easy,
            (0.4...0.6) => :moderate,
            (0.2...0.4) => :difficult,
            (..0.2) => :forgotten
          }.freeze

          # Working-memory load fraction -> qualitative label.
          CAPACITY_LABELS = {
            (0.8..) => :overloaded,
            (0.6...0.8) => :near_capacity,
            (0.4...0.6) => :comfortable,
            (0.2...0.4) => :spacious,
            (..0.2) => :empty
          }.freeze
        end
      end
    end
  end
end
@@ -0,0 +1,47 @@
1
# frozen_string_literal: true

module Legion
  module Extensions
    module CognitiveChunking
      module Helpers
        # A single raw piece of information — the atom of the chunking model.
        # Items are pooled by the engine and later grouped into Chunks; each
        # item records whether (and to which chunk) it has been assigned.
        class InformationItem
          attr_reader :id, :content, :domain, :chunk_id, :created_at

          # content — arbitrary payload describing the information.
          # domain  — grouping hint (e.g. :chess); defaults to :general.
          def initialize(content:, domain: :general)
            @id = ::SecureRandom.uuid
            @content = content
            @domain = domain
            @assigned = false
            @chunk_id = nil
            @created_at = Time.now.utc
          end

          # True once the item has been placed into a chunk.
          def chunked?
            @assigned
          end

          # Marks the item as belonging to the given chunk.
          def assign_to_chunk!(chunk_id:)
            @assigned = true
            @chunk_id = chunk_id
          end

          # Detaches the item from its chunk, returning it to the free pool.
          def unchunk!
            @assigned = false
            @chunk_id = nil
          end

          # Plain-hash snapshot of the item's state.
          def to_h
            {
              id: @id,
              content: @content,
              domain: @domain,
              chunked: @assigned,
              chunk_id: @chunk_id,
              created_at: @created_at
            }
          end
        end
      end
    end
  end
end
@@ -0,0 +1,107 @@
1
# frozen_string_literal: true

module Legion
  module Extensions
    module CognitiveChunking
      module Runners
        # Task-facing wrappers around Helpers::ChunkingEngine. Every runner
        # resolves an engine (an explicit override via +engine:+ or the
        # memoized default), delegates the work, emits a debug log line, and
        # returns the engine's result hash.
        module CognitiveChunking
          include Legion::Extensions::Helpers::Lex if Legion::Extensions.const_defined?(:Helpers) &&
                                                      Legion::Extensions::Helpers.const_defined?(:Lex)

          # Pools one raw information item.
          def add_item(content:, domain: :general, engine: nil, **)
            result = active_engine(engine).add_item(content: content, domain: domain)
            Legion::Logging.debug "[cognitive_chunking] add_item: domain=#{domain} success=#{result[:success]}"
            result
          end

          # Groups existing items under a labelled chunk.
          def create_chunk(label:, item_ids:, engine: nil, **)
            result = active_engine(engine).create_chunk(label: label, item_ids: item_ids)
            Legion::Logging.debug "[cognitive_chunking] create_chunk: label=#{label} items=#{item_ids.size} success=#{result[:success]}"
            result
          end

          # Merges several chunks into one hierarchical parent.
          def merge_chunks(chunk_ids:, label:, engine: nil, **)
            result = active_engine(engine).merge_chunks(chunk_ids: chunk_ids, label: label)
            Legion::Logging.debug "[cognitive_chunking] merge_chunks: label=#{label} sources=#{chunk_ids.size} success=#{result[:success]}"
            result
          end

          # Places a chunk into bounded working memory.
          def load_to_working_memory(chunk_id:, engine: nil, **)
            result = active_engine(engine).load_to_working_memory(chunk_id: chunk_id)
            Legion::Logging.debug "[cognitive_chunking] load_wm: chunk_id=#{chunk_id} wm_size=#{result[:working_memory_size]} success=#{result[:success]}"
            result
          end

          # Removes a chunk from working memory.
          def unload_from_working_memory(chunk_id:, engine: nil, **)
            result = active_engine(engine).unload_from_working_memory(chunk_id: chunk_id)
            Legion::Logging.debug "[cognitive_chunking] unload_wm: chunk_id=#{chunk_id} success=#{result[:success]}"
            result
          end

          # Snapshot of working-memory occupancy with a qualitative label.
          def working_memory_status(engine: nil, **)
            eng = active_engine(engine)
            wm_load = eng.working_memory_load
            wm_label = Helpers::Constants::CAPACITY_LABELS.find { |range, _| range.cover?(wm_load) }&.last || :empty

            Legion::Logging.debug "[cognitive_chunking] wm_status: load=#{wm_load.round(2)} label=#{wm_label}"
            {
              success: true,
              size: eng.working_memory.size,
              capacity: Helpers::Constants::WORKING_MEMORY_CAPACITY,
              load: wm_load,
              label: wm_label,
              overloaded: eng.working_memory_overloaded?
            }
          end

          # Applies one recall/coherence decay tick to every chunk.
          def decay_all(engine: nil, **)
            result = active_engine(engine).decay_all!
            Legion::Logging.debug "[cognitive_chunking] decay_all: chunks_decayed=#{result[:chunks_decayed]}"
            result
          end

          # Boosts a chunk's coherence/recall as if it were accessed.
          def reinforce_chunk(chunk_id:, engine: nil, **)
            result = active_engine(engine).reinforce_chunk(chunk_id: chunk_id)
            Legion::Logging.debug "[cognitive_chunking] reinforce_chunk: chunk_id=#{chunk_id} success=#{result[:success]}"
            result
          end

          # Full engine report wrapped in a success envelope.
          def chunking_report(engine: nil, **)
            report = active_engine(engine).chunking_report
            eff = report[:chunking_efficiency].round(2)
            Legion::Logging.debug "[cognitive_chunking] report: items=#{report[:total_items]} chunks=#{report[:total_chunks]} efficiency=#{eff}"
            { success: true, report: report }
          end

          # Chunks ranked by recall strength, strongest first.
          def strongest_chunks(limit: 10, engine: nil, **)
            chunks = active_engine(engine).strongest_chunks(limit: limit)
            Legion::Logging.debug "[cognitive_chunking] strongest_chunks: count=#{chunks.size}"
            { success: true, chunks: chunks }
          end

          # Items not yet assigned to any chunk.
          def unchunked_items(engine: nil, **)
            items = active_engine(engine).unchunked_items
            Legion::Logging.debug "[cognitive_chunking] unchunked_items: count=#{items.size}"
            { success: true, items: items }
          end

          private

          # Explicit engine override, or the lazily built default.
          def active_engine(override)
            override || chunking_engine
          end

          # Memoized default engine (Client#initialize may pre-seed the ivar).
          def chunking_engine
            @chunking_engine ||= Helpers::ChunkingEngine.new
          end
        end
      end
    end
  end
end
@@ -0,0 +1,9 @@
1
# frozen_string_literal: true

module Legion
  module Extensions
    module CognitiveChunking
      # Gem version, referenced by the gemspec (semantic versioning).
      VERSION = '0.1.0'
    end
  end
end
@@ -0,0 +1,17 @@
1
# frozen_string_literal: true

# Entry point for the lex-cognitive-chunking extension: loads the helper
# classes and the runner module, then hooks into the Legion extension core
# when it is present (i.e. when running inside a full LegionIO runtime).

require 'securerandom'
require 'legion/extensions/cognitive_chunking/version'
require 'legion/extensions/cognitive_chunking/helpers/constants'
require 'legion/extensions/cognitive_chunking/helpers/information_item'
require 'legion/extensions/cognitive_chunking/helpers/chunk'
require 'legion/extensions/cognitive_chunking/helpers/chunking_engine'
require 'legion/extensions/cognitive_chunking/runners/cognitive_chunking'

module Legion
  module Extensions
    module CognitiveChunking
      # Registers the extension with Legion's core only when it is loaded,
      # so the gem stays usable standalone (e.g. via Client in tests).
      extend Legion::Extensions::Core if Legion::Extensions.const_defined? :Core
    end
  end
end
@@ -0,0 +1,67 @@
1
# frozen_string_literal: true

require 'legion/extensions/cognitive_chunking/client'

# Behavioural specs for the standalone Client facade: verifies the runner
# surface, state retention across calls, and the end-to-end chunking flows.
RSpec.describe Legion::Extensions::CognitiveChunking::Client do
  let(:client) { described_class.new }

  it 'responds to all runner methods' do
    %i[add_item create_chunk merge_chunks load_to_working_memory
       unload_from_working_memory working_memory_status decay_all
       reinforce_chunk chunking_report strongest_chunks unchunked_items].each do |method|
      expect(client).to respond_to(method)
    end
  end

  it 'maintains state across calls using internal engine' do
    client.add_item(content: 'pawn structure')
    client.add_item(content: 'knight outpost')
    report = client.chunking_report
    expect(report[:report][:total_items]).to eq(2)
  end

  # Happy path: pool items -> chunk them -> load into working memory -> report.
  it 'runs a full chunking workflow' do
    r1 = client.add_item(content: 'e4 e5', domain: :chess)
    r2 = client.add_item(content: 'Nf3 Nc6', domain: :chess)
    r3 = client.add_item(content: 'Bb5', domain: :chess)

    chunk = client.create_chunk(
      label: 'Ruy Lopez opening',
      item_ids: [r1[:item_id], r2[:item_id], r3[:item_id]]
    )
    expect(chunk[:success]).to be true

    loaded = client.load_to_working_memory(chunk_id: chunk[:chunk_id])
    expect(loaded[:success]).to be true
    expect(loaded[:working_memory_size]).to eq(1)

    status = client.working_memory_status
    expect(status[:size]).to eq(1)
    expect(status[:load]).to be > 0.0

    report = client.chunking_report
    expect(report[:report][:chunking_efficiency]).to eq(1.0)
    expect(report[:report][:unchunked_items]).to eq(0)
  end

  it 'supports merge of two chunks into a hierarchy' do
    ra = client.add_item(content: 'opening theory')[:item_id]
    rb = client.add_item(content: 'endgame theory')[:item_id]
    ca = client.create_chunk(label: 'Opening', item_ids: [ra])[:chunk_id]
    cb = client.create_chunk(label: 'Endgame', item_ids: [rb])[:chunk_id]

    merged = client.merge_chunks(chunk_ids: [ca, cb], label: 'Chess Theory')
    expect(merged[:success]).to be true
    expect(merged[:chunk][:hierarchical]).to be true
  end

  it 'reports unchunked items correctly' do
    client.add_item(content: 'orphan item')
    r2 = client.add_item(content: 'grouped item')[:item_id]
    client.create_chunk(label: 'group', item_ids: [r2])

    result = client.unchunked_items
    expect(result[:items].size).to eq(1)
    expect(result[:items].first[:content]).to eq('orphan item')
  end
end