dspy 0.34.2 → 0.34.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +4 -4
  2. data/lib/dspy/chain_of_thought.rb +3 -2
  3. data/lib/dspy/context.rb +17 -1
  4. data/lib/dspy/evals/version.rb +1 -1
  5. data/lib/dspy/evals.rb +42 -31
  6. data/lib/dspy/events.rb +2 -3
  7. data/lib/dspy/example.rb +1 -1
  8. data/lib/dspy/lm/adapter.rb +39 -0
  9. data/lib/dspy/lm/json_strategy.rb +37 -2
  10. data/lib/dspy/lm/message.rb +1 -1
  11. data/lib/dspy/lm/response.rb +1 -1
  12. data/lib/dspy/lm/usage.rb +4 -4
  13. data/lib/dspy/lm.rb +9 -49
  14. data/lib/dspy/mixins/type_coercion.rb +189 -30
  15. data/lib/dspy/module.rb +70 -25
  16. data/lib/dspy/predict.rb +32 -5
  17. data/lib/dspy/prediction.rb +15 -57
  18. data/lib/dspy/prompt.rb +50 -30
  19. data/lib/dspy/propose/dataset_summary_generator.rb +1 -1
  20. data/lib/dspy/propose/grounded_proposer.rb +3 -3
  21. data/lib/dspy/re_act.rb +0 -162
  22. data/lib/dspy/registry/signature_registry.rb +3 -3
  23. data/lib/dspy/ruby_llm/lm/adapters/ruby_llm_adapter.rb +1 -27
  24. data/lib/dspy/schema/sorbet_json_schema.rb +7 -6
  25. data/lib/dspy/schema/version.rb +1 -1
  26. data/lib/dspy/schema_adapters.rb +1 -1
  27. data/lib/dspy/storage/program_storage.rb +2 -2
  28. data/lib/dspy/structured_outputs_prompt.rb +3 -3
  29. data/lib/dspy/teleprompt/utils.rb +2 -2
  30. data/lib/dspy/tools/github_cli_toolset.rb +7 -7
  31. data/lib/dspy/tools/text_processing_toolset.rb +2 -2
  32. data/lib/dspy/tools/toolset.rb +1 -1
  33. data/lib/dspy/version.rb +1 -1
  34. data/lib/dspy.rb +1 -4
  35. metadata +1 -26
  36. data/lib/dspy/events/subscriber_mixin.rb +0 -79
  37. data/lib/dspy/events/subscribers.rb +0 -43
  38. data/lib/dspy/memory/embedding_engine.rb +0 -68
  39. data/lib/dspy/memory/in_memory_store.rb +0 -216
  40. data/lib/dspy/memory/local_embedding_engine.rb +0 -244
  41. data/lib/dspy/memory/memory_compactor.rb +0 -298
  42. data/lib/dspy/memory/memory_manager.rb +0 -266
  43. data/lib/dspy/memory/memory_record.rb +0 -163
  44. data/lib/dspy/memory/memory_store.rb +0 -90
  45. data/lib/dspy/memory.rb +0 -30
  46. data/lib/dspy/tools/memory_toolset.rb +0 -117
@@ -1,298 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- require 'sorbet-runtime'
4
-
5
- module DSPy
6
- module Memory
7
- # Simple memory compaction system with inline triggers
8
- # Handles deduplication, relevance pruning, and conflict resolution
9
- class MemoryCompactor
10
- extend T::Sig
11
-
12
- # Compaction thresholds
13
- DEFAULT_MAX_MEMORIES = 1000
14
- DEFAULT_MAX_AGE_DAYS = 90
15
- DEFAULT_SIMILARITY_THRESHOLD = 0.95
16
- DEFAULT_LOW_ACCESS_THRESHOLD = 0.1
17
-
18
- sig { returns(Integer) }
19
- attr_reader :max_memories
20
-
21
- sig { returns(Integer) }
22
- attr_reader :max_age_days
23
-
24
- sig { returns(Float) }
25
- attr_reader :similarity_threshold
26
-
27
- sig { returns(Float) }
28
- attr_reader :low_access_threshold
29
-
30
- sig do
31
- params(
32
- max_memories: Integer,
33
- max_age_days: Integer,
34
- similarity_threshold: Float,
35
- low_access_threshold: Float
36
- ).void
37
- end
38
- def initialize(
39
- max_memories: DEFAULT_MAX_MEMORIES,
40
- max_age_days: DEFAULT_MAX_AGE_DAYS,
41
- similarity_threshold: DEFAULT_SIMILARITY_THRESHOLD,
42
- low_access_threshold: DEFAULT_LOW_ACCESS_THRESHOLD
43
- )
44
- @max_memories = max_memories
45
- @max_age_days = max_age_days
46
- @similarity_threshold = similarity_threshold
47
- @low_access_threshold = low_access_threshold
48
- end
49
-
50
- # Main compaction entry point - checks all triggers and compacts if needed
51
- sig { params(store: MemoryStore, embedding_engine: EmbeddingEngine, user_id: T.nilable(String)).returns(T::Hash[Symbol, T.untyped]) }
52
- def compact_if_needed!(store, embedding_engine, user_id: nil)
53
- DSPy::Context.with_span(operation: 'memory.compaction_check', 'memory.user_id' => user_id) do
54
- results = {}
55
-
56
- # Check triggers in order of impact
57
- if size_compaction_needed?(store, user_id)
58
- results[:size_compaction] = perform_size_compaction!(store, user_id)
59
- end
60
-
61
- if age_compaction_needed?(store, user_id)
62
- results[:age_compaction] = perform_age_compaction!(store, user_id)
63
- end
64
-
65
- if duplication_compaction_needed?(store, embedding_engine, user_id)
66
- results[:deduplication] = perform_deduplication!(store, embedding_engine, user_id)
67
- end
68
-
69
- if relevance_compaction_needed?(store, user_id)
70
- results[:relevance_pruning] = perform_relevance_pruning!(store, user_id)
71
- end
72
-
73
- results[:total_compacted] = results.values.sum { |r| r.is_a?(Hash) ? r[:removed_count] || 0 : 0 }
74
- results
75
- end
76
- end
77
-
78
- # Check if size-based compaction is needed
79
- sig { params(store: MemoryStore, user_id: T.nilable(String)).returns(T::Boolean) }
80
- def size_compaction_needed?(store, user_id)
81
- store.count(user_id: user_id) > @max_memories
82
- end
83
-
84
- # Check if age-based compaction is needed
85
- sig { params(store: MemoryStore, user_id: T.nilable(String)).returns(T::Boolean) }
86
- def age_compaction_needed?(store, user_id)
87
- memories = store.list(user_id: user_id)
88
- return false if memories.empty?
89
-
90
- # Check if any memory exceeds the age limit
91
- memories.any? { |memory| memory.age_in_days > @max_age_days }
92
- end
93
-
94
- # Check if deduplication is needed (simple heuristic)
95
- sig { params(store: MemoryStore, embedding_engine: EmbeddingEngine, user_id: T.nilable(String)).returns(T::Boolean) }
96
- def duplication_compaction_needed?(store, embedding_engine, user_id)
97
- # Sample recent memories to check for duplicates
98
- recent_memories = store.list(user_id: user_id, limit: 50)
99
- return false if recent_memories.length < 10
100
-
101
- # Quick duplicate check on a sample
102
- sample_size = [recent_memories.length / 4, 10].max
103
- sample = recent_memories.sample(sample_size)
104
-
105
- duplicate_count = 0
106
- sample.each_with_index do |memory1, i|
107
- sample[(i+1)..-1].each do |memory2|
108
- next unless memory1.embedding && memory2.embedding
109
-
110
- similarity = embedding_engine.cosine_similarity(memory1.embedding, memory2.embedding)
111
- duplicate_count += 1 if similarity > @similarity_threshold
112
- end
113
- end
114
-
115
- # Need deduplication if > 20% of sample has duplicates
116
- (duplicate_count.to_f / sample_size) > 0.2
117
- end
118
-
119
- # Check if relevance-based pruning is needed
120
- sig { params(store: MemoryStore, user_id: T.nilable(String)).returns(T::Boolean) }
121
- def relevance_compaction_needed?(store, user_id)
122
- memories = store.list(user_id: user_id, limit: 100)
123
- return false if memories.length < 50
124
-
125
- # Check if many memories have low access counts
126
- total_access = memories.sum(&:access_count)
127
- return false if total_access == 0
128
-
129
- # Calculate relative access for each memory
130
- low_access_count = memories.count do |memory|
131
- relative_access = memory.access_count.to_f / total_access
132
- relative_access < @low_access_threshold
133
- end
134
-
135
- # Need pruning if > 30% of memories have low relative access
136
- low_access_ratio = low_access_count.to_f / memories.length
137
- low_access_ratio > 0.3
138
- end
139
-
140
- private
141
-
142
- # Remove oldest memories when over size limit
143
- sig { params(store: MemoryStore, user_id: T.nilable(String)).returns(T::Hash[Symbol, T.untyped]) }
144
- def perform_size_compaction!(store, user_id)
145
- DSPy::Context.with_span(operation: 'memory.size_compaction', 'memory.user_id' => user_id) do
146
- current_count = store.count(user_id: user_id)
147
- target_count = (@max_memories * 0.8).to_i # Remove to 80% of limit
148
- remove_count = current_count - target_count
149
-
150
- # Don't remove if already under target
151
- if remove_count <= 0
152
- return {
153
- trigger: 'size_limit_exceeded',
154
- removed_count: 0,
155
- before_count: current_count,
156
- after_count: current_count,
157
- note: 'already_under_target'
158
- }
159
- end
160
-
161
- # Get oldest memories
162
- all_memories = store.list(user_id: user_id)
163
- oldest_memories = all_memories.sort_by(&:created_at).first(remove_count)
164
-
165
- removed_count = 0
166
- oldest_memories.each do |memory|
167
- if store.delete(memory.id)
168
- removed_count += 1
169
- end
170
- end
171
-
172
- {
173
- trigger: 'size_limit_exceeded',
174
- removed_count: removed_count,
175
- before_count: current_count,
176
- after_count: current_count - removed_count
177
- }
178
- end
179
- end
180
-
181
- # Remove memories older than age limit
182
- sig { params(store: MemoryStore, user_id: T.nilable(String)).returns(T::Hash[Symbol, T.untyped]) }
183
- def perform_age_compaction!(store, user_id)
184
- DSPy::Context.with_span(operation: 'memory.age_compaction', 'memory.user_id' => user_id) do
185
- cutoff_time = Time.now - (@max_age_days * 24 * 60 * 60)
186
- all_memories = store.list(user_id: user_id)
187
- old_memories = all_memories.select { |m| m.created_at < cutoff_time }
188
-
189
- removed_count = 0
190
- old_memories.each do |memory|
191
- if store.delete(memory.id)
192
- removed_count += 1
193
- end
194
- end
195
-
196
- {
197
- trigger: 'age_limit_exceeded',
198
- removed_count: removed_count,
199
- cutoff_age_days: @max_age_days,
200
- oldest_removed_age: old_memories.empty? ? nil : old_memories.max_by(&:created_at).age_in_days
201
- }
202
- end
203
- end
204
-
205
- # Remove near-duplicate memories using embedding similarity
206
- sig { params(store: MemoryStore, embedding_engine: EmbeddingEngine, user_id: T.nilable(String)).returns(T::Hash[Symbol, T.untyped]) }
207
- def perform_deduplication!(store, embedding_engine, user_id)
208
- DSPy::Context.with_span(operation: 'memory.deduplication', 'memory.user_id' => user_id) do
209
- memories = store.list(user_id: user_id)
210
- memories_with_embeddings = memories.select(&:embedding)
211
-
212
- duplicates_to_remove = []
213
- processed = Set.new
214
-
215
- memories_with_embeddings.each_with_index do |memory1, i|
216
- next if processed.include?(memory1.id)
217
-
218
- memories_with_embeddings[(i+1)..-1].each do |memory2|
219
- next if processed.include?(memory2.id)
220
-
221
- similarity = embedding_engine.cosine_similarity(memory1.embedding, memory2.embedding)
222
-
223
- if similarity > @similarity_threshold
224
- # Keep the one with higher access count, or newer if tied
225
- keeper, duplicate = if memory1.access_count > memory2.access_count
226
- [memory1, memory2]
227
- elsif memory1.access_count < memory2.access_count
228
- [memory2, memory1]
229
- else
230
- # Tie: keep newer one
231
- memory1.created_at > memory2.created_at ? [memory1, memory2] : [memory2, memory1]
232
- end
233
-
234
- duplicates_to_remove << duplicate
235
- processed.add(duplicate.id)
236
- end
237
- end
238
-
239
- processed.add(memory1.id)
240
- end
241
-
242
- removed_count = 0
243
- duplicates_to_remove.uniq.each do |memory|
244
- if store.delete(memory.id)
245
- removed_count += 1
246
- end
247
- end
248
-
249
- {
250
- trigger: 'duplicate_similarity_detected',
251
- removed_count: removed_count,
252
- similarity_threshold: @similarity_threshold,
253
- total_checked: memories_with_embeddings.length
254
- }
255
- end
256
- end
257
-
258
- # Remove memories with low relevance (low access patterns)
259
- sig { params(store: MemoryStore, user_id: T.nilable(String)).returns(T::Hash[Symbol, T.untyped]) }
260
- def perform_relevance_pruning!(store, user_id)
261
- DSPy::Context.with_span(operation: 'memory.relevance_pruning', 'memory.user_id' => user_id) do
262
- memories = store.list(user_id: user_id)
263
- total_access = memories.sum(&:access_count)
264
- return { removed_count: 0, trigger: 'no_access_data' } if total_access == 0
265
-
266
- # Calculate relevance scores
267
- scored_memories = memories.map do |memory|
268
- # Combine access frequency with recency
269
- access_score = memory.access_count.to_f / total_access
270
- recency_score = 1.0 / (memory.age_in_days + 1) # Avoid division by zero
271
- relevance_score = (access_score * 0.7) + (recency_score * 0.3)
272
-
273
- { memory: memory, score: relevance_score }
274
- end
275
-
276
- # Remove bottom 20% by relevance
277
- sorted_by_relevance = scored_memories.sort_by { |item| item[:score] }
278
- remove_count = (memories.length * 0.2).to_i
279
- to_remove = sorted_by_relevance.first(remove_count)
280
-
281
- removed_count = 0
282
- to_remove.each do |item|
283
- if store.delete(item[:memory].id)
284
- removed_count += 1
285
- end
286
- end
287
-
288
- {
289
- trigger: 'low_relevance_detected',
290
- removed_count: removed_count,
291
- lowest_score: to_remove.first&.dig(:score),
292
- highest_score: sorted_by_relevance.last&.dig(:score)
293
- }
294
- end
295
- end
296
- end
297
- end
298
- end
@@ -1,266 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- require 'sorbet-runtime'
4
- require_relative 'memory_record'
5
- require_relative 'memory_store'
6
- require_relative 'in_memory_store'
7
- require_relative 'embedding_engine'
8
- require_relative 'local_embedding_engine'
9
- require_relative 'memory_compactor'
10
-
11
- module DSPy
12
- module Memory
13
- # High-level memory management interface implementing MemoryTools API
14
- class MemoryManager
15
- extend T::Sig
16
-
17
- sig { returns(MemoryStore) }
18
- attr_reader :store
19
-
20
- sig { returns(EmbeddingEngine) }
21
- attr_reader :embedding_engine
22
-
23
- sig { returns(MemoryCompactor) }
24
- attr_reader :compactor
25
-
26
- sig { params(store: T.nilable(MemoryStore), embedding_engine: T.nilable(EmbeddingEngine), compactor: T.nilable(MemoryCompactor)).void }
27
- def initialize(store: nil, embedding_engine: nil, compactor: nil)
28
- @store = store || InMemoryStore.new
29
- @embedding_engine = embedding_engine || create_default_embedding_engine
30
- @compactor = compactor || MemoryCompactor.new
31
- end
32
-
33
- # Store a memory with automatic embedding generation
34
- sig { params(content: String, user_id: T.nilable(String), tags: T::Array[String], metadata: T::Hash[String, T.untyped]).returns(MemoryRecord) }
35
- def store_memory(content, user_id: nil, tags: [], metadata: {})
36
- # Generate embedding for the content
37
- embedding = @embedding_engine.embed(content)
38
-
39
- # Create memory record
40
- record = MemoryRecord.new(
41
- content: content,
42
- user_id: user_id,
43
- tags: tags,
44
- embedding: embedding,
45
- metadata: metadata
46
- )
47
-
48
- # Store in backend
49
- success = @store.store(record)
50
- raise "Failed to store memory" unless success
51
-
52
- # Check if compaction is needed after storing
53
- compact_if_needed!(user_id)
54
-
55
- record
56
- end
57
-
58
- # Retrieve a memory by ID
59
- sig { params(memory_id: String).returns(T.nilable(MemoryRecord)) }
60
- def get_memory(memory_id)
61
- @store.retrieve(memory_id)
62
- end
63
-
64
- # Update an existing memory
65
- sig { params(memory_id: String, new_content: String, tags: T.nilable(T::Array[String]), metadata: T.nilable(T::Hash[String, T.untyped])).returns(T::Boolean) }
66
- def update_memory(memory_id, new_content, tags: nil, metadata: nil)
67
- record = @store.retrieve(memory_id)
68
- return false unless record
69
-
70
- # Update content and regenerate embedding
71
- record.update_content!(new_content)
72
- record.embedding = @embedding_engine.embed(new_content)
73
-
74
- # Update tags if provided
75
- record.tags = tags if tags
76
-
77
- # Update metadata if provided
78
- record.metadata.merge!(metadata) if metadata
79
-
80
- @store.update(record)
81
- end
82
-
83
- # Delete a memory
84
- sig { params(memory_id: String).returns(T::Boolean) }
85
- def delete_memory(memory_id)
86
- @store.delete(memory_id)
87
- end
88
-
89
- # Get all memories for a user
90
- sig { params(user_id: T.nilable(String), limit: T.nilable(Integer), offset: T.nilable(Integer)).returns(T::Array[MemoryRecord]) }
91
- def get_all_memories(user_id: nil, limit: nil, offset: nil)
92
- @store.list(user_id: user_id, limit: limit, offset: offset)
93
- end
94
-
95
- # Semantic search using embeddings
96
- sig { params(query: String, user_id: T.nilable(String), limit: T.nilable(Integer), threshold: T.nilable(Float)).returns(T::Array[MemoryRecord]) }
97
- def search_memories(query, user_id: nil, limit: 10, threshold: 0.5)
98
- DSPy::Context.with_span(
99
- operation: 'memory.search',
100
- **DSPy::ObservationType::Retriever.langfuse_attributes,
101
- 'retriever.query' => query,
102
- 'retriever.user_id' => user_id,
103
- 'retriever.limit' => limit,
104
- 'retriever.threshold' => threshold
105
- ) do |span|
106
- # Generate embedding for the query
107
- query_embedding = @embedding_engine.embed(query)
108
-
109
- # Perform vector search if supported
110
- results = if @store.supports_vector_search?
111
- @store.vector_search(query_embedding, user_id: user_id, limit: limit, threshold: threshold)
112
- else
113
- # Fallback to text search
114
- @store.search(query, user_id: user_id, limit: limit)
115
- end
116
-
117
- # Add retrieval results to span
118
- if span
119
- span.set_attribute('retriever.results_count', results.length)
120
- span.set_attribute('retriever.results', results.map { |r| { id: r.id, content: r.content[0..100] } }.to_json)
121
- end
122
-
123
- results
124
- end
125
- end
126
-
127
- # Search by tags
128
- sig { params(tags: T::Array[String], user_id: T.nilable(String), limit: T.nilable(Integer)).returns(T::Array[MemoryRecord]) }
129
- def search_by_tags(tags, user_id: nil, limit: nil)
130
- @store.search_by_tags(tags, user_id: user_id, limit: limit)
131
- end
132
-
133
- # Text-based search (fallback when embeddings not available)
134
- sig { params(query: String, user_id: T.nilable(String), limit: T.nilable(Integer)).returns(T::Array[MemoryRecord]) }
135
- def search_text(query, user_id: nil, limit: nil)
136
- @store.search(query, user_id: user_id, limit: limit)
137
- end
138
-
139
- # Count memories
140
- sig { params(user_id: T.nilable(String)).returns(Integer) }
141
- def count_memories(user_id: nil)
142
- @store.count(user_id: user_id)
143
- end
144
-
145
- # Clear all memories for a user
146
- sig { params(user_id: T.nilable(String)).returns(Integer) }
147
- def clear_memories(user_id: nil)
148
- @store.clear(user_id: user_id)
149
- end
150
-
151
- # Find similar memories to a given memory
152
- sig { params(memory_id: String, limit: T.nilable(Integer), threshold: T.nilable(Float)).returns(T::Array[MemoryRecord]) }
153
- def find_similar(memory_id, limit: 5, threshold: 0.7)
154
- record = @store.retrieve(memory_id)
155
- return [] unless record&.embedding
156
-
157
- results = @store.vector_search(record.embedding, user_id: record.user_id, limit: limit + 1, threshold: threshold)
158
-
159
- # Remove the original record from results
160
- results.reject { |r| r.id == memory_id }
161
- end
162
-
163
- # Batch operations
164
- sig { params(contents: T::Array[String], user_id: T.nilable(String), tags: T::Array[String]).returns(T::Array[MemoryRecord]) }
165
- def store_memories_batch(contents, user_id: nil, tags: [])
166
- # Generate embeddings in batch for efficiency
167
- embeddings = @embedding_engine.embed_batch(contents)
168
-
169
- records = contents.zip(embeddings).map do |content, embedding|
170
- MemoryRecord.new(
171
- content: content,
172
- user_id: user_id,
173
- tags: tags,
174
- embedding: embedding
175
- )
176
- end
177
-
178
- # Store all records
179
- results = @store.store_batch(records)
180
-
181
- # Compact after batch operation
182
- compact_if_needed!(user_id)
183
-
184
- # Return only successfully stored records
185
- records.select.with_index { |_, idx| results[idx] }
186
- end
187
-
188
- # Get memory statistics
189
- sig { returns(T::Hash[Symbol, T.untyped]) }
190
- def stats
191
- store_stats = @store.stats
192
- engine_stats = @embedding_engine.stats
193
-
194
- {
195
- store: store_stats,
196
- embedding_engine: engine_stats,
197
- total_memories: store_stats[:total_memories] || 0
198
- }
199
- end
200
-
201
- # Health check
202
- sig { returns(T::Boolean) }
203
- def healthy?
204
- @embedding_engine.ready? && @store.respond_to?(:count)
205
- end
206
-
207
- # Export memories to hash format
208
- sig { params(user_id: T.nilable(String)).returns(T::Array[T::Hash[String, T.untyped]]) }
209
- def export_memories(user_id: nil)
210
- memories = get_all_memories(user_id: user_id)
211
- memories.map(&:to_h)
212
- end
213
-
214
- # Import memories from hash format
215
- sig { params(memories_data: T::Array[T::Hash[String, T.untyped]]).returns(Integer) }
216
- def import_memories(memories_data)
217
- records = memories_data.map { |data| MemoryRecord.from_h(data) }
218
- results = @store.store_batch(records)
219
-
220
- # Compact after batch import
221
- user_ids = records.map(&:user_id).compact.uniq
222
- user_ids.each { |user_id| compact_if_needed!(user_id) }
223
-
224
- results.count(true)
225
- end
226
-
227
- # Trigger memory compaction if needed
228
- sig { params(user_id: T.nilable(String)).returns(T::Hash[Symbol, T.untyped]) }
229
- def compact_if_needed!(user_id = nil)
230
- @compactor.compact_if_needed!(@store, @embedding_engine, user_id: user_id)
231
- end
232
-
233
- # Force memory compaction (useful for testing or manual cleanup)
234
- sig { params(user_id: T.nilable(String)).returns(T::Hash[Symbol, T.untyped]) }
235
- def force_compact!(user_id = nil)
236
- DSPy::Context.with_span(
237
- operation: 'memory.compaction_complete',
238
- 'memory.user_id' => user_id,
239
- 'memory.forced' => true
240
- ) do
241
- results = {}
242
-
243
- # Run all compaction strategies regardless of thresholds
244
- results[:size_compaction] = @compactor.send(:perform_size_compaction!, @store, user_id)
245
- results[:age_compaction] = @compactor.send(:perform_age_compaction!, @store, user_id)
246
- results[:deduplication] = @compactor.send(:perform_deduplication!, @store, @embedding_engine, user_id)
247
- results[:relevance_pruning] = @compactor.send(:perform_relevance_pruning!, @store, user_id)
248
-
249
- results[:total_compacted] = results.values.sum { |r| r.is_a?(Hash) ? r[:removed_count] || 0 : 0 }
250
- results
251
- end
252
- end
253
-
254
- private
255
-
256
- # Create default embedding engine
257
- sig { returns(EmbeddingEngine) }
258
- def create_default_embedding_engine
259
- LocalEmbeddingEngine.new
260
- rescue => e
261
- # Fallback to no-op engine if local engine fails
262
- NoOpEmbeddingEngine.new
263
- end
264
- end
265
- end
266
- end