ragdoll 0.1.0 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. checksums.yaml +4 -4
  2. data/README.md +318 -40
  3. data/Rakefile +66 -4
  4. data/app/jobs/ragdoll/extract_keywords_job.rb +28 -0
  5. data/app/jobs/ragdoll/extract_text_job.rb +38 -0
  6. data/app/jobs/ragdoll/generate_embeddings_job.rb +28 -0
  7. data/app/jobs/ragdoll/generate_summary_job.rb +25 -0
  8. data/app/lib/ragdoll/metadata_schemas.rb +332 -0
  9. data/app/models/ragdoll/audio_content.rb +142 -0
  10. data/app/models/ragdoll/content.rb +95 -0
  11. data/app/models/ragdoll/document.rb +606 -4
  12. data/app/models/ragdoll/embedding.rb +172 -5
  13. data/app/models/ragdoll/image_content.rb +194 -0
  14. data/app/models/ragdoll/text_content.rb +137 -0
  15. data/app/services/ragdoll/configuration_service.rb +113 -0
  16. data/app/services/ragdoll/document_management.rb +108 -0
  17. data/app/services/ragdoll/document_processor.rb +342 -0
  18. data/app/services/ragdoll/embedding_service.rb +202 -0
  19. data/app/services/ragdoll/image_description_service.rb +230 -0
  20. data/app/services/ragdoll/metadata_generator.rb +329 -0
  21. data/app/services/ragdoll/model_resolver.rb +72 -0
  22. data/app/services/ragdoll/search_engine.rb +51 -0
  23. data/app/services/ragdoll/text_chunker.rb +208 -0
  24. data/app/services/ragdoll/text_generation_service.rb +355 -0
  25. data/db/migrate/001_enable_postgresql_extensions.rb +23 -0
  26. data/db/migrate/004_create_ragdoll_documents.rb +70 -0
  27. data/db/migrate/005_create_ragdoll_embeddings.rb +41 -0
  28. data/db/migrate/006_create_ragdoll_contents.rb +47 -0
  29. data/lib/ragdoll/core/client.rb +306 -0
  30. data/lib/ragdoll/core/configuration.rb +257 -0
  31. data/lib/ragdoll/core/database.rb +141 -0
  32. data/lib/ragdoll/core/errors.rb +11 -0
  33. data/lib/ragdoll/core/model.rb +45 -0
  34. data/lib/ragdoll/core/shrine_config.rb +71 -0
  35. data/lib/ragdoll/core/version.rb +8 -0
  36. data/lib/ragdoll/core.rb +91 -0
  37. data/lib/ragdoll-core.rb +3 -0
  38. data/lib/ragdoll.rb +243 -6
  39. data/lib/tasks/annotate.rake +126 -0
  40. data/lib/tasks/db.rake +338 -0
  41. metadata +42 -35
  42. data/config/initializers/ragdoll.rb +0 -6
  43. data/config/routes.rb +0 -5
  44. data/db/migrate/20250218123456_create_documents.rb +0 -20
  45. data/lib/config/database.yml +0 -28
  46. data/lib/config/ragdoll.yml +0 -31
  47. data/lib/ragdoll/engine.rb +0 -16
  48. data/lib/ragdoll/import_job.rb +0 -15
  49. data/lib/ragdoll/ingestion.rb +0 -30
  50. data/lib/ragdoll/search.rb +0 -18
  51. data/lib/ragdoll/version.rb +0 -7
  52. data/lib/tasks/import_task.thor +0 -32
  53. data/lib/tasks/jobs_task.thor +0 -40
  54. data/lib/tasks/ragdoll_tasks.thor +0 -7
  55. data/lib/tasks/search_task.thor +0 -55
@@ -0,0 +1,47 @@
1
# Migration creating the ragdoll_contents table. Rows use single-table
# inheritance (STI): the +type+ column selects the concrete model
# (AudioContent, ImageContent or TextContent).
class CreateRagdollContents < ActiveRecord::Migration[7.0]
  def change
    create_table :ragdoll_contents,
                 comment: "Content storage for polymorphic embedding architecture using STI" do |t|
      # --- Columns -------------------------------------------------------
      t.string :type, null: false,
                      comment: "Type of content (e.g., AudioContent, ImageContent, TextContent)"
      t.references :document, null: false, foreign_key: { to_table: :ragdoll_documents },
                              comment: "Reference to parent document"
      t.string :embedding_model, null: false,
                                 comment: "Embedding model to use for this content"
      t.text :content, comment: "Text content or description of the file"
      t.text :data, comment: "Raw data from file"
      t.json :metadata, default: {},
                        comment: "Additional metadata about the file's raw data"

      # Audio-specific attributes (left NULL for other content types).
      t.float :duration, comment: "Duration of audio in seconds (for audio content)"
      t.integer :sample_rate, comment: "Audio sample rate in Hz (for audio content)"

      t.timestamps null: false,
                   comment: "Standard creation and update timestamps"

      # --- Indexes -------------------------------------------------------
      t.index :embedding_model, comment: "Index for filtering by embedding model"
      t.index :type, comment: "Index for filtering by content type"
      # GIN index over a tsvector expression for PostgreSQL full-text search.
      t.index "to_tsvector('english', COALESCE(content, ''))",
              using: :gin,
              name: "index_ragdoll_contents_on_fulltext_search",
              comment: "Full-text search index for text content"
    end
  end
end
@@ -0,0 +1,306 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "fileutils"
4
+
5
module Ragdoll
  module Core
    # High-level client facade for the Ragdoll RAG system.
    #
    # Wires together the configuration service, model resolver, embedding
    # service and search engine, and exposes the public operations:
    # prompt enhancement, semantic/hybrid search and document management.
    class Client
      # Builds all collaborator services and establishes the database
      # connection. Propagates whatever the underlying services raise when
      # configuration or the database is unavailable.
      def initialize
        # Configuration services
        @config_service = Ragdoll::ConfigurationService.new
        @model_resolver = Ragdoll::ModelResolver.new(@config_service)

        # Logging must be configured before any job/service activity.
        setup_logging

        # Database connection
        Database.setup(@config_service.config.database)

        @embedding_service = Ragdoll::EmbeddingService.new(
          client: nil,
          config_service: @config_service,
          model_resolver: @model_resolver
        )
        @search_engine = Ragdoll::SearchEngine.new(@embedding_service, config_service: @config_service)
      end

      # Primary method for RAG applications.
      #
      # Retrieves up to +context_limit+ relevant chunks for +prompt+ and,
      # when any are found, substitutes them into the configured RAG
      # template. Returns a hash with :enhanced_prompt, :original_prompt,
      # :context_sources and :context_count.
      def enhance_prompt(prompt:, context_limit: 5, **options)
        context_data = get_context(query: prompt, limit: context_limit, **options)

        if context_data[:context_chunks].any?
          enhanced_prompt = build_enhanced_prompt(prompt, context_data[:combined_context])
          {
            enhanced_prompt: enhanced_prompt,
            original_prompt: prompt,
            context_sources: context_data[:context_chunks].map { |chunk| chunk[:source] },
            context_count: context_data[:total_chunks]
          }
        else
          # No relevant context: hand the prompt back untouched.
          {
            enhanced_prompt: prompt,
            original_prompt: prompt,
            context_sources: [],
            context_count: 0
          }
        end
      end

      # Get relevant context without prompt enhancement.
      # Returns :context_chunks (array of hashes), :combined_context
      # (chunk texts joined by blank lines) and :total_chunks.
      def get_context(query:, limit: 10, **options)
        results = search_similar_content(query: query, limit: limit, **options)

        context_chunks = results.map do |result|
          {
            content: result[:content],
            source: result[:document_location],
            similarity: result[:similarity],
            chunk_index: result[:chunk_index]
          }
        end

        {
          context_chunks: context_chunks,
          combined_context: context_chunks.map { |chunk| chunk[:content] }.join("\n\n"),
          total_chunks: context_chunks.length
        }
      end

      # FIXME: This high-level API method should be able to take a query that is
      #        a string or a file. If it's a file, then the downstream process will
      #        be responsible for reading the file and passing the contents to the
      #        search method based upon whether the content is text, image or audio.

      # Semantic search. Should eventually incorporate hybrid search.
      def search(query:, **options)
        results = search_similar_content(query: query, **options)

        {
          query: query,
          results: results,
          total_results: results.length
        }
      end

      # Search similar content (core functionality); delegates to the engine.
      def search_similar_content(query:, **options)
        @search_engine.search_similar_content(query, **options)
      end

      # Hybrid search combining semantic and full-text search.
      # On failure returns an empty result set with an :error message
      # instead of raising.
      def hybrid_search(query:, **options)
        # Generate embedding for the query
        query_embedding = @embedding_service.generate_embedding(query)

        results = Ragdoll::Document.hybrid_search(query, query_embedding: query_embedding, **options)

        {
          query: query,
          search_type: "hybrid",
          results: results,
          total_results: results.length,
          semantic_weight: options[:semantic_weight] || 0.7,
          text_weight: options[:text_weight] || 0.3
        }
      rescue StandardError => e
        {
          query: query,
          search_type: "hybrid",
          results: [],
          total_results: 0,
          error: "Hybrid search failed: #{e.message}"
        }
      end

      # Parses the file at +path+, stores it, and queues background
      # processing jobs. Always returns a result hash; inspect :success —
      # processing errors are captured here, not raised.
      def add_document(path:)
        parsed = Ragdoll::DocumentProcessor.parse(path)

        # Extract title from metadata or fall back to the filename.
        title = parsed[:metadata][:title] ||
                File.basename(path, File.extname(path))

        doc_id = Ragdoll::DocumentManagement.add_document(path, parsed[:content], {
          title: title,
          document_type: parsed[:document_type],
          **parsed[:metadata]
        })

        # Queue background jobs for processing if content is available.
        embeddings_queued = false
        if parsed[:content].present?
          Ragdoll::GenerateEmbeddingsJob.perform_later(doc_id)
          Ragdoll::GenerateSummaryJob.perform_later(doc_id)
          Ragdoll::ExtractKeywordsJob.perform_later(doc_id)
          embeddings_queued = true
        end

        {
          success: true,
          document_id: doc_id,
          title: title,
          document_type: parsed[:document_type],
          content_length: parsed[:content]&.length || 0,
          embeddings_queued: embeddings_queued,
          message: "Document '#{title}' added successfully with ID #{doc_id}"
        }
      rescue StandardError => e
        {
          success: false,
          error: e.message,
          message: "Failed to add document: #{e.message}"
        }
      end

      # Stores raw text as a document and queues embedding generation.
      # Returns the new document id.
      def add_text(content:, title:, **options)
        doc_id = Ragdoll::DocumentManagement.add_document(title, content, {
          title: title,
          document_type: "text",
          **options
        })

        # Queue background job for embeddings
        Ragdoll::GenerateEmbeddingsJob.perform_later(doc_id,
                                                     chunk_size: options[:chunk_size],
                                                     chunk_overlap: options[:chunk_overlap])

        doc_id
      end

      # Imports every file under +path+ (recursing when +recursive+).
      # Returns an array of per-file hashes with :file, :status and either
      # :document_id or :error.
      def add_directory(path:, recursive: false)
        results = []
        pattern = recursive ? File.join(path, "**", "*") : File.join(path, "*")

        Dir.glob(pattern).each do |file_path|
          next unless File.file?(file_path)

          begin
            # add_document returns a result hash and captures its own
            # errors, so check :success rather than relying on rescue.
            result = add_document(path: file_path)
            if result[:success]
              results << { file: file_path, document_id: result[:document_id], status: "success" }
            else
              results << { file: file_path, error: result[:error], status: "error" }
            end
          rescue StandardError => e
            results << { file: file_path, error: e.message, status: "error" }
          end
        end

        results
      end

      # Fetches a document as a hash, or nil when it does not exist.
      def get_document(id:)
        document_hash = Ragdoll::DocumentManagement.get_document(id)
        return nil unless document_hash

        # DocumentManagement.get_document already returns a hash with all needed info
        document_hash
      end

      # Reports processing status and embedding counts for a document.
      # Returns an error hash (never raises) when the id is unknown.
      def document_status(id:)
        document = Ragdoll::Document.find(id)
        embeddings_count = document.all_embeddings.count

        {
          id: document.id,
          title: document.title,
          status: document.status,
          embeddings_count: embeddings_count,
          embeddings_ready: embeddings_count.positive?,
          # NOTE(review): String#first is ActiveSupport — assumed loaded here.
          content_preview: document.content&.first(200) || "No content",
          message: case document.status
                   when "processed"
                     "Document processed successfully with #{embeddings_count} embeddings"
                   when "processing"
                     "Document is being processed"
                   when "pending"
                     "Document is pending processing"
                   when "error"
                     "Document processing failed"
                   else
                     "Document status: #{document.status}"
                   end
        }
      rescue ActiveRecord::RecordNotFound
        {
          success: false,
          error: "Document not found",
          message: "Document with ID #{id} does not exist"
        }
      end

      # Applies attribute updates to a document.
      def update_document(id:, **updates)
        Ragdoll::DocumentManagement.update_document(id, **updates)
      end

      # Deletes a document.
      def delete_document(id:)
        Ragdoll::DocumentManagement.delete_document(id)
      end

      # Lists documents; options are passed through to DocumentManagement.
      def list_documents(**options)
        Ragdoll::DocumentManagement.list_documents(options)
      end

      # Aggregate document statistics.
      def stats
        Ragdoll::DocumentManagement.get_document_stats
      end

      # Count of embeddings returned per day over the last +days+ days.
      def search_analytics(days: 30)
        # This could be implemented with additional database queries
        Ragdoll::Embedding.where("returned_at > ?", days.days.ago)
                          .group("DATE(returned_at)")
                          .count
      end

      # Health check: database reachable and stats readable.
      def healthy?
        Database.connected? && stats[:total_documents] >= 0
      rescue StandardError
        false
      end

      private

      # Creates the log directory/file, sets the configured log level and
      # points ActiveJob at the logger with an inline queue adapter.
      def setup_logging
        require "logger"
        require "active_job"

        # Create log directory if it doesn't exist
        log_file = @config_service.config.logging[:filepath]
        log_dir = File.dirname(log_file)
        FileUtils.mkdir_p(log_dir) unless Dir.exist?(log_dir)

        # Set up logger with appropriate level (defaults to WARN).
        logger = Logger.new(log_file)
        logger.level = case @config_service.config.logging[:level]
                       when :debug then Logger::DEBUG
                       when :info then Logger::INFO
                       when :warn then Logger::WARN
                       when :error then Logger::ERROR
                       when :fatal then Logger::FATAL
                       else Logger::WARN
                       end

        # Configure ActiveJob to use our logger and reduce verbosity
        ActiveJob::Base.logger = logger
        ActiveJob::Base.logger.level = Logger::WARN

        # Use the inline adapter so jobs execute immediately in-process.
        ActiveJob::Base.queue_adapter = :inline
      end

      # Substitutes context and prompt into the configured RAG template
      # ({{context}} / {{prompt}} placeholders).
      def build_enhanced_prompt(original_prompt, context)
        template = @config_service.config.prompt_template(:rag_enhancement)

        template
          .gsub("{{context}}", context)
          .gsub("{{prompt}}", original_prompt)
      end
    end
  end
end
@@ -0,0 +1,257 @@
1
+ # frozen_string_literal: true
2
+
3
+ require "yaml"
4
+ require "fileutils"
5
+ require "ostruct"
6
+ require_relative "model"
7
+
8
module Ragdoll
  module Core
    # Central configuration object for Ragdoll.
    #
    # Wraps a deep merge of DEFAULT and user overrides in an OpenStruct and
    # delegates unknown methods to it. DEFAULT values may be Procs (lazy ENV
    # lookups); they are resolved exactly once at construction time.
    class Configuration
      class ConfigurationFileNotFoundError < StandardError; end
      class ConfigurationSaveError < StandardError; end
      class ConfigurationLoadUnknownError < StandardError; end

      DEFAULT = {
        # Base directory for all Ragdoll files - single source of truth
        base_directory: File.join(Dir.home, ".config", "ragdoll"),

        # Configuration file path derived from base directory
        config_filepath: File.join(Dir.home, ".config", "ragdoll", "config.yml"),

        # Model configurations organized by purpose with inheritance support
        models: {
          text_generation: {
            default: -> { Model.new(ENV.fetch("RAGDOLL_DEFAULT_TEXT_MODEL", "openai/gpt-4o")) },
            summary: -> { Model.new(ENV.fetch("RAGDOLL_SUMMARY_MODEL", "openai/gpt-4o")) },
            keywords: -> { Model.new(ENV.fetch("RAGDOLL_KEYWORDS_MODEL", "openai/gpt-4o")) }
          },
          embedding: {
            provider: :openai,
            text: -> { Model.new(ENV.fetch("RAGDOLL_TEXT_EMBEDDING_MODEL", "openai/text-embedding-3-small")) },
            image: -> { Model.new(ENV.fetch("RAGDOLL_IMAGE_EMBEDDING_MODEL", "openai/clip-vit-base-patch32")) },
            audio: -> { Model.new(ENV.fetch("RAGDOLL_AUDIO_EMBEDDING_MODEL", "openai/whisper-1")) },
            max_dimensions: 3072,
            cache_embeddings: true
          }
        },

        # Processing configuration by content type
        processing: {
          text: {
            chunking: {
              max_tokens: 1000,
              overlap: 200
            }
          },
          default: {
            chunking: {
              max_tokens: 4096,
              overlap: 128
            }
          },
          search: {
            similarity_threshold: 0.7,
            max_results: 10,
            analytics: {
              enable: true,
              usage_tracking_enabled: true,
              ranking_enabled: true,
              recency_weight: 0.3,
              frequency_weight: 0.7,
              similarity_weight: 1.0
            }
          }
        },

        # LLM provider configurations (renamed from ruby_llm_config)
        llm_providers: {
          default_provider: :openai,
          openai: {
            api_key: -> { ENV.fetch("OPENAI_API_KEY", nil) },
            organization: -> { ENV.fetch("OPENAI_ORGANIZATION", nil) },
            project: -> { ENV.fetch("OPENAI_PROJECT", nil) }
          },
          anthropic: {
            api_key: -> { ENV.fetch("ANTHROPIC_API_KEY", nil) }
          },
          google: {
            api_key: -> { ENV.fetch("GOOGLE_API_KEY", nil) },
            project_id: -> { ENV.fetch("GOOGLE_PROJECT_ID", nil) }
          },
          azure: {
            api_key: -> { ENV.fetch("AZURE_OPENAI_API_KEY", nil) },
            endpoint: -> { ENV.fetch("AZURE_OPENAI_ENDPOINT", nil) },
            api_version: -> { ENV.fetch("AZURE_OPENAI_API_VERSION", "2024-02-01") }
          },
          ollama: {
            endpoint: -> { ENV.fetch("OLLAMA_ENDPOINT", "http://localhost:11434") }
          },
          huggingface: {
            api_key: -> { ENV.fetch("HUGGINGFACE_API_KEY", nil) }
          },
          openrouter: {
            api_key: -> { ENV.fetch("OPENROUTER_API_KEY", nil) }
          }
        },

        # Summarization configuration
        summarization: {
          enable: true,
          max_length: 300,
          min_content_length: 300
        },

        # Database configuration with standardized ENV variable name
        database: {
          adapter: "postgresql",
          database: "ragdoll_development",
          username: "ragdoll",
          password: -> { ENV.fetch("RAGDOLL_DATABASE_PASSWORD", nil) },
          host: "localhost",
          port: 5432,
          auto_migrate: true,
          logger: nil
        },

        # Logging configuration with corrected key names and path derivation
        logging: {
          level: :warn, # Fixed: was log_level, now matches usage
          directory: File.join(Dir.home, ".config", "ragdoll", "logs"),
          filepath: File.join(Dir.home, ".config", "ragdoll", "logs", "ragdoll.log")
        },

        # Prompt templates for customizable text generation
        prompt_templates: {
          rag_enhancement: <<~TEMPLATE.strip
            You are an AI assistant. Use the following context to help answer the user's question.
            If the context doesn't contain relevant information, say so.

            Context:
            {{context}}

            Question: {{prompt}}

            Answer:
          TEMPLATE
        }
      }.freeze

      # Builds a configuration from +config+ overrides deep-merged over
      # DEFAULT. Procs are invoked; strings inside :models become Model
      # instances (see #resolve_procs).
      def initialize(config = {})
        merged_config = deep_merge(self.class::DEFAULT, config)
        resolved_config = resolve_procs(merged_config, [])
        @config = OpenStruct.new(resolved_config)
      end

      # Loads configuration from a YAML file (default: DEFAULT[:config_filepath]).
      #
      # Keys are symbolized so they deep-merge against the symbol-keyed
      # DEFAULT hash; Symbol is permitted because saved configs may contain
      # symbol values.
      #
      # @raise [ConfigurationFileNotFoundError] when the file does not exist
      # @raise [ConfigurationLoadUnknownError] for any other load failure
      def self.load(path: nil)
        path ||= DEFAULT[:config_filepath]

        raise ConfigurationFileNotFoundError, "Configuration file not found: #{path}" unless File.exist?(path)

        new(YAML.safe_load_file(path, permitted_classes: [Symbol], aliases: true, symbolize_names: true) || {})
      rescue ConfigurationFileNotFoundError
        # Re-raise so the generic StandardError rescue below cannot rewrap
        # the specific not-found error as ConfigurationLoadUnknownError.
        raise
      rescue Errno::ENOENT
        raise ConfigurationFileNotFoundError, "Configuration file not found: #{path}"
      rescue StandardError => e
        raise ConfigurationLoadUnknownError, "Failed to load configuration from #{path}: #{e.message}"
      end

      # Persists the configuration as YAML. When +path+ is given it also
      # becomes the new config_filepath (restored again if the save fails).
      #
      # @raise [ConfigurationSaveError] when the file cannot be written
      def save(path: nil)
        save_filepath = nil
        if path.nil?
          path = @config.config_filepath
        else
          save_filepath = @config.config_filepath
          @config.config_filepath = path
        end

        FileUtils.mkdir_p(File.dirname(path))

        File.write(path, @config.to_yaml)
      rescue StandardError => e
        # Roll back the filepath change before surfacing the failure.
        @config.config_filepath = save_filepath unless save_filepath.nil?
        raise ConfigurationSaveError, "Failed to save configuration to #{path}: #{e.message}"
      end

      # SMELL: isn't this method more of a utility?

      # Parse a provider/model string into its components
      # Format: "provider/model" -> { provider: :provider, model: "model" }
      # Format: "model" -> { provider: nil, model: "model" } (RubyLLM determines provider)
      def parse_provider_model(provider_model_string)
        return { provider: nil, model: nil } if provider_model_string.nil? || provider_model_string.empty?

        parts = provider_model_string.split("/", 2)
        if parts.length == 2
          { provider: parts[0].to_sym, model: parts[1] }
        else
          # If no slash, let RubyLLM determine provider from model name
          { provider: nil, model: provider_model_string }
        end
      end

      # Resolve model with inheritance support.
      # Returns the configured model for +task_type+, falling back to the
      # text-generation default when the task has no dedicated entry.
      def resolve_model(task_type)
        case task_type
        when :embedding
          @config.models[:embedding]
        when :text, :summary, :keywords, :default
          @config.models[:text_generation][task_type] || @config.models[:text_generation][:default]
        else
          @config.models[:text_generation][:default]
        end
      end

      # Credentials hash for +provider+ (default: the configured
      # default_provider); empty hash when unknown.
      def provider_credentials(provider = nil)
        provider ||= @config.llm_providers[:default_provider]
        @config.llm_providers[provider] || {}
      end

      # Embedding model for +content_type+, falling back to :text.
      def embedding_model(content_type = :text)
        @config.models[:embedding][content_type] || @config.models[:embedding][:text]
      end

      # Prompt template by name (default: :rag_enhancement).
      def prompt_template(template_name = :rag_enhancement)
        @config.prompt_templates[template_name]
      end

      # Delegate unknown methods to the internal OpenStruct. Keywords are
      # forwarded explicitly so Ruby 3 keyword arguments survive delegation.
      def method_missing(method_name, *args, **kwargs, &block)
        @config.send(method_name, *args, **kwargs, &block)
      end

      def respond_to_missing?(method_name, include_private = false)
        @config.respond_to?(method_name, include_private) || super
      end

      private

      # Recursively walks +obj+: calls Procs, recurses into Hashes, and —
      # within the :models subtree — converts plain strings to Model
      # instances. +path+ tracks the key path for that models check.
      def resolve_procs(obj, path = [])
        case obj
        when Hash
          obj.each_with_object({}) { |(k, v), result| result[k] = resolve_procs(v, path + [k]) }
        when Proc
          obj.call
        when String
          # Convert strings to Model instances in the models configuration section
          if path.length >= 2 && path[0] == :models
            Model.new(obj)
          else
            obj
          end
        else
          obj
        end
      end

      # Recursive hash merge: nested hashes are merged, all other values
      # are overridden by hash2.
      def deep_merge(hash1, hash2)
        hash1.merge(hash2) do |_key, oldval, newval|
          oldval.is_a?(Hash) && newval.is_a?(Hash) ? deep_merge(oldval, newval) : newval
        end
      end
    end
  end
end