desiru 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. checksums.yaml +4 -4
  2. data/.env.example +34 -0
  3. data/.rubocop.yml +7 -4
  4. data/.ruby-version +1 -0
  5. data/CLAUDE.md +4 -0
  6. data/Gemfile +21 -2
  7. data/Gemfile.lock +87 -12
  8. data/README.md +295 -2
  9. data/Rakefile +1 -0
  10. data/db/migrations/001_create_initial_tables.rb +96 -0
  11. data/db/migrations/002_create_job_results.rb +39 -0
  12. data/desiru.db +0 -0
  13. data/desiru.gemspec +2 -5
  14. data/docs/background_processing_roadmap.md +87 -0
  15. data/docs/job_scheduling.md +167 -0
  16. data/dspy-analysis-swarm.yml +60 -0
  17. data/dspy-feature-analysis.md +121 -0
  18. data/examples/README.md +69 -0
  19. data/examples/api_with_persistence.rb +122 -0
  20. data/examples/assertions_example.rb +232 -0
  21. data/examples/async_processing.rb +2 -0
  22. data/examples/few_shot_learning.rb +1 -2
  23. data/examples/graphql_api.rb +4 -2
  24. data/examples/graphql_integration.rb +3 -3
  25. data/examples/graphql_optimization_summary.md +143 -0
  26. data/examples/graphql_performance_benchmark.rb +247 -0
  27. data/examples/persistence_example.rb +102 -0
  28. data/examples/react_agent.rb +203 -0
  29. data/examples/rest_api.rb +173 -0
  30. data/examples/rest_api_advanced.rb +333 -0
  31. data/examples/scheduled_job_example.rb +116 -0
  32. data/examples/simple_qa.rb +1 -2
  33. data/examples/sinatra_api.rb +109 -0
  34. data/examples/typed_signatures.rb +1 -2
  35. data/graphql_optimization_summary.md +53 -0
  36. data/lib/desiru/api/grape_integration.rb +284 -0
  37. data/lib/desiru/api/persistence_middleware.rb +148 -0
  38. data/lib/desiru/api/sinatra_integration.rb +217 -0
  39. data/lib/desiru/api.rb +42 -0
  40. data/lib/desiru/assertions.rb +74 -0
  41. data/lib/desiru/async_status.rb +65 -0
  42. data/lib/desiru/cache.rb +1 -1
  43. data/lib/desiru/configuration.rb +2 -1
  44. data/lib/desiru/errors.rb +160 -0
  45. data/lib/desiru/field.rb +17 -14
  46. data/lib/desiru/graphql/batch_loader.rb +85 -0
  47. data/lib/desiru/graphql/data_loader.rb +242 -75
  48. data/lib/desiru/graphql/enum_builder.rb +75 -0
  49. data/lib/desiru/graphql/executor.rb +37 -4
  50. data/lib/desiru/graphql/schema_generator.rb +62 -158
  51. data/lib/desiru/graphql/type_builder.rb +138 -0
  52. data/lib/desiru/graphql/type_cache_warmer.rb +91 -0
  53. data/lib/desiru/jobs/async_predict.rb +1 -1
  54. data/lib/desiru/jobs/base.rb +67 -0
  55. data/lib/desiru/jobs/batch_processor.rb +6 -6
  56. data/lib/desiru/jobs/retriable.rb +119 -0
  57. data/lib/desiru/jobs/retry_strategies.rb +169 -0
  58. data/lib/desiru/jobs/scheduler.rb +219 -0
  59. data/lib/desiru/jobs/webhook_notifier.rb +242 -0
  60. data/lib/desiru/models/anthropic.rb +164 -0
  61. data/lib/desiru/models/base.rb +37 -3
  62. data/lib/desiru/models/open_ai.rb +151 -0
  63. data/lib/desiru/models/open_router.rb +161 -0
  64. data/lib/desiru/module.rb +59 -9
  65. data/lib/desiru/modules/chain_of_thought.rb +3 -3
  66. data/lib/desiru/modules/majority.rb +51 -0
  67. data/lib/desiru/modules/multi_chain_comparison.rb +204 -0
  68. data/lib/desiru/modules/predict.rb +8 -1
  69. data/lib/desiru/modules/program_of_thought.rb +139 -0
  70. data/lib/desiru/modules/react.rb +273 -0
  71. data/lib/desiru/modules/retrieve.rb +4 -2
  72. data/lib/desiru/optimizers/base.rb +2 -4
  73. data/lib/desiru/optimizers/bootstrap_few_shot.rb +2 -2
  74. data/lib/desiru/optimizers/copro.rb +268 -0
  75. data/lib/desiru/optimizers/knn_few_shot.rb +185 -0
  76. data/lib/desiru/persistence/database.rb +71 -0
  77. data/lib/desiru/persistence/models/api_request.rb +38 -0
  78. data/lib/desiru/persistence/models/job_result.rb +138 -0
  79. data/lib/desiru/persistence/models/module_execution.rb +37 -0
  80. data/lib/desiru/persistence/models/optimization_result.rb +28 -0
  81. data/lib/desiru/persistence/models/training_example.rb +25 -0
  82. data/lib/desiru/persistence/models.rb +11 -0
  83. data/lib/desiru/persistence/repositories/api_request_repository.rb +98 -0
  84. data/lib/desiru/persistence/repositories/base_repository.rb +77 -0
  85. data/lib/desiru/persistence/repositories/job_result_repository.rb +116 -0
  86. data/lib/desiru/persistence/repositories/module_execution_repository.rb +85 -0
  87. data/lib/desiru/persistence/repositories/optimization_result_repository.rb +67 -0
  88. data/lib/desiru/persistence/repositories/training_example_repository.rb +102 -0
  89. data/lib/desiru/persistence/repository.rb +29 -0
  90. data/lib/desiru/persistence/setup.rb +77 -0
  91. data/lib/desiru/persistence.rb +49 -0
  92. data/lib/desiru/registry.rb +3 -5
  93. data/lib/desiru/signature.rb +91 -24
  94. data/lib/desiru/version.rb +1 -1
  95. data/lib/desiru.rb +23 -8
  96. data/missing-features-analysis.md +192 -0
  97. metadata +63 -45
  98. data/lib/desiru/models/raix_adapter.rb +0 -210

data/lib/desiru/graphql/batch_loader.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'graphql'
+
+module Desiru
+  module GraphQL
+    # GraphQL-compatible batch loader that integrates with GraphQL's lazy execution
+    class BatchLoader < ::GraphQL::Dataloader::Source
+      def initialize(module_instance)
+        super()
+        @module_instance = module_instance
+      end
+
+      # Fetch implementation for GraphQL::Dataloader
+      def fetch(inputs_array)
+        if @module_instance.respond_to?(:batch_forward)
+          # Use batch processing if available
+          @module_instance.batch_forward(inputs_array)
+        else
+          # Fall back to individual processing
+          inputs_array.map { |inputs| @module_instance.call(inputs) }
+        end
+      end
+    end
+
+    # Module loader that provides batch loading for Desiru modules
+    class ModuleLoader < ::GraphQL::Dataloader::Source
+      def initialize(operation_name, modules)
+        super()
+        @operation_name = operation_name
+        @modules = modules
+      end
+
+      def fetch(args_array)
+        module_instance = @modules[@operation_name.to_s] || @modules[@operation_name.to_sym]
+
+        raise "Module not found for operation: #{@operation_name}" unless module_instance
+
+        # Transform GraphQL arguments to snake_case
+        transformed_args = args_array.map { |args| transform_graphql_args(args) }
+
+        results = if module_instance.respond_to?(:batch_forward)
+                    # Batch process all requests
+                    module_instance.batch_forward(transformed_args)
+                  else
+                    # Fall back to individual processing
+                    transformed_args.map { |args| module_instance.call(args) }
+                  end
+
+        # Transform results back to camelCase
+        results.map { |result| transform_module_result(result) }
+      end
+
+      private
+
+      def transform_graphql_args(args)
+        # Convert camelCase keys to snake_case
+        args.transform_keys do |key|
+          key_str = key.to_s
+          if key_str =~ /[a-z][A-Z]/
+            key_str.gsub(/([A-Z])/, '_\1').downcase.to_sym
+          else
+            key_str.downcase.to_sym
+          end
+        end
+      end
+
+      def transform_module_result(result)
+        # Convert ModuleResult to hash with camelCase keys
+        if result.respond_to?(:to_h)
+          result.to_h.transform_keys { |key| camelcase_field_name(key) }
+        else
+          result
+        end
+      end
+
+      def camelcase_field_name(field_name)
+        # Convert snake_case to camelCase
+        clean_name = field_name.to_s.gsub('?', '')
+        parts = clean_name.split('_')
+        parts[0] + parts[1..].map(&:capitalize).join
+      end
+    end
+  end
+end
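
Both classes above subclass ::GraphQL::Dataloader::Source, so they are meant to be driven by graphql-ruby's dataloader rather than called directly. A minimal sketch of a resolver batching through ModuleLoader, assuming the schema enables use GraphQL::Dataloader and exposes a hash of Desiru modules under a hypothetical context[:desiru_modules] key (the field and type names are illustrative, not from the gem):

# Hypothetical query type; only ModuleLoader and its (operation_name, modules)
# initializer come from the diff above.
class QueryType < GraphQL::Schema::Object
  field :answer, String, null: true do
    argument :question, String, required: true
  end

  def answer(question:)
    modules = context[:desiru_modules] # assumed shape: { 'answer' => desiru_module }
    # dataloader.with(Source, *init_args).load(key) defers work so that all
    # pending keys for the same source reach ModuleLoader#fetch as one array.
    dataloader
      .with(Desiru::GraphQL::ModuleLoader, 'answer', modules)
      .load({ question: question })
  end
end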

data/lib/desiru/graphql/data_loader.rb
@@ -9,35 +9,38 @@ module Desiru
         @loaders = {}
         @results_cache = {}
         @pending_loads = Hash.new { |h, k| h[k] = [] }
+        @pending_promises = Hash.new { |h, k| h[k] = {} }
+        @mutex = Mutex.new
       end

       # Get or create a loader for a specific module
-      def for(module_class, **options)
+      def for(module_class_or_instance, **options)
+        # Handle both module classes and instances
+        module_class = module_class_or_instance.is_a?(Class) ? module_class_or_instance : module_class_or_instance.class
         key = loader_key(module_class, options)
-        @loaders[key] ||= BatchLoader.new(module_class, **options)
+        @loaders[key] ||= BatchLoader.new(module_class_or_instance, self, **options)
       end

       # Execute all pending loads in batch
       def perform_loads
-        @pending_loads.each do |loader_key, batch|
-          next if batch.empty?
-
-          loader = @loaders[loader_key]
-          results = loader.load_batch(batch.map(&:first))
-
-          batch.each_with_index do |(_inputs, promise), idx|
-            promise.fulfill(results[idx])
+        @mutex.synchronize do
+          @pending_loads.each do |loader_key, batch|
+            process_loader_batch(loader_key, batch)
           end
-        end

-        @pending_loads.clear
+          @pending_loads.clear
+          @pending_promises.clear
+        end
       end

       # Clear all caches
       def clear!
-        @results_cache.clear
-        @pending_loads.clear
-        @loaders.values.each(&:clear_cache!)
+        @mutex.synchronize do
+          @results_cache.clear
+          @pending_loads.clear
+          @pending_promises.clear
+          @loaders.each_value(&:clear_cache!)
+        end
       end

       private
@@ -46,48 +49,115 @@ module Desiru
         "#{module_class.name}:#{options.hash}"
       end

+      def group_inputs_by_signature(inputs_array)
+        inputs_array.group_by do |inputs|
+          # Group by input keys to process similar queries together
+          inputs.keys.sort.join(':')
+        end
+      end
+
+      def process_loader_batch(loader_key, batch)
+        return if batch.empty?
+
+        loader = @loaders[loader_key]
+        return unless loader # Skip if loader not found
+
+        # Deduplicate requests
+        unique_inputs_map, promises_by_inputs = deduplicate_batch(batch)
+        unique_inputs = unique_inputs_map.values
+
+        # Process batch and handle results
+        results_map = execute_batch(loader, unique_inputs, unique_inputs_map)
+
+        # Fulfill promises with results
+        fulfill_promises(promises_by_inputs, results_map)
+      end
+
+      def deduplicate_batch(batch)
+        unique_inputs_map = {}
+        promises_by_inputs = Hash.new { |h, k| h[k] = [] }
+
+        batch.each do |inputs, promise|
+          input_key = inputs.sort.to_h.hash
+          unique_inputs_map[input_key] = inputs
+          promises_by_inputs[input_key] << promise
+        end
+
+        [unique_inputs_map, promises_by_inputs]
+      end
+
+      def execute_batch(loader, unique_inputs, unique_inputs_map)
+        results_map = {}
+
+        begin
+          results = loader.load_batch(unique_inputs)
+          unique_inputs.each_with_index do |inputs, idx|
+            input_key = inputs.sort.to_h.hash
+            results_map[input_key] = results[idx]
+          end
+        rescue StandardError => e
+          # Mark all promises as rejected on error
+          unique_inputs_map.each_key do |input_key|
+            results_map[input_key] = { error: e }
+          end
+        end
+
+        results_map
+      end
+
+      def fulfill_promises(promises_by_inputs, results_map)
+        promises_by_inputs.each do |input_key, promises|
+          result = results_map[input_key]
+
+          promises.each do |promise|
+            if result.is_a?(Hash) && result[:error]
+              promise.reject(result[:error])
+            else
+              promise.fulfill(result)
+            end
+          end
+        end
+      end
+
       # Individual batch loader for a specific module
       class BatchLoader
-        attr_reader :module_class, :batch_size, :cache
+        attr_reader :module_class_or_instance, :batch_size, :cache, :parent_loader

-        def initialize(module_class, batch_size: 100, cache: true)
-          @module_class = module_class
+        def initialize(module_class_or_instance, parent_loader, batch_size: 100, cache: true)
+          @module_class_or_instance = module_class_or_instance
+          @parent_loader = parent_loader
          @batch_size = batch_size
          @cache = cache
          @cache_store = {} if cache
        end

-        # Load a batch of inputs
+        # Load a batch of inputs - used for immediate batch processing
        def load_batch(inputs_array)
          return load_from_cache(inputs_array) if cache && all_cached?(inputs_array)

-          # Group inputs by signature to optimize processing
-          grouped = group_by_signature(inputs_array)
-          results = []
+          results = process_batch(inputs_array)

-          grouped.each do |_signature_key, inputs_group|
-            module_instance = create_module_instance(inputs_group.first)
-
-            # Process in chunks to respect batch_size
-            inputs_group.each_slice(batch_size) do |chunk|
-              chunk_results = process_chunk(module_instance, chunk)
-              results.concat(chunk_results)
-
-              # Cache results if enabled
-              cache_results(chunk, chunk_results) if cache
-            end
-          end
+          # Cache results if enabled
+          cache_results(inputs_array, results) if cache

          results
        end

        # Load a single input (returns a promise for lazy evaluation)
        def load(inputs)
-          Promise.new do |promise|
-            if cache && @cache_store.key?(cache_key(inputs))
-              promise.fulfill(@cache_store[cache_key(inputs)])
-            else
-              # Queue for batch loading
+          # Check cache first if enabled
+          if cache && @cache_store.key?(cache_key(inputs))
+            # Return immediately fulfilled promise for cached value
+            promise = Promise.new
+            promise.fulfill(@cache_store[cache_key(inputs)])
+            promise
+          else
+            # Check for existing pending promise to enable deduplication
+            existing_promise = check_pending_promise(inputs)
+            return existing_promise if existing_promise
+
+            # Create promise and queue for batch loading
+            Promise.new do |promise|
              queue_for_loading(inputs, promise)
            end
          end
@@ -97,6 +167,29 @@ module Desiru
          @cache_store.clear if cache
        end

+        # Process a batch of inputs
+        def process_batch(inputs_array)
+          # Use the provided module instance or create one
+          module_instance = if @module_class_or_instance.is_a?(Class)
+                              create_module_instance(inputs_array.first)
+                            else
+                              @module_class_or_instance
+                            end
+
+          if module_instance.respond_to?(:batch_forward)
+            # If module supports batch processing
+            module_instance.batch_forward(inputs_array)
+          else
+            # Fall back to individual processing
+            inputs_array.map { |inputs| module_instance.call(inputs) }
+          end
+        end
+
+        # Cache a single result
+        def cache_result(inputs, result)
+          @cache_store[cache_key(inputs)] = result if cache
+        end
+
        private

        def all_cached?(inputs_array)
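
Note that process_batch above, like the sources in batch_loader.rb, only batches for real when the Desiru module responds to batch_forward; otherwise it maps over call one input at a time. A hedged sketch of an object opting into batching; the class is illustrative and only the call/batch_forward contract is taken from the diff:

class BulkEcho
  # Quacks like a Desiru module for the loader: responds to #call(inputs_hash)
  # and, optionally, #batch_forward.
  def call(inputs)
    { answer: "You asked: #{inputs[:question]}" }
  end

  # Receives an array of input hashes and must return results in the same order.
  def batch_forward(inputs_array)
    inputs_array.map { |inputs| call(inputs) }
  end
end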
@@ -117,17 +210,17 @@ module Desiru
          inputs.sort.to_h.hash
        end

-        def group_by_signature(inputs_array)
-          inputs_array.group_by do |inputs|
-            # Group by input keys to process similar queries together
-            inputs.keys.sort.join(':')
-          end
-        end
-
        def create_module_instance(sample_inputs)
          # Infer signature from inputs
          signature = infer_signature(sample_inputs)
-          module_class.new(signature)
+
+          # Get the module class
+          if @module_class_or_instance.is_a?(Class)
+            @module_class_or_instance.new(signature)
+          else
+            # Already an instance, return it
+            @module_class_or_instance
+          end
        end

        def infer_signature(inputs)
@@ -145,64 +238,138 @@ module Desiru
          when TrueClass, FalseClass then 'bool'
          when Array then 'list'
          when Hash then 'hash'
-          else 'string'
+          else value.class.name.downcase
          end
        end

-        def process_chunk(module_instance, chunk)
-          if module_instance.respond_to?(:batch_forward)
-            # If module supports batch processing
-            module_instance.batch_forward(chunk)
-          else
-            # Fall back to individual processing
-            chunk.map { |inputs| module_instance.call(inputs) }
+        def check_pending_promise(inputs)
+          # Check if there's already a pending promise for these inputs
+          final_key = loader_key
+          input_key = inputs.sort.to_h.hash
+
+          parent_loader.instance_variable_get(:@mutex).synchronize do
+            parent_loader.instance_variable_get(:@pending_promises)[final_key][input_key]
          end
        end

        def queue_for_loading(inputs, promise)
-          # This would integrate with the parent DataLoader's pending loads
-          # For now, process immediately
-          result = module_class.new(infer_signature(inputs)).call(inputs)
-          promise.fulfill(result)
-          @cache_store[cache_key(inputs)] = result if cache
+          # Queue the request with the parent DataLoader for batch processing
+          final_key = loader_key
+          input_key = inputs.sort.to_h.hash
+
+          parent_loader.instance_variable_get(:@mutex).synchronize do
+            # Store this promise for future deduplication
+            parent_loader.instance_variable_get(:@pending_promises)[final_key][input_key] = promise
+            parent_loader.instance_variable_get(:@pending_loads)[final_key] << [inputs, promise]
+          end
+        end
+
+        def loader_key
+          # Create a key that matches how this loader was registered
+          module_name = if @module_class_or_instance.is_a?(Class)
+                          @module_class_or_instance.name
+                        else
+                          @module_class_or_instance.class.name
+                        end
+          loader_key = "#{module_name}:#{batch_size}:#{cache}"
+
+          # Find the actual loader key that was used to create this loader
+          loaders = parent_loader.instance_variable_get(:@loaders)
+          actual_key = loaders.keys.find { |k| loaders[k] == self }
+          actual_key || loader_key
        end
      end

-      # Promise implementation for lazy loading
+      # Thread-safe Promise implementation for lazy loading
      class Promise
        def initialize(&block)
+          @mutex = Mutex.new
+          @condition = ConditionVariable.new
          @fulfilled = false
          @value = nil
+          @error = nil
          @callbacks = []
-          block.call(self) if block
+          block&.call(self)
        end

        def fulfill(value)
-          return if @fulfilled
+          callbacks_to_run = nil
+
+          @mutex.synchronize do
+            return if @fulfilled
+
+            @value = value
+            @fulfilled = true
+            callbacks_to_run = @callbacks.dup
+            @callbacks.clear

-          @value = value
-          @fulfilled = true
-          @callbacks.each { |cb| cb.call(value) }
-          @callbacks.clear
+            # Signal all waiting threads
+            @condition.broadcast
+          end
+
+          # Run callbacks outside the mutex to avoid deadlock
+          callbacks_to_run&.each { |cb| cb.call(value) }
+        end
+
+        def reject(error)
+          @mutex.synchronize do
+            return if @fulfilled
+
+            @error = error
+            @fulfilled = true
+            @callbacks.clear
+
+            # Signal all waiting threads
+            @condition.broadcast
+          end
        end

        def then(&block)
-          if @fulfilled
-            block.call(@value)
-          else
-            @callbacks << block
+          run_immediately = false
+          value_to_pass = nil
+
+          @mutex.synchronize do
+            if @fulfilled && !@error
+              run_immediately = true
+              value_to_pass = @value
+            elsif !@fulfilled
+              @callbacks << block
+            end
          end
+
+          # Run callback outside mutex if already fulfilled
+          block.call(value_to_pass) if run_immediately
+
          self
        end

-        def value
-          raise "Promise not yet fulfilled" unless @fulfilled
+        def value(timeout: nil)
+          @mutex.synchronize do
+            if timeout
+              end_time = Time.now + timeout
+              until @fulfilled
+                remaining = end_time - Time.now
+                break if remaining <= 0
+
+                @condition.wait(@mutex, remaining)
+              end
+            else
+              @condition.wait(@mutex) until @fulfilled
+            end
+
+            raise @error if @error
+            raise "Promise not yet fulfilled" unless @fulfilled

-          @value
+            @value
+          end
        end

        def fulfilled?
-          @fulfilled
+          @mutex.synchronize { @fulfilled }
+        end
+
+        def rejected?
+          @mutex.synchronize { @fulfilled && !@error.nil? }
        end
      end
    end
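
Pulling the DataLoader changes together: #for now accepts a module class or instance, #load returns a thread-safe Promise queued on the parent loader, and #perform_loads drains the queue under a mutex while deduplicating identical inputs. A rough usage sketch, assuming DataLoader.new takes no arguments (as the initializer above suggests) and reusing the illustrative BulkEcho object from earlier:

data_loader = Desiru::GraphQL::DataLoader.new
loader = data_loader.for(BulkEcho.new)          # instances are now accepted, not just classes

first  = loader.load(question: 'What is Desiru?')  # queued, not yet executed
second = loader.load(question: 'What is Desiru?')  # identical inputs share the pending promise

data_loader.perform_loads     # runs the batch (via batch_forward here) and fulfills the promises
puts first.value              # blocks until fulfilled; value(timeout: 5) would bound the wait
puts first.fulfilled?         # => true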

data/lib/desiru/graphql/enum_builder.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'graphql'
+
+module Desiru
+  module GraphQL
+    # Handles GraphQL enum type generation
+    module EnumBuilder
+      extend self
+
+      def create_enum_type(field, type_cache, cache_mutex)
+        values = extract_literal_values(field)
+        cache_key = "Enum:#{field.name}:#{values.sort.join(',')}"
+
+        cache_mutex.synchronize do
+          return type_cache[cache_key] if type_cache[cache_key]
+        end
+
+        enum_name = "#{field.name.to_s.capitalize}Enum#{cache_key.hash.abs}"
+
+        enum_type = Class.new(::GraphQL::Schema::Enum) do
+          graphql_name enum_name
+          description "Enum for #{field.name}"
+
+          values.each do |val|
+            value val.upcase.gsub(/[^A-Z0-9_]/, '_'), value: val
+          end
+        end
+
+        cache_mutex.synchronize do
+          type_cache[cache_key] = enum_type
+        end
+
+        enum_type
+      end
+
+      def create_enum_from_values(values, type_cache, cache_mutex)
+        cache_key = "LiteralEnum:#{values.sort.join(',')}"
+
+        cache_mutex.synchronize do
+          return type_cache[cache_key] if type_cache[cache_key]
+        end
+
+        enum_name = "Literal#{cache_key.hash.abs}Enum"
+
+        enum_type = Class.new(::GraphQL::Schema::Enum) do
+          graphql_name enum_name
+
+          values.each do |val|
+            value val.upcase.gsub(/[^A-Z0-9_]/, '_'), value: val
+          end
+        end
+
+        cache_mutex.synchronize do
+          type_cache[cache_key] = enum_type
+        end
+
+        enum_type
+      end
+
+      private
+
+      def extract_literal_values(field)
+        # Try to extract values from the field's validator
+        if field.respond_to?(:validator) && field.validator.respond_to?(:instance_variable_get)
+          field.validator.instance_variable_get(:@values) || []
+        elsif field.respond_to?(:element_type) && field.element_type.is_a?(Hash)
+          field.element_type[:values] || []
+        else
+          []
+        end
+      end
+    end
+  end
+end
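
EnumBuilder leaves ownership of the type cache and its mutex to the caller (the schema generator passes them in). A small sketch of building an enum type directly from literal values; the cache variables here are stand-ins for whatever the generator actually uses:

type_cache  = {}
cache_mutex = Mutex.new

status_enum = Desiru::GraphQL::EnumBuilder.create_enum_from_values(
  %w[draft published archived], type_cache, cache_mutex
)

status_enum.graphql_name  # => "Literal…Enum" with a hash-derived suffix
status_enum.values.keys   # => ["DRAFT", "PUBLISHED", "ARCHIVED"]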

data/lib/desiru/graphql/executor.rb
@@ -53,20 +53,48 @@ module Desiru
        results
      end

+      # Execute with automatic lazy loading support
+      def execute_with_lazy_loading(query_string, variables: {}, context: {}, operation_name: nil)
+        context[:data_loader] = @data_loader
+
+        # Use GraphQL's built-in lazy execution
+        @schema.execute(
+          query_string,
+          variables: variables,
+          context: context,
+          operation_name: operation_name
+        ) do |schema_query|
+          # Configure lazy loading behavior
+          schema_query.after_lazy_resolve do |value|
+            # Trigger batch loading after each lazy resolution
+            @data_loader.perform_loads
+            value
+          end
+        end
+      end
+
      private

      def batch_execute
        # Start batch loading context
        @data_loader.clear! if @data_loader.respond_to?(:clear!)

-        # Execute the GraphQL queries
+        # Execute the GraphQL queries with lazy loading support
        result = yield

-        # Perform all pending batch loads
-        @data_loader.perform_loads if @data_loader.respond_to?(:perform_loads)
+        # Always perform loads at least once to ensure batch processing
+        @data_loader.perform_loads
+
+        # Then perform any additional pending loads
+        @data_loader.perform_loads while pending_loads?

        result
      end
+
+      def pending_loads?
+        pending_loads = @data_loader.instance_variable_get(:@pending_loads)
+        pending_loads&.any? { |_, batch| !batch.empty? }
+      end
    end

    # GraphQL field extension for lazy loading
@@ -79,8 +107,13 @@ module Desiru
        if result.fulfilled?
          result.value
        else
-          # Create a lazy resolver
+          # Create a lazy resolver that integrates with DataLoader
          ::GraphQL::Execution::Lazy.new do
+            data_loader = context[:data_loader]
+
+            # Ensure batch loads are performed before accessing value
+            data_loader.perform_loads if data_loader && !result.fulfilled?
+
            result.value
          end
        end
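
The executor now exposes execute_with_lazy_loading, which places the DataLoader in the query context and flushes pending batches after each lazy resolution, while batch_execute keeps draining the queue until nothing is pending. A rough sketch of calling it; the Executor constructor arguments are assumed from the @schema and @data_loader instance variables used above, since the initializer is not shown in this hunk:

# Assumed constructor shape: an executor wrapping a generated schema and a DataLoader.
executor = Desiru::GraphQL::Executor.new(schema, data_loader)

result = executor.execute_with_lazy_loading(
  'query($q: String!) { answer(question: $q) }',
  variables: { 'q' => 'What is Desiru?' }
)

puts result['data']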