ruby_llm-agents 3.7.2 → 3.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. checksums.yaml +4 -4
  2. data/app/controllers/ruby_llm/agents/agents_controller.rb +14 -141
  3. data/app/controllers/ruby_llm/agents/dashboard_controller.rb +12 -166
  4. data/app/controllers/ruby_llm/agents/executions_controller.rb +1 -1
  5. data/app/helpers/ruby_llm/agents/application_helper.rb +38 -0
  6. data/app/models/ruby_llm/agents/execution/analytics.rb +302 -103
  7. data/app/models/ruby_llm/agents/execution.rb +76 -54
  8. data/app/models/ruby_llm/agents/execution_detail.rb +2 -0
  9. data/app/models/ruby_llm/agents/tenant.rb +39 -0
  10. data/app/services/ruby_llm/agents/agent_registry.rb +98 -0
  11. data/app/views/ruby_llm/agents/executions/_list.html.erb +3 -17
  12. data/lib/generators/ruby_llm_agents/templates/add_dashboard_performance_indexes_migration.rb.tt +23 -0
  13. data/lib/generators/ruby_llm_agents/templates/migration.rb.tt +3 -0
  14. data/lib/generators/ruby_llm_agents/upgrade_generator.rb +25 -0
  15. data/lib/ruby_llm/agents/base_agent.rb +7 -1
  16. data/lib/ruby_llm/agents/core/configuration.rb +1 -0
  17. data/lib/ruby_llm/agents/core/instrumentation.rb +15 -19
  18. data/lib/ruby_llm/agents/core/version.rb +1 -1
  19. data/lib/ruby_llm/agents/infrastructure/alert_manager.rb +4 -4
  20. data/lib/ruby_llm/agents/infrastructure/budget_tracker.rb +19 -11
  21. data/lib/ruby_llm/agents/pipeline/builder.rb +8 -4
  22. data/lib/ruby_llm/agents/pipeline/context.rb +43 -1
  23. data/lib/ruby_llm/agents/pipeline/middleware/budget.rb +6 -4
  24. data/lib/ruby_llm/agents/pipeline/middleware/cache.rb +6 -4
  25. data/lib/ruby_llm/agents/pipeline/middleware/instrumentation.rb +26 -75
  26. data/lib/ruby_llm/agents/pipeline/middleware/reliability.rb +6 -6
  27. data/lib/ruby_llm/agents/pipeline/middleware/tenant.rb +23 -27
  28. data/lib/ruby_llm/agents/providers/inception/capabilities.rb +107 -0
  29. data/lib/ruby_llm/agents/providers/inception/chat.rb +17 -0
  30. data/lib/ruby_llm/agents/providers/inception/configuration.rb +9 -0
  31. data/lib/ruby_llm/agents/providers/inception/models.rb +38 -0
  32. data/lib/ruby_llm/agents/providers/inception/registry.rb +45 -0
  33. data/lib/ruby_llm/agents/providers/inception.rb +50 -0
  34. data/lib/ruby_llm/agents/results/base.rb +4 -2
  35. data/lib/ruby_llm/agents/results/image_analysis_result.rb +4 -2
  36. data/lib/ruby_llm/agents/text/embedder.rb +4 -0
  37. data/lib/ruby_llm/agents.rb +4 -0
  38. metadata +8 -1
@@ -152,7 +152,7 @@ module RubyLLM
152
152
  private
153
153
 
154
154
  # Builds hourly chart data for last 24 hours
155
- # Optimized: Single GROUP BY query instead of 72 individual queries
155
+ # Optimized: Single SQL GROUP BY with conditional aggregation
156
156
  # Database-agnostic: works with both PostgreSQL and SQLite
157
157
  #
158
158
  # @param offset_days [Integer, nil] Optional offset for comparison data
@@ -161,12 +161,9 @@ module RubyLLM
161
161
  reference_time = (Time.current - offset).beginning_of_hour
162
162
  start_time = reference_time - 23.hours
163
163
 
164
- # Use database-agnostic aggregation with Ruby post-processing
165
- results = where(created_at: start_time..(reference_time + 1.hour))
166
- .select(:status, :total_cost, :duration_ms, :input_tokens, :output_tokens, :created_at)
167
- .group_by { |r| r.created_at.beginning_of_hour }
164
+ scope = where(created_at: start_time..(reference_time + 1.hour))
165
+ results = aggregated_chart_query(scope, granularity: :hour)
168
166
 
169
- # Build arrays for all 24 hours (fill missing with zeros)
170
167
  success_data = []
171
168
  failed_data = []
172
169
  cost_data = []
@@ -181,29 +178,23 @@ module RubyLLM
181
178
 
182
179
  23.downto(0).each do |hours_ago|
183
180
  bucket_time = (reference_time - hours_ago.hours).beginning_of_hour
184
- rows = results[bucket_time] || []
185
-
186
- s = rows.count { |r| r.status == "success" }
187
- f = rows.count { |r| r.status.in?(%w[error timeout]) }
188
- c = rows.sum { |r| r.total_cost.to_f }
189
- t = rows.sum { |r| (r.input_tokens || 0) + (r.output_tokens || 0) }
190
-
191
- # Average duration for this bucket
192
- duration_rows = rows.select { |r| r.duration_ms.to_i > 0 }
193
- d = duration_rows.any? ? (duration_rows.sum { |r| r.duration_ms.to_i } / duration_rows.count) : 0
194
-
195
- success_data << s
196
- failed_data << f
197
- cost_data << c.round(4)
198
- duration_data << d.round
199
- tokens_data << t
200
-
201
- total_success += s
202
- total_failed += f
203
- total_cost += c
204
- total_tokens += t
205
- total_duration_sum += duration_rows.sum { |r| r.duration_ms.to_i }
206
- total_duration_count += duration_rows.count
181
+ key = bucket_time.strftime("%Y-%m-%d %H:00:00")
182
+ row = results[key] || {success: 0, failed: 0, cost: 0.0, duration: 0, tokens: 0}
183
+
184
+ success_data << row[:success]
185
+ failed_data << row[:failed]
186
+ cost_data << row[:cost].round(4)
187
+ duration_data << row[:duration]
188
+ tokens_data << row[:tokens]
189
+
190
+ total_success += row[:success]
191
+ total_failed += row[:failed]
192
+ total_cost += row[:cost]
193
+ total_tokens += row[:tokens]
194
+ if row[:duration] > 0
195
+ total_duration_sum += row[:duration]
196
+ total_duration_count += 1
197
+ end
207
198
  end
208
199
 
209
200
  avg_duration_ms = (total_duration_count > 0) ? (total_duration_sum / total_duration_count).round : 0
@@ -228,7 +219,7 @@ module RubyLLM
228
219
  end
229
220
 
230
221
  # Builds daily chart data for specified number of days
231
- # Optimized: Single query instead of 3*days individual queries
222
+ # Optimized: Single SQL GROUP BY with conditional aggregation
232
223
  # Database-agnostic: works with both PostgreSQL and SQLite
233
224
  #
234
225
  # @param days [Integer] Number of days to include
@@ -238,12 +229,9 @@ module RubyLLM
238
229
  end_date = Date.current - offset.days
239
230
  start_date = end_date - (days - 1).days
240
231
 
241
- # Use database-agnostic aggregation with Ruby post-processing
242
- results = where(created_at: start_date.beginning_of_day..end_date.end_of_day)
243
- .select(:status, :total_cost, :duration_ms, :input_tokens, :output_tokens, :created_at)
244
- .group_by { |r| r.created_at.to_date }
232
+ scope = where(created_at: start_date.beginning_of_day..end_date.end_of_day)
233
+ results = aggregated_chart_query(scope, granularity: :day)
245
234
 
246
- # Build arrays for all days (fill missing with zeros)
247
235
  success_data = []
248
236
  failed_data = []
249
237
  cost_data = []
@@ -258,29 +246,23 @@ module RubyLLM
258
246
 
259
247
  (days - 1).downto(0).each do |i|
260
248
  date = end_date - i.days
261
- rows = results[date] || []
262
-
263
- s = rows.count { |r| r.status == "success" }
264
- f = rows.count { |r| r.status.in?(%w[error timeout]) }
265
- c = rows.sum { |r| r.total_cost.to_f }
266
- t = rows.sum { |r| (r.input_tokens || 0) + (r.output_tokens || 0) }
267
-
268
- # Average duration for this bucket
269
- duration_rows = rows.select { |r| r.duration_ms.to_i > 0 }
270
- d = duration_rows.any? ? (duration_rows.sum { |r| r.duration_ms.to_i } / duration_rows.count) : 0
271
-
272
- success_data << s
273
- failed_data << f
274
- cost_data << c.round(4)
275
- duration_data << d.round
276
- tokens_data << t
277
-
278
- total_success += s
279
- total_failed += f
280
- total_cost += c
281
- total_tokens += t
282
- total_duration_sum += duration_rows.sum { |r| r.duration_ms.to_i }
283
- total_duration_count += duration_rows.count
249
+ key = date.to_s
250
+ row = results[key] || {success: 0, failed: 0, cost: 0.0, duration: 0, tokens: 0}
251
+
252
+ success_data << row[:success]
253
+ failed_data << row[:failed]
254
+ cost_data << row[:cost].round(4)
255
+ duration_data << row[:duration]
256
+ tokens_data << row[:tokens]
257
+
258
+ total_success += row[:success]
259
+ total_failed += row[:failed]
260
+ total_cost += row[:cost]
261
+ total_tokens += row[:tokens]
262
+ if row[:duration] > 0
263
+ total_duration_sum += row[:duration]
264
+ total_duration_count += 1
265
+ end
284
266
  end
285
267
 
286
268
  avg_duration_ms = (total_duration_count > 0) ? (total_duration_sum / total_duration_count).round : 0
@@ -306,6 +288,7 @@ module RubyLLM
306
288
  end
307
289
 
308
290
  # Builds daily chart data for a custom date range
291
+ # Optimized: Single SQL GROUP BY with conditional aggregation
309
292
  # Database-agnostic: works with both PostgreSQL and SQLite
310
293
  #
311
294
  # @param from_date [Date] Start date (inclusive)
@@ -314,12 +297,9 @@ module RubyLLM
314
297
  def build_daily_chart_data_for_dates(from_date, to_date)
315
298
  days = (to_date - from_date).to_i + 1
316
299
 
317
- # Use database-agnostic aggregation with Ruby post-processing
318
- results = where(created_at: from_date.beginning_of_day..to_date.end_of_day)
319
- .select(:status, :total_cost, :duration_ms, :input_tokens, :output_tokens, :created_at)
320
- .group_by { |r| r.created_at.to_date }
300
+ scope = where(created_at: from_date.beginning_of_day..to_date.end_of_day)
301
+ results = aggregated_chart_query(scope, granularity: :day)
321
302
 
322
- # Build arrays for all days (fill missing with zeros)
323
303
  success_data = []
324
304
  failed_data = []
325
305
  cost_data = []
@@ -334,29 +314,23 @@ module RubyLLM
334
314
 
335
315
  (0...days).each do |i|
336
316
  date = from_date + i.days
337
- rows = results[date] || []
338
-
339
- s = rows.count { |r| r.status == "success" }
340
- f = rows.count { |r| r.status.in?(%w[error timeout]) }
341
- c = rows.sum { |r| r.total_cost.to_f }
342
- t = rows.sum { |r| (r.input_tokens || 0) + (r.output_tokens || 0) }
343
-
344
- # Average duration for this bucket
345
- duration_rows = rows.select { |r| r.duration_ms.to_i > 0 }
346
- d = duration_rows.any? ? (duration_rows.sum { |r| r.duration_ms.to_i } / duration_rows.count) : 0
347
-
348
- success_data << s
349
- failed_data << f
350
- cost_data << c.round(4)
351
- duration_data << d.round
352
- tokens_data << t
353
-
354
- total_success += s
355
- total_failed += f
356
- total_cost += c
357
- total_tokens += t
358
- total_duration_sum += duration_rows.sum { |r| r.duration_ms.to_i }
359
- total_duration_count += duration_rows.count
317
+ key = date.to_s
318
+ row = results[key] || {success: 0, failed: 0, cost: 0.0, duration: 0, tokens: 0}
319
+
320
+ success_data << row[:success]
321
+ failed_data << row[:failed]
322
+ cost_data << row[:cost].round(4)
323
+ duration_data << row[:duration]
324
+ tokens_data << row[:tokens]
325
+
326
+ total_success += row[:success]
327
+ total_failed += row[:failed]
328
+ total_cost += row[:cost]
329
+ total_tokens += row[:tokens]
330
+ if row[:duration] > 0
331
+ total_duration_sum += row[:duration]
332
+ total_duration_count += 1
333
+ end
360
334
  end
361
335
 
362
336
  avg_duration_ms = (total_duration_count > 0) ? (total_duration_sum / total_duration_count).round : 0
@@ -387,25 +361,28 @@ module RubyLLM
387
361
 
388
362
  # Builds the hourly activity data structure
389
363
  # Shows the last 24 hours with current hour on the right
364
+ # Optimized: Single SQL GROUP BY instead of 48 individual queries
390
365
  #
391
366
  # @return [Array<Hash>] Success and failed series data
392
367
  # @api private
393
368
  def build_hourly_activity_data
369
+ reference_time = Time.current.beginning_of_hour
370
+ start_time = reference_time - 23.hours
371
+
372
+ scope = where(created_at: start_time..(reference_time + 1.hour))
373
+ results = aggregated_chart_query(scope, granularity: :hour)
374
+
394
375
  success_data = {}
395
376
  failed_data = {}
396
377
 
397
- # Use current time as reference so chart shows "now" on the right
398
- reference_time = Time.current.beginning_of_hour
399
-
400
- # Create entries for the last 24 hours ending at current hour
401
378
  23.downto(0).each do |hours_ago|
402
- start_time = reference_time - hours_ago.hours
403
- end_time = start_time + 1.hour
404
- time_label = start_time.in_time_zone.strftime("%H:%M")
379
+ bucket_time = (reference_time - hours_ago.hours).beginning_of_hour
380
+ time_label = bucket_time.in_time_zone.strftime("%H:%M")
381
+ key = bucket_time.strftime("%Y-%m-%d %H:00:00")
382
+ row = results[key] || {success: 0, failed: 0}
405
383
 
406
- hour_scope = where(created_at: start_time...end_time)
407
- success_data[time_label] = hour_scope.successful.count
408
- failed_data[time_label] = hour_scope.failed.count
384
+ success_data[time_label] = row[:success]
385
+ failed_data[time_label] = row[:failed]
409
386
  end
410
387
 
411
388
  [
@@ -428,22 +405,38 @@ module RubyLLM
428
405
  end
429
406
 
430
407
  # Builds the hourly cost data structure (uncached)
408
+ # Optimized: Single SQL GROUP BY instead of 48 individual queries
431
409
  #
432
410
  # @return [Array<Hash>] Input and output cost series data
433
411
  # @api private
434
412
  def build_hourly_cost_data
413
+ day_start = Time.current.beginning_of_day
414
+ bucket = date_bucket_sql(:hour)
415
+
416
+ rows = where(created_at: day_start..(day_start + 24.hours))
417
+ .select(
418
+ Arel.sql("#{bucket} AS bucket"),
419
+ Arel.sql("SUM(COALESCE(input_cost, 0)) AS sum_input_cost"),
420
+ Arel.sql("SUM(COALESCE(output_cost, 0)) AS sum_output_cost")
421
+ )
422
+ .group(Arel.sql("bucket"))
423
+
424
+ cost_by_hour = rows.each_with_object({}) do |row, hash|
425
+ hash[row["bucket"].to_s] = {
426
+ input: row["sum_input_cost"].to_f.round(6),
427
+ output: row["sum_output_cost"].to_f.round(6)
428
+ }
429
+ end
430
+
435
431
  input_cost_data = {}
436
432
  output_cost_data = {}
437
433
 
438
- # Create entries for each hour of the day (0-23)
439
434
  (0..23).each do |hour|
440
435
  time_label = format("%02d:00", hour)
441
- start_time = Time.current.beginning_of_day + hour.hours
442
- end_time = start_time + 1.hour
443
-
444
- hour_scope = where(created_at: start_time...end_time)
445
- input_cost_data[time_label] = (hour_scope.sum(:input_cost) || 0).round(6)
446
- output_cost_data[time_label] = (hour_scope.sum(:output_cost) || 0).round(6)
436
+ key = (day_start + hour.hours).strftime("%Y-%m-%d %H:00:00")
437
+ row = cost_by_hour[key] || {input: 0, output: 0}
438
+ input_cost_data[time_label] = row[:input]
439
+ output_cost_data[time_label] = row[:output]
447
440
  end
448
441
 
449
442
  [
@@ -514,6 +507,141 @@ module RubyLLM
514
507
  (rate_limited_count.to_f / total * 100).round(1)
515
508
  end
516
509
 
510
+ # Builds per-model statistics for model comparison
511
+ # Optimized: Single SQL GROUP BY with conditional aggregation
512
+ #
513
+ # @param scope [ActiveRecord::Relation] Pre-filtered scope
514
+ # @return [Array<Hash>] Model stats sorted by total cost descending
515
+ def model_stats(scope: all)
516
+ rows = scope.where.not(model_id: nil)
517
+ .select(
518
+ :model_id,
519
+ Arel.sql("COUNT(*) AS exec_count"),
520
+ Arel.sql("COALESCE(SUM(total_cost), 0) AS sum_cost"),
521
+ Arel.sql("COALESCE(SUM(total_tokens), 0) AS sum_tokens"),
522
+ Arel.sql("AVG(duration_ms) AS avg_dur"),
523
+ Arel.sql("SUM(CASE WHEN status = 'success' THEN 1 ELSE 0 END) AS success_cnt")
524
+ )
525
+ .group(:model_id)
526
+
527
+ total_cost = rows.sum { |r| r["sum_cost"].to_f }
528
+
529
+ rows.map do |row|
530
+ count = row["exec_count"].to_i
531
+ model_cost = row["sum_cost"].to_f
532
+ model_tokens = row["sum_tokens"].to_i
533
+ successful = row["success_cnt"].to_i
534
+
535
+ {
536
+ model_id: row.model_id,
537
+ executions: count,
538
+ total_cost: model_cost,
539
+ total_tokens: model_tokens,
540
+ avg_duration_ms: row["avg_dur"].to_i,
541
+ success_rate: (count > 0) ? (successful.to_f / count * 100).round(1) : 0,
542
+ cost_per_1k_tokens: (model_tokens > 0) ? (model_cost / model_tokens * 1000).round(4) : 0,
543
+ cost_percentage: (total_cost > 0) ? (model_cost / total_cost * 100).round(1) : 0
544
+ }
545
+ end.sort_by { |m| -(m[:total_cost] || 0) }
546
+ end
547
+
548
+ # Builds top errors list from error executions
549
+ #
550
+ # @param scope [ActiveRecord::Relation] Pre-filtered scope
551
+ # @param limit [Integer] Max errors to return
552
+ # @return [Array<Hash>] Top error classes with counts
553
+ def top_errors(scope: all, limit: 5)
554
+ error_scope = scope.where(status: "error")
555
+ total_errors = error_scope.count
556
+
557
+ error_scope.group(:error_class)
558
+ .select("error_class, COUNT(*) as count, MAX(created_at) as last_seen")
559
+ .order("count DESC")
560
+ .limit(limit)
561
+ .map do |row|
562
+ {
563
+ error_class: row.error_class || "Unknown Error",
564
+ count: row.count,
565
+ percentage: (total_errors > 0) ? (row.count.to_f / total_errors * 100).round(1) : 0,
566
+ last_seen: row.last_seen
567
+ }
568
+ end
569
+ end
570
+
571
+ # Builds cache savings statistics
572
+ # Optimized: Single SQL query with conditional aggregation
573
+ #
574
+ # @param scope [ActiveRecord::Relation] Pre-filtered scope
575
+ # @return [Hash] Cache savings data
576
+ def cache_savings(scope: all)
577
+ cond = cache_hit_condition
578
+ total_count, cache_count, cache_cost = scope.pick(
579
+ Arel.sql("COUNT(*)"),
580
+ Arel.sql("SUM(CASE WHEN #{cond} THEN 1 ELSE 0 END)"),
581
+ Arel.sql("COALESCE(SUM(CASE WHEN #{cond} THEN total_cost ELSE 0 END), 0)")
582
+ )
583
+
584
+ total_count = total_count.to_i
585
+ cache_count = cache_count.to_i
586
+
587
+ return {count: 0, estimated_savings: 0, hit_rate: 0, total_executions: 0} if total_count.zero?
588
+
589
+ {
590
+ count: cache_count,
591
+ estimated_savings: cache_cost.to_f,
592
+ hit_rate: (cache_count.to_f / total_count * 100).round(1),
593
+ total_executions: total_count
594
+ }
595
+ end
596
+
597
+ # Batch fetches execution stats grouped by agent type
598
+ # Optimized: Single SQL GROUP BY with conditional aggregation
599
+ #
600
+ # @param scope [ActiveRecord::Relation] Pre-filtered scope
601
+ # @return [Hash<String, Hash>] Agent type => stats hash
602
+ def batch_agent_stats(scope: all)
603
+ rows = scope.select(
604
+ :agent_type,
605
+ Arel.sql("COUNT(*) AS exec_count"),
606
+ Arel.sql("COALESCE(SUM(total_cost), 0) AS sum_cost"),
607
+ Arel.sql("AVG(duration_ms) AS avg_dur"),
608
+ Arel.sql("SUM(CASE WHEN status = 'success' THEN 1 ELSE 0 END) AS success_cnt")
609
+ ).group(:agent_type)
610
+
611
+ rows.each_with_object({}) do |row, hash|
612
+ count = row["exec_count"].to_i
613
+ total_cost = row["sum_cost"].to_f
614
+ successful = row["success_cnt"].to_i
615
+
616
+ hash[row.agent_type] = {
617
+ count: count,
618
+ total_cost: total_cost,
619
+ avg_cost: (count > 0) ? (total_cost / count).round(6) : 0,
620
+ avg_duration_ms: row["avg_dur"].to_i,
621
+ success_rate: (count > 0) ? (successful.to_f / count * 100).round(1) : 0
622
+ }
623
+ end
624
+ end
625
+
626
+ # Cached daily statistics for dashboard
627
+ #
628
+ # @return [Hash] Daily stats with totals and rates
629
+ def dashboard_daily_stats
630
+ Rails.cache.fetch("ruby_llm_agents/daily_stats/#{Date.current}", expires_in: 1.minute) do
631
+ scope = today
632
+ total = scope.count
633
+ {
634
+ total_executions: total,
635
+ successful: scope.successful.count,
636
+ failed: scope.failed.count,
637
+ total_cost: scope.total_cost_sum || 0,
638
+ total_tokens: scope.total_tokens_sum || 0,
639
+ avg_duration_ms: scope.avg_duration&.round || 0,
640
+ success_rate: (total > 0) ? (scope.successful.count.to_f / total * 100).round(1) : 0.0
641
+ }
642
+ end
643
+ end
644
+
517
645
  private
518
646
 
519
647
  # Calculates success rate percentage for a scope
@@ -563,6 +691,77 @@ module RubyLLM
563
691
  return 0.0 if old_value.nil? || old_value.zero?
564
692
  ((new_value - old_value).to_f / old_value * 100).round(2)
565
693
  end
694
+
695
+ # Returns a SQL expression for date/time bucketing
696
+ #
697
+ # Database-agnostic: uses strftime for SQLite, date_trunc for PostgreSQL.
698
+ #
699
+ # @param granularity [Symbol] :hour or :day
700
+ # @return [Arel::Nodes::SqlLiteral] SQL fragment for SELECT/GROUP BY
701
+ def date_bucket_sql(granularity)
702
+ col = "#{table_name}.created_at"
703
+
704
+ if connection.adapter_name.downcase.include?("sqlite")
705
+ case granularity
706
+ when :hour then Arel.sql("strftime('%Y-%m-%d %H:00:00', #{col})")
707
+ when :day then Arel.sql("strftime('%Y-%m-%d', #{col})")
708
+ else raise ArgumentError, "Unknown granularity: #{granularity}"
709
+ end
710
+ else
711
+ case granularity
712
+ when :hour then Arel.sql("to_char(date_trunc('hour', #{col}), 'YYYY-MM-DD HH24:00:00')")
713
+ when :day then Arel.sql("to_char(#{col}::date, 'YYYY-MM-DD')")
714
+ else raise ArgumentError, "Unknown granularity: #{granularity}"
715
+ end
716
+ end
717
+ end
718
+
719
+ # Runs a single aggregated query for chart data using SQL GROUP BY
720
+ #
721
+ # Replaces loading all records into Ruby memory. One SQL query returns
722
+ # pre-aggregated metrics per time bucket.
723
+ #
724
+ # @param scope [ActiveRecord::Relation] Pre-filtered scope with time range
725
+ # @param granularity [Symbol] :hour or :day
726
+ # @return [Hash{String => Hash}] Bucket key => {success:, failed:, cost:, duration:, tokens:}
727
+ def aggregated_chart_query(scope, granularity:)
728
+ bucket = date_bucket_sql(granularity)
729
+
730
+ rows = scope
731
+ .select(
732
+ Arel.sql("#{bucket} AS bucket"),
733
+ Arel.sql("SUM(CASE WHEN status = 'success' THEN 1 ELSE 0 END) AS success_count"),
734
+ Arel.sql("SUM(CASE WHEN status IN ('error','timeout') THEN 1 ELSE 0 END) AS failed_count"),
735
+ Arel.sql("SUM(COALESCE(total_cost, 0)) AS sum_cost"),
736
+ Arel.sql("AVG(CASE WHEN duration_ms > 0 THEN duration_ms ELSE NULL END) AS avg_dur"),
737
+ Arel.sql("SUM(COALESCE(input_tokens, 0) + COALESCE(output_tokens, 0)) AS sum_tokens")
738
+ )
739
+ .group(Arel.sql("bucket"))
740
+ .order(Arel.sql("bucket"))
741
+
742
+ rows.each_with_object({}) do |row, hash|
743
+ hash[row["bucket"].to_s] = {
744
+ success: row["success_count"].to_i,
745
+ failed: row["failed_count"].to_i,
746
+ cost: row["sum_cost"].to_f,
747
+ duration: row["avg_dur"].to_i,
748
+ tokens: row["sum_tokens"].to_i
749
+ }
750
+ end
751
+ end
752
+
753
+ # SQL condition for boolean cache_hit column
754
+ #
755
+ # SQLite stores booleans as 1/0, PostgreSQL as TRUE/FALSE.
756
+ #
757
+ # @return [String] SQL condition fragment
758
+ def cache_hit_condition
759
+ if connection.adapter_name.downcase.include?("sqlite")
760
+ "cache_hit = 1"
761
+ else
762
+ "cache_hit = TRUE"
763
+ end
764
+ end
566
765
  end
567
766
  end
568
767
  end