rails_pulse 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. checksums.yaml +4 -4
  2. data/README.md +10 -4
  3. data/app/assets/images/rails_pulse/dashboard.png +0 -0
  4. data/app/assets/images/rails_pulse/request.png +0 -0
  5. data/app/assets/stylesheets/rails_pulse/application.css +28 -5
  6. data/app/assets/stylesheets/rails_pulse/components/badge.css +13 -0
  7. data/app/assets/stylesheets/rails_pulse/components/base.css +12 -2
  8. data/app/assets/stylesheets/rails_pulse/components/collapsible.css +30 -0
  9. data/app/assets/stylesheets/rails_pulse/components/popover.css +0 -1
  10. data/app/assets/stylesheets/rails_pulse/components/row.css +55 -3
  11. data/app/assets/stylesheets/rails_pulse/components/sidebar_menu.css +23 -0
  12. data/app/controllers/concerns/zoom_range_concern.rb +31 -0
  13. data/app/controllers/rails_pulse/application_controller.rb +5 -1
  14. data/app/controllers/rails_pulse/queries_controller.rb +46 -1
  15. data/app/controllers/rails_pulse/requests_controller.rb +14 -1
  16. data/app/controllers/rails_pulse/routes_controller.rb +40 -1
  17. data/app/helpers/rails_pulse/chart_helper.rb +15 -7
  18. data/app/javascript/rails_pulse/application.js +34 -3
  19. data/app/javascript/rails_pulse/controllers/collapsible_controller.js +32 -0
  20. data/app/javascript/rails_pulse/controllers/color_scheme_controller.js +2 -1
  21. data/app/javascript/rails_pulse/controllers/expandable_rows_controller.js +58 -0
  22. data/app/javascript/rails_pulse/controllers/index_controller.js +241 -11
  23. data/app/javascript/rails_pulse/controllers/popover_controller.js +28 -4
  24. data/app/javascript/rails_pulse/controllers/table_sort_controller.js +14 -0
  25. data/app/models/rails_pulse/queries/cards/average_query_times.rb +19 -19
  26. data/app/models/rails_pulse/queries/cards/execution_rate.rb +13 -8
  27. data/app/models/rails_pulse/queries/cards/percentile_query_times.rb +13 -8
  28. data/app/models/rails_pulse/query.rb +46 -0
  29. data/app/models/rails_pulse/routes/cards/average_response_times.rb +17 -19
  30. data/app/models/rails_pulse/routes/cards/error_rate_per_route.rb +13 -8
  31. data/app/models/rails_pulse/routes/cards/percentile_response_times.rb +13 -8
  32. data/app/models/rails_pulse/routes/cards/request_count_totals.rb +13 -8
  33. data/app/services/rails_pulse/analysis/backtrace_analyzer.rb +256 -0
  34. data/app/services/rails_pulse/analysis/base_analyzer.rb +67 -0
  35. data/app/services/rails_pulse/analysis/explain_plan_analyzer.rb +206 -0
  36. data/app/services/rails_pulse/analysis/index_recommendation_engine.rb +326 -0
  37. data/app/services/rails_pulse/analysis/n_plus_one_detector.rb +241 -0
  38. data/app/services/rails_pulse/analysis/query_characteristics_analyzer.rb +146 -0
  39. data/app/services/rails_pulse/analysis/suggestion_generator.rb +217 -0
  40. data/app/services/rails_pulse/query_analysis_service.rb +125 -0
  41. data/app/views/layouts/rails_pulse/_sidebar_menu.html.erb +0 -1
  42. data/app/views/layouts/rails_pulse/application.html.erb +0 -2
  43. data/app/views/rails_pulse/components/_breadcrumbs.html.erb +1 -1
  44. data/app/views/rails_pulse/components/_code_panel.html.erb +17 -3
  45. data/app/views/rails_pulse/components/_empty_state.html.erb +1 -1
  46. data/app/views/rails_pulse/components/_metric_card.html.erb +27 -4
  47. data/app/views/rails_pulse/components/_panel.html.erb +1 -1
  48. data/app/views/rails_pulse/components/_sparkline_stats.html.erb +5 -7
  49. data/app/views/rails_pulse/components/_table_head.html.erb +6 -1
  50. data/app/views/rails_pulse/dashboard/index.html.erb +1 -1
  51. data/app/views/rails_pulse/operations/show.html.erb +17 -15
  52. data/app/views/rails_pulse/queries/_analysis_error.html.erb +15 -0
  53. data/app/views/rails_pulse/queries/_analysis_prompt.html.erb +27 -0
  54. data/app/views/rails_pulse/queries/_analysis_results.html.erb +87 -0
  55. data/app/views/rails_pulse/queries/_analysis_section.html.erb +39 -0
  56. data/app/views/rails_pulse/queries/_show_table.html.erb +1 -1
  57. data/app/views/rails_pulse/queries/_table.html.erb +1 -1
  58. data/app/views/rails_pulse/queries/index.html.erb +48 -51
  59. data/app/views/rails_pulse/queries/show.html.erb +56 -52
  60. data/app/views/rails_pulse/requests/_operations.html.erb +30 -43
  61. data/app/views/rails_pulse/requests/_table.html.erb +3 -1
  62. data/app/views/rails_pulse/requests/index.html.erb +48 -51
  63. data/app/views/rails_pulse/routes/_table.html.erb +1 -1
  64. data/app/views/rails_pulse/routes/index.html.erb +49 -52
  65. data/app/views/rails_pulse/routes/show.html.erb +4 -4
  66. data/config/routes.rb +5 -1
  67. data/db/migrate/20250916031656_add_analysis_to_rails_pulse_queries.rb +13 -0
  68. data/db/rails_pulse_schema.rb +9 -0
  69. data/lib/generators/rails_pulse/convert_to_migrations_generator.rb +65 -0
  70. data/lib/generators/rails_pulse/install_generator.rb +71 -18
  71. data/lib/generators/rails_pulse/templates/migrations/install_rails_pulse_tables.rb +22 -0
  72. data/lib/generators/rails_pulse/templates/migrations/upgrade_rails_pulse_tables.rb +19 -0
  73. data/lib/generators/rails_pulse/upgrade_generator.rb +225 -0
  74. data/lib/rails_pulse/version.rb +1 -1
  75. data/lib/tasks/rails_pulse.rake +27 -8
  76. data/public/rails-pulse-assets/rails-pulse.css +1 -1
  77. data/public/rails-pulse-assets/rails-pulse.css.map +1 -1
  78. data/public/rails-pulse-assets/rails-pulse.js +53 -53
  79. data/public/rails-pulse-assets/rails-pulse.js.map +4 -4
  80. metadata +23 -5
  81. data/app/assets/images/rails_pulse/rails-pulse-logo.png +0 -0
  82. data/app/assets/images/rails_pulse/routes.png +0 -0
  83. data/app/javascript/rails_pulse/controllers/expandable_row_controller.js +0 -67
data/app/models/rails_pulse/routes/cards/error_rate_per_route.rb
@@ -39,15 +39,20 @@ module RailsPulse
  trend_icon = percentage < 0.1 ? "move-right" : current_period_errors < previous_period_errors ? "trending-down" : "trending-up"
  trend_amount = previous_period_errors.zero? ? "0%" : "#{percentage}%"

-  # Separate query for sparkline data - group by week using Rails
-  sparkline_data = base_query
-    .group_by_week(:period_start, time_zone: "UTC")
+  # Sparkline data by day with zero-filled days over the last 14 days
+  grouped_daily = base_query
+    .group_by_day(:period_start, time_zone: "UTC")
     .sum(:error_count)
-    .each_with_object({}) do |(week_start, total_errors), hash|
-      formatted_date = week_start.strftime("%b %-d")
-      value = total_errors || 0
-      hash[formatted_date] = { value: value }
-    end
+
+  start_day = 2.weeks.ago.beginning_of_day.to_date
+  end_day = Time.current.to_date
+
+  sparkline_data = {}
+  (start_day..end_day).each do |day|
+    total = grouped_daily[day] || 0
+    label = day.strftime("%b %-d")
+    sparkline_data[label] = { value: total }
+  end

  {
    id: "error_rate_per_route",
data/app/models/rails_pulse/routes/cards/percentile_response_times.rb
@@ -33,15 +33,20 @@ module RailsPulse
  trend_icon = percentage < 0.1 ? "move-right" : current_period_p95 < previous_period_p95 ? "trending-down" : "trending-up"
  trend_amount = previous_period_p95.zero? ? "0%" : "#{percentage}%"

-  # Separate query for sparkline data - group by week using Rails
-  sparkline_data = base_query
-    .group_by_week(:period_start, time_zone: "UTC")
+  # Sparkline data by day with zero-filled days over the last 14 days
+  grouped_daily = base_query
+    .group_by_day(:period_start, time_zone: "UTC")
     .average(:p95_duration)
-    .each_with_object({}) do |(week_start, avg_p95), hash|
-      formatted_date = week_start.strftime("%b %-d")
-      value = (avg_p95 || 0).round(0)
-      hash[formatted_date] = { value: value }
-    end
+
+  start_day = 2.weeks.ago.beginning_of_day.to_date
+  end_day = Time.current.to_date
+
+  sparkline_data = {}
+  (start_day..end_day).each do |day|
+    avg = grouped_daily[day]&.round(0) || 0
+    label = day.strftime("%b %-d")
+    sparkline_data[label] = { value: avg }
+  end

  {
    id: "percentile_response_times",
data/app/models/rails_pulse/routes/cards/request_count_totals.rb
@@ -33,15 +33,20 @@ module RailsPulse
  trend_icon = percentage < 0.1 ? "move-right" : current_period_count < previous_period_count ? "trending-down" : "trending-up"
  trend_amount = previous_period_count.zero? ? "0%" : "#{percentage}%"

-  # Separate query for sparkline data - group by week using Rails
-  sparkline_data = base_query
-    .group_by_week(:period_start, time_zone: "UTC")
+  # Sparkline data by day with zero-filled days over the last 14 days
+  grouped_daily = base_query
+    .group_by_day(:period_start, time_zone: "UTC")
     .sum(:count)
-    .each_with_object({}) do |(week_start, total_count), hash|
-      formatted_date = week_start.strftime("%b %-d")
-      value = total_count || 0
-      hash[formatted_date] = { value: value }
-    end
+
+  start_day = 2.weeks.ago.beginning_of_day.to_date
+  end_day = Time.current.to_date
+
+  sparkline_data = {}
+  (start_day..end_day).each do |day|
+    total = grouped_daily[day] || 0
+    label = day.strftime("%b %-d")
+    sparkline_data[label] = { value: total }
+  end

  # Calculate average requests per minute over 2-week period
  total_minutes = 2.weeks / 1.minute
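The per-minute denominator in the context line above is plain calendar arithmetic:

  # 2.weeks / 1.minute expressed without ActiveSupport:
  total_minutes = 14 * 24 * 60   # => 20160 minutes in a two-week window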
data/app/services/rails_pulse/analysis/backtrace_analyzer.rb
@@ -0,0 +1,256 @@
+ # Analyzes execution backtraces to identify code hotspots and execution patterns.
+ # Tracks most common execution locations, controller/model usage, and framework layer distribution.
+ module RailsPulse
+   module Analysis
+     class BacktraceAnalyzer < BaseAnalyzer
+       def analyze
+         backtraces = extract_backtraces
+
+         {
+           total_executions: operations.count,
+           unique_locations: backtraces.uniq.count,
+           most_common_location: find_most_common_location(backtraces),
+           potential_n_plus_one: detect_simple_n_plus_one_pattern,
+           execution_frequency: calculate_execution_frequency,
+           location_distribution: calculate_location_distribution(backtraces),
+           code_hotspots: identify_code_hotspots(backtraces),
+           execution_contexts: analyze_execution_contexts(backtraces)
+         }
+       end
+
+       private
+
+       def extract_backtraces
+         operations.filter_map(&:codebase_location).compact
+       end
+
+       def find_most_common_location(backtraces)
+         return nil if backtraces.empty?
+
+         frequency = backtraces.tally
+         most_common = frequency.max_by { |_, count| count }
+
+         return nil unless most_common
+
+         {
+           location: most_common[0],
+           count: most_common[1],
+           percentage: (most_common[1].to_f / backtraces.length * 100).round(1)
+         }
+       end
+
+       def detect_simple_n_plus_one_pattern
+         # Simple N+1 detection: many operations with same query in short time
+         time_window = 1.minute
+         groups = operations.group_by { |op| op.occurred_at.beginning_of_minute }
+
+         suspicious_groups = groups.select { |_, ops| ops.count > 10 }
+
+         {
+           detected: suspicious_groups.any?,
+           suspicious_periods: suspicious_groups.map do |time, ops|
+             {
+               period: time.strftime("%Y-%m-%d %H:%M"),
+               count: ops.count,
+               avg_duration: ops.sum(&:duration) / ops.count
+             }
+           end
+         }
+       end
+
+       def calculate_execution_frequency
+         return 0 if operations.empty? || operations.count < 2
+
+         time_span = operations.last.occurred_at - operations.first.occurred_at
+         return operations.count if time_span <= 0
+
+         (operations.count / time_span.in_hours).round(2)
+       end
+
+       def calculate_location_distribution(backtraces)
+         return {} if backtraces.empty?
+
+         total = backtraces.length
+         distribution = backtraces.tally.transform_values { |count| (count.to_f / total * 100).round(1) }
+
+         # Sort by frequency and return top locations
+         distribution.sort_by { |_, percentage| -percentage }.first(10).to_h
+       end
+
+       def identify_code_hotspots(backtraces)
+         return [] if backtraces.empty?
+
+         # Group by file/method to identify hotspots
+         hotspots = []
+
+         # Group by controller actions
+         controller_hotspots = group_by_controller_actions(backtraces)
+         hotspots.concat(controller_hotspots)
+
+         # Group by model methods
+         model_hotspots = group_by_model_methods(backtraces)
+         hotspots.concat(model_hotspots)
+
+         # Group by file
+         file_hotspots = group_by_files(backtraces)
+         hotspots.concat(file_hotspots)
+
+         # Sort by frequency and return top hotspots
+         hotspots.sort_by { |hotspot| -hotspot[:count] }.first(10)
+       end
+
+       def group_by_controller_actions(backtraces)
+         controller_traces = backtraces.select { |trace| trace.include?("app/controllers/") }
+
+         controller_actions = controller_traces.filter_map do |trace|
+           match = trace.match(%r{app/controllers/(.+?)\.rb.*in `(.+?)'})
+           next unless match
+
+           controller = match[1].gsub("_controller", "").humanize
+           action = match[2]
+           "#{controller}##{action}"
+         end
+
+         build_hotspot_data(controller_actions, "controller_action")
+       end
+
+       def group_by_model_methods(backtraces)
+         model_traces = backtraces.select { |trace| trace.include?("app/models/") }
+
+         model_methods = model_traces.filter_map do |trace|
+           match = trace.match(%r{app/models/(.+?)\.rb.*in `(.+?)'})
+           next unless match
+
+           model = match[1].classify
+           method = match[2]
+           "#{model}.#{method}"
+         end
+
+         build_hotspot_data(model_methods, "model_method")
+       end
+
+       def group_by_files(backtraces)
+         files = backtraces.filter_map do |trace|
+           match = trace.match(%r{(app/[^:]+)})
+           match[1] if match
+         end
+
+         build_hotspot_data(files, "file")
+       end
+
+       def build_hotspot_data(items, type)
+         return [] if items.empty?
+
+         item_counts = items.tally
+         total_operations = operations.count
+
+         item_counts.map do |item, count|
+           {
+             type: type,
+             location: item,
+             count: count,
+             percentage: (count.to_f / total_operations * 100).round(1),
+             operations_per_execution: (count.to_f / item_counts.values.sum * total_operations).round(2)
+           }
+         end
+       end
+
+       def analyze_execution_contexts(backtraces)
+         return {} if backtraces.empty?
+
+         contexts = {
+           framework_layers: analyze_framework_layers(backtraces),
+           application_layers: analyze_application_layers(backtraces),
+           gem_usage: analyze_gem_usage(backtraces),
+           database_access_patterns: analyze_database_access_patterns(backtraces)
+         }
+
+         contexts
+       end
+
+       def analyze_framework_layers(backtraces)
+         layers = {
+           controller: backtraces.count { |trace| trace.include?("app/controllers/") },
+           model: backtraces.count { |trace| trace.include?("app/models/") },
+           view: backtraces.count { |trace| trace.include?("app/views/") },
+           service: backtraces.count { |trace| trace.include?("app/services/") },
+           job: backtraces.count { |trace| trace.include?("app/jobs/") },
+           rails_framework: backtraces.count { |trace| trace.include?("railties") || trace.include?("actionpack") },
+           activerecord: backtraces.count { |trace| trace.include?("activerecord") }
+         }
+
+         total = backtraces.count
+         layers.transform_values { |count| { count: count, percentage: (count.to_f / total * 100).round(1) } }
+       end
+
+       def analyze_application_layers(backtraces)
+         app_traces = backtraces.select { |trace| trace.include?("app/") }
+
+         layers = {}
+         app_traces.each do |trace|
+           layer = extract_app_layer(trace)
+           layers[layer] ||= 0
+           layers[layer] += 1
+         end
+
+         total = app_traces.count
+         layers.transform_values { |count| { count: count, percentage: (count.to_f / total * 100).round(1) } }
+       end
+
+       def extract_app_layer(trace)
+         case trace
+         when /app\/controllers/ then :controllers
+         when /app\/models/ then :models
+         when /app\/services/ then :services
+         when /app\/jobs/ then :jobs
+         when /app\/mailers/ then :mailers
+         when /app\/helpers/ then :helpers
+         when /app\/views/ then :views
+         when /app\/lib/ then :lib
+         else :other
+         end
+       end
+
+       def analyze_gem_usage(backtraces)
+         gem_traces = backtraces.reject { |trace| trace.include?("app/") || trace.include?("config/") }
+
+         gems = gem_traces.filter_map do |trace|
+           # Extract gem name from path like "/gems/gem_name-version/lib/..."
+           match = trace.match(%r{/gems/([^/]+)/})
+           match[1].split("-").first if match
+         end
+
+         gem_counts = gems.tally
+         total = gem_traces.count
+
+         gem_counts.transform_values { |count| { count: count, percentage: (count.to_f / total * 100).round(1) } }
+                   .sort_by { |_, data| -data[:count] }
+                   .first(5)
+                   .to_h
+       end
+
+       def analyze_database_access_patterns(backtraces)
+         db_traces = backtraces.select { |trace|
+           trace.include?("activerecord") ||
+           trace.include?("execute_query") ||
+           trace.include?("adapter")
+         }
+
+         {
+           total_db_operations: db_traces.count,
+           percentage_db_operations: (db_traces.count.to_f / backtraces.count * 100).round(1),
+           common_db_methods: extract_common_db_methods(db_traces)
+         }
+       end
+
+       def extract_common_db_methods(db_traces)
+         methods = db_traces.filter_map do |trace|
+           match = trace.match(/in `(.+?)'/)
+           match[1] if match
+         end
+
+         methods.tally.sort_by { |_, count| -count }.first(5).to_h
+       end
+     end
+   end
+ end
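A rough console sketch of how the new analyzer can be exercised. Only the constructor signature (from BaseAnalyzer below) and the result keys are taken from this diff; the way operations are looked up for a query is an assumption, not shown here:

  # Hypothetical console session -- the operations lookup is assumed.
  query      = RailsPulse::Query.find(42)                   # 42 is a placeholder id
  operations = RailsPulse::Operation.where(query: query)    # assumed association/lookup
  report     = RailsPulse::Analysis::BacktraceAnalyzer.new(query, operations).analyze

  report[:most_common_location]   # => { location: ..., count: ..., percentage: ... } or nil
  report[:potential_n_plus_one]   # => { detected: true/false, suspicious_periods: [...] }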
data/app/services/rails_pulse/analysis/base_analyzer.rb
@@ -0,0 +1,67 @@
+ # Base class providing common utilities for all query analyzers.
+ # Handles database adapter detection, SQL parsing, and normalization.
+ module RailsPulse
+   module Analysis
+     class BaseAnalyzer
+       attr_reader :query, :operations
+
+       def initialize(query, operations = [])
+         @query = query
+         @operations = Array(operations)
+       end
+
+       # Each analyzer must implement this method
+       def analyze
+         raise NotImplementedError, "#{self.class} must implement #analyze"
+       end
+
+       protected
+
+       def sql
+         @sql ||= query.normalized_sql
+       end
+
+       def recent_operations
+         @recent_operations ||= operations.select { |op| op.occurred_at > 48.hours.ago }
+       end
+
+       # Utility method for database adapter detection
+       def database_adapter
+         @database_adapter ||= RailsPulse::ApplicationRecord.connection.adapter_name.downcase
+       end
+
+       def postgresql?
+         database_adapter == "postgresql"
+       end
+
+       def mysql?
+         database_adapter.in?([ "mysql", "mysql2" ])
+       end
+
+       def sqlite?
+         database_adapter == "sqlite"
+       end
+
+       # Common SQL parsing utilities
+       def extract_main_table(sql_string = sql)
+         match = sql_string.match(/FROM\s+(\w+)/i)
+         match ? match[1] : nil
+       end
+
+       def extract_where_clause(sql_string = sql)
+         match = sql_string.match(/WHERE\s+(.+?)(?:\s+ORDER\s+BY|\s+GROUP\s+BY|\s+LIMIT|\s*$)/i)
+         match ? match[1] : nil
+       end
+
+       def normalize_sql_for_pattern_detection(sql_string)
+         return "" unless sql_string.present?
+
+         sql_string.gsub(/\d+/, "?")     # Replace numbers with placeholders
+                   .gsub(/'[^']*'/, "?") # Replace strings with placeholders
+                   .gsub(/\s+/, " ")     # Normalize whitespace
+                   .strip
+                   .downcase
+       end
+     end
+   end
+ end
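Stripped of the Rails helpers, the normalize_sql_for_pattern_detection chain above reduces literals to placeholders, squeezes whitespace, and lowercases. A standalone illustration (plain Ruby; the sample SQL is made up):

  sql = "SELECT * FROM users WHERE id = 42 AND name = 'Bob'"
  sql.gsub(/\d+/, "?").gsub(/'[^']*'/, "?").gsub(/\s+/, " ").strip.downcase
  # => "select * from users where id = ? and name = ?"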
data/app/services/rails_pulse/analysis/explain_plan_analyzer.rb
@@ -0,0 +1,206 @@
+ # Executes database EXPLAIN commands and analyzes query execution plans.
+ # Detects sequential scans, temporary tables, high-cost operations, and database-specific performance issues.
+ module RailsPulse
+   module Analysis
+     class ExplainPlanAnalyzer < BaseAnalyzer
+       EXPLAIN_TIMEOUT = 5.seconds
+
+       def analyze
+         return { explain_plan: nil, issues: [] } if recent_operations.empty?
+
+         actual_sql = recent_operations.first.label
+         explain_plan = generate_explain_plan(actual_sql)
+
+         {
+           explain_plan: explain_plan,
+           issues: detect_explain_issues(explain_plan)
+         }
+       end
+
+       private
+
+       def generate_explain_plan(sql)
+         return nil unless sql.present?
+
+         # Skip EXPLAIN queries in test environment to avoid transaction issues
+         return nil if Rails.env.test?
+
+         begin
+           sanitized_sql = sanitize_sql_for_explain(sql)
+
+           Timeout.timeout(EXPLAIN_TIMEOUT) do
+             case database_adapter
+             when "postgresql"
+               execute_postgres_explain(sanitized_sql)
+             when "mysql", "mysql2"
+               execute_mysql_explain(sanitized_sql)
+             when "sqlite"
+               execute_sqlite_explain(sanitized_sql)
+             else
+               nil
+             end
+           end
+         rescue => e
+           Rails.logger.warn("[ExplainPlanAnalyzer] EXPLAIN failed for query #{query.id}: #{e.message}")
+           nil
+         end
+       end
+
+       def detect_explain_issues(explain_plan)
+         return [] unless explain_plan.present?
+
+         issues = []
+
+         # Look for common issues in EXPLAIN output
+         if sequential_scan?(explain_plan)
+           issues << {
+             type: "sequential_scan",
+             severity: "warning",
+             description: "Query performs sequential/table scan",
+             impact: "Poor performance on large tables"
+           }
+         end
+
+         if temporary_operations?(explain_plan)
+           issues << {
+             type: "temporary_table",
+             severity: "warning",
+             description: "Query uses temporary tables or filesort",
+             impact: "Increased memory usage and processing time"
+           }
+         end
+
+         # Database-specific analysis
+         case database_adapter
+         when "postgresql"
+           issues.concat(analyze_postgres_specific_issues(explain_plan))
+         when "mysql", "mysql2"
+           issues.concat(analyze_mysql_specific_issues(explain_plan))
+         when "sqlite"
+           issues.concat(analyze_sqlite_specific_issues(explain_plan))
+         end
+
+         issues
+       end
+
+       def sequential_scan?(explain_plan)
+         explain_plan.downcase.include?("seq scan") ||
+           explain_plan.downcase.include?("table scan") ||
+           explain_plan.downcase.include?("full table scan")
+       end
+
+       def temporary_operations?(explain_plan)
+         explain_plan.downcase.include?("temporary") ||
+           explain_plan.downcase.include?("filesort") ||
+           explain_plan.downcase.include?("using temporary")
+       end
+
+       def analyze_postgres_specific_issues(explain_plan)
+         issues = []
+
+         # High cost operations
+         if explain_plan.match(/cost=(\d+\.\d+)\.\.(\d+\.\d+)/)
+           total_cost = $2.to_f
+           if total_cost > 1000
+             issues << {
+               type: "high_cost_operation",
+               severity: "warning",
+               description: "Query has high execution cost (#{total_cost.round(2)})",
+               impact: "May indicate need for optimization or indexing"
+             }
+           end
+         end
+
+         # Hash joins on large datasets
+         if explain_plan.include?("Hash Join") && explain_plan.match(/rows=(\d+)/)
+           rows = $1.to_i
+           if rows > 10000
+             issues << {
+               type: "large_hash_join",
+               severity: "info",
+               description: "Hash join on large dataset (#{rows} rows)",
+               impact: "High memory usage during query execution"
+             }
+           end
+         end
+
+         issues
+       end
+
+       def analyze_mysql_specific_issues(explain_plan)
+         issues = []
+
+         # Using where with no index
+         if explain_plan.include?("Using where") && !explain_plan.include?("Using index")
+           issues << {
+             type: "where_without_index",
+             severity: "warning",
+             description: "WHERE clause not using index efficiently",
+             impact: "Slower query execution due to row-by-row filtering"
+           }
+         end
+
+         # Full table scan with large row count
+         if explain_plan.match(/type: ALL.*rows: (\d+)/)
+           rows = $1.to_i
+           if rows > 1000
+             issues << {
+               type: "full_scan_large_table",
+               severity: "warning",
+               description: "Full table scan on table with #{rows} rows",
+               impact: "Very slow query execution on large dataset"
+             }
+           end
+         end
+
+         issues
+       end
+
+       def analyze_sqlite_specific_issues(explain_plan)
+         issues = []
+
+         # SCAN TABLE operations
+         if explain_plan.include?("SCAN TABLE")
+           issues << {
+             type: "table_scan",
+             severity: "warning",
+             description: "SQLite performing table scan",
+             impact: "Linear search through all table rows"
+           }
+         end
+
+         # Missing index usage
+         if explain_plan.include?("USING INDEX") == false && explain_plan.include?("WHERE")
+           issues << {
+             type: "no_index_usage",
+             severity: "info",
+             description: "Query not utilizing available indexes",
+             impact: "Potential for optimization with proper indexing"
+           }
+         end
+
+         issues
+       end
+
+       def sanitize_sql_for_explain(sql)
+         # Basic sanitization for EXPLAIN
+         sql.strip.gsub(/;+\s*$/, "")
+       end
+
+       def execute_postgres_explain(sql)
+         result = RailsPulse::ApplicationRecord.connection.execute("EXPLAIN (ANALYZE, BUFFERS) #{sql}")
+         result.values.flatten.join("\n")
+       end
+
+       def execute_mysql_explain(sql)
+         result = RailsPulse::ApplicationRecord.connection.execute("EXPLAIN #{sql}")
+         result.to_a.map { |row| row.values.join(" | ") }.join("\n")
+       end
+
+       def execute_sqlite_explain(sql)
+         result = RailsPulse::ApplicationRecord.connection.execute("EXPLAIN QUERY PLAN #{sql}")
+         result.map { |row| row.values.join(" | ") }.join("\n")
+       end
+     end
+   end
+ end
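For readers who want to see what the adapter branch above actually issues, a rough console equivalent (the SQL below is a placeholder, not taken from the gem; the three EXPLAIN variants mirror the execute_*_explain helpers in this file):

  # Placeholder SQL -- substitute a real statement captured by Rails Pulse.
  sql  = "SELECT * FROM rails_pulse_queries WHERE normalized_sql LIKE '%users%'"
  conn = RailsPulse::ApplicationRecord.connection

  plan =
    case conn.adapter_name.downcase
    when "postgresql"      then conn.execute("EXPLAIN (ANALYZE, BUFFERS) #{sql}")
    when "mysql", "mysql2" then conn.execute("EXPLAIN #{sql}")
    when "sqlite"          then conn.execute("EXPLAIN QUERY PLAN #{sql}")
    end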