dbviewer 0.5.1 → 0.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40):
  1. checksums.yaml +4 -4
  2. data/README.md +156 -1
  3. data/app/controllers/concerns/dbviewer/database_operations.rb +11 -19
  4. data/app/controllers/dbviewer/api/entity_relationship_diagrams_controller.rb +84 -0
  5. data/app/controllers/dbviewer/api/queries_controller.rb +1 -1
  6. data/app/controllers/dbviewer/entity_relationship_diagrams_controller.rb +5 -6
  7. data/app/controllers/dbviewer/logs_controller.rb +1 -1
  8. data/app/controllers/dbviewer/tables_controller.rb +2 -8
  9. data/app/helpers/dbviewer/application_helper.rb +1 -1
  10. data/app/views/dbviewer/entity_relationship_diagrams/index.html.erb +217 -100
  11. data/app/views/dbviewer/tables/show.html.erb +278 -404
  12. data/config/routes.rb +7 -0
  13. data/lib/dbviewer/database/cache_manager.rb +78 -0
  14. data/lib/dbviewer/database/dynamic_model_factory.rb +62 -0
  15. data/lib/dbviewer/database/manager.rb +204 -0
  16. data/lib/dbviewer/database/metadata_manager.rb +129 -0
  17. data/lib/dbviewer/datatable/query_operations.rb +330 -0
  18. data/lib/dbviewer/datatable/query_params.rb +41 -0
  19. data/lib/dbviewer/engine.rb +11 -8
  20. data/lib/dbviewer/query/analyzer.rb +250 -0
  21. data/lib/dbviewer/query/collection.rb +39 -0
  22. data/lib/dbviewer/query/executor.rb +93 -0
  23. data/lib/dbviewer/query/logger.rb +108 -0
  24. data/lib/dbviewer/query/parser.rb +56 -0
  25. data/lib/dbviewer/storage/file_storage.rb +0 -3
  26. data/lib/dbviewer/version.rb +1 -1
  27. data/lib/dbviewer.rb +24 -7
  28. metadata +14 -14
  29. data/lib/dbviewer/cache_manager.rb +0 -78
  30. data/lib/dbviewer/database_manager.rb +0 -249
  31. data/lib/dbviewer/dynamic_model_factory.rb +0 -60
  32. data/lib/dbviewer/error_handler.rb +0 -18
  33. data/lib/dbviewer/logger.rb +0 -76
  34. data/lib/dbviewer/query_analyzer.rb +0 -239
  35. data/lib/dbviewer/query_collection.rb +0 -37
  36. data/lib/dbviewer/query_executor.rb +0 -91
  37. data/lib/dbviewer/query_parser.rb +0 -72
  38. data/lib/dbviewer/table_metadata_manager.rb +0 -136
  39. data/lib/dbviewer/table_query_operations.rb +0 -621
  40. data/lib/dbviewer/table_query_params.rb +0 -39
module Dbviewer
  module Datatable
    # QueryOperations handles CRUD operations and data querying for database tables.
    # It provides methods to fetch, filter and manipulate data in tables.
    class QueryOperations
      attr_reader :connection, :adapter_name

      # Initialize with dependencies
      # @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] The database connection
      # @param dynamic_model_factory [Dbviewer::Database::DynamicModelFactory] Factory for creating dynamic AR models
      # @param query_executor [Dbviewer::Query::Executor] Executor for raw SQL queries
      # @param table_metadata_manager [Dbviewer::Database::MetadataManager] Manager for table metadata
      def initialize(connection, dynamic_model_factory, query_executor, table_metadata_manager)
        @connection = connection
        @adapter_name = connection.adapter_name.downcase
        @dynamic_model_factory = dynamic_model_factory
        @query_executor = query_executor
        @table_metadata_manager = table_metadata_manager
        @query_analyzer = ::Dbviewer::Query::Analyzer.new(connection)
      end

      # Get the number of columns in a table
      # @param table_name [String] Name of the table
      # @return [Integer] Number of columns
      def column_count(table_name)
        table_columns(table_name).size
      end

      # Get records from a table with pagination and sorting
      # @param table_name [String] Name of the table
      # @param params [Dbviewer::Datatable::QueryParams] Query parameters object
      # @return [ActiveRecord::Result] Result set with columns and rows
      def table_records(table_name, params)
        query = get_model_for(table_name).all

        # Apply column filters if provided
        query = apply_column_filters(query, table_name, params.column_filters)

        # Apply sorting only on columns that are known to exist; quote the
        # identifier so a hostile order_by value cannot inject SQL.
        if params.order_by.present? && column_exists?(table_name, params.order_by)
          query = query.order("#{connection.quote_column_name(params.order_by)} #{params.direction}")
        end

        # Apply pagination
        records = query.limit(params.per_page).offset((params.page - 1) * params.per_page)

        # Use the table's own column order so results are consistent across calls
        column_names = table_columns(table_name).map { |c| c[:name] }
        @query_executor.to_result_set(records, column_names)
      rescue => e
        Rails.logger.error("[DBViewer] Error executing table query: #{e.message}")
        raise e
      end

      # Get the total count of records in a table
      # @param table_name [String] Name of the table
      # @return [Integer] Number of records (0 on error)
      def table_count(table_name)
        get_model_for(table_name).count
      rescue => e
        Rails.logger.error("[DBViewer] Error counting records in table #{table_name}: #{e.message}")
        0
      end

      # Get the number of records in a table (alias for table_count)
      # @param table_name [String] Name of the table
      # @return [Integer] Number of records
      def record_count(table_name)
        table_count(table_name)
      end

      # Get the number of records in a table with filters applied
      # @param table_name [String] Name of the table
      # @param column_filters [Hash] Hash of column_name => filter_value for filtering
      # @return [Integer] Number of filtered records (0 on error)
      def filtered_record_count(table_name, column_filters = {})
        return table_count(table_name) unless column_filters.present?

        # Apply filters in the same way as table_records
        query = apply_column_filters(get_model_for(table_name).all, table_name, column_filters)
        query.count
      rescue => e
        Rails.logger.error("[DBViewer] Error counting filtered records in table #{table_name}: #{e.message}")
        0
      end

      # Get column histogram/value distribution data for a specific column
      # @param table_name [String] Name of the table
      # @param column_name [String] Name of the column
      # @param limit [Integer] Maximum number of distinct values to return
      # @return [Array<Hash>] Array of value distribution data with labels and counts
      def fetch_column_distribution(table_name, column_name, limit = 20)
        return [] unless column_exists?(table_name, column_name)

        # Quote identifiers and coerce the limit so interpolated values
        # cannot alter the SQL (the original built this query from raw strings).
        quoted_table = connection.quote_table_name(table_name)
        quoted_column = connection.quote_column_name(column_name)
        query = "SELECT #{quoted_column} as label, COUNT(*) as count FROM #{quoted_table}
                WHERE #{quoted_column} IS NOT NULL
                GROUP BY #{quoted_column}
                ORDER BY count DESC LIMIT #{limit.to_i}"

        begin
          format_aggregate_rows(@connection.execute(query))
        rescue => e
          Rails.logger.error("Error fetching column distribution: #{e.message}")
          []
        end
      end

      # Get timestamp aggregation data for charts
      # @param table_name [String] Name of the table
      # @param grouping [String] Grouping type (hourly, daily, weekly, monthly)
      # @param column [String] Timestamp column name (defaults to created_at)
      # @return [Array<Hash>] Array of timestamp data with labels and counts
      def fetch_timestamp_data(table_name, grouping = "daily", column = "created_at")
        return [] unless column_exists?(table_name, column)

        quoted_table = connection.quote_table_name(table_name)
        quoted_column = connection.quote_column_name(column)
        date_format = timestamp_group_expression(quoted_column, grouping)

        # Query works the same for all database adapters
        query = "SELECT #{date_format} as label, COUNT(*) as count FROM #{quoted_table}
                WHERE #{quoted_column} IS NOT NULL
                GROUP BY label
                ORDER BY MIN(#{quoted_column}) DESC LIMIT 30"

        begin
          format_aggregate_rows(@connection.execute(query))
        rescue => e
          Rails.logger.error("Error fetching timestamp data: #{e.message}")
          []
        end
      end

      # Execute a raw SQL query after validating for safety
      # @param sql [String] SQL query to execute
      # @return [ActiveRecord::Result] Result set with columns and rows
      # @raise [StandardError] If the query is invalid or unsafe
      def execute_query(sql)
        @query_executor.execute_query(sql)
      end

      # Execute a SQLite PRAGMA command without adding a LIMIT clause
      # @param pragma [String] PRAGMA command to execute (without the "PRAGMA" keyword)
      # @return [ActiveRecord::Result] Result set with the PRAGMA value
      # @raise [StandardError] If the query is invalid or cannot be executed
      def execute_sqlite_pragma(pragma)
        @query_executor.execute_sqlite_pragma(pragma)
      end

      # Analyze query patterns and return performance recommendations
      # @param table_name [String] Name of the table
      # @param query_params [Dbviewer::Datatable::QueryParams] Query parameters
      # @return [Hash] Analysis results
      def analyze_query(table_name, query_params)
        @query_analyzer.analyze_query(table_name, query_params)
      end

      private

      # Normalize adapter-specific aggregate result rows into [{ label:, value: }].
      # MySQL returns positional arrays; SQLite and PostgreSQL return hash-like rows.
      def format_aggregate_rows(result)
        if adapter_name =~ /mysql/
          result.to_a.map { |row| { label: row[0], value: row[1] } }
        else # sqlite / postgresql
          result.map { |row| { label: row["label"], value: row["count"] } }
        end
      end

      # Build the adapter-specific SQL expression that buckets a (pre-quoted)
      # timestamp column by hour, week, month, or day (the default).
      def timestamp_group_expression(quoted_column, grouping)
        case grouping
        when "hourly"
          if adapter_name =~ /mysql/
            "DATE_FORMAT(#{quoted_column}, '%Y-%m-%d %H:00')"
          elsif adapter_name =~ /sqlite/
            "strftime('%Y-%m-%d %H:00', #{quoted_column})"
          else # postgresql
            "TO_CHAR(#{quoted_column}, 'YYYY-MM-DD HH24:00')"
          end
        when "weekly"
          if adapter_name =~ /mysql/
            "DATE_FORMAT(#{quoted_column}, '%Y-%v')"
          elsif adapter_name =~ /sqlite/
            "strftime('%Y-%W', #{quoted_column})"
          else # postgresql
            "TO_CHAR(#{quoted_column}, 'YYYY-IW')"
          end
        when "monthly"
          if adapter_name =~ /mysql/
            "DATE_FORMAT(#{quoted_column}, '%Y-%m')"
          elsif adapter_name =~ /sqlite/
            "strftime('%Y-%m', #{quoted_column})"
          else # postgresql
            "TO_CHAR(#{quoted_column}, 'YYYY-MM')"
          end
        else # daily is default
          adapter_name =~ /sqlite/ ? "date(#{quoted_column})" : "DATE(#{quoted_column})"
        end
      end

      # Apply column filters to a query
      # @param query [ActiveRecord::Relation] The query to apply filters to
      # @param table_name [String] Name of the table
      # @param column_filters [Hash] Hash of column_name => filter_value pairs
      # @return [ActiveRecord::Relation] The filtered query
      def apply_column_filters(query, table_name, column_filters)
        return query unless column_filters.present?

        # Work on a copy so the caller's hash is not mutated
        filters = column_filters.dup
        query = apply_created_at_range_filter(query, table_name, filters)

        # Apply remaining simple column filters
        filters.each do |column, value|
          next unless column_exists?(table_name, column)
          next if value.blank?

          Rails.logger.debug("[DBViewer] Applying filter: #{column} = #{value}")
          query = apply_single_filter(query, column, value)
        end

        query
      end

      # Apply the created_at/created_at_end datetime range filter when both keys
      # are present, removing the handled keys from +filters+ either way.
      # @return [ActiveRecord::Relation] the (possibly) narrowed query
      def apply_created_at_range_filter(query, table_name, filters)
        return query unless filters["created_at"].present? &&
                            filters["created_at_end"].present? &&
                            column_exists?(table_name, "created_at")

        begin
          start_datetime = Time.parse(filters["created_at"].to_s)
          end_datetime = Time.parse(filters["created_at_end"].to_s)

          # Expand a bare date (midnight) so the range covers the whole day
          end_datetime = end_datetime.end_of_day if end_datetime.to_s.match(/00:00:00/)

          Rails.logger.info("[DBViewer] Applying date range filter on #{table_name}.created_at: #{start_datetime} to #{end_datetime}")

          # Qualify AND quote the column: avoids ambiguity on joins and
          # injection through a hostile table name.
          column_name = "#{connection.quote_table_name(table_name)}.#{connection.quote_column_name('created_at')}"

          # Different databases may require different SQL for datetime comparison
          query =
            if adapter_name =~ /mysql/
              query.where("#{column_name} BETWEEN ? AND ?", start_datetime, end_datetime)
            elsif adapter_name =~ /sqlite/
              # SQLite needs special handling for datetime format
              query.where("datetime(#{column_name}) BETWEEN datetime(?) AND datetime(?)", start_datetime, end_datetime)
            else # postgresql
              query.where("#{column_name} >= ? AND #{column_name} <= ?", start_datetime, end_datetime)
            end
        rescue => e
          Rails.logger.error("[DBViewer] Error parsing date range: #{e.message}")
        ensure
          # Handled (or unusable) either way — drop the range keys so the
          # generic filter loop does not see them again
          filters.delete("created_at")
          filters.delete("created_at_end")
        end

        query
      end

      # Apply one simple filter to +query+: LIKE pattern (% or * wildcards),
      # comparison operator prefix (>=, <=, >, <, !=), or exact match.
      # The column is pre-validated by the caller via column_exists?.
      def apply_single_filter(query, column, value)
        quoted_column = connection.quote_column_name(column)
        text = value.to_s

        if text.include?("%") || text.include?("*")
          # Pattern matching (LIKE operation); '*' is accepted as a wildcard alias
          query.where("#{quoted_column} LIKE ?", text.gsub("*", "%"))
        elsif text.start_with?(">=", "<=", ">", "<", "!=")
          # The operator is taken from a fixed whitelist regex, so it is safe
          # to interpolate; the comparison value is still bound as a parameter.
          operator = text.match(/^(>=|<=|>|<|!=)/)[1]
          comparison_value = text.gsub(/^(>=|<=|>|<|!=)\s*/, "")
          query.where("#{quoted_column} #{operator} ?", comparison_value)
        else
          # Exact match
          query.where(column.to_sym => value)
        end
      end

      # Helper methods delegated to managers
      def get_model_for(table_name)
        @dynamic_model_factory.get_model_for(table_name)
      end

      def table_columns(table_name)
        @table_metadata_manager.table_columns(table_name)
      end

      def column_exists?(table_name, column_name)
        @table_metadata_manager.column_exists?(table_name, column_name)
      end

      def primary_key(table_name)
        @table_metadata_manager.primary_key(table_name)
      end
    end
  end
end
module Dbviewer
  module Datatable
    # QueryParams encapsulates parameters for table querying operations
    class QueryParams
      # Default number of records per page when none is supplied
      DEFAULT_PER_PAGE = 25

      attr_reader :page, :order_by, :direction, :per_page, :column_filters, :max_records

      # Initialize query parameters with defaults
      # @param page [Integer] Page number (1-based; values below 1 are clamped to 1)
      # @param order_by [String, nil] Column to sort by
      # @param direction [String] Sort direction ('ASC' or 'DESC'; invalid values fall back to 'ASC')
      # @param per_page [Integer, nil] Number of records per page (clamped to 1..max_records)
      # @param column_filters [Hash, nil] Hash of column filters
      # @param max_records [Integer] Maximum number of records to fetch
      def initialize(
        page: 1,
        order_by: nil,
        direction: "ASC",
        per_page: nil,
        column_filters: nil,
        max_records: 1000
      )
        @page = [ 1, page.to_i ].max
        @order_by = order_by
        @direction = normalize_direction(direction)
        @per_page = normalize_per_page(per_page || DEFAULT_PER_PAGE, max_records)
        @column_filters = column_filters || {}
        @max_records = max_records
      end

      private

      # Accept only ASC/DESC (case-insensitive); anything else becomes ASC.
      def normalize_direction(dir)
        %w[ASC DESC].include?(dir.to_s.upcase) ? dir.to_s.upcase : "ASC"
      end

      # Clamp per_page to the inclusive range 1..max. The previous
      # implementation only capped at max, so 0 or negative values slipped
      # through and produced LIMIT 0 / nonsense offsets downstream.
      def normalize_per_page(per_page_value, max)
        per_page_value.to_i.clamp(1, max)
      end
    end
  end
end
@@ -2,9 +2,10 @@ module Dbviewer
2
2
  class Engine < ::Rails::Engine
3
3
  isolate_namespace Dbviewer
4
4
 
5
- # Ensure lib directory is in the autoload path
6
- config.autoload_paths << File.expand_path("../../", __FILE__)
7
- config.eager_load_paths << File.expand_path("../../", __FILE__)
5
+ # Autoload lib directory
6
+ lib_path = File.expand_path("..", __dir__)
7
+ config.autoload_paths << lib_path
8
+ config.eager_load_paths << lib_path
8
9
 
9
10
  # Register generators
10
11
  config.app_generators do |g|
@@ -18,12 +19,14 @@ module Dbviewer
18
19
  end
19
20
 
20
21
  initializer "dbviewer.notifications" do
22
+ return unless Rails.env.development?
23
+
21
24
  ActiveSupport::Notifications.subscribe("sql.active_record") do |*args|
22
25
  event = ActiveSupport::Notifications::Event.new(*args)
23
26
 
24
27
  next if skip_internal_query?(event)
25
28
 
26
- Logger.instance.add(event)
29
+ Dbviewer::Query::Logger.instance.add(event)
27
30
  end
28
31
  end
29
32
 
@@ -33,10 +36,10 @@ module Dbviewer
33
36
  caller_locations = caller_locations(1)
34
37
  return false unless caller_locations
35
38
 
36
- # Look for dbviewer in the call stack
37
- caller_locations.any? { |l| l.path.include?("dbviewer") }
38
- rescue
39
- false
39
+ excluded_caller_locations = caller_locations.filter do |caller_location|
40
+ !caller_location.path.include?("lib/dbviewer/engine.rb")
41
+ end
42
+ excluded_caller_locations.any? { |l| l.path.include?("dbviewer") }
40
43
  end
41
44
  end
42
45
  end
module Dbviewer
  module Query
    # Analyzer handles analysis of query patterns and statistics.
    # Class-level methods aggregate captured query hashes; instance methods
    # inspect a live connection for index/distribution information.
    class Analyzer
      class << self
        # Calculate statistics for a collection of queries
        # @param queries [Array<Hash>] query hashes (:sql, :duration_ms, :request_id, :timestamp)
        # @return [Hash] aggregate totals, table usage, N+1 candidates and per-request metrics
        def generate_stats(queries)
          {
            total_count: queries.size,
            total_duration_ms: queries.sum { |q| q[:duration_ms] },
            avg_duration_ms: calculate_average_duration(queries),
            max_duration_ms: queries.map { |q| q[:duration_ms] }.max || 0,
            tables_queried: extract_queried_tables(queries),
            potential_n_plus_1: detect_potential_n_plus_1(queries),
            slowest_queries: get_slowest_queries(queries)
          }.merge(calculate_request_stats(queries))
        end

        # Mean duration across all queries, rounded to 2 decimals (0 when empty).
        def calculate_average_duration(queries)
          return 0 unless queries.any?

          (queries.sum { |q| q[:duration_ms] } / queries.size.to_f).round(2)
        end

        # Per-request statistics derived by grouping queries on request id.
        def calculate_request_stats(queries)
          requests = queries.group_by { |q| q[:request_id] }
          {
            request_count: requests.size,
            avg_queries_per_request: queries.any? ? (queries.size.to_f / requests.size).round(2) : 0,
            max_queries_per_request: requests.map { |_id, reqs| reqs.size }.max || 0
          }
        end

        # Top 10 most frequently referenced tables as name => reference count.
        def extract_queried_tables(queries)
          counts = Hash.new(0)
          queries.each do |query|
            ::Dbviewer::Query::Parser.extract_tables(query[:sql]).each do |table|
              counts[table] += 1
            end
          end
          counts.sort_by { |_table, count| -count }.first(10).to_h
        end

        # Find repeated query patterns within each request that suggest N+1
        # behavior; returns up to 10 issues, costliest (total duration) first.
        def detect_potential_n_plus_1(queries)
          return [] if queries.empty?

          potential_issues = []
          queries.group_by { |q| q[:request_id] }.each do |request_id, request_queries|
            analyze_request_patterns(request_id, request_queries, potential_issues)
          end
          potential_issues.sort_by { |issue| -issue[:total_duration_ms] }.first(10)
        end

        # The 10 slowest individual queries, trimmed to the fields callers need.
        def get_slowest_queries(queries)
          queries.sort_by { |q| -q[:duration_ms] }.first(10).map do |query|
            {
              sql: query[:sql],
              duration_ms: query[:duration_ms],
              request_id: query[:request_id],
              timestamp: query[:timestamp]
            }
          end
        end

        # Within one request, bucket queries by normalized SQL and record any
        # pattern repeated 5+ times as a potential N+1 issue.
        def analyze_request_patterns(request_id, request_queries, potential_issues)
          grouped = request_queries.group_by do |query|
            ::Dbviewer::Query::Parser.normalize(query[:sql])
          end

          grouped.each_value do |pattern_queries|
            next if pattern_queries.size < 5 # Only interested in repeated patterns

            # Attribute the pattern to a table only when it is unambiguous
            tables = ::Dbviewer::Query::Parser.extract_tables(pattern_queries.first[:sql])
            target_table = tables.size == 1 ? tables.first : nil

            total_time = pattern_queries.sum { |q| q[:duration_ms] }
            potential_issues << {
              request_id: request_id,
              pattern: pattern_queries.first[:sql],
              count: pattern_queries.size,
              table: target_table,
              total_duration_ms: total_time.round(2),
              avg_duration_ms: (total_time / pattern_queries.size).round(2)
            }
          end
        end
      end

      # Instance methods for query analysis against a live connection
      attr_reader :connection, :adapter_name

      # Initialize the analyzer for instance methods
      # @param connection [ActiveRecord::ConnectionAdapters::AbstractAdapter] Database connection
      def initialize(connection)
        @connection = connection
        @adapter_name = connection.adapter_name.downcase
      end

      # Check if a table has an index covering a column
      # @param table_name [String] Name of the table
      # @param column_name [String] Name of the column
      # @return [Boolean] True if any index includes the column
      def has_index_on?(table_name, column_name)
        connection.indexes(table_name).any? do |index|
          index.columns.include?(column_name)
        end
      end

      # Analyze a query and provide performance statistics and recommendations
      # @param table_name [String] Name of the table
      # @param query_params [Dbviewer::Datatable::QueryParams] Query parameters with filters
      # @return [Hash] Analysis results with statistics and recommendations
      def analyze_query(table_name, query_params)
        results = {
          table: table_name,
          filters: query_params.column_filters.keys,
          analysis: [],
          recommendations: []
        }

        # Timestamp columns get a dedicated existence/index/distribution check
        %w[created_at updated_at].each do |timestamp_column|
          next unless query_params.column_filters.key?(timestamp_column)

          analyze_timestamp_query(table_name, timestamp_column, results)
        end

        # Every other filtered column without an index earns a recommendation
        query_params.column_filters.each_key do |column_name|
          next if %w[created_at updated_at].include?(column_name)
          next if has_index_on?(table_name, column_name)

          results[:recommendations] << {
            type: "missing_index",
            message: "Consider adding an index on '#{column_name}' for faster filtering",
            sql: index_creation_sql(table_name, column_name)
          }
        end

        add_database_specific_recommendations(results)
        results
      end

      private

      # Analyze timestamp-based queries: column existence, index coverage,
      # and (where supported) data distribution.
      def analyze_timestamp_query(table_name, column_name, results)
        unless connection.column_exists?(table_name, column_name)
          results[:analysis] << "Column '#{column_name}' not found in table '#{table_name}'"
          return
        end

        unless has_index_on?(table_name, column_name)
          results[:recommendations] << {
            type: "missing_index",
            message: "Consider adding an index on '#{column_name}' for faster filtering",
            sql: index_creation_sql(table_name, column_name)
          }
        end

        add_data_distribution_stats(table_name, column_name, results) if adapter_supports_statistics?(adapter_name)
      rescue => e
        results[:analysis] << "Error analyzing timestamp query: #{e.message}"
      end

      # Only PostgreSQL supports the statistics gathering used below.
      def adapter_supports_statistics?(adapter_name)
        adapter_name.include?("postgresql")
      end

      # Append approximate date range/span information (PostgreSQL only).
      def add_data_distribution_stats(table_name, column_name, results)
        return unless adapter_name =~ /postgresql/

        # Get approximate date range and distribution
        range_query = "SELECT min(#{column_name}), max(#{column_name}) FROM #{table_name}"
        range_result = connection.execute(range_query).first

        min_date = range_result["min"]
        max_date = range_result["max"]
        if min_date && max_date
          results[:analysis] << {
            type: "date_range",
            min_date: min_date,
            max_date: max_date,
            span_days: ((Time.parse(max_date) - Time.parse(min_date)) / 86400).round
          }
        end
      rescue => e
        results[:analysis] << "Error getting date distribution: #{e.message}"
      end

      # SQL statement that would create the recommended index.
      def index_creation_sql(table_name, column_name)
        index_name = "index_#{table_name.gsub('.', '_')}_on_#{column_name}"
        "CREATE INDEX #{index_name} ON #{table_name} (#{column_name})"
      end

      # Generic engine-specific tuning advice appended to every analysis.
      def add_database_specific_recommendations(results)
        case adapter_name
        when /mysql/
          results[:recommendations] << {
            type: "performance",
            message: "For MySQL, consider optimizing the query with appropriate indexes and use EXPLAIN to verify query plan"
          }
        when /postgresql/
          results[:recommendations] << {
            type: "performance",
            message: "For PostgreSQL, consider using EXPLAIN ANALYZE to verify query execution plan"
          }
        when /sqlite/
          results[:recommendations] << {
            type: "performance",
            message: "For SQLite, consider using EXPLAIN QUERY PLAN to verify query execution strategy"
          }
        end
      end
    end
  end
end