pg_insights 0.3.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/app/assets/javascripts/pg_insights/application.js +91 -24
- data/app/assets/javascripts/pg_insights/plan_performance.js +53 -0
- data/app/assets/javascripts/pg_insights/query_comparison.js +1129 -0
- data/app/assets/javascripts/pg_insights/results/view_toggles.js +26 -5
- data/app/assets/javascripts/pg_insights/results.js +231 -2
- data/app/assets/stylesheets/pg_insights/analysis.css +2628 -0
- data/app/assets/stylesheets/pg_insights/application.css +51 -1
- data/app/assets/stylesheets/pg_insights/results.css +12 -1
- data/app/controllers/pg_insights/insights_controller.rb +486 -9
- data/app/helpers/pg_insights/application_helper.rb +339 -0
- data/app/helpers/pg_insights/insights_helper.rb +567 -0
- data/app/jobs/pg_insights/query_analysis_job.rb +142 -0
- data/app/models/pg_insights/query_execution.rb +198 -0
- data/app/services/pg_insights/query_analysis_service.rb +269 -0
- data/app/views/layouts/pg_insights/application.html.erb +8 -1
- data/app/views/pg_insights/insights/_compare_view.html.erb +264 -0
- data/app/views/pg_insights/insights/_empty_state.html.erb +9 -0
- data/app/views/pg_insights/insights/_execution_table_view.html.erb +86 -0
- data/app/views/pg_insights/insights/_history_bar.html.erb +33 -0
- data/app/views/pg_insights/insights/_perf_view.html.erb +244 -0
- data/app/views/pg_insights/insights/_plan_nodes.html.erb +12 -0
- data/app/views/pg_insights/insights/_plan_tree.html.erb +30 -0
- data/app/views/pg_insights/insights/_plan_tree_modern.html.erb +12 -0
- data/app/views/pg_insights/insights/_plan_view.html.erb +159 -0
- data/app/views/pg_insights/insights/_query_panel.html.erb +3 -2
- data/app/views/pg_insights/insights/_result.html.erb +19 -4
- data/app/views/pg_insights/insights/_results_info.html.erb +33 -9
- data/app/views/pg_insights/insights/_results_info_empty.html.erb +10 -0
- data/app/views/pg_insights/insights/_results_panel.html.erb +7 -9
- data/app/views/pg_insights/insights/_results_table.html.erb +0 -5
- data/app/views/pg_insights/insights/_visual_view.html.erb +212 -0
- data/app/views/pg_insights/insights/index.html.erb +4 -1
- data/app/views/pg_insights/timeline/compare.html.erb +3 -3
- data/config/routes.rb +6 -0
- data/lib/generators/pg_insights/install_generator.rb +20 -14
- data/lib/generators/pg_insights/templates/db/migrate/create_pg_insights_query_executions.rb +45 -0
- data/lib/pg_insights/version.rb +1 -1
- data/lib/pg_insights.rb +30 -2
- metadata +20 -2
# frozen_string_literal: true

module PgInsights
  # Background job that runs and/or EXPLAIN-analyzes the SQL captured in a
  # QueryExecution record, persisting results (or failure details) back onto it.
  # All plan-parsing work is delegated to QueryAnalysisService to avoid
  # duplicating logic between the sync and async paths.
  class QueryAnalysisJob < ApplicationJob
    queue_as do
      PgInsights.queue_name || :default
    end

    retry_on StandardError, wait: :polynomially_longer, attempts: 3
    discard_on ActiveRecord::RecordNotFound

    # Runs the execution identified by +execution_id+.
    #
    # @param execution_id [Integer] primary key of the QueryExecution row
    # @param options [Hash] EXPLAIN options forwarded to the service helpers
    # @return [QueryExecution] the updated execution record
    # @raise [ArgumentError] when the record has an unknown execution_type
    # @raise [StandardError] re-raised after marking failure, so ActiveJob retries
    def perform(execution_id, options = {})
      execution = QueryExecution.find(execution_id)

      # Idempotency guard: a retried or duplicated job must not re-run work.
      return execution if execution.completed? || execution.failed?

      Rails.logger.info "Starting query analysis for execution #{execution_id}"

      begin
        execution.mark_as_running!

        results = case execution.execution_type
        when "execute"
          execute_query(execution.sql_text, options)
        when "analyze"
          analyze_query(execution.sql_text, options)
        when "both"
          execute_and_analyze_query(execution.sql_text, options)
        else
          raise ArgumentError, "Invalid execution_type: #{execution.execution_type}"
        end

        execution.mark_as_completed!(results)
        Rails.logger.info "Completed query analysis for execution #{execution_id}"

        execution

      rescue => e
        Rails.logger.error "Query analysis job failed for execution #{execution_id}: #{e.message}"
        Rails.logger.error e.backtrace.join("\n") if Rails.env.development?

        execution.mark_as_failed!(e.message, e.backtrace&.first&.truncate(500))
        raise # Re-raise for job retry mechanism
      end
    end

    private

    # Runs the query and captures its result set plus wall-clock duration.
    #
    # BUGFIX: previously the query was executed *before* timing started and
    # `measure_execution_time { result }` only timed yielding an
    # already-computed value, so total_time_ms was always ~0. The timing block
    # now wraps the actual execution.
    def execute_query(sql, options)
      result = nil
      elapsed_ms = measure_execution_time do
        result = execute_with_timeout(sql, PgInsights.query_execution_timeout_ms)
      end

      {
        result_data: serialize_result_data(result),
        result_rows_count: result.rows.count,
        result_columns_count: result.columns.count,
        total_time_ms: elapsed_ms
      }
    end

    # EXPLAIN ANALYZEs the query and derives plan metrics plus heuristics.
    def analyze_query(sql, options)
      explain_sql = build_explain_query(sql, options)
      result = execute_with_timeout(explain_sql, PgInsights.query_analysis_timeout_ms)

      plan_data = parse_explain_output(result)
      insights = generate_performance_insights(plan_data)

      {
        execution_plan: plan_data,
        plan_summary: generate_plan_summary(plan_data),
        planning_time_ms: extract_planning_time(plan_data),
        execution_time_ms: extract_execution_time(plan_data),
        total_time_ms: calculate_total_time(plan_data),
        query_cost: extract_query_cost(plan_data),
        performance_insights: insights,
        execution_stats: extract_execution_stats(plan_data)
      }
    end

    # Runs the query twice: once for results, once under EXPLAIN ANALYZE.
    # Analysis keys win on merge, so total_time_ms reflects the EXPLAIN timing.
    def execute_and_analyze_query(sql, options)
      # Execute the query first
      execution_results = execute_query(sql, options)

      # Then analyze it
      analysis_results = analyze_query(sql, options)

      # Merge both sets of results
      execution_results.merge(analysis_results)
    end

    # Delegate to service methods to avoid code duplication
    def execute_with_timeout(sql, timeout_ms)
      QueryAnalysisService.send(:execute_with_timeout, sql, timeout_ms)
    end

    def build_explain_query(sql, options)
      QueryAnalysisService.send(:build_explain_query, sql, options)
    end

    def parse_explain_output(result)
      QueryAnalysisService.send(:parse_explain_output, result)
    end

    def generate_plan_summary(plan_data)
      QueryAnalysisService.send(:generate_plan_summary, plan_data)
    end

    def extract_planning_time(plan_data)
      QueryAnalysisService.send(:extract_planning_time, plan_data)
    end

    def extract_execution_time(plan_data)
      QueryAnalysisService.send(:extract_execution_time, plan_data)
    end

    def calculate_total_time(plan_data)
      QueryAnalysisService.send(:calculate_total_time, plan_data)
    end

    def extract_query_cost(plan_data)
      QueryAnalysisService.send(:extract_query_cost, plan_data)
    end

    def extract_execution_stats(plan_data)
      QueryAnalysisService.send(:extract_execution_stats, plan_data)
    end

    def generate_performance_insights(plan_data)
      QueryAnalysisService.send(:generate_performance_insights, plan_data)
    end

    def serialize_result_data(result)
      QueryAnalysisService.send(:serialize_result_data, result)
    end

    # Yields the block and returns its wall-clock duration in milliseconds.
    def measure_execution_time
      start_time = Time.current
      yield
      ((Time.current - start_time) * 1000).round(3)
    end
  end
end
# frozen_string_literal: true

module PgInsights
  # Persisted record of a single query execution/analysis run: status
  # lifecycle, timing metrics, EXPLAIN plan JSON, serialized result rows,
  # and display helpers used by the history UI.
  class QueryExecution < ApplicationRecord
    self.table_name = "pg_insights_query_executions"

    belongs_to :query, class_name: "PgInsights::Query", optional: true

    EXECUTION_TYPES = %w[execute analyze both].freeze
    STATUSES = %w[pending running completed failed].freeze

    validates :sql_text, presence: true
    validates :execution_type, inclusion: { in: EXECUTION_TYPES }
    validates :status, inclusion: { in: STATUSES }

    scope :recent, -> { order(created_at: :desc) }
    scope :completed, -> { where(status: "completed") }
    scope :failed, -> { where(status: "failed") }
    scope :with_analysis, -> { where(execution_type: [ "analyze", "both" ]) }
    scope :with_results, -> { where(execution_type: [ "execute", "both" ]) }
    scope :analyzable, -> { completed.with_analysis.where.not(execution_plan: nil) }
    scope :recent_history, ->(limit = 10) { analyzable.recent.limit(limit) }

    # Status management
    def pending?
      status == "pending"
    end

    def running?
      status == "running"
    end

    def completed?
      status == "completed"
    end

    def failed?
      status == "failed"
    end

    # Completed AND no error recorded.
    def success?
      completed? && error_message.blank?
    end

    # Execution type checks
    def includes_execution?
      execution_type.in?([ "execute", "both" ])
    end

    def includes_analysis?
      execution_type.in?([ "analyze", "both" ])
    end

    # Performance metrics
    def has_timing_data?
      planning_time_ms.present? || execution_time_ms.present?
    end

    # Human-readable duration: "12.34ms" under a second, "1.23s" above.
    def formatted_total_time
      return nil unless total_time_ms

      if total_time_ms < 1000
        "#{total_time_ms.round(2)}ms"
      else
        "#{(total_time_ms / 1000).round(2)}s"
      end
    end

    # Human-readable planner cost; abbreviates thousands as e.g. "12.3K".
    def formatted_query_cost
      return nil unless query_cost

      if query_cost < 1000
        query_cost.round(2)
      else
        "#{(query_cost / 1000).round(1)}K"
      end
    end

    # Plan analysis helpers
    def has_plan_data?
      execution_plan.present?
    end

    # Flattened list of plan nodes extracted from the stored EXPLAIN output.
    #
    # BUGFIX: always returns an Array now. Previously this returned nil when
    # the plan JSON had no top-level "Plan" key, which was inconsistent with
    # the empty-plan guard and would break callers that iterate the result.
    def plan_nodes
      return [] unless execution_plan.present?

      # EXPLAIN (FORMAT JSON) output is a one-element array; accept either form.
      plan_data = execution_plan.is_a?(Array) ? execution_plan.first : execution_plan
      return [] unless plan_data && plan_data["Plan"]

      extract_plan_nodes(plan_data["Plan"])
    end

    def optimization_suggestions
      return [] unless performance_insights.present?

      performance_insights["suggestions"] || []
    end

    def has_performance_issues?
      return false unless performance_insights.present?

      insights = performance_insights
      insights["issues_detected"] == true ||
        insights["slow_operations"].present? ||
        insights["missing_indexes"].present?
    end

    # Result data helpers
    def has_result_data?
      result_data.present? && result_rows_count.present?
    end

    def result_summary
      return nil unless has_result_data?

      "#{result_rows_count} #{'row'.pluralize(result_rows_count)} • #{result_columns_count} #{'column'.pluralize(result_columns_count)}"
    end

    # Status transitions
    def mark_as_running!
      update!(
        status: "running",
        started_at: Time.current
      )
    end

    # Marks success and merges the analysis/result attributes in one write.
    def mark_as_completed!(results = {})
      update!(
        status: "completed",
        completed_at: Time.current,
        duration_ms: calculate_duration,
        **results
      )
    end

    def mark_as_failed!(error_msg, error_detail = nil)
      update!(
        status: "failed",
        completed_at: Time.current,
        duration_ms: calculate_duration,
        error_message: error_msg,
        error_detail: error_detail
      )
    end

    # History display helpers (public methods)
    def display_title
      return sql_text.truncate(50) if sql_text.present?
      "Query ##{id}"
    end

    def display_summary
      parts = []
      parts << "#{formatted_total_time}" if total_time_ms.present?
      parts << "Cost: #{formatted_query_cost}" if query_cost.present?
      parts << "#{result_rows_count} rows" if result_rows_count.present?
      parts.join(" • ")
    end

    # CSS class bucket for the history UI, keyed off total_time_ms.
    def performance_class
      return "performance-excellent" if total_time_ms && total_time_ms < 50
      return "performance-good" if total_time_ms && total_time_ms < 200
      return "performance-fair" if total_time_ms && total_time_ms < 1000
      return "performance-poor" if total_time_ms && total_time_ms >= 1000
      "performance-unknown"
    end

    private

    # Wall-clock job duration in ms; nil when the run never started.
    def calculate_duration
      return nil unless started_at

      end_time = completed_at || Time.current
      ((end_time - started_at) * 1000).round(3)
    end

    # Depth-first walk of the EXPLAIN plan tree, collecting one summary hash
    # per node with its nesting level for indentation in the UI.
    def extract_plan_nodes(plan_node, nodes = [], level = 0)
      return nodes unless plan_node

      nodes << {
        node_type: plan_node["Node Type"],
        relation_name: plan_node["Relation Name"],
        cost: plan_node["Total Cost"],
        actual_time: plan_node["Actual Total Time"],
        actual_rows: plan_node["Actual Rows"],
        level: level
      }

      # Recursively process child plans
      if plan_node["Plans"]
        plan_node["Plans"].each do |child_plan|
          extract_plan_nodes(child_plan, nodes, level + 1)
        end
      end

      nodes
    end
  end
end
# frozen_string_literal: true

module PgInsights
  # Service object that executes user SQL and/or gathers EXPLAIN ANALYZE
  # metrics, persisting everything onto a QueryExecution record. Offers a
  # synchronous path (execute_query) and an async path that falls back to
  # sync when no job backend is available.
  class QueryAnalysisService
    class << self
      # Synchronously runs/analyzes +sql+ per +execution_type+
      # ("execute", "analyze" or "both").
      #
      # @return [QueryExecution] completed or failed execution record
      def execute_query(sql, execution_type: "execute", options: {})
        execution = create_execution_record(sql, execution_type)

        begin
          execution.mark_as_running!

          results = case execution_type.to_s
          when "execute"
            execute_only(sql, options)
          when "analyze"
            analyze_only(sql, options)
          when "both"
            execute_and_analyze(sql, options)
          else
            raise ArgumentError, "Invalid execution_type: #{execution_type}"
          end

          execution.mark_as_completed!(results)
          execution

        rescue => e
          Rails.logger.error "Query analysis failed: #{e.message}"
          Rails.logger.error e.backtrace.join("\n") if Rails.env.development?

          execution.mark_as_failed!(e.message, e.backtrace&.first&.truncate(500))
          execution
        end
      end

      # Enqueues the analysis when background jobs are available; otherwise
      # degrades gracefully to the synchronous path.
      def analyze_query_async(sql, execution_type: "analyze", options: {})
        if background_jobs_available?
          execution = create_execution_record(sql, execution_type)
          QueryAnalysisJob.perform_later(execution.id, options)
          execution
        else
          # Fallback to synchronous execution
          execute_query(sql, execution_type: execution_type, options: options)
        end
      end

      private

      def create_execution_record(sql, execution_type)
        QueryExecution.create!(
          sql_text: normalize_sql(sql),
          execution_type: execution_type,
          status: "pending"
        )
      end

      # Runs the query and captures its result set plus wall-clock duration.
      #
      # BUGFIX: previously the query ran *before* timing started and
      # `measure_execution_time { result }` only timed yielding an
      # already-computed value, so total_time_ms was always ~0. The timing
      # block now wraps the actual execution.
      def execute_only(sql, options)
        result = nil
        elapsed_ms = measure_execution_time do
          result = execute_with_timeout(sql, PgInsights.query_execution_timeout_ms)
        end

        {
          result_data: serialize_result_data(result),
          result_rows_count: result.rows.count,
          result_columns_count: result.columns.count,
          total_time_ms: elapsed_ms
        }
      end

      # EXPLAIN ANALYZEs the query and derives plan metrics plus heuristics.
      def analyze_only(sql, options)
        explain_sql = build_explain_query(sql, options)
        result = execute_with_timeout(explain_sql, PgInsights.query_analysis_timeout_ms)

        plan_data = parse_explain_output(result)
        insights = generate_performance_insights(plan_data)

        {
          execution_plan: plan_data,
          plan_summary: generate_plan_summary(plan_data),
          planning_time_ms: extract_planning_time(plan_data),
          execution_time_ms: extract_execution_time(plan_data),
          total_time_ms: calculate_total_time(plan_data),
          query_cost: extract_query_cost(plan_data),
          performance_insights: insights,
          execution_stats: extract_execution_stats(plan_data)
        }
      end

      # Runs the query twice: once for results, once under EXPLAIN ANALYZE.
      # Analysis keys win on merge, so total_time_ms reflects the EXPLAIN timing.
      def execute_and_analyze(sql, options)
        # Execute the query first
        execution_results = execute_only(sql, options)

        # Then analyze it
        analysis_results = analyze_only(sql, options)

        # Merge both sets of results
        execution_results.merge(analysis_results)
      end

      # Runs +sql+ inside a transaction with a LOCAL statement_timeout so a
      # runaway query cannot hold the connection indefinitely.
      def execute_with_timeout(sql, timeout_ms)
        ActiveRecord::Base.connection.transaction do
          # .to_i coerces the configured value so only a number is interpolated.
          ActiveRecord::Base.connection.execute("SET LOCAL statement_timeout = #{timeout_ms.to_i}")
          ActiveRecord::Base.connection.exec_query(sql)
        end
      end

      # Builds the EXPLAIN statement; ANALYZE and FORMAT JSON are always on,
      # COSTS/TIMING/SUMMARY default on, VERBOSE/SETTINGS/BUFFERS opt-in.
      def build_explain_query(sql, options)
        explain_options = []
        explain_options << "ANALYZE"
        explain_options << "VERBOSE" if options[:verbose]
        explain_options << "COSTS" if options.fetch(:costs, true)
        explain_options << "SETTINGS" if options[:settings]
        explain_options << "BUFFERS" if options[:buffers]
        explain_options << "TIMING" if options.fetch(:timing, true)
        explain_options << "SUMMARY" if options.fetch(:summary, true)
        explain_options << "FORMAT JSON" # Always use JSON for parsing

        "EXPLAIN (#{explain_options.join(', ')}) #{sql}"
      end

      # Parses the single-row/single-column JSON EXPLAIN result into Ruby data.
      def parse_explain_output(result)
        return {} if result.rows.empty?

        json_string = result.rows.first.first
        JSON.parse(json_string)
      rescue JSON::ParserError => e
        Rails.logger.error "Failed to parse EXPLAIN output: #{e.message}"
        { error: "Failed to parse execution plan", raw_output: result.rows }
      end

      # One-line description of the root plan node, e.g.
      # "Seq Scan on users (cost=12.34) rows=5 time=2.0ms".
      def generate_plan_summary(plan_data)
        return nil unless plan_data.present? && plan_data.first

        plan = plan_data.first["Plan"]
        return nil unless plan

        summary_parts = []
        summary_parts << "#{plan['Node Type']}"
        summary_parts << "on #{plan['Relation Name']}" if plan["Relation Name"]
        summary_parts << "(cost=#{plan['Total Cost']&.round(2)})" if plan["Total Cost"]
        summary_parts << "rows=#{plan['Actual Rows']}" if plan["Actual Rows"]
        summary_parts << "time=#{plan['Actual Total Time']&.round(2)}ms" if plan["Actual Total Time"]

        summary_parts.join(" ")
      end

      def extract_planning_time(plan_data)
        return nil unless plan_data.present? && plan_data.first

        plan_data.first["Planning Time"]
      end

      def extract_execution_time(plan_data)
        return nil unless plan_data.present? && plan_data.first

        plan_data.first["Execution Time"]
      end

      # Planning + execution time; missing components count as zero.
      def calculate_total_time(plan_data)
        planning = extract_planning_time(plan_data) || 0
        execution = extract_execution_time(plan_data) || 0
        planning + execution
      end

      def extract_query_cost(plan_data)
        return nil unless plan_data.present? && plan_data.first && plan_data.first["Plan"]

        plan_data.first["Plan"]["Total Cost"]
      end

      # Collects BUFFERS counters from the root plan node when present.
      #
      # BUGFIX: the dirtied-blocks guard previously checked the misspelled key
      # "Shared Dirtied_blocks", so that stat was never captured.
      def extract_execution_stats(plan_data)
        return {} unless plan_data.present? && plan_data.first

        stats = {}
        plan = plan_data.first["Plan"]

        if plan
          stats[:shared_hit_blocks] = plan["Shared Hit Blocks"] if plan["Shared Hit Blocks"]
          stats[:shared_read_blocks] = plan["Shared Read Blocks"] if plan["Shared Read Blocks"]
          stats[:shared_dirtied_blocks] = plan["Shared Dirtied Blocks"] if plan["Shared Dirtied Blocks"]
          stats[:local_hit_blocks] = plan["Local Hit Blocks"] if plan["Local Hit Blocks"]
          stats[:local_read_blocks] = plan["Local Read Blocks"] if plan["Local Read Blocks"]
          stats[:temp_read_blocks] = plan["Temp Read Blocks"] if plan["Temp Read Blocks"]
          stats[:temp_written_blocks] = plan["Temp Written Blocks"] if plan["Temp Written Blocks"]
        end

        stats
      end

      # Heuristic scan of the plan tree for common performance problems.
      # Returns { suggestions:, issues_detected:, slow_operations:, missing_indexes: }.
      def generate_performance_insights(plan_data)
        return { suggestions: [], issues_detected: false } unless plan_data.present?

        insights = { suggestions: [], issues_detected: false, slow_operations: [], missing_indexes: [] }

        plan = plan_data.first&.dig("Plan")
        return insights unless plan

        # Analyze plan for performance issues
        analyze_node_performance(plan, insights)

        insights[:issues_detected] = insights[:suggestions].any? ||
                                     insights[:slow_operations].any? ||
                                     insights[:missing_indexes].any?

        insights
      end

      # Inspects one plan node (and recursively its children) for large seq
      # scans, slow sorts, expensive nested loops and disk-spilling hash joins.
      def analyze_node_performance(node, insights, level = 0)
        return unless node

        node_type = node["Node Type"]
        actual_time = node["Actual Total Time"]
        actual_rows = node["Actual Rows"]
        relation_name = node["Relation Name"]

        # Check for expensive sequential scans
        if node_type == "Seq Scan" && actual_rows && actual_rows > 1000
          insights[:slow_operations] << "Sequential scan on #{relation_name} (#{actual_rows} rows)"
          insights[:suggestions] << "Consider adding an index on #{relation_name} to avoid full table scan"
          insights[:missing_indexes] << relation_name if relation_name
        end

        # Check for expensive sorts
        if node_type == "Sort" && actual_time && actual_time > 100
          insights[:slow_operations] << "Expensive sort operation (#{actual_time.round(2)}ms)"
          insights[:suggestions] << "Consider adding an index to avoid sorting, or increase work_mem"
        end

        # Check for nested loop joins with high cost
        if node_type == "Nested Loop" && actual_time && actual_time > 50
          insights[:slow_operations] << "Potentially expensive nested loop join"
          insights[:suggestions] << "Consider adding indexes on join columns or using different join strategy"
        end

        # Check for hash joins that spill to disk
        if node_type == "Hash Join" && node["Temp Written Blocks"] && node["Temp Written Blocks"] > 0
          insights[:slow_operations] << "Hash join spilling to disk"
          insights[:suggestions] << "Consider increasing work_mem to avoid disk spilling"
        end

        # Recursively analyze child nodes
        if node["Plans"]
          node["Plans"].each do |child_plan|
            analyze_node_performance(child_plan, insights, level + 1)
          end
        end
      end

      # Converts an ActiveRecord::Result into a JSON-serializable hash.
      def serialize_result_data(result)
        {
          columns: result.columns,
          rows: result.rows,
          column_types: result.column_types
        }
      end

      # Yields the block and returns its wall-clock duration in milliseconds.
      def measure_execution_time
        start_time = Time.current
        yield
        ((Time.current - start_time) * 1000).round(3)
      end

      # Collapses all whitespace runs so stored SQL is single-line and tidy.
      def normalize_sql(sql)
        sql.strip.gsub(/\s+/, " ")
      end

      # NOTE(review): queue_adapter is rarely nil in modern Rails, so this is
      # effectively always true when ActiveJob is loaded.
      def background_jobs_available?
        defined?(ActiveJob) && ActiveJob::Base.queue_adapter.present?
      end
    end
  end
end
@@ -8,10 +8,17 @@
|
|
8
8
|
<%#= favicon_link_tag "favicon.png" %>
|
9
9
|
<%= stylesheet_link_tag "pg_insights/application", media: "all" %>
|
10
10
|
<%= stylesheet_link_tag "pg_insights/results", media: "all" %>
|
11
|
+
<%= stylesheet_link_tag "pg_insights/analysis", media: "all" %>
|
11
12
|
<%= stylesheet_link_tag "pg_insights/health", media: "all" %>
|
12
|
-
<%= javascript_include_tag "
|
13
|
+
<%= javascript_include_tag "chartkick", nonce: true %>
|
14
|
+
<%= javascript_include_tag "Chart.bundle", nonce: true %>
|
15
|
+
<%= javascript_include_tag "pg_insights/results/view_toggles", nonce: true %>
|
16
|
+
<%= javascript_include_tag "pg_insights/results/chart_renderer", nonce: true %>
|
17
|
+
<%= javascript_include_tag "pg_insights/results/table_manager", nonce: true %>
|
13
18
|
<%= javascript_include_tag "pg_insights/results", nonce: true %>
|
19
|
+
<%= javascript_include_tag "pg_insights/query_comparison", nonce: true %>
|
14
20
|
<%= javascript_include_tag "pg_insights/health", nonce: true %>
|
21
|
+
<%= javascript_include_tag "pg_insights/application", nonce: true %>
|
15
22
|
|
16
23
|
</head>
|
17
24
|
<body>
|