dbwatcher 0.1.5 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +2 -2
- data/app/controllers/dbwatcher/base_controller.rb +95 -0
- data/app/controllers/dbwatcher/dashboard_controller.rb +12 -0
- data/app/controllers/dbwatcher/queries_controller.rb +24 -0
- data/app/controllers/dbwatcher/sessions_controller.rb +15 -20
- data/app/controllers/dbwatcher/tables_controller.rb +38 -0
- data/app/helpers/dbwatcher/application_helper.rb +103 -0
- data/app/helpers/dbwatcher/formatting_helper.rb +108 -0
- data/app/helpers/dbwatcher/session_helper.rb +27 -0
- data/app/views/dbwatcher/dashboard/index.html.erb +177 -0
- data/app/views/dbwatcher/queries/index.html.erb +240 -0
- data/app/views/dbwatcher/sessions/index.html.erb +120 -27
- data/app/views/dbwatcher/sessions/show.html.erb +326 -129
- data/app/views/dbwatcher/shared/_badge.html.erb +4 -0
- data/app/views/dbwatcher/shared/_data_table.html.erb +20 -0
- data/app/views/dbwatcher/shared/_header.html.erb +7 -0
- data/app/views/dbwatcher/shared/_page_layout.html.erb +20 -0
- data/app/views/dbwatcher/shared/_section_panel.html.erb +9 -0
- data/app/views/dbwatcher/shared/_stats_card.html.erb +11 -0
- data/app/views/dbwatcher/shared/_tab_bar.html.erb +6 -0
- data/app/views/dbwatcher/tables/changes.html.erb +225 -0
- data/app/views/dbwatcher/tables/index.html.erb +123 -0
- data/app/views/dbwatcher/tables/show.html.erb +86 -0
- data/app/views/layouts/dbwatcher/application.html.erb +375 -26
- data/config/routes.rb +17 -3
- data/lib/dbwatcher/configuration.rb +9 -1
- data/lib/dbwatcher/engine.rb +12 -7
- data/lib/dbwatcher/logging.rb +72 -0
- data/lib/dbwatcher/services/dashboard_data_aggregator.rb +121 -0
- data/lib/dbwatcher/services/query_filter_processor.rb +114 -0
- data/lib/dbwatcher/services/table_statistics_collector.rb +119 -0
- data/lib/dbwatcher/sql_logger.rb +107 -0
- data/lib/dbwatcher/storage/api/base_api.rb +134 -0
- data/lib/dbwatcher/storage/api/concerns/table_analyzer.rb +172 -0
- data/lib/dbwatcher/storage/api/query_api.rb +95 -0
- data/lib/dbwatcher/storage/api/session_api.rb +134 -0
- data/lib/dbwatcher/storage/api/table_api.rb +86 -0
- data/lib/dbwatcher/storage/base_storage.rb +113 -0
- data/lib/dbwatcher/storage/change_processor.rb +65 -0
- data/lib/dbwatcher/storage/concerns/data_normalizer.rb +134 -0
- data/lib/dbwatcher/storage/concerns/error_handler.rb +75 -0
- data/lib/dbwatcher/storage/concerns/timestampable.rb +74 -0
- data/lib/dbwatcher/storage/concerns/validatable.rb +117 -0
- data/lib/dbwatcher/storage/date_helper.rb +21 -0
- data/lib/dbwatcher/storage/errors.rb +86 -0
- data/lib/dbwatcher/storage/file_manager.rb +122 -0
- data/lib/dbwatcher/storage/null_session.rb +39 -0
- data/lib/dbwatcher/storage/query_storage.rb +338 -0
- data/lib/dbwatcher/storage/query_validator.rb +24 -0
- data/lib/dbwatcher/storage/session.rb +58 -0
- data/lib/dbwatcher/storage/session_operations.rb +37 -0
- data/lib/dbwatcher/storage/session_query.rb +71 -0
- data/lib/dbwatcher/storage/session_storage.rb +322 -0
- data/lib/dbwatcher/storage/table_storage.rb +237 -0
- data/lib/dbwatcher/storage.rb +112 -85
- data/lib/dbwatcher/tracker.rb +4 -55
- data/lib/dbwatcher/version.rb +1 -1
- data/lib/dbwatcher.rb +12 -2
- metadata +47 -1
data/lib/dbwatcher/services/query_filter_processor.rb

```diff
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+module Dbwatcher
+  module Services
+    # Service object for filtering and sorting queries based on criteria
+    # Implements the strategy pattern for different filter types
+    class QueryFilterProcessor
+      include Dbwatcher::Logging
+
+      attr_reader :queries, :filter_params
+
+      # @param queries [Array<Hash>] the queries to filter
+      # @param filter_params [Hash] filtering parameters
+      def initialize(queries, filter_params)
+        @queries = queries
+        @filter_params = filter_params
+      end
+
+      # @param queries [Array<Hash>] queries to filter
+      # @param filter_params [Hash] filtering parameters
+      # @return [Array<Hash>] filtered and sorted queries
+      def self.call(queries, filter_params)
+        new(queries, filter_params).call
+      end
+
+      def call
+        log_filtering_start
+        start_time = Time.current
+
+        result = apply_all_filters
+        log_filtering_completion(start_time, result)
+
+        result
+      end
+
+      private
+
+      def log_filtering_start
+        log_info "Starting query filtering", {
+          initial_count: queries.length,
+          filters: active_filters.join(", ")
+        }
+      end
+
+      def apply_all_filters
+        queries
+          .then { |q| apply_operation_filter(q) }
+          .then { |q| apply_table_filter(q) }
+          .then { |q| apply_duration_filter(q) }
+          .then { |q| sort_by_timestamp_descending(q) }
+      end
+
+      def log_filtering_completion(start_time, result)
+        duration = Time.current - start_time
+        log_info "Completed query filtering in #{duration.round(3)}s", {
+          final_count: result.length,
+          filtered_out: queries.length - result.length
+        }
+      end
+
+      def apply_operation_filter(queries)
+        return queries unless filter_params[:operation].present?
+
+        queries.select { |query| matches_operation?(query) }
+      end
+
+      def matches_operation?(query)
+        query[:operation] == filter_params[:operation]
+      end
+
+      def apply_table_filter(queries)
+        return queries unless filter_params[:table].present?
+
+        queries.select { |query| includes_table?(query) }
+      end
+
+      def includes_table?(query)
+        query[:tables]&.include?(filter_params[:table])
+      end
+
+      def apply_duration_filter(queries)
+        return queries unless filter_params[:min_duration].present?
+
+        min_duration_threshold = filter_params[:min_duration].to_f
+        queries.select { |query| exceeds_duration_threshold?(query, min_duration_threshold) }
+      end
+
+      def exceeds_duration_threshold?(query, threshold)
+        duration = query[:duration]
+        duration && duration >= threshold
+      end
+
+      def sort_by_timestamp_descending(queries)
+        queries.sort_by { |query| timestamp_for_sorting(query) }.reverse
+      end
+
+      def timestamp_for_sorting(query)
+        return 0 unless query[:timestamp]
+
+        Time.parse(query[:timestamp]).to_i
+      rescue ArgumentError
+        0
+      end
+
+      def active_filters
+        filters = []
+        filters << "operation=#{filter_params[:operation]}" if filter_params[:operation].present?
+        filters << "table=#{filter_params[:table]}" if filter_params[:table].present?
+        filters << "min_duration=#{filter_params[:min_duration]}" if filter_params[:min_duration].present?
+        filters.empty? ? ["none"] : filters
+      end
+    end
+  end
+end
```
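The processor is used through its `.call` class method. A minimal usage sketch, not taken from the gem's own tests: the query hashes and filter values below are invented, ActiveSupport (`present?`, `Time.current`) is assumed to be loaded as it is inside the Rails engine, and in practice the queries come from `Storage.queries` with params supplied by the queries controller.

```ruby
queries = [
  { operation: "SELECT", tables: ["users"],  duration: 1.2,  timestamp: "2024-01-01T10:00:00Z" },
  { operation: "UPDATE", tables: ["orders"], duration: 15.7, timestamp: "2024-01-01T10:05:00Z" }
]

# Keep only UPDATE statements slower than 10 ms, newest first.
slow_updates = Dbwatcher::Services::QueryFilterProcessor.call(
  queries,
  { operation: "UPDATE", min_duration: "10" }
)
slow_updates.length # => 1
```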
data/lib/dbwatcher/services/table_statistics_collector.rb

```diff
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module Dbwatcher
+  module Services
+    # Service object for collecting and organizing table statistics
+    # Follows the command pattern with self.call class method
+    class TableStatisticsCollector
+      include Dbwatcher::Logging
+
+      # @return [Array<Hash>] sorted array of table statistics
+      def self.call
+        new.call
+      end
+
+      def call
+        log_info "Starting table statistics collection"
+        start_time = Time.current
+
+        tables = build_initial_tables_hash
+        populate_change_statistics(tables)
+        result = sort_by_change_count(tables)
+
+        duration = Time.current - start_time
+        log_info "Completed table statistics collection in #{duration.round(3)}s", {
+          tables_count: result.length,
+          total_changes: result.sum { |t| t[:change_count] }
+        }
+
+        result
+      end
+
+      private
+
+      def build_initial_tables_hash
+        tables = {}
+        schema_tables_count = load_schema_tables(tables)
+        log_schema_loading_result(schema_tables_count)
+        tables
+      end
+
+      def load_schema_tables(tables)
+        return 0 unless schema_available?
+
+        schema_tables_count = 0
+        begin
+          ActiveRecord::Base.connection.tables.each do |table|
+            tables[table] = build_table_entry(table)
+            schema_tables_count += 1
+          end
+          schema_tables_count
+        rescue StandardError => e
+          log_warn "Could not load tables from schema: #{e.message}"
+          0
+        end
+      end
+
+      def schema_available?
+        defined?(ActiveRecord::Base)
+      end
+
+      def log_schema_loading_result(count)
+        if count.positive?
+          log_debug "Loaded #{count} tables from database schema"
+        else
+          log_debug "ActiveRecord not available, starting with empty tables hash"
+        end
+      end
+
+      def build_table_entry(table_name)
+        {
+          name: table_name,
+          change_count: 0,
+          last_change: nil
+        }
+      end
+
+      def populate_change_statistics(tables)
+        sessions_processed = 0
+        total_changes = 0
+
+        Storage.sessions.all.each do |session_info|
+          session = Storage.sessions.find(session_info[:id])
+          next unless session
+
+          session_changes_count = session.changes.length
+          update_tables_from_session(tables, session)
+          sessions_processed += 1
+          total_changes += session_changes_count
+        end
+
+        log_debug "Processed #{sessions_processed} sessions with #{total_changes} total changes"
+        tables
+      end
+
+      def update_tables_from_session(tables, session)
+        session.changes.each do |change|
+          table_name = change[:table_name]
+          next if table_name.nil? || table_name.empty?
+
+          tables[table_name] ||= build_table_entry(table_name)
+          update_table_change_statistics(tables[table_name], change)
+        end
+      end
+
+      def update_table_change_statistics(table_stats, change)
+        table_stats[:change_count] += 1
+        timestamp = change[:timestamp]
+
+        return unless table_stats[:last_change].nil? || timestamp > table_stats[:last_change]
+
+        table_stats[:last_change] = timestamp
+      end
+
+      def sort_by_change_count(tables)
+        tables.values.sort_by { |table| -table[:change_count] }
+      end
+    end
+  end
+end
```
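The collector takes no arguments and reads sessions straight from `Dbwatcher::Storage`, so calling it is a one-liner. A hedged sketch with made-up output values:

```ruby
stats = Dbwatcher::Services::TableStatisticsCollector.call
# => [{ name: "orders", change_count: 42, last_change: "2024-01-01T10:05:00Z" },
#     { name: "users",  change_count: 3,  last_change: nil },
#     ...]                                # sorted by change_count; illustrative values

# e.g. render the five most active tables
stats.first(5).each do |table|
  puts format("%-20s %4d changes (last: %s)",
              table[:name], table[:change_count], table[:last_change] || "never")
end
```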
data/lib/dbwatcher/sql_logger.rb

```diff
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+module Dbwatcher
+  class SqlLogger
+    include Singleton
+
+    attr_reader :queries
+
+    def initialize
+      @queries = []
+      @mutex = Mutex.new
+      setup_subscriber if Dbwatcher.configuration.track_queries
+    end
+
+    def log_query(sql, name, binds, _type_casted_binds, duration)
+      return unless Dbwatcher.configuration.track_queries
+
+      @mutex.synchronize do
+        query = create_query_record(sql, name, binds, duration)
+        store_query(query)
+      end
+    end
+
+    def create_query_record(sql, name, binds, duration)
+      {
+        id: SecureRandom.uuid,
+        sql: sql,
+        name: name,
+        binds: binds,
+        duration: duration,
+        timestamp: Time.current,
+        session_id: current_session_id,
+        backtrace: filtered_backtrace,
+        tables: extract_tables(sql),
+        operation: extract_operation(sql)
+      }
+    end
+
+    def store_query(query)
+      @queries << query
+      Storage.queries.create(query)
+    end
+
+    def clear_queries
+      @mutex.synchronize do
+        @queries.clear
+      end
+    end
+
+    private
+
+    def setup_subscriber
+      ActiveSupport::Notifications.subscribe("sql.active_record") do |_name, start, finish, _id, payload|
+        next if skip_query?(payload)
+
+        duration = (finish - start) * 1000.0
+        log_query(
+          payload[:sql],
+          payload[:name],
+          payload[:binds],
+          payload[:type_casted_binds],
+          duration
+        )
+      end
+    end
+
+    def skip_query?(payload)
+      skip_schema_query?(payload) || skip_internal_query?(payload)
+    end
+
+    def skip_schema_query?(payload)
+      payload[:name]&.include?("SCHEMA")
+    end
+
+    def skip_internal_query?(payload)
+      return true if payload[:sql]&.include?("sqlite_master")
+      return true if payload[:sql]&.include?("PRAGMA")
+      return true if payload[:sql]&.include?("information_schema")
+
+      false
+    end
+
+    def extract_tables(sql)
+      # Extract table names from SQL
+      tables = []
+      # Match FROM, JOIN, INTO, UPDATE, DELETE FROM patterns
+      sql.scan(/(?:FROM|JOIN|INTO|UPDATE|DELETE\s+FROM)\s+["`]?(\w+)["`]?/i) do |match|
+        tables << match[0]
+      end
+      tables.uniq
+    end
+
+    def extract_operation(sql)
+      sql.strip.split(/\s+/).first.upcase
+    end
+
+    def filtered_backtrace
+      caller.select { |line| line.include?(Rails.root.to_s) }
+            .reject { |line| line.include?("dbwatcher") }
+            .first(5)
+    end
+
+    def current_session_id
+      Tracker.current_session&.id
+    end
+  end
+end
```
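A sketch of the logger's lifecycle in an application using the gem. `SqlLogger` reads `Dbwatcher.configuration.track_queries`, so the flag must be truthy before the singleton is first instantiated; treating the flag as writable here is an assumption about the accompanying `configuration.rb` change.

```ruby
Dbwatcher.configuration.track_queries = true   # assumed writer on the configuration object

logger = Dbwatcher::SqlLogger.instance          # Singleton; subscribes to "sql.active_record"

# ... run some ActiveRecord queries ...

logger.queries.last(3).each do |q|
  puts "#{q[:operation]} on #{q[:tables].join(', ')} (#{q[:duration].round(1)} ms)"
end

logger.clear_queries   # empties the in-memory buffer; records persisted via Storage.queries remain
```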
data/lib/dbwatcher/storage/api/base_api.rb

```diff
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+module Dbwatcher
+  module Storage
+    module Api
+      # Base class for all storage API classes
+      #
+      # This class provides common functionality and patterns for all
+      # storage API implementations (SessionAPI, QueryAPI, TableAPI).
+      # It establishes the foundation for the fluent interface pattern
+      # and shared filtering capabilities.
+      #
+      # @abstract Subclass and implement specific API methods
+      # @example
+      #   class MyAPI < BaseAPI
+      #     def my_filter(value)
+      #       @filters[:my_key] = value
+      #       self
+      #     end
+      #   end
+      class BaseAPI
+        include Storage::Concerns::DataNormalizer
+
+        # Initialize the API with a storage backend
+        #
+        # @param storage [Object] storage backend instance
+        def initialize(storage)
+          @storage = storage
+          @filters = {}
+          @limit_value = nil
+        end
+
+        # Apply limit to results
+        #
+        # @param count [Integer] maximum number of results
+        # @return [BaseAPI] self for method chaining
+        def limit(count)
+          @limit_value = count
+          self
+        end
+
+        # Filter by conditions
+        #
+        # @param conditions [Hash] filtering conditions
+        # @return [BaseAPI] self for method chaining
+        def where(conditions)
+          @filters.merge!(conditions)
+          self
+        end
+
+        # Get all results after applying filters
+        #
+        # @return [Array] filtered results
+        # @abstract Subclasses should implement this method
+        def all
+          raise NotImplementedError, "Subclasses must implement #all"
+        end
+
+        # Create a new record
+        #
+        # @param data [Hash] record data
+        # @return [Hash] created record
+        # @abstract Subclasses should implement this method if creation is supported
+        def create(data)
+          @storage.save(data)
+        end
+
+        protected
+
+        attr_reader :storage, :filters, :limit_value
+
+        # Apply common filters to a result set
+        #
+        # @param results [Array] raw results
+        # @return [Array] filtered results
+        def apply_common_filters(results)
+          result = results
+
+          # Apply limit if specified
+          result = result.first(limit_value) if limit_value
+
+          result
+        end
+
+        # Apply time-based filtering
+        #
+        # @param results [Array] results to filter
+        # @param time_field [Symbol] field containing timestamp
+        # @return [Array] filtered results
+        def apply_time_filter(results, time_field)
+          return results unless filters[:started_after]
+
+          cutoff = filters[:started_after]
+          results.select do |item|
+            timestamp = item[time_field]
+            next false unless timestamp
+
+            begin
+              Time.parse(timestamp.to_s) >= cutoff
+            rescue ArgumentError
+              false
+            end
+          end
+        end
+
+        # Apply pattern matching filter
+        #
+        # @param results [Array] results to filter
+        # @param fields [Array<Symbol>] fields to search in
+        # @param pattern [String] pattern to match
+        # @return [Array] filtered results
+        def apply_pattern_filter(results, fields, pattern)
+          return results unless pattern
+
+          results.select do |item|
+            fields.any? do |field|
+              value = item[field]
+              value&.to_s&.include?(pattern)
+            end
+          end
+        end
+
+        # Safe value extraction with normalization
+        #
+        # @param item [Hash] item to extract from
+        # @param key [Symbol] key to extract
+        # @return [Object] extracted value
+        def safe_extract(item, key)
+          extract_value(item, key.to_s)
+        end
+      end
+    end
+  end
+end
```
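To make the fluent-interface contract concrete, a hypothetical subclass (not part of the gem); `storage.load_all` is an invented backend method standing in for whatever the real SessionAPI/QueryAPI/TableAPI backends expose:

```ruby
class RecentQueriesAPI < Dbwatcher::Storage::Api::BaseAPI
  # Custom fluent filter: keep only records whose :sql mentions the fragment.
  def matching(fragment)
    @filters[:pattern] = fragment
    self
  end

  # Required hook: load from the injected backend, then apply the shared filters.
  def all
    results = storage.load_all                              # assumed backend method
    results = apply_pattern_filter(results, [:sql], filters[:pattern])
    apply_common_filters(results)                           # honours #limit
  end
end

# RecentQueriesAPI.new(backend).matching("ROLLBACK").limit(10).all
```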
data/lib/dbwatcher/storage/api/concerns/table_analyzer.rb

```diff
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+module Dbwatcher
+  module Storage
+    module Api
+      module Concerns
+        # Provides reusable table analysis functionality for API classes
+        #
+        # This concern extracts common table analysis logic used by API classes
+        # to avoid duplication and provide consistent table analysis capabilities.
+        #
+        # @example
+        #   class MyAPI < BaseAPI
+        #     include Api::Concerns::TableAnalyzer
+        #
+        #     def analyze(session)
+        #       build_tables_summary(session)
+        #     end
+        #   end
+        module TableAnalyzer
+          # Build tables summary from session changes
+          #
+          # @param session [Session] session to analyze
+          # @return [Hash] tables summary hash
+          def build_tables_summary(session)
+            tables = {}
+            process_session_changes(session, tables)
+            tables
+          end
+
+          # Process all changes in a session
+          #
+          # @param session [Session] session with changes
+          # @param tables [Hash] tables hash to populate
+          # @return [void]
+          def process_session_changes(session, tables)
+            return unless session&.changes.respond_to?(:each)
+
+            session.changes.each do |change|
+              table_name = extract_table_name(change)
+              next unless table_name
+
+              initialize_table_data(tables, table_name)
+              update_table_data(tables[table_name], change)
+              update_sample_record(tables[table_name], change)
+            end
+          end
+
+          # Extract table name from change data
+          #
+          # @param change [Hash] change data
+          # @return [String, nil] table name or nil
+          def extract_table_name(change)
+            return nil unless change.is_a?(Hash)
+
+            # Only use symbols since data is normalized
+            change[:table_name]
+          end
+
+          # Initialize table data structure
+          #
+          # @param tables [Hash] tables hash
+          # @param table_name [String] table name
+          # @return [void]
+          def initialize_table_data(tables, table_name)
+            tables[table_name] ||= {
+              name: table_name,
+              operations: { "INSERT" => 0, "UPDATE" => 0, "DELETE" => 0 },
+              changes: [],
+              sample_record: nil,
+              records: {},
+              relationships: []
+            }
+          end
+
+          # Update table data with change information
+          #
+          # @param table_data [Hash] table data hash
+          # @param change [Hash] change data
+          # @return [void]
+          def update_table_data(table_data, change)
+            # Count operations
+            operation = extract_operation(change)
+            table_data[:operations][operation] ||= 0
+            table_data[:operations][operation] += 1
+
+            # Add change to the list
+            table_data[:changes] << change
+          end
+
+          # Update sample record if not already set
+          #
+          # @param table_data [Hash] table data hash
+          # @param change [Hash] change data
+          # @return [void]
+          def update_sample_record(table_data, change)
+            return unless table_data[:sample_record].nil?
+
+            snapshot = extract_record_snapshot(change)
+            table_data[:sample_record] = snapshot if snapshot
+          end
+
+          # Update record history for analysis
+          #
+          # @param table_data [Hash] table data hash
+          # @param change [Hash] change data
+          # @return [void]
+          def update_record_history(table_data, change)
+            record_id = extract_record_id(change)
+            return unless record_id
+
+            table_data[:records][record_id] ||= []
+            table_data[:records][record_id] << {
+              operation: extract_operation(change),
+              timestamp: extract_timestamp(change),
+              changes: extract_field_changes(change)
+            }
+          end
+
+          private
+
+          # Extract operation from change data
+          #
+          # @param change [Hash] change data
+          # @return [String] operation string
+          def extract_operation(change)
+            # Only use symbols since data is normalized
+            operation = change[:operation] || "UNKNOWN"
+            operation.to_s.upcase
+          end
+
+          # Extract record snapshot from change data
+          #
+          # @param change [Hash] change data
+          # @return [Hash, nil] record snapshot or nil
+          def extract_record_snapshot(change)
+            # Only use symbols since data is normalized
+            change[:record_snapshot]
+          end
+
+          # Extract record ID from change data
+          #
+          # @param change [Hash] change data
+          # @return [String, nil] record ID or nil
+          def extract_record_id(change)
+            # Only use symbols since data is normalized
+            id = change[:record_id]
+            id&.to_s
+          end
+
+          # Extract timestamp from change data
+          #
+          # @param change [Hash] change data
+          # @return [String, nil] timestamp string or nil
+          def extract_timestamp(change)
+            # Only use symbols since data is normalized
+            change[:timestamp]
+          end
+
+          # Extract field changes from change data
+          #
+          # @param change [Hash] change data
+          # @return [Hash] field changes hash
+          def extract_field_changes(change)
+            # Only use symbols since data is normalized
+            change[:changes] || {}
+          end
+        end
+      end
+    end
+  end
+end
```
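A self-contained sketch of the concern in use. `FakeSession` below is a stand-in for `Dbwatcher::Storage::Session` and the change hashes are invented; the concern only needs an object whose `#changes` returns symbol-keyed hashes.

```ruby
class ChangeInspector
  include Dbwatcher::Storage::Api::Concerns::TableAnalyzer
end

FakeSession = Struct.new(:changes)
session = FakeSession.new([
  { table_name: "users", operation: "insert", record_snapshot: { id: 1, name: "Ada" } },
  { table_name: "users", operation: "update" }
])

summary = ChangeInspector.new.build_tables_summary(session)
summary["users"][:operations]    # => { "INSERT" => 1, "UPDATE" => 1, "DELETE" => 0 }
summary["users"][:sample_record] # => { id: 1, name: "Ada" }
```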