database_recorder 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +6 -0
- data/lib/database_recorder/active_record/recorder.rb +3 -3
- data/lib/database_recorder/config.rb +4 -2
- data/lib/database_recorder/core.rb +24 -0
- data/lib/database_recorder/mysql2/recorded_result.rb +4 -4
- data/lib/database_recorder/mysql2/recorder.rb +6 -6
- data/lib/database_recorder/pg/recorded_result.rb +3 -3
- data/lib/database_recorder/pg/recorder.rb +7 -7
- data/lib/database_recorder/recording.rb +12 -11
- data/lib/database_recorder/rspec.rb +1 -0
- data/lib/database_recorder/storage/base.rb +2 -1
- data/lib/database_recorder/storage/file.rb +21 -15
- data/lib/database_recorder/storage/redis.rb +15 -13
- data/lib/database_recorder/version.rb +3 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d0b2575a34a9e68ee59f68884a492a33c071a05b9d9999caf3baeca5c070c11d
+  data.tar.gz: 29b78920c82393f3e9a23d334b7675d29dc96f3b77f13ff1bf33384cc3206a0d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0c7c56cc0c95e24b410ec5d63ac64b1944a20d7084d63ae64b0d80ea265b815435d2c3ce14eec3fb9d2f69261c567ddd59eaba24b4ca0c28f3993272fbb2e33b
+  data.tar.gz: 78db7a6377ba564f5e039bd74f32ffe4c4f6d6c6ff5719340818738a7a4ea40caf4f2164da1cf72e1ebb3800863e1921d3c2bbf32952ff00dc1154487367fe4a
data/README.md
CHANGED
@@ -70,6 +70,12 @@ DatabaseRecorder::Config.replay_recordings = true
 # To store the queries: :file | :redis | nil
 DatabaseRecorder::Config.storage = :redis
 # nil to avoid storing the queries
+
+# File storage options
+DatabaseRecorder::Config.storage_options = { recordings_path: '/some/path' }
+
+# Redis storage options
+DatabaseRecorder::Config.storage_options = { connection: Redis.new }
 ```
 
 ## History of the queries
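The new `storage_options` setting documented above complements the existing `storage` switch: `recordings_path` applies to the file backend and `connection` to the Redis backend. A minimal configuration sketch using only the options shown in this README excerpt (the path and the Redis URL are illustrative):

```ruby
require 'database_recorder'

# File-backed recordings written under a custom directory
DatabaseRecorder::Config.storage = :file
DatabaseRecorder::Config.storage_options = { recordings_path: 'spec/fixtures/dbr' }

# Or Redis-backed recordings reusing an existing client
# DatabaseRecorder::Config.storage = :redis
# DatabaseRecorder::Config.storage_options = { connection: Redis.new(url: ENV['REDIS_URL']) }
```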
data/lib/database_recorder/active_record/recorder.rb
CHANGED
@@ -18,16 +18,16 @@ module DatabaseRecorder
         if Config.replay_recordings && Recording.from_cache
           Recording.push(sql: sql, binds: binds)
           data = Recording.cached_query_for(sql)
-          return yield if !data || !data['result'] # cache miss
+          return yield if !data || !data[:result] # cache miss
 
-          RecordedResult.new(data['result']['fields'], data['result']['values'])
+          RecordedResult.new(data[:result][:fields], data[:result][:values])
         else
           yield.tap do |result|
             result_data =
               if result && (result.respond_to?(:fields) || result.respond_to?(:columns))
                 fields = result.respond_to?(:fields) ? result.fields : result.columns
                 values = result.respond_to?(:values) ? result.values : result.to_a
-                { 'count' => result.count, 'fields' => fields, 'values' => values }
+                { count: result.count, fields: fields, values: values }
               end
             Recording.push(sql: sql, name: name, binds: type_casted_binds, result: result_data)
           end
data/lib/database_recorder/config.rb
CHANGED
@@ -17,18 +17,20 @@ module DatabaseRecorder
       redis: DatabaseRecorder::Storage::Redis
     }.freeze
 
-    attr_accessor :db_driver, :print_queries, :replay_recordings, :storage
+    attr_accessor :db_driver, :print_queries, :replay_recordings, :storage, :storage_options
 
     class << self
      extend Forwardable
 
-      def_delegators :instance, :db_driver, :print_queries, :replay_recordings, :replay_recordings=, :storage
+      def_delegators :instance, :db_driver, :print_queries, :replay_recordings, :replay_recordings=, :storage,
+                     :storage_options, :storage_options=
 
      def load_defaults
        instance.db_driver = DEFAULT_DB_DRIVER
        instance.print_queries = false
        instance.replay_recordings = false
        instance.storage = DEFAULT_STORAGE
+        instance.storage_options = {}
      end
 
      def db_driver=(value)
data/lib/database_recorder/core.rb
CHANGED
@@ -22,5 +22,29 @@ module DatabaseRecorder
       when :pg then PG::Recorder.setup
       end
     end
+
+    def string_keys_recursive(hash)
+      {}.tap do |h|
+        hash.each do |key, value|
+          h[key.to_s] = transform(value, :string_keys_recursive)
+        end
+      end
+    end
+
+    def symbolize_recursive(hash)
+      {}.tap do |h|
+        hash.each do |key, value|
+          h[key.to_sym] = transform(value, :symbolize_recursive)
+        end
+      end
+    end
+
+    def transform(value, source_method)
+      case value
+      when Hash then method(source_method).call(value)
+      when Array then value.map { |v| transform(v, source_method) }
+      else value
+      end
+    end
   end
 end
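These new helpers convert hash keys recursively in both directions: the storage backends call `Core.string_keys_recursive` before serializing a recording and `Core.symbolize_recursive` after loading one, so data round-trips between Ruby symbol keys and plain YAML/JSON string keys. A small illustrative call (the sample data is made up):

```ruby
data = { 'queries' => [{ 'sql' => 'SELECT 1', 'result' => { 'count' => 1, 'values' => [[1]] } }] }

DatabaseRecorder::Core.symbolize_recursive(data)
# => { queries: [{ sql: "SELECT 1", result: { count: 1, values: [[1]] } }] }
```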
data/lib/database_recorder/mysql2/recorded_result.rb
CHANGED
@@ -17,10 +17,10 @@ module DatabaseRecorder
       alias :size :count
 
       def prepare(data)
-        @count = data['count']
-        @fields = data['fields']
-        @entries = data['values']
-        # @values = data['values']
+        @count = data[:count]
+        @fields = data[:fields]
+        @entries = data[:values]
+        # @values = data[:values]
       end
 
       # def server_flags
data/lib/database_recorder/mysql2/recorder.rb
CHANGED
@@ -13,7 +13,7 @@ module DatabaseRecorder
       end
 
       def format_result(result)
-        { 'count' => result.count, 'fields' => result.fields, 'values' => result.to_a } if result.is_a?(::Mysql2::Result)
+        { count: result.count, fields: result.fields, values: result.to_a } if result.is_a?(::Mysql2::Result)
         # else
         #   last_insert_id = adapter.query('SELECT LAST_INSERT_ID() AS _dbr_last_insert_id').to_a
         #   { 'count' => last_insert_id.count, 'fields' => ['id'], 'values' => last_insert_id }
@@ -36,14 +36,14 @@ module DatabaseRecorder
 
       def store_prepared_statement(adapter, source:, binds:)
         # sql = @last_prepared&.send(:[], 'sql')
-        sql = @last_prepared['sql']
+        sql = @last_prepared[:sql]
         Core.log_query(sql, source)
         if Config.replay_recordings && !Recording.cache.nil?
-          data = Recording.cache.find { |query| query['sql'] == sql }
+          data = Recording.cache.find { |query| query[:sql] == sql }
           return yield unless data # cache miss
 
-          Recording.push(sql: data['sql'], binds: data['binds'], source: source)
-          RecordedResult.new(data['result'].slice('count', 'fields', 'values'))
+          Recording.push(sql: data[:sql], binds: data[:binds], source: source)
+          RecordedResult.new(data[:result].slice(:count, :fields, :values))
         else
           yield.tap do |result|
             Recording.update_prepared(sql: sql, binds: binds, result: format_result(result), source: source)
@@ -60,7 +60,7 @@ module DatabaseRecorder
         data = Recording.cached_query_for(sql)
         return yield unless data # cache miss
 
-        RecordedResult.new.prepare(data['result'].slice('count', 'fields', 'values')) if data['result']
+        RecordedResult.new.prepare(data[:result].slice(:count, :fields, :values)) if data[:result]
       else
         yield.tap do |result|
           Recording.push(sql: sql, result: format_result(result), source: source)
data/lib/database_recorder/pg/recorded_result.rb
CHANGED
@@ -11,9 +11,9 @@ module DatabaseRecorder
       alias :rows :values
 
       def initialize(data)
-        @count = data['count']
-        @fields = data['fields']
-        @values = data['values']
+        @count = data[:count]
+        @fields = data[:fields]
+        @values = data[:values]
       end
 
       def clear; end
data/lib/database_recorder/pg/recorder.rb
CHANGED
@@ -12,7 +12,7 @@ module DatabaseRecorder
       end
 
       def format_result(result)
-        { 'count' => result.count, 'fields' => result.fields, 'values' => result.values } if result
+        { count: result.count, fields: result.fields, values: result.values } if result
       end
 
       def prepare_statement(sql: nil, name: nil, binds: nil, source: nil)
@@ -28,18 +28,18 @@ module DatabaseRecorder
 
       def store_prepared_statement(name: nil, sql: nil, binds: nil, source: nil)
         if Config.replay_recordings && !Recording.cache.nil?
-          data = Recording.cache.find { |query| query['name'] == name }
+          data = Recording.cache.find { |query| query[:name] == name }
           return yield unless data # cache miss
 
-          Core.log_query(data['sql'], source)
-          Recording.push(sql: data['sql'], binds: data['binds'], source: source)
-          RecordedResult.new(data['result'].slice('count', 'fields', 'values'))
+          Core.log_query(data[:sql], source)
+          Recording.push(sql: data[:sql], binds: data[:binds], source: source)
+          RecordedResult.new(data[:result].slice(:count, :fields, :values))
         else
           Core.log_query(sql, source)
           yield.tap do |query_result|
             result = format_result(query_result)
             query = Recording.update_prepared(name: name, sql: sql, binds: binds, result: result, source: source)
-            Core.log_query(query['sql'], source)
+            Core.log_query(query[:sql], source)
           end
         end
       end
@@ -54,7 +54,7 @@ module DatabaseRecorder
         data = Recording.cached_query_for(sql)
         return yield unless data # cache miss
 
-        RecordedResult.new(data['result'].slice('count', 'fields', 'values'))
+        RecordedResult.new(data[:result].slice(:count, :fields, :values))
       else
         yield.tap do |result|
           Recording.push(name: name, sql: sql, binds: binds, result: format_result(result), source: source)
data/lib/database_recorder/recording.rb
CHANGED
@@ -4,13 +4,14 @@ require 'forwardable'
 
 module DatabaseRecorder
   class Recording
-    attr_accessor :cache, :entities
+    attr_accessor :cache, :entities, :metadata
    attr_reader :from_cache, :options, :prepared_queries, :queries, :started
 
    def initialize(options: {})
      (@@instances ||= {})[Process.pid] = self
      @cache = nil
      @entities = []
+      @metadata = {}
      @options = options
      @queries = []
      @search_index = 0
@@ -21,7 +22,7 @@ module DatabaseRecorder
      current = @search_index
      match = cache[@search_index..].find do |item|
        current += 1
-        item['sql'] == sql
+        item[:sql] == sql
      end
      return unless match
 
@@ -32,7 +33,7 @@ module DatabaseRecorder
    end
 
    def new_entity(model:, id:)
-      @entities.push('model' => model, 'id' => id)
+      @entities.push(model: model, id: id)
    end
 
    def pull_entity
@@ -40,32 +41,32 @@ module DatabaseRecorder
    end
 
    def push(sql:, name: nil, binds: nil, result: nil, source: nil)
-      query = { 'name' => name, 'sql' => sql, 'binds' => binds, 'result' => result }.compact
+      query = { name: name, sql: sql, binds: binds, result: result }.compact
      @queries.push(query)
    end
 
    def push_prepared(name: nil, sql: nil, binds: nil, result: nil, source: nil)
-      query = { 'name' => name, 'sql' => sql, 'binds' => binds, 'result' => result }.compact
+      query = { name: name, sql: sql, binds: binds, result: result }.compact
      @@prepared_queries[name || sql] = query
    end
 
    def start
      @started = true
-      storage = Config.storage&.new(self, name: options[:name])
+      storage = Config.storage&.new(self, name: options[:name], options: Config.storage_options)
      @from_cache = storage&.load
      yield
      storage&.save unless from_cache
      @started = false
-      result = { current_queries: queries.map { |query| query['sql'] } }
-      result[:stored_queries] = cache.map { |query| query['sql'] } if from_cache
+      result = { current_queries: queries.map { |query| query[:sql] } }
+      result[:stored_queries] = cache.map { |query| query[:sql] } if from_cache
      result
    end
 
    def update_prepared(name: nil, sql: nil, binds: nil, result: nil, source: nil)
      query = @@prepared_queries[name || sql]
-      query['sql'] = sql if sql
-      query['binds'] = binds if binds
-      query['result'] = result if result
+      query[:sql] = sql if sql
+      query[:binds] = binds if binds
+      query[:result] = result if result
      @queries.push(query)
      query
    end
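With these changes a recorded query is held as a compacted, symbol-keyed hash, and the configured `storage_options` are now forwarded to the storage backend when a recording starts. A quick illustration of the query shape (the SQL is just an example):

```ruby
recording = DatabaseRecorder::Recording.new
recording.push(sql: 'SELECT * FROM users', binds: [])

recording.queries.last
# => { sql: "SELECT * FROM users", binds: [] }
# :name and :result are nil here, so .compact drops them
```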
data/lib/database_recorder/rspec.rb
CHANGED
@@ -16,6 +16,7 @@ module DatabaseRecorder
      options.merge!(example.metadata[:dbr]) if example.metadata[:dbr].is_a?(Hash)
      options.merge!(example: example, name: "#{example.full_description}__#{ref}")
      Recording.new(options: options).tap do |recording|
+        recording.metadata = { example: example.id, started_at: Time.now }
        result = recording.start { example.run }
        if options[:verify_queries] && result[:stored_queries]
          expect(result[:stored_queries]).to match_array(result[:current_queries])
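The per-example options come from RSpec metadata: any hash stored under the `:dbr` key is merged into the recording options, and each recording now also captures the example id and start time. For instance, the `verify_queries` option checked above could be enabled for a single example like this (the spec subject and `User` model are invented for illustration):

```ruby
RSpec.describe 'users query' do
  it 'runs the same queries as the stored recording', dbr: { verify_queries: true } do
    expect(User.where(active: true).count).to eq 3
  end
end
```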
data/lib/database_recorder/storage/file.rb
CHANGED
@@ -4,11 +4,12 @@ module DatabaseRecorder
  module Storage
    class File < Base
      def load
-        stored_data = ::File.exist?(record_file) ? ::File.read(record_file) : false
+        stored_data = ::File.exist?(storage_path) ? ::File.read(storage_path) : false
        if stored_data
-          data = YAML.load(stored_data) # rubocop:disable Security/YAMLLoad
-          @recording.cache = data['queries'] || []
-          @recording.entities = data['entities']
+          parsed_data = YAML.load(stored_data) # rubocop:disable Security/YAMLLoad
+          data = Core.symbolize_recursive(parsed_data)
+          @recording.cache = data[:queries] || []
+          @recording.entities = data[:entities]
          true
        else
          false
@@ -16,10 +17,22 @@ module DatabaseRecorder
      end
 
      def save
-        data = { 'queries' => @recording.queries }
-        data['entities'] = @recording.entities if @recording.entities.any?
-        serialized_data = data.to_yaml
-        ::File.write(record_file, serialized_data)
+        data = {}
+        data[:metadata] = @recording.metadata unless @recording.metadata.empty?
+        data[:queries] = @recording.queries if @recording.queries.any?
+        data[:entities] = @recording.entities if @recording.entities.any?
+        serialized_data = Core.string_keys_recursive(data).to_yaml
+        ::File.write(storage_path, serialized_data)
+        true
+      end
+
+      def storage_path
+        @storage_path ||= begin
+          name = normalize_name(@name)
+          path = @options[:recordings_path] || 'spec/dbr'
+          FileUtils.mkdir_p(path)
+          "#{path}/#{name}.yml"
+        end
      end
 
      private
@@ -27,13 +40,6 @@ module DatabaseRecorder
      def normalize_name(string)
        string.gsub(%r{[:/]}, '-').gsub(/[^\w-]/, '_')
      end
-
-      def record_file
-        name = normalize_name(@name)
-        path = 'spec/dbr'
-        FileUtils.mkdir_p(path)
-        "#{path}/#{name}.yml"
-      end
    end
  end
end
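The new `storage_path` derives the recordings directory from `storage_options[:recordings_path]`, falling back to the previously hard-coded `spec/dbr`, and still names the file after the normalized recording name. For example (the recording name below is invented):

```ruby
DatabaseRecorder::Config.storage = :file
DatabaseRecorder::Config.storage_options = { recordings_path: 'tmp/dbr_recordings' }

# A recording named "UsersQuery returns active users__1" would then be written to:
#   tmp/dbr_recordings/UsersQuery_returns_active_users__1.yml
```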
data/lib/database_recorder/storage/redis.rb
CHANGED
@@ -3,12 +3,17 @@
module DatabaseRecorder
  module Storage
    class Redis < Base
+      def connection
+        @connection ||= @options[:connection] || ::Redis.new
+      end
+
      def load
-        stored_data = self.class.connection.get(@name)
+        stored_data = connection.get(@name)
        if stored_data
-          data = JSON.parse(stored_data)
-          @recording.cache = data['queries'] || []
-          @recording.entities = data['entities']
+          parsed_data = JSON.parse(stored_data)
+          data = Core.symbolize_recursive(parsed_data)
+          @recording.cache = data[:queries] || []
+          @recording.entities = data[:entities]
          true
        else
          false
@@ -16,16 +21,13 @@ module DatabaseRecorder
      end
 
      def save
-        data = { 'queries' => @recording.queries }
-        data['entities'] = @recording.entities if @recording.entities.any?
+        data = {}
+        data[:metadata] = @recording.metadata unless @recording.metadata.empty?
+        data[:queries] = @recording.queries if @recording.queries.any?
+        data[:entities] = @recording.entities if @recording.entities.any?
        serialized_data = data.to_json
-        self.class.connection.set(@name, serialized_data)
-      end
-
-      class << self
-        def connection
-          @connection ||= ::Redis.new
-        end
+        connection.set(@name, serialized_data)
+        true
      end
    end
  end
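The Redis connection is now resolved per storage instance: a client passed through `storage_options[:connection]` takes precedence, otherwise a default `::Redis.new` is created. A sketch of sharing an application-wide client (the URL and the `$redis` global are only examples):

```ruby
require 'redis'

$redis = Redis.new(url: ENV.fetch('REDIS_URL', 'redis://localhost:6379/0'))

DatabaseRecorder::Config.storage = :redis
DatabaseRecorder::Config.storage_options = { connection: $redis }
```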
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: database_recorder
 version: !ruby/object:Gem::Version
-  version: 0.2.0
+  version: 0.2.1
 platform: ruby
 authors:
 - Mattia Roccoberton
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-04-
+date: 2022-04-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: coderay