database_recorder 0.1.1 → 0.2.2
- checksums.yaml +4 -4
- data/README.md +31 -5
- data/lib/database_recorder/active_record/recorder.rb +3 -3
- data/lib/database_recorder/config.rb +6 -2
- data/lib/database_recorder/core.rb +29 -2
- data/lib/database_recorder/mysql2/client_ext.rb +2 -2
- data/lib/database_recorder/mysql2/recorded_result.rb +4 -4
- data/lib/database_recorder/mysql2/recorder.rb +41 -25
- data/lib/database_recorder/mysql2/statement_ext.rb +1 -1
- data/lib/database_recorder/pg/connection_ext.rb +14 -18
- data/lib/database_recorder/pg/recorded_result.rb +3 -3
- data/lib/database_recorder/pg/recorder.rb +38 -11
- data/lib/database_recorder/recording.rb +25 -13
- data/lib/database_recorder/rspec.rb +2 -1
- data/lib/database_recorder/storage/base.rb +2 -1
- data/lib/database_recorder/storage/file.rb +24 -15
- data/lib/database_recorder/storage/redis.rb +17 -13
- data/lib/database_recorder/version.rb +3 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 48d1f4f88105fa9ac49075181f90208ca051bec2273daaeaf77cca9ad2c1ed77
+  data.tar.gz: 9ef096a48a9a0dafd7bbab635a20805d4b5ec0f06710d9b596554a1f03016c8e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 177da34267bb1fb58fb1b99e68d68430f8d36822facffb9fcd42e2e024176bbfc7be4d57ef807ce834bf1522151255c1614e5a9bb73833d86b70eb502aac0a30
+  data.tar.gz: 43d907dd4dc7902280bb379a4ae20dadca9c89712e62a188be7609795efd0c87db968c0b938e303e1f3e38bf44e23d34ed91be567169e118b42b3c00bd7e212e
data/README.md
CHANGED
@@ -6,7 +6,7 @@
 [![Specs PostgreSQL](https://github.com/blocknotes/database_recorder/actions/workflows/specs_postgres.yml/badge.svg)](https://github.com/blocknotes/database_recorder/actions/workflows/specs_postgres.yml)
 
 Record database queries for testing and development purposes.
-
+Store queries information on files or Redis.
 
 Main features:
 - store the history of the queries of a test when it run (for monitoring);
@@ -15,10 +15,16 @@ Main features:
 
 Sample output: [test.yml](extra/sample.yml)
 
+Creating an environment variable to enable it with RSpec:
+
+![image1](extra/image1.png)
+
 ## Install
 
+### With RSpec
+
 - Add to your Gemfile: `gem 'database_recorder', require: false` (:development, :test groups recommended)
--
+- Add in **rails_helper.rb**:
 
 ```rb
 require 'database_recorder'
@@ -49,9 +55,20 @@ RSpec.configure do |config|
 end
 ```
 
-
+### With plain Ruby
 
-
+```rb
+DatabaseRecorder::Config.db_driver = :pg
+DatabaseRecorder::Core.setup
+DatabaseRecorder::Recording.new(options: { name: 'pg_file' }).tap do |recording|
+  pp recording.start do
+    PG.connect(DB_CONFIG).exec("INSERT INTO tags(name, created_at, updated_at) VALUES('tag1', NOW(), NOW())")
+    PG.connect(DB_CONFIG).exec("SELECT * FROM tags")
+  end
+end
+```
+
+Please check more [examples](examples).
 
 ## Config
 
@@ -61,7 +78,10 @@ Add to your _spec_helper.rb_:
 # Database driver to use: :active_record | :mysql2 | :pg
 DatabaseRecorder::Config.db_driver = :pg
 
-#
+# Log queries format (default: '[DB] %sql [%name]')
+DatabaseRecorder::Config.log_format = '>>> %name -- %sql'
+
+# To print/log the queries while executing the specs: false | true | :color
 DatabaseRecorder::Config.print_queries = true
 
 # Replay the recordings intercepting the queries
@@ -70,6 +90,12 @@ DatabaseRecorder::Config.replay_recordings = true
 # To store the queries: :file | :redis | nil
 DatabaseRecorder::Config.storage = :redis
 # nil to avoid storing the queries
+
+# File storage options
+DatabaseRecorder::Config.storage_options = { recordings_path: '/some/path' }
+
+# Redis storage options
+DatabaseRecorder::Config.storage_options = { connection: Redis.new }
 ```
 
 ## History of the queries
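The README now mentions enabling the recorder through an environment variable before the RSpec setup. A hedged sketch of what such a guard can look like in rails_helper.rb; the variable name DBR is an assumption for illustration, not taken from this diff:

```rb
# Only load and hook database_recorder when explicitly requested.
if ENV['DBR']
  require 'database_recorder'
  require 'database_recorder/rspec' # path matches data/lib/database_recorder/rspec.rb
end
```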
data/lib/database_recorder/active_record/recorder.rb
CHANGED
@@ -18,16 +18,16 @@ module DatabaseRecorder
         if Config.replay_recordings && Recording.from_cache
           Recording.push(sql: sql, binds: binds)
           data = Recording.cached_query_for(sql)
-          return yield if !data || !data[
+          return yield if !data || !data[:result] # cache miss
 
-          RecordedResult.new(data[
+          RecordedResult.new(data[:result][:fields], data[:result][:values])
         else
           yield.tap do |result|
             result_data =
               if result && (result.respond_to?(:fields) || result.respond_to?(:columns))
                 fields = result.respond_to?(:fields) ? result.fields : result.columns
                 values = result.respond_to?(:values) ? result.values : result.to_a
-                {
+                { count: result.count, fields: fields, values: values }
               end
             Recording.push(sql: sql, name: name, binds: type_casted_binds, result: result_data)
           end
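For reference, a minimal sketch of the data shape this recorder now stores and replays. The sample values are illustrative, and the fully qualified constant name is an assumption; the diff above only shows the call as `RecordedResult.new(...)` inside the recorder's namespace:

```rb
# Illustrative shape of one recorded query (symbol keys throughout):
query = {
  sql: 'SELECT * FROM tags',
  binds: [],
  result: { count: 1, fields: %w[id name], values: [[1, 'tag1']] }
}

# On replay the recorder rebuilds a result object from the stored fields/values:
DatabaseRecorder::ActiveRecord::RecordedResult.new(query[:result][:fields], query[:result][:values])
```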
data/lib/database_recorder/config.rb
CHANGED
@@ -8,6 +8,7 @@ module DatabaseRecorder
     include Singleton
 
     DEFAULT_DB_DRIVER = :active_record
+    DEFAULT_LOG_FORMAT = '[DB] %sql [%name]'
     DEFAULT_STORAGE = DatabaseRecorder::Storage::File
 
     DB_DRIVER_VALUES = %i[active_record mysql2 pg].freeze
@@ -17,18 +18,21 @@ module DatabaseRecorder
       redis: DatabaseRecorder::Storage::Redis
     }.freeze
 
-    attr_accessor :db_driver, :print_queries, :replay_recordings, :storage
+    attr_accessor :db_driver, :log_format, :print_queries, :replay_recordings, :storage, :storage_options
 
     class << self
       extend Forwardable
 
-      def_delegators :instance, :db_driver, :
+      def_delegators :instance, :db_driver, :log_format, :log_format=, :print_queries, :replay_recordings,
+                     :replay_recordings=, :storage, :storage_options, :storage_options=
 
       def load_defaults
         instance.db_driver = DEFAULT_DB_DRIVER
+        instance.log_format = DEFAULT_LOG_FORMAT
         instance.print_queries = false
         instance.replay_recordings = false
         instance.storage = DEFAULT_STORAGE
+        instance.storage_options = {}
      end
 
      def db_driver=(value)
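A minimal usage sketch of the two new settings, matching the defaults defined above; the override values are illustrative:

```rb
DatabaseRecorder::Config.load_defaults
DatabaseRecorder::Config.log_format       # => '[DB] %sql [%name]'
DatabaseRecorder::Config.storage_options  # => {}

# Override them as shown in the README section of this diff:
DatabaseRecorder::Config.log_format = '>>> %name -- %sql'
DatabaseRecorder::Config.storage_options = { recordings_path: 'spec/dbr' }
```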
data/lib/database_recorder/core.rb
CHANGED
@@ -7,8 +7,11 @@ module DatabaseRecorder
     def log_query(sql, source = nil)
       log =
         case DatabaseRecorder::Config.print_queries
-        when true
-
+        when true
+          DatabaseRecorder::Config.log_format.sub('%name', source.to_s).sub('%sql', sql)
+        when :color
+          code_ray_sql = CodeRay.scan(sql, :sql).term
+          DatabaseRecorder::Config.log_format.sub('%name', source.to_s).sub('%sql', code_ray_sql || '')
         end
 
       puts log if log
@@ -22,5 +25,29 @@ module DatabaseRecorder
       when :pg then PG::Recorder.setup
       end
     end
+
+    def string_keys_recursive(hash)
+      {}.tap do |h|
+        hash.each do |key, value|
+          h[key.to_s] = transform(value, :string_keys_recursive)
+        end
+      end
+    end
+
+    def symbolize_recursive(hash)
+      {}.tap do |h|
+        hash.each do |key, value|
+          h[key.to_sym] = transform(value, :symbolize_recursive)
+        end
+      end
+    end
+
+    def transform(value, source_method)
+      case value
+      when Hash then method(source_method).call(value)
+      when Array then value.map { |v| transform(v, source_method) }
+      else value
+      end
+    end
   end
 end
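The new key-conversion helpers are used by the storages later in this diff to round-trip recorded data between string keys on disk and symbol keys in memory. A minimal sketch, assuming they are callable on the Core module the same way the storage code calls them:

```rb
parsed = { 'queries' => [{ 'sql' => 'SELECT 1', 'result' => { 'count' => 1, 'fields' => ['x'] } }] }

data = DatabaseRecorder::Core.symbolize_recursive(parsed)
# => { queries: [{ sql: 'SELECT 1', result: { count: 1, fields: ['x'] } }] }

DatabaseRecorder::Core.string_keys_recursive(data)
# => string keys again, ready for YAML.dump / to_json
```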
data/lib/database_recorder/mysql2/client_ext.rb
CHANGED
@@ -4,13 +4,13 @@ module DatabaseRecorder
   module Mysql2
     module ClientExt
       def query(sql, options = {})
-        Recorder.
+        Recorder.store_query(self, sql: sql, source: :query) do
           super
         end
       end
 
       def prepare(*args)
-        Recorder.
+        Recorder.prepare_statement(self, sql: args[0], source: :prepare) do
           super
         end
       end
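How this module gets attached is not shown in this diff; a hedged sketch of the intended effect, assuming `Mysql2::Recorder.setup` prepends it to `Mysql2::Client` (the PG recorder below does the equivalent for `PG::Connection`) and assuming a `DB_CONFIG` hash like the README example:

```rb
# Assumed wiring, normally done by DatabaseRecorder::Mysql2::Recorder.setup:
::Mysql2::Client.prepend DatabaseRecorder::Mysql2::ClientExt

client = ::Mysql2::Client.new(DB_CONFIG)
client.query('SELECT * FROM tags')                 # -> Recorder.store_query(..., source: :query)
client.prepare('SELECT * FROM tags WHERE id = ?')  # -> Recorder.prepare_statement(..., source: :prepare)
```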
data/lib/database_recorder/mysql2/recorded_result.rb
CHANGED
@@ -17,10 +17,10 @@ module DatabaseRecorder
       alias :size :count
 
       def prepare(data)
-        @count = data[
-        @fields = data[
-        @entries = data[
-        # @values = data[
+        @count = data[:count]
+        @fields = data[:fields]
+        @entries = data[:values]
+        # @values = data[:values]
       end
 
       # def server_flags
data/lib/database_recorder/mysql2/recorder.rb
CHANGED
@@ -12,29 +12,16 @@ module DatabaseRecorder
         sql.match?(/information_schema.statistics/)
       end
 
-      def
-
-
-
-
-
-        data = Recording.cached_query_for(sql)
-        return yield unless data # cache miss
+      def format_result(result)
+        { count: result.count, fields: result.fields, values: result.to_a } if result.is_a?(::Mysql2::Result)
+        # else
+        #   last_insert_id = adapter.query('SELECT LAST_INSERT_ID() AS _dbr_last_insert_id').to_a
+        #   { 'count' => last_insert_id.count, 'fields' => ['id'], 'values' => last_insert_id }
       end
 
-
-
-
-        result_data =
-          if result.is_a? ::Mysql2::Result
-            { 'count' => result.count, 'fields' => result.fields, 'values' => result.to_a }
-          # else
-          #   last_insert_id = adapter.query('SELECT LAST_INSERT_ID() AS _dbr_last_insert_id').to_a
-          #   { 'count' => last_insert_id.count, 'fields' => ['id'], 'values' => last_insert_id }
-          end
-
-        Recording.push(sql: sql, result: result_data)
-      end
-      end
+      def prepare_statement(adapter, sql: nil, name: nil, binds: nil, source: nil)
+        @last_prepared = Recording.push_prepared(name: name, sql: sql, binds: binds, source: source)
+        yield if !Config.replay_recordings || Recording.cache.nil?
       end
 
       def setup
@@ -47,9 +34,38 @@ module DatabaseRecorder
         end
       end
 
-      def
-
-
+      def store_prepared_statement(adapter, source:, binds:)
+        # sql = @last_prepared&.send(:[], 'sql')
+        sql = @last_prepared[:sql]
+        Core.log_query(sql, source)
+        if Config.replay_recordings && !Recording.cache.nil?
+          data = Recording.cache.find { |query| query[:sql] == sql }
+          return yield unless data # cache miss
+
+          Recording.push(sql: data[:sql], binds: data[:binds], source: source)
+          RecordedResult.new(data[:result].slice(:count, :fields, :values))
+        else
+          yield.tap do |result|
+            Recording.update_prepared(sql: sql, binds: binds, result: format_result(result), source: source)
+          end
+        end
+      end
+
+      def store_query(adapter, sql:, source:)
+        return yield if ignore_query?(sql)
+
+        Core.log_query(sql, source)
+        if Config.replay_recordings && !Recording.cache.nil?
+          Recording.push(sql: sql, source: source)
+          data = Recording.cached_query_for(sql)
+          return yield unless data # cache miss
+
+          RecordedResult.new.prepare(data[:result].slice(:count, :fields, :values)) if data[:result]
+        else
+          yield.tap do |result|
+            Recording.push(sql: sql, result: format_result(result), source: source)
+          end
+        end
       end
     end
   end
data/lib/database_recorder/pg/connection_ext.rb
CHANGED
@@ -4,47 +4,43 @@ module DatabaseRecorder
   module PG
     module ConnectionExt
       def async_exec(sql)
-        Recorder.
+        Recorder.store_query(sql: sql, source: :async_exec) do
           super
         end
       end
 
       def sync_exec(sql)
-        Recorder.
+        Recorder.store_query(sql: sql, source: :sync_exec) do
           super
         end
       end
 
       def exec(*args)
-        Recorder.
+        Recorder.store_query(sql: args[0], source: :exec) do
           super
         end
       end
 
-      def
-        Recorder.
+      def exec_params(*args)
+        Recorder.store_query(sql: args[0], binds: args[1], source: :exec_params) do
           super
         end
       end
 
-      def
-        Recorder.
+      def exec_prepared(*args)
+        Recorder.store_prepared_statement(name: args[0], binds: args[1], source: :exec_prepared) do
           super
         end
       end
 
-
-
-
-
-
-      # def sync_exec_params(*args)
-      #   puts ">>> #{args[0]}"
-      #   super
-      # end
+      def prepare(*args)
+        Recorder.prepare_statement(name: args[0], sql: args[1], source: :prepare) do
+          super
+        end
+      end
 
-      def
-        Recorder.
+      def query(*args)
+        Recorder.store_query(sql: args[0], source: :query) do
           super
         end
       end
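For context, a sketch of how the intercepted `PG::Connection` calls map onto the recorder methods named above; the connection settings, statement name, and table are illustrative, mirroring the README example:

```rb
conn = ::PG.connect(DB_CONFIG)

conn.exec_params('SELECT * FROM tags WHERE id = $1', [1])
# -> Recorder.store_query(sql: 'SELECT * FROM tags WHERE id = $1', binds: [1], source: :exec_params)

conn.prepare('tag_by_id', 'SELECT * FROM tags WHERE id = $1')
# -> Recorder.prepare_statement(name: 'tag_by_id', sql: '...', source: :prepare)

conn.exec_prepared('tag_by_id', [1])
# -> Recorder.store_prepared_statement(name: 'tag_by_id', binds: [1], source: :exec_prepared)
```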
data/lib/database_recorder/pg/recorded_result.rb
CHANGED
@@ -11,9 +11,9 @@ module DatabaseRecorder
       alias :rows :values
 
       def initialize(data)
-        @count = data[
-        @fields = data[
-        @values = data[
+        @count = data[:count]
+        @fields = data[:fields]
+        @values = data[:values]
       end
 
       def clear; end
data/lib/database_recorder/pg/recorder.rb
CHANGED
@@ -11,29 +11,56 @@ module DatabaseRecorder
         sql.match?(/ pg_attribute |SHOW max_identifier_length|SHOW search_path/)
       end
 
-      def
+      def format_result(result)
+        { count: result.count, fields: result.fields, values: result.values } if result
+      end
+
+      def prepare_statement(sql: nil, name: nil, binds: nil, source: nil)
+        Recording.push_prepared(name: name, sql: sql, binds: binds, source: source)
+        yield if !Config.replay_recordings || Recording.cache.nil?
+      end
+
+      def setup
+        ::PG::Connection.class_eval do
+          prepend ConnectionExt
+        end
+      end
+
+      def store_prepared_statement(name: nil, sql: nil, binds: nil, source: nil)
+        if Config.replay_recordings && !Recording.cache.nil?
+          data = Recording.cache.find { |query| query[:name] == name }
+          return yield unless data # cache miss
+
+          Core.log_query(data[:sql], source)
+          Recording.push(sql: data[:sql], binds: data[:binds], source: source)
+          RecordedResult.new(data[:result].slice(:count, :fields, :values))
+        else
+          Core.log_query(sql, source)
+          yield.tap do |query_result|
+            result = format_result(query_result)
+            query = Recording.update_prepared(name: name, sql: sql, binds: binds, result: result, source: source)
+            Core.log_query(query[:sql], source)
+          end
+        end
+      end
+
+      def store_query(name: nil, sql: nil, binds: nil, source: nil)
         return yield if ignore_query?(sql)
 
         Core.log_query(sql, source)
+        @prepared_statement = nil
         if Config.replay_recordings && !Recording.cache.nil?
-          Recording.push(sql: sql, binds: binds)
+          Recording.push(sql: sql, binds: binds, source: source)
           data = Recording.cached_query_for(sql)
           return yield unless data # cache miss
 
-          RecordedResult.new(data[
+          RecordedResult.new(data[:result].slice(:count, :fields, :values))
         else
           yield.tap do |result|
-
-            Recording.push(sql: sql, binds: binds, result: result_data)
+            Recording.push(name: name, sql: sql, binds: binds, result: format_result(result), source: source)
           end
         end
       end
-
-      def setup
-        ::PG::Connection.class_eval do
-          prepend ConnectionExt
-        end
-      end
     end
   end
 end
data/lib/database_recorder/recording.rb
CHANGED
@@ -4,23 +4,25 @@ require 'forwardable'
 
 module DatabaseRecorder
   class Recording
-    attr_accessor :cache, :entities
-    attr_reader :from_cache, :options, :queries, :started
+    attr_accessor :cache, :entities, :metadata
+    attr_reader :from_cache, :options, :prepared_queries, :queries, :started
 
     def initialize(options: {})
       (@@instances ||= {})[Process.pid] = self
       @cache = nil
       @entities = []
+      @metadata = {}
       @options = options
       @queries = []
       @search_index = 0
+      @@prepared_queries ||= {}
     end
 
     def cached_query_for(sql)
       current = @search_index
       match = cache[@search_index..].find do |item|
         current += 1
-        item[
+        item[:sql] == sql
       end
       return unless match
 
@@ -31,39 +33,49 @@ module DatabaseRecorder
     end
 
     def new_entity(model:, id:)
-      @entities.push(
+      @entities.push(model: model, id: id)
     end
 
     def pull_entity
       @entities.shift
     end
 
-    def push(sql:, binds: nil, result: nil,
-      query = {
+    def push(sql:, name: nil, binds: nil, result: nil, source: nil)
+      query = { name: name, sql: sql, binds: binds, result: result }.compact
       @queries.push(query)
     end
 
+    def push_prepared(name: nil, sql: nil, binds: nil, result: nil, source: nil)
+      query = { name: name, sql: sql, binds: binds, result: result }.compact
+      @@prepared_queries[name || sql] = query
+    end
+
     def start
       @started = true
-      storage = Config.storage&.new(self, name: options[:name])
+      storage = Config.storage&.new(self, name: options[:name], options: Config.storage_options)
      @from_cache = storage&.load
      yield
      storage&.save unless from_cache
      @started = false
-      result = { current_queries: queries.map {
-      result[:stored_queries] = cache.map {
+      result = { current_queries: queries.map { |query| query[:sql] } }
+      result[:stored_queries] = cache.map { |query| query[:sql] } if from_cache
      result
    end
 
-    def
-
+    def update_prepared(name: nil, sql: nil, binds: nil, result: nil, source: nil)
+      query = @@prepared_queries[name || sql]
+      query[:sql] = sql if sql
+      query[:binds] = binds if binds
+      query[:result] = result if result
+      @queries.push(query)
+      query
    end
 
    class << self
      extend Forwardable
 
-      def_delegators :current_instance, :cache, :cached_query_for, :from_cache, :new_entity, :
-        :queries, :
+      def_delegators :current_instance, :cache, :cached_query_for, :from_cache, :new_entity, :prepared_queries,
+                     :pull_entity, :push, :push_prepared, :queries, :update_prepared
 
      def current_instance
        (@@instances ||= {})[Process.pid]
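A minimal sketch of the new prepared-statement flow added above: `push_prepared` registers a statement when it is prepared, and `update_prepared` fills in binds/result and moves it into the query list when it is executed. Names and values here are illustrative:

```rb
recording = DatabaseRecorder::Recording.new(options: { name: 'prepared_example' })

DatabaseRecorder::Recording.push_prepared(name: 'tag_by_id', sql: 'SELECT * FROM tags WHERE id = $1')
DatabaseRecorder::Recording.update_prepared(name: 'tag_by_id', binds: [1],
                                            result: { count: 1, fields: ['id'], values: [[1]] })

recording.queries
# => [{ name: 'tag_by_id', sql: 'SELECT * FROM tags WHERE id = $1', binds: [1], result: { ... } }]
```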
data/lib/database_recorder/rspec.rb
CHANGED
@@ -13,9 +13,10 @@ module DatabaseRecorder
     config.around(:each, :dbr) do |example|
       ref = (example.metadata[:scoped_id] || '').split(':')[-1]
       options = {}
+      options[:name] = "#{example.full_description}__#{ref}"
       options.merge!(example.metadata[:dbr]) if example.metadata[:dbr].is_a?(Hash)
-      options.merge!(example: example, name: "#{example.full_description}__#{ref}")
       Recording.new(options: options).tap do |recording|
+        recording.metadata = { example: example.id, started_at: Time.now }
         result = recording.start { example.run }
         if options[:verify_queries] && result[:stored_queries]
           expect(result[:stored_queries]).to match_array(result[:current_queries])
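For reference, a hedged example of how a spec opts into this hook; the `:dbr` tag and the `verify_queries` option come from the hook above, while the model and expectations are illustrative:

```rb
RSpec.describe 'Tags', :dbr do
  it 'records the queries of this example' do
    Tag.create!(name: 'tag1') # hypothetical model
  end
end

RSpec.describe 'Tags', dbr: { verify_queries: true } do
  it 'compares the replayed queries with the stored ones' do
    Tag.all.to_a
  end
end
```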
data/lib/database_recorder/storage/file.rb
CHANGED
@@ -1,14 +1,18 @@
 # frozen_string_literal: true
 
+require 'fileutils'
+require 'yaml'
+
 module DatabaseRecorder
   module Storage
     class File < Base
       def load
-        stored_data = ::File.exist?(
+        stored_data = ::File.exist?(storage_path) ? ::File.read(storage_path) : false
         if stored_data
-
-
-          @recording.
+          parsed_data = YAML.load(stored_data) # rubocop:disable Security/YAMLLoad
+          data = Core.symbolize_recursive(parsed_data)
+          @recording.cache = data[:queries] || []
+          @recording.entities = data[:entities]
           true
         else
           false
@@ -16,10 +20,22 @@ module DatabaseRecorder
       end
 
       def save
-        data = {
-        data[
-
-
+        data = {}
+        data[:metadata] = @recording.metadata unless @recording.metadata.empty?
+        data[:queries] = @recording.queries if @recording.queries.any?
+        data[:entities] = @recording.entities if @recording.entities.any?
+        serialized_data = ::YAML.dump(Core.string_keys_recursive(data))
+        ::File.write(storage_path, serialized_data)
+        true
+      end
+
+      def storage_path
+        @storage_path ||= begin
+          name = normalize_name(@name)
+          path = @options[:recordings_path] || 'spec/dbr'
+          ::FileUtils.mkdir_p(path)
+          "#{path}/#{name}.yml"
+        end
       end
 
       private
@@ -27,13 +43,6 @@ module DatabaseRecorder
       def normalize_name(string)
         string.gsub(%r{[:/]}, '-').gsub(/[^\w-]/, '_')
       end
-
-      def record_file
-        name = normalize_name(@name)
-        path = 'spec/dbr'
-        FileUtils.mkdir_p(path)
-        "#{path}/#{name}.yml"
-      end
     end
   end
 end
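A short usage sketch of the new `recordings_path` storage option introduced above; the path value is illustrative:

```rb
DatabaseRecorder::Config.storage = :file
DatabaseRecorder::Config.storage_options = { recordings_path: 'spec/fixtures/dbr' }

# A recording named "pg_file" is then written to spec/fixtures/dbr/pg_file.yml;
# without the option the default path stays spec/dbr, as before.
```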
data/lib/database_recorder/storage/redis.rb
CHANGED
@@ -1,14 +1,21 @@
 # frozen_string_literal: true
 
+require 'json'
+
 module DatabaseRecorder
   module Storage
     class Redis < Base
+      def connection
+        @connection ||= @options[:connection] || ::Redis.new
+      end
+
       def load
-        stored_data =
+        stored_data = connection.get(@name)
         if stored_data
-
-
-          @recording.
+          parsed_data = JSON.parse(stored_data)
+          data = Core.symbolize_recursive(parsed_data)
+          @recording.cache = data[:queries] || []
+          @recording.entities = data[:entities]
           true
         else
           false
@@ -16,16 +23,13 @@ module DatabaseRecorder
       end
 
       def save
-        data = {
-        data[
+        data = {}
+        data[:metadata] = @recording.metadata unless @recording.metadata.empty?
+        data[:queries] = @recording.queries if @recording.queries.any?
+        data[:entities] = @recording.entities if @recording.entities.any?
         serialized_data = data.to_json
-
-
-
-      class << self
-        def connection
-          @connection ||= ::Redis.new
-        end
+        connection.set(@name, serialized_data)
+        true
      end
    end
  end
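A short usage sketch of the new `connection` storage option; the Redis URL is illustrative, and without the option the storage falls back to `::Redis.new`, as shown above:

```rb
DatabaseRecorder::Config.storage = :redis
DatabaseRecorder::Config.storage_options = { connection: Redis.new(url: ENV['REDIS_URL']) }
```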
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: database_recorder
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.2.2
 platform: ruby
 authors:
 - Mattia Roccoberton
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-04-
+date: 2022-04-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: coderay