squcumber-postgres 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 0db456e704910175875824a14312d2c5668569d8
+   data.tar.gz: 9e198d71fd8649646683b2df6be14a88e139a662
+ SHA512:
+   metadata.gz: 8f9db0cd15453cd1fac71a582fd821b56f1cf51fbc22d88e1446fdf1e7673383e2617a9c769e4db6f4877b092ea165fd6e46e0c2865538711dac00ed1e0f97a1
+   data.tar.gz: 4786343ae7714bfc33bc1779008a1aaacd85dd3569f5a09c9e673e076f5c96bfd532e776fed95da0c18034c90037229e864970d55e0122697eceecb5f57486ef
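
These checksums cover the two members of the published .gem archive. For reference, they can be recomputed from a downloaded copy; the sketch below is illustrative only and assumes the gem file sits in the current directory under the name shown.

# Illustrative sketch: recompute the SHA1 digests listed above from a local
# copy of the gem (a .gem file is a tar archive holding metadata.gz and
# data.tar.gz). The filename is an assumption.
require 'digest'
require 'rubygems/package'

File.open('squcumber-postgres-0.0.1.gem', 'rb') do |gem_file|
  Gem::Package::TarReader.new(gem_file) do |tar|
    tar.each do |entry|
      next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
      puts "#{entry.full_name}: #{Digest::SHA1.hexdigest(entry.read)}"
    end
  end
end
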
data/Rakefile ADDED
@@ -0,0 +1,2 @@
+ load 'lib/squcumber-postgres/tasks/test.rake'
+ project_name = 'squcumber-postgres'
data/lib/squcumber-postgres.rb ADDED
@@ -0,0 +1 @@
+ Dir[File.dirname(__FILE__) + '/squcumber-postgres/*.rb'].each { |file| require file }
data/lib/squcumber-postgres/mock/database.rb ADDED
@@ -0,0 +1,129 @@
+ require 'pg'
+
+ module Squcumber
+   module Postgres
+     module Mock
+       class Database
+         DELETE_DB_WHEN_FINISHED = ENV['KEEP_TEST_DB'].to_i == 1 ? false : true
+         TEST_DB_NAME_OVERRIDE = ENV.fetch('TEST_DB_NAME_OVERRIDE', '')
+
+         def initialize(production_database)
+           @production_database = production_database or raise ArgumentError, 'No production database provided'
+
+           test_db_name_postfix = TEST_DB_NAME_OVERRIDE.empty? ? rand(10000..99999) : TEST_DB_NAME_OVERRIDE
+           @test_db_name = "test_env_#{test_db_name_postfix}"
+
+           if @production_database.exec("select datname from pg_database where datname like '%#{@test_db_name}%'").num_tuples != 0
+             @production_database.exec("drop database #{@test_db_name}")
+           end
+           @production_database.exec("create database #{@test_db_name}")
+
+           @testing_database = PG.connect(
+             host: ENV['DB_HOST'],
+             port: ENV['DB_PORT'],
+             dbname: @test_db_name,
+             user: ENV['DB_USER'],
+             password: ENV['DB_PASSWORD']
+           )
+         end
+
+         def setup(schemas)
+           schemas.each do |schema|
+             exec("drop schema if exists #{schema} cascade")
+             exec("create schema #{schema}")
+           end
+         end
+
+         def truncate_all_tables
+           @testing_database
+             .exec("select schemaname || '.' || tablename as schema_and_table from pg_tables where tableowner = '#{ENV['DB_USER']}'")
+             .map { |row| row['schema_and_table'] }
+             .each { |schema_and_table| exec("truncate table #{schema_and_table}") }
+         end
+
+         def exec(statement)
+           @testing_database.exec(statement)
+         end
+         alias_method :query, :exec
+
+         def exec_file(path)
+           exec(File.read("#{path}"))
+         end
+         alias_method :query_file, :exec_file
+
+         # Redshift does not allow to copy a table schema to another database, i.e.
+         # `create table some_db.some_table (like another_db.some_table)` cannot be used.
+         def copy_table_def_from_prod(schema, table)
+           create_table_statement = _get_create_table_statement(schema, table)
+           exec(create_table_statement)
+         end
+
+         def copy_table_defs_from_prod(tables)
+           tables.each do |obj|
+             obj.each { |schema, table| copy_table_def_from_prod(schema, table) }
+           end
+         end
+
+         def mock(mock)
+           mock.each do |schema_and_table, data|
+             raise "Mock data for #{schema_and_table} is not correctly formatted: must be Array but was #{data.class}" unless data.is_a?(Array)
+             data.each { |datum| insert_mock_values(schema_and_table, datum) }
+           end
+         end
+
+         def insert_mock_values(schema_and_table, mock)
+           schema, table = schema_and_table.split('.')
+           keys = []
+           vals = []
+           mock.each do |key, value|
+             unless value.nil?
+               keys << key
+               vals << (value.is_a?(String) ? "'#{value}'" : value)
+             end
+           end
+           exec("insert into #{schema}.#{table} (#{keys.join(',')}) values (#{vals.join(',')})") unless vals.empty?
+         end
+
+         def destroy
+           @testing_database.close()
+
+           if DELETE_DB_WHEN_FINISHED
+             attempts = 0
+             begin
+               attempts += 1
+               @production_database.exec("drop database #{@test_db_name}")
+             rescue PG::ObjectInUse
+               sleep 5
+               retry unless attempts >= 3
+             end
+           else
+             puts "\nTest database has been kept alive: #{@test_db_name}"
+           end
+
+           @production_database.close()
+         end
+
+         private
+
+         def _get_create_table_statement(schema, table)
+           @production_database.exec("set search_path to '$user', #{schema};")
+           table_schema = @production_database.query("select * from information_schema.columns where table_schema = '#{schema}' and table_name = '#{table}';")
+           raise "Sorry, there is no table information for #{schema}.#{table}" if table_schema.num_tuples == 0
+
+           definitions = _get_column_definitions(table_schema).join(',')
+
+           "create table if not exists #{schema}.#{table} (#{definitions});"
+         end
+
+         def _get_column_definitions(table_definition)
+           table_definition.map do |definition|
+             if definition['data_type'].eql?('character')
+               definition['data_type'] = "#{definition['data_type']}(#{definition['character_maximum_length'].to_s})"
+             end
+             "#{definition['column_name']} #{definition['data_type']} default null"
+           end
+         end
+       end
+     end
+   end
+ end
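
The class above is what the Cucumber support code further down in this diff instantiates as TESTING_DATABASE. A minimal standalone sketch of its lifecycle, assuming the DB_* environment variables point at a reachable Postgres instance; the schema, table, and mock row are invented for illustration:

# Sketch only: drive Squcumber::Postgres::Mock::Database end to end.
require 'pg'
require 'squcumber-postgres/mock/database'

production = PG.connect(
  host: ENV['DB_HOST'], port: ENV['DB_PORT'], dbname: ENV['DB_NAME'],
  user: ENV['DB_USER'], password: ENV['DB_PASSWORD']
)

db = Squcumber::Postgres::Mock::Database.new(production)
db.setup(['sales'])                                      # recreate the schema in the test db
db.copy_table_defs_from_prod([{ 'sales' => 'orders' }])  # clone column definitions (all nullable)
db.mock({ 'sales.orders' => [{ 'id' => 1, 'status' => 'shipped' }] })
db.query('select count(*) from sales.orders').each { |row| puts row }
db.destroy                                               # drops the test db unless KEEP_TEST_DB=1
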
data/lib/squcumber-postgres/rake/task.rb ADDED
@@ -0,0 +1,51 @@
+ require 'cucumber'
+ require 'cucumber/rake/task'
+ require 'rake'
+
+ module Squcumber
+   module Postgres
+     module Rake
+       class Task
+         include ::Rake::DSL if defined? ::Rake::DSL
+
+         def install_tasks
+           namespace :test do
+             # Auto-generate Rake tasks for each feature and each of their parent directories
+             @features_dir = File.join(FileUtils.pwd, 'features')
+             features = Dir.glob("#{@features_dir}/**/*.feature")
+             parent_directories = features.map { |f| f.split('/')[0..-2].join('/') }.uniq
+
+             features.each do |feature|
+               feature_name = feature.gsub(File.join(FileUtils.pwd, 'features/'), '').gsub('.feature', '')
+               task_name = feature_name.gsub('/', ':')
+               desc "Run SQL tests for feature #{feature_name}"
+               task "sql:#{task_name}".to_sym, [:scenario_line_number] do |_, args|
+                 cucumber_task_name = "cucumber_#{task_name}".to_sym
+                 ::Cucumber::Rake::Task.new(cucumber_task_name) do |t|
+                   line_number = args[:scenario_line_number].nil? ? '' : ":#{args[:scenario_line_number]}"
+                   t.cucumber_opts = "#{feature}#{line_number} --format pretty --format html --out #{feature_name.gsub('/','_')}.html --require #{File.dirname(__FILE__)}/../support --require #{File.dirname(__FILE__)}/../step_definitions"
+                 end
+                 ::Rake::Task[cucumber_task_name].execute
+               end
+             end
+
+             parent_directories.each do |feature|
+               feature_name = feature.gsub(File.join(FileUtils.pwd, 'features/'), '').gsub('.feature', '')
+               task_name = feature_name.gsub('/', ':')
+               desc "Run SQL tests for all features in #{feature_name}"
+               task "sql:#{task_name}".to_sym do
+                 cucumber_task_name = "cucumber_#{task_name}".to_sym
+                 ::Cucumber::Rake::Task.new(cucumber_task_name) do |t|
+                   t.cucumber_opts = "#{feature} --format pretty --format html --out #{feature_name.gsub('/','_')}.html --require #{File.dirname(__FILE__)}/../support --require #{File.dirname(__FILE__)}/../step_definitions"
+                 end
+                 ::Rake::Task[cucumber_task_name].execute
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
+
+ Squcumber::Postgres::Rake::Task.new.install_tasks
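
The generator above turns every file under features/ into a namespaced Rake task: features/kpi/sales.feature becomes test:sql:kpi:sales (with an optional scenario line number argument), and each parent directory gets an aggregate task such as test:sql:kpi. The sketch below shows one plausible way a consuming project loads it; the feature paths are invented and it assumes the gem is on the load path.

# Sketch only: Rakefile of a project using these tasks. Requiring the file
# installs them via the install_tasks call at the bottom of the listing above.
require 'squcumber-postgres/rake/task'

# Example invocations (shell):
#   rake test:sql:kpi:sales        # run features/kpi/sales.feature
#   rake test:sql:kpi:sales[12]    # run only the scenario starting at line 12
#   rake test:sql:kpi              # run every feature below features/kpi/
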
data/lib/squcumber-postgres/step_definitions/common_steps.rb ADDED
@@ -0,0 +1,159 @@
+ require 'json'
+
+ $feature_name ||= ''
+ $setup ||= false
+
+ Before do |scenario|
+   current_scenario_name = scenario.feature.name rescue nil
+   if current_scenario_name != $feature_name
+     $feature_name = current_scenario_name
+     $setup = false
+   end
+ end
+
+ # Takes a path and then sequentially adds what's provided in `data`
+ # to be later executed in the step `the given SQL files are executed`
+ # +path+:: relative to root of project, e.g. "jobs/kpi/sales"
+ Given(/^the SQL files in the path "?([^"]*)"?:$/) do |path, data|
+   @sql_file_path = path
+   @sql_files_to_execute = data.hashes.map { |e| "#{@sql_file_path}/#{e['file']}" }
+ end
+
+ Given(/^the SQL file path "?([^\s"]+)"?$/) do |path|
+   @sql_file_path = path
+ end
+
+ Given(/^Pending: (.*)/) { |reason| pending(reason) }
+
+ Given(/^their schema dependencies:$/) do |data|
+   unless $setup
+     schemas = data.hashes.map { |hash| hash['schema'] }.compact.uniq
+     TESTING_DATABASE.setup(schemas)
+   end
+ end
+
+ Given(/^their table dependencies:$/) do |data|
+   if $setup
+     silence_streams(STDERR) do
+       TESTING_DATABASE.truncate_all_tables()
+     end
+   else
+     tables = []
+     schemas = []
+     data.hashes.each do |hash|
+       schema, table = hash['table'].split('.')
+       schemas << schema
+       tables << { schema => table }
+     end
+     silence_streams(STDERR) do
+       TESTING_DATABASE.setup(schemas.compact.uniq)
+       TESTING_DATABASE.copy_table_defs_from_prod(tables)
+     end
+     $setup = true
+   end
+ end
+
+ Given(/^the following defaults for "?([^\s"]+)"? \(if not stated otherwise\):$/) do |table, data|
+   @defaults ||= {}
+   @defaults[table] = data.hashes[0]
+ end
+
+ Given(/a clean environment/) do
+   silence_streams(STDERR) do
+     TESTING_DATABASE.truncate_all_tables()
+   end
+ end
+
+ Given(/^the existing table "?([a-zA-Z0-9_]+)\.([a-zA-Z0-9_]+)"?( with date placeholders)?:$/) do |schema, table, placeholder, data|
+   mock_data = data.hashes
+   @defaults ||= {}
+   defaults = @defaults["#{schema}.#{table}"]
+
+   unless defaults.nil? or defaults.empty?
+     mock_data.map! { |entry| defaults.merge(entry) }
+   end
+
+   mock_data = convert_mock_values(mock_data) if placeholder
+
+   TESTING_DATABASE.mock(
+     Hash["#{schema}.#{table}", mock_data]
+   )
+ end
+
+ When(/^the given SQL files are executed$/) do
+   silence_streams(STDERR) do
+     @sql_files_to_execute.each { |file| TESTING_DATABASE.exec_file(file) }
+   end
+ end
+
+ When(/^the SQL file "?([^\s]+)"? is executed/) do |file|
+   silence_streams(STDERR) do
+     TESTING_DATABASE.exec_file("#{@sql_file_path}/#{file}")
+   end
+ end
+
+ When(/^the resulting table "?([^\s"]*)"? is queried(?:, ordered by "?([^"]*)"?)?/) do |table, sort_column|
+   sort_statement = (sort_column.nil? or sort_column.empty?) ? '' : "order by #{sort_column}"
+   @result = TESTING_DATABASE.query("select * from #{table} #{sort_statement};").map { |e| e }
+ end
+
+ Then(/^the result starts with.*$/) do |data|
+   actual = @result[0..(data.hashes.length - 1)] || []
+   expected = data.hashes || []
+
+   expected.each_with_index do |hash, i|
+     raise("Does not start with expected result, got:\n#{format_error(data, actual)}") unless actual[i].all? do |key, value|
+       values_match(value, hash[key]) # actual,expected
+     end
+   end
+ end
+
+ Then(/^the result includes.*$/) do |data|
+   actual = @result || []
+   expected = data.hashes || []
+
+   expected.each do |hash|
+     raise("Result is not included, got:\n#{format_error(data, actual)}") unless actual.any? do |row|
+       row.all? do |key, value|
+         values_match(value, hash[key]) # actual,expected
+       end
+     end
+   end
+ end
+
+ Then(/^the result does not include.*$/) do |data|
+   actual = @result || []
+   expected = data.hashes || []
+
+   expected.each do |hash|
+     raise("Result is included, got:\n#{format_error(data, actual)}") if actual.any? do |row|
+       row.all? do |key, value|
+         values_match(value, hash[key]) # actual,expected
+       end
+     end
+   end
+ end
+
+ Then(/^the result exactly matches.*$/) do |data|
+   actual = @result || []
+   expected = data.hashes || []
+
+   raise("Does not match exactly, got:\n#{format_error(data, actual)}") if actual.length != expected.length
+
+   actual.each_with_index do |row, i|
+     raise("Does not match exactly, got:\n#{format_error(data, actual)}") unless (expected[i] || {}).all? do |key, value|
+       values_match(row[key], value) # actual,expected
+     end
+   end
+
+   expected.each_with_index do |hash, i|
+     raise("Does not match exactly, got:\n#{format_error(data, actual)}") unless (actual[i] || {}).all? do |key, value|
+       values_match(value, hash[key]) # actual,expected
+     end
+   end
+ end
+
+ Then(/^the result is empty.*$/) do
+   actual = @result || []
+   raise("Result is not empty, got:\n#{format_error({}, actual)}") unless actual.length == 0
+ end
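
Several of these steps cooperate: defaults registered via "the following defaults for ..." are merged into each row of "the existing table ...", and date placeholders are resolved by convert_mock_values (defined in the matchers support file below) before the rows reach TESTING_DATABASE.mock. A sketch of that data flow, with invented table and column names, evaluated in the Cucumber World where these helpers are mixed in:

# Sketch only: what one row of "the existing table demo.events with date
# placeholders:" goes through when defaults were registered beforehand.
defaults  = { 'id' => 1, 'status' => 'ok', 'created_at' => 'today' }  # from the defaults step
row       = { 'status' => 'failed', 'created_at' => '2 days ago' }    # one row of the step's table
merged    = defaults.merge(row)
converted = convert_mock_values([merged])  # resolves 'today', 'yesterday', 'N days/months ago'
# => [{ 'id' => 1, 'status' => 'failed', 'created_at' => (Date.today - 2).to_s }]
TESTING_DATABASE.mock({ 'demo.events' => converted })
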
data/lib/squcumber-postgres/support/database.rb ADDED
@@ -0,0 +1,17 @@
+ require_relative '../mock/database'
+
+ print 'Connect to production database...'
+ production_database = PG.connect(
+   host: ENV['DB_HOST'],
+   port: ENV['DB_PORT'],
+   dbname: ENV['DB_NAME'],
+   user: ENV['DB_USER'],
+   password: ENV['DB_PASSWORD']
+ )
+ puts 'DONE.'
+
+ TESTING_DATABASE ||= Squcumber::Postgres::Mock::Database.new(production_database)
+
+ at_exit do
+   TESTING_DATABASE.destroy rescue nil
+ end
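
This support file is pulled in via the --require flags set by the Rake tasks and reads its connection details from the environment. As a checklist (the values below are placeholders):

# Sketch only: environment expected before a test run; values are invented.
ENV['DB_HOST']     = 'localhost'
ENV['DB_PORT']     = '5432'
ENV['DB_NAME']     = 'analytics'   # database whose table definitions get copied
ENV['DB_USER']     = 'etl'
ENV['DB_PASSWORD'] = 'secret'
# Optional switches used elsewhere in this gem:
#   KEEP_TEST_DB=1            keep the generated test_env_* database afterwards
#   TEST_DB_NAME_OVERRIDE=ci  use test_env_ci instead of a random suffix
#   SHOW_STDOUT=1             do not silence stderr while SQL files run
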
data/lib/squcumber-postgres/support/matchers.rb ADDED
@@ -0,0 +1,45 @@
+ module MatcherHelpers
+   def values_match(actual, expected)
+     if expected.eql?('today')
+       actual.match(/#{Regexp.quote(Date.today.to_s)}/)
+     elsif expected.eql?('yesterday')
+       actual.match(/#{Regexp.quote((Date.today - 1).to_s)}/)
+     elsif expected.eql?('any_date')
+       actual.match(/^\d{4}\-\d{2}\-\d{2} \d{2}:\d{2}:\d{2}$/)
+     elsif expected.eql?('any_string')
+       true if actual.is_a?(String) or actual.nil?
+     elsif expected.eql?('false') or expected.eql?('true')
+       true if actual.eql?(expected[0])
+     elsif !expected.nil?
+       actual ||= ''
+       actual.eql?(expected)
+     else # we have not mocked this, so ignore it
+       true
+     end
+   end
+
+   def timetravel(date, i, method); i > 0 ? timetravel(date.send(method.to_sym), i - 1, method) : date; end
+
+   def convert_mock_values(mock_data)
+     mock_data.map do |entry|
+       entry.each do |key, value|
+         entry[key] = case value
+         when /today/
+           Date.today.to_s
+         when /yesterday/
+           Date.today.prev_day.to_s
+         when /\s*\d+\s+month(s)?\s+ago\s*/
+           number_of_months = value.match(/\d+/)[0].to_i
+           timetravel(Date.today, number_of_months, :prev_month).to_s
+         when /\s*\d+\s+day(s)?\s+ago\s*/
+           number_of_days = value.match(/\d+/)[0].to_i
+           timetravel(Date.today, number_of_days, :prev_day).to_s
+         else
+           value
+         end
+       end
+     end
+   end
+ end
+
+ World(MatcherHelpers)
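
For reference, this is how values_match treats the special expected values when called from the step definitions; actual values arrive as the strings PG returns. The sketch assumes it runs inside the Cucumber World where MatcherHelpers is mixed in.

# Sketch only: each call returns a truthy value under the rules above.
values_match('2016-07-01 09:30:00', 'any_date')  # timestamp-shaped string
values_match(Date.today.to_s, 'today')           # contains today's date
values_match('t', 'true')                        # Postgres booleans come back as 't'/'f'
values_match('whatever', 'any_string')           # any String (or nil) passes
values_match('42', '42')                         # plain values fall back to string equality
values_match('anything', nil)                    # not mocked, so it is ignored
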
data/lib/squcumber-postgres/support/output.rb ADDED
@@ -0,0 +1,43 @@
+ module OutputHelpers
+   def silence_streams(*streams)
+     unless ENV['SHOW_STDOUT'].to_i == 1
+       begin
+         on_hold = streams.collect { |stream| stream.dup }
+         streams.each do |stream|
+           stream.reopen('/dev/null')
+           stream.sync = true
+         end
+         yield
+       ensure
+         streams.each_with_index do |stream, i|
+           stream.reopen(on_hold[i])
+         end
+       end
+     end
+   end
+
+   def format_error(expected_data, actual_result)
+     expectation_count = (expected_data.rows.count rescue nil) || 0
+     if expectation_count == 0
+       table_headings = actual_result[0].keys
+     else
+       table_headings = expected_data.hashes[0].keys
+     end
+     print_data = Hash[table_headings.map { |key| [key, key.length] }]
+
+     actual_result.each do |row|
+       row.each do |key, value|
+         print_data[key] = value.length if (value.to_s.length > print_data[key].to_i)
+       end
+     end
+
+     error = '| ' + table_headings.map { |k| k.ljust(print_data[k], ' ') }.join(' | ') + " |\n"
+     error << actual_result.map do |row|
+       '| ' + table_headings.map { |k| (row[k] || '').ljust(print_data[k], ' ') }.join(' | ') + ' |'
+     end.join("\n") + "\n"
+
+     error
+   end
+ end
+
+ World(OutputHelpers)
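
format_error pads the actual result into a small table for the error messages raised by the Then steps. A sketch with invented rows, again assuming the Cucumber World has OutputHelpers mixed in:

# Sketch only: rows are invented; nil for expected_data makes the helper fall
# back to the headings of the actual result.
rows = [
  { 'id' => '1',  'status' => 'shipped' },
  { 'id' => '22', 'status' => 'pending' }
]
puts format_error(nil, rows)
# | id | status  |
# | 1  | shipped |
# | 22 | pending |
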
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,28 @@
+ # encoding: utf-8
+
+ require 'rspec/collection_matchers'
+ require 'codeclimate-test-reporter'
+ CodeClimate::TestReporter.start
+
+ RSpec.configure do |config|
+   config.color = true
+
+   # Redirect stderr and stdout to get rid of info messages during execution
+   # of specs.
+   # Via http://stackoverflow.com/questions/15430551/suppress-console-output-during-rspec-tests
+   unless ENV['SPEC_SHOW_STDOUT'] == '1'
+     original_stderr = $stderr
+     original_stdout = $stdout
+     config.before(:all) do
+       $stderr = File.new('/dev/null', 'w')
+       $stdout = File.new('/dev/null', 'w')
+     end
+     config.after(:all) do
+       $stderr = original_stderr
+       $stdout = original_stdout
+     end
+   end
+ end
+
+ # Via https://www.relishapp.com/rspec/rspec-core/docs/example-groups/shared-examples
+ Dir["./spec/support/**/*.rb"].sort.each { |f| require f }
data/spec/squcumber-postgres/mock/database_spec.rb ADDED
@@ -0,0 +1,447 @@
1
+ require_relative '../../spec_helper'
2
+ require_relative '../../../lib/squcumber-postgres/mock/database'
3
+
4
+ module Squcumber::Postgres::Mock
5
+ describe Database do
6
+ let(:production_database) { double(PG::Connection) }
7
+ let(:testing_database) { double(PG::Connection) }
8
+
9
+ let(:empty_result) { double(PG::Result) }
10
+ let(:non_empty_result) { double(PG::Result) }
11
+
12
+ before(:each) do
13
+ allow(ENV).to receive(:[]).with('DB_HOST').and_return('some.db.host')
14
+ allow(ENV).to receive(:[]).with('DB_PORT').and_return(1234)
15
+ allow(ENV).to receive(:[]).with('DB_USER').and_return('some_user')
16
+ allow(ENV).to receive(:[]).with('DB_PASSWORD').and_return('s0m3_p4ssw0rd')
17
+ allow(ENV).to receive(:[]).with('DB_NAME').and_return('some_db')
18
+
19
+ allow(PG).to receive(:connect).and_return(testing_database)
20
+ allow(production_database).to receive(:exec).with(/^\s*create\s+database\s+/)
21
+ allow(production_database).to receive(:exec).with(/^\s*select\s+datname\s+from\s+pg_database\s+/).and_return(empty_result)
22
+ allow(production_database).to receive(:exec).with(/^\s*set\s+search_path\s+to\s+/)
23
+ allow(production_database).to receive(:exec).with(/^\s*drop\s+database\s+/)
24
+ allow(testing_database).to receive(:exec)
25
+
26
+ allow(empty_result).to receive(:num_tuples).and_return(0)
27
+ allow(non_empty_result).to receive(:num_tuples).and_return(1)
28
+ end
29
+
30
+ describe '#initialize' do
31
+ context 'when all arguments are provided' do
32
+ context 'and the database does not exist' do
33
+ it 'does not raise an error' do
34
+ expect { described_class.new(production_database) }.to_not raise_error
35
+ end
36
+
37
+ it 'generates a testing database name with expected pattern' do
38
+ dummy = described_class.new(production_database)
39
+ expect(dummy.instance_variable_get(:@test_db_name)).to match(/^test_env_\d{5}$/)
40
+ end
41
+
42
+ it 'does not try to drop the database' do
43
+ described_class.new(production_database)
44
+ expect(production_database).to_not have_received(:exec).with(/^drop\s+database\s+/)
45
+ end
46
+
47
+ it 'creates the testing database' do
48
+ dummy = described_class.new(production_database)
49
+ test_db_name = dummy.instance_variable_get(:@test_db_name)
50
+ expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/)
51
+ end
52
+
53
+ it 'connects to testing database in correct order with correct attributes' do
54
+ dummy = described_class.new(production_database)
55
+
56
+ test_db_name = dummy.instance_variable_get(:@test_db_name)
57
+ expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/).ordered
58
+
59
+ expect(PG).to have_received(:connect).with(
60
+ host: 'some.db.host',
61
+ port: 1234,
62
+ dbname: test_db_name,
63
+ user: 'some_user',
64
+ password: 's0m3_p4ssw0rd'
65
+ ).ordered
66
+ end
67
+ end
68
+
69
+ context 'and the database name is being overridden' do
70
+ let(:testing_db_name) { 'some_db_name' }
71
+
72
+ before(:each) do
73
+ stub_const("#{described_class}::TEST_DB_NAME_OVERRIDE", testing_db_name)
74
+ end
75
+
76
+ it 'does not raise an error' do
77
+ expect { described_class.new(production_database) }.to_not raise_error
78
+ end
79
+
80
+ it 'generates a testing database name with expected pattern' do
81
+ dummy = described_class.new(production_database)
82
+ expect(dummy.instance_variable_get(:@test_db_name)).to match(/^test_env_#{Regexp.quote(testing_db_name)}$/)
83
+ end
84
+
85
+ it 'does not try to drop the database' do
86
+ described_class.new(production_database)
87
+ expect(production_database).to_not have_received(:exec).with(/^drop\s+database\s+/)
88
+ end
89
+
90
+ it 'creates the testing database' do
91
+ described_class.new(production_database)
92
+ expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+test_env\_#{Regexp.quote(testing_db_name)}\s*;?\s*$/)
93
+ end
94
+
95
+ it 'connects to testing database in correct order with correct attributes' do
96
+ described_class.new(production_database)
97
+ expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+test_env_#{Regexp.quote(testing_db_name)}\s*;?\s*$/).ordered
98
+
99
+ expect(PG).to have_received(:connect).with(
100
+ host: 'some.db.host',
101
+ port: 1234,
102
+ dbname: 'test_env_' + testing_db_name,
103
+ user: 'some_user',
104
+ password: 's0m3_p4ssw0rd'
105
+ ).ordered
106
+ end
107
+ end
108
+
109
+ context 'and the database already exists' do
110
+ before(:each) do
111
+ allow(production_database).to receive(:exec).with(/^select\s+datname\s+from\s+pg_database\s+/).and_return(non_empty_result)
112
+ end
113
+
114
+ it 'does not raise an error' do
115
+ expect { described_class.new(production_database) }.to_not raise_error
116
+ end
117
+
118
+ it 'generates a testing database name with expected pattern' do
119
+ dummy = described_class.new(production_database)
120
+ expect(dummy.instance_variable_get(:@test_db_name)).to match(/^test_env_\d{5}$/)
121
+ end
122
+
123
+ it 'drops the existing testing database' do
124
+ described_class.new(production_database)
125
+ expect(production_database).to have_received(:exec).with(/^drop\s+database\s+/)
126
+ end
127
+
128
+ it 'creates the testing database' do
129
+ dummy = described_class.new(production_database)
130
+ test_db_name = dummy.instance_variable_get(:@test_db_name)
131
+ expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/)
132
+ end
133
+
134
+ it 'connects to testing database in correct order with correct attributes' do
135
+ dummy = described_class.new(production_database)
136
+
137
+ test_db_name = dummy.instance_variable_get(:@test_db_name)
138
+ expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/).ordered
139
+
140
+ expect(PG).to have_received(:connect).with(
141
+ host: 'some.db.host',
142
+ port: 1234,
143
+ dbname: test_db_name,
144
+ user: 'some_user',
145
+ password: 's0m3_p4ssw0rd'
146
+ ).ordered
147
+ end
148
+ end
149
+ end
150
+
151
+ context 'when some arguments are missing' do
152
+ it 'raises an error when production database is not provided' do
153
+ expect { described_class.new(nil) }.to raise_error(ArgumentError, 'No production database provided')
154
+ end
155
+ end
156
+ end
157
+
158
+ describe '#setup' do
159
+ let(:schemas) { ['some_schema', 'another_schema'] }
160
+
161
+ before(:each) do
162
+ @dummy = described_class.new(production_database)
163
+ allow(@dummy).to receive(:exec)
164
+ @dummy.setup(schemas)
165
+ end
166
+
167
+ it 'drops and creates all schemas' do
168
+ expect(@dummy).to have_received(:exec).with('drop schema if exists some_schema cascade').ordered
169
+ expect(@dummy).to have_received(:exec).with('create schema some_schema').ordered
170
+ expect(@dummy).to have_received(:exec).with('drop schema if exists another_schema cascade').ordered
171
+ expect(@dummy).to have_received(:exec).with('create schema another_schema').ordered
172
+ end
173
+ end
174
+
175
+ describe '#truncate_all_tables' do
176
+ let(:existing_tables) { ['some_schema.some_table', 'some_other_schema.some_other_table'] }
177
+
178
+ before(:each) do
179
+ allow(testing_database).to receive_message_chain(:exec, :map).and_return(existing_tables)
180
+ @dummy = described_class.new(production_database)
181
+ @dummy.truncate_all_tables()
182
+ end
183
+
184
+ it 'asks the testing database for currently existing tables in production schemas' do
185
+ expect(testing_database).to have_received(:exec).with(/^\s*select\s+schemaname\s+\|\|\s+'\.'\s+\|\|\s+tablename\s+as schema\_and\_table\s+from\s+pg_tables\s+where\s+tableowner\s*=\s*'some_user'\s*;?\s*$/)
186
+ end
187
+
188
+ it 'truncates the returned tables in the testing database' do
189
+ expect(testing_database).to have_received(:exec).with(/^\s*select\s+/).ordered
190
+ expect(testing_database).to have_received(:exec).with(/^\s*truncate\s+table\s+some\_schema\.some\_table\s*;?\s*$/).ordered
191
+ expect(testing_database).to have_received(:exec).with(/^\s*truncate\s+table\s+some\_other\_schema\.some\_other\_table\s*;?\s*$/).ordered
192
+ end
193
+
194
+ it 'does not truncate anything in the production database' do
195
+ expect(production_database).to_not have_received(:exec).with(/truncate/)
196
+ end
197
+ end
198
+
199
+ describe '#exec' do
200
+ let(:some_statement) { 'some statement' }
201
+
202
+ before(:each) do
203
+ @dummy = described_class.new(production_database)
204
+ end
205
+
206
+ it 'executes the passed statement on the testing database' do
207
+ @dummy.exec(some_statement)
208
+ expect(testing_database).to have_received(:exec).with(some_statement)
209
+ end
210
+
211
+ it 'does not execute the passed statement on the production database' do
212
+ @dummy.exec(some_statement)
213
+ expect(production_database).to_not have_received(:exec).with(some_statement)
214
+ end
215
+
216
+ it 'sets an alias for \'query\'' do
217
+ expect(@dummy).to respond_to(:query)
218
+ @dummy.query(some_statement)
219
+ expect(testing_database).to have_received(:exec).with(some_statement)
220
+ end
221
+ end
222
+
223
+ describe '#exec_file' do
224
+ let(:some_file_path) { 'some/file/path' }
225
+ let(:some_file_content) { 'some file content' }
226
+
227
+ before(:each) do
228
+ allow(File).to receive(:read).with(some_file_path).and_return(some_file_content)
229
+ @dummy = described_class.new(production_database)
230
+ end
231
+
232
+ it 'reads the statement from the path provided, relative to root' do
233
+ @dummy.exec_file(some_file_path)
234
+ expect(File).to have_received(:read).with(some_file_path)
235
+ end
236
+
237
+ it 'executes the file content on the testing database' do
238
+ @dummy.exec_file(some_file_path)
239
+ expect(testing_database).to have_received(:exec).with(some_file_content)
240
+ end
241
+
242
+ it 'does not execute file content on the production database' do
243
+ @dummy.exec_file(some_file_path)
244
+ expect(production_database).to_not have_received(:exec).with(some_file_content)
245
+ end
246
+
247
+ it 'sets an alias for \'query_file\'' do
248
+ expect(@dummy).to respond_to(:query_file)
249
+ @dummy.query_file(some_file_path)
250
+ expect(testing_database).to have_received(:exec).with(some_file_content)
251
+ end
252
+ end
253
+
254
+ describe '#insert_mock_values' do
255
+ let(:table) { 'some_schema.some_table' }
256
+ let(:mock) do
257
+ {
258
+ 'some_column' => 'some_value',
259
+ 'some_other_column' => 1234
260
+ }
261
+ end
262
+
263
+ before(:each) do
264
+ @dummy = described_class.new(production_database)
265
+ @dummy.insert_mock_values(table, mock)
266
+ end
267
+
268
+ it 'transforms a given hash to an \'insert\' statement' do
269
+ expect(testing_database).to have_received(:exec).with(/^\s*insert\s+into\s+some\_schema\.some\_table\s+\(some\_column,some\_other\_column\)\s+values\s*\('some\_value',1234\)\s*;?\s*$/)
270
+ end
271
+
272
+ it 'does not try to insert anything into a production table' do
273
+ expect(production_database).to_not have_received(:exec).with(/insert/)
274
+ end
275
+ end
276
+
277
+ describe '#mock' do
278
+ let(:mock) do
279
+ {
280
+ 'some_schema.some_table' => [
281
+ { 'some_column' => 'some_value', 'some_other_column' => 'some_other_value' },
282
+ { 'some_column' => 'another_value', 'some_other_column' => 'yet_another_value' }
283
+ ],
284
+ 'some_other_schema.some_other_table' => [
285
+ { 'another_column' => 'some_value' }
286
+ ]
287
+ }
288
+ end
289
+
290
+ before(:each) do
291
+ @dummy = described_class.new(production_database)
292
+ allow(@dummy).to receive(:insert_mock_values)
293
+ @dummy.mock(mock)
294
+ end
295
+
296
+ it 'inserts the mock values' do
297
+ expect(@dummy).to have_received(:insert_mock_values).with('some_schema.some_table', { 'some_column' => 'some_value', 'some_other_column' => 'some_other_value' }).ordered
298
+ expect(@dummy).to have_received(:insert_mock_values).with('some_schema.some_table', { 'some_column' => 'another_value', 'some_other_column' => 'yet_another_value' }).ordered
299
+ expect(@dummy).to have_received(:insert_mock_values).with('some_other_schema.some_other_table', { 'another_column' => 'some_value'}).ordered
300
+ end
301
+ end
302
+
303
+ describe '#copy_table_defs_from_prod' do
304
+ let(:tables) { [{'some_schema' => 'some_table'}, {'some_other_schema' => 'some_other_table'}] }
305
+
306
+ before(:each) do
307
+ @dummy = described_class.new(production_database)
308
+ allow(@dummy).to receive(:copy_table_def_from_prod)
309
+ @dummy.copy_table_defs_from_prod(tables)
310
+ end
311
+
312
+ it 'triggers copies the individual table definitions from production' do
313
+ expect(@dummy).to have_received(:copy_table_def_from_prod).with('some_schema', 'some_table').ordered
314
+ expect(@dummy).to have_received(:copy_table_def_from_prod).with('some_other_schema', 'some_other_table').ordered
315
+ end
316
+ end
317
+
318
+ describe '#copy_table_def_from_prod' do
319
+ let(:schema) { 'some_schema' }
320
+ let(:table) { 'some_table' }
321
+ let(:some_table_definition) { 'some table definition' }
322
+
323
+ before(:each) do
324
+ @dummy = described_class.new(production_database)
325
+ allow(@dummy).to receive(:_get_create_table_statement).and_return(some_table_definition)
326
+ @dummy.copy_table_def_from_prod(schema, table)
327
+ end
328
+
329
+ it 'retrieves the table definition' do
330
+ expect(@dummy).to have_received(:_get_create_table_statement).with(schema, table)
331
+ end
332
+
333
+ it 'executes the retrieved table definition on the testing database' do
334
+ expect(testing_database).to have_received(:exec).with(some_table_definition)
335
+ end
336
+ end
337
+
338
+ describe '#_get_create_table_statement' do
339
+ let(:schema) { 'some_schema' }
340
+ let(:table) { 'some_table' }
341
+ let(:table_definition) do
342
+ [
343
+ {'table_schema' => 'some_schema', 'table_name' => 'some_table', 'column_name' => 'some_column', 'data_type' => 'integer', 'is_nullable' => 'YES'},
344
+ {'table_schema' => 'some_schema', 'table_name' => 'some_table', 'column_name' => 'some_other_column', 'data_type' => 'character varying(255)', 'is_nullable' => 'YES'},
345
+ {'table_schema' => 'some_schema', 'table_name' => 'some_table', 'column_name' => 'yet_another_column', 'data_type' => 'character(5)', 'is_nullable' => 'NO'}
346
+ ]
347
+ end
348
+
349
+ before(:each) do
350
+ @dummy = described_class.new(production_database)
351
+ end
352
+
353
+ context 'in any case' do
354
+ before(:each) do
355
+ allow(production_database).to receive(:query).and_return(table_definition)
356
+ @dummy.send(:_get_create_table_statement, schema, table) rescue nil
357
+ end
358
+
359
+ it 'sets the search path and queries the table definition' do
360
+ expect(production_database).to have_received(:exec).with(/^\s*set search\_path\s+to\s+'\$user',\s*some\_schema\s*;\s*$/).ordered
361
+ expect(production_database).to have_received(:query).with(/^\s*select\s+\*\s+from\s+information\_schema\.columns\s+where\s+table\_schema\s*=\s*'some\_schema'\s+and\s+table\_name\s*=\s*'some\_table'\s*;\s*$/).ordered
362
+ end
363
+ end
364
+
365
+ context 'when there is a table definition' do
366
+ before(:each) do
367
+ allow(production_database).to receive(:query).and_return(table_definition)
368
+ allow(table_definition).to receive(:num_tuples).and_return(1)
369
+ end
370
+
371
+ it 'does not raise an error' do
372
+ expect { @dummy.send(:_get_create_table_statement, schema, table) }.to_not raise_error
373
+ end
374
+
375
+ it 'returns a correctly parsed schema' do
376
+ expect(@dummy.send(:_get_create_table_statement, schema, table)).to match(
377
+ /^\s*create\s+table\s+if\s+not\s+exists\s+some\_schema\.some\_table\s+\(\s*some\_column\s+integer\s+(not|default)\s+null\s*,\s*some\_other\_column\s+character\s+varying\(255\)\s+(not|default)\s+null\s*,\s*yet\_another\_column\s+character\(5\)\s+(not|default)\s+null\)\s*;\s*$/
378
+ )
379
+ end
380
+
381
+ it 'returns the parsed schema with all columns allowing null values' do
382
+ expect(@dummy.send(:_get_create_table_statement, schema, table)).to match(
383
+ /^\s*create\s+table\s+if\s+not\s+exists\s+some\_schema\.some\_table\s+\(\s*some\_column\s+integer\s+default\s+null\s*,\s*some\_other\_column\s+character\s+varying\(255\)\s+default\s+null\s*,\s*yet\_another\_column\s+character\(5\)\s+default\s+null\)\s*;\s*$/
384
+ )
385
+ end
386
+ end
387
+
388
+ context 'when there is no table definition' do
389
+ before(:each) do
390
+ allow(production_database).to receive(:query).and_return(table_definition)
391
+ allow(table_definition).to receive(:num_tuples).and_return(0)
392
+ end
393
+
394
+ it 'raises an error' do
395
+ expect { @dummy.send(:_get_create_table_statement, schema, table) }.to raise_error(RuntimeError, /^Sorry, there is no table information/)
396
+ end
397
+ end
398
+ end
399
+
400
+ describe '#destroy' do
401
+ before(:each) do
402
+ allow(production_database).to receive(:close)
403
+ allow(testing_database).to receive(:close)
404
+ @dummy = described_class.new(production_database)
405
+ end
406
+
407
+ context 'when the db shall be kept' do
408
+ before(:each) do
409
+ stub_const("#{described_class}::DELETE_DB_WHEN_FINISHED", false)
410
+ @dummy.destroy()
411
+ end
412
+
413
+ it 'closes the connection to the production database' do
414
+ expect(production_database).to have_received(:close)
415
+ end
416
+
417
+ it 'closes the connection to the testing database' do
418
+ expect(testing_database).to have_received(:close)
419
+ end
420
+
421
+ it 'does not drop the testing database' do
422
+ expect(production_database).to_not have_received(:exec).with(/^drop\s+database/)
423
+ end
424
+ end
425
+
426
+ context 'when the db may be deleted' do
427
+ before(:each) do
428
+ stub_const("#{described_class}::DELETE_DB_WHEN_FINISHED", true)
429
+ @dummy.destroy()
430
+ end
431
+
432
+ it 'closes the connection to the production database' do
433
+ expect(production_database).to have_received(:close)
434
+ end
435
+
436
+ it 'closes the connection to the testing database' do
437
+ expect(testing_database).to have_received(:close)
438
+ end
439
+
440
+ it 'does not drop the testing database' do
441
+ expect(production_database).to have_received(:exec).with(/^create\s+database\s+#{Regexp.quote(@dummy.instance_variable_get(:@test_db_name))}/).ordered
442
+ expect(production_database).to have_received(:exec).with(/^drop\s+database\s+#{Regexp.quote(@dummy.instance_variable_get(:@test_db_name))}$/).ordered
443
+ end
444
+ end
445
+ end
446
+ end
447
+ end
metadata ADDED
@@ -0,0 +1,175 @@
+ --- !ruby/object:Gem::Specification
+ name: squcumber-postgres
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Stefanie Grunwald
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2016-07-01 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: pg
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0.16'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0.16'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: cucumber
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '2.0'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '2.0'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '3.0'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '10.1'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '12.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '10.1'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '12.0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '3.1'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '4.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '3.1'
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '4.0'
+ - !ruby/object:Gem::Dependency
+   name: rspec-collection_matchers
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 1.1.2
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '2.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 1.1.2
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '2.0'
+ - !ruby/object:Gem::Dependency
+   name: codeclimate-test-reporter
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 0.4.3
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 0.4.3
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ description:
+ email: steffi@physics.org
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - Rakefile
+ - lib/squcumber-postgres.rb
+ - lib/squcumber-postgres/mock/database.rb
+ - lib/squcumber-postgres/rake/task.rb
+ - lib/squcumber-postgres/step_definitions/common_steps.rb
+ - lib/squcumber-postgres/support/database.rb
+ - lib/squcumber-postgres/support/matchers.rb
+ - lib/squcumber-postgres/support/output.rb
+ - spec/spec_helper.rb
+ - spec/squcumber-postgres/mock/database_spec.rb
+ homepage: https://github.com/moertel/sQucumber-postgres
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '2.0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.6.3
+ signing_key:
+ specification_version: 3
+ summary: Define and execute SQL integration tests for AWS postgres
+ test_files:
+ - spec/spec_helper.rb
+ - spec/squcumber-postgres/mock/database_spec.rb