squcumber-redshift 0.1.2

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
checksums.yaml.gz ADDED
@@ -0,0 +1,15 @@
+ ---
+ !binary "U0hBMQ==":
+   metadata.gz: !binary |-
+     ODMzNWMxODJhMzdkYzQ0ZTMyOGJkNmVlN2Y5NTZkMWRjMzcwOGQzNw==
+   data.tar.gz: !binary |-
+     NWEwMDQ0ZDRmZjEzZWUyYzZlYTJhODZhMWQwZGIxM2QwNzMyNjEwNQ==
+ SHA512:
+   metadata.gz: !binary |-
+     OWJmNTlhYTRlMDdjNjc4OGZkMDgwNDAyNDcyYmUzMDJkNzYwNTgzNDNjYWU5
+     NzIxNmIzOTVmNmE4N2Y4YTAyYjE2ZDA5Mjc3YmI0YTk5YTM1OWVjNzYyMzIy
+     MGI2MTZlYWZjMDY0MTU3YWEwN2MzMTJiZTIwNDFmZWJkODEyM2Y=
+   data.tar.gz: !binary |-
+     YTQ3NTBiNThiYzZhYTgzZjVmZmI4NWZhNzkxZmE5YThjYTk2MDZkMmI1OTMz
+     MWQyNGUxOGFhOGRmYjE4Yzg3Y2I4OTE3NGVlMWM0MDU4NGNjOWMyMjU0Mzky
+     NzVlYjRhYjI5NTVmMDQxYTI1NmIwMDU1YjFhMGE0ZTY1NTI4YjQ=
Rakefile ADDED
@@ -0,0 +1,2 @@
+ load 'lib/squcumber-redshift/tasks/test.rake'
+ project_name = 'squcumber-redshift'
lib/squcumber-redshift.rb ADDED
@@ -0,0 +1 @@
+ Dir[File.dirname(__FILE__) + '/squcumber/*.rb'].each { |file| require file }
lib/squcumber-redshift/mock/database.rb ADDED
@@ -0,0 +1,134 @@
+ require 'pg'
+
+ module Squcumber
+   module Redshift
+     module Mock
+       class Database
+         DELETE_DB_WHEN_FINISHED = ENV['KEEP_TEST_DB'].to_i == 1 ? false : true
+         TEST_DB_NAME_OVERRIDE = ENV.fetch('TEST_DB_NAME_OVERRIDE', '')
+
+         def initialize(production_database)
+           @production_database = production_database or raise ArgumentError, 'No production database provided'
+
+           test_db_name_postfix = TEST_DB_NAME_OVERRIDE.empty? ? rand(10000..99999) : TEST_DB_NAME_OVERRIDE
+           @test_db_name = "test_env_#{test_db_name_postfix}"
+
+           if @production_database.exec("select datname from pg_database where datname like '%#{@test_db_name}%'").num_tuples != 0
+             @production_database.exec("drop database #{@test_db_name}")
+           end
+           @production_database.exec("create database #{@test_db_name}")
+
+           @testing_database = PG.connect(
+             host: ENV['REDSHIFT_HOST'],
+             port: ENV['REDSHIFT_PORT'],
+             dbname: @test_db_name,
+             user: ENV['REDSHIFT_USER'],
+             password: ENV['REDSHIFT_PASSWORD']
+           )
+         end
+
+         def setup(schemas)
+           schemas.each { |schema| exec("create schema #{schema}") }
+         end
+
+         def truncate_all_tables
+           @testing_database
+             .exec("select schemaname || '.' || tablename as schema_and_table from pg_tables where tableowner = '#{ENV['REDSHIFT_USER']}'")
+             .map { |row| row['schema_and_table'] }
+             .each { |schema_and_table| exec("truncate table #{schema_and_table}") }
+         end
+
+         def exec(statement)
+           @testing_database.exec(statement)
+         end
+         alias_method :query, :exec
+
+         def exec_file(path)
+           exec(File.read("#{path}"))
+         end
+         alias_method :query_file, :exec_file
+
+         # Redshift does not allow to copy a table schema to another database, i.e.
+         # `create table some_db.some_table (like another_db.some_table)` cannot be used.
+         def copy_table_def_from_prod(schema, table)
+           create_table_statement = _get_create_table_statement(schema, table)
+           exec(create_table_statement)
+         end
+
+         def copy_table_defs_from_prod(tables)
+           tables.each do |obj|
+             obj.each { |schema, table| copy_table_def_from_prod(schema, table) }
+           end
+         end
+
+         def mock(mock)
+           mock.each do |schema_and_table, data|
+             raise "Mock data for #{schema_and_table} is not correctly formatted: must be Array but was #{data.class}" unless data.is_a?(Array)
+             data.each { |datum| insert_mock_values(schema_and_table, datum) }
+           end
+         end
+
+         def insert_mock_values(schema_and_table, mock)
+           schema, table = schema_and_table.split('.')
+           keys = []
+           vals = []
+           mock.each do |key, value|
+             unless value.nil?
+               keys << key
+               vals << (value.is_a?(String) ? "'#{value}'" : value)
+             end
+           end
+           exec("insert into #{schema}.#{table} (#{keys.join(',')}) values (#{vals.join(',')})") unless vals.empty?
+         end
+
+         def destroy
+           @testing_database.close()
+
+           if DELETE_DB_WHEN_FINISHED
+             attempts = 0
+             begin
+               attempts += 1
+               @production_database.exec("drop database #{@test_db_name}")
+             rescue PG::ObjectInUse
+               sleep 5
+               retry unless attempts >= 3
+             end
+           else
+             puts "\nTest database has been kept alive: #{@test_db_name}"
+           end
+
+           @production_database.close()
+         end
+
+         private
+
+         def _get_create_table_statement(schema, table)
+           @production_database.exec("set search_path to '$user', #{schema};")
+           table_schema = @production_database.query("select * from pg_table_def where schemaname = '#{schema}' and tablename = '#{table}';")
+           raise "Sorry, there is no table information for #{schema}.#{table}" if table_schema.num_tuples == 0
+
+           distkey = _get_table_distkey(table_schema)
+           sortkeys = _get_table_sortkeys(table_schema).join(',')
+           definitions = _get_column_definitions(table_schema).join(',')
+
+           table_distkey = "distkey(#{distkey})" unless distkey.nil?
+           table_sortkeys = "sortkey(#{sortkeys})" unless sortkeys.empty?
+
+           "create table if not exists #{schema}.#{table} (#{definitions}) #{table_distkey} #{table_sortkeys};"
+         end
+
+         def _get_table_distkey(table_definition)
+           table_definition.select { |definition| definition['distkey'].eql?('t') }[0]['column'] rescue nil
+         end
+
+         def _get_table_sortkeys(table_definition)
+           table_definition.sort_by { |e| e['sortkey'].to_i }.select { |e| e['sortkey'].to_i != 0 }.map { |e| e['column'] } rescue nil
+         end
+
+         def _get_column_definitions(table_definition)
+           table_definition.map { |definition| "#{definition['column']} #{definition['type']} default null" }
+         end
+       end
+     end
+   end
+ end
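Editor's note: for orientation, here is a minimal sketch of how this mock database class is driven by the support and step-definition files further down. The connection values, schema/table names, and SQL file path are illustrative placeholders, not part of the gem.

    require 'pg'
    require 'squcumber-redshift/mock/database'

    # Connection details come from the same environment variables the gem reads.
    production_database = PG.connect(
      host: ENV['REDSHIFT_HOST'], port: ENV['REDSHIFT_PORT'],
      dbname: ENV['REDSHIFT_DB'], user: ENV['REDSHIFT_USER'],
      password: ENV['REDSHIFT_PASSWORD']
    )

    db = Squcumber::Redshift::Mock::Database.new(production_database)
    db.setup(['some_schema'])                                          # create empty schemas in the test db
    db.copy_table_defs_from_prod([{ 'some_schema' => 'some_table' }])  # clone table DDL from production
    db.mock('some_schema.some_table' => [{ 'id' => 1, 'name' => 'foo' }])
    db.exec_file('jobs/kpi/sales/some_transformation.sql')             # run the SQL under test
    db.query('select * from some_schema.some_table').each { |row| p row }
    db.destroy  # closes connections; drops the test_env_* database unless KEEP_TEST_DB=1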
lib/squcumber-redshift/rake/task.rb ADDED
@@ -0,0 +1,51 @@
+ require 'cucumber'
+ require 'cucumber/rake/task'
+ require 'rake'
+
+ module Squcumber
+   module Redshift
+     module Rake
+       class Task
+         include ::Rake::DSL if defined? ::Rake::DSL
+
+         def install_tasks
+           namespace :test do
+             # Auto-generate Rake tasks for each feature and each of their parent directories
+             @features_dir = File.join(FileUtils.pwd, 'features')
+             features = Dir.glob("#{@features_dir}/**/*.feature")
+             parent_directories = features.map { |f| f.split('/')[0..-2].join('/') }.uniq
+
+             features.each do |feature|
+               feature_name = feature.gsub(File.join(FileUtils.pwd, 'features/'), '').gsub('.feature', '')
+               task_name = feature_name.gsub('/', ':')
+               desc "Run SQL tests for feature #{feature_name}"
+               task "sql:#{task_name}".to_sym, [:scenario_line_number] do |_, args|
+                 cucumber_task_name = "cucumber_#{task_name}".to_sym
+                 ::Cucumber::Rake::Task.new(cucumber_task_name) do |t|
+                   line_number = args[:scenario_line_number].nil? ? '' : ":#{args[:scenario_line_number]}"
+                   t.cucumber_opts = "#{feature}#{line_number} --format pretty --format html --out #{feature_name.gsub('/','_')}.html --require #{File.dirname(__FILE__)}/../support --require #{File.dirname(__FILE__)}/../step_definitions"
+                 end
+                 ::Rake::Task[cucumber_task_name].execute
+               end
+             end
+
+             parent_directories.each do |feature|
+               feature_name = feature.gsub(File.join(FileUtils.pwd, 'features/'), '').gsub('.feature', '')
+               task_name = feature_name.gsub('/', ':')
+               desc "Run SQL tests for all features in #{feature_name}"
+               task "sql:#{task_name}".to_sym do
+                 cucumber_task_name = "cucumber_#{task_name}".to_sym
+                 ::Cucumber::Rake::Task.new(cucumber_task_name) do |t|
+                   t.cucumber_opts = "#{feature} --format pretty --format html --out #{feature_name.gsub('/','_')}.html --require #{File.dirname(__FILE__)}/../support --require #{File.dirname(__FILE__)}/../step_definitions"
+                 end
+                 ::Rake::Task[cucumber_task_name].execute
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
+
+ Squcumber::Redshift::Rake::Task.new.install_tasks
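Editor's note: because this file calls `install_tasks` on itself when loaded, a consuming project can pull the generated tasks into its own Rakefile with a plain `load`. A sketch, assuming the gem's lib directory is on the load path and a hypothetical features/kpi/sales/revenue.feature exists:

    # Rakefile of a project using the gem
    load 'squcumber-redshift/rake/task.rb'  # defines and installs the test:sql:* tasks

    # The generated tasks would then be invoked as, e.g.:
    #   rake test:sql:kpi:sales:revenue      # whole feature file
    #   rake test:sql:kpi:sales:revenue[12]  # only the scenario starting on line 12
    #   rake test:sql:kpi:sales              # all features in that directory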
lib/squcumber-redshift/step_definitions/common_steps.rb ADDED
@@ -0,0 +1,152 @@
+ require 'json'
+
+ Before do
+   $setup ||= false
+ end
+
+ # Takes a path and then sequentially adds what's provided in `data`
+ # to be later executed in the step `the given SQL files are executed`
+ # +path+:: relative to root of project, e.g. "jobs/kpi/sales"
+ Given(/^the SQL files in the path "?([^"]*)"?:$/) do |path, data|
+   @sql_file_path = path
+   @sql_files_to_execute = data.hashes.map { |e| "#{@sql_file_path}/#{e['file']}" }
+ end
+
+ Given(/^the SQL file path "?([^\s"]+)"?$/) do |path|
+   @sql_file_path = path
+ end
+
+ Given(/^Pending: (.*)/) { |reason| pending(reason) }
+
+ Given(/^their schema dependencies:$/) do |data|
+   unless $setup
+     schemas = data.hashes.map { |hash| hash['schema'] }.compact.uniq
+     TESTING_DATABASE.setup(schemas)
+   end
+ end
+
+ Given(/^their table dependencies:$/) do |data|
+   if $setup
+     silence_streams(STDERR) do
+       TESTING_DATABASE.truncate_all_tables()
+     end
+   else
+     tables = []
+     schemas = []
+     data.hashes.each do |hash|
+       schema, table = hash['table'].split('.')
+       schemas << schema
+       tables << { schema => table }
+     end
+     silence_streams(STDERR) do
+       TESTING_DATABASE.setup(schemas.compact.uniq)
+       TESTING_DATABASE.copy_table_defs_from_prod(tables)
+     end
+     $setup = true
+   end
+ end
+
+ Given(/^the following defaults for "?([^\s"]+)"? \(if not stated otherwise\):$/) do |table, data|
+   @defaults ||= {}
+   @defaults[table] = data.hashes[0]
+ end
+
+ Given(/a clean environment/) do
+   silence_streams(STDERR) do
+     TESTING_DATABASE.truncate_all_tables()
+   end
+ end
+
+ Given(/^the existing table "?([a-zA-Z0-9_]+)\.([a-zA-Z0-9_]+)"?( with date placeholders)?:$/) do |schema, table, placeholder, data|
+   mock_data = data.hashes
+   @defaults ||= {}
+   defaults = @defaults["#{schema}.#{table}"]
+
+   unless defaults.nil? or defaults.empty?
+     mock_data.map! { |entry| defaults.merge(entry) }
+   end
+
+   mock_data = convert_mock_values(mock_data) if placeholder
+
+   TESTING_DATABASE.mock(
+     Hash["#{schema}.#{table}", mock_data]
+   )
+ end
+
+ When(/^the given SQL files are executed$/) do
+   silence_streams(STDERR) do
+     @sql_files_to_execute.each { |file| TESTING_DATABASE.exec_file(file) }
+   end
+ end
+
+ When(/^the SQL file "?([^\s]+)"? is executed/) do |file|
+   silence_streams(STDERR) do
+     TESTING_DATABASE.exec_file("#{@sql_file_path}/#{file}")
+   end
+ end
+
+ When(/^the resulting table "?([^\s"]*)"? is queried(?:, ordered by "?([^"]*)"?)?/) do |table, sort_column|
+   sort_statement = (sort_column.nil? or sort_column.empty?) ? '' : "order by #{sort_column}"
+   @result = TESTING_DATABASE.query("select * from #{table} #{sort_statement};").map { |e| e }
+ end
+
+ Then(/^the result starts with.*$/) do |data|
+   actual = @result[0..(data.hashes.length - 1)] || []
+   expected = data.hashes || []
+
+   expected.each_with_index do |hash, i|
+     raise("Does not start with expected result, got:\n#{format_error(data, actual)}") unless actual[i].all? do |key, value|
+       values_match(value, hash[key]) # actual,expected
+     end
+   end
+ end
+
+ Then(/^the result includes.*$/) do |data|
+   actual = @result || []
+   expected = data.hashes || []
+
+   expected.each do |hash|
+     raise("Result is not included, got:\n#{format_error(data, actual)}") unless actual.any? do |row|
+       row.all? do |key, value|
+         values_match(value, hash[key]) # actual,expected
+       end
+     end
+   end
+ end
+
+ Then(/^the result does not include.*$/) do |data|
+   actual = @result || []
+   expected = data.hashes || []
+
+   expected.each do |hash|
+     raise("Result is included, got:\n#{format_error(data, actual)}") if actual.any? do |row|
+       row.all? do |key, value|
+         values_match(value, hash[key]) # actual,expected
+       end
+     end
+   end
+ end
+
+ Then(/^the result exactly matches.*$/) do |data|
+   actual = @result || []
+   expected = data.hashes || []
+
+   raise("Does not match exactly, got:\n#{format_error(data, actual)}") if actual.length != expected.length
+
+   actual.each_with_index do |row, i|
+     raise("Does not match exactly, got:\n#{format_error(data, actual)}") unless (expected[i] || {}).all? do |key, value|
+       values_match(row[key], value) # actual,expected
+     end
+   end
+
+   expected.each_with_index do |hash, i|
+     raise("Does not match exactly, got:\n#{format_error(data, actual)}") unless (actual[i] || {}).all? do |key, value|
+       values_match(value, hash[key]) # actual,expected
+     end
+   end
+ end
+
+ Then(/^the result is empty.*$/) do
+   actual = @result || []
+   raise("Result is not empty, got:\n#{format_error({}, actual)}") unless actual.length == 0
+ end
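Editor's note: in the "existing table" step above, each row of the Gherkin table is merged on top of the defaults stored by the "following defaults" step, so a scenario only needs to spell out the columns it cares about. A minimal sketch of that merge (column names are made up):

    defaults = { 'id' => '1', 'country' => 'DE', 'status' => 'active' }  # from "the following defaults for ..."
    row      = { 'country' => 'US' }                                     # one row of "the existing table ..."

    defaults.merge(row)
    # => {"id"=>"1", "country"=>"US", "status"=>"active"}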
lib/squcumber-redshift/support/database.rb ADDED
@@ -0,0 +1,17 @@
+ require_relative '../mock/database'
+
+ print 'Connect to production database...'
+ production_database = PG.connect(
+   host: ENV['REDSHIFT_HOST'],
+   port: ENV['REDSHIFT_PORT'],
+   dbname: ENV['REDSHIFT_DB'],
+   user: ENV['REDSHIFT_USER'],
+   password: ENV['REDSHIFT_PASSWORD']
+ )
+ puts 'DONE.'
+
+ TESTING_DATABASE ||= Squcumber::Redshift::Mock::Database.new(production_database)
+
+ at_exit do
+   TESTING_DATABASE.destroy
+ end
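Editor's note: this support file is pulled in by the generated Cucumber tasks via `--require .../support` and reads its connection settings from the environment. The variables it and the mock database rely on, expressed here as Ruby ENV assignments for illustration (all values are placeholders):

    ENV['REDSHIFT_HOST']         = 'my-cluster.example.com'  # production cluster to clone table DDL from
    ENV['REDSHIFT_PORT']         = '5439'
    ENV['REDSHIFT_DB']           = 'production_db'
    ENV['REDSHIFT_USER']         = 'etl_user'
    ENV['REDSHIFT_PASSWORD']     = 'secret'
    ENV['KEEP_TEST_DB']          = '1'            # optional: keep the test_env_* database afterwards
    ENV['TEST_DB_NAME_OVERRIDE'] = 'ci_build_42'  # optional: fixed postfix instead of a random one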
lib/squcumber-redshift/support/matchers.rb ADDED
@@ -0,0 +1,45 @@
+ module MatcherHelpers
+   def values_match(actual, expected)
+     if expected.eql?('today')
+       actual.match(/#{Regexp.quote(Date.today.to_s)}/)
+     elsif expected.eql?('yesterday')
+       actual.match(/#{Regexp.quote((Date.today - 1).to_s)}/)
+     elsif expected.eql?('any_date')
+       actual.match(/^\d{4}\-\d{2}\-\d{2} \d{2}:\d{2}:\d{2}$/)
+     elsif expected.eql?('any_string')
+       true if actual.is_a?(String) or actual.nil?
+     elsif expected.eql?('false') or expected.eql?('true')
+       true if actual.eql?(expected[0])
+     elsif !expected.nil?
+       actual ||= ''
+       actual.eql?(expected)
+     else # we have not mocked this, so ignore it
+       true
+     end
+   end
+
+   def timetravel(date, i, method); i > 0 ? timetravel(date.send(method.to_sym), i - 1, method) : date; end
+
+   def convert_mock_values(mock_data)
+     mock_data.map do |entry|
+       entry.each do |key, value|
+         entry[key] = case value
+           when /today/
+             Date.today.to_s
+           when /yesterday/
+             Date.today.prev_day.to_s
+           when /\s*\d+\s+month(s)?\s+ago\s*/
+             number_of_months = value.match(/\d+/)[0].to_i
+             timetravel(Date.today, number_of_months, :prev_month).to_s
+           when /\s*\d+\s+day(s)?\s+ago\s*/
+             number_of_days = value.match(/\d+/)[0].to_i
+             timetravel(Date.today, number_of_days, :prev_day).to_s
+           else
+             value
+         end
+       end
+     end
+   end
+ end
+
+ World(MatcherHelpers)
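Editor's note: a small sketch of what these helpers evaluate to, assuming the module is included directly and, for concreteness, that today is 2016-07-01:

    require 'date'
    include MatcherHelpers

    # Placeholders accepted in expected values by values_match:
    values_match('2016-07-01 08:15:00', 'today')   # truthy (MatchData)
    values_match('abc', 'any_string')              # true
    values_match(nil, nil)                         # true: column was not mocked, so it is ignored

    # Placeholders converted in mocked input rows by convert_mock_values:
    timetravel(Date.today, 3, :prev_month).to_s    # => "2016-04-01" (with the assumed date)
    convert_mock_values([{ 'created_at' => '2 days ago' }])
    # => [{"created_at"=>"2016-06-29"}]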
lib/squcumber-redshift/support/output.rb ADDED
@@ -0,0 +1,43 @@
+ module OutputHelpers
+   def silence_streams(*streams)
+     unless ENV['SHOW_STDOUT'].to_i == 1
+       begin
+         on_hold = streams.collect { |stream| stream.dup }
+         streams.each do |stream|
+           stream.reopen('/dev/null')
+           stream.sync = true
+         end
+         yield
+       ensure
+         streams.each_with_index do |stream, i|
+           stream.reopen(on_hold[i])
+         end
+       end
+     end
+   end
+
+   def format_error(expected_data, actual_result)
+     expectation_count = (expected_data.rows.count rescue nil) || 0
+     if expectation_count == 0
+       table_headings = actual_result[0].keys
+     else
+       table_headings = expected_data.hashes[0].keys
+     end
+     print_data = Hash[table_headings.map { |key| [key, key.length] }]
+
+     actual_result.each do |row|
+       row.each do |key, value|
+         print_data[key] = value.length if (value.to_s.length > print_data[key].to_i)
+       end
+     end
+
+     error = '| ' + table_headings.map { |k| k.ljust(print_data[k], ' ') }.join(' | ') + " |\n"
+     error << actual_result.map do |row|
+       '| ' + table_headings.map { |k| (row[k] || '').ljust(print_data[k], ' ') }.join(' | ') + ' |'
+     end.join("\n") + "\n"
+
+     error
+   end
+ end
+
+ World(OutputHelpers)
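Editor's note: `format_error` renders the actual query result as a padded ASCII table, which is what the error messages in the step definitions embed. A sketch with made-up column and values:

    include OutputHelpers

    rows = [
      { 'some_column' => 'some_value' },
      { 'some_column' => 'x' }
    ]
    puts format_error({}, rows)
    # | some_column |
    # | some_value  |
    # | x           |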
spec/spec_helper.rb ADDED
@@ -0,0 +1,28 @@
+ # encoding: utf-8
+
+ require 'rspec/collection_matchers'
+ require 'codeclimate-test-reporter'
+ CodeClimate::TestReporter.start
+
+ RSpec.configure do |config|
+   config.color = true
+
+   # Redirect stderr and stdout to get rid of info messages during execution
+   # of specs.
+   # Via http://stackoverflow.com/questions/15430551/suppress-console-output-during-rspec-tests
+   unless ENV['SPEC_SHOW_STDOUT'] == '1'
+     original_stderr = $stderr
+     original_stdout = $stdout
+     config.before(:all) do
+       $stderr = File.new('/dev/null', 'w')
+       $stdout = File.new('/dev/null', 'w')
+     end
+     config.after(:all) do
+       $stderr = original_stderr
+       $stdout = original_stdout
+     end
+   end
+ end
+
+ # Via https://www.relishapp.com/rspec/rspec-core/docs/example-groups/shared-examples
+ Dir["./spec/support/**/*.rb"].sort.each { |f| require f }
spec/squcumber-redshift/mock/database_spec.rb ADDED
@@ -0,0 +1,430 @@
+ require_relative '../../spec_helper'
+ require_relative '../../../lib/squcumber-redshift/mock/database'
+
+ module Squcumber::Redshift::Mock
+   describe Database do
+     let(:production_database) { double(PG::Connection) }
+     let(:testing_database) { double(PG::Connection) }
+
+     let(:empty_result) { double(PG::Result) }
+     let(:non_empty_result) { double(PG::Result) }
+
+     before(:each) do
+       allow(ENV).to receive(:[]).with('REDSHIFT_HOST').and_return('some.db.host')
+       allow(ENV).to receive(:[]).with('REDSHIFT_PORT').and_return(1234)
+       allow(ENV).to receive(:[]).with('REDSHIFT_USER').and_return('some_user')
+       allow(ENV).to receive(:[]).with('REDSHIFT_PASSWORD').and_return('s0m3_p4ssw0rd')
+       allow(ENV).to receive(:[]).with('REDSHIFT_DB').and_return('some_db')
+
+       allow(PG).to receive(:connect).and_return(testing_database)
+       allow(production_database).to receive(:exec).with(/^\s*create\s+database\s+/)
+       allow(production_database).to receive(:exec).with(/^\s*select\s+datname\s+from\s+pg_database\s+/).and_return(empty_result)
+       allow(production_database).to receive(:exec).with(/^\s*set\s+search_path\s+to\s+/)
+       allow(production_database).to receive(:exec).with(/^\s*drop\s+database\s+/)
+       allow(testing_database).to receive(:exec)
+
+       allow(empty_result).to receive(:num_tuples).and_return(0)
+       allow(non_empty_result).to receive(:num_tuples).and_return(1)
+     end
+
+     describe '#initialize' do
+       context 'when all arguments are provided' do
+         context 'and the database does not exist' do
+           it 'does not raise an error' do
+             expect { described_class.new(production_database) }.to_not raise_error
+           end
+
+           it 'generates a testing database name with expected pattern' do
+             dummy = described_class.new(production_database)
+             expect(dummy.instance_variable_get(:@test_db_name)).to match(/^test_env_\d{5}$/)
+           end
+
+           it 'does not try to drop the database' do
+             described_class.new(production_database)
+             expect(production_database).to_not have_received(:exec).with(/^drop\s+database\s+/)
+           end
+
+           it 'creates the testing database' do
+             dummy = described_class.new(production_database)
+             test_db_name = dummy.instance_variable_get(:@test_db_name)
+             expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/)
+           end
+
+           it 'connects to testing database in correct order with correct attributes' do
+             dummy = described_class.new(production_database)
+
+             test_db_name = dummy.instance_variable_get(:@test_db_name)
+             expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/).ordered
+
+             expect(PG).to have_received(:connect).with(
+               host: 'some.db.host',
+               port: 1234,
+               dbname: test_db_name,
+               user: 'some_user',
+               password: 's0m3_p4ssw0rd'
+             ).ordered
+           end
+         end
+
+         context 'and the database name is being overridden' do
+           let(:testing_db_name) { 'some_db_name' }
+
+           before(:each) do
+             stub_const("#{described_class}::TEST_DB_NAME_OVERRIDE", testing_db_name)
+           end
+
+           it 'does not raise an error' do
+             expect { described_class.new(production_database) }.to_not raise_error
+           end
+
+           it 'generates a testing database name with expected pattern' do
+             dummy = described_class.new(production_database)
+             expect(dummy.instance_variable_get(:@test_db_name)).to match(/^test_env_#{Regexp.quote(testing_db_name)}$/)
+           end
+
+           it 'does not try to drop the database' do
+             described_class.new(production_database)
+             expect(production_database).to_not have_received(:exec).with(/^drop\s+database\s+/)
+           end
+
+           it 'creates the testing database' do
+             described_class.new(production_database)
+             expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+test_env\_#{Regexp.quote(testing_db_name)}\s*;?\s*$/)
+           end
+
+           it 'connects to testing database in correct order with correct attributes' do
+             described_class.new(production_database)
+             expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+test_env_#{Regexp.quote(testing_db_name)}\s*;?\s*$/).ordered
+
+             expect(PG).to have_received(:connect).with(
+               host: 'some.db.host',
+               port: 1234,
+               dbname: 'test_env_' + testing_db_name,
+               user: 'some_user',
+               password: 's0m3_p4ssw0rd'
+             ).ordered
+           end
+         end
+
+         context 'and the database already exists' do
+           before(:each) do
+             allow(production_database).to receive(:exec).with(/^select\s+datname\s+from\s+pg_database\s+/).and_return(non_empty_result)
+           end
+
+           it 'does not raise an error' do
+             expect { described_class.new(production_database) }.to_not raise_error
+           end
+
+           it 'generates a testing database name with expected pattern' do
+             dummy = described_class.new(production_database)
+             expect(dummy.instance_variable_get(:@test_db_name)).to match(/^test_env_\d{5}$/)
+           end
+
+           it 'drops the existing testing database' do
+             described_class.new(production_database)
+             expect(production_database).to have_received(:exec).with(/^drop\s+database\s+/)
+           end
+
+           it 'creates the testing database' do
+             dummy = described_class.new(production_database)
+             test_db_name = dummy.instance_variable_get(:@test_db_name)
+             expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/)
+           end
+
+           it 'connects to testing database in correct order with correct attributes' do
+             dummy = described_class.new(production_database)
+
+             test_db_name = dummy.instance_variable_get(:@test_db_name)
+             expect(production_database).to have_received(:exec).with(/^\s*create\s+database\s+#{Regexp.quote(test_db_name)}\s*;?\s*$/).ordered
+
+             expect(PG).to have_received(:connect).with(
+               host: 'some.db.host',
+               port: 1234,
+               dbname: test_db_name,
+               user: 'some_user',
+               password: 's0m3_p4ssw0rd'
+             ).ordered
+           end
+         end
+       end
+
+       context 'when some arguments are missing' do
+         it 'raises an error when production database is not provided' do
+           expect { described_class.new(nil) }.to raise_error(ArgumentError, 'No production database provided')
+         end
+       end
+     end
+
+     describe '#truncate_all_tables' do
+       let(:existing_tables) { ['some_schema.some_table', 'some_other_schema.some_other_table'] }
+
+       before(:each) do
+         allow(testing_database).to receive_message_chain(:exec, :map).and_return(existing_tables)
+         @dummy = described_class.new(production_database)
+         @dummy.truncate_all_tables()
+       end
+
+       it 'asks the testing database for currently existing tables in production schemas' do
+         expect(testing_database).to have_received(:exec).with(/^\s*select\s+schemaname\s+\|\|\s+'\.'\s+\|\|\s+tablename\s+as schema\_and\_table\s+from\s+pg_tables\s+where\s+tableowner\s*=\s*'some_user'\s*;?\s*$/)
+       end
+
+       it 'truncates the returned tables in the testing database' do
+         expect(testing_database).to have_received(:exec).with(/^\s*select\s+/).ordered
+         expect(testing_database).to have_received(:exec).with(/^\s*truncate\s+table\s+some\_schema\.some\_table\s*;?\s*$/).ordered
+         expect(testing_database).to have_received(:exec).with(/^\s*truncate\s+table\s+some\_other\_schema\.some\_other\_table\s*;?\s*$/).ordered
+       end
+
+       it 'does not truncate anything in the production database' do
+         expect(production_database).to_not have_received(:exec).with(/truncate/)
+       end
+     end
+
+     describe '#exec' do
+       let(:some_statement) { 'some statement' }
+
+       before(:each) do
+         @dummy = described_class.new(production_database)
+       end
+
+       it 'executes the passed statement on the testing database' do
+         @dummy.exec(some_statement)
+         expect(testing_database).to have_received(:exec).with(some_statement)
+       end
+
+       it 'does not execute the passed statement on the production database' do
+         @dummy.exec(some_statement)
+         expect(production_database).to_not have_received(:exec).with(some_statement)
+       end
+
+       it 'sets an alias for \'query\'' do
+         expect(@dummy).to respond_to(:query)
+         @dummy.query(some_statement)
+         expect(testing_database).to have_received(:exec).with(some_statement)
+       end
+     end
+
+     describe '#exec_file' do
+       let(:some_file_path) { 'some/file/path' }
+       let(:some_file_content) { 'some file content' }
+
+       before(:each) do
+         allow(File).to receive(:read).with(some_file_path).and_return(some_file_content)
+         @dummy = described_class.new(production_database)
+       end
+
+       it 'reads the statement from the path provided, relative to root' do
+         @dummy.exec_file(some_file_path)
+         expect(File).to have_received(:read).with(some_file_path)
+       end
+
+       it 'executes the file content on the testing database' do
+         @dummy.exec_file(some_file_path)
+         expect(testing_database).to have_received(:exec).with(some_file_content)
+       end
+
+       it 'does not execute file content on the production database' do
+         @dummy.exec_file(some_file_path)
+         expect(production_database).to_not have_received(:exec).with(some_file_content)
+       end
+
+       it 'sets an alias for \'query_file\'' do
+         expect(@dummy).to respond_to(:query_file)
+         @dummy.query_file(some_file_path)
+         expect(testing_database).to have_received(:exec).with(some_file_content)
+       end
+     end
+
+     describe '#insert_mock_values' do
+       let(:table) { 'some_schema.some_table' }
+       let(:mock) do
+         {
+           'some_column' => 'some_value',
+           'some_other_column' => 1234
+         }
+       end
+
+       before(:each) do
+         @dummy = described_class.new(production_database)
+         @dummy.insert_mock_values(table, mock)
+       end
+
+       it 'transforms a given hash to an \'insert\' statement' do
+         expect(testing_database).to have_received(:exec).with(/^\s*insert\s+into\s+some\_schema\.some\_table\s+\(some\_column,some\_other\_column\)\s+values\s*\('some\_value',1234\)\s*;?\s*$/)
+       end
+
+       it 'does not try to insert anything into a production table' do
+         expect(production_database).to_not have_received(:exec).with(/insert/)
+       end
+     end
+
+     describe '#mock' do
+       let(:mock) do
+         {
+           'some_schema.some_table' => [
+             { 'some_column' => 'some_value', 'some_other_column' => 'some_other_value' },
+             { 'some_column' => 'another_value', 'some_other_column' => 'yet_another_value' }
+           ],
+           'some_other_schema.some_other_table' => [
+             { 'another_column' => 'some_value' }
+           ]
+         }
+       end
+
+       before(:each) do
+         @dummy = described_class.new(production_database)
+         allow(@dummy).to receive(:insert_mock_values)
+         @dummy.mock(mock)
+       end
+
+       it 'inserts the mock values' do
+         expect(@dummy).to have_received(:insert_mock_values).with('some_schema.some_table', { 'some_column' => 'some_value', 'some_other_column' => 'some_other_value' }).ordered
+         expect(@dummy).to have_received(:insert_mock_values).with('some_schema.some_table', { 'some_column' => 'another_value', 'some_other_column' => 'yet_another_value' }).ordered
+         expect(@dummy).to have_received(:insert_mock_values).with('some_other_schema.some_other_table', { 'another_column' => 'some_value'}).ordered
+       end
+     end
+
+     describe '#copy_table_defs_from_prod' do
+       let(:tables) { [{'some_schema' => 'some_table'}, {'some_other_schema' => 'some_other_table'}] }
+
+       before(:each) do
+         @dummy = described_class.new(production_database)
+         allow(@dummy).to receive(:copy_table_def_from_prod)
+         @dummy.copy_table_defs_from_prod(tables)
+       end
+
+       it 'triggers copies the individual table definitions from production' do
+         expect(@dummy).to have_received(:copy_table_def_from_prod).with('some_schema', 'some_table').ordered
+         expect(@dummy).to have_received(:copy_table_def_from_prod).with('some_other_schema', 'some_other_table').ordered
+       end
+     end
+
+     describe '#copy_table_def_from_prod' do
+       let(:schema) { 'some_schema' }
+       let(:table) { 'some_table' }
+       let(:some_table_definition) { 'some table definition' }
+
+       before(:each) do
+         @dummy = described_class.new(production_database)
+         allow(@dummy).to receive(:_get_create_table_statement).and_return(some_table_definition)
+         @dummy.copy_table_def_from_prod(schema, table)
+       end
+
+       it 'retrieves the table definition' do
+         expect(@dummy).to have_received(:_get_create_table_statement).with(schema, table)
+       end
+
+       it 'executes the retrieved table definition on the testing database' do
+         expect(testing_database).to have_received(:exec).with(some_table_definition)
+       end
+     end
+
+     describe '#_get_create_table_statement' do
+       let(:schema) { 'some_schema' }
+       let(:table) { 'some_table' }
+       let(:table_definition) do
+         [
+           {'schemaname' => 'some_schema', 'tablename' => 'some_table', 'column' => 'some_column', 'type' => 'integer', 'encoding' => 'none', 'distkey' => 't', 'sortkey' => 1, 'notnull' => 't'},
+           {'schemaname' => 'some_schema', 'tablename' => 'some_table', 'column' => 'some_other_column', 'type' => 'character varying(255)', 'encoding' => 'none', 'distkey' => 'f', 'sortkey' => 0, 'notnull' => 't'},
+           {'schemaname' => 'some_schema', 'tablename' => 'some_table', 'column' => 'yet_another_column', 'type' => 'character(5)', 'encoding' => 'none', 'distkey' => 'f', 'sortkey' => 2, 'notnull' => 't'}
+         ]
+       end
+
+       before(:each) do
+         @dummy = described_class.new(production_database)
+       end
+
+       context 'in any case' do
+         before(:each) do
+           allow(production_database).to receive(:query).and_return(table_definition)
+           @dummy.send(:_get_create_table_statement, schema, table) rescue nil
+         end
+
+         it 'sets the search path and queries the table definition' do
+           expect(production_database).to have_received(:exec).with(/^\s*set search\_path\s+to\s+'\$user',\s*some\_schema\s*;\s*$/).ordered
+           expect(production_database).to have_received(:query).with(/^\s*select\s+\*\s+from\s+pg\_table\_def\s+where\s+schemaname\s*=\s*'some\_schema'\s+and\s+tablename\s*=\s*'some\_table'\s*;\s*$/).ordered
+         end
+       end
+
+       context 'when there is a table definition' do
+         before(:each) do
+           allow(production_database).to receive(:query).and_return(table_definition)
+           allow(table_definition).to receive(:num_tuples).and_return(1)
+         end
+
+         it 'does not raise an error' do
+           expect { @dummy.send(:_get_create_table_statement, schema, table) }.to_not raise_error
+         end
+
+         it 'returns a correctly parsed schema' do
+           expect(@dummy.send(:_get_create_table_statement, schema, table)).to match(
+             /^\s*create\s+table\s+if\s+not\s+exists\s+some\_schema\.some\_table\s+\(\s*some\_column\s+integer\s+(not|default)\s+null\s*,\s*some\_other\_column\s+character\s+varying\(255\)\s+(not|default)\s+null\s*,\s*yet\_another\_column\s+character\(5\)\s+(not|default)\s+null\)\s+distkey\s*\(\s*some\_column\s*\)\s+sortkey\s*\(\s*some\_column\s*,\s*yet\_another\_column\s*\)\s*;\s*$/
+           )
+         end
+
+         it 'returns the parsed schema with all columns allowing null values' do
+           expect(@dummy.send(:_get_create_table_statement, schema, table)).to match(
+             /^\s*create\s+table\s+if\s+not\s+exists\s+some\_schema\.some\_table\s+\(\s*some\_column\s+integer\s+default\s+null\s*,\s*some\_other\_column\s+character\s+varying\(255\)\s+default\s+null\s*,\s*yet\_another\_column\s+character\(5\)\s+default\s+null\)\s+distkey\s*\(\s*some\_column\s*\)\s+sortkey\s*\(\s*some\_column\s*,\s*yet\_another\_column\s*\)\s*;\s*$/
+           )
+         end
+       end
+
+       context 'when there is no table definition' do
+         before(:each) do
+           allow(production_database).to receive(:query).and_return(table_definition)
+           allow(table_definition).to receive(:num_tuples).and_return(0)
+         end
+
+         it 'raises an error' do
+           expect { @dummy.send(:_get_create_table_statement, schema, table) }.to raise_error(RuntimeError, /^Sorry, there is no table information/)
+         end
+       end
+     end
+
+     describe '#destroy' do
+       before(:each) do
+         allow(production_database).to receive(:close)
+         allow(testing_database).to receive(:close)
+         @dummy = described_class.new(production_database)
+       end
+
+       context 'when the db shall be kept' do
+         before(:each) do
+           stub_const("#{described_class}::DELETE_DB_WHEN_FINISHED", false)
+           @dummy.destroy()
+         end
+
+         it 'closes the connection to the production database' do
+           expect(production_database).to have_received(:close)
+         end
+
+         it 'closes the connection to the testing database' do
+           expect(testing_database).to have_received(:close)
+         end
+
+         it 'does not drop the testing database' do
+           expect(production_database).to_not have_received(:exec).with(/^drop\s+database/)
+         end
+       end
+
+       context 'when the db may be deleted' do
+         before(:each) do
+           stub_const("#{described_class}::DELETE_DB_WHEN_FINISHED", true)
+           @dummy.destroy()
+         end
+
+         it 'closes the connection to the production database' do
+           expect(production_database).to have_received(:close)
+         end
+
+         it 'closes the connection to the testing database' do
+           expect(testing_database).to have_received(:close)
+         end
+
+         it 'does not drop the testing database' do
+           expect(production_database).to have_received(:exec).with(/^create\s+database\s+#{Regexp.quote(@dummy.instance_variable_get(:@test_db_name))}/).ordered
+           expect(production_database).to have_received(:exec).with(/^drop\s+database\s+#{Regexp.quote(@dummy.instance_variable_get(:@test_db_name))}$/).ordered
+         end
+       end
+     end
+   end
+ end
metadata ADDED
@@ -0,0 +1,175 @@
+ --- !ruby/object:Gem::Specification
+ name: squcumber-redshift
+ version: !ruby/object:Gem::Version
+   version: 0.1.2
+ platform: ruby
+ authors:
+ - Stefanie Grunwald
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2016-07-01 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: pg
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.18.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.18.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: cucumber
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 2.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: '3.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 2.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: '3.0'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 11.2.2
+     - - <
+       - !ruby/object:Gem::Version
+         version: '12.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 11.2.2
+     - - <
+       - !ruby/object:Gem::Version
+         version: '12.0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '3.1'
+     - - <
+       - !ruby/object:Gem::Version
+         version: '4.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '3.1'
+     - - <
+       - !ruby/object:Gem::Version
+         version: '4.0'
+ - !ruby/object:Gem::Dependency
+   name: rspec-collection_matchers
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.1.2
+     - - <
+       - !ruby/object:Gem::Version
+         version: '2.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.1.2
+     - - <
+       - !ruby/object:Gem::Version
+         version: '2.0'
+ - !ruby/object:Gem::Dependency
+   name: codeclimate-test-reporter
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.4.3
+     - - <
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.4.3
+     - - <
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ description:
+ email: steffi@physics.org
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - Rakefile
+ - lib/squcumber-redshift.rb
+ - lib/squcumber-redshift/mock/database.rb
+ - lib/squcumber-redshift/rake/task.rb
+ - lib/squcumber-redshift/step_definitions/common_steps.rb
+ - lib/squcumber-redshift/support/database.rb
+ - lib/squcumber-redshift/support/matchers.rb
+ - lib/squcumber-redshift/support/output.rb
+ - spec/spec_helper.rb
+ - spec/squcumber-redshift/mock/database_spec.rb
+ homepage: https://github.com/moertel/sQucumber-redshift
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '2.0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.5
+ signing_key:
+ specification_version: 3
+ summary: Define and execute SQL integration tests for AWS Redshift
+ test_files:
+ - spec/spec_helper.rb
+ - spec/squcumber-redshift/mock/database_spec.rb