db-factory 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/History.txt +8 -0
- data/VERSION +1 -0
- data/db-factory.gemspec +12 -0
- data/examples/simple_example_1/Readme.txt +23 -0
- data/examples/simple_example_1/create_objects.sql +35 -0
- data/examples/simple_example_1/simple_example_1.yml +63 -0
- data/examples/simple_example_1/simple_example_1_spec.rb +21 -0
- data/examples/simple_example_1/spec_helper.rb +52 -0
- data/lib/config.yml +15 -0
- data/lib/db-factory.rb +5 -0
- data/lib/db_factory/db_factory.rb +532 -0
- data/lib/db_factory/helpers.rb +32 -0
- data/lib/db_factory/version.rb +3 -0
- metadata +75 -0
data/History.txt
ADDED
data/VERSION
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
0.1.1
|
data/db-factory.gemspec
ADDED
@@ -0,0 +1,12 @@
|
|
1
|
+
Gem::Specification.new do |s|
|
2
|
+
s.name = 'db-factory'
|
3
|
+
s.version = '0.1.1'
|
4
|
+
s.date = '2013-04-16'
|
5
|
+
s.summary = "Prepare and compare test data for PL/SQL unit testing"
|
6
|
+
s.description = "db-factory uses definition files with YAML/ERB syntax to define test data (preconditions) for unit PL/SQL testing and can compare actual data with defined expected data."
|
7
|
+
s.authors = ["Stefan Cibiri"]
|
8
|
+
s.email = 'stefancibiri@yahoo.com'
|
9
|
+
s.files = ["History.txt", "VERSION", "db-factory.gemspec", "examples/simple_example_1/Readme.txt", "examples/simple_example_1/create_objects.sql", "examples/simple_example_1/simple_example_1.yml", "examples/simple_example_1/simple_example_1_spec.rb", "examples/simple_example_1/spec_helper.rb", "lib/config.yml", "lib/db-factory.rb", "lib/db_factory/db_factory.rb", "lib/db_factory/helpers.rb", "lib/db_factory/version.rb"]
|
10
|
+
s.homepage = "https://github.com/stci/db-factory"
|
11
|
+
s.add_dependency('ruby-plsql')
|
12
|
+
end
|
@@ -0,0 +1,23 @@
|
|
1
|
+
Simple Example 1 Readme
|
2
|
+
-----------------------
|
3
|
+
|
4
|
+
1) run "create_objects.sql" under e.g. SCOTT schema (which normally should be the schema where your application and data is)
|
5
|
+
|
6
|
+
2) create user TEST_USER with these privileges
|
7
|
+
grant connect to TEST_USER;
|
8
|
+
grant delete_catalog_role to TEST_USER;
|
9
|
+
grant execute_catalog_role to TEST_USER;
|
10
|
+
grant select_catalog_role to TEST_USER;
|
11
|
+
grant alter any table to TEST_USER;
|
12
|
+
grant create role to TEST_USER;
|
13
|
+
grant delete any table to TEST_USER;
|
14
|
+
grant execute any procedure to TEST_USER;
|
15
|
+
grant flashback any table to TEST_USER;
|
16
|
+
grant insert any table to TEST_USER;
|
17
|
+
grant select any dictionary to TEST_USER;
|
18
|
+
grant select any sequence to TEST_USER;
|
19
|
+
grant select any table to TEST_USER;
|
20
|
+
grant update any table to TEST_USER;
|
21
|
+
|
22
|
+
3) execute in command line:
|
23
|
+
rspec simple_example_1_spec.rb
|
@@ -0,0 +1,35 @@
|
|
1
|
+
create table DEPARTEMENTS
|
2
|
+
(
|
3
|
+
departement_id NUMBER,
|
4
|
+
departement_name VARCHAR2(30),
|
5
|
+
coeficient number(5,2)
|
6
|
+
);
|
7
|
+
|
8
|
+
create table EMPLOYEES
|
9
|
+
(
|
10
|
+
employee_id NUMBER,
|
11
|
+
departement_id NUMBER,
|
12
|
+
first_name VARCHAR2(30),
|
13
|
+
last_name VARCHAR2(30),
|
14
|
+
hire_date DATE,
|
15
|
+
phone_number VARCHAR2(30),
|
16
|
+
salary NUMBER(12,2)
|
17
|
+
);
|
18
|
+
|
19
|
+
create or replace function salary_update(dep_id_from number,
|
20
|
+
dep_id_to number) return number is
|
21
|
+
begin
|
22
|
+
update employees e
|
23
|
+
set e.salary = e.salary *
|
24
|
+
(select coeficient
|
25
|
+
from departements d
|
26
|
+
where d.departement_id = e.departement_id)
|
27
|
+
where e.departement_id between dep_id_from and dep_id_to;
|
28
|
+
|
29
|
+
return 0;
|
30
|
+
|
31
|
+
exception
|
32
|
+
when others then
|
33
|
+
dbms_output.put_line(sqlerrm);
|
34
|
+
return 1;
|
35
|
+
end;
|
@@ -0,0 +1,63 @@
|
|
1
|
+
--- %FORMAT: 1.0
|
2
|
+
|
3
|
+
CONSTANTS:
|
4
|
+
- HIRE_DATE : &HD
|
5
|
+
<%= Date.today %>
|
6
|
+
|
7
|
+
DEFAULTS:
|
8
|
+
tables:
|
9
|
+
|
10
|
+
- "scott.employees": {
|
11
|
+
:hire_date : *HD,
|
12
|
+
:last_name : 'SMITH'
|
13
|
+
}
|
14
|
+
|
15
|
+
COMMON:
|
16
|
+
setup:
|
17
|
+
tables:
|
18
|
+
|
19
|
+
- "scott.departements":
|
20
|
+
delete:
|
21
|
+
condition: "1=1"
|
22
|
+
columns:
|
23
|
+
[:departement_id, :departement_name, :coeficient]
|
24
|
+
data:
|
25
|
+
- [1 , 'IT' , 1.15 ]
|
26
|
+
- [2 , 'Administration' , 1.10 ]
|
27
|
+
- [3 , 'Sales' , 0.95 ]
|
28
|
+
- [4 , 'Other' , ~ ]
|
29
|
+
|
30
|
+
SALARY:
|
31
|
+
setup:
|
32
|
+
tables:
|
33
|
+
|
34
|
+
- "scott.employees":
|
35
|
+
delete:
|
36
|
+
condition: "last_name = 'SMITH'"
|
37
|
+
columns:
|
38
|
+
[:employee_id, :departement_id, :first_name, :salary]
|
39
|
+
data:
|
40
|
+
- [1 , 1 , 'STEVE' , 100.00 ]
|
41
|
+
- [2 , 1 , 'BOB' , 120.00 ]
|
42
|
+
- [3 , 2 , 'COLIN' , 180.30 ]
|
43
|
+
- [4 , 3 , 'PHIL' , 280.00 ]
|
44
|
+
- [5 , 4 , 'JHON' , 88.00 ]
|
45
|
+
|
46
|
+
postconditions:
|
47
|
+
tables:
|
48
|
+
|
49
|
+
- "scott.employees":
|
50
|
+
filter: "last_name = 'SMITH' and departement_id <> 4"
|
51
|
+
connect keys: [:employee_id]
|
52
|
+
columns:
|
53
|
+
[:employee_id, :salary]
|
54
|
+
stage-1:
|
55
|
+
- [1 , 115.00 ]
|
56
|
+
- [2 , 138.00 ]
|
57
|
+
- [3 , 198.33 ]
|
58
|
+
- [4 , 280.00 ]
|
59
|
+
expected data:
|
60
|
+
- [1 , 115.00 ]
|
61
|
+
- [2 , 138.00 ]
|
62
|
+
- [3 , 198.33 ]
|
63
|
+
- [4 , 266.00 ]
|
@@ -0,0 +1,21 @@
|
|
1
|
+
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
|
2
|
+
|
3
|
+
describe "Simple example 1" do
|
4
|
+
|
5
|
+
DBFactory.load('simple_example_1.yml')
|
6
|
+
|
7
|
+
before(:all) do
|
8
|
+
DBFactory.setup('SALARY')
|
9
|
+
end
|
10
|
+
|
11
|
+
it "should update salary of employees according coeficient defined on department" do
|
12
|
+
|
13
|
+
plsql.scott.salary_update(1, 2).should == 0
|
14
|
+
DBFactory.evaluate('stage-1').should == true
|
15
|
+
|
16
|
+
plsql.scott.salary_update(3, 3).should == 0
|
17
|
+
DBFactory.evaluate().should == true
|
18
|
+
|
19
|
+
end
|
20
|
+
|
21
|
+
end
|
@@ -0,0 +1,52 @@
|
|
1
|
+
require "rspec"
|
2
|
+
require "ruby-plsql"
|
3
|
+
require "db-factory"
|
4
|
+
|
5
|
+
$LOAD_PATH << File.dirname(__FILE__) + '/factories'
|
6
|
+
|
7
|
+
# Establish connection to database where tests will be performed.
|
8
|
+
# Change according to your needs.
|
9
|
+
DATABASE_USER = "TEST_USER"
|
10
|
+
DATABASE_PASSWORD = "TEST_USER"
|
11
|
+
DATABASE_NAME = "DEV_DB" # TNS
|
12
|
+
|
13
|
+
plsql.connect! DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME
|
14
|
+
|
15
|
+
# Set autocommit to false so that automatic commits after each statement are _not_ performed
|
16
|
+
plsql.connection.autocommit = false
|
17
|
+
# reduce network traffic in case of large resultsets
|
18
|
+
plsql.connection.prefetch_rows = 100
|
19
|
+
# uncomment to log DBMS_OUTPUT to standard output
|
20
|
+
# plsql.dbms_output_stream = STDOUT
|
21
|
+
|
22
|
+
# Do logoff when exiting to ensure that session temporary tables
|
23
|
+
# (used when calling procedures with table types defined in packages)
|
24
|
+
at_exit do
|
25
|
+
plsql.logoff
|
26
|
+
end
|
27
|
+
|
28
|
+
RSpec.configure do |config|
|
29
|
+
config.before(:each) do |test|
|
30
|
+
plsql.savepoint "before_each"
|
31
|
+
|
32
|
+
plsql.sys.dbms_application_info.set_module(:module_name => test_filename, :action_name => test.example.metadata[:example_group][:full_description]);
|
33
|
+
end
|
34
|
+
config.after(:each) do
|
35
|
+
# Always perform rollback to savepoint after each test
|
36
|
+
plsql.rollback_to "before_each"
|
37
|
+
end
|
38
|
+
config.after(:all) do
|
39
|
+
# Always perform rollback after each describe block
|
40
|
+
plsql.rollback
|
41
|
+
end
|
42
|
+
end
|
43
|
+
|
44
|
+
# require all helper methods which are located in any helpers subdirectories
|
45
|
+
Dir[File.dirname(__FILE__) + '/**/helpers/*.rb'].each {|f| require f}
|
46
|
+
|
47
|
+
# require all factory modules which are located in any factories subdirectories
|
48
|
+
Dir[File.dirname(__FILE__) + '/**/factories/*.rb'].each {|f| require f}
|
49
|
+
|
50
|
+
# Add source directory to load path where PL/SQL example procedures are defined.
|
51
|
+
# It is not required if PL/SQL procedures are already loaded in test database in some other way.
|
52
|
+
$:.push File.dirname(__FILE__) + '/../../source'
|
data/lib/config.yml
ADDED
@@ -0,0 +1,15 @@
|
|
1
|
+
# configuration file
|
2
|
+
|
3
|
+
GENERAL:
|
4
|
+
supported format:
|
5
|
+
- 1.0
|
6
|
+
validation schema: validator.yml
|
7
|
+
|
8
|
+
LOG:
|
9
|
+
path : .
|
10
|
+
filename : <%= "DBFactory.#{Date.today().to_s()}.log" %>
|
11
|
+
date format: "%Y-%m-%d %H:%M:%S"
|
12
|
+
level : <%= Logger::DEBUG %>
|
13
|
+
|
14
|
+
DIFF:
|
15
|
+
filename_tpl: "diff-#CASE#.yml"
|
data/lib/db-factory.rb
ADDED
@@ -0,0 +1,532 @@
|
|
1
|
+
require "logger"
|
2
|
+
require "bigdecimal/util"
|
3
|
+
require "yaml"
|
4
|
+
require "erb"
|
5
|
+
|
6
|
+
class DBFactoryClass
|
7
|
+
|
8
|
+
@cfg
|
9
|
+
@log
|
10
|
+
@definition
|
11
|
+
@case
|
12
|
+
@scn
|
13
|
+
|
14
|
+
def initialize()
|
15
|
+
l_erb = ERB.new(File.read("#{ROOT}/config.yml"))
|
16
|
+
@cfg = YAML.load(l_erb.result(binding).to_s)
|
17
|
+
|
18
|
+
@log = Logger.new("#{@cfg['LOG']['path']}/#{@cfg['LOG']['filename']}")
|
19
|
+
@log.info "================ Initializing DBFactory ================"
|
20
|
+
@log.debug "[ DBFactory::initialize(...) ]"
|
21
|
+
|
22
|
+
@log.datetime_format = @cfg['LOG']['date format']
|
23
|
+
@log.level = @cfg['LOG']['level']
|
24
|
+
|
25
|
+
return self
|
26
|
+
end
|
27
|
+
|
28
|
+
# Loads and parses a YAML/ERB definition file for the test data factory.
# The file's CONSTANTS section is read first so it is available (via the
# local binding) during the ERB pass over the rest of the file.
# Sets @definition and resets @case to 'COMMON'.
# Raises if the file cannot be parsed or its %FORMAT version is not listed
# under the configuration key 'supported format'.
def load(definition_file)
  @log.debug "[ DBFactory::load(...) ]"
  @log.debug " > definition_file = <#{definition_file}>"

  begin
    @log.debug "Reading constants ..."
    constants = YAML.load_file(definition_file)['CONSTANTS'].reduce(Hash.new, :merge)
  rescue Exception => e
    @log.error "[ DBFactory::load(...) ] ERROR: <#{e.to_s}>"
    @log.error "Error occurred during load of definition file <#{definition_file}>"
    raise "Check definition file <#{definition_file}>: #{e.to_s}"
  end

  @log.debug "Injecting constants ..."
  # Strip comment-only lines and blank lines before running the ERB pass.
  l_erb = ERB.new(File.read(definition_file).strip.gsub(/^\s*#.*/,'').gsub(/\n^\s*$/,''))

  @log.debug "Parsing definition file ..."
  # `constants` above is intentionally kept in scope: ERB templates in the
  # definition file may reference it through `binding`.
  @definition = YAML.load(l_erb.result(binding).to_s)
  @case = 'COMMON'

  raise "Unsupported version of YAML file. Supported file versions: #{@cfg['GENERAL']['supported format'].to_s}" unless @cfg['GENERAL']['supported format'].include?(@definition['%FORMAT'])
end
|
54
|
+
|
55
|
+
def setup(casename = 'COMMON')
|
56
|
+
@log.debug "[ DBFactory::setup(...) ]"
|
57
|
+
@log.debug " > casename = <#{casename}>"
|
58
|
+
|
59
|
+
if casename != 'COMMON'
|
60
|
+
setup()
|
61
|
+
else
|
62
|
+
@scn = plsql.select("select current_scn from v$database")[0][:current_scn]
|
63
|
+
@log.info("CURRENT SCN from DB = <#{@scn}>")
|
64
|
+
end
|
65
|
+
|
66
|
+
@case = casename
|
67
|
+
|
68
|
+
if @definition[@case].include?('cleanup')
|
69
|
+
@definition[@case]['cleanup']['tables'].each { |t|
|
70
|
+
cleanup_table(t)
|
71
|
+
}
|
72
|
+
end
|
73
|
+
|
74
|
+
# puts @definition[@case] unless casename == 'COMMON'
|
75
|
+
@definition[@case]['setup']['tables'].each { |t|
|
76
|
+
prepare_table(t)
|
77
|
+
}
|
78
|
+
|
79
|
+
end
|
80
|
+
|
81
|
+
# Compares actual table data with the expected data defined for the
# current case. When +stage+ is given (must contain the string 'stage'),
# only postcondition table sections that define that stage are evaluated;
# otherwise the default 'expected data' sections are used.
# Returns true when all checked tables match, false otherwise.
def check_postconditions(stage = nil)
  @log.debug "[ DBFactory::check_postconditions(...) ]"
  l_result = true
  raise "Use stage name (must contain string 'stage-[stage name]'" unless stage.nil? || stage.include?('stage')
  l_diff_file = @cfg['DIFF']['filename_tpl'].gsub('#CASE#', @case)
  # Remove a stale diff file from a previous run (results are appended).
  # The original `unless not File.exist?` double negative is simplified.
  File.delete(l_diff_file) if File.exist?(l_diff_file)
  @definition[@case]['postconditions']['tables'].each { |t|
    next unless stage.nil? || t.values[0].include?(stage)
    # Evaluate every table even after a failure: the per-table check also
    # records differences into the diff file as a side effect.
    l_result = false unless check_postconditions_for_table(t, stage)
  }

  if l_result
    @log.info("> DBFactory::evaluate(...) ... PASS")
  else
    @log.warn("> DBFactory::evaluate(...) ... FAIL")
  end

  l_result
end
|
103
|
+
|
104
|
+
def flashback()
|
105
|
+
@log.debug "[ DBFactory::flashback(...) ]"
|
106
|
+
l_result = true
|
107
|
+
@definition[@case]['flashback']['tables'].each { |t|
|
108
|
+
table_owner = t.split('.')[0].upcase
|
109
|
+
table_name = t.split('.')[1].upcase
|
110
|
+
@log.debug " > table_owner = <#{table_owner}>"
|
111
|
+
@log.debug " > table_name = <#{table_name}>"
|
112
|
+
@log.debug " > scn = <#{@scn}>"
|
113
|
+
l_result = plsql.flashback_table(table_owner, table_name, @scn) unless l_result == false
|
114
|
+
@log.debug "...DONE"
|
115
|
+
}
|
116
|
+
@scn = nil
|
117
|
+
|
118
|
+
return l_result
|
119
|
+
end
|
120
|
+
|
121
|
+
private
|
122
|
+
|
123
|
+
# Executes a raw SQL statement through the ruby-plsql connection.
# Logs and re-raises any error.
def execute_sql(sql_command)
  # Log label fixed: it previously said "DBFactory::sql_command".
  @log.debug "[ DBFactory::execute_sql(...) ]"
  @log.debug " > sql_command = <#{sql_command}>"

  begin
    # Pass the statement directly. The previous
    #   eval("plsql.execute('#{sql_command}')")
    # allowed arbitrary Ruby code injection and broke on any statement
    # containing a single quote; eval adds nothing here.
    plsql.execute(sql_command)
  rescue Exception => e
    @log.error "[ DBFactory::execute_sql(...) ] ERROR: <#{e.to_s}>"
    @log.error "Error occurred during execution of SQL command <#{sql_command}>"
    raise
  end
end
|
135
|
+
|
136
|
+
# Deletes rows from the given table using the 'delete' -> 'condition'
# clause of its cleanup definition. +table+ is a one-entry hash of
# {tablename => config}. Raises on any failure.
def cleanup_table(table)
  @log.debug "[ DBFactory::cleanup_table(...) ]"

  begin
    l_delete = table.values[0]['delete']['condition']
    @log.debug "Using delete condition: <#{l_delete}>"
    l_object_id = plsql.find_table(table.keys[0])
    l_object_id.delete("WHERE #{l_delete}")
  rescue Exception => e
    @log.error "[ DBFactory::cleanup(...) ] ERROR: <#{e.to_s}>"
    # Message typos fixed ("occured", "procedutre").
    raise "Error occurred during cleanup procedure"
  end
end
|
152
|
+
|
153
|
+
def delete(table)
|
154
|
+
@log.debug "[ DBFactory::delete(...) ]"
|
155
|
+
|
156
|
+
begin
|
157
|
+
if table.values[0].include?('delete')
|
158
|
+
l_delete = table.values[0]['delete']['condition']
|
159
|
+
else
|
160
|
+
l_delete = '1=0'
|
161
|
+
end
|
162
|
+
l_object_id = plsql.find_table(table.keys[0])
|
163
|
+
l_object_id.delete("WHERE #{l_delete}") unless l_delete.nil?
|
164
|
+
rescue Exception => e
|
165
|
+
@log.error " >> ERROR: <#{e.to_s}>"
|
166
|
+
raise "Failed on delete statement."
|
167
|
+
end
|
168
|
+
end
|
169
|
+
|
170
|
+
def insert(table)
|
171
|
+
@log.debug "[ DBFactory::insert(...) ]"
|
172
|
+
|
173
|
+
begin
|
174
|
+
tablename = table.keys[0]
|
175
|
+
l_data = data(table)
|
176
|
+
l_object_id = plsql.find_table(tablename)
|
177
|
+
l_object_id.insert(l_data)
|
178
|
+
@log.info("#{l_data.size} records were inserted into <#{tablename}> table")
|
179
|
+
rescue Exception => e
|
180
|
+
raise "[ DBFactory::insert ]: Failed insert data. #{e.to_s}"
|
181
|
+
end
|
182
|
+
end
|
183
|
+
|
184
|
+
def sql_statement(table)
|
185
|
+
@log.debug "[ DBFactory::sql_statement(...) ]"
|
186
|
+
|
187
|
+
begin
|
188
|
+
l_statement = table.values[0]['sql statement']
|
189
|
+
execute_sql(l_statement) unless l_statement.nil?
|
190
|
+
rescue Exception => e
|
191
|
+
@log.error " >> ERROR: <#{e.to_s}>"
|
192
|
+
raise "Failed on SQL statement"
|
193
|
+
end
|
194
|
+
end
|
195
|
+
|
196
|
+
def other_column_data(table)
|
197
|
+
@log.debug "[ DBFactory::other_column_data(...) ]"
|
198
|
+
|
199
|
+
tablename = table.keys[0]
|
200
|
+
begin
|
201
|
+
l_object_id = plsql.find_table(tablename)
|
202
|
+
l_other_column_data = l_object_id.first || {}
|
203
|
+
rescue Exception => e
|
204
|
+
raise "[ DBFactory::other_column_data ]: #{e.to_s}"
|
205
|
+
end
|
206
|
+
|
207
|
+
return l_other_column_data
|
208
|
+
end
|
209
|
+
|
210
|
+
def defaults(table)
|
211
|
+
@log.debug "[ DBFactory::defaults(...) ]"
|
212
|
+
|
213
|
+
l_other_column_data = other_column_data(table)
|
214
|
+
|
215
|
+
tablename = table.keys[0]
|
216
|
+
l_defaults1 = {}
|
217
|
+
begin
|
218
|
+
@definition['DEFAULTS']['tables'].each { |t|
|
219
|
+
if t.keys[0] == table.keys[0]
|
220
|
+
l_defaults1 = t.values[0] || {}
|
221
|
+
break
|
222
|
+
end
|
223
|
+
}
|
224
|
+
# if l_defaults1 != nil
|
225
|
+
# @log.debug "Loading of defaults for table <#{tablename}> was succesfull in <DEFAULTS> block"
|
226
|
+
# else
|
227
|
+
# l_defaults1 = {}
|
228
|
+
# end
|
229
|
+
rescue Exception => e
|
230
|
+
@log.error "Error: <#{e.to_s}>"
|
231
|
+
@log.warn "Loading of defaults for table <#{tablename}> failed in <DEFAULTS> block"
|
232
|
+
end
|
233
|
+
|
234
|
+
l_defaults2 = {}
|
235
|
+
begin
|
236
|
+
l_defaults2 = table.values[0]['defaults'] || {}
|
237
|
+
# if l_defaults2 != nil
|
238
|
+
# @log.debug "Loading of defaults for table <#{tablename}> was succesfull in <#{@case}> block"
|
239
|
+
# else
|
240
|
+
# l_defaults2 = {}
|
241
|
+
# end
|
242
|
+
rescue Exception => e
|
243
|
+
@log.error "[ DBFactory::defaults(...) ] ERROR: <#{e.to_s}>"
|
244
|
+
@log.warn "Loading of defaults for table <#{tablename}> failed in <#{@case}> block"
|
245
|
+
end
|
246
|
+
|
247
|
+
return(l_other_column_data.merge(l_defaults1).merge(l_defaults2))
|
248
|
+
end
|
249
|
+
|
250
|
+
def data(table)
|
251
|
+
@log.debug "[ DBFactory::data(...) ]"
|
252
|
+
|
253
|
+
table_def = table[table.keys[0]]
|
254
|
+
begin
|
255
|
+
l_data = []
|
256
|
+
l_defaults = defaults(table)
|
257
|
+
l_data_tmp = table_def['data']
|
258
|
+
|
259
|
+
case
|
260
|
+
when l_data_tmp.nil?
|
261
|
+
l_data = []
|
262
|
+
when l_data_tmp[0].instance_of?(Hash)
|
263
|
+
l_data_tmp.collect! { |values|
|
264
|
+
values = l_defaults.merge(values)
|
265
|
+
}
|
266
|
+
l_data = l_data_tmp
|
267
|
+
when l_data_tmp[0].instance_of?(Array)
|
268
|
+
l_columns = table_def['columns']
|
269
|
+
l_data_tmp.each_index { |i|
|
270
|
+
l_data[i] = {}
|
271
|
+
l_data_tmp[i].each_index { |k|
|
272
|
+
l_data[i] = l_data[i].merge({l_columns[k] => l_data_tmp[i][k]})
|
273
|
+
}
|
274
|
+
}
|
275
|
+
l_data.collect! { |values|
|
276
|
+
values = l_defaults.merge(values)
|
277
|
+
}
|
278
|
+
end
|
279
|
+
rescue Exception => e
|
280
|
+
raise "[ DBFactory::data ]: Failed loading of data. #{e.to_s}"
|
281
|
+
end
|
282
|
+
|
283
|
+
# convert FLOAT - otherwise OCI error will be rasied
|
284
|
+
l_data.each_index {|i|
|
285
|
+
l_data[i].each { |k,v|
|
286
|
+
l_data[i][k] = v.instance_of?(Float) ? v.to_d : v
|
287
|
+
}
|
288
|
+
}
|
289
|
+
return l_data
|
290
|
+
end
|
291
|
+
|
292
|
+
def prepare_table(table)
|
293
|
+
@log.debug "[ DBFactory::prepare_table(...) ]"
|
294
|
+
@log.debug ">> table: #{table.keys[0]}"
|
295
|
+
|
296
|
+
begin
|
297
|
+
tablename = table.keys[0]
|
298
|
+
l_object_id = plsql.find_table(tablename)
|
299
|
+
rescue Exception => e
|
300
|
+
raise "Failed to find table <#{tablename}> in DB"
|
301
|
+
end
|
302
|
+
|
303
|
+
# delete data according to delete condition in file
|
304
|
+
delete(table)
|
305
|
+
|
306
|
+
# execute SQL statement
|
307
|
+
sql_statement(table)
|
308
|
+
|
309
|
+
# insert defined data
|
310
|
+
insert(table)
|
311
|
+
|
312
|
+
end
|
313
|
+
|
314
|
+
def expected_columns(table)
|
315
|
+
l_data_tmp = table.values[0]['expected data']
|
316
|
+
case
|
317
|
+
when l_data_tmp[0].instance_of?(Hash)
|
318
|
+
l_expected_columns = l_data_tmp[0].keys
|
319
|
+
when l_data_tmp[0].instance_of?(Array)
|
320
|
+
l_expected_columns = table.values[0]['columns']
|
321
|
+
end
|
322
|
+
l_expected_columns
|
323
|
+
end
|
324
|
+
|
325
|
+
def expected_data(table, stage = nil)
|
326
|
+
@log.debug "[ DBFactory::expected_data(...) ]"
|
327
|
+
@log.debug " > stage = <#{stage}>"
|
328
|
+
|
329
|
+
stage ||= 'expected data'
|
330
|
+
|
331
|
+
l_data = []
|
332
|
+
begin
|
333
|
+
l_data_tmp = table.values[0][stage]
|
334
|
+
case
|
335
|
+
when l_data_tmp[0].instance_of?(Hash)
|
336
|
+
l_data = l_data_tmp
|
337
|
+
when l_data_tmp[0].instance_of?(Array)
|
338
|
+
l_columns = table.values[0]['columns']
|
339
|
+
# l_columns = expected_columns(tablename)
|
340
|
+
l_data_tmp.each_index { |i|
|
341
|
+
l_data[i] = {}
|
342
|
+
l_data_tmp[i].each_index { |k|
|
343
|
+
l_data[i] = l_data[i].merge({l_columns[k] => l_data_tmp[i][k]})
|
344
|
+
}
|
345
|
+
}
|
346
|
+
end
|
347
|
+
rescue Exception => e
|
348
|
+
@log.error "[ DBFactory::expected_data(...) ] ERROR: <#{e.to_s}>"
|
349
|
+
raise "Failed to get expected data."
|
350
|
+
end
|
351
|
+
|
352
|
+
# @log.info("There are currently #{l_data.size} records expected in <#{tablename}> table")
|
353
|
+
return l_data
|
354
|
+
end
|
355
|
+
|
356
|
+
def actual_data(table)
|
357
|
+
@log.debug "[ DBFactory::actual_data(...) ]"
|
358
|
+
|
359
|
+
l_object_id = plsql.find_table(table.keys[0])
|
360
|
+
begin
|
361
|
+
l_filter = table.values[0]['filter'] || '1=1'
|
362
|
+
@log.debug("filter: <#{l_filter}>")
|
363
|
+
l_data = l_object_id.all("WHERE #{l_filter}")
|
364
|
+
l_columns = expected_columns(table)
|
365
|
+
@log.debug "columns: <#{l_columns}>"
|
366
|
+
l_data.each_index { |i|
|
367
|
+
l_data[i].delete_if { |k,v| not l_columns.include?(k) }
|
368
|
+
}
|
369
|
+
l_data.each_index { |i|
|
370
|
+
l_data[i].each { |k,v|
|
371
|
+
if v.instance_of?(String)
|
372
|
+
l_data[i][k] = v.rstrip
|
373
|
+
end
|
374
|
+
if v.instance_of?(BigDecimal)
|
375
|
+
l_data[i][k] = v.to_f
|
376
|
+
end
|
377
|
+
}
|
378
|
+
}
|
379
|
+
rescue Exception => e
|
380
|
+
@log.error "[ DBFactory::actual_data(...) ] ERROR: <#{e.to_s}>"
|
381
|
+
# @log.warn "Loading actual data for table <#{tablename}> failed in <#{@case}> block"
|
382
|
+
raise "Failed to load actual data from DB." #or table <#{tablename}> failed in <#{@case}> block"
|
383
|
+
end
|
384
|
+
|
385
|
+
# @log.info("There are currently #{l_data.size} records in <#{tablename}> table")
|
386
|
+
return l_data
|
387
|
+
end
|
388
|
+
|
389
|
+
# Splits each data row into its primary-key part and the remaining
# columns. Each element of the returned array has the shape:
#   { "PK" => {pk_col => value, ...}, "DATA" => {other_col => value, ...} }
# NOTE: the input row hashes are modified in place (pk columns removed),
# matching the original implementation.
def pk_data_split(data, pk)
  data.map do |row|
    key_part = {}
    pk.each { |col| key_part[col] = row.delete(col) }
    { "PK" => key_part, "DATA" => row }
  end
end
|
402
|
+
|
403
|
+
# Collects the union of all distinct "PK" entries appearing in either
# split data set (as produced by pk_data_split), preserving the order in
# which each key is first seen.
def get_all_pk(data1, data2)
  (data1 + data2).each_with_object([]) do |row, acc|
    entry = { "PK" => row["PK"] }
    acc << entry unless acc.include?(entry)
  end
end
|
415
|
+
|
416
|
+
def check_postconditions_for_table(table, stage = nil)
|
417
|
+
@log.debug "[ DBFactory::check_postconditions_for_table(...) ]"
|
418
|
+
@log.debug " > stage = <#{stage}>"
|
419
|
+
|
420
|
+
tablename = table.keys[0]
|
421
|
+
l_data = nil
|
422
|
+
begin
|
423
|
+
l_expected_data = expected_data(table, stage)
|
424
|
+
l_actual_data = actual_data(table)
|
425
|
+
|
426
|
+
if l_expected_data == l_actual_data
|
427
|
+
@log.info("> DBFactory::compare_table(#{tablename}, #{stage}) ... PASS")
|
428
|
+
return true
|
429
|
+
end
|
430
|
+
|
431
|
+
@log.warn "Seems that some differences found when comparing actual and expected data in table <#{tablename}>"
|
432
|
+
|
433
|
+
# l_pk = @definition[@case]['postconditions']['tables'][tablename]['connect keys']
|
434
|
+
l_pk = table.values[0]['connect keys']
|
435
|
+
@log.debug "connect keys: <#{l_pk}>"
|
436
|
+
if l_pk == nil
|
437
|
+
raise "Missing <connect keys> definition"
|
438
|
+
end
|
439
|
+
|
440
|
+
te = pk_data_split(l_expected_data, l_pk) #.sort_by { |v| v.values_at(0) }
|
441
|
+
ta = pk_data_split(l_actual_data, l_pk) #.sort_by { |v| v.values_at(0) }
|
442
|
+
# puts te.inspect
|
443
|
+
# puts ta.inspect
|
444
|
+
d = []
|
445
|
+
get_all_pk(te, ta).each { |k|
|
446
|
+
vei = te.index { |v| v["PK"] == k["PK"] }
|
447
|
+
if vei != nil
|
448
|
+
ve = te[vei]
|
449
|
+
te.delete_at(vei)
|
450
|
+
else
|
451
|
+
ve = {"PK" => {}, "DATA" => {}}
|
452
|
+
end
|
453
|
+
|
454
|
+
# ve = te.detect { |v| v["PK"] == k["PK"] }
|
455
|
+
# if ve == nil
|
456
|
+
# ve = {"PK" => {}, "DATA" => {}}
|
457
|
+
# end
|
458
|
+
vai = ta.index { |v| v["PK"] == k["PK"] }
|
459
|
+
if vai != nil
|
460
|
+
va = ta[vai]
|
461
|
+
ta.delete_at(vai)
|
462
|
+
else
|
463
|
+
va = {"PK" => {}, "DATA" => {}}
|
464
|
+
end
|
465
|
+
# va = ta.detect { |v| v["PK"] == k["PK"] }
|
466
|
+
# if va == nil
|
467
|
+
# va = {"PK" => {}, "DATA" => {}}
|
468
|
+
# end
|
469
|
+
diff = va["DATA"].diff(ve["DATA"])
|
470
|
+
# puts diff.inspect
|
471
|
+
# d.push({"PK" => k["PK"], "DATA" => diff}) unless diff == {}
|
472
|
+
d.push(k["PK"].merge(diff)) unless diff == {}
|
473
|
+
}
|
474
|
+
|
475
|
+
if d.empty?
|
476
|
+
# puts "l_expected_datasize = <#{l_expected_data.size}>"
|
477
|
+
# puts l_expected_data.inspect
|
478
|
+
# puts "l_actual_data.size = <#{l_actual_data.size}>"
|
479
|
+
@log.info("> DBFactory::compare_table(#{tablename}, #{stage}) ... PASS")
|
480
|
+
return true
|
481
|
+
# return false
|
482
|
+
else
|
483
|
+
if stage.nil?
|
484
|
+
d = {"#{@case}" => {"#{tablename}" => d}}.to_yaml
|
485
|
+
else
|
486
|
+
d = {"#{@case}" => {"#{stage}" => {"#{tablename}" => d}}}.to_yaml
|
487
|
+
end
|
488
|
+
l_diff_file = @cfg['DIFF']['filename_tpl'].gsub('#CASE#',@case)
|
489
|
+
@log.info "Writing differences for table <#{tablename}> for <#{@case}> block into <#{l_diff_file}>"
|
490
|
+
l_fw = File.open(l_diff_file, "a")
|
491
|
+
l_fw.write(d)
|
492
|
+
l_fw.close
|
493
|
+
end
|
494
|
+
|
495
|
+
rescue Exception => e
|
496
|
+
@log.error "[ DBFactory::compare_table(...) ] ERROR: <#{e.to_s}>"
|
497
|
+
@log.warn "Comparison of actual and expected data for table <#{tablename}> failed in <#{@case}> block"
|
498
|
+
raise "Error occurred during comparison if data for table <#{tablename}> in <#{@case}> block"
|
499
|
+
end
|
500
|
+
|
501
|
+
@log.warn("> DBFactory::compare_table(#{tablename}, #{stage}) ... FAIL")
|
502
|
+
|
503
|
+
return false
|
504
|
+
end
|
505
|
+
|
506
|
+
end
|
507
|
+
|
508
|
+
module DBFactory
|
509
|
+
|
510
|
+
LOG = "#{ROOT}/log"
|
511
|
+
DEFAULTS = "#{ROOT}/defaults"
|
512
|
+
|
513
|
+
@instance
|
514
|
+
|
515
|
+
def self.load(*args)
|
516
|
+
@instance = DBFactoryClass.new
|
517
|
+
@instance.load(*args)
|
518
|
+
end
|
519
|
+
|
520
|
+
def self.setup(*args)
|
521
|
+
@instance.setup(*args)
|
522
|
+
end
|
523
|
+
|
524
|
+
def self.evaluate(*args)
|
525
|
+
@instance.check_postconditions(*args)
|
526
|
+
end
|
527
|
+
|
528
|
+
def self.flashback(*args)
|
529
|
+
@instance.flashback(*args)
|
530
|
+
end
|
531
|
+
|
532
|
+
end
|
@@ -0,0 +1,32 @@
|
|
1
|
+
module PLSQL
  class Schema
    # Returns the table object for a table. Examples:
    #   plsql.find_table('scott.my_table')
    #   plsql.find_table('scott', 'my_table')
    def find_table(*args)
      if args.size == 1
        table_owner, table_name = args[0].split('.')
      else
        # BUG FIX: with two arguments the owner comes first (see the
        # example above); the original assigned args[0] to table_name and
        # args[1] to table_owner, swapping them. It also skipped the
        # upcase applied in the single-argument form.
        table_owner = args[0]
        table_name  = args[1]
      end
      find_database_object(table_name.upcase, table_owner.upcase)
    end
  end
end
|
18
|
+
|
19
|
+
class Hash
  # Returns the difference between two hashes (actual values in self,
  # expected values in +other+) as a Hash mapping each differing key to
  # {"actual" => ..., "expected" => ...}. Keys present in either hash are
  # compared; a key missing from one side compares against nil.
  def diff(other)
    # `keys | other.keys` preserves first-seen order, same as the
    # original `keys.concat(other.keys).uniq`.
    (keys | other.keys).each_with_object({}) do |key, memo|
      # The original used `other[key] rescue {}` — Hash#[] never raises,
      # and the bare rescue modifier silently swallowed every error while
      # substituting a misleading {} sentinel. Plain access is equivalent.
      expected = other[key]
      memo[key] = { "actual" => self[key], "expected" => expected } unless self[key] == expected
    end
  end
end
|
metadata
ADDED
@@ -0,0 +1,75 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: db-factory
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.1
|
5
|
+
prerelease:
|
6
|
+
platform: ruby
|
7
|
+
authors:
|
8
|
+
- Stefan Cibiri
|
9
|
+
autorequire:
|
10
|
+
bindir: bin
|
11
|
+
cert_chain: []
|
12
|
+
date: 2013-04-16 00:00:00.000000000 Z
|
13
|
+
dependencies:
|
14
|
+
- !ruby/object:Gem::Dependency
|
15
|
+
name: ruby-plsql
|
16
|
+
requirement: !ruby/object:Gem::Requirement
|
17
|
+
none: false
|
18
|
+
requirements:
|
19
|
+
- - ! '>='
|
20
|
+
- !ruby/object:Gem::Version
|
21
|
+
version: '0'
|
22
|
+
type: :runtime
|
23
|
+
prerelease: false
|
24
|
+
version_requirements: !ruby/object:Gem::Requirement
|
25
|
+
none: false
|
26
|
+
requirements:
|
27
|
+
- - ! '>='
|
28
|
+
- !ruby/object:Gem::Version
|
29
|
+
version: '0'
|
30
|
+
description: db-factory uses definition files with YAML/ERB syntax to define test
|
31
|
+
data (preconditions) for unit PL/SQL testing and can compare actual data with defined
|
32
|
+
expected data.
|
33
|
+
email: stefancibiri@yahoo.com
|
34
|
+
executables: []
|
35
|
+
extensions: []
|
36
|
+
extra_rdoc_files: []
|
37
|
+
files:
|
38
|
+
- History.txt
|
39
|
+
- VERSION
|
40
|
+
- db-factory.gemspec
|
41
|
+
- examples/simple_example_1/Readme.txt
|
42
|
+
- examples/simple_example_1/create_objects.sql
|
43
|
+
- examples/simple_example_1/simple_example_1.yml
|
44
|
+
- examples/simple_example_1/simple_example_1_spec.rb
|
45
|
+
- examples/simple_example_1/spec_helper.rb
|
46
|
+
- lib/config.yml
|
47
|
+
- lib/db-factory.rb
|
48
|
+
- lib/db_factory/db_factory.rb
|
49
|
+
- lib/db_factory/helpers.rb
|
50
|
+
- lib/db_factory/version.rb
|
51
|
+
homepage: https://github.com/stci/db-factory
|
52
|
+
licenses: []
|
53
|
+
post_install_message:
|
54
|
+
rdoc_options: []
|
55
|
+
require_paths:
|
56
|
+
- lib
|
57
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
58
|
+
none: false
|
59
|
+
requirements:
|
60
|
+
- - ! '>='
|
61
|
+
- !ruby/object:Gem::Version
|
62
|
+
version: '0'
|
63
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
64
|
+
none: false
|
65
|
+
requirements:
|
66
|
+
- - ! '>='
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
requirements: []
|
70
|
+
rubyforge_project:
|
71
|
+
rubygems_version: 1.8.23
|
72
|
+
signing_key:
|
73
|
+
specification_version: 3
|
74
|
+
summary: Prepare and compare test data for PL/SQL unit testing
|
75
|
+
test_files: []
|