rspec-hive 0.1.0

@@ -0,0 +1,167 @@
+ require 'spec_helper'
+ require 'tempfile'
+
+ describe RSpec::Hive::Configuration do
+   RSpec.shared_examples('config') do
+     its(:host) do
+       is_expected.to eq(expected_host)
+     end
+
+     its(:port) do
+       is_expected.to eq(expected_port)
+     end
+
+     its(:host_shared_directory_path) do
+       is_expected.to match(expected_host_shared_directory_path)
+     end
+
+     its(:docker_shared_directory_path) do
+       is_expected.to eq(expected_docker_shared_directory_path)
+     end
+
+     its(:hive_version) do
+       is_expected.to eq(expected_hive_version)
+     end
+
+     its(:connection_timeout) do
+       is_expected.to eq(expected_timeout)
+     end
+
+     its(:hive_options) do
+       is_expected.to eq(expected_hive_options)
+     end
+   end
+
+   let(:expected_host_shared_directory_path) do
+     '/Users/Shared/tmp/spec-tmp-files'
+   end
+   let(:expected_docker_shared_directory_path) { '/tmp/spec-tmp-files' }
+   let(:expected_hive_version) { 10 }
+   let(:expected_timeout) { 1800 }
+   let(:expected_partition_flag) { 'true' }
+   let(:expected_partition_mode) { 'nonstrict' }
+   let(:expected_partition_pernodexi) { '100000' }
+   let(:expected_partitions) { '100000' }
+   let(:expected_java_opts) { '-Xmx2048m' }
+   let(:expected_hive_options) do
+     {'hive.exec.dynamic.partition' => expected_partition_flag,
+      'hive.exec.dynamic.partition.mode' => expected_partition_mode,
+      'hive.exec.max.dynamic.partitions.pernodexi' => expected_partition_pernodexi,
+      'hive.exec.max.dynamic.partitions' => expected_partitions,
+      'mapred.child.java.opts' => expected_java_opts}
+   end
+
+   context 'when no configuration file is provided' do
+     let(:expected_port) { 10000 }
+
+     before { allow(Dir).to receive(:mktmpdir) { mock_tmpdir } }
+
+     subject { described_class.new }
+
+     context 'when on Mac' do
+       let(:mock_tmpdir) { '/Users/Shared/test/' }
+       let(:expected_host) { '192.168.99.100' }
+       let(:expected_host_shared_directory_path) { '/Users/Shared/test/spec-tmp-files' }
+
+       before do
+         allow_any_instance_of(described_class).to receive(:mac?) { true }
+       end
+
+       include_examples('config')
+     end
+
+     context 'when on Linux' do
+       let(:mock_tmpdir) { '/tmp/test/' }
+       let(:expected_host) { '127.0.0.1' }
+       let(:expected_host_shared_directory_path) { '/tmp/test/spec-tmp-files' }
+
+       before do
+         allow_any_instance_of(described_class).to receive(:mac?) { false }
+       end
+
+       include_examples('config')
+     end
+   end
+
+   context 'when there is a configuration file' do
+     let(:path_to_config_file) do
+       Tempfile.open(%w(config .yml)) do |f|
+         f.write yaml_hash.to_yaml
+         f.path
+       end
+     end
+     let(:expected_host) { '127.0.0.2' }
+     let(:expected_port) { 10001 }
+
+     context 'where all parameters are present' do
+       let(:yaml_hash) do
+         {
+           'hive' =>
+             {
+               'host' => '127.0.0.2',
+               'port' => 10001,
+               'host_shared_directory_path' => expected_host_shared_directory_path,
+               'docker_shared_directory_path' => expected_docker_shared_directory_path,
+               'hive_version' => '10',
+               'timeout' => 1800
+             }
+         }
+       end
+
+       after { File.unlink(path_to_config_file) }
+
+       subject { described_class.new(path_to_config_file) }
+
+       include_examples('config')
+     end
+
+     context 'where there are only required parameters' do
+       let(:yaml_hash) do
+         {
+           'hive' =>
+             {
+               'host' => '127.0.0.2',
+               'port' => 10001,
+               'host_shared_directory_path' => expected_host_shared_directory_path,
+               'docker_shared_directory_path' => expected_docker_shared_directory_path
+             }
+         }
+       end
+       let(:expected_hive_version) { 10 }
+
+       after { File.unlink(path_to_config_file) }
+
+       subject { described_class.new(path_to_config_file) }
+
+       include_examples('config')
+     end
+
+     context 'where there are both required and optional parameters' do
+       let(:yaml_hash) do
+         {
+           'hive' =>
+             {
+               'host' => '127.0.0.2',
+               'port' => 10001,
+               'host_shared_directory_path' => expected_host_shared_directory_path,
+               'docker_shared_directory_path' => expected_docker_shared_directory_path,
+               'hive_version' => 11,
+               'timeout' => 60,
+               'hive_options' => {
+                 'mapred.child.java.opts' => '-Xmx64m'
+               }
+             }
+         }
+       end
+       let(:expected_timeout) { 60 }
+       let(:expected_hive_version) { 11 }
+       let(:expected_java_opts) { '-Xmx64m' }
+
+       after { File.unlink(path_to_config_file) }
+
+       subject { described_class.new(path_to_config_file) }
+
+       include_examples('config')
+     end
+   end
+ end
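
Read together, these examples pin down the YAML layout that RSpec::Hive::Configuration accepts: a top-level 'hive' key with the required host, port, host_shared_directory_path and docker_shared_directory_path, plus optional hive_version, timeout and hive_options. The sketch below is illustrative only; the file name is an arbitrary assumption and the gem is assumed to be already loaded (for example through spec_helper).

# Illustrative sketch: write a config file shaped like the yaml_hash above and
# hand its path to the constructor. 'hive_config.yml' is an example name, not
# a convention of the gem.
require 'yaml'

config_hash = {
  'hive' => {
    'host' => '127.0.0.2',                                       # required
    'port' => 10001,                                             # required
    'host_shared_directory_path' => '/Users/Shared/tmp/spec-tmp-files',
    'docker_shared_directory_path' => '/tmp/spec-tmp-files',
    'hive_version' => '10',                                      # optional
    'timeout' => 1800,                                           # optional
    'hive_options' => { 'mapred.child.java.opts' => '-Xmx64m' }  # optional
  }
}
File.write('hive_config.yml', config_hash.to_yaml)

# With a path, values come from the file; with no argument, the defaults
# exercised above apply (port 10000, host and shared directory derived from
# the platform and a temporary directory).
config = RSpec::Hive::Configuration.new('hive_config.yml')
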
@@ -0,0 +1,311 @@
+ require 'spec_helper'
+
+ describe RSpec::Hive::ConnectionDelegator do
+   describe '#load_into_table' do
+     let(:host_shared_directory_path) { '/tmp/host' }
+     let(:docker_file_path) { '/tmp/docked/test_file' }
+     let(:config) do
+       double(
+         RSpec::Hive::Configuration,
+         host_shared_directory_path: host_shared_directory_path
+       )
+     end
+     let(:delimiter) { "\t" }
+     let(:table_name) { 'test_table' }
+     let(:table_schema) { instance_double(RBHive::TableSchema, name: table_name) }
+     let(:connection) { double('Connection') }
+     let(:file_mock) { double(Tempfile) }
+
+     let(:values) { ['a', 'b', 1] }
+
+     before do
+       table_schema.instance_variable_set(:@field_sep, delimiter)
+
+       expect(Tempfile).to receive(:open).
+         with(table_name, host_shared_directory_path).and_yield(file_mock)
+
+       expect(subject).to receive(:docker_path).
+         with(file_mock) { docker_file_path }
+
+       expect(subject).to receive(:write_values_to_file).
+         with(file_mock, values, "\t").once
+     end
+
+     context 'without partitions' do
+       before do
+         expect(subject).to receive(:load_file_to_hive_table).
+           with(table_name, docker_file_path, nil).once
+
+         expect(subject).not_to receive(:partition_clause)
+       end
+
+       subject { described_class.new(connection, config) }
+
+       it do
+         subject.load_into_table(table_schema, values)
+       end
+     end
+
+     context 'with partitions' do
+       let(:partitions) { {day: '20160101', hm: '2020'} }
+       let(:partition_query) { "PARTITION(day='20160101',hm='2020')" }
+
+       before do
+         expect(subject).to receive(:load_file_to_hive_table).
+           with(table_name, docker_file_path, partition_query).once
+         expect(subject).to receive(:partition_clause).
+           with(partitions) { partition_query }
+       end
+
+       subject { described_class.new(connection, config) }
+
+       it do
+         subject.load_into_table(table_schema, values, partitions)
+       end
+     end
+   end
+
+   describe '#load_partitions' do
+     let(:config) { double('Config') }
+     let(:connection) { double('Connection') }
+
+     let(:table_name) { 'test_table' }
+     let(:partitions) do
+       [{dth: 'mon', country: 'us'}, {dth: 'tue', country: 'us'}]
+     end
+     let(:partition_query) do
+       "PARTITION(dth='mon',country='us') PARTITION(dth='tue',country='us')"
+     end
+
+     let(:executed_query) do
+       "ALTER TABLE test_table ADD PARTITION(dth='mon',country='us') PARTITION(dth='tue',country='us')"
+     end
+
+     before do
+       expect(subject).to receive(:partition_clause).
+         with(partitions) { partition_query }
+       expect(connection).to receive(:execute).with(executed_query)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.load_partitions(table_name, partitions)
+     end
+   end
+
+   describe '#partition_clause' do
+     let(:config) { double('Config') }
+     let(:connection) { double('Connection') }
+
+     context 'with single partition' do
+       let(:partitions) { {day: '20160101', hm: '2020'} }
+       let(:partition_query) { "PARTITION(day='20160101',hm='2020')" }
+
+       subject { described_class.new(connection, config) }
+
+       it 'translates partition hash to single query' do
+         expect(subject.send(:partition_clause, partitions)).to eq(partition_query)
+       end
+     end
+
+     context 'with multiple partitions' do
+       let(:partitions) { [{day: 'mon', hm: '2020'}, {day: 'tue', hm: '2020'}, {day: 'mon', hm: '2030'}] }
+       let(:partition_query) do
+         "PARTITION(day='mon',hm='2020') PARTITION(day='tue',hm='2020') PARTITION(day='mon',hm='2030')"
+       end
+
+       subject { described_class.new(connection, config) }
+
+       it 'translates partition hash to combined query' do
+         expect(subject.send(:partition_clause, partitions)).to eq(partition_query)
+       end
+     end
+   end
+
+   describe '#write_values_to_file' do
+     let(:file) { StringIO.new }
+     let(:values) do
+       [['a', 'b', 1],
+        ['aa', 'bb', 22]]
+     end
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:delimiter) { '|' }
+     let(:expected_file_content) { "a|b|1\naa|bb|22\n" }
+
+     subject { described_class.new(connection, config) }
+
+     it 'writes values to file in correct format' do
+       subject.send(:write_values_to_file, file, values, delimiter)
+       file.rewind
+       expect(file.read).to eq(expected_file_content)
+     end
+   end
+
+   describe '#load_file_to_hive_table' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:table_name) { 'test_table' }
+     let(:file_path) { '/tmp/test' }
+     let(:execute_text) do
+       "load data local inpath '/tmp/test' into table test_table"
+     end
+
+     before do
+       expect(connection).to receive(:execute).with(execute_text)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.send(:load_file_to_hive_table, table_name, file_path)
+     end
+   end
+
+   describe '#docker_path' do
+     let(:file_mock) { double(File) }
+     let(:file_name) { 'testfile' }
+     let(:file_host_path) { '/tmp/host/testfile' }
+     let(:expected_file_path) { '/tmp/docker/testfile' }
+
+     let(:connection) { double('Connection') }
+     let(:docker_shared_directory_path) { '/tmp/docker' }
+     let(:config) do
+       double(
+         RSpec::Hive::Configuration,
+         docker_shared_directory_path: docker_shared_directory_path
+       )
+     end
+
+     before do
+       expect(file_mock).to receive(:path) { file_host_path }
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       expect(subject.send(:docker_path, file_mock)).
+         to eq(expected_file_path)
+     end
+   end
+
+   describe '#show_tables' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:fetch_text) { 'SHOW TABLES' }
+
+     before do
+       expect(connection).to receive(:fetch).with(fetch_text)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.show_tables
+     end
+   end
+
+   describe '#create_database' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:db_name) { 'test' }
+     let(:fetch_text) { 'CREATE DATABASE IF NOT EXISTS `test`' }
+
+     before do
+       expect(connection).to receive(:execute).with(fetch_text)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.create_database(db_name)
+     end
+   end
+
+   describe '#create_table' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:table_schema) { double('Table_schema') }
+     let(:table_statement) { 'I AM TABLE STATEMENT' }
+
+     before do
+       expect(table_schema).to receive(:dup) { table_schema }
+       expect(table_schema).to receive(:instance_variable_set).with(:@location, nil)
+       expect(table_schema).to receive(:create_table_statement) { table_statement }
+       expect(connection).to receive(:execute).with(table_statement)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.create_table(table_schema)
+     end
+   end
+
+   describe '#use_database' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:db_name) { 'test' }
+     let(:fetch_text) { 'USE `test`' }
+
+     before do
+       expect(connection).to receive(:execute).with(fetch_text)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.use_database(db_name)
+     end
+   end
+
+   describe '#drop_database' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:db_name) { 'test' }
+     let(:fetch_text) { 'DROP DATABASE `test`' }
+
+     before do
+       expect(connection).to receive(:execute).with(fetch_text)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.drop_database(db_name)
+     end
+   end
+
+   describe '#show_databases' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+     let(:fetch_text) { 'SHOW DATABASES' }
+
+     before do
+       expect(connection).to receive(:fetch).with(fetch_text)
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.show_databases
+     end
+   end
+
+   describe '#switch_database' do
+     let(:connection) { double('Connection') }
+     let(:config) { double('Config') }
+
+     let(:db_name) { 'test_db' }
+
+     before do
+       expect(subject).to receive(:create_database).once
+       expect(subject).to receive(:use_database).once
+     end
+
+     subject { described_class.new(connection, config) }
+
+     it do
+       subject.switch_database(db_name)
+     end
+   end
+ end
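
Taken as a whole, these examples describe ConnectionDelegator as a thin wrapper that turns method calls into HiveQL executed (or fetched) through the underlying connection. The sketch below is a hedged summary of that surface, not code taken from the gem: connection stands in for a real RBHive connection (the specs themselves only use a double), config for an RSpec::Hive::Configuration, and schema for an RBHive::TableSchema.

# Hedged usage sketch inferred from the expectations above; `connection`,
# `config` and `schema` are stand-ins, and the HiveQL in the comments is the
# literal text asserted in these examples.
delegator = RSpec::Hive::ConnectionDelegator.new(connection, config)

delegator.switch_database('test_db')   # CREATE DATABASE IF NOT EXISTS `test_db`, then USE `test_db`
delegator.create_table(schema)         # executes the schema's create_table_statement (with @location cleared)
delegator.show_databases               # fetches SHOW DATABASES
delegator.show_tables                  # fetches SHOW TABLES

# Rows are written to a delimited file under config.host_shared_directory_path,
# then loaded with "load data local inpath '...' into table test_table",
# followed by the PARTITION(...) clause when partitions are given.
delegator.load_into_table(schema, [['a', 'b', 1], ['aa', 'bb', 22]],
                          {day: '20160101', hm: '2020'})

# "ALTER TABLE test_table ADD PARTITION(dth='mon',country='us') PARTITION(dth='tue',country='us')"
delegator.load_partitions('test_table', [{dth: 'mon', country: 'us'},
                                         {dth: 'tue', country: 'us'}])

delegator.drop_database('test_db')     # DROP DATABASE `test_db`
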