flydata 0.7.6 → 0.7.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/VERSION +1 -1
- data/flydata.gemspec +0 -0
- data/lib/flydata/command/sync.rb +22 -15
- data/lib/flydata/source/sync.rb +56 -19
- data/lib/flydata/source_mysql/sync.rb +4 -16
- data/lib/flydata/source_postgresql/sync.rb +1 -15
- data/lib/flydata/table_attribute.rb +10 -0
- data/spec/flydata/command/sync_spec.rb +182 -12
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 47e53f1fbb8f9c3312cbac20679d9b389d41994a
+  data.tar.gz: d3ab901fdc3747ab54b82b2a087c3a0aa74fe44f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9f92fd99a9411b656ef014e3e64214d50c5549c4eac74b1dbde279f65ccba452dde8698bc26d5cf1dd145b092dcf459b2f839f996a804fbd120d2f5e361de30d
+  data.tar.gz: 9f2181a206ffa141fa7bca9822dcd824ca05e45d65ffe030cc1a0c055c27fe8b7161855e835817813d214ef9ffa78beeeb6e29e6f8c9410bc73368a02017b421
data/VERSION
CHANGED
@@ -1 +1 @@
-0.7.6
+0.7.7
data/flydata.gemspec
CHANGED
Binary file
data/lib/flydata/command/sync.rb
CHANGED
@@ -295,10 +295,10 @@ EOS
       message = "\nFollowing errors are found.\n"

       if status.include? :STUCK_AT_PROCESS
-        message += " -
+        message += " - Timeout while processing data\n"
       end
       if status.include? :STUCK_AT_UPLOAD
-        message += " -
+        message += " - Timeout while uploading data\n"
       end
       if status.include? :ABNORMAL_SHUTDOWN
         message += " - Agent was not shut down correctly\n"
@@ -411,13 +411,11 @@ EOS
       _check(context, stop_agent:true)

       if status.include? :STUCK_AT_PROCESS
-        e = AgentError.new("
+        e = AgentError.new("Timeout while processing data")
         e.description = <<EOS
-
+We are currently processing the remaining buffered data. Please wait a few minutes and repeat `flydata sync:repair` command.

-  flydata
-
-  If you need help, please contact FlyData Support (support@flydata.com).
+If this issue continues, please contact FlyData Support (support@flydata.com).
 EOS
         raise e
       end
@@ -551,7 +549,7 @@ EOS
       # Remove the lock file if exists.
       File.delete(FLYDATA_LOCK) if File.exists?(FLYDATA_LOCK)

-      log_info_stdout "Repair is done.
+      log_info_stdout "Repair is done. Restarting."
     end

     # Initial sync
@@ -1270,25 +1268,34 @@ Thank you for using FlyData!

       table_lists = source.sync.table_lists

-      #full_tables will either
+      # `full_tables` will either
+      # - include all tables (full-sync & append_only) including invalid tables, or
+      # - all valid tables that aren't new tables

       # The 'new_tables' list may include tables which has already been
       # synced for backward compatibility reason.
       # Filter out such tables so that we get a list of 'new' tables with no
       # position file.
       real_new_tables = sync_fm.get_new_table_list(table_lists['new_tables'], "pos")
-
-
+      valid_tables_append_only = table_lists['tables_append_only'] || []
+
+      if options[:include_all_tables]
+        @full_tables = table_lists['tables'] + table_lists['invalid_tables']
+        @append_only_tables = valid_tables_append_only + table_lists['invalid_tables_append_only']
+      else
+        @full_tables = table_lists['tables'] - real_new_tables
+        @append_only_tables = valid_tables_append_only
+      end

-
-      @
+      # tables that do not have a .pos file
+      @unsynced_tables = sync_fm.get_new_table_list(@full_tables, "pos")
+      # tables that do not have a .generated_ddl file
+      @no_ddl_generated_tables = sync_fm.get_new_table_list(@full_tables, "generated_ddl")

       @input_tables = sync_resumed ? sync_info[:tables] : input_tables
       @input_tables ||= []
       @full_initial_sync = sync_resumed ? sync_info[:initial_sync] :
         (@unsynced_tables == @full_tables)
-      @append_only_tables = table_lists['tables_append_only']
-      @append_only_tables ||= []

       sync_fm.close
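For readers skimming the hunk above: the behavioural change in `set_current_tables` is the new `include_all_tables` branch. Below is a minimal, standalone sketch of that selection logic using plain hashes and arrays; the key names mirror the diff, while the sample data and the local variables (`include_all_tables`, `real_new_tables`) are invented for illustration and stand in for `options[:include_all_tables]` and `sync_fm.get_new_table_list`.

    # Sketch only, not the gem's code: how the new branch picks tables.
    table_lists = {
      'tables'                     => %w(users orders new_table),
      'invalid_tables'             => %w(broken_full broken_append),
      'tables_append_only'         => %w(events),
      'invalid_tables_append_only' => %w(broken_append),
      'new_tables'                 => %w(new_table)
    }
    real_new_tables = %w(new_table)   # tables with no .pos file yet
    include_all_tables = false        # corresponds to options[:include_all_tables]

    valid_tables_append_only = table_lists['tables_append_only'] || []

    if include_all_tables
      full_tables        = table_lists['tables'] + table_lists['invalid_tables']
      append_only_tables = valid_tables_append_only + table_lists['invalid_tables_append_only']
    else
      full_tables        = table_lists['tables'] - real_new_tables
      append_only_tables = valid_tables_append_only
    end

    puts full_tables.inspect        # => ["users", "orders"]
    puts append_only_tables.inspect # => ["events"]

With `include_all_tables` true, invalid tables are kept in both lists; otherwise new tables are dropped from the full-sync list and invalid tables are excluded entirely.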
data/lib/flydata/source/sync.rb
CHANGED
@@ -7,6 +7,9 @@ module Flydata
 module Source

 class Sync < Component
+  # subclass has to define these constant variables
+  #SOURCE_PREFERENCE_NAME = 'xxxxx_data_entry_preference'
+
   def self.inherited(child_class)
     Source.register(child_class, self)
   end
@@ -18,7 +21,9 @@ class Sync < Component
   #
   # Raises exception when the source does not support sync
   def setup
-
+    prefs = de[self.class::SOURCE_PREFERENCE_NAME]
+    setup_table_prefs(prefs)
+    setup_pk_override_prefs(prefs)
   end

   # Public Interface: Tells if the source support sync or not
@@ -30,47 +35,54 @@ class Sync < Component

   # Public Interface: Table lists
   #
-  # Returns lists of tables in a hash.
-  # "tables" : An array of tables currently in sync
+  # Returns lists of tables in a hash. The following lists will be returned
+  # "tables" : An array of tables currently in sync (valid tables append-only & valid tables full-sync)
   # "new_tables" : An array of tables for which no generate_table_ddl has been run yet
   # "invalid_tables" : An array of tables that had an issue starting sync
+  # "tables_append_only" : An array of tables whose sync type is append-only
+  # "invalid_tables_append_only" : An array of append-only tables that had an issue starting sync
+  SOURCE_TABLE_LISTS_KEYS = %w(
+    tables
+    new_tables
+    invalid_tables
+    tables_append_only
+    invalid_tables_append_only
+  )
   def table_lists
-
+    de[self.class::SOURCE_PREFERENCE_NAME].select {|key, value| SOURCE_TABLE_LISTS_KEYS.include?(key)}
   end

   # Public Interface: Data Servers
   #
   # Returns a comma separated list of data servers to which the agent sends data
   def data_servers
-
+    de[self.class::SOURCE_PREFERENCE_NAME]['data_servers']
   end

   # Public Interface: Forwarder
   #
   # Returns a forwarder type in string. Values are 'tcpforwarder' or 'sslforwarder'.
   def forwarder
-
+    de[self.class::SOURCE_PREFERENCE_NAME]['forwarder']
   end

   private

   def setup_table_prefs(prefs)
+    # valid tables
+    prefs['tables_append_only'] = split_pref_string(prefs, 'tables_append_only')
+    prefs['tables'] = split_pref_string(prefs, 'tables')
+
     if prefs['tables_append_only']
-      prefs['
-        prefs['tables_append_only'].split(/(?:\s*,\s*|\s+)/).uniq
-      prefs['tables'] = (prefs['tables'].to_s.split(/(?:\s*,\s*|\s+)/) +
-                         prefs['tables_append_only']).uniq
-    else
-      prefs['tables'] = prefs['tables'].to_s.split(/(?:\s*,\s*|\s+)/).uniq
+      prefs['tables'] = (prefs['tables'] + prefs['tables_append_only']).uniq
     end
-    prefs['invalid_tables'] =
-      prefs['invalid_tables'].kind_of?(String) ?
-        prefs['invalid_tables'].split(/(?:\s*,\s*|\s+)/).uniq : []
-    prefs['new_tables'] =
-      prefs['new_tables'].kind_of?(String) ?
-        prefs['new_tables'].split(/(?:\s*,\s*|\s+)/).uniq : []

-
+    # invalid tables
+    prefs['invalid_tables'] = split_pref_string(prefs, 'invalid_tables')
+    prefs['invalid_tables_append_only'], prefs['invalid_tables_full_sync'] = categorize_invalid_tables(prefs, prefs['invalid_tables'])
+
+    # new tables
+    prefs['new_tables'] = split_pref_string(prefs, 'new_tables')
   end

   def setup_pk_override_prefs(prefs)
@@ -99,6 +111,31 @@ class Sync < Component
       $log.info "Primary key override is updated. Using pk_override: #{prefs['pk_override']}"
     end
   end
+
+  # Utility methods
+  def split_pref_string(prefs, key)
+    return nil unless prefs
+    return [] unless prefs[key].kind_of?(String)
+    prefs[key].split(/(?:\s*,\s*|\s+)/).uniq
+  end
+
+  def categorize_invalid_tables(prefs, invalid_tables)
+    invalid_tables_append_only = []
+    invalid_tables_full_sync = []
+
+    invalid_tables.each do |tbl_name|
+      if append_only_table?(prefs, tbl_name)
+        invalid_tables_append_only << tbl_name
+      else
+        invalid_tables_full_sync << tbl_name
+      end
+    end
+    return [invalid_tables_append_only, invalid_tables_full_sync]
+  end
+
+  def append_only_table?(prefs, table_name)
+    TableAttribute::append_only_table?(prefs['table_attributes'], table_name)
+  end
 end

 end
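The helpers added above normalize comma- or whitespace-separated preference strings and split invalid tables by sync type, using the presence of `omit_events` in a table's attributes as the append-only marker. The snippet below is a rough standalone illustration of that behaviour; the sample preference hash and the `split` lambda are invented for this example and only mimic what `split_pref_string` and `categorize_invalid_tables` do in the diff.

    prefs = {
      'tables'           => 'Users, Addresses Users',        # commas and/or whitespace, with a duplicate
      'invalid_tables'   => 'error_fullsync_1,error_append_2',
      'table_attributes' => [
        { 'table_name' => 'error_fullsync_1' },
        { 'table_name' => 'error_append_2', 'omit_events' => ['delete'] }
      ]
    }

    # Normalize a string preference into a deduplicated array (mirrors split_pref_string).
    split = ->(key) { prefs[key].kind_of?(String) ? prefs[key].split(/(?:\s*,\s*|\s+)/).uniq : [] }

    tables         = split.call('tables')         # => ["Users", "Addresses"]
    invalid_tables = split.call('invalid_tables') # => ["error_fullsync_1", "error_append_2"]

    # Categorize invalid tables by sync type, keyed on the omit_events attribute.
    append_only, full_sync = invalid_tables.partition do |name|
      attr = prefs['table_attributes'].find { |a| a['table_name'] == name }
      attr && attr.key?('omit_events')
    end
    # append_only => ["error_append_2"], full_sync => ["error_fullsync_1"]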
data/lib/flydata/source_mysql/sync.rb
CHANGED
@@ -4,11 +4,11 @@ module Flydata
 module SourceMysql

 class Sync < Source::Sync
-
-    mp = de['mysql_data_entry_preference']
-
-    setup_table_prefs(mp)
+  SOURCE_PREFERENCE_NAME = 'mysql_data_entry_preference'

+  def setup
+    super
+    mp = de[SOURCE_PREFERENCE_NAME]
     unless mp['ssl_ca_content'].to_s.strip.empty?
       sync_fm = SyncFileManager.new(de)
       sync_fm.save_ssl_ca(mp['ssl_ca_content'])
@@ -20,18 +20,6 @@ class Sync < Source::Sync
   def supported?
     true
   end
-
-  def table_lists
-    de['mysql_data_entry_preference'].select {|key, value| %w(tables new_tables invalid_tables tables_append_only).include?(key)}
-  end
-
-  def data_servers
-    de['mysql_data_entry_preference']['data_servers']
-  end
-
-  def forwarder
-    de['mysql_data_entry_preference']['forwarder']
-  end
 end

 end
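The point of this refactor is that per-source behaviour now lives in one constant: the base class reads `de[self.class::SOURCE_PREFERENCE_NAME]`, so a subclass only declares which preference key it owns and inherits `table_lists`, `data_servers`, and `forwarder`. Here is a self-contained toy version of that constant-lookup pattern; the `Base` and `MysqlLike` classes and the sample hash are invented for illustration and are not flydata code.

    # Toy illustration of the self.class::CONSTANT pattern used by Source::Sync.
    class Base
      def initialize(de)
        @de = de
      end

      def setup
        # Resolves the constant on the concrete subclass, not on Base.
        prefs = @de[self.class::SOURCE_PREFERENCE_NAME]
        puts "setting up with tables: #{prefs['tables']}"
      end
    end

    class MysqlLike < Base
      SOURCE_PREFERENCE_NAME = 'mysql_data_entry_preference'
    end

    MysqlLike.new('mysql_data_entry_preference' => { 'tables' => 'Users,Addresses' }).setup
    # => setting up with tables: Users,Addresses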
data/lib/flydata/source_postgresql/sync.rb
CHANGED
@@ -4,25 +4,11 @@ module Flydata
 module SourcePostgresql

 class Sync < Source::Sync
-
-    setup_table_prefs(de['postgresql_data_entry_preference'])
-  end
+  SOURCE_PREFERENCE_NAME = 'postgresql_data_entry_preference'

   def supported?
     true
   end
-
-  def table_lists
-    de['postgresql_data_entry_preference'].select {|key, value| %w(tables new_tables invalid_tables tables_append_only).include?(key)}
-  end
-
-  def data_servers
-    de['postgresql_data_entry_preference']['data_servers']
-  end
-
-  def forwarder
-    de['postgresql_data_entry_preference']['forwarder']
-  end
 end

 end
data/lib/flydata/table_attribute.rb
CHANGED
@@ -54,5 +54,15 @@ module TableAttribute
       tbl_attr[key.to_s] = val
     end
   end
+
+  def self.find_table(table_attributes, tbl_name)
+    table_attributes.find {|tbl_attr| tbl_attr['table_name'] == tbl_name}
+  end
+
+  def self.append_only_table?(table_attributes, tbl_name)
+    attr = find_table(table_attributes, tbl_name)
+    raise "Sync type for invalid table `#{tbl_name}` not known" unless attr
+    attr.has_key?('omit_events')
+  end
 end
 end
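The new `append_only_table?` keys off the presence of `omit_events` in a table's attributes and raises when a table has no attributes entry at all. The sketch below re-states the two methods from the diff in a throwaway module (`TableAttributeSketch` is an invented name) so the example runs on its own; in the gem these methods live in flydata's TableAttribute module.

    module TableAttributeSketch
      def self.find_table(table_attributes, tbl_name)
        table_attributes.find { |tbl_attr| tbl_attr['table_name'] == tbl_name }
      end

      def self.append_only_table?(table_attributes, tbl_name)
        attr = find_table(table_attributes, tbl_name)
        raise "Sync type for invalid table `#{tbl_name}` not known" unless attr
        attr.has_key?('omit_events')
      end
    end

    attrs = [
      { 'table_name' => 'Invoices', 'omit_events' => ['delete'] },  # append-only
      { 'table_name' => 'Users' }                                   # full sync
    ]

    TableAttributeSketch.append_only_table?(attrs, 'Invoices')  # => true
    TableAttributeSketch.append_only_table?(attrs, 'Users')     # => false
    TableAttributeSketch.append_only_table?(attrs, 'Missing')   # raises RuntimeError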
data/spec/flydata/command/sync_spec.rb
CHANGED
@@ -74,6 +74,7 @@ module Flydata
       end
     end
   end
+
   describe '#generate_source_dump' do
     let (:flydata) { double('flydata') }
     let (:dp) { double('dp') }
@@ -88,6 +89,7 @@ module Flydata
     let (:target_tables) { ["test_table_1"] }
     let (:db_byte) { 1 }
     let (:disk_byte) { 100 }
+
     before do
       require 'flydata/source_mysql/generate_source_dump'
       allow_any_instance_of(Flydata::SourceMysql::GenerateSourceDump).to receive(:dump_size).and_return(db_byte)
@@ -109,6 +111,7 @@ module Flydata
       expect(subject).to receive(:ask_yes_no).and_return(true).at_least(:once)
       expect_any_instance_of(FlydataCore::Event::ApiEventSender).to receive(:send_event).once
     end
+
     context 'with no stream option' do
       before do
         expect(default_sync_fm).to receive(:save_sync_info).once
@@ -131,6 +134,7 @@ module Flydata
         }.to raise_error
       end
     end
+
     context 'with stream option' do
       it 'will export to io' do
         expect(default_sync_fm).to receive(:save_sync_info).once
@@ -140,6 +144,7 @@ module Flydata
       end
     end
   end
+
   describe '#convert_to_flydata_values' do
     subject { subject_object.send(:convert_to_flydata_values, source_table, values) }
     let(:values) { [4, 'John', nil, col4_value, nil, nil] }
@@ -175,14 +180,34 @@ module Flydata
     subject { subject_object.send(:data_entry) }

     let(:de) { { 'mysql_data_entry_preference' => mp } }
-    let(:mp) { { 'tables' =>
-                 'table_attributes' =>
-
-                   {"table_name"=>"Addresses", "status"=>"init_sync_pending"}
-                 ],
-                 'pk_override' => {"Users"=>["id"]}
+    let(:mp) { { 'tables' => default_tables_str,
+                 'table_attributes' => default_table_attributes,
+                 'pk_override' => pk_override_hash
               } }
+    let(:default_tables_str) { 'Users,Addresses' }
+    let(:default_table_attributes) {[
+      {"table_name"=>"Users", "status"=>"init_sync_pending"},
+      {"table_name"=>"Addresses", "status"=>"init_sync_pending"}
+    ]}

+    let(:append_only_tables_list) { %w|Invoices Sessions Addresses| }
+    let(:append_only_tables_str) { 'Invoices,Sessions,Addresses' }
+    let(:tbl_attrs_for_append_only_tables) { [
+      {"table_name"=>"Invoices", "omit_events"=>["delete"], "status"=>"init_sync_pending"},
+      {"table_name"=>"Sessions", "omit_events"=>["delete"], "status"=>"init_sync_pending"},
+      {"table_name"=>"Addresses", "omit_events"=>["delete"], "status"=>"init_sync_pending"}
+    ] }
+
+    let(:invalid_tables_list) { %w|error_fullsync_1 error_append_2| }
+    let(:invalid_tables_str) { 'error_fullsync_1,error_append_2' }
+    let(:tbl_attrs_for_invalid_tables) {[
+      {"table_name"=>"error_fullsync_1", "status"=>"init_sync_pending",
+        "invalid_table_reason"=>"no primary key defined"},
+      {"table_name"=>"error_append_2", "omit_events"=>["delete"], "status"=>"init_sync_pending",
+        "invalid_table_reason"=>"table does not exist in the MySQL database"},
+    ]}
+
+    let(:pk_override_hash) { {"Users"=>["id"]} }
     let(:sfm) { double('sfm') }
     let(:ssl_ca_content) { double('ssl_ca_content') }
     let(:ssl_ca_path) { double('ssl_ca_path') }
@@ -199,26 +224,74 @@ module Flydata
       expect(subject_object).to receive(:retrieve_data_entries).
         and_return([de])
     end
+
     context 'without tables_append_only' do
       it "expands a table list string to an array of tables" do
         subject
         expect(mp['tables']).to eq %w(Users Addresses)
       end
     end
+
     context 'with tables_append_only' do
       before do
-        mp['tables_append_only'] =
-        mp['table_attributes']
-          {"table_name"=>"Invoices", "omit_events"=>["delete"], "status"=>"init_sync_pending"},
-          {"table_name"=>"Sessions", "omit_events"=>["delete"], "status"=>"init_sync_pending"},
-          {"table_name"=>"Addresses", "omit_events"=>["delete"], "status"=>"init_sync_pending"}
-        ]
+        mp['tables_append_only'] = append_only_tables_str
+        mp['table_attributes'] = default_table_attributes + tbl_attrs_for_append_only_tables
       end
       it "creates an array of tables from 'tables' and 'tables_append_only' combined" do
        subject
        expect(mp['tables']).to eq %w(Users Addresses Invoices Sessions)
      end
+
+      it 'creates an array of append-only tables' do
+        subject
+        expect(mp['tables_append_only']).to eq append_only_tables_list
+      end
+    end
+
+    context 'with invalid tables and invalid append-only tables' do
+      before do
+        mp['tables_append_only'] = append_only_tables_str
+        mp['table_attributes'] = default_table_attributes + tbl_attrs_for_append_only_tables
+
+        mp['invalid_tables'] = invalid_tables_str
+        mp['table_attributes'] = default_table_attributes +
+          tbl_attrs_for_append_only_tables +
+          tbl_attrs_for_invalid_tables
+      end
+
+      it 'does not change value for `tables`' do
+        subject
+        expect(mp['tables']).to eq %w(Users Addresses Invoices Sessions)
+      end
+
+      it 'does not change value for `tables_append_only`' do
+        subject
+        expect(mp['tables_append_only']).to eq append_only_tables_list
+      end
+
+      it 'creates an array of invalid tables' do
+        subject
+        expect(mp['invalid_tables']).to eq invalid_tables_list
+      end
+
+      it 'categorize invalid tables by sync type' do
+        subject
+        expect(mp['invalid_tables_append_only']).to eq %w(error_append_2)
+        expect(mp['invalid_tables_full_sync']).to eq %w(error_fullsync_1)
+      end
+    end
+
+    context 'with an invalid table which do not have table_attributes entry' do
+      before do
+        mp['invalid_tables'] = invalid_tables_str + ',table_from_conf'
+        mp['table_attributes'] += default_table_attributes + tbl_attrs_for_invalid_tables
+      end
+
+      it 'cannot determine sync type for the table and raise an error' do
+        expect { subject }.to raise_error /Sync type for invalid table `table_from_conf` not known/
+      end
     end
+
     context 'with ssl_ca_content' do
       before { mp["ssl_ca_content"] = ssl_ca_content }
       it "saves the content to a local file via SyncFileManager" do
@@ -233,6 +306,7 @@ module Flydata
       end
     end
   end
+
   context 'called twice' do
     before { subject }
     it "repurposes the saved de" do
@@ -242,6 +316,7 @@ module Flydata
       end
     end
   end
+
   context 'type RedshiftFileDataEntry' do
     before { de['type'] = 'RedshiftFileDataEntry' }
     it "raises an error about unsupported data entry" do
@@ -249,6 +324,101 @@ module Flydata
       end
     end
   end
+
+  describe '#set_current_tables' do
+    subject { subject_object.send(:set_current_tables, input_tables, options) }
+    let(:input_tables) { nil }
+    let(:table_lists) {{
+      "tables" => tables,
+      "invalid_tables" => invalid_tables,
+      "new_tables" => new_tables,
+      "tables_append_only" => tables_append_only,
+      "invalid_tables_append_only" => invalid_tables_append_only,
+    }}
+    let(:real_new_tables) { [] }
+    let(:tables) { ["table1", "table2", "table4"] }
+    let(:invalid_tables) { ["table3_invalid","append3_invalid"] }
+    let(:invalid_tables_append_only) { ["append3_invalid"] }
+    let(:new_tables) { ["table4"] }

+    let(:data_entry_with_table_lists) do
+      new_de = default_data_entry.dup
+      new_de['mysql_data_entry_preference'].merge!(table_lists)
+      new_de
+    end
+    before do
+      allow(subject_object).to receive(:data_entry).and_return(data_entry_with_table_lists)
+    end
+
+    context 'when include_all_tables option is true' do
+      let(:options) { { include_all_tables: true } }
+
+      context 'when there are append-only tables (valid & invalid)' do
+        let(:tables_append_only) { ["append1","append2"] }
+        let(:invalid_tables_append_only) { ["append3_invalid"] }
+        it do
+          subject
+          expect(subject_object.instance_variable_get(:@full_tables)).to eq(
+            tables + invalid_tables)
+          expect(subject_object.instance_variable_get(:@append_only_tables)).to eq(
+            tables_append_only + invalid_tables_append_only)
+        end
+      end
+
+      context "when table_lists['tables_append_only'] is nil" do
+        let(:tables_append_only) { nil }
+        it do
+          subject
+          expect(subject_object.instance_variable_get(:@full_tables)).to eq(
+            tables + invalid_tables)
+          expect(subject_object.instance_variable_get(:@append_only_tables)).to eq(
+            invalid_tables_append_only)
+        end
+      end
+
+      context 'when there is no append-only tables' do
+        let(:tables_append_only) { [] }
+        let(:invalid_tables_append_only) { [] }
+        it do
+          subject
+          expect(subject_object.instance_variable_get(:@full_tables)).to eq(
+            tables + invalid_tables)
+          expect(subject_object.instance_variable_get(:@append_only_tables)).to eq([])
+        end
+      end
+    end
+
+    context 'when include_all_tables option is false' do
+      let(:options) { { include_all_tables: false } }
+      context 'when there is no real_new_tables' do
+        let(:real_new_tables) { [] }
+        let(:tables_append_only) { ["append1","append2"] }
+        let(:invalid_tables_append_only) { ["append3_invalid"] }
+        before do
+          allow_any_instance_of(SyncFileManager).to receive(:get_new_table_list).and_return(real_new_tables)
+        end
+        it do
+          subject
+          expect(subject_object.instance_variable_get(:@full_tables)).to eq(tables - real_new_tables)
+          expect(subject_object.instance_variable_get(:@append_only_tables)).to eq(tables_append_only)
+        end
+      end
+
+      context 'there is real_new_tables' do
+        let(:real_new_tables) { ['table1'] }
+        let(:tables_append_only) { ["append1","append2"] }
+        let(:invalid_tables_append_only) { ["append3_invalid"] }
+        before do
+          allow_any_instance_of(SyncFileManager).to receive(:get_new_table_list).and_return(real_new_tables)
+        end
+        it do
+          subject
+          expect(subject_object.instance_variable_get(:@full_tables)).to eq(tables - real_new_tables)
+          expect(subject_object.instance_variable_get(:@append_only_tables)).to eq(tables_append_only)
+        end
+      end
+    end
+  end
 end
 end
 end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: flydata
 version: !ruby/object:Gem::Version
-  version: 0.7.6
+  version: 0.7.7
 platform: ruby
 authors:
 - Koichi Fujikawa
@@ -12,7 +12,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-06-
+date: 2016-06-30 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rest-client