myreplicator 1.1.11 → 1.1.12
- data/app/models/myreplicator/log.rb +4 -2
- data/lib/loader/loader.rb +37 -8
- data/lib/loader/vertica/vertica_loader.rb +20 -18
- data/lib/myreplicator/version.rb +1 -1
- data/lib/transporter/transporter.rb +36 -13
- data/test/dummy/load_exceptions.log +4876 -0
- data/test/dummy/log/development.log +186 -0
- data/test/dummy/tmp/temp.txt +0 -0
- metadata +6 -6
- data/test/dummy/tmp/pids/server.pid +0 -1
data/app/models/myreplicator/log.rb
CHANGED
@@ -117,10 +117,12 @@ module Myreplicator
     def self.completed? *args
       options = args.extract_options!
       log = Log.where(:export_id => options[:export_id],
-                      :file => options[:
+                      :file => options[:file],
                       :job_type => options[:job_type]).last
+      Kernel.p "===== transport_complete? log ====="
+      Kernel.p log
       if log.nil?
-        return
+        return false
       else
         return true if log.state != "running"
       end
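A minimal sketch of the call site this fix serves (mirroring Loader.transfer_completed? further down in this diff; not code from the gem itself):

# `metadata` stands for an ExportMetadata instance, as used in loader.rb.
def transport_done?(metadata)
  Myreplicator::Log.completed?(
    :export_id => metadata.export_id,
    :file      => metadata.export_path,
    :job_type  => "transporter"
  )
end
# Returns false when no matching Log row exists (previously a bare nil),
# and true once the latest matching row's state is no longer "running".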
data/lib/loader/loader.rb
CHANGED
@@ -32,8 +32,12 @@ module Myreplicator
       initials = []
       incrementals = []
       all_files = Myreplicator::Loader.metadata_files
-
+
+      Kernel.p "===== all_files ====="
+      Kernel.p all_files
+
       all_files.each do |m|
+        Kernel.p m
         if m.export_type == "initial"
           initials << m # Add initial to the list
           all_files.delete(m) # Delete obj from mixed list
@@ -156,9 +160,9 @@ module Myreplicator
     ##
     def self.initial_load metadata
       exp = Export.find(metadata.export_id)
-      Kernel.p "===== unzip ====="
-      Loader.unzip(metadata.filename)
-      metadata.zipped = false
+      #Kernel.p "===== unzip ====="
+      #Loader.unzip(metadata.filename)
+      #metadata.zipped = false

       cmd = ImportSql.initial_load(:db => exp.destination_schema,
                                    :filepath => metadata.destination_filepath(tmp_dir))
@@ -178,8 +182,8 @@ module Myreplicator
     ##
     def self.incremental_load metadata
       exp = Export.find(metadata.export_id)
-      Loader.unzip(metadata.filename)
-      metadata.zipped = false
+      #Loader.unzip(metadata.filename)
+      #metadata.zipped = false

       options = {:table_name => exp.table_name,
                  :db => exp.destination_schema,
@@ -227,8 +231,12 @@ module Myreplicator
     # being loaded is completed
     ##
     def self.transfer_completed? metadata
+      Kernel.p "===== transfer_completed? metadata ====="
+      Kernel.p ({:export_id => metadata.export_id,
+                 :file => metadata.export_path,
+                 :job_type => "transporter"})
       if Log.completed?(:export_id => metadata.export_id,
-                        :file => metadata.
+                        :file => metadata.export_path,
                         :job_type => "transporter")
         return true
       end
@@ -274,7 +282,28 @@ module Myreplicator
       Dir.glob(File.join(tmp_dir, "*.json")).each do |json_file|
         files << ExportMetadata.new(:metadata_path => json_file)
       end
-
+      result = []
+      Kernel.p files
+      files.each do |file|
+        puts "<<<<<<<<<<<<<<<<"
+        job = Export.where("id = #{file.export_id}").first
+        puts "<<<<<<<<<<<<<<<<"
+        Kernel.p job
+        puts "<<<<<<<<<<<<<<<<"
+        puts "&&&&&&&&&&&&&&&&&&&&&&&&&&"
+        Kernel.p file
+        puts "&&&&&&&&&&&&&&&&&&&&&&&&&&"
+        if job.state == "transport_completed"
+          result << file
+        end
+        puts "^^^^^^^^^^^^^^^^^^^^^^^^^^"
+        Kernel.p result
+        puts "^^^^^^^^^^^^^^^^^^^^^^^^^^"
+      end
+      puts "<<<<<<<<<<<<<<<<"
+      a = gets
+      puts "<<<<<<<<<<<<<<<<"
+      return result
     end

     ##
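Stripped of the debug prints and the blocking gets, the filtering this hunk adds amounts to the sketch below (illustrative only; tmp_dir, ExportMetadata, and Export are the names used in the diff):

# Keep only metadata files whose Export job has reached transport_completed.
files = Dir.glob(File.join(tmp_dir, "*.json")).map do |json_file|
  ExportMetadata.new(:metadata_path => json_file)
end
result = files.select do |file|
  job = Export.where("id = #{file.export_id}").first
  job && job.state == "transport_completed"
end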
data/lib/loader/vertica/vertica_loader.rb
CHANGED
@@ -137,10 +137,12 @@ module Myreplicator
       vertica_copy options
     elsif schema_check[:changed]
       if metadata.export_type == 'initial'
+        Kernel.p "===== schema_check[:changed] ====="
         Loader.clear_older_files metadata # clear old incremental files
         apply_schema_change(ops, temp_table)
       else
         Loader.cleanup metadata #Remove incremental file
+        Kernel.p "===== Remove incremental file ====="
       end
     else
       temp_table = create_temp_table ops
@@ -354,24 +356,24 @@ module Myreplicator
       # place holder
     end
   end
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+=begin
+  def create_all_tables db
+    tables = Myreplicator::DB.get_tables(db)
+    sqls = {}
+    tables.each do |table|
+      puts "Creating #{db}.#{table}"
+      sql = "DROP TABLE IF EXISTS #{db}.#{table} CASCADE;"
+      VerticaDb::Base.connection.execute sql
+      sql = Loader::VerticaLoader.create_table(:vertica_db => "bidw",
+                                               :vertica_table => table,
+                                               :vertica_schema => db,
+                                               :table => table,
+                                               :db => db)
+      sqls["#{table}"] = sql
+      VerticaDb::Base.connection.execute sql
+    end
+  end
+=end
 end
 end
 end
data/lib/transporter/transporter.rb
CHANGED
@@ -39,8 +39,9 @@ module Myreplicator
     # downloads export files concurrently from multiple sources
     ##
     def self.transfer
-      unique_jobs = Export.where("active = 1").group("source_schema")
-
+      unique_jobs = Export.where("active = 1 and state = 'export_completed'").group("source_schema")
+      Kernel.p "===== unique_jobs ====="
+      Kernel.p unique_jobs
       unique_jobs.each do |export|
         download export
       end
@@ -51,19 +52,20 @@ module Myreplicator
     # Kicks off parallel download
     ##
     def self.download export
-
+      Kernel.p "===== 1 ====="
+      parallel_download(completed_files(export))
     end

     ##
     # Gathers all files that need to be downloaded
     # Gives the queue to parallelizer library to download in parallel
     ##
-    def self.parallel_download
+    def self.parallel_download files
       p = Parallelizer.new(:klass => "Myreplicator::Transporter")
-
-      files.each do |
-        puts
-      p.queue << {:params =>[export,
+
+      files.each do |f|
+        puts f[:file]
+        p.queue << {:params =>[f[:export], f[:file]], :block => download_file}
       end

       p.run
@@ -103,6 +105,7 @@ module Myreplicator
       local_dump_file = File.join(tmp_dir, dump_file.split("/").last)
       sftp.download!(dump_file, local_dump_file)
       Transporter.remove!(export, json_file, dump_file)
+      export.update_attributes!({:state => 'transport_completed'})
       # store back up as well
       unless metadata.store_in.blank?
         Transporter.backup_files(metadata.backup_path, json_local_path, local_dump_file)
@@ -144,12 +147,30 @@ module Myreplicator
     def self.completed_files export
       ssh = export.ssh_to_source
       done_files = ssh.exec!(get_done_files(export))
-
-
-      return done_files.split("\n")
+      if done_files.blank?
+        return []
       end
-
-
+      files = done_files.split("\n")
+      jobs = Export.where("active = 1 and state = 'export_completed' and source_schema = '#{export.source_schema}'")
+      #jobs.each do |j|
+      #  j.update_attributes!({:state => "transporting"})
+      #end
+      result = []
+      files.each do |file|
+        flag = nil
+        jobs.each do |job|
+          if file.include?(job.table_name)
+            flag = job
+            job.update_attributes!({:state => 'transporting'})
+          end
+        end
+        if flag
+          result << {:file => file, :export => flag}
+        end
+      end
+      Kernel.p "===== done_files ====="
+      Kernel.p result
+      return result
     end

     def self.metadata_obj json_path
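An illustrative example of the new return value (filenames invented; real names depend on the export configuration):

Myreplicator::Transporter.completed_files(export)
# => [{:file => "analytics_users_20121105.sql.json",
#      :export => #<Myreplicator::Export table_name: "users", state: "transporting">}]
# Done files are matched to active exports by file.include?(job.table_name),
# each matched Export is moved to 'transporting', and [] is returned when the
# remote grep finds no export_completed metadata files.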
@@ -177,6 +198,8 @@ module Myreplicator
     # Grep -s used to supress error messages
     ##
     def self.get_done_files export
+      Kernel.p "===== export ====="
+      Kernel.p export
       cmd = "cd #{Myreplicator.configs[export.source_schema]["ssh_tmp_dir"]}; grep -ls export_completed *.json"
     end
