db_sucker 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +7 -0
  2. data/.gitignore +16 -0
  3. data/CHANGELOG.md +45 -0
  4. data/Gemfile +4 -0
  5. data/LICENSE.txt +22 -0
  6. data/README.md +193 -0
  7. data/Rakefile +1 -0
  8. data/VERSION +1 -0
  9. data/bin/db_sucker +12 -0
  10. data/bin/db_sucker.sh +14 -0
  11. data/db_sucker.gemspec +29 -0
  12. data/doc/config_example.rb +53 -0
  13. data/doc/container_example.yml +150 -0
  14. data/lib/db_sucker/adapters/mysql2.rb +103 -0
  15. data/lib/db_sucker/application/colorize.rb +28 -0
  16. data/lib/db_sucker/application/container/accessors.rb +60 -0
  17. data/lib/db_sucker/application/container/ssh.rb +225 -0
  18. data/lib/db_sucker/application/container/validations.rb +53 -0
  19. data/lib/db_sucker/application/container/variation/accessors.rb +45 -0
  20. data/lib/db_sucker/application/container/variation/helpers.rb +21 -0
  21. data/lib/db_sucker/application/container/variation/worker_api.rb +65 -0
  22. data/lib/db_sucker/application/container/variation.rb +60 -0
  23. data/lib/db_sucker/application/container.rb +70 -0
  24. data/lib/db_sucker/application/container_collection.rb +47 -0
  25. data/lib/db_sucker/application/core.rb +222 -0
  26. data/lib/db_sucker/application/dispatch.rb +364 -0
  27. data/lib/db_sucker/application/evented_resultset.rb +149 -0
  28. data/lib/db_sucker/application/fake_channel.rb +22 -0
  29. data/lib/db_sucker/application/output_helper.rb +197 -0
  30. data/lib/db_sucker/application/sklaven_treiber/log_spool.rb +57 -0
  31. data/lib/db_sucker/application/sklaven_treiber/worker/accessors.rb +105 -0
  32. data/lib/db_sucker/application/sklaven_treiber/worker/core.rb +168 -0
  33. data/lib/db_sucker/application/sklaven_treiber/worker/helpers.rb +144 -0
  34. data/lib/db_sucker/application/sklaven_treiber/worker/io/base.rb +240 -0
  35. data/lib/db_sucker/application/sklaven_treiber/worker/io/file_copy.rb +81 -0
  36. data/lib/db_sucker/application/sklaven_treiber/worker/io/file_gunzip.rb +58 -0
  37. data/lib/db_sucker/application/sklaven_treiber/worker/io/file_import_sql.rb +80 -0
  38. data/lib/db_sucker/application/sklaven_treiber/worker/io/file_shasum.rb +49 -0
  39. data/lib/db_sucker/application/sklaven_treiber/worker/io/pv_wrapper.rb +73 -0
  40. data/lib/db_sucker/application/sklaven_treiber/worker/io/sftp_download.rb +57 -0
  41. data/lib/db_sucker/application/sklaven_treiber/worker/io/throughput.rb +219 -0
  42. data/lib/db_sucker/application/sklaven_treiber/worker/routines.rb +313 -0
  43. data/lib/db_sucker/application/sklaven_treiber/worker.rb +48 -0
  44. data/lib/db_sucker/application/sklaven_treiber.rb +281 -0
  45. data/lib/db_sucker/application/slot_pool.rb +137 -0
  46. data/lib/db_sucker/application/tie.rb +25 -0
  47. data/lib/db_sucker/application/window/core.rb +185 -0
  48. data/lib/db_sucker/application/window/dialog.rb +142 -0
  49. data/lib/db_sucker/application/window/keypad/core.rb +85 -0
  50. data/lib/db_sucker/application/window/keypad.rb +174 -0
  51. data/lib/db_sucker/application/window/prompt.rb +124 -0
  52. data/lib/db_sucker/application/window.rb +329 -0
  53. data/lib/db_sucker/application.rb +168 -0
  54. data/lib/db_sucker/patches/beta-warning.rb +374 -0
  55. data/lib/db_sucker/patches/developer.rb +29 -0
  56. data/lib/db_sucker/patches/net-sftp.rb +20 -0
  57. data/lib/db_sucker/patches/thread-count.rb +30 -0
  58. data/lib/db_sucker/version.rb +4 -0
  59. data/lib/db_sucker.rb +81 -0
  60. metadata +217 -0
data/lib/db_sucker/application/sklaven_treiber/worker/io/pv_wrapper.rb
@@ -0,0 +1,73 @@
+ module DbSucker
+   class Application
+     class SklavenTreiber
+       class Worker
+         module IO
+           class PvWrapper < Base
+             NoCommandError = Class.new(::ArgumentError)
+             attr_accessor :cmd, :result
+
+             def init
+               @enabled ||= Proc.new {}
+               @fallback ||= Proc.new {}
+               @cmd ||= @local
+             end
+
+             def enabled &block
+               @enabled = block
+             end
+
+             def fallback &block
+               @fallback = block
+             end
+
+             def perform! opts = {}
+               if @ctn.pv_utility
+                 @enabled.call(@ctn.pv_utility)
+                 raise(NoCommandError, "no command was provided, set `pv.cmd = mycmd' in the enabled callback") if @cmd.blank?
+                 execute(opts.slice(:tries).merge(sleep_error: 3)) do
+                   _perform_with_wrapper
+                 end
+               else
+                 execute(opts.slice(:tries), &@fallback)
+               end
+             end
+
+             def _perform_with_wrapper
+               @state = :working
+               target_thread = Thread.current
+               channel, @result = @ctn.nonblocking_channel_result(cmd, channel: true, use_sh: true)
+
+               killer = @worker.app.spawn_thread(:sklaventreiber_worker_io_pv_killer) do |thr|
+                 thr[:current_task] = target_thread[:current_task] if target_thread[:current_task]
+                 loop do
+                   if @worker.should_cancel && !thr[:canceled]
+                     if channel.is_a?(Net::SSH::Connection::Channel)
+                       if channel[:pty]
+                         channel.send_data("\C-c") rescue false
+                       elsif channel[:pid]
+                         @ctn.kill_remote_process(channel[:pid])
+                       end
+                     end
+                     channel.close rescue false
+                     thr[:canceled] = true
+                   end
+                   break unless channel.active?
+                   thr.wait(0.1)
+                 end
+               end
+
+               @result.each_linex do |grp, line|
+                 next unless grp == :stderr
+                 @offset = line.to_i
+               end
+               killer.signal.join
+             ensure
+               killer.kill
+             end
+           end
+         end
+       end
+     end
+   end
+ end
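PvWrapper only pipes the remote command through pv(1) when the container reports a pv_utility; otherwise perform! runs the registered fallback block, while the killer thread tears the SSH channel down on cancellation. A minimal sketch of how a worker routine drives this API, modeled on the pv_wrap calls in routines.rb later in this diff (pv_wrap, var, and second_progress are worker-context helpers and are assumed here):

    pv_wrap(@ctn, nil) do |pv|
      pv.enabled do |pvbinary|   # pv(1) was found on the remote host
        pv.label = "dumping table"
        @remote_file_raw_tmp, pv.cmd = var.dump_to_remote_command(self, pvbinary)  # pv.cmd must be set, else NoCommandError
      end
      pv.fallback do             # no pv(1): run the plain command and poll the channel instead
        @remote_file_raw_tmp, (channel, result) = var.dump_to_remote(self, false)
        second_progress(channel, "dumping table to remote file (:seconds)...").join
      end
      pv.perform!                # chooses the branch based on @ctn.pv_utility
    end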
data/lib/db_sucker/application/sklaven_treiber/worker/io/sftp_download.rb
@@ -0,0 +1,57 @@
+ module DbSucker
+   class Application
+     class SklavenTreiber
+       class Worker
+         module IO
+           class SftpDownload < Base
+             UnknownEventError = Class.new(::RuntimeError)
+             attr_reader :downloader
+
+             def init
+               @label = "downloading"
+               @entity = "download"
+               @throughput.categories << :inet << :inet_down
+             end
+
+             def reset_state
+               super
+               @downloader = nil
+             end
+
+             def download! opts = {}
+               opts = opts.reverse_merge(tries: 3, read_size: @read_size, force_new_connection: true)
+               prepare_local_destination
+               execute(opts.slice(:tries).merge(sleep_error: 3)) do
+                 @ctn.sftp_start(opts[:force_new_connection]) do |sftp|
+                   @filesize = sftp.lstat!(@remote).size
+                   sftp.download!(@remote, @local, read_size: opts[:read_size], requests: 1) do |event, downloader, *args|
+                     if !@closing && @abort_if.call(self, event, downloader, *args)
+                       downloader.abort!
+                       @closing = true
+                     end
+
+                     case event
+                     when :open
+                       @downloader = downloader
+                       @state = :init
+                     when :get
+                       @state = :downloading
+                       @offset = args[1] + args[2].length
+                       GC.start if @offset % GC_FORCE_RATE == 0
+                     when :close
+                       @state = :finishing
+                     when :finish
+                       @state = :done
+                     else
+                       raise UnknownEventError, "unknown event `#{event}'"
+                     end
+                   end
+                 end
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
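SftpDownload translates the events yielded by Net::SFTP's download! (:open, :get, :close, :finish) into the worker's @state and byte @offset, forces a GC run every GC_FORCE_RATE bytes, and raises UnknownEventError for anything else. A minimal sketch of the call site, modeled on _l_download_file in routines.rb later in this diff (the sftp_download helper, app.opts, and @should_cancel belong to the worker context and are assumed here; remote_path/local_path are placeholders):

    sftp_download(@ctn, remote_path => local_path) do |dl|
      dl.status_format = app.opts[:status_format]  # how progress is rendered in the status line
      dl.abort_if { @should_cancel }               # checked on every SFTP event, triggers downloader.abort!
      dl.download!                                 # up to 3 tries by default, sleeping 3s after an error
    end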
data/lib/db_sucker/application/sklaven_treiber/worker/io/throughput.rb
@@ -0,0 +1,219 @@
+ module DbSucker
+   class Application
+     class SklavenTreiber
+       class Worker
+         module IO
+           class Throughput
+             InstanceAlreadyRegisteredError = Class.new(::ArgumentError)
+
+             attr_reader :sklaventreiber, :stats
+
+             def initialize sklaventreiber
+               @sklaventreiber = sklaventreiber
+               @instances = {}
+               @stats = {}
+               @monitor = Monitor.new
+               @polling = []
+             end
+
+             def app
+               sklaventreiber.app
+             end
+
+             def sync &block
+               @monitor.synchronize(&block)
+             end
+
+             def poll! instance
+               sync { @polling.push(instance) }
+             end
+
+             def nopoll! instance
+               sync { @polling.delete(instance) }
+             end
+
+             def start_loop
+               @poll = app.spawn_thread(:sklaventreiber_throughput) do |thr|
+                 thr[:polling] = 0
+                 loop do
+                   sync {
+                     thr[:polling] = @polling.length
+                     @polling.each(&:ping)
+                   }
+                   break if thr[:stop]
+                   thr.wait(0.1)
+                 end
+               end
+             end
+
+             def stop_loop
+               sync do
+                 return unless @poll
+                 @poll[:stop] = true
+               end
+               @poll.signal.join
+             end
+
+             def commit! bytes, *categories
+               sync do
+                 return unless bytes
+                 categories.flatten.each do |cat|
+                   @stats[cat] ||= 0
+                   @stats[cat] += bytes
+                 end
+               end
+             end
+
+             def register target
+               sync do
+                 if @instances[target]
+                   raise InstanceAlreadyRegisteredError, "throughput manager cannot register more than once on the same target: `#{target}'"
+                 else
+                   raise NotImplementedError, "throughput manager requires the target to respond_to?(:filesize)" unless target.respond_to?(:filesize)
+                   raise NotImplementedError, "throughput manager requires the target to respond_to?(:offset)" unless target.respond_to?(:offset)
+                   @instances[target] = Instance.new(self, target)
+                 end
+               end
+             end
+
+             def unregister instance
+               sync do
+                 @instances.clone.each do |k, v|
+                   if v == instance
+                     @instances.delete(k)
+                     break
+                   end
+                 end
+               end
+             end
+
+             class Instance
+               attr_reader :ioop, :categories, :sopts
+
+               def initialize manager, ioop
+                 @manager = manager
+                 @ioop = ioop
+                 @monitor = Monitor.new
+                 @pauses = 0
+                 @categories = [:total]
+                 @tracking = []
+                 @tracking_offset = 0
+                 @sopts = { perc_modifier: 1, perc_base: 0, tracking: 5.seconds }
+                 reset_stats
+               end
+
+               def self.expose what, &how
+                 define_method(what) do |*args, &block|
+                   sync { instance_exec(*args, &how) }
+                 end
+               end
+
+               [:filesize, :offset].each do |m|
+                 define_method(m) {|*a| @ioop.send(m, *a) }
+               end
+
+               [:human_bytes, :human_percentage, :human_seconds, :human_seconds2].each do |m|
+                 define_method(m) {|*a| @manager.app.send(m, *a) }
+               end
+
+               def sync &block
+                 @monitor.synchronize(&block)
+               end
+
+               def commit!
+                 sync do
+                   ping
+                   return unless offset
+                   @manager.commit!(offset, @categories)
+                 end
+               end
+
+               def pause!
+                 sync do
+                   return if @pause_started
+                   @pause_started = Time.current
+                 end
+               end
+
+               def unpause!
+                 sync do
+                   return unless @pause_started
+                   @pauses += Time.current - @pause_started
+                   @pause_started = false
+                 end
+               end
+
+               def ping
+                 sync do
+                   return unless @started_at
+                   @stats[:bps_avg] = runtime.zero? ? 0 : (offset.to_d / runtime.to_d).to_i
+                   @stats[:eta2] = @stats[:bps_avg].zero? ? -1 : (bytes_remain.to_d / @stats[:bps_avg].to_d).to_i
+
+                   # eta tracking
+                   od = @tracking.last ? offset - @tracking_offset - @tracking.sum{|t,o| o }.to_d : 0
+                   @tracking << [Time.current, od] if !od.zero? || @tracking.empty?
+                   while @tracking.any? && @tracking.first[0] < @sopts[:tracking].ago
+                     @tracking_offset += @tracking.shift[1]
+                   end
+
+                   range = @tracking.any? ? @tracking.last[0] - @tracking.first[0] : 0
+                   @stats[:bps] = range.zero? ? 0 : @tracking.sum{|t,o| o }.to_d / range.to_d
+                   @stats[:eta] = @stats[:bps].zero? ? -1 : (bytes_remain.to_d / @stats[:bps].to_d).to_i
+                 end
+               end
+
+               def unregister
+                 @manager.unregister(self)
+               end
+
+               def reset_stats
+                 sync do
+                   @tracking_offset = 0
+                   @tracking.clear
+                   @stats = { eta: -1, eta2: -1, bps: 0, bps_avg: 0 }
+                 end
+               end
+
+               # =======
+               # = API =
+               # =======
+               expose(:eta) { ping; @stats[:eta] }
+               expose(:eta2) { ping; @stats[:eta2] }
+               expose(:bps) { ping; @stats[:bps] }
+               expose(:bps_avg) { ping; @stats[:bps_avg] }
+               expose(:done_percentage) { @sopts[:perc_base].to_d + @sopts[:perc_modifier].to_d * (filesize == 0 ? 100 : offset == 0 ? 0 : (offset.to_d / filesize.to_d * 100.to_d)) }
+               expose(:remain_percentage) { 100.to_d - done_percentage }
+               expose(:bytes_remain) { filesize - offset }
+               expose(:pauses) { @pauses + (@pause_started ? Time.current - @pause_started : 0) }
+               expose(:runtime) { @started_at ? (@ended_at || Time.current) - @started_at - pauses : 0 }
+               expose(:f_byte_progress) { "#{f_offset}/#{f_filesize}" }
+
+               [:bps, :bps_avg, :done_percentage, :remain_percentage, :bytes_remain, :offset, :filesize].each do |m|
+                 expose(:"f_#{m}") { human_bytes(send(m)) }
+               end
+               [:done_percentage, :remain_percentage].each do |m|
+                 expose(:"f_#{m}") { human_percentage(send(m)) }
+               end
+               [:runtime].each do |m|
+                 expose(:"f_#{m}") { human_seconds(send(m)) }
+               end
+               [:eta, :eta2].each do |m|
+                 expose(:"f_#{m}") { r = send(m); r == -1 ? "?:¿?:¿?" : human_seconds2(send(m)) }
+               end
+
+               def measure &block
+                 @manager.poll!(self)
+                 @started_at = Time.current
+                 block.call(self)
+               ensure
+                 @ended_at = Time.current
+                 @manager.nopoll!(self)
+                 commit!
+               end
+             end
+           end
+         end
+       end
+     end
+   end
+ end
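Throughput is a Monitor-guarded registry: register wraps any object responding to #filesize and #offset in an Instance, start_loop pings every measuring instance each 0.1 s, and each Instance derives two rates — bps_avg/eta2 from the cumulative average and bps/eta from a sliding window over the last 5 seconds of offset deltas (sopts[:tracking]). A hedged sketch of the intended flow; the sklaventreiber.throughput accessor and the io_operation object are assumptions for illustration:

    tp = sklaventreiber.throughput        # assumed accessor to the manager instance
    t  = tp.register(io_operation)        # raises NotImplementedError unless #filesize/#offset exist
    t.measure do |i|
      # ... transfer bytes, advancing io_operation.offset ...
      i.f_bps              # sliding-window rate, human formatted
      i.f_eta              # "?:¿?:¿?" until a rate is known
      i.f_done_percentage  # offset/filesize scaled by perc_base/perc_modifier
    end                    # ensure block stops polling and commits the bytes into tp.stats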
data/lib/db_sucker/application/sklaven_treiber/worker/routines.rb
@@ -0,0 +1,313 @@
+ module DbSucker
+   class Application
+     class SklavenTreiber
+       class Worker
+         module Routines
+           def _r_dump_file
+             @status = ["dumping table to remote file...", "yellow"]
+
+             pv_wrap(@ctn, nil) do |pv|
+               pv.enabled do |pvbinary|
+                 pv.filesize = -1
+                 pv.label = "dumping table"
+                 pv.entity = "table dump"
+                 pv.status_format = app.opts[:status_format]
+                 pv.mode = :nofs
+                 @status = [pv, "yellow"]
+                 pv.abort_if { @should_cancel }
+
+                 @remote_file_raw_tmp, pv.cmd = var.dump_to_remote_command(self, pvbinary)
+               end
+
+               pv.fallback do
+                 @remote_file_raw_tmp, (channel, result) = var.dump_to_remote(self, false)
+                 second_progress(channel, "dumping table to remote file (:seconds)...").join
+               end
+
+               pv.on_complete do
+                 @remote_files_to_remove << @remote_file_raw_tmp
+               end
+
+               pv.perform!
+             end
+             _cancelpoint
+
+             # check if response has any sort of errors and abort
+             # if result.any?
+             #   r = result.join
+             #   if m = r.match(/(Unknown column '(.+)') in .+ \(([0-9]+)\)/i)
+             #     @status = ["[DUMP] Failed: #{m[1]} (#{m[3]})", :red]
+             #     throw :abort_execution, true
+             #   end
+             # end
+
+             @remote_file_raw = @remote_file_raw_tmp[0..-5]
+             ctn.sftp_start do |sftp|
+               # rename tmp file
+               sftp.rename!(@remote_file_raw_tmp, @remote_file_raw)
+
+               # save size for gzip progress
+               @remote_file_raw_filesize = sftp.lstat!(@remote_file_raw).size
+             end
+
+             @remote_files_to_remove.delete(@remote_file_raw_tmp)
+             @remote_files_to_remove << @remote_file_raw
+           end
+
+           def _r_calculate_raw_hash
+             @status = ["calculating integrity hash for raw file...", "yellow"]
+
+             pv_wrap(@ctn, nil) do |pv|
+               pv.enabled do |pvbinary|
+                 pv.filesize = @remote_file_raw_filesize
+                 pv.label = "hashing raw file"
+                 pv.entity = "hashing raw file"
+                 pv.status_format = app.opts[:status_format]
+                 @status = [pv, "yellow"]
+                 pv.abort_if { @should_cancel }
+                 pv.cmd = ctn.calculate_remote_integrity_hash_command(@remote_file_raw, pvbinary)
+               end
+
+               pv.fallback do
+                 cmd, (channel, pv.result) = ctn.calculate_remote_integrity_hash(@remote_file_raw, false)
+                 second_progress(channel, "calculating integrity hash for raw file (:seconds)...").join
+               end
+
+               pv.on_success do
+                 @integrity = { raw: pv.result.for_group(:stdout).join.split(" ").first.try(:strip).presence }
+               end
+
+               pv.perform!
+             end
+           end
+
+           def _r_compress_file
+             @status = ["compressing file for transfer...", "yellow"]
+
+             pv_wrap(@ctn, nil) do |pv|
+               pv.enabled do |pvbinary|
+                 pv.filesize = @remote_file_raw_filesize
+                 pv.label = "compressing"
+                 pv.entity = "compress"
+                 pv.status_format = app.opts[:status_format]
+                 @status = [pv, "yellow"]
+                 pv.abort_if { @should_cancel }
+                 @remote_file_compressed, pv.cmd = var.compress_file_command(@remote_file_raw, pvbinary)
+                 @remote_files_to_remove << @remote_file_compressed
+               end
+
+               pv.fallback do
+                 @remote_file_compressed, (channel, result) = var.compress_file(@remote_file_raw, false)
+                 @remote_files_to_remove << @remote_file_compressed
+                 second_progress(channel, "compressing file for transfer (:seconds)...").join
+               end
+
+               pv.on_success do
+                 @remote_files_to_remove.delete(@remote_file_raw)
+               end
+
+               pv.perform!
+             end
+           end
+
+           def _r_calculate_compressed_hash
+             @status = ["calculating integrity hash for compressed file...", "yellow"]
+
+             pv_wrap(@ctn, nil) do |pv|
+               pv.enabled do |pvbinary|
+                 pv.filesize = @remote_file_raw_filesize
+                 pv.label = "hashing compressed file"
+                 pv.entity = "hashing compressed file"
+                 pv.status_format = app.opts[:status_format]
+                 @status = [pv, "yellow"]
+                 pv.abort_if { @should_cancel }
+                 pv.cmd = ctn.calculate_remote_integrity_hash_command(@remote_file_compressed, pvbinary)
+               end
+
+               pv.fallback do
+                 cmd, (channel, pv.result) = ctn.calculate_remote_integrity_hash(@remote_file_compressed, false)
+                 second_progress(channel, "calculating integrity hash for compressed file (:seconds)...").join
+               end
+
+               pv.on_success do
+                 @integrity[:compressed] = pv.result.for_group(:stdout).join.split(" ").first.try(:strip).presence
+               end
+
+               pv.perform!
+             end
+           end
+
+           def _l_download_file
+             @status = ["initiating download...", "yellow"]
+             @local_file_compressed = local_tmp_file(File.basename(@remote_file_compressed))
+             @local_files_to_remove << @local_file_compressed
+
+             sftp_download(@ctn, @remote_file_compressed => @local_file_compressed) do |dl|
+               dl.status_format = app.opts[:status_format]
+               @status = [dl, "yellow"]
+               dl.abort_if { @should_cancel }
+               dl.download!
+             end
+           end
+
+           def _l_verify_compressed_hash
+             return unless @integrity[:compressed]
+             label = "verifying compressed file"
+             @status = ["#{label}...", :yellow]
+
+             file_shasum(@ctn, @local_file_compressed) do |fc|
+               fc.label = label
+               fc.sha = ctn.integrity_sha
+               fc.status_format = app.opts[:status_format]
+               @status = [fc, "yellow"]
+
+               fc.abort_if { @should_cancel }
+               fc.on_success do
+                 @integrity[:compressed_local] = fc.result
+               end
+               fc.verify!
+             end
+
+
+             if !@should_cancel && @integrity[:compressed] != @integrity[:compressed_local]
+               @status = ["[INTEGRITY] downloaded compressed file corrupted! (remote: #{@integrity[:compressed]}, local: #{@integrity[:compressed_local]})", :red]
+               throw :abort_execution, true
+             end
+           end
+
+           def _l_copy_file file = nil
+             label = "copying #{var.copies_file_compressed? ? "gzipped" : "raw"} file"
+             @status = ["#{label}...", :yellow]
+
+             @copy_file_source = var.copies_file_compressed? ? @local_file_compressed : @local_file_raw
+             @copy_file_target = copy_file_destination(var.data["file"])
+
+             file_copy(@ctn, @copy_file_source => @copy_file_target) do |fc|
+               fc.label = label
+               fc.status_format = app.opts[:status_format]
+               @status = [fc, "yellow"]
+               fc.abort_if { @should_cancel }
+               fc.copy!
+             end
+           end
+
+           def _l_decompress_file
+             label = "decompressing file"
+             @status = ["#{label}...", :yellow]
+
+             file_gunzip(@ctn, @local_file_compressed) do |fc|
+               fc.filesize = @remote_file_raw_filesize
+               fc.label = label
+               fc.status_format = app.opts[:status_format]
+               @status = [fc, "yellow"]
+
+               fc.abort_if { @should_cancel }
+               fc.on_success do
+                 @local_files_to_remove.delete(@local_file_compressed)
+                 @local_file_raw = fc.local
+                 @local_files_to_remove << @local_file_raw
+               end
+               fc.gunzip!
+             end
+           end
+
+           def _l_verify_raw_hash
+             return unless @integrity[:raw]
+             label = "verifying raw file"
+             @status = ["#{label}...", :yellow]
+
+             file_shasum(@ctn, @local_file_raw) do |fc|
+               fc.label = label
+               fc.sha = ctn.integrity_sha
+               fc.status_format = app.opts[:status_format]
+               @status = [fc, "yellow"]
+
+               fc.abort_if { @should_cancel }
+               fc.on_success do
+                 @integrity[:raw_local] = fc.result
+               end
+               fc.verify!
+             end
+
+             if !@should_cancel && @integrity[:raw] != @integrity[:raw_local]
+               @status = ["[INTEGRITY] extracted raw file corrupted! (remote: #{@integrity[:raw]}, local: #{@integrity[:raw_local]})", :red]
+               throw :abort_execution, true
+             end
+           end
+
+           def _l_import_file
+             if File.size(@local_file_raw) > app.opts[:deferred_threshold] && app.opts[:deferred_import]
+               @deferred = true
+               @perform << "l_wait_for_workers"
+             else
+               # cancel!("importing not yet implemented", true)
+               _do_import_file
+             end
+           end
+
+           def _l_wait_for_workers
+             @perform << "l_import_file_deferred"
+             wait_defer_ready
+           end
+
+           def _l_import_file_deferred
+             @status = ["importing #{human_bytes(File.size(@local_file_raw))} SQL data into local server...", :yellow]
+             _do_import_file(@local_file_raw)
+           end
+
+           def _do_import_file
+             @status = ["importing #{human_bytes(File.size(@local_file_raw))} SQL data into local server...", :yellow]
+
+             imp = @var.data["importer"]
+             impf = @var.parse_flags(var.data["importer_flags"]).merge(deferred: @deferred)
+
+             if imp == "void10"
+               t = app.channelfy_thread app.spawn_thread(:sklaventreiber_worker_io_import_sql) {|thr| thr.wait(10) }
+               second_progress(t, "importing with void10, sleeping 10 seconds (:seconds)...").join
+             elsif imp == "sequel" || @var.constraint(table)
+               raise NotImplementedError, "SequelImporter is not yet implemented/ported to new db_sucker version!"
+               # # imp_was_sequel = imp == "sequel"
+               # # imp = "sequel"
+               # # t = app.channelfy_thread Thread.new {
+               # #   Thread.current[:importer] = imp = SequelImporter.new(worker, file, ignore_errors: !imp_was_sequel)
+               # #   imp.start
+               # # }
+               # var.load_local_file(self, file) do |importer, channel|
+               #   case importer
+               #   when "sequel"
+               #     sequel_progress(channel).join
+               #     if channel[:importer].error
+               #       @status = ["importing with Sequel", :yellow]
+               #       raise channel[:importer].error
+               #     end
+               #   else second_progress(channel, "#{"(deferred) " if deferred}loading file (#{human_filesize(File.size(file))}) into local SQL server (:seconds)...").join
+               #   end
+               #   throw :abort_execution, channel[:error_message] if channel[:error_message]
+               #   @return_message = channel[:return_message] if channel[:return_message]
+               # end
+             elsif imp == "binary"
+               t = app.channelfy_thread app.spawn_thread(:sklaventreiber_worker_io_import_sql) {|thr|
+                 begin
+                   file_import_sql(@ctn, :instruction) do |fi|
+                     @status = [fi, "yellow"]
+                     fi.instruction = @var.import_instruction_for(@local_file_raw, impf)
+                     fi.filesize = File.size(@local_file_raw)
+                     fi.status_format = app.opts[:status_format]
+                     fi.abort_if { @should_cancel }
+                     fi.import!
+                   end
+                 rescue Worker::IO::FileImportSql::ImportError => ex
+                   fail! "ImportError: #{ex.message}"
+                   sleep 3
+                 end
+               }
+             else
+               raise ImporterNotFoundError, "variation `#{cfg.name}/#{name}' defines unknown importer `#{imp}' (in `#{cfg.src}')"
+             end
+             t.join
+           end
+         end
+       end
+     end
+   end
+ end
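The _r_* (remote) and _l_* (local) routines are the individual steps of the dump-compress-download-verify-import pipeline; each publishes progress through @status, honors @should_cancel, and can reshape the step queue (@perform), as _l_import_file does when it defers large imports. A rough sketch of the step order, inferred only from the method names and the @perform usage above; the authoritative queue is built elsewhere in the worker and is not part of this diff:

    # assumed ordering, for illustration only
    @perform = %w[
      r_dump_file r_calculate_raw_hash r_compress_file r_calculate_compressed_hash
      l_download_file l_verify_compressed_hash l_decompress_file l_verify_raw_hash
      l_import_file
    ]
    @perform.each { |step| send("_#{step}") }  # steps may enqueue more, e.g. "l_wait_for_workers" -> "l_import_file_deferred"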