db_sucker 3.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +16 -0
- data/CHANGELOG.md +45 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +193 -0
- data/Rakefile +1 -0
- data/VERSION +1 -0
- data/bin/db_sucker +12 -0
- data/bin/db_sucker.sh +14 -0
- data/db_sucker.gemspec +29 -0
- data/doc/config_example.rb +53 -0
- data/doc/container_example.yml +150 -0
- data/lib/db_sucker/adapters/mysql2.rb +103 -0
- data/lib/db_sucker/application/colorize.rb +28 -0
- data/lib/db_sucker/application/container/accessors.rb +60 -0
- data/lib/db_sucker/application/container/ssh.rb +225 -0
- data/lib/db_sucker/application/container/validations.rb +53 -0
- data/lib/db_sucker/application/container/variation/accessors.rb +45 -0
- data/lib/db_sucker/application/container/variation/helpers.rb +21 -0
- data/lib/db_sucker/application/container/variation/worker_api.rb +65 -0
- data/lib/db_sucker/application/container/variation.rb +60 -0
- data/lib/db_sucker/application/container.rb +70 -0
- data/lib/db_sucker/application/container_collection.rb +47 -0
- data/lib/db_sucker/application/core.rb +222 -0
- data/lib/db_sucker/application/dispatch.rb +364 -0
- data/lib/db_sucker/application/evented_resultset.rb +149 -0
- data/lib/db_sucker/application/fake_channel.rb +22 -0
- data/lib/db_sucker/application/output_helper.rb +197 -0
- data/lib/db_sucker/application/sklaven_treiber/log_spool.rb +57 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/accessors.rb +105 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/core.rb +168 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/helpers.rb +144 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/base.rb +240 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/file_copy.rb +81 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/file_gunzip.rb +58 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/file_import_sql.rb +80 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/file_shasum.rb +49 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/pv_wrapper.rb +73 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/sftp_download.rb +57 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/io/throughput.rb +219 -0
- data/lib/db_sucker/application/sklaven_treiber/worker/routines.rb +313 -0
- data/lib/db_sucker/application/sklaven_treiber/worker.rb +48 -0
- data/lib/db_sucker/application/sklaven_treiber.rb +281 -0
- data/lib/db_sucker/application/slot_pool.rb +137 -0
- data/lib/db_sucker/application/tie.rb +25 -0
- data/lib/db_sucker/application/window/core.rb +185 -0
- data/lib/db_sucker/application/window/dialog.rb +142 -0
- data/lib/db_sucker/application/window/keypad/core.rb +85 -0
- data/lib/db_sucker/application/window/keypad.rb +174 -0
- data/lib/db_sucker/application/window/prompt.rb +124 -0
- data/lib/db_sucker/application/window.rb +329 -0
- data/lib/db_sucker/application.rb +168 -0
- data/lib/db_sucker/patches/beta-warning.rb +374 -0
- data/lib/db_sucker/patches/developer.rb +29 -0
- data/lib/db_sucker/patches/net-sftp.rb +20 -0
- data/lib/db_sucker/patches/thread-count.rb +30 -0
- data/lib/db_sucker/version.rb +4 -0
- data/lib/db_sucker.rb +81 -0
- metadata +217 -0
@@ -0,0 +1,28 @@
|
|
1
|
+
module DbSucker
  class Application
    # Terminal ANSI coloring helpers. Mixed into classes that expose an
    # @opts hash; coloring is a no-op unless @opts[:colorize] is truthy.
    module Colorize
      UnknownColorError = Class.new(::ArgumentError)

      # color name => ANSI SGR code
      COLORMAP = {
        black: 30,
        gray: 30,
        red: 31,
        green: 32,
        yellow: 33,
        blue: 34,
        magenta: 35,
        cyan: 36,
        white: 37,
      }

      # Wraps `str` in ANSI escape sequences for the given color (default
      # yellow). Returns the plain string when colorization is disabled.
      # Raises UnknownColorError for color names not in COLORMAP.
      def colorize str, color = :yellow
        code = COLORMAP.fetch(color.to_sym) { raise(UnknownColorError, "unknown color `#{color}'") }
        return str.to_s unless @opts[:colorize]
        "\e[#{code}m#{str}\e[0m"
      end
      alias_method :c, :colorize

      # Strips ANSI color escape sequences from a string.
      def decolorize str
        str.to_s.gsub(/\e\[.*?(\d)+m/ , '')
      end
    end
  end
end
|
@@ -0,0 +1,60 @@
|
|
1
|
+
module DbSucker
  class Application
    class Container
      # Read-only convenience accessors around the container's config data.
      module Accessors
        # Uniform handle on the container (mirrors Variation::Accessors#ctn).
        def ctn
          self
        end

        # The raw "source" section of the container configuration.
        def source
          ctn.data["source"]
        end

        # Remote scratch directory used for dump files (configurable via
        # source/ssh/tmp_location, with a default).
        def tmp_path
          source["ssh"]["tmp_location"].presence || "/tmp/db_sucker_tmp"
        end

        # All configured variations as a hash of id => Variation.
        def variations
          Hash[data["variations"].keys.map{|id| [id, variation(id)] }]
        end

        # Builds a single Variation by id; nil when not configured.
        def variation v
          return unless vd = data["variations"][v]
          Variation.new(self, v, vd)
        end

        # Binary used for integrity hashes. Defaults to "shasum -ba" when the
        # key is absent; an explicitly blank value disables it (nil).
        def integrity_binary
          (source["integrity_binary"].nil? ? "shasum -ba" : source["integrity_binary"]).presence
        end

        # SHA variant (e.g. 512) appended to integrity_binary; defaults to 512,
        # an explicit false disables integrity checking.
        def integrity_sha
          source["integrity_sha"].nil? ? 512 : source["integrity_sha"]
        end

        # True when integrity checking is enabled.
        def integrity?
          !!integrity_sha
        end

        # Resolves the configured SSH key file(s) to Pathnames (memoized).
        # "~" paths are expanded; relative paths are resolved against the
        # directory of the YAML config file. Warns about missing files.
        def ssh_key_files
          @ssh_key_files ||= begin
            files = [*data["source"]["ssh"]["keyfile"]].reject(&:blank?).map do |f|
              if f.start_with?("~")
                Pathname.new(File.expand_path(f))
              else
                Pathname.new(File.dirname(src)).join(f)
              end
            end
            files.each do |f|
              begin
                # block form closes the handle immediately — the original bare
                # File.open(f) leaked one file descriptor per key file
                File.open(f) {}
              rescue Errno::ENOENT
                warning("SSH identity file `#{f}' for identifier `#{name}' does not exist! (in `#{src}')")
              end
            end
            files
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,225 @@
|
|
1
|
+
module DbSucker
  class Application
    class Container
      # SSH/SFTP connection handling and remote command execution.
      # Maintains one shared SSH and one shared SFTP connection (@ssh/@sftp,
      # guarded by @ssh_mutex/@sftp_mutex set up by Container#initialize) and
      # provides channel helpers that stream command output into an
      # EventedResultset.
      module SSH
        CommandExecutionError = Class.new(::RuntimeError)
        ChannelOpenFailedError = Class.new(::RuntimeError)

        begin # SSH
          # Opens the shared SSH connection. With a block: yields the
          # connection and guarantees ssh_end runs afterwards (ensure).
          # Without a block the connection is left open for later use.
          def ssh_begin
            debug "Opening SSH connection for identifier `#{name}'"
            @ssh = ssh_start
            begin
              yield(@ssh)
            ensure
              ssh_end
            end if block_given?
          end

          # Closes and clears the shared SSH connection; close errors are
          # deliberately swallowed (best-effort teardown).
          def ssh_end
            return unless @ssh
            debug "Closing SSH connection for identifier `#{name}'"
            @ssh.try(:close) rescue false
            debug "CLOSED SSH connection for identifier `#{name}'"
            @ssh = nil
          end

          # Serializes access to the shared SSH connection.
          def ssh_sync &block
            @ssh_mutex.synchronize(&block)
          end

          # Returns (or yields) an SSH session. Reuses the shared connection
          # unless new_connection is true; note the early `return` executes
          # while holding the mutex. Connection options are assembled from the
          # source/ssh config section.
          def ssh_start new_connection = false, &block
            if @ssh && !new_connection
              ssh_sync do
                debug "Reusing SSH connection in start for identifier `#{name}'"
                return block ? block.call(@ssh) : @ssh
              end
            end
            debug "Opening new SSH connection in start for identifier `#{name}'"

            opt = {}
            opt[:user] = source["ssh"]["username"] if source["ssh"]["username"].present?
            opt[:password] = source["ssh"]["password"] if source["ssh"]["password"].present?
            opt[:keys] = ssh_key_files if ssh_key_files.any?
            opt[:port] = source["ssh"]["port"] if source["ssh"]["port"].present?
            if block
              # block form: Net::SSH closes the session when the block returns
              Net::SSH.start(source["ssh"]["hostname"], nil, opt) do |ssh|
                block.call(ssh)
              end
            else
              Net::SSH.start(source["ssh"]["hostname"], nil, opt)
            end
          end
        end

        begin # SFTP
          # Opens the shared SFTP connection. With a block: yields it and
          # guarantees sftp_end runs afterwards (ensure).
          def sftp_begin
            debug "Opening SFTP connection for identifier `#{name}'"
            @sftp = sftp_start
            begin
              yield(@sftp)
            ensure
              sftp_end
            end if block_given?
          end

          # Closes and clears the shared SFTP connection; close errors are
          # deliberately swallowed (best-effort teardown).
          def sftp_end
            return unless @sftp
            debug "Closing SFTP connection for identifier `#{name}'"
            @sftp.try(:close) rescue false
            debug "CLOSED SFTP connection for identifier `#{name}'"
            @sftp = nil
          end

          # Serializes access to the shared SFTP connection.
          def sftp_sync &block
            @sftp_mutex.synchronize(&block)
          end

          # SFTP counterpart of ssh_start: reuses @sftp unless new_connection,
          # otherwise opens a fresh Net::SFTP session with the same options.
          def sftp_start new_connection = false, &block
            if @sftp && !new_connection
              sftp_sync do
                debug "Reusing SFTP connection in start for identifier `#{name}'"
                return block ? block.call(@sftp) : @sftp
              end
            end
            debug "Opening new SFTP connection in start for identifier `#{name}'"

            opt = {}
            opt[:user] = source["ssh"]["username"] if source["ssh"]["username"].present?
            opt[:password] = source["ssh"]["password"] if source["ssh"]["password"].present?
            opt[:keys] = ssh_key_files if ssh_key_files.any?
            opt[:port] = source["ssh"]["port"] if source["ssh"]["port"].present?
            if block
              Net::SFTP.start(source["ssh"]["hostname"], nil, opt) do |sftp|
                block.call(sftp)
              end
            else
              Net::SFTP.start(source["ssh"]["hostname"], nil, opt)
            end
          end
        end

        begin # SSH helpers
          # Runs the event loop of the shared SSH connection; false when no
          # connection is open.
          def loop_ssh *args, &block
            return false unless @ssh
            @ssh.loop(*args, &block)
          end

          # Opens a channel and blocks until it closes. The waitlock Queue
          # gates the open callback so `block` only runs after the wait
          # monitor/condition have been installed on the channel.
          def blocking_channel ssh = nil, result = nil, &block
            waitlock = Queue.new
            (ssh || ssh_start).open_channel do |ch|
              waitlock.pop
              block.call(ch)
            end.tap do |ch|
              # catch open_fail errors
              ch.on_open_failed do |_ch, code, desc|
                result.try(:close!)
                _ch[:open_failed] = true
                raise ChannelOpenFailedError, "#{code}: #{desc}"
              end

              ch[:wait_monitor] = Monitor.new
              ch[:wait_condition] = ch[:wait_monitor].new_cond
              st = app.sklaventreiber
              waitlock << true
              if !ssh && st && st.sync{ st.try(:poll) }
                # shared connection and sklaventreiber polls SSH for us:
                # sleep on the condition (broadcast from on_close) instead of
                # driving the event loop ourselves via ch.wait
                ch[:wait_monitor].synchronize do
                  ch[:wait_condition].wait(0.1) while ch.active?
                end
              else
                ch.wait
              end
            end
          end

          # Opens a channel without waiting for it to close; the caller is
          # responsible for driving/joining it (wait monitor is installed).
          def nonblocking_channel ssh = nil, result = nil, &block
            (ssh || ssh_start).open_channel do |ch|
              ch[:wait_monitor] = Monitor.new
              ch[:wait_condition] = ch[:wait_monitor].new_cond
              block.call(ch)
            end.tap do |ch|
              # catch open_fail errors
              ch.on_open_failed do |_ch, code, desc|
                result.try(:close!)
                _ch[:open_failed] = true
                raise ChannelOpenFailedError, "#{code}: #{desc}"
              end
            end
          end

          # Sends a signal to a remote process on a fresh SSH connection.
          # NOTE(review): `kill -SIG -PID` with negative PID targets the whole
          # process group — matches the pgid echoed by use_sh commands below.
          def kill_remote_process pid, sig = :INT
            ssh_start(true) do |ssh|
              blocking_channel_result("kill -#{sig} -#{pid}", ssh: ssh)
            end
          end

          # Executes `cmd` in a channel and collects stdout/stderr into an
          # EventedResultset.
          #
          # Options:
          #   ssh:          session to use (default: shared connection)
          #   blocking:     wait for the channel to close (default true)
          #   channel:      return [channel, result] instead of just result
          #   request_pty:  allocate a pty before exec
          #   use_sh:       wrap in /bin/sh, echoing the process group id first
          #                 so the remote job can be signalled (see
          #                 kill_remote_process); the pgid is stored in
          #                 ch[:pid] (false if unparsable) and stripped from
          #                 the resultset
          def blocking_channel_result cmd, opts = {}
            opts = opts.reverse_merge(ssh: nil, blocking: true, channel: false, request_pty: false, use_sh: false)
            if opts[:use_sh]
              cmd = %{/bin/sh -c 'echo $(ps -o pgid= $$ | grep -o [0-9]*) && #{cmd}'}
              pid_monitor = Monitor.new
              pid_signal = pid_monitor.new_cond
            end
            result = EventedResultset.new
            chan = send(opts[:blocking] ? :blocking_channel : :nonblocking_channel, opts[:ssh], result) do |ch|
              chproc = ->(ch, cmd, result) {
                ch.exec(cmd) do |ch, success|
                  Thread.main[:app].debug "START: #{Thread.current == Thread.main ? :main : Thread.current[:itype]}-#{cmd} #{success}"
                  raise CommandExecutionError, "could not execute command" unless success

                  # "on_data" is called when the process writes something to stdout
                  ch.on_data do |c, data|
                    Thread.main[:app].debug "#{Thread.current == Thread.main ? :main : Thread.current[:itype]}-#{Time.current.to_f}: STDOUT: #{data}".chomp
                    if opts[:use_sh] && result.empty?
                      # first stdout line is the echoed pgid, not command output
                      ch[:pid] = data.to_i
                      ch[:pid] = false if ch[:pid].zero?
                      pid_monitor.synchronize { pid_signal.broadcast } if opts[:use_sh]
                      next
                    end
                    result.enq(data, :stdout)
                  end

                  # "on_extended_data" is called when the process writes something to stderr
                  ch.on_extended_data do |c, type, data|
                    Thread.main[:app].debug "#{Thread.current == Thread.main ? :main : Thread.current[:itype]}-#{Time.current.to_f}: STDERR: #{data}".chomp
                    result.enq(data, :stderr)
                  end

                  ch.on_request "exit-status" do |ch, data|
                    Thread.main[:app].debug "#{Thread.current == Thread.main ? :main : Thread.current[:itype]}-#{Time.current.to_f}: EXIT: #{data.read_long} #{cmd}".chomp
                  end

                  ch.on_close do |ch|
                    # wake up blocking_channel's condition wait
                    ch[:wait_monitor].synchronize { ch[:wait_condition].broadcast }
                    Thread.main[:app].debug "#{Thread.current == Thread.main ? :main : Thread.current[:itype]}-#{Time.current.to_f}: CLOSED: #{cmd}".chomp
                  end

                  ch.on_eof do
                    Thread.main[:app].debug "#{Thread.current == Thread.main ? :main : Thread.current[:itype]}-#{Time.current.to_f}: EOF: #{cmd}".chomp
                    result.close!
                    ch[:handler].try(:signal)
                  end
                end
              }
              if opts[:request_pty]
                ch.request_pty do |ch, success|
                  raise CommandExecutionError, "could not obtain pty" unless success
                  ch[:pty] = true
                  chproc.call(ch, cmd, result)
                end
              else
                chproc.call(ch, cmd, result)
              end
            end
            # give the pgid echo up to 1s to arrive before returning
            pid_monitor.synchronize { pid_signal.wait(1) if !chan[:pid] } if opts[:use_sh]
            opts[:channel] ? [chan, result] : result
          end

          # Same as blocking_channel_result but does not wait for the channel.
          def nonblocking_channel_result cmd, opts = {}
            blocking_channel_result(cmd, opts.merge(blocking: false))
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,53 @@
|
|
1
|
+
module DbSucker
  class Application
    class Container
      # Config-file validation for a container definition.
      module Validations
        # Validates the root, source, source/ssh and every variation section
        # of the container config, aborting the application with a
        # descriptive message when a section is invalid.
        def verify!
          _verify("/", data, __keys_for(:root))

          # validate source
          if source_cfg = data["source"]
            _verify("/source", source_cfg, __keys_for(:source))
            if source_cfg["ssh"]
              _verify("/source/ssh", source_cfg["ssh"], __keys_for(:source_ssh))
              # resolves & existence-checks the key files (may emit warnings)
              ssh_key_files
            end
          end

          # validate variations
          if variations_cfg = data["variations"]
            variations_cfg.each do |name, vd|
              # NOTE: the block parameter `name` intentionally shadows the
              # container's #name so messages refer to the variation
              begin
                _verify("/variations/#{name}", vd, __keys_for(:variation))
                base = variations_cfg[vd["base"]] if vd["base"]
                raise(ConfigurationError, "variation `#{name}' cannot base from `#{vd["base"]}' since it doesn't exist (in `#{src}')") if vd["base"] && !base
                raise ConfigurationError, "variation `#{name}' must define an adapter (mysql2, postgres, ...)" if vd["adapter"].blank? && vd["database"] != false && (!base || base["adapter"].blank?)
              rescue ConfigurationError => ex
                abort "#{ex.message} (in `#{src}' [/variations/#{name}])"
              end
            end
          end
        end

        # Asserts that `hash` only contains whitelisted `keys` and applies
        # section-specific rules; aborts with the offending token path.
        def _verify token, hash, keys
          hash.assert_valid_keys(keys)
          raise ConfigurationError, "A source must define an adapter (mysql2, postgres, ...)" if token == "/source" && hash["adapter"].blank?
          raise ConfigurationError, "A variation `#{name}' can only define either a `only' or `except' option" if hash["only"] && hash["except"]
        rescue ConfigurationError, ArgumentError => ex
          abort "#{ex.message} (in `#{src}' [#{token}])"
        end

        # Whitelisted config keys per config section.
        def __keys_for which
          case which
          when :root       then %w[source variations]
          when :source     then %w[adapter ssh database hostname username password args client_binary dump_binary gzip_binary integrity_sha integrity_binary]
          when :source_ssh then %w[hostname username keyfile password port tmp_location]
          when :variation  then %w[adapter label base database hostname username password args client_binary incremental file only except importer importer_flags ignore_always constraints]
          else []
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,45 @@
|
|
1
|
+
module DbSucker
  class Application
    class Container
      class Variation
        # Read-only convenience accessors around the variation's config data.
        module Accessors
          # The owning container (named `cfg` on Variation).
          def ctn
            cfg
          end

          # The container's "source" config section.
          def source
            ctn.source
          end

          # Optional human readable label for this variation.
          def label
            data["label"]
          end

          # Incremental-transfer settings; empty hash when not configured.
          def incrementals
            data["incremental"] || {}
          end

          # Remote gzip binary, defaulting to plain "gzip".
          def gzip_binary
            source["gzip_binary"] || "gzip"
          end

          # Truthy (the configured file) when this variation copies a premade
          # file instead of dumping tables.
          def copies_file?
            data["file"]
          end

          # True when the copied file is gzip compressed (by extension).
          def copies_file_compressed?
            file = copies_file?
            file && file.end_with?(".gz")
          end

          def requires_uncompression?
            !copies_file_compressed? || data["database"]
          end

          # Constraint (per-table dump restriction) for `table`, falling back
          # to the "__default" entry; nil when no constraints are configured.
          def constraint table
            constraints = data["constraints"]
            constraints && (constraints[table] || constraints["__default"])
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,21 @@
|
|
1
|
+
module DbSucker
  class Application
    class Container
      class Variation
        module Helpers
          # Parses a whitespace separated importer flag string into a hash:
          #
          #   "+foo"     => { "foo" => true }
          #   "+foo=bar" => { "foo" => "bar" }
          #   "-foo"     => { "foo" => false }
          #
          # Raises InvalidImporterFlagError for tokens matching neither form.
          def parse_flags flags
            flags.to_s.split(" ").map(&:strip).reject(&:blank?).each_with_object({}) do |fstr, res|
              if m = fstr.match(/\+(?<key>[^=]+)(?:=(?<value>.*))?/)
                # the original regex used an empty capture group `(?<value>)`,
                # so "+key=value" always produced "" — capture the remainder
                res[m[:key].strip] = m[:value].nil? ? true : m[:value]
              elsif m = fstr.match(/\-(?<key>[^=]+)/)
                res[m[:key]] = false
              else
                raise InvalidImporterFlagError, "invalid flag `#{fstr}' for variation `#{cfg.name}/#{name}' (in `#{cfg.src}')"
              end
            end
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,65 @@
|
|
1
|
+
module DbSucker
  class Application
    class Container
      class Variation
        # Worker-facing API: which tables to transfer and the remote shell
        # commands used to dump and compress them.
        module WorkerApi
          # Resolves the effective table list for this variation, honoring
          # the only/except/ignore_always settings.
          # Returns [tables_to_transfer, all_tables]; raises
          # TableNotFoundError when only/except references an unknown table.
          def tables_to_transfer
            all = cfg.table_list(cfg.data["source"]["database"]).map(&:first)
            verify_table = ->(t) do
              unless all.include?(t)
                raise TableNotFoundError, "table `#{t}' for the database `#{cfg.source["database"]}' could not be found (provided by variation `#{cfg.name}/#{name}' in `#{cfg.src}')"
              end
            end

            keep =
              if data["only"]
                [*data["only"]].each_with_object([]) do |t, acc|
                  verify_table[t]
                  acc << t
                end
              elsif data["except"]
                all.dup.tap do |remaining|
                  [*data["except"]].each do |t|
                    verify_table[t]
                    remaining.delete(t)
                  end
                end
              else
                all.dup
              end
            keep -= data["ignore_always"] if data["ignore_always"].is_a?(Array)

            [keep, all]
          end

          # Builds the remote dump command for the worker's table, optionally
          # piped through pv for progress reporting.
          # Returns [remote_tmpfile, command].
          def dump_to_remote_command worker, pv_binary = false
            tmpfile = worker.tmp_filename(true)
            cmd = dump_command_for(worker.table)
            cmd << (pv_binary.presence ? %{ | #{pv_binary} -n -b > #{tmpfile}} : %{ > #{tmpfile}})
            [tmpfile, cmd]
          end

          # Executes the dump command remotely; returns [tmpfile, resultset].
          def dump_to_remote worker, blocking = true
            remote_file, cmd = dump_to_remote_command(worker)
            [remote_file, cfg.blocking_channel_result(cmd, channel: true, use_sh: true, blocking: blocking)]
          end

          # Builds the remote gzip command for `file` (pv variant keeps the
          # original file piped and removes it afterwards).
          # Returns ["#{file}.gz", command].
          def compress_file_command file, pv_binary = false
            cmd =
              if pv_binary.presence
                %{#{pv_binary} -n -b #{file} | #{gzip_binary} > #{file}.gz && rm #{file} }
              else
                %{#{gzip_binary} #{file}}
              end
            ["#{file}.gz", cmd]
          end

          # Executes the compression command remotely; returns [gz_file, resultset].
          def compress_file file, blocking = true
            gz_file, cmd = compress_file_command(file)
            [gz_file, cfg.blocking_channel_result(cmd, channel: true, use_sh: true, blocking: blocking)]
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,60 @@
|
|
1
|
+
module DbSucker
  class Application
    class Container
      # One named dump/import variation of a container: merges settings from
      # an optional base variation and mixes in the configured DB adapter API.
      class Variation
        ImporterNotFoundError = Class.new(::RuntimeError)
        InvalidImporterFlagError = Class.new(::RuntimeError)

        include Accessors
        include Helpers
        include WorkerApi

        # cfg  - the owning Container
        # name - the variation identifier
        # data - the variation config hash (post base-merge)
        attr_reader :cfg, :name, :data

        def initialize cfg, name, data
          @cfg, @name, @data = cfg, name, data

          # merge settings from the referenced base variation (must exist)
          if data["base"]
            bdata = cfg.variation(data["base"]) || raise(ConfigurationError, "variation `#{cfg.name}/#{name}' cannot base from `#{data["base"]}' since it doesn't exist (in `#{cfg.src}')")
            @data = data.reverse_merge(bdata.data)
          end

          # resolve & mix in the adapter (e.g. DbSucker::Adapters::Mysql2::Api);
          # dependency loading is synchronized through the application
          if @data["adapter"]
            begin
              adapter = "DbSucker::Adapters::#{@data["adapter"].camelize}::Api".constantize
              @cfg.app.sync { adapter.require_dependencies }
              extend adapter
            rescue NameError => ex
              raise(AdapterNotFoundError, "variation `#{cfg.name}/#{name}' defines invalid adapter `#{@data["adapter"]}' (in `#{cfg.src}'): #{ex.message}", ex.backtrace)
            end
          elsif @data["database"]
            raise(ConfigurationError, "variation `#{cfg.name}/#{name}' must define an adapter (mysql2, postgres, ...) if database is provided (in `#{cfg.src}')")
          end
        end

        # ===============
        # = Adapter API =
        # ===============
        # Stub implementations that raise NotImplementedError; the mixed-in
        # adapter module is expected to override each of these.
        [
          :client_binary,
          :local_client_binary,
          :dump_binary,
          :client_call,
          :local_client_call,
          :dump_call,
          :database_list,
          :table_list,
          :hostname,
        ].each do |meth|
          define_method meth do
            raise NotImplementedError, "your selected adapter `#{@data["adapter"]}' must implement `##{meth}' for variation `#{cfg.name}/#{name}' (in `#{cfg.src}')"
          end
        end

        # Adapter hook: shell command dumping a single table (stub).
        def dump_command_for table
          raise NotImplementedError, "your selected adapter `#{@data["adapter"]}' must implement `#dump_command_for(table)' for variation `#{cfg.name}/#{name}' (in `#{cfg.src}')"
        end
      end
    end
  end
end
|
@@ -0,0 +1,70 @@
|
|
1
|
+
module DbSucker
  class Application
    # A single sucker configuration entry (one YAML identifier): holds the
    # source/variations config, provides SSH/SFTP access to the remote host
    # and mixes in the source database adapter.
    class Container
      AdapterNotFoundError = Class.new(::ArgumentError)
      TableNotFoundError = Class.new(::RuntimeError)
      ConfigurationError = Class.new(::ArgumentError)

      include Accessors
      include Validations
      include SSH
      OutputHelper.hook(self)

      # app  - the Application instance
      # name - the config identifier
      # src  - path of the YAML file this container was defined in
      # data - the parsed config hash
      attr_reader :app, :name, :src, :data

      # Validates the config (verify!) and extends self with the source
      # adapter API; raises AdapterNotFoundError for unknown adapters.
      def initialize app, name, src, data
        @app = app
        @name = name
        @src = src
        @data = data
        @ssh_mutex = Monitor.new
        @sftp_mutex = Monitor.new

        verify!

        begin
          adapter = "DbSucker::Adapters::#{source["adapter"].camelize}::Api".constantize
          app.sync { adapter.require_dependencies }
          extend adapter
        rescue NameError => ex
          raise(AdapterNotFoundError, "identifier `#{name}' defines invalid source adapter `#{source["adapter"]}' (in `#{@src}'): #{ex.message}", ex.backtrace)
        end
      end

      # Probes the remote host once for a usable `pv` binary (version must be
      # >= 1.3.8) and memoizes the result in @_pv_utility; `false` acts as a
      # "probed but unavailable" sentinel so the probe is not repeated.
      # Returns nil when the probe connection is reset.
      def pv_utility
        app.sync do
          if @_pv_utility.nil?
            if app.opts[:pv_enabled]
              res = blocking_channel_result("which pv && pv --version")
              app.debug "#{Time.current.to_f}: #{res.to_a.inspect}"
              if m = res[1].to_s.match(/pv\s([0-9\.]+)\s/i)
                if Gem::Version.new(m[1]) >= Gem::Version.new("1.3.8")
                  @_pv_utility = res[0].strip.presence
                end
              end
            end
            @_pv_utility = false unless @_pv_utility
          end
          @_pv_utility
        end
      rescue Errno::ECONNRESET
      end

      # Shell command calculating the integrity hash of a remote file
      # (optionally piped through pv), or nil when integrity checking is
      # disabled.
      def calculate_remote_integrity_hash_command file, pv_binary = false
        return unless integrity?
        icmd = "#{integrity_binary}#{integrity_sha}"
        if pv_binary.presence
          %{#{pv_binary} -n -b #{file} | #{icmd}}
        else
          %{#{icmd} #{file}}
        end
      end

      # Runs the integrity hash command remotely; returns [cmd, result] or
      # nil when integrity checking is disabled.
      def calculate_remote_integrity_hash file, blocking = true
        cmd = calculate_remote_integrity_hash_command(file)
        return unless cmd
        [cmd, blocking_channel_result(cmd, channel: true, use_sh: true, blocking: blocking)]
      end
    end
  end
end
|
@@ -0,0 +1,47 @@
|
|
1
|
+
module DbSucker
  class Application
    # Loads and indexes all Container definitions found in the application's
    # YAML config directory.
    class ContainerCollection
      DuplicateIdentifierError = Class.new(::ArgumentError)
      YAMLParseError = Class.new(::RuntimeError)

      attr_reader :app, :data

      def initialize app
        @app = app
        @data = {}
      end

      # All *.yml files under the config directory. Unless `disabled` is
      # true, files inside path segments starting with "__" are filtered out.
      def yml_configs disabled = false
        candidates = Dir["#{app.core_cfg_path}/**/*.yml"].select { |path| File.file?(path) }
        return candidates if disabled
        candidates.reject do |path|
          relative = path.gsub("#{app.core_cfg_path}/", "")
          relative.split("/").any? { |segment| segment.start_with?("__") }
        end
      end

      # Loads every enabled config file.
      def load_all_configs
        yml_configs.each { |file| load_yml_config(file) }
      end

      # Parses one YAML file and registers a Container per top-level
      # identifier; aborts the application on YAML syntax errors and raises
      # on identifiers already registered from another file.
      def load_yml_config file
        YAML.load_file(file).each do |id, cfg|
          raise DuplicateIdentifierError, "double use of identifier `#{id}' in `#{file}'" if @data.key?(id)
          @data[id] = Container.new(app, id, file, cfg)
        end
      rescue Psych::SyntaxError => ex
        app.abort ex.message
      end

      # Fetches a registered container by identifier (nil when unknown).
      def get id
        @data[id]
      end

      def each &block
        @data.each(&block)
      end
    end
  end
end
|