taps 0.3.11 → 0.3.12

@@ -1,20 +1,20 @@
 Sequel::Model.db = Sequel.connect(Taps::Config.taps_database_url)
 
 class DbSession < Sequel::Model
-  plugin :schema
-  set_schema do
-    primary_key :id
-    text :key
-    text :database_url
-    timestamp :started_at
-    timestamp :last_access
-  end
+  plugin :schema
+  set_schema do
+    primary_key :id
+    text :key
+    text :database_url
+    timestamp :started_at
+    timestamp :last_access
+  end
 
-  def conn
-    Sequel.connect(database_url) do |db|
-      yield db if block_given?
-    end
-  end
+  def conn
+    Sequel.connect(database_url) do |db|
+      yield db if block_given?
+    end
+  end
 
 end
 
 DbSession.create_table! unless DbSession.table_exists?
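For orientation, this model is what the taps server uses to track transfer sessions. A minimal usage sketch, not part of the diff (the key and database URL below are hypothetical):

    session = DbSession.create(
      :key          => 'abc123',               # hypothetical session key
      :database_url => 'sqlite://example.db',  # hypothetical target database
      :started_at   => Time.now,
      :last_access  => Time.now
    )

    session.conn do |db|
      db.tables  # the block receives a live Sequel connection to database_url
    end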
data/lib/taps/log.rb CHANGED
@@ -1,15 +1,15 @@
 module Taps
-  def self.log=(log)
-    @@log = log
-  end
+  def self.log=(log)
+    @@log = log
+  end
 
-  def self.log
-    @@log ||= begin
-      require 'logger'
-      log = Logger.new($stderr)
-      log.level = Logger::ERROR
-      log.datetime_format = "%Y-%m-%d %H:%M:%S"
-      log
-    end
-  end
+  def self.log
+    @@log ||= begin
+      require 'logger'
+      log = Logger.new($stderr)
+      log.level = Logger::ERROR
+      log.datetime_format = "%Y-%m-%d %H:%M:%S"
+      log
+    end
+  end
 end
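Illustrative only (not part of the diff): Taps.log lazily builds a stderr Logger at ERROR level, and Taps.log= lets a caller substitute a different one, for example:

    require 'logger'

    Taps.log.error("transfer failed")  # uses the default stderr logger

    verbose = Logger.new($stdout)
    verbose.level = Logger::DEBUG
    Taps.log = verbose                 # swap in a more talkative logger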
data/lib/taps/monkey.rb CHANGED
@@ -1,21 +1,21 @@
 class Hash
-  def symbolize_keys
-    inject({}) do |options, (key, value)|
-      options[(key.to_sym rescue key) || key] = value
-      options
-    end
-  end
+  def symbolize_keys
+    inject({}) do |options, (key, value)|
+      options[(key.to_sym rescue key) || key] = value
+      options
+    end
+  end
 
-  def symbolize_keys!
-    self.replace(symbolize_keys)
-  end
+  def symbolize_keys!
+    self.replace(symbolize_keys)
+  end
 
-  def symbolize_recursively!
-    self.replace(symbolize_keys)
-    self.each do |k, v|
-      if v.kind_of?(Hash)
-        v.symbolize_keys!
-      end
-    end
-  end
+  def symbolize_recursively!
+    self.replace(symbolize_keys)
+    self.each do |k, v|
+      if v.kind_of?(Hash)
+        v.symbolize_keys!
+      end
-    end
+    end
+  end
 end
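A quick sketch of what these Hash patches do (illustrative, not from the gem):

    h = { 'a' => 1, 'nested' => { 'b' => 2 } }

    h.symbolize_keys          # new hash: { :a => 1, :nested => { 'b' => 2 } }, nested keys untouched
    h.symbolize_keys!         # same conversion, but replaces h in place
    h.symbolize_recursively!  # also symbolizes keys of directly nested hashes: { :a => 1, :nested => { :b => 2 } }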
@@ -5,69 +5,69 @@ require 'stringio'
 
 module Taps
 class Multipart
-  class Container
-    attr_accessor :attachments
+  class Container
+    attr_accessor :attachments
 
-    def initialize
-      @attachments = []
-    end
+    def initialize
+      @attachments = []
+    end
 
-    def attach(opts)
-      mp = Taps::Multipart.new(opts)
-      attachments << mp
-    end
+    def attach(opts)
+      mp = Taps::Multipart.new(opts)
+      attachments << mp
+    end
 
-    def generate
-      hash = {}
-      attachments.each do |mp|
-        hash[mp.name] = mp
-      end
-      m = RestClient::Payload::Multipart.new(hash)
-      [m.to_s, m.headers['Content-Type']]
-    end
-  end
+    def generate
+      hash = {}
+      attachments.each do |mp|
+        hash[mp.name] = mp
+      end
+      m = RestClient::Payload::Multipart.new(hash)
+      [m.to_s, m.headers['Content-Type']]
+    end
+  end
 
-  attr_reader :opts
+  attr_reader :opts
 
-  def initialize(opts={})
-    @opts = opts
-  end
+  def initialize(opts={})
+    @opts = opts
+  end
 
-  def name
-    opts[:name]
-  end
+  def name
+    opts[:name]
+  end
 
-  def to_s
-    opts[:payload]
-  end
+  def to_s
+    opts[:payload]
+  end
 
-  def content_type
-    opts[:content_type] || 'text/plain'
-  end
+  def content_type
+    opts[:content_type] || 'text/plain'
+  end
 
-  def original_filename
-    opts[:original_filename]
-  end
+  def original_filename
+    opts[:original_filename]
+  end
 
-  def self.create
-    c = Taps::Multipart::Container.new
-    yield c
-    c.generate
-  end
+  def self.create
+    c = Taps::Multipart::Container.new
+    yield c
+    c.generate
+  end
 
-  # response is a rest-client response
-  def self.parse(response)
-    content = response.to_s
-    env = {
-      'CONTENT_TYPE' => response.headers[:content_type],
-      'CONTENT_LENGTH' => content.size,
-      'rack.input' => StringIO.new(content)
-    }
+  # response is a rest-client response
+  def self.parse(response)
+    content = response.to_s
+    env = {
+      'CONTENT_TYPE' => response.headers[:content_type],
+      'CONTENT_LENGTH' => content.size,
+      'rack.input' => StringIO.new(content)
+    }
 
-    params = Rack::Utils::Multipart.parse_multipart(env)
-    params.symbolize_keys!
-    params
-  end
+    params = Rack::Utils::Multipart.parse_multipart(env)
+    params.symbolize_keys!
+    params
+  end
 
 end
 end
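Illustrative sketch of how this helper is used; it mirrors the push side shown in operation.rb below (the payload here is hypothetical):

    require 'json'

    content, content_type = Taps::Multipart.create do |r|
      r.attach :name => :json,
        :payload => { :hello => 'world' }.to_json,  # hypothetical payload
        :content_type => 'application/json'
    end
    # content is the encoded multipart body; content_type carries the boundary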
@@ -13,537 +13,537 @@ $VERBOSE = nil
 module Taps
 
 class Operation
-  attr_reader :database_url, :remote_url, :opts
-  attr_reader :session_uri
-
-  def initialize(database_url, remote_url, opts={})
-    @database_url = database_url
-    @remote_url = remote_url
-    @opts = opts
-    @exiting = false
-    @session_uri = opts[:session_uri]
-  end
-
-  def file_prefix
-    "op"
-  end
-
-  def indexes_first?
-    !!opts[:indexes_first]
-  end
-
-  def table_filter
-    opts[:table_filter]
-  end
-
-  def apply_table_filter(tables)
-    return tables unless table_filter
-    re = Regexp.new(table_filter)
-    if tables.kind_of?(Hash)
-      ntables = {}
-      tables.each do |t, d|
-        unless re.match(t.to_s).nil?
-          ntables[t] = d
-        end
-      end
-      ntables
-    else
-      tables.reject { |t| re.match(t.to_s).nil? }
-    end
-  end
-
-  def log
-    Taps.log
-  end
-
-  def store_session
-    file = "#{file_prefix}_#{Time.now.strftime("%Y%m%d%H%M")}.dat"
-    puts "\nSaving session to #{file}.."
-    File.open(file, 'w') do |f|
-      f.write(to_hash.to_json)
-    end
-  end
-
-  def to_hash
-    {
-      :klass => self.class.to_s,
-      :database_url => database_url,
-      :remote_url => remote_url,
-      :session_uri => session_uri,
-      :stream_state => stream_state,
-      :completed_tables => completed_tables,
-      :table_filter => table_filter,
-    }
-  end
-
-  def exiting?
-    !!@exiting
-  end
-
-  def setup_signal_trap
-    trap("INT") {
-      puts "\nCompleting current action..."
-      @exiting = true
-    }
-
-    trap("TERM") {
-      puts "\nCompleting current action..."
-      @exiting = true
-    }
-  end
-
-  def resuming?
-    opts[:resume] == true
-  end
-
-  def default_chunksize
-    opts[:default_chunksize]
-  end
-
-  def completed_tables
-    opts[:completed_tables] ||= []
-  end
-
-  def stream_state
-    opts[:stream_state] ||= {}
-  end
-
-  def stream_state=(val)
-    opts[:stream_state] = val
-  end
-
-  def compression_disabled?
-    !!opts[:disable_compression]
-  end
-
-  def db
-    @db ||= Sequel.connect(database_url)
-  end
-
-  def server
-    @server ||= RestClient::Resource.new(remote_url)
-  end
-
-  def session_resource
-    @session_resource ||= begin
-      @session_uri ||= server['sessions'].post('', http_headers).to_s
-      server[@session_uri]
-    end
-  end
-
-  def set_session(uri)
-    session_uri = uri
-    @session_resource = server[session_uri]
-  end
-
-  def close_session
-    @session_resource.delete(http_headers) if @session_resource
-  end
-
-  def safe_url(url)
-    url.sub(/\/\/(.+?)?:(.*?)@/, '//\1:[hidden]@')
-  end
-
-  def safe_remote_url
-    safe_url(remote_url)
-  end
-
-  def safe_database_url
-    safe_url(database_url)
-  end
-
-  def http_headers(extra = {})
-    base = { :taps_version => Taps.version }
-    if compression_disabled?
-      base[:accept_encoding] = ""
-    else
-      base[:accept_encoding] = "gzip, deflate"
-    end
-    base.merge(extra)
-  end
-
-  def format_number(num)
-    num.to_s.gsub(/(\d)(?=(\d\d\d)+(?!\d))/, "\\1,")
-  end
-
-  def verify_server
-    begin
-      server['/'].get(http_headers)
-    rescue RestClient::RequestFailed => e
-      if e.http_code == 417
-        puts "#{safe_remote_url} is running a different minor version of taps."
-        puts "#{e.response.to_s}"
-        exit(1)
-      else
-        raise
-      end
-    rescue RestClient::Unauthorized
-      puts "Bad credentials given for #{safe_remote_url}"
-      exit(1)
-    rescue Errno::ECONNREFUSED
-      puts "Can't connect to #{safe_remote_url}. Please check that it's running"
-      exit(1)
-    end
-  end
-
-  def self.factory(type, database_url, remote_url, opts)
-    type = :resume if opts[:resume]
-    klass = case type
-      when :pull then Taps::Pull
-      when :push then Taps::Push
-      when :resume then eval(opts[:klass])
-      else raise "Unknown Operation Type -> #{type}"
-    end
-
-    klass.new(database_url, remote_url, opts)
-  end
+  attr_reader :database_url, :remote_url, :opts
+  attr_reader :session_uri
+
+  def initialize(database_url, remote_url, opts={})
+    @database_url = database_url
+    @remote_url = remote_url
+    @opts = opts
+    @exiting = false
+    @session_uri = opts[:session_uri]
+  end
+
+  def file_prefix
+    "op"
+  end
+
+  def indexes_first?
+    !!opts[:indexes_first]
+  end
+
+  def table_filter
+    opts[:table_filter]
+  end
+
+  def apply_table_filter(tables)
+    return tables unless table_filter
+    re = Regexp.new(table_filter)
+    if tables.kind_of?(Hash)
+      ntables = {}
+      tables.each do |t, d|
+        unless re.match(t.to_s).nil?
+          ntables[t] = d
+        end
+      end
+      ntables
+    else
+      tables.reject { |t| re.match(t.to_s).nil? }
+    end
+  end
+
+  def log
+    Taps.log
+  end
+
+  def store_session
+    file = "#{file_prefix}_#{Time.now.strftime("%Y%m%d%H%M")}.dat"
+    puts "\nSaving session to #{file}.."
+    File.open(file, 'w') do |f|
+      f.write(to_hash.to_json)
+    end
+  end
+
+  def to_hash
+    {
+      :klass => self.class.to_s,
+      :database_url => database_url,
+      :remote_url => remote_url,
+      :session_uri => session_uri,
+      :stream_state => stream_state,
+      :completed_tables => completed_tables,
+      :table_filter => table_filter,
+    }
+  end
+
+  def exiting?
+    !!@exiting
+  end
+
+  def setup_signal_trap
+    trap("INT") {
+      puts "\nCompleting current action..."
+      @exiting = true
+    }
+
+    trap("TERM") {
+      puts "\nCompleting current action..."
+      @exiting = true
+    }
+  end
+
+  def resuming?
+    opts[:resume] == true
+  end
+
+  def default_chunksize
+    opts[:default_chunksize]
+  end
+
+  def completed_tables
+    opts[:completed_tables] ||= []
+  end
+
+  def stream_state
+    opts[:stream_state] ||= {}
+  end
+
+  def stream_state=(val)
+    opts[:stream_state] = val
+  end
+
+  def compression_disabled?
+    !!opts[:disable_compression]
+  end
+
+  def db
+    @db ||= Sequel.connect(database_url)
+  end
+
+  def server
+    @server ||= RestClient::Resource.new(remote_url)
+  end
+
+  def session_resource
+    @session_resource ||= begin
+      @session_uri ||= server['sessions'].post('', http_headers).to_s
+      server[@session_uri]
+    end
+  end
+
+  def set_session(uri)
+    session_uri = uri
+    @session_resource = server[session_uri]
+  end
+
+  def close_session
+    @session_resource.delete(http_headers) if @session_resource
+  end
+
+  def safe_url(url)
+    url.sub(/\/\/(.+?)?:(.*?)@/, '//\1:[hidden]@')
+  end
+
+  def safe_remote_url
+    safe_url(remote_url)
+  end
+
+  def safe_database_url
+    safe_url(database_url)
+  end
+
+  def http_headers(extra = {})
+    base = { :taps_version => Taps.version }
+    if compression_disabled?
+      base[:accept_encoding] = ""
+    else
+      base[:accept_encoding] = "gzip, deflate"
+    end
+    base.merge(extra)
+  end
+
+  def format_number(num)
+    num.to_s.gsub(/(\d)(?=(\d\d\d)+(?!\d))/, "\\1,")
+  end
+
+  def verify_server
+    begin
+      server['/'].get(http_headers)
+    rescue RestClient::RequestFailed => e
+      if e.http_code == 417
+        puts "#{safe_remote_url} is running a different minor version of taps."
+        puts "#{e.response.to_s}"
+        exit(1)
+      else
+        raise
+      end
+    rescue RestClient::Unauthorized
+      puts "Bad credentials given for #{safe_remote_url}"
+      exit(1)
+    rescue Errno::ECONNREFUSED
+      puts "Can't connect to #{safe_remote_url}. Please check that it's running"
+      exit(1)
+    end
+  end
+
+  def self.factory(type, database_url, remote_url, opts)
+    type = :resume if opts[:resume]
+    klass = case type
+      when :pull then Taps::Pull
+      when :push then Taps::Push
+      when :resume then eval(opts[:klass])
+      else raise "Unknown Operation Type -> #{type}"
+    end
+
+    klass.new(database_url, remote_url, opts)
+  end
 end
 
 class Pull < Operation
-  def file_prefix
-    "pull"
-  end
-
-  def to_hash
-    super.merge(:remote_tables_info => remote_tables_info)
-  end
-
-  def run
-    verify_server
-
-    begin
-      unless resuming?
-        pull_schema
-        pull_indexes if indexes_first?
-      end
-      setup_signal_trap
-      pull_partial_data if resuming?
-      pull_data
-      pull_indexes unless indexes_first?
-      pull_reset_sequences
-      close_session
-    rescue RestClient::Exception => e
-      store_session
-      if e.respond_to?(:response)
-        puts "!!! Caught Server Exception"
-        puts "HTTP CODE: #{e.http_code}"
-        puts "#{e.response.to_s}"
-        exit(1)
-      else
-        raise
-      end
-    end
-  end
-
-  def pull_schema
-    puts "Receiving schema"
-
-    progress = ProgressBar.new('Schema', tables.size)
-    tables.each do |table_name, count|
-      schema_data = session_resource['pull/schema'].post({:table_name => table_name}, http_headers).to_s
-      log.debug "Table: #{table_name}\n#{schema_data}\n"
-      output = Taps::Utils.load_schema(database_url, schema_data)
-      puts output if output
-      progress.inc(1)
-    end
-    progress.finish
-  end
-
-  def pull_data
-    puts "Receiving data"
-
-    puts "#{tables.size} tables, #{format_number(record_count)} records"
-
-    tables.each do |table_name, count|
-      progress = ProgressBar.new(table_name.to_s, count)
-      stream = Taps::DataStream.factory(db, {
-        :chunksize => default_chunksize,
-        :table_name => table_name
-      })
-      pull_data_from_table(stream, progress)
-    end
-  end
-
-  def pull_partial_data
-    return if stream_state == {}
-
-    table_name = stream_state[:table_name]
-    record_count = tables[table_name.to_s]
-    puts "Resuming #{table_name}, #{format_number(record_count)} records"
-
-    progress = ProgressBar.new(table_name.to_s, record_count)
-    stream = Taps::DataStream.factory(db, stream_state)
-    pull_data_from_table(stream, progress)
-  end
-
-  def pull_data_from_table(stream, progress)
-    loop do
-      begin
-        if exiting?
-          store_session
-          exit 0
-        end
-
-        size = stream.fetch_remote(session_resource['pull/table'], http_headers)
-        break if stream.complete?
-        progress.inc(size) unless exiting?
-        stream.error = false
-        self.stream_state = stream.to_hash
-      rescue DataStream::CorruptedData => e
-        puts "Corrupted Data Received #{e.message}, retrying..."
-        stream.error = true
-        next
-      end
-    end
-
-    progress.finish
-    completed_tables << stream.table_name.to_s
-    self.stream_state = {}
-  end
-
-  def tables
-    h = {}
-    remote_tables_info.each do |table_name, count|
-      next if completed_tables.include?(table_name.to_s)
-      h[table_name.to_s] = count
-    end
-    h
-  end
-
-  def record_count
-    @record_count ||= remote_tables_info.values.inject(0) { |a,c| a += c }
-  end
-
-  def remote_tables_info
-    opts[:remote_tables_info] ||= fetch_remote_tables_info
-  end
-
-  def fetch_remote_tables_info
-    retries = 0
-    max_retries = 10
-    begin
-      tables = JSON.load(session_resource['pull/table_names'].get(http_headers).to_s)
-    rescue RestClient::Exception
-      retries += 1
-      retry if retries <= max_retries
-      puts "Unable to fetch tables information from #{remote_url}. Please check the server log."
-      exit(1)
-    end
-
-    data = {}
-    apply_table_filter(tables).each do |table_name|
-      retries = 0
-      begin
-        count = session_resource['pull/table_count'].post({:table => table_name}, http_headers).to_s.to_i
-        data[table_name] = count
-      rescue RestClient::Exception
-        retries += 1
-        retry if retries <= max_retries
-        puts "Unable to fetch tables information from #{remote_url}. Please check the server log."
-        exit(1)
-      end
-    end
-    data
-  end
-
-  def pull_indexes
-    puts "Receiving indexes"
-
-    idxs = JSON.parse(session_resource['pull/indexes'].get(http_headers).to_s)
-
-    apply_table_filter(idxs).each do |table, indexes|
-      next unless indexes.size > 0
-      progress = ProgressBar.new(table, indexes.size)
-      indexes.each do |idx|
-        output = Taps::Utils.load_indexes(database_url, idx)
-        puts output if output
-        progress.inc(1)
-      end
-      progress.finish
-    end
-  end
-
-  def pull_reset_sequences
-    puts "Resetting sequences"
-
-    output = Taps::Utils.schema_bin(:reset_db_sequences, database_url)
-    puts output if output
-  end
+  def file_prefix
+    "pull"
+  end
+
+  def to_hash
+    super.merge(:remote_tables_info => remote_tables_info)
+  end
+
+  def run
+    verify_server
+
+    begin
+      unless resuming?
+        pull_schema
+        pull_indexes if indexes_first?
+      end
+      setup_signal_trap
+      pull_partial_data if resuming?
+      pull_data
+      pull_indexes unless indexes_first?
+      pull_reset_sequences
+      close_session
+    rescue RestClient::Exception => e
+      store_session
+      if e.respond_to?(:response)
+        puts "!!! Caught Server Exception"
+        puts "HTTP CODE: #{e.http_code}"
+        puts "#{e.response.to_s}"
+        exit(1)
+      else
+        raise
+      end
+    end
+  end
+
+  def pull_schema
+    puts "Receiving schema"
+
+    progress = ProgressBar.new('Schema', tables.size)
+    tables.each do |table_name, count|
+      schema_data = session_resource['pull/schema'].post({:table_name => table_name}, http_headers).to_s
+      log.debug "Table: #{table_name}\n#{schema_data}\n"
+      output = Taps::Utils.load_schema(database_url, schema_data)
+      puts output if output
+      progress.inc(1)
+    end
+    progress.finish
+  end
+
+  def pull_data
+    puts "Receiving data"
+
+    puts "#{tables.size} tables, #{format_number(record_count)} records"
+
+    tables.each do |table_name, count|
+      progress = ProgressBar.new(table_name.to_s, count)
+      stream = Taps::DataStream.factory(db, {
+        :chunksize => default_chunksize,
+        :table_name => table_name
+      })
+      pull_data_from_table(stream, progress)
+    end
+  end
+
+  def pull_partial_data
+    return if stream_state == {}
+
+    table_name = stream_state[:table_name]
+    record_count = tables[table_name.to_s]
+    puts "Resuming #{table_name}, #{format_number(record_count)} records"
+
+    progress = ProgressBar.new(table_name.to_s, record_count)
+    stream = Taps::DataStream.factory(db, stream_state)
+    pull_data_from_table(stream, progress)
+  end
+
+  def pull_data_from_table(stream, progress)
+    loop do
+      begin
+        if exiting?
+          store_session
+          exit 0
+        end
+
+        size = stream.fetch_remote(session_resource['pull/table'], http_headers)
+        break if stream.complete?
+        progress.inc(size) unless exiting?
+        stream.error = false
+        self.stream_state = stream.to_hash
+      rescue DataStream::CorruptedData => e
+        puts "Corrupted Data Received #{e.message}, retrying..."
+        stream.error = true
+        next
+      end
+    end
+
+    progress.finish
+    completed_tables << stream.table_name.to_s
+    self.stream_state = {}
+  end
+
+  def tables
+    h = {}
+    remote_tables_info.each do |table_name, count|
+      next if completed_tables.include?(table_name.to_s)
+      h[table_name.to_s] = count
+    end
+    h
+  end
+
+  def record_count
+    @record_count ||= remote_tables_info.values.inject(0) { |a,c| a += c }
+  end
+
+  def remote_tables_info
+    opts[:remote_tables_info] ||= fetch_remote_tables_info
+  end
+
+  def fetch_remote_tables_info
+    retries = 0
+    max_retries = 10
+    begin
+      tables = JSON.load(session_resource['pull/table_names'].get(http_headers).to_s)
+    rescue RestClient::Exception
+      retries += 1
+      retry if retries <= max_retries
+      puts "Unable to fetch tables information from #{remote_url}. Please check the server log."
+      exit(1)
+    end
+
+    data = {}
+    apply_table_filter(tables).each do |table_name|
+      retries = 0
+      begin
+        count = session_resource['pull/table_count'].post({:table => table_name}, http_headers).to_s.to_i
+        data[table_name] = count
+      rescue RestClient::Exception
+        retries += 1
+        retry if retries <= max_retries
+        puts "Unable to fetch tables information from #{remote_url}. Please check the server log."
+        exit(1)
+      end
+    end
+    data
+  end
+
+  def pull_indexes
+    puts "Receiving indexes"
+
+    idxs = JSON.parse(session_resource['pull/indexes'].get(http_headers).to_s)
+
+    apply_table_filter(idxs).each do |table, indexes|
+      next unless indexes.size > 0
+      progress = ProgressBar.new(table, indexes.size)
+      indexes.each do |idx|
+        output = Taps::Utils.load_indexes(database_url, idx)
+        puts output if output
+        progress.inc(1)
+      end
+      progress.finish
+    end
+  end
+
+  def pull_reset_sequences
+    puts "Resetting sequences"
+
+    output = Taps::Utils.schema_bin(:reset_db_sequences, database_url)
+    puts output if output
+  end
 end
 
 class Push < Operation
-  def file_prefix
-    "push"
-  end
-
-  def to_hash
-    super.merge(:local_tables_info => local_tables_info)
-  end
-
-  def run
-    verify_server
-    begin
-      unless resuming?
-        push_schema
-        push_indexes if indexes_first?
-      end
-      setup_signal_trap
-      push_partial_data if resuming?
-      push_data
-      push_indexes unless indexes_first?
-      push_reset_sequences
-      close_session
-    rescue RestClient::Exception => e
-      store_session
-      if e.respond_to?(:response)
-        puts "!!! Caught Server Exception"
-        puts "HTTP CODE: #{e.http_code}"
-        puts "#{e.response.to_s}"
-        exit(1)
-      else
-        raise
-      end
-    end
-  end
-
-  def push_indexes
-    idxs = JSON.parse(Taps::Utils.schema_bin(:indexes_individual, database_url))
-
-    return unless idxs.size > 0
-
-    puts "Sending indexes"
-
-    apply_table_filter(idxs).each do |table, indexes|
-      next unless indexes.size > 0
-      progress = ProgressBar.new(table, indexes.size)
-      indexes.each do |idx|
-        session_resource['push/indexes'].post(idx, http_headers)
-        progress.inc(1)
-      end
-      progress.finish
-    end
-  end
-
-  def push_schema
-    puts "Sending schema"
-
-    progress = ProgressBar.new('Schema', tables.size)
-    tables.each do |table, count|
-      schema_data = Taps::Utils.schema_bin(:dump_table, database_url, table)
-      log.debug "Table: #{table}\n#{schema_data}\n"
-      session_resource['push/schema'].post(schema_data, http_headers)
-      progress.inc(1)
-    end
-    progress.finish
-  end
-
-  def push_reset_sequences
-    puts "Resetting sequences"
-
-    session_resource['push/reset_sequences'].post('', http_headers)
-  end
-
-  def push_partial_data
-    return if stream_state == {}
-
-    table_name = stream_state[:table_name]
-    record_count = tables[table_name.to_s]
-    puts "Resuming #{table_name}, #{format_number(record_count)} records"
-    progress = ProgressBar.new(table_name.to_s, record_count)
-    stream = Taps::DataStream.factory(db, stream_state)
-    push_data_from_table(stream, progress)
-  end
-
-  def push_data
-    puts "Sending data"
-
-    puts "#{tables.size} tables, #{format_number(record_count)} records"
-
-    tables.each do |table_name, count|
-      stream = Taps::DataStream.factory(db,
-        :table_name => table_name,
-        :chunksize => default_chunksize)
-      progress = ProgressBar.new(table_name.to_s, count)
-      push_data_from_table(stream, progress)
-    end
-  end
-
-  def push_data_from_table(stream, progress)
-    loop do
-      if exiting?
-        store_session
-        exit 0
-      end
-
-      row_size = 0
-      chunksize = stream.state[:chunksize]
-      chunksize = Taps::Utils.calculate_chunksize(chunksize) do |c|
-        stream.state[:chunksize] = c
-        encoded_data, row_size, elapsed_time = stream.fetch
-        break if stream.complete?
-
-        data = {
-          :state => stream.to_hash,
-          :checksum => Taps::Utils.checksum(encoded_data).to_s
-        }
-
-        begin
-          content, content_type = Taps::Multipart.create do |r|
-            r.attach :name => :encoded_data,
-              :payload => encoded_data,
-              :content_type => 'application/octet-stream'
-            r.attach :name => :json,
-              :payload => data.to_json,
-              :content_type => 'application/json'
-          end
-          session_resource['push/table'].post(content, http_headers(:content_type => content_type))
-          self.stream_state = stream.to_hash
-        rescue RestClient::RequestFailed => e
-          # retry the same data, it got corrupted somehow.
-          if e.http_code == 412
-            next
-          end
-          raise
-        end
-        elapsed_time
-      end
-      stream.state[:chunksize] = chunksize
-
-      progress.inc(row_size)
-
-      stream.increment(row_size)
-      break if stream.complete?
-    end
-
-    progress.finish
-    completed_tables << stream.table_name.to_s
-    self.stream_state = {}
-  end
-
-  def local_tables_info
-    opts[:local_tables_info] ||= fetch_local_tables_info
-  end
-
-  def tables
-    h = {}
-    local_tables_info.each do |table_name, count|
-      next if completed_tables.include?(table_name.to_s)
-      h[table_name.to_s] = count
-    end
-    h
-  end
-
-  def record_count
-    @record_count ||= local_tables_info.values.inject(0) { |a,c| a += c }
-  end
-
-  def fetch_local_tables_info
-    tables_with_counts = {}
-    db.tables.each do |table|
-      tables_with_counts[table] = db[table.to_sym.identifier].count
-    end
-    apply_table_filter(tables_with_counts)
-  end
+  def file_prefix
+    "push"
+  end
+
+  def to_hash
+    super.merge(:local_tables_info => local_tables_info)
+  end
+
+  def run
+    verify_server
+    begin
+      unless resuming?
+        push_schema
+        push_indexes if indexes_first?
+      end
+      setup_signal_trap
+      push_partial_data if resuming?
+      push_data
+      push_indexes unless indexes_first?
+      push_reset_sequences
+      close_session
+    rescue RestClient::Exception => e
+      store_session
+      if e.respond_to?(:response)
+        puts "!!! Caught Server Exception"
+        puts "HTTP CODE: #{e.http_code}"
+        puts "#{e.response.to_s}"
+        exit(1)
+      else
+        raise
+      end
+    end
+  end
+
+  def push_indexes
+    idxs = JSON.parse(Taps::Utils.schema_bin(:indexes_individual, database_url))
+
+    return unless idxs.size > 0
+
+    puts "Sending indexes"
+
+    apply_table_filter(idxs).each do |table, indexes|
+      next unless indexes.size > 0
+      progress = ProgressBar.new(table, indexes.size)
+      indexes.each do |idx|
+        session_resource['push/indexes'].post(idx, http_headers)
+        progress.inc(1)
+      end
+      progress.finish
+    end
+  end
+
+  def push_schema
+    puts "Sending schema"
+
+    progress = ProgressBar.new('Schema', tables.size)
+    tables.each do |table, count|
+      schema_data = Taps::Utils.schema_bin(:dump_table, database_url, table)
+      log.debug "Table: #{table}\n#{schema_data}\n"
+      session_resource['push/schema'].post(schema_data, http_headers)
+      progress.inc(1)
+    end
+    progress.finish
+  end
+
+  def push_reset_sequences
+    puts "Resetting sequences"
+
+    session_resource['push/reset_sequences'].post('', http_headers)
+  end
+
+  def push_partial_data
+    return if stream_state == {}
+
+    table_name = stream_state[:table_name]
+    record_count = tables[table_name.to_s]
+    puts "Resuming #{table_name}, #{format_number(record_count)} records"
+    progress = ProgressBar.new(table_name.to_s, record_count)
+    stream = Taps::DataStream.factory(db, stream_state)
+    push_data_from_table(stream, progress)
+  end
+
+  def push_data
+    puts "Sending data"
+
+    puts "#{tables.size} tables, #{format_number(record_count)} records"
+
+    tables.each do |table_name, count|
+      stream = Taps::DataStream.factory(db,
+        :table_name => table_name,
+        :chunksize => default_chunksize)
+      progress = ProgressBar.new(table_name.to_s, count)
+      push_data_from_table(stream, progress)
+    end
+  end
+
+  def push_data_from_table(stream, progress)
+    loop do
+      if exiting?
+        store_session
+        exit 0
+      end
+
+      row_size = 0
+      chunksize = stream.state[:chunksize]
+      chunksize = Taps::Utils.calculate_chunksize(chunksize) do |c|
+        stream.state[:chunksize] = c
+        encoded_data, row_size, elapsed_time = stream.fetch
+        break if stream.complete?
+
+        data = {
+          :state => stream.to_hash,
+          :checksum => Taps::Utils.checksum(encoded_data).to_s
+        }
+
+        begin
+          content, content_type = Taps::Multipart.create do |r|
+            r.attach :name => :encoded_data,
+              :payload => encoded_data,
+              :content_type => 'application/octet-stream'
+            r.attach :name => :json,
+              :payload => data.to_json,
+              :content_type => 'application/json'
+          end
+          session_resource['push/table'].post(content, http_headers(:content_type => content_type))
+          self.stream_state = stream.to_hash
+        rescue RestClient::RequestFailed => e
+          # retry the same data, it got corrupted somehow.
+          if e.http_code == 412
+            next
+          end
+          raise
+        end
+        elapsed_time
+      end
+      stream.state[:chunksize] = chunksize
+
+      progress.inc(row_size)
+
+      stream.increment(row_size)
+      break if stream.complete?
+    end
+
+    progress.finish
+    completed_tables << stream.table_name.to_s
+    self.stream_state = {}
+  end
+
+  def local_tables_info
+    opts[:local_tables_info] ||= fetch_local_tables_info
+  end
+
+  def tables
+    h = {}
+    local_tables_info.each do |table_name, count|
+      next if completed_tables.include?(table_name.to_s)
+      h[table_name.to_s] = count
+    end
+    h
+  end
+
+  def record_count
+    @record_count ||= local_tables_info.values.inject(0) { |a,c| a += c }
+  end
+
+  def fetch_local_tables_info
+    tables_with_counts = {}
+    db.tables.each do |table|
+      tables_with_counts[table] = db[table.to_sym.identifier].count
+    end
+    apply_table_filter(tables_with_counts)
+  end
 
 end
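For orientation, these operations are driven through Operation.factory; a minimal sketch of running a pull (the URLs and options below are hypothetical, not part of the diff):

    opts = { :default_chunksize => 1000 }
    op = Taps::Operation.factory(:pull,
      'postgres://localhost/myapp_development',  # hypothetical local database_url
      'http://user:pass@taps.example.com:5000',  # hypothetical remote taps server
      opts)
    op.run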