tapsoob 0.3.24-java → 0.4.1-java

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 5f2ae263c3b94ba8a2d39cdcae3c4ed22462e8b0350a48b865300c5800988ac4
4
- data.tar.gz: ebb482c1449af92696f4ef1cfcaae8336c22d018a90cc3f1e9c0726820c79e3b
3
+ metadata.gz: 4c06335f502637ae12395ff3c5d183297a64c650cfebe4df4f7383e45e87ebaa
4
+ data.tar.gz: f12f5540fdbc10d947ca5d224b816f055a5f4f543c26be47b15c9fed8476ec96
5
5
  SHA512:
6
- metadata.gz: da3ff1a49face2aec964a6848c994decece8606c8b5681dd065bb4dbb9f7f456734511fb2791c383dc35500fb2aa59b7c262ccde288f28eb9783392723fdaf04
7
- data.tar.gz: fe0a13d721abbdf54145a6ce875334b96668edd505159395ad65043105e7a716f8feabe43c35bebc0ddcc5baf30a273b4d3431718346f56044f5dfbe83118fa4
6
+ metadata.gz: c04ab5686c8bd94b517b023b71bcb5910072b4e271ac822d2d4866bb03a376a5d358dfce9b32cb1eb37f047de9eb676d404488c9313240a55714cf038a13c713
7
+ data.tar.gz: e65ac3b651435e35c4abbfcb0318e30d32310f3e58274d0b36ffe0ea14eaf3aa52995cef440589c4d4cd4016201c665baee161c3e4020c53ae37f791f9017383
@@ -28,6 +28,8 @@ module Tapsoob
28
28
  option :tables, desc: "Shortcut to filter on a list of tables", type: :array, aliases: "-t"
29
29
  option :"exclude-tables", desc: "Shortcut to exclude a list of tables", type: :array, aliases: "-e"
30
30
  option :progress, desc: "Show progress", default: true, type: :boolean, aliases: "-p"
31
+ option :purge, desc: "Purge data in tables prior to performing the import", default: false, type: :boolean
32
+ option :"discard-identity", desc: "Remove identity when pushing data (may result in creating duplicates)", default: false, type: :boolean
31
33
  option :debug, desc: "Enable debug messages", default: false, type: :boolean, aliases: "-d"
32
34
  def push(database_url, dump_path = nil)
33
35
  # instantiate stuff
@@ -46,9 +48,8 @@ module Tapsoob
46
48
  data.each do |table|
47
49
  stream = Tapsoob::DataStream.factory(db(database_url, opts), {
48
50
  table_name: table[:table_name],
49
- chunksize: opts[:default_chunksize],
50
- debug: opts[:debug]
51
- })
51
+ chunksize: opts[:default_chunksize]
52
+ }, { :"discard-identity" => opts[:"discard-identity"] || false, :purge => opts[:purge] || false, :debug => opts[:debug] })
52
53
 
53
54
  begin
54
55
  stream.import_rows(table)
@@ -66,6 +67,10 @@ module Tapsoob
66
67
  debug: options[:debug]
67
68
  }
68
69
 
70
+ # Push only options
71
+ opts[:purge] = options[:purge] if options.key?(:purge)
72
+ opts[:"discard-identity"] = options[:"discard-identity"] if options.key?(:"discard-identity")
73
+
69
74
  # Default chunksize
70
75
  if options[:chunksize]
71
76
  opts[:default_chunksize] = (options[:chunksize] < 10 ? 10 : options[:chunksize])
@@ -91,7 +91,8 @@ module Tapsoob
91
91
  rows = {
92
92
  :table_name => ds["table_name"],
93
93
  :header => ds["header"],
94
- :data => ds["data"][state[:offset], (state[:offset] + state[:chunksize])] || [ ]
94
+ :data => (ds["data"][state[:offset], (state[:offset] + state[:chunksize])] || [ ]),
95
+ :types => ds["types"]
95
96
  }
96
97
  update_chunksize_stats
97
98
  rows
@@ -219,20 +220,52 @@ module Tapsoob
219
220
  end
220
221
 
221
222
  def import_rows(rows)
223
+ columns = rows[:header]
224
+ data = rows[:data]
225
+
226
+ # Only import existing columns
227
+ if table.columns.size != columns.size
228
+ existing_columns = table.columns.map(&:to_s)
229
+ additional_columns = columns - existing_columns
230
+ additional_columns_idxs = additional_columns.map { |c| columns.index(c) }
231
+ additional_columns_idxs.reverse.each do |idx|
232
+ columns.delete_at(idx)
233
+ rows[:types].delete_at(idx)
234
+ end
235
+ data.each_index { |didx| additional_columns_idxs.reverse.each { |idx| data[didx].delete_at(idx) } }
236
+ end
237
+
222
238
  # Decode blobs
223
239
  if rows.has_key?(:types) && rows[:types].include?("blob")
224
240
  blob_indices = rows[:types].each_index.select { |idx| rows[:types][idx] == "blob" }
225
- rows[:data].each_index do |idx|
241
+ data.each_index do |idx|
226
242
  blob_indices.each do |bi|
227
- rows[:data][idx][bi] = Sequel::SQL::Blob.new(Tapsoob::Utils.base64decode(rows[:data][idx][bi])) unless rows[:data][idx][bi].nil?
243
+ data[idx][bi] = Sequel::SQL::Blob.new(Tapsoob::Utils.base64decode(data[idx][bi])) unless data[idx][bi].nil?
244
+ end
245
+ end
246
+ end
247
+
248
+ # Parse date/datetime/time columns
249
+ if rows.has_key?(:types)
250
+ %w(date datetime time).each do |type|
251
+ if rows[:types].include?(type)
252
+ type_indices = rows[:types].each_index.select { |idx| rows[:types][idx] == type }
253
+ data.each_index do |idx|
254
+ type_indices.each do |ti|
255
+ data[idx][ti] = Sequel.send("string_to_#{type}".to_sym, data[idx][ti]) unless data[idx][ti].nil?
256
+ end
257
+ end
228
258
  end
229
259
  end
230
260
  end
231
261
 
232
262
  # Remove id column
233
- columns = ((@options[:"discard-identity"] && rows[:header].include?("id")) ? rows[:header] - ["id"] : rows[:header])
234
-
235
- table.import(columns, rows[:data], :commit_every => 100)
263
+ if @options[:"discard-identity"] && rows[:header].include?("id")
264
+ columns = rows[:header] - ["id"]
265
+ data = data.map { |d| d[1..-1] }
266
+ end
267
+
268
+ table.import(columns, data, :commit_every => 100)
236
269
  state[:offset] += rows[:data].size
237
270
  rescue Exception => ex
238
271
  case ex.message
@@ -198,7 +198,7 @@ module Tapsoob
198
198
  stream = Tapsoob::DataStream.factory(db, {
199
199
  :chunksize => default_chunksize,
200
200
  :table_name => table_name
201
- })
201
+ }, { :debug => opts[:debug] })
202
202
  pull_data_from_table(stream, progress)
203
203
  end
204
204
  end
@@ -393,10 +393,10 @@ module Tapsoob
393
393
 
394
394
  tables.each do |table_name, count|
395
395
  next unless File.exists?(File.join(dump_path, "data", "#{table_name}.json"))
396
- db[table_name.to_sym].truncate if @opts[:purge]
396
+ db[table_name.to_sym].truncate if @opts[:purge]
397
397
  stream = Tapsoob::DataStream.factory(db, {
398
398
  :table_name => table_name,
399
- :chunksize => default_chunksize }, { :"discard-identity" => @opts[:"discard-identity"] || false })
399
+ :chunksize => default_chunksize }, { :"discard-identity" => @opts[:"discard-identity"] || false, :purge => @opts[:purge] || false, :debug => @opts[:debug] })
400
400
  progress = ProgressBar.new(table_name.to_s, count)
401
401
  push_data_from_file(stream, progress)
402
402
  end
@@ -429,7 +429,8 @@ module Tapsoob
429
429
  }
430
430
  end
431
431
 
432
- size = stream.fetch_data_in_database({ :encoded_data => encoded_data, :checksum => data[:checksum] })
432
+ row_size = stream.fetch_data_in_database({ :encoded_data => encoded_data, :checksum => data[:checksum] })
433
+ log.debug "row size: #{row_size}"
433
434
  self.stream_state = stream.to_hash
434
435
 
435
436
  c.idle_secs = (d1 + d2)
@@ -481,7 +482,7 @@ module Tapsoob
481
482
  tbls.each do |table|
482
483
  if File.exists?(File.join(dump_path, "data", "#{table}.json"))
483
484
  data = JSON.parse(File.read(File.join(dump_path, "data", "#{table}.json")))
484
- tables_with_counts[table] = data.size
485
+ tables_with_counts[table] = data["data"].size
485
486
  else
486
487
  tables_with_counts[table] = 0
487
488
  end
@@ -1,4 +1,4 @@
1
1
  # -*- encoding : utf-8 -*-
2
2
  module Tapsoob
3
- VERSION = "0.3.24".freeze
3
+ VERSION = "0.4.1".freeze
4
4
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: tapsoob
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.24
4
+ version: 0.4.1
5
5
  platform: java
6
6
  authors:
7
7
  - Félix Bellanger
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2021-06-11 00:00:00.000000000 Z
12
+ date: 2021-06-21 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  requirement: !ruby/object:Gem::Requirement