bigquery_migration 0.2.0.pre2 → 0.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: c8e9acd181e25a8e935168d4a99213c52bca11c9
-  data.tar.gz: a5f4f71c2a04fbc453656bff29b9b961d56b16dd
+  metadata.gz: ee06c260e388082cdec0d3e4f3a72577952acfb8
+  data.tar.gz: 225387ac8df6a0fda1b723bc4f10ca6955fd9025
 SHA512:
-  metadata.gz: d4cbc58db0ca399e5edae6fc21d9fab708db9edebf0e9fadb8f3fdf7f3098d595de662fb92355bbea0bebffa1ffab0e426f89fc7708226917ede3832378572f4
-  data.tar.gz: 3fa2a4a0313eebca630b9648e8a8c5b747ea596ccfc57aa5e477ad5cbb935bb32c626128df8b1f71d9b7a09752ee5f18c02e7e86032eaea6c39894947321c97a
+  metadata.gz: 77326f487936158701a52e0a74f0d2241d2375ca5f96cb6803e7b85cec1b3206fa7728a699eae10a6b351adfb595242fa83ac742d257179128df157ec082b49f
+  data.tar.gz: a1b0e5a6418c43383b3c8b4d36deed62d9af9f823d9b1e6e333c9066f4e879d09214850b00a85238c0b8826392744da92dd9794edca97a67f80a8d794069445d
data/.gitignore CHANGED
@@ -9,3 +9,4 @@
 /tmp/
 your-project-000.json
 .tags
+.ruby-version
data/CHANGELOG.md CHANGED
@@ -4,11 +4,22 @@ Enhancements:
 
 * Support migrate_partitioned_table
 
+Fixes:
+
+* Fix list_table_data for when a value is an empty hash
+
+# 0.1.7 (2016/09/17)
+
+Fixes:
+
+* Prohibit to create a table with empty columns
+* Create a table only if a table does not exist
+
 # 0.1.6 (2016/07/26)
 
 Fixes:
 
-* Fix empty hash to nil
+* Fix empty hash to nil for list table data
 
 # 0.1.5 (2016/07/25)
 
@@ -420,7 +420,7 @@ class BigqueryMigration
       {name: name}.merge!(column)
     end
     if rows = response.to_h[:rows]
-      values = TableData.new(columns, rows).generate_values
+      values = TableData.new(columns, rows).values
     end
 
     {
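The hunk above switches the list_table_data call site from the old generate_values to the renamed TableData#values (the full rename appears in the TableData diff below). As a rough, hypothetical illustration of what that call consumes and returns — the schema and row literals are invented for this sketch and only use the keys the TableData code reads (:type, :mode, :fields, f:, v:):

require 'bigquery_migration' # assumes the released 0.2.0 gem is installed

# Hypothetical schema with one REPEATED column, plus one raw list_table_data row.
columns = [
  { name: 'word', type: 'STRING' },
  { name: 'tags', type: 'STRING', mode: 'REPEATED' },
]
rows = [
  { f: [ { v: 'foo' }, { v: [ { v: 'a' }, { v: 'b' } ] } ] },
]

BigqueryMigration::TableData.new(columns, rows).values
#=> [[["foo", "a"], [nil, "b"]]]
# The repeated column expands the single API row into two sub-rows; the
# non-repeated column is padded with nil after its first occurrence.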
@@ -1,39 +1,76 @@
-# Convert from BigQuery Web console's JavaScript
+# This code is translated from BigQuery Web console's JavaScript
 require_relative 'error'
 
 class BigqueryMigration
   class TableData
-    attr_reader :rows
-    attr_reader :columns
+    attr_reader :rows, :columns
 
     def logger
       BigqueryMigration.logger
     end
 
     def initialize(columns, rows)
-      @columns = columns || raise(ConfigError, '`columns` is required.')
-      @rows = rows || raise(ConfigError, '`rows` is required.')
+      @columns = columns || raise(Error, '`columns` is required.')
+      @rows = rows || raise(Error, '`rows` is required.')
     end
 
-    def generate_values
-      rows = @rows.map do |row|
-        values = []
-        max_repeated_count = calculate_repeated_count(columns: @columns, rows: row).max
-        max_repeated_count.times do |count|
-          values.push(generate_value(columns: @columns, rows: row, count: count))
+    # Format list_table_data response rows, which look like
+    #
+    # [
+    #   { f: [
+    #     { v: "foo" },
+    #     { v: "1" },
+    #     { v: [] },
+    #     { v: "1.1" },
+    #     { v: "true" },
+    #     { v: "1.444435200E9" }
+    #   ] },
+    #   { f: [
+    #     { v: "foo" },
+    #     { v: "2" },
+    #     { v: [
+    #       { v: "foo" },
+    #       { v: "bar" }
+    #     ] },
+    #     { v: "2.2" },
+    #     { v: "false" },
+    #     { v: "1.444435200E9" }
+    #   ] }
+    # ]
+    #
+    # into
+    #
+    # [
+    #   # first row
+    #   [
+    #     [ "foo", "1", nil, "1.1", "true", "1.444435200E9" ]
+    #   ],
+    #   # second row
+    #   [
+    #     [ "foo", "2", "foo", "2.2", "false", "1.444435200E9" ],
+    #     [ nil, nil, "bar", nil, nil, nil ],
+    #   ],
+    # ]
+    def values
+      values = @rows.map do |row|
+        repeated_count = repeated_count(columns: @columns, rows: row)
+        formatted_row = []
+        repeated_count.times do |count|
+          formatted_row << format_row(columns: @columns, rows: row, count: count)
         end
-        values
+        formatted_row
       end
-      # For backword compatibility
-      max_row_count = (rows.map(&:length) || []).max
-      max_row_count > 1 ? rows : rows.map(&:flatten)
+      # flatten if there is no repeated column, for backward compatibility
+      values.map(&:length).max > 1 ? values : values.flatten(1)
     end
 
-    # This method called recursively.
-    # So, rows must be a hash and hash has key f:.
-    private def calculate_repeated_count(columns: nil, rows: nil)
-      # logger.info { "calculate_repeated_count(columns: #{columns}, rows: #{rows})" }
-      return [1] if (rows.nil? || rows.empty?)
+    private
+
+    # Count the maximum number of rows on repeated columns
+    #
+    # This method is called recursively; rows must be a hash with key :f
+    def repeated_count(columns: nil, rows: nil)
+      return 1 if (rows.nil? || rows.empty?)
       validate_rows!(rows)
       rows[:f].zip(columns).map do |row, column|
         if column[:type] == 'RECORD'
@@ -41,84 +78,76 @@ class BigqueryMigration
             if row[:v].length == 0
               1
             else
-              recursive_repeated_counts = row[:v].map do |v|
-                _repeated_counts = calculate_repeated_count(columns: column[:fields], rows: v[:v])
-                repeated_count = _repeated_counts.inject(0) { |acc, n| [acc, n].max }
-                v[:repeated_count] = repeated_count
-              end
-              recursive_repeated_counts.inject(0) { |acc, n| acc + n }
+              row[:v].map do |v|
+                v[:repeated_count] = repeated_count(columns: column[:fields], rows: v[:v])
+              end.inject(:+)
             end
           else
-            _repeated_counts = calculate_repeated_count(columns: column[:fields], rows: row[:v])
-            _repeated_counts.inject(0) { |acc, n| [acc, n].max }
+            repeated_count(columns: column[:fields], rows: row[:v])
           end
         elsif column[:mode] == 'REPEATED'
           [(row[:v] || []).length, 1].max
         else
           1
         end
-      end
+      end.max
     end
 
     # This method called recursively.
     # So, rows must be a hash and hash has key f:.
-    private def generate_value(columns: nil, rows: nil, count: nil)
-      # logger.info { "generate_value(columns: #{columns}, rows: #{rows}, count: #{count})" }
-      value = []
+    def format_row(columns: nil, rows: nil, count: nil)
+      formatted_row = []
       return [nil] if (rows.nil? || rows.empty?)
       validate_rows!(rows)
       rows[:f].zip(columns).each do |row, column|
         if column[:type] == 'RECORD'
           if column[:mode] == 'REPEATED'
             recursive = false
-            # Fixme: would like to avoid using the index counter
             current = 0
             row[:v].each do |v|
               repeated_count = v[:repeated_count]
               if current <= count && count < (current + repeated_count)
-                generated_values = generate_value(columns: column[:fields], rows: v[:v], count: count - current)
-                value.concat(generated_values)
+                formatted_row.concat format_row(columns: column[:fields], rows: v[:v], count: count - current)
                 recursive = true
               end
               current = current + repeated_count
             end
             unless recursive
-              nil_count = generate_nil_count(column[:fields])
-              value.concat(Array.new(nil_count))
+              nil_count = get_nil_count(column[:fields])
+              formatted_row.concat(Array.new(nil_count))
             end
           elsif row[:v].nil?
-            nil_count = generate_nil_count(column[:fields])
-            value.concat(Array.new(nil_count))
+            nil_count = get_nil_count(column[:fields])
+            formatted_row.concat(Array.new(nil_count))
           else
-            generated_values = generate_value(columns: column[:fields], rows: row[:v], count: count)
-            value.concat(generated_values)
+            formatted_row.concat format_row(columns: column[:fields], rows: row[:v], count: count)
           end
         elsif column[:mode] == 'REPEATED'
           v = row[:v]
-          count < v.length ? value.push(normalize_value(v[count][:v])) : value.push(nil)
+          count < v.length ? formatted_row.push(normalize_value(v[count][:v])) : formatted_row.push(nil)
         elsif count == 0
-          value.push((normalize_value(row[:v])))
+          formatted_row.push((normalize_value(row[:v])))
         else
-          value.push(nil)
+          formatted_row.push(nil)
         end
       end
-      value
+      formatted_row
     end
 
     # special treatment empty hash.
     # nil is converted into {} by to_h
-    private def normalize_value(v)
+    def normalize_value(v)
       v.is_a?(Hash) && v.empty? ? nil : v
     end
 
-    private def generate_nil_count(fields)
+    def get_nil_count(fields)
       fields.inject(0) do |acc, f|
-        f[:type] == 'RECORD' ? acc + generate_nil_count(f[:fields]) : acc + 1
+        f[:type] == 'RECORD' ? acc + get_nil_count(f[:fields]) : acc + 1
       end
     end
 
-    private def validate_rows!(rows)
-      raise ConfigError, '`rows` must be a hash and hash has key `:f`.' if !rows.is_a?(Hash) || !rows.has_key?(:f)
+    def validate_rows!(rows)
+      raise Error, '`rows` must be a hash and hash has key `:f`.' if !rows.is_a?(Hash) || !rows.has_key?(:f)
     end
   end
 end
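Two behaviors of the new TableData#values are worth noting, and both tie back to the changelog entry above about empty-hash values: a NULL cell comes out of the API client's to_h as an empty hash and is mapped back to nil by normalize_value, and when no column is repeated the result is flattened one level for backward compatibility. A minimal sketch, again with invented schema and rows:

require 'bigquery_migration' # assumes the released 0.2.0 gem is installed

# Hypothetical flat schema: no RECORD or REPEATED columns.
columns = [
  { name: 'word',  type: 'STRING'  },
  { name: 'count', type: 'INTEGER' },
]
rows = [
  { f: [ { v: 'foo' }, { v: '1' } ] },
  { f: [ { v: 'bar' }, { v: {} } ] },  # NULL cell: to_h turned nil into {}
]

BigqueryMigration::TableData.new(columns, rows).values
#=> [["foo", "1"], ["bar", nil]]
# normalize_value maps the empty hash to nil, and since every formatted row
# has a single sub-row, values flattens one level and returns plain rows.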
@@ -1,3 +1,3 @@
 class BigqueryMigration
-  VERSION = "0.2.0.pre2"
+  VERSION = "0.2.0"
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: bigquery_migration
 version: !ruby/object:Gem::Version
-  version: 0.2.0.pre2
+  version: 0.2.0
 platform: ruby
 authors:
 - Naotoshi Seo
@@ -180,9 +180,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">"
+  - - ">="
     - !ruby/object:Gem::Version
-      version: 1.3.1
+      version: '0'
 requirements: []
 rubyforge_project:
 rubygems_version: 2.5.1