bigquery_migration 0.3.1 → 0.3.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 68a48ec86057ec0242c55beb90f6dfc8db688e6073d634fbcc9a03e0066b3357
-  data.tar.gz: 2a4358172e12aea071e5b260aa9d8f2dd57ec4d9a29d01ba2926f563ab58beb4
+  metadata.gz: dbaba64f116082e53c68531eda4252832447fbdc722539be390145fbfb0f30bf
+  data.tar.gz: a16286041654a619a9b41d1954e610812c3e9fa956446d848e5ac2bbbcc503a0
 SHA512:
-  metadata.gz: 1df093bcd1f22dc01cd6e09e8ed10d6ad7b85182ecb0d268b8afac48d46489eb5ed7c1f5ff3d5f564b0d7f15017aaf7b02f55fa1bef23618d4f4fde044bb5f67
-  data.tar.gz: 9af8c78d3ed6c9dfd3a269bde24e016f7cf4b63b2ac8a3e2f39ac513a1e035657329c8709ed7ee8ef0f71f5c91881d330804a5af7652bf8b54e0338c50ad24c2
+  metadata.gz: fcafd619bea25e1c79e2cd841a0a4e0ca7b4bec474ec403b73ee8d2fac041fd621afd8a8c302876c0317196dccfc9926b31b557aae98bddeeb141e545accacfb
+  data.tar.gz: cd1e6d6a427fa43e5b58dc063608f9fd3aa10affec688043a711e431d8b9cebe0c37d5069cb689c70a24baa3beb3a308f0997110c0c538c137c41f1630f10baf
CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+# 0.3.2 (2019/04/29)
+
+Enhancements:
+
+* Support clustered table
+
 # 0.3.1 (2018/05/23)
 
 Enhancements:
bigquery_migration.gemspec CHANGED
@@ -29,5 +29,4 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency "pry-byebug"
   spec.add_development_dependency "test-unit"
   spec.add_development_dependency "test-unit-rr"
-  spec.add_development_dependency "test-unit-power_assert"
 end
example/migrate_clustered_table.yml ADDED
@@ -0,0 +1,28 @@
+bigquery: &bigquery
+  credentials_file: example/your-project-000.json
+  dataset: your_dataset_name
+  table: your_clustered_table_name
+  clustering:
+    fields:
+      - timestamp
+      - integer
+
+actions:
+- action: create_dataset
+  <<: *bigquery
+- action: migrate_partitioned_table
+  <<: *bigquery
+  columns:
+    - { name: 'timestamp', type: 'TIMESTAMP' }
+    - { name: 'integer', type: 'INTEGER' }
+    - name: 'record'
+      type: 'RECORD'
+      fields:
+        - { name: 'string', type: 'STRING' }
+        - { name: 'integer', type: 'INTEGER' }
+        - { name: 'bytes', type: 'BYTES' }
+- action: migrate_partitioned_table
+  <<: *bigquery
+  schema_file: example/schema.json
+- action: delete_table
+  <<: *bigquery
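For orientation, here is a sketch of the plain Ruby hash that one merged action from this example resolves to once the `&bigquery` anchor is expanded and keys are symbolized; the values are the example's own placeholders, and the variable name is illustrative.

```ruby
# Sketch of a single merged action from example/migrate_clustered_table.yml
# after the <<: *bigquery anchor is expanded (keys shown symbolized).
action_config = {
  action: 'migrate_partitioned_table',
  credentials_file: 'example/your-project-000.json',
  dataset: 'your_dataset_name',
  table: 'your_clustered_table_name',
  clustering: { fields: ['timestamp', 'integer'] },
  columns: [
    { name: 'timestamp', type: 'TIMESTAMP' },
    { name: 'integer',   type: 'INTEGER' },
    # the RECORD column from the example is omitted here for brevity
  ],
}
```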
lib/bigquery_migration/bigquery_wrapper.rb CHANGED
@@ -88,7 +88,7 @@ class BigqueryMigration
       dataset ||= self.dataset
       begin
         logger.info { "Get dataset... #{project}:#{dataset}" }
-        client.get_dataset(project, dataset)
+        response = client.get_dataset(project, dataset)
       rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
         if e.status_code == 404
           raise NotFoundError, "Dataset #{project}:#{dataset} is not found"
@@ -191,6 +191,12 @@ class BigqueryMigration
         }
       end
 
+      if clustering && clustering[:fields]
+        body[:clustering] = {
+          fields: clustering[:fields]
+        }
+      end
+
       opts = {}
       logger.debug { "#{head}insert_table(#{project}, #{dataset}, #{body}, #{opts})" }
       unless dry_run?
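To make the new branch concrete, the following is a hedged sketch of what the `body` hash could look like for the clustered-table example above. Only the `:clustering` entry comes from this diff; `table_reference`, `schema`, and `time_partitioning` are assumptions about what the rest of the method builds (the key names follow the BigQuery Tables resource, which accepts up to four clustering columns).

```ruby
# Hypothetical shape of the table body passed to insert_table; only the
# :clustering key is shown in the diff above, the other keys are assumed.
body = {
  table_reference: { table_id: 'your_clustered_table_name' },
  schema: {
    fields: [
      { name: 'timestamp', type: 'TIMESTAMP' },
      { name: 'integer',   type: 'INTEGER' },
    ],
  },
  time_partitioning: { type: 'DAY' },               # assumed; the example uses migrate_partitioned_table
  clustering: { fields: ['timestamp', 'integer'] }, # added by the hunk above
}
```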
@@ -729,7 +735,14 @@ class BigqueryMigration
       when String
         return HashUtil.deep_symbolize_keys(JSON.parse(File.read(json_keyfile)))
       when Hash
-        return json_keyfile[:content]
+        case json_keyfile[:content]
+        when String
+          return HashUtil.deep_symbolize_keys(JSON.parse(json_keyfile[:content]))
+        when Hash
+          return json_keyfile[:content]
+        else
+          raise ConfigError.new "Unsupported json_keyfile type"
+        end
       else
         raise ConfigError.new "Unsupported json_keyfile type"
       end
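Read in isolation, the widened `json_keyfile` handling behaves like the small, runnable sketch below, which uses `JSON.parse(..., symbolize_names: true)` as a stand-in for the gem's `HashUtil.deep_symbolize_keys` and `ArgumentError` in place of its `ConfigError`.

```ruby
require 'json'

# Standalone sketch of the 0.3.2 behaviour: the Hash form of json_keyfile may
# now carry either a raw JSON string or an already-parsed Hash under :content.
def resolve_keyfile_content(json_keyfile)
  case json_keyfile[:content]
  when String
    JSON.parse(json_keyfile[:content], symbolize_names: true)
  when Hash
    json_keyfile[:content]
  else
    raise ArgumentError, 'Unsupported json_keyfile type'
  end
end

resolve_keyfile_content(content: '{"type":"service_account"}') #=> {:type=>"service_account"}
resolve_keyfile_content(content: { type: 'service_account' })  #=> {:type=>"service_account"}
```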
@@ -830,6 +843,10 @@ class BigqueryMigration
       config[:location]
     end
 
+    def clustering
+      config[:clustering]
+    end
+
     def job_status_polling_interval
       @job_status_polling_interval ||= config[:job_status_polling_interval] || 5
     end
lib/bigquery_migration/version.rb CHANGED
@@ -1,3 +1,3 @@
 class BigqueryMigration
-  VERSION = "0.3.1"
+  VERSION = "0.3.2"
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: bigquery_migration
 version: !ruby/object:Gem::Version
-  version: 0.3.1
+  version: 0.3.2
 platform: ruby
 authors:
 - Naotoshi Seo
@@ -10,7 +10,7 @@ authors:
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-05-22 00:00:00.000000000 Z
+date: 2019-04-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: google-api-client
@@ -138,20 +138,6 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
-- !ruby/object:Gem::Dependency
-  name: test-unit-power_assert
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - ">="
-      - !ruby/object:Gem::Version
-        version: '0'
 description: Migrate BigQuery table schema.
 email:
 - sonots@gmail.com
@@ -179,6 +165,7 @@ files:
 - example/copy_table.yml
 - example/example.yml
 - example/insert_select.yml
+- example/migrate_clustered_table.yml
 - example/migrate_partitioned_table.yml
 - example/migrate_table.yml
 - example/schema.json
@@ -217,8 +204,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.7.4
+rubygems_version: 3.0.3
 signing_key:
 specification_version: 4
 summary: Migrate BigQuery table schema