bigquery_migration 0.3.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/CHANGELOG.md +6 -0
- data/README.md +12 -3
- data/bigquery_migration.gemspec +2 -2
- data/lib/bigquery_migration/bigquery_wrapper.rb +28 -11
- data/lib/bigquery_migration/version.rb +1 -1
- metadata +7 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
|
-
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 68a48ec86057ec0242c55beb90f6dfc8db688e6073d634fbcc9a03e0066b3357
|
4
|
+
data.tar.gz: 2a4358172e12aea071e5b260aa9d8f2dd57ec4d9a29d01ba2926f563ab58beb4
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 1df093bcd1f22dc01cd6e09e8ed10d6ad7b85182ecb0d268b8afac48d46489eb5ed7c1f5ff3d5f564b0d7f15017aaf7b02f55fa1bef23618d4f4fde044bb5f67
|
7
|
+
data.tar.gz: 9af8c78d3ed6c9dfd3a269bde24e016f7cf4b63b2ac8a3e2f39ac513a1e035657329c8709ed7ee8ef0f71f5c91881d330804a5af7652bf8b54e0338c50ad24c2
|
data/CHANGELOG.md
CHANGED
data/README.md
CHANGED
@@ -2,6 +2,10 @@
|
|
2
2
|
|
3
3
|
BigqueryMigration is a tool or a ruby library to migrate (or alter) BigQuery table schema.
|
4
4
|
|
5
|
+
## Requirements
|
6
|
+
|
7
|
+
* Ruby >= 2.3.0
|
8
|
+
|
5
9
|
## Installation
|
6
10
|
|
7
11
|
Add this line to your application's Gemfile:
|
@@ -31,6 +35,8 @@ bigquery: &bigquery
|
|
31
35
|
json_keyfile: your-project-000.json
|
32
36
|
dataset: your_dataset_name
|
33
37
|
table: your_table_name
|
38
|
+
# If your data is in a location other than the US or EU multi-region, you must specify the location
|
39
|
+
# location: asia-northeast1
|
34
40
|
|
35
41
|
actions:
|
36
42
|
- action: create_dataset
|
@@ -59,9 +65,12 @@ $ bundle exec bq_migrate run config.yml --exec
|
|
59
65
|
require 'bigquery_migration'
|
60
66
|
|
61
67
|
config = {
|
62
|
-
json_keyfile: '/path/to/your-project-000.json'
|
63
|
-
dataset: 'your_dataset_name'
|
64
|
-
table: 'your_table_name'
|
68
|
+
json_keyfile: '/path/to/your-project-000.json',
|
69
|
+
dataset: 'your_dataset_name',
|
70
|
+
table: 'your_table_name',
|
71
|
+
|
72
|
+
# If your data is in a location other than the US or EU multi-region, you must specify the location
|
73
|
+
# location: asia-northeast1,
|
65
74
|
}
|
66
75
|
columns = [
|
67
76
|
{ name: 'string', type: 'STRING' },
|
data/bigquery_migration.gemspec
CHANGED
@@ -6,8 +6,8 @@ require 'bigquery_migration/version'
|
|
6
6
|
Gem::Specification.new do |spec|
|
7
7
|
spec.name = "bigquery_migration"
|
8
8
|
spec.version = BigqueryMigration::VERSION
|
9
|
-
spec.authors = ["Naotoshi Seo"]
|
10
|
-
spec.email = ["sonots@gmail.com"]
|
9
|
+
spec.authors = ["Naotoshi Seo", "kysnm", "potato2003"]
|
10
|
+
spec.email = ["sonots@gmail.com", "tokyoincidents.g@gmail.com", "potato2003@gmail.com"]
|
11
11
|
|
12
12
|
spec.summary = %q{Migrate BigQuery table schema}
|
13
13
|
spec.description = %q{Migrate BigQuery table schema.}
|
@@ -30,7 +30,7 @@ class BigqueryMigration
|
|
30
30
|
client.request_options.retries = retries
|
31
31
|
client.client_options.open_timeout_sec = open_timeout_sec
|
32
32
|
if client.request_options.respond_to?(:timeout_sec)
|
33
|
-
client.
|
33
|
+
client.request_options.timeout_sec = timeout_sec
|
34
34
|
else # google-api-ruby-client >= v0.11.0
|
35
35
|
if timeout_sec
|
36
36
|
logger.warn { "timeout_sec is deprecated in google-api-ruby-client >= v0.11.0. Use read_timeout_sec instead" }
|
@@ -49,9 +49,9 @@ class BigqueryMigration
|
|
49
49
|
token_credential_uri: "https://accounts.google.com/o/oauth2/token",
|
50
50
|
audience: "https://accounts.google.com/o/oauth2/token",
|
51
51
|
scope: scope,
|
52
|
-
client_id: credentials['client_id'],
|
53
|
-
client_secret: credentials['client_secret'],
|
54
|
-
refresh_token: credentials['refresh_token']
|
52
|
+
client_id: credentials[:client_id],
|
53
|
+
client_secret: credentials[:client_secret],
|
54
|
+
refresh_token: credentials[:refresh_token]
|
55
55
|
)
|
56
56
|
auth.refresh!
|
57
57
|
when 'compute_engine'
|
@@ -116,7 +116,9 @@ class BigqueryMigration
|
|
116
116
|
dataset_id: dataset,
|
117
117
|
},
|
118
118
|
}.merge(hint)
|
119
|
+
body[:location] = location if location
|
119
120
|
opts = {}
|
121
|
+
|
120
122
|
logger.debug { "#{head}insert_dataset(#{project}, #{body}, #{opts})" }
|
121
123
|
unless dry_run?
|
122
124
|
response = client.insert_dataset(project, body, opts)
|
@@ -479,6 +481,7 @@ class BigqueryMigration
|
|
479
481
|
}
|
480
482
|
}
|
481
483
|
}
|
484
|
+
body[:job_reference][:location] = location if location
|
482
485
|
opts = {}
|
483
486
|
|
484
487
|
logger.info { "#{head}insert_job(#{project}, #{body}, #{opts})" }
|
@@ -519,6 +522,7 @@ class BigqueryMigration
|
|
519
522
|
}
|
520
523
|
}
|
521
524
|
}
|
525
|
+
body[:job_reference][:location] = location if location
|
522
526
|
opts = {}
|
523
527
|
|
524
528
|
logger.info { "#{head}insert_job(#{project}, #{body}, #{opts})" }
|
@@ -563,7 +567,11 @@ class BigqueryMigration
|
|
563
567
|
"job_id:[#{job_id}] elapsed_time:#{elapsed.to_f}sec status:[#{status}]"
|
564
568
|
}
|
565
569
|
sleep wait_interval
|
566
|
-
|
570
|
+
if support_location_keyword?
|
571
|
+
_response = client.get_job(project, job_id, location: location)
|
572
|
+
else
|
573
|
+
_response = client.get_job(project, job_id)
|
574
|
+
end
|
567
575
|
end
|
568
576
|
end
|
569
577
|
|
@@ -707,6 +715,11 @@ class BigqueryMigration
|
|
707
715
|
result.merge!( before_columns: before_columns, after_columns: after_columns )
|
708
716
|
end
|
709
717
|
|
718
|
+
# the location keyword arguments are available in google-api-client v0.19.6 or later
|
719
|
+
def support_location_keyword?
|
720
|
+
@support_location_keyword ||= client.method(:get_job).parameters.include?([:key, :location])
|
721
|
+
end
|
722
|
+
|
710
723
|
# For old version compatibility
|
711
724
|
# Use credentials_file or credentials instead
|
712
725
|
def json_key
|
@@ -714,7 +727,7 @@ class BigqueryMigration
|
|
714
727
|
begin
|
715
728
|
case json_keyfile
|
716
729
|
when String
|
717
|
-
return JSON.parse(File.read(json_keyfile))
|
730
|
+
return HashUtil.deep_symbolize_keys(JSON.parse(File.read(json_keyfile)))
|
718
731
|
when Hash
|
719
732
|
return json_keyfile[:content]
|
720
733
|
else
|
@@ -729,11 +742,11 @@ class BigqueryMigration
|
|
729
742
|
|
730
743
|
# compute_engine, authorized_user, service_account
|
731
744
|
def auth_method
|
732
|
-
@auth_method ||= ENV['AUTH_METHOD'] || config.fetch(:auth_method, nil) || credentials['type'] || 'compute_engine'
|
745
|
+
@auth_method ||= ENV['AUTH_METHOD'] || config.fetch(:auth_method, nil) || credentials[:type] || 'compute_engine'
|
733
746
|
end
|
734
747
|
|
735
748
|
def credentials
|
736
|
-
json_key || JSON.parse(config.fetch(:credentials, nil) || File.read(credentials_file))
|
749
|
+
json_key || HashUtil.deep_symbolize_keys(JSON.parse(config.fetch(:credentials, nil) || File.read(credentials_file)))
|
737
750
|
end
|
738
751
|
|
739
752
|
def credentials_file
|
@@ -775,7 +788,7 @@ class BigqueryMigration
|
|
775
788
|
end
|
776
789
|
|
777
790
|
def service_account
|
778
|
-
@service_account ||= ENV['GOOGLE_SERVICE_ACCOUNT'] || config.fetch(:service_account, nil) || credentials['client_email'] || service_account_default
|
791
|
+
@service_account ||= ENV['GOOGLE_SERVICE_ACCOUNT'] || config.fetch(:service_account, nil) || credentials[:client_email] || service_account_default
|
779
792
|
end
|
780
793
|
|
781
794
|
def retries
|
@@ -800,8 +813,8 @@ class BigqueryMigration
|
|
800
813
|
end
|
801
814
|
|
802
815
|
def project
|
803
|
-
@project ||= ENV['GOOGLE_PROJECT'] || config.fetch(:project, nil) || credentials['project_id']
|
804
|
-
@project ||= credentials['client_email'].chomp('.iam.gserviceaccount.com').split('@').last if credentials['client_email']
|
816
|
+
@project ||= ENV['GOOGLE_PROJECT'] || config.fetch(:project, nil) || credentials[:project_id]
|
817
|
+
@project ||= credentials[:client_email].chomp('.iam.gserviceaccount.com').split('@').last if credentials[:client_email]
|
805
818
|
@project ||= project_default || raise(ConfigError, '`project` is required.')
|
806
819
|
end
|
807
820
|
|
@@ -813,6 +826,10 @@ class BigqueryMigration
|
|
813
826
|
@table ||= config[:table] || raise(ConfigError, '`table` is required.')
|
814
827
|
end
|
815
828
|
|
829
|
+
def location
|
830
|
+
config[:location]
|
831
|
+
end
|
832
|
+
|
816
833
|
def job_status_polling_interval
|
817
834
|
@job_status_polling_interval ||= config[:job_status_polling_interval] || 5
|
818
835
|
end
|
metadata
CHANGED
@@ -1,14 +1,16 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: bigquery_migration
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.3.0
|
4
|
+
version: 0.3.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Naotoshi Seo
|
8
|
+
- kysnm
|
9
|
+
- potato2003
|
8
10
|
autorequire:
|
9
11
|
bindir: exe
|
10
12
|
cert_chain: []
|
11
|
-
date:
|
13
|
+
date: 2018-05-22 00:00:00.000000000 Z
|
12
14
|
dependencies:
|
13
15
|
- !ruby/object:Gem::Dependency
|
14
16
|
name: google-api-client
|
@@ -153,6 +155,8 @@ dependencies:
|
|
153
155
|
description: Migrate BigQuery table schema.
|
154
156
|
email:
|
155
157
|
- sonots@gmail.com
|
158
|
+
- tokyoincidents.g@gmail.com
|
159
|
+
- potato2003@gmail.com
|
156
160
|
executables:
|
157
161
|
- bq-migrate
|
158
162
|
- bq_migrate
|
@@ -214,7 +218,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
214
218
|
version: '0'
|
215
219
|
requirements: []
|
216
220
|
rubyforge_project:
|
217
|
-
rubygems_version: 2.
|
221
|
+
rubygems_version: 2.7.4
|
218
222
|
signing_key:
|
219
223
|
specification_version: 4
|
220
224
|
summary: Migrate BigQuery table schema
|