bigquery_migration 0.3.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
- SHA1:
3
- metadata.gz: 97e77d9eca83b83c064a4e5328dbd3d776633290
4
- data.tar.gz: 8de9b03871570d5558e29a16d84b0fbef6873fc7
2
+ SHA256:
3
+ metadata.gz: 68a48ec86057ec0242c55beb90f6dfc8db688e6073d634fbcc9a03e0066b3357
4
+ data.tar.gz: 2a4358172e12aea071e5b260aa9d8f2dd57ec4d9a29d01ba2926f563ab58beb4
5
5
  SHA512:
6
- metadata.gz: 7de67b97655404b7f4a9f209484932252ed9d64da3e5535830012ce22cf6fcb3ed1283f13e8859017072bcaff43627d069f9b03851d97b00157c262b82351468
7
- data.tar.gz: f82e01cb2b9d25c24344dd839d56807039c7c24fdf4ea055b308a02b84feecb49997ce73c7faf698b6c6345a9c3435350be8fb416d2a53894918fad6836c502e
6
+ metadata.gz: 1df093bcd1f22dc01cd6e09e8ed10d6ad7b85182ecb0d268b8afac48d46489eb5ed7c1f5ff3d5f564b0d7f15017aaf7b02f55fa1bef23618d4f4fde044bb5f67
7
+ data.tar.gz: 9af8c78d3ed6c9dfd3a269bde24e016f7cf4b63b2ac8a3e2f39ac513a1e035657329c8709ed7ee8ef0f71f5c91881d330804a5af7652bf8b54e0338c50ad24c2
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
1
+ # 0.3.1 (2018/05/23)
2
+
3
+ Enhancements:
4
+
5
+ * Support newly added location option of google-api-ruby-client.
6
+
1
7
  # 0.3.0 (2017/04/26)
2
8
 
3
9
  Enhancements:
data/README.md CHANGED
@@ -2,6 +2,10 @@
2
2
 
3
3
  BigqueryMigration is a tool or a ruby library to migrate (or alter) BigQuery table schema.
4
4
 
5
+ ## Requirements
6
+
7
+ * Ruby >= 2.3.0
8
+
5
9
  ## Installation
6
10
 
7
11
  Add this line to your application's Gemfile:
@@ -31,6 +35,8 @@ bigquery: &bigquery
31
35
  json_keyfile: your-project-000.json
32
36
  dataset: your_dataset_name
33
37
  table: your_table_name
38
+ # If your data is in a location other than the US or EU multi-region, you must specify the location
39
+ # location: asia-northeast1
34
40
 
35
41
  actions:
36
42
  - action: create_dataset
@@ -59,9 +65,12 @@ $ bundle exec bq_migrate run config.yml --exec
59
65
  require 'bigquery_migration'
60
66
 
61
67
  config = {
62
- json_keyfile: '/path/to/your-project-000.json'
63
- dataset: 'your_dataset_name'
64
- table: 'your_table_name'
68
+ json_keyfile: '/path/to/your-project-000.json',
69
+ dataset: 'your_dataset_name',
70
+ table: 'your_table_name',
71
+
72
+ # If your data is in a location other than the US or EU multi-region, you must specify the location
73
+ # location: asia-northeast1,
65
74
  }
66
75
  columns = [
67
76
  { name: 'string', type: 'STRING' },
@@ -6,8 +6,8 @@ require 'bigquery_migration/version'
6
6
  Gem::Specification.new do |spec|
7
7
  spec.name = "bigquery_migration"
8
8
  spec.version = BigqueryMigration::VERSION
9
- spec.authors = ["Naotoshi Seo"]
10
- spec.email = ["sonots@gmail.com"]
9
+ spec.authors = ["Naotoshi Seo", "kysnm", "potato2003"]
10
+ spec.email = ["sonots@gmail.com", "tokyoincidents.g@gmail.com", "potato2003@gmail.com"]
11
11
 
12
12
  spec.summary = %q{Migrate BigQuery table schema}
13
13
  spec.description = %q{Migrate BigQuery table schema.}
@@ -30,7 +30,7 @@ class BigqueryMigration
30
30
  client.request_options.retries = retries
31
31
  client.client_options.open_timeout_sec = open_timeout_sec
32
32
  if client.request_options.respond_to?(:timeout_sec)
33
- client.client_options.timeout_sec = timeout_sec
33
+ client.request_options.timeout_sec = timeout_sec
34
34
  else # google-api-ruby-client >= v0.11.0
35
35
  if timeout_sec
36
36
  logger.warn { "timeout_sec is deprecated in google-api-ruby-client >= v0.11.0. Use read_timeout_sec instead" }
@@ -49,9 +49,9 @@ class BigqueryMigration
49
49
  token_credential_uri: "https://accounts.google.com/o/oauth2/token",
50
50
  audience: "https://accounts.google.com/o/oauth2/token",
51
51
  scope: scope,
52
- client_id: credentials['client_id'],
53
- client_secret: credentials['client_secret'],
54
- refresh_token: credentials['refresh_token']
52
+ client_id: credentials[:client_id],
53
+ client_secret: credentials[:client_secret],
54
+ refresh_token: credentials[:refresh_token]
55
55
  )
56
56
  auth.refresh!
57
57
  when 'compute_engine'
@@ -116,7 +116,9 @@ class BigqueryMigration
116
116
  dataset_id: dataset,
117
117
  },
118
118
  }.merge(hint)
119
+ body[:location] = location if location
119
120
  opts = {}
121
+
120
122
  logger.debug { "#{head}insert_dataset(#{project}, #{body}, #{opts})" }
121
123
  unless dry_run?
122
124
  response = client.insert_dataset(project, body, opts)
@@ -479,6 +481,7 @@ class BigqueryMigration
479
481
  }
480
482
  }
481
483
  }
484
+ body[:job_reference][:location] = location if location
482
485
  opts = {}
483
486
 
484
487
  logger.info { "#{head}insert_job(#{project}, #{body}, #{opts})" }
@@ -519,6 +522,7 @@ class BigqueryMigration
519
522
  }
520
523
  }
521
524
  }
525
+ body[:job_reference][:location] = location if location
522
526
  opts = {}
523
527
 
524
528
  logger.info { "#{head}insert_job(#{project}, #{body}, #{opts})" }
@@ -563,7 +567,11 @@ class BigqueryMigration
563
567
  "job_id:[#{job_id}] elapsed_time:#{elapsed.to_f}sec status:[#{status}]"
564
568
  }
565
569
  sleep wait_interval
566
- _response = client.get_job(project, job_id)
570
+ if support_location_keyword?
571
+ _response = client.get_job(project, job_id, location: location)
572
+ else
573
+ _response = client.get_job(project, job_id)
574
+ end
567
575
  end
568
576
  end
569
577
 
@@ -707,6 +715,11 @@ class BigqueryMigration
707
715
  result.merge!( before_columns: before_columns, after_columns: after_columns )
708
716
  end
709
717
 
718
+ # the location keyword argument is available in google-api-client v0.19.6 or later
719
+ def support_location_keyword?
720
+ @support_location_keyword ||= client.method(:get_job).parameters.include?([:key, :location])
721
+ end
722
+
710
723
  # For old version compatibility
711
724
  # Use credentials_file or credentials instead
712
725
  def json_key
@@ -714,7 +727,7 @@ class BigqueryMigration
714
727
  begin
715
728
  case json_keyfile
716
729
  when String
717
- return JSON.parse(File.read(json_keyfile))
730
+ return HashUtil.deep_symbolize_keys(JSON.parse(File.read(json_keyfile)))
718
731
  when Hash
719
732
  return json_keyfile[:content]
720
733
  else
@@ -729,11 +742,11 @@ class BigqueryMigration
729
742
 
730
743
  # compute_engine, authorized_user, service_account
731
744
  def auth_method
732
- @auth_method ||= ENV['AUTH_METHOD'] || config.fetch(:auth_method, nil) || credentials['type'] || 'compute_engine'
745
+ @auth_method ||= ENV['AUTH_METHOD'] || config.fetch(:auth_method, nil) || credentials[:type] || 'compute_engine'
733
746
  end
734
747
 
735
748
  def credentials
736
- json_key || JSON.parse(config.fetch(:credentials, nil) || File.read(credentials_file))
749
+ json_key || HashUtil.deep_symbolize_keys(JSON.parse(config.fetch(:credentials, nil) || File.read(credentials_file)))
737
750
  end
738
751
 
739
752
  def credentials_file
@@ -775,7 +788,7 @@ class BigqueryMigration
775
788
  end
776
789
 
777
790
  def service_account
778
- @service_account ||= ENV['GOOGLE_SERVICE_ACCOUNT'] || config.fetch(:service_account, nil) || credentials['client_email'] || service_account_default
791
+ @service_account ||= ENV['GOOGLE_SERVICE_ACCOUNT'] || config.fetch(:service_account, nil) || credentials[:client_email] || service_account_default
779
792
  end
780
793
 
781
794
  def retries
@@ -800,8 +813,8 @@ class BigqueryMigration
800
813
  end
801
814
 
802
815
  def project
803
- @project ||= ENV['GOOGLE_PROJECT'] || config.fetch(:project, nil) || credentials['project_id']
804
- @project ||= credentials['client_email'].chomp('.iam.gserviceaccount.com').split('@').last if credentials['client_email']
816
+ @project ||= ENV['GOOGLE_PROJECT'] || config.fetch(:project, nil) || credentials[:project_id]
817
+ @project ||= credentials[:client_email].chomp('.iam.gserviceaccount.com').split('@').last if credentials[:client_email]
805
818
  @project ||= project_default || raise(ConfigError, '`project` is required.')
806
819
  end
807
820
 
@@ -813,6 +826,10 @@ class BigqueryMigration
813
826
  @table ||= config[:table] || raise(ConfigError, '`table` is required.')
814
827
  end
815
828
 
829
+ def location
830
+ config[:location]
831
+ end
832
+
816
833
  def job_status_polling_interval
817
834
  @job_status_polling_interval ||= config[:job_status_polling_interval] || 5
818
835
  end
@@ -1,3 +1,3 @@
1
1
  class BigqueryMigration
2
- VERSION = "0.3.0"
2
+ VERSION = "0.3.1"
3
3
  end
metadata CHANGED
@@ -1,14 +1,16 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: bigquery_migration
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.0
4
+ version: 0.3.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Naotoshi Seo
8
+ - kysnm
9
+ - potato2003
8
10
  autorequire:
9
11
  bindir: exe
10
12
  cert_chain: []
11
- date: 2017-04-26 00:00:00.000000000 Z
13
+ date: 2018-05-22 00:00:00.000000000 Z
12
14
  dependencies:
13
15
  - !ruby/object:Gem::Dependency
14
16
  name: google-api-client
@@ -153,6 +155,8 @@ dependencies:
153
155
  description: Migrate BigQuery table schema.
154
156
  email:
155
157
  - sonots@gmail.com
158
+ - tokyoincidents.g@gmail.com
159
+ - potato2003@gmail.com
156
160
  executables:
157
161
  - bq-migrate
158
162
  - bq_migrate
@@ -214,7 +218,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
214
218
  version: '0'
215
219
  requirements: []
216
220
  rubyforge_project:
217
- rubygems_version: 2.6.11
221
+ rubygems_version: 2.7.4
218
222
  signing_key:
219
223
  specification_version: 4
220
224
  summary: Migrate BigQuery table schema