google-cloud-bigquery 1.10.0 → 1.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 475fe669391639d85bc48bdabb141ab8efa86dd9cdcfa97544d92b77099a1f63
4
- data.tar.gz: ad906eefc520fff184d81d1496e7e65914be90d631d4d31aead94fa41f4fff0f
3
+ metadata.gz: 320433f33bcb051d07a42335735d2b80500b7bbe2546bb20dcdb67e5c62b2f6a
4
+ data.tar.gz: 7f91617d36ea5c2917be9c555134ecdd7d9efd07e74852d47ab18c6d9ebfe3c8
5
5
  SHA512:
6
- metadata.gz: 9639819d2a90cf202dcc9f2d4ef14fb1ddfab512ddb965af8fd394d7228a6ac5d061ba4e7444a98ac5c5a09a2bb189b0f6c81146375cd5d5bbbe78576f249d05
7
- data.tar.gz: d3c854d3eb4ed7a053262842ab48e8bdcde7b929f1059695c8e67e36447c52fdaa1225b78334590d54ee0e445da8dcefd0402d04d5202c4d66155e9d27edb3b2
6
+ metadata.gz: a377500707a12139df115775db7a193ad5e28090394134c8f1116a8ede7a92e0c8c91bcb1eecb736f1240f35a7ec20cc433885a6d143e50e20b19b27e088a5e1
7
+ data.tar.gz: 4d7118fb5872d718e2268e980251b9002a1a3e625baf58f56f2f8c1f4f52264ae5d3296fb1c1e2977a5ba62ec63291f8d33cb1749289160e2a5a5621a8497bb6
@@ -1,5 +1,16 @@
1
1
  # Release History
2
2
 
3
+ ### 1.11.0 / 2019-02-01
4
+
5
+ * Make use of Credentials#project_id
6
+ * Use Credentials#project_id
7
+ If a project_id is not provided, use the value on the Credentials object.
8
+ This value was added in googleauth 0.7.0.
9
+ * Loosen googleauth dependency
10
+ Allow for new releases up to 0.10.
11
+ The googleauth devs have committed to maintaining the current API
12
+ and will not make backwards-incompatible changes before 0.10.
13
+
3
14
  ### 1.10.0 / 2018-12-06
4
15
 
5
16
  * Add dryrun param to Project#query_job and Dataset#query_job
@@ -65,7 +65,7 @@ module Google
65
65
  # bigquery = gcloud.bigquery scope: platform_scope
66
66
  #
67
67
  def bigquery scope: nil, retries: nil, timeout: nil
68
- Google::Cloud.bigquery @project, @keyfile, scope: scope,
68
+ Google::Cloud.bigquery @project, @keyfile, scope: scope,
69
69
  retries: (retries || @retries),
70
70
  timeout: (timeout || @timeout)
71
71
  end
@@ -129,7 +129,7 @@ Google::Cloud.configure.add_config! :bigquery do |config|
129
129
  config.add_field! :project_id, default_project, match: String, allow_nil: true
130
130
  config.add_alias! :project, :project_id
131
131
  config.add_field! :credentials, default_creds,
132
- match: [String, Hash, Google::Auth::Credentials],
132
+ match: [String, Hash, Google::Auth::Credentials],
133
133
  allow_nil: true
134
134
  config.add_alias! :keyfile, :credentials
135
135
  config.add_field! :scope, nil, match: [String, Array]
@@ -66,19 +66,22 @@ module Google
66
66
  #
67
67
  def self.new project_id: nil, credentials: nil, scope: nil, retries: nil,
68
68
  timeout: nil, project: nil, keyfile: nil
69
- project_id ||= (project || default_project_id)
70
- project_id = project_id.to_s # Always cast to a string
71
- raise ArgumentError, "project_id is missing" if project_id.empty?
72
-
73
- scope ||= configure.scope
74
- retries ||= configure.retries
75
- timeout ||= configure.timeout
76
-
69
+ project_id ||= (project || default_project_id)
70
+ scope ||= configure.scope
71
+ retries ||= configure.retries
72
+ timeout ||= configure.timeout
77
73
  credentials ||= (keyfile || default_credentials(scope: scope))
74
+
78
75
  unless credentials.is_a? Google::Auth::Credentials
79
76
  credentials = Bigquery::Credentials.new credentials, scope: scope
80
77
  end
81
78
 
79
+ if credentials.respond_to? :project_id
80
+ project_id ||= credentials.project_id
81
+ end
82
+ project_id = project_id.to_s # Always cast to a string
83
+ raise ArgumentError, "project_id is missing" if project_id.empty?
84
+
82
85
  Bigquery::Project.new(
83
86
  Bigquery::Service.new(
84
87
  project_id, credentials, retries: retries, timeout: timeout
@@ -278,12 +278,12 @@ module Google
278
278
  def self.create_disposition str
279
279
  val = {
280
280
  "create_if_needed" => "CREATE_IF_NEEDED",
281
- "createifneeded" => "CREATE_IF_NEEDED",
282
- "if_needed" => "CREATE_IF_NEEDED",
283
- "needed" => "CREATE_IF_NEEDED",
284
- "create_never" => "CREATE_NEVER",
285
- "createnever" => "CREATE_NEVER",
286
- "never" => "CREATE_NEVER"
281
+ "createifneeded" => "CREATE_IF_NEEDED",
282
+ "if_needed" => "CREATE_IF_NEEDED",
283
+ "needed" => "CREATE_IF_NEEDED",
284
+ "create_never" => "CREATE_NEVER",
285
+ "createnever" => "CREATE_NEVER",
286
+ "never" => "CREATE_NEVER"
287
287
  }[str.to_s.downcase]
288
288
  return val unless val.nil?
289
289
  str
@@ -298,14 +298,14 @@ module Google
298
298
  def self.write_disposition str
299
299
  val = {
300
300
  "write_truncate" => "WRITE_TRUNCATE",
301
- "writetruncate" => "WRITE_TRUNCATE",
302
- "truncate" => "WRITE_TRUNCATE",
303
- "write_append" => "WRITE_APPEND",
304
- "writeappend" => "WRITE_APPEND",
305
- "append" => "WRITE_APPEND",
306
- "write_empty" => "WRITE_EMPTY",
307
- "writeempty" => "WRITE_EMPTY",
308
- "empty" => "WRITE_EMPTY"
301
+ "writetruncate" => "WRITE_TRUNCATE",
302
+ "truncate" => "WRITE_TRUNCATE",
303
+ "write_append" => "WRITE_APPEND",
304
+ "writeappend" => "WRITE_APPEND",
305
+ "append" => "WRITE_APPEND",
306
+ "write_empty" => "WRITE_EMPTY",
307
+ "writeempty" => "WRITE_EMPTY",
308
+ "empty" => "WRITE_EMPTY"
309
309
  }[str.to_s.downcase]
310
310
  return val unless val.nil?
311
311
  str
@@ -319,15 +319,15 @@ module Google
319
319
  # @return [String] API representation of source format.
320
320
  def self.source_format format
321
321
  val = {
322
- "csv" => "CSV",
323
- "json" => "NEWLINE_DELIMITED_JSON",
322
+ "csv" => "CSV",
323
+ "json" => "NEWLINE_DELIMITED_JSON",
324
324
  "newline_delimited_json" => "NEWLINE_DELIMITED_JSON",
325
- "avro" => "AVRO",
326
- "orc" => "ORC",
327
- "parquet" => "PARQUET",
328
- "datastore" => "DATASTORE_BACKUP",
329
- "backup" => "DATASTORE_BACKUP",
330
- "datastore_backup" => "DATASTORE_BACKUP"
325
+ "avro" => "AVRO",
326
+ "orc" => "ORC",
327
+ "parquet" => "PARQUET",
328
+ "datastore" => "DATASTORE_BACKUP",
329
+ "backup" => "DATASTORE_BACKUP",
330
+ "datastore_backup" => "DATASTORE_BACKUP"
331
331
  }[format.to_s.downcase]
332
332
  return val unless val.nil?
333
333
  format
@@ -352,12 +352,12 @@ module Google
352
352
  #
353
353
  # @return [String] API representation of source format.
354
354
  def self.derive_source_format path
355
- return "CSV" if path.end_with? ".csv"
355
+ return "CSV" if path.end_with? ".csv"
356
356
  return "NEWLINE_DELIMITED_JSON" if path.end_with? ".json"
357
- return "AVRO" if path.end_with? ".avro"
358
- return "ORC" if path.end_with? ".orc"
359
- return "PARQUET" if path.end_with? ".parquet"
360
- return "DATASTORE_BACKUP" if path.end_with? ".backup_info"
357
+ return "AVRO" if path.end_with? ".avro"
358
+ return "ORC" if path.end_with? ".orc"
359
+ return "PARQUET" if path.end_with? ".parquet"
360
+ return "DATASTORE_BACKUP" if path.end_with? ".backup_info"
361
361
  nil
362
362
  end
363
363
  end
@@ -160,20 +160,21 @@ module Google
160
160
  # configuration object for setting copy options.
161
161
  def self.from_options service, source, target, options = {}
162
162
  job_ref = service.job_ref_from options[:job_id], options[:prefix]
163
+ copy_cfg = Google::Apis::BigqueryV2::JobConfigurationTableCopy.new(
164
+ source_table: source,
165
+ destination_table: target
166
+ )
163
167
  req = Google::Apis::BigqueryV2::Job.new(
164
168
  job_reference: job_ref,
165
169
  configuration: Google::Apis::BigqueryV2::JobConfiguration.new(
166
- copy: Google::Apis::BigqueryV2::JobConfigurationTableCopy.new(
167
- source_table: source,
168
- destination_table: target
169
- ),
170
+ copy: copy_cfg,
170
171
  dry_run: options[:dryrun]
171
172
  )
172
173
  )
173
174
 
174
175
  updater = CopyJob::Updater.new req
175
176
  updater.create = options[:create]
176
- updater.write = options[:write]
177
+ updater.write = options[:write]
177
178
  updater.labels = options[:labels] if options[:labels]
178
179
  updater
179
180
  end
@@ -226,8 +227,8 @@ module Google
226
227
  #
227
228
  # @!group Attributes
228
229
  def create= new_create
229
- @gapi.configuration.copy.update! create_disposition:
230
- Convert.create_disposition(new_create)
230
+ @gapi.configuration.copy.update! \
231
+ create_disposition: Convert.create_disposition(new_create)
231
232
  end
232
233
 
233
234
  ##
@@ -247,8 +248,8 @@ module Google
247
248
  #
248
249
  # @!group Attributes
249
250
  def write= new_write
250
- @gapi.configuration.copy.update! write_disposition:
251
- Convert.write_disposition(new_write)
251
+ @gapi.configuration.copy.update! \
252
+ write_disposition: Convert.write_disposition(new_write)
252
253
  end
253
254
 
254
255
  ##
@@ -480,7 +480,7 @@ module Google
480
480
  # dataset = bigquery.dataset "my_dataset"
481
481
  #
482
482
  # table = dataset.create_table "my_table" do |t|
483
- # t.name = "My Table",
483
+ # t.name = "My Table"
484
484
  # t.description = "A description of my table."
485
485
  # t.schema do |s|
486
486
  # s.string "first_name", mode: :required
@@ -570,16 +570,18 @@ module Google
570
570
  #
571
571
  def create_view table_id, query, name: nil, description: nil,
572
572
  standard_sql: nil, legacy_sql: nil, udfs: nil
573
+ use_legacy_sql = Convert.resolve_legacy_sql standard_sql, legacy_sql
573
574
  new_view_opts = {
574
575
  table_reference: Google::Apis::BigqueryV2::TableReference.new(
575
- project_id: project_id, dataset_id: dataset_id, table_id: table_id
576
+ project_id: project_id,
577
+ dataset_id: dataset_id,
578
+ table_id: table_id
576
579
  ),
577
- friendly_name: name,
578
- description: description,
579
- view: Google::Apis::BigqueryV2::ViewDefinition.new(
580
- query: query,
581
- use_legacy_sql: Convert.resolve_legacy_sql(standard_sql,
582
- legacy_sql),
580
+ friendly_name: name,
581
+ description: description,
582
+ view: Google::Apis::BigqueryV2::ViewDefinition.new(
583
+ query: query,
584
+ use_legacy_sql: use_legacy_sql,
583
585
  user_defined_function_resources: udfs_gapi(udfs)
584
586
  )
585
587
  }.delete_if { |_, v| v.nil? }
@@ -1951,9 +1953,9 @@ module Google
1951
1953
 
1952
1954
  if autocreate
1953
1955
  begin
1954
- insert_data table_id, rows, skip_invalid: skip_invalid,
1956
+ insert_data table_id, rows, skip_invalid: skip_invalid,
1955
1957
  ignore_unknown: ignore_unknown,
1956
- insert_ids: insert_ids
1958
+ insert_ids: insert_ids
1957
1959
  rescue Google::Cloud::NotFoundError
1958
1960
  sleep rand(1..60)
1959
1961
  begin
@@ -1966,15 +1968,15 @@ module Google
1966
1968
  # rubocop:enable Lint/HandleExceptions
1967
1969
 
1968
1970
  sleep 60
1969
- insert table_id, rows, skip_invalid: skip_invalid,
1971
+ insert table_id, rows, skip_invalid: skip_invalid,
1970
1972
  ignore_unknown: ignore_unknown,
1971
- autocreate: true,
1972
- insert_ids: insert_ids
1973
+ autocreate: true,
1974
+ insert_ids: insert_ids
1973
1975
  end
1974
1976
  else
1975
- insert_data table_id, rows, skip_invalid: skip_invalid,
1977
+ insert_data table_id, rows, skip_invalid: skip_invalid,
1976
1978
  ignore_unknown: ignore_unknown,
1977
- insert_ids: insert_ids
1979
+ insert_ids: insert_ids
1978
1980
  end
1979
1981
  end
1980
1982
 
@@ -2047,9 +2049,9 @@ module Google
2047
2049
  rows = [rows] if rows.is_a? Hash
2048
2050
  raise ArgumentError, "No rows provided" if rows.empty?
2049
2051
  ensure_service!
2050
- options = { skip_invalid: skip_invalid,
2052
+ options = { skip_invalid: skip_invalid,
2051
2053
  ignore_unknown: ignore_unknown,
2052
- insert_ids: insert_ids }
2054
+ insert_ids: insert_ids }
2053
2055
  gapi = service.insert_tabledata dataset_id, table_id, rows, options
2054
2056
  InsertResponse.from_gapi rows, gapi
2055
2057
  end
@@ -2112,11 +2114,11 @@ module Google
2112
2114
  Google::Apis::BigqueryV2::Job.new(
2113
2115
  job_reference: job_ref,
2114
2116
  configuration: Google::Apis::BigqueryV2::JobConfiguration.new(
2115
- load: Google::Apis::BigqueryV2::JobConfigurationLoad.new(
2117
+ load: Google::Apis::BigqueryV2::JobConfigurationLoad.new(
2116
2118
  destination_table: Google::Apis::BigqueryV2::TableReference.new(
2117
2119
  project_id: @service.project,
2118
2120
  dataset_id: dataset_id,
2119
- table_id: table_id
2121
+ table_id: table_id
2120
2122
  )
2121
2123
  ),
2122
2124
  dry_run: dryrun
@@ -2151,12 +2153,12 @@ module Google
2151
2153
  job.encoding = encoding unless encoding.nil?
2152
2154
  job.ignore_unknown = ignore_unknown unless ignore_unknown.nil?
2153
2155
  job.max_bad_records = max_bad_records unless max_bad_records.nil?
2154
- load_job_csv_options! job, jagged_rows: jagged_rows,
2156
+ load_job_csv_options! job, jagged_rows: jagged_rows,
2155
2157
  quoted_newlines: quoted_newlines,
2156
- delimiter: delimiter,
2157
- quote: quote,
2158
- skip_leading: skip_leading,
2159
- null_marker: null_marker
2158
+ delimiter: delimiter,
2159
+ quote: quote,
2160
+ skip_leading: skip_leading,
2161
+ null_marker: null_marker
2160
2162
  end
2161
2163
 
2162
2164
  def load_job_updater table_id, format: nil, create: nil,
@@ -2176,17 +2178,17 @@ module Google
2176
2178
  job.schema = schema unless schema.nil?
2177
2179
  job.autodetect = autodetect unless autodetect.nil?
2178
2180
  job.labels = labels unless labels.nil?
2179
- load_job_file_options! job, format: format,
2181
+ load_job_file_options! job, format: format,
2180
2182
  projection_fields: projection_fields,
2181
- jagged_rows: jagged_rows,
2182
- quoted_newlines: quoted_newlines,
2183
- encoding: encoding,
2184
- delimiter: delimiter,
2185
- ignore_unknown: ignore_unknown,
2186
- max_bad_records: max_bad_records,
2187
- quote: quote,
2188
- skip_leading: skip_leading,
2189
- null_marker: null_marker
2183
+ jagged_rows: jagged_rows,
2184
+ quoted_newlines: quoted_newlines,
2185
+ encoding: encoding,
2186
+ delimiter: delimiter,
2187
+ ignore_unknown: ignore_unknown,
2188
+ max_bad_records: max_bad_records,
2189
+ quote: quote,
2190
+ skip_leading: skip_leading,
2191
+ null_marker: null_marker
2190
2192
  end
2191
2193
  end
2192
2194
 
@@ -184,7 +184,7 @@ module Google
184
184
  configuration: Google::Apis::BigqueryV2::JobConfiguration.new(
185
185
  extract: Google::Apis::BigqueryV2::JobConfigurationExtract.new(
186
186
  destination_uris: Array(storage_urls),
187
- source_table: table
187
+ source_table: table
188
188
  ),
189
189
  dry_run: options[:dryrun]
190
190
  )
@@ -268,8 +268,8 @@ module Google
268
268
  # @!group Attributes
269
269
  #
270
270
  def format= new_format
271
- @gapi.configuration.extract.update! destination_format:
272
- Convert.source_format(new_format)
271
+ @gapi.configuration.extract.update! \
272
+ destination_format: Convert.source_format(new_format)
273
273
  end
274
274
 
275
275
  ##
@@ -408,7 +408,7 @@ module Google
408
408
 
409
409
  Google::Apis::Error.new error["message"],
410
410
  status_code: error_status_code,
411
- body: error_body
411
+ body: error_body
412
412
  end
413
413
 
414
414
  ##
@@ -933,8 +933,8 @@ module Google
933
933
  # @!group Attributes
934
934
  #
935
935
  def format= new_format
936
- @gapi.configuration.load.update! source_format:
937
- Convert.source_format(new_format)
936
+ @gapi.configuration.load.update! \
937
+ source_format: Convert.source_format(new_format)
938
938
  end
939
939
 
940
940
  ##
@@ -954,8 +954,8 @@ module Google
954
954
  # @!group Attributes
955
955
  #
956
956
  def create= new_create
957
- @gapi.configuration.load.update! create_disposition:
958
- Convert.create_disposition(new_create)
957
+ @gapi.configuration.load.update! \
958
+ create_disposition: Convert.create_disposition(new_create)
959
959
  end
960
960
 
961
961
  ##
@@ -976,8 +976,8 @@ module Google
976
976
  # @!group Attributes
977
977
  #
978
978
  def write= new_write
979
- @gapi.configuration.load.update! write_disposition:
980
- Convert.write_disposition(new_write)
979
+ @gapi.configuration.load.update! \
980
+ write_disposition: Convert.write_disposition(new_write)
981
981
  end
982
982
 
983
983
  ##
@@ -998,8 +998,8 @@ module Google
998
998
  if new_fields.nil?
999
999
  @gapi.configuration.load.update! projection_fields: nil
1000
1000
  else
1001
- @gapi.configuration.load.update! projection_fields:
1002
- Array(new_fields)
1001
+ @gapi.configuration.load.update! \
1002
+ projection_fields: Array(new_fields)
1003
1003
  end
1004
1004
  end
1005
1005
 
@@ -266,7 +266,7 @@ module Google
266
266
  job = copy_job source_table,
267
267
  destination_table,
268
268
  create: create,
269
- write: write,
269
+ write: write,
270
270
  &block
271
271
  job.wait_until_done!
272
272
  ensure_job_succeeded! job
@@ -1440,10 +1440,10 @@ module Google
1440
1440
  delimiter: nil, header: nil, &block
1441
1441
  job = extract_job table,
1442
1442
  extract_url,
1443
- format: format,
1443
+ format: format,
1444
1444
  compression: compression,
1445
- delimiter: delimiter,
1446
- header: header,
1445
+ delimiter: delimiter,
1446
+ header: header,
1447
1447
  &block
1448
1448
  job.wait_until_done!
1449
1449
  ensure_job_succeeded! job
@@ -1165,12 +1165,12 @@ module Google
1165
1165
  Google::Apis::BigqueryV2::TableReference.new(
1166
1166
  project_id: tbl.project_id,
1167
1167
  dataset_id: tbl.dataset_id,
1168
- table_id: tbl.table_id
1168
+ table_id: tbl.table_id
1169
1169
  )
1170
1170
  end
1171
1171
 
1172
1172
  def priority_value str
1173
- { "batch" => "BATCH",
1173
+ { "batch" => "BATCH",
1174
1174
  "interactive" => "INTERACTIVE" }[str.to_s.downcase]
1175
1175
  end
1176
1176
 
@@ -1348,7 +1348,7 @@ module Google
1348
1348
  def destination_table_gapi
1349
1349
  Google::Apis::BigqueryV2::Table.new \
1350
1350
  table_reference: @gapi.configuration.query.destination_table,
1351
- schema: destination_schema
1351
+ schema: destination_schema
1352
1352
  end
1353
1353
  end
1354
1354
  end
@@ -461,7 +461,7 @@ module Google
461
461
  raise ArgumentError, "a block is required" unless block_given?
462
462
 
463
463
  nested_field = add_field name, :record, description: description,
464
- mode: mode
464
+ mode: mode
465
465
  yield nested_field
466
466
  nested_field
467
467
  end
@@ -507,11 +507,11 @@ module Google
507
507
  frozen_check!
508
508
 
509
509
  new_gapi = Google::Apis::BigqueryV2::TableFieldSchema.new(
510
- name: String(name),
511
- type: verify_type(type),
510
+ name: String(name),
511
+ type: verify_type(type),
512
512
  description: description,
513
- mode: verify_mode(mode),
514
- fields: []
513
+ mode: verify_mode(mode),
514
+ fields: []
515
515
  )
516
516
 
517
517
  # Remove any existing field of this name
@@ -549,7 +549,7 @@ module Google
549
549
  raise ArgumentError, "a block is required" unless block_given?
550
550
 
551
551
  nested_field = add_field name, :record, description: description,
552
- mode: mode
552
+ mode: mode
553
553
  yield nested_field
554
554
  nested_field
555
555
  end
@@ -602,11 +602,11 @@ module Google
602
602
  frozen_check!
603
603
 
604
604
  new_gapi = Google::Apis::BigqueryV2::TableFieldSchema.new(
605
- name: String(name),
606
- type: verify_type(type),
605
+ name: String(name),
606
+ type: verify_type(type),
607
607
  description: description,
608
- mode: verify_mode(mode),
609
- fields: []
608
+ mode: verify_mode(mode),
609
+ fields: []
610
610
  )
611
611
 
612
612
  # Remove any existing field of this name
@@ -136,7 +136,7 @@ module Google
136
136
  execute backoff: true do
137
137
  service.list_tables @project, dataset_id,
138
138
  max_results: options[:max],
139
- page_token: options[:token]
139
+ page_token: options[:token]
140
140
  end
141
141
  end
142
142
 
@@ -197,9 +197,9 @@ module Google
197
197
  json_txt = service.list_table_data \
198
198
  @project, dataset_id, table_id,
199
199
  max_results: options.delete(:max),
200
- page_token: options.delete(:token),
200
+ page_token: options.delete(:token),
201
201
  start_index: options.delete(:start),
202
- options: { skip_deserialization: true }
202
+ options: { skip_deserialization: true }
203
203
  JSON.parse json_txt, symbolize_names: true
204
204
  end
205
205
  end
@@ -217,14 +217,14 @@ module Google
217
217
  insert_id ||= SecureRandom.uuid
218
218
  {
219
219
  insertId: insert_id,
220
- json: json_row
220
+ json: json_row
221
221
  }
222
222
  end
223
223
 
224
224
  insert_req = {
225
- rows: insert_rows,
225
+ rows: insert_rows,
226
226
  ignoreUnknownValues: options[:ignore_unknown],
227
- skipInvalidRows: options[:skip_invalid]
227
+ skipInvalidRows: options[:skip_invalid]
228
228
  }.to_json
229
229
 
230
230
  # The insertAll with insertId operation is considered idempotent
@@ -287,13 +287,13 @@ module Google
287
287
  def job_query_results job_id, options = {}
288
288
  # The get operation is considered idempotent
289
289
  execute backoff: true do
290
- service.get_job_query_results @project,
291
- job_id,
292
- location: options.delete(:location),
293
- max_results: options.delete(:max),
294
- page_token: options.delete(:token),
295
- start_index: options.delete(:start),
296
- timeout_ms: options.delete(:timeout)
290
+ service.get_job_query_results \
291
+ @project, job_id,
292
+ location: options.delete(:location),
293
+ max_results: options.delete(:max),
294
+ page_token: options.delete(:token),
295
+ start_index: options.delete(:start),
296
+ timeout_ms: options.delete(:timeout)
297
297
  end
298
298
  end
299
299
 
@@ -371,7 +371,7 @@ module Google
371
371
  def list_projects options = {}
372
372
  execute backoff: true do
373
373
  service.list_projects max_results: options[:max],
374
- page_token: options[:token]
374
+ page_token: options[:token]
375
375
  end
376
376
  end
377
377
 
@@ -383,7 +383,7 @@ module Google
383
383
  job_id ||= "#{prefix}#{generate_id}"
384
384
  job_ref = API::JobReference.new(
385
385
  project_id: @project,
386
- job_id: job_id
386
+ job_id: job_id
387
387
  )
388
388
  # BigQuery does not allow nil location, but missing is ok.
389
389
  job_ref.location = location if location
@@ -1071,10 +1071,10 @@ module Google
1071
1071
  # @!group Lifecycle
1072
1072
  #
1073
1073
  def set_query query, standard_sql: nil, legacy_sql: nil, udfs: nil
1074
+ use_legacy_sql = Convert.resolve_legacy_sql standard_sql, legacy_sql
1074
1075
  @gapi.view = Google::Apis::BigqueryV2::ViewDefinition.new \
1075
- query: query,
1076
- use_legacy_sql: Convert.resolve_legacy_sql(standard_sql,
1077
- legacy_sql),
1076
+ query: query,
1077
+ use_legacy_sql: use_legacy_sql,
1078
1078
  user_defined_function_resources: udfs_gapi(udfs)
1079
1079
  patch_gapi! :view
1080
1080
  end
@@ -1526,10 +1526,10 @@ module Google
1526
1526
  def extract extract_url, format: nil, compression: nil, delimiter: nil,
1527
1527
  header: nil, &block
1528
1528
  job = extract_job extract_url,
1529
- format: format,
1529
+ format: format,
1530
1530
  compression: compression,
1531
- delimiter: delimiter,
1532
- header: header,
1531
+ delimiter: delimiter,
1532
+ header: header,
1533
1533
  &block
1534
1534
  job.wait_until_done!
1535
1535
  ensure_job_succeeded! job
@@ -1998,9 +1998,9 @@ module Google
1998
1998
  rows = [rows] if rows.is_a? Hash
1999
1999
  raise ArgumentError, "No rows provided" if rows.empty?
2000
2000
  ensure_service!
2001
- options = { skip_invalid: skip_invalid,
2001
+ options = { skip_invalid: skip_invalid,
2002
2002
  ignore_unknown: ignore_unknown,
2003
- insert_ids: insert_ids }
2003
+ insert_ids: insert_ids }
2004
2004
  gapi = service.insert_tabledata dataset_id, table_id, rows, options
2005
2005
  InsertResponse.from_gapi rows, gapi
2006
2006
  end
@@ -2257,7 +2257,7 @@ module Google
2257
2257
  reference_gapi = Google::Apis::BigqueryV2::TableReference.new(
2258
2258
  project_id: project_id,
2259
2259
  dataset_id: dataset_id,
2260
- table_id: table_id
2260
+ table_id: table_id
2261
2261
  )
2262
2262
  b.service = service
2263
2263
  b.instance_variable_set :@reference, reference_gapi
@@ -2330,11 +2330,11 @@ module Google
2330
2330
  Google::Apis::BigqueryV2::Job.new(
2331
2331
  job_reference: job_ref,
2332
2332
  configuration: Google::Apis::BigqueryV2::JobConfiguration.new(
2333
- load: Google::Apis::BigqueryV2::JobConfigurationLoad.new(
2333
+ load: Google::Apis::BigqueryV2::JobConfigurationLoad.new(
2334
2334
  destination_table: Google::Apis::BigqueryV2::TableReference.new(
2335
2335
  project_id: @service.project,
2336
2336
  dataset_id: dataset_id,
2337
- table_id: table_id
2337
+ table_id: table_id
2338
2338
  )
2339
2339
  ),
2340
2340
  dry_run: dryrun
@@ -2369,12 +2369,12 @@ module Google
2369
2369
  job.encoding = encoding unless encoding.nil?
2370
2370
  job.ignore_unknown = ignore_unknown unless ignore_unknown.nil?
2371
2371
  job.max_bad_records = max_bad_records unless max_bad_records.nil?
2372
- load_job_csv_options! job, jagged_rows: jagged_rows,
2372
+ load_job_csv_options! job, jagged_rows: jagged_rows,
2373
2373
  quoted_newlines: quoted_newlines,
2374
- delimiter: delimiter,
2375
- quote: quote,
2376
- skip_leading: skip_leading,
2377
- null_marker: null_marker
2374
+ delimiter: delimiter,
2375
+ quote: quote,
2376
+ skip_leading: skip_leading,
2377
+ null_marker: null_marker
2378
2378
  end
2379
2379
 
2380
2380
  def load_job_updater format: nil, create: nil,
@@ -2394,17 +2394,17 @@ module Google
2394
2394
  job.schema = schema unless schema.nil?
2395
2395
  job.autodetect = autodetect unless autodetect.nil?
2396
2396
  job.labels = labels unless labels.nil?
2397
- load_job_file_options! job, format: format,
2397
+ load_job_file_options! job, format: format,
2398
2398
  projection_fields: projection_fields,
2399
- jagged_rows: jagged_rows,
2400
- quoted_newlines: quoted_newlines,
2401
- encoding: encoding,
2402
- delimiter: delimiter,
2403
- ignore_unknown: ignore_unknown,
2404
- max_bad_records: max_bad_records,
2405
- quote: quote,
2406
- skip_leading: skip_leading,
2407
- null_marker: null_marker
2399
+ jagged_rows: jagged_rows,
2400
+ quoted_newlines: quoted_newlines,
2401
+ encoding: encoding,
2402
+ delimiter: delimiter,
2403
+ ignore_unknown: ignore_unknown,
2404
+ max_bad_records: max_bad_records,
2405
+ quote: quote,
2406
+ skip_leading: skip_leading,
2407
+ null_marker: null_marker
2408
2408
  end
2409
2409
  end
2410
2410
 
@@ -2590,7 +2590,7 @@ module Google
2590
2590
  # bigquery = Google::Cloud::Bigquery.new
2591
2591
  # dataset = bigquery.dataset "my_dataset"
2592
2592
  # table = dataset.create_table "my_table" do |t|
2593
- # t.name = "My Table",
2593
+ # t.name = "My Table"
2594
2594
  # t.description = "A description of my table."
2595
2595
  # t.schema do |s|
2596
2596
  # s.string "first_name", mode: :required
@@ -2607,7 +2607,7 @@ module Google
2607
2607
  # bigquery = Google::Cloud::Bigquery.new
2608
2608
  # dataset = bigquery.dataset "my_dataset"
2609
2609
  # table = dataset.create_table "my_table" do |t|
2610
- # t.name = "My Table",
2610
+ # t.name = "My Table"
2611
2611
  # t.description = "A description of my table."
2612
2612
  # t.schema do |s|
2613
2613
  # s.load File.open("schema.json")
@@ -123,7 +123,7 @@ module Google
123
123
  push_batch_request!
124
124
 
125
125
  @batch = Batch.new max_bytes: @max_bytes,
126
- max_rows: @max_rows
126
+ max_rows: @max_rows
127
127
  @batch.insert row, insert_id
128
128
  end
129
129
  end
@@ -250,9 +250,9 @@ module Google
250
250
  Concurrent::Future.new(executor: @thread_pool) do
251
251
  begin
252
252
  raise ArgumentError, "No rows provided" if json_rows.empty?
253
- options = { skip_invalid: @skip_invalid,
253
+ options = { skip_invalid: @skip_invalid,
254
254
  ignore_unknown: @ignore_unknown,
255
- insert_ids: insert_ids }
255
+ insert_ids: insert_ids }
256
256
  insert_resp = @table.service.insert_tabledata_json_rows(
257
257
  @table.dataset_id, @table.table_id, json_rows, options
258
258
  )
@@ -16,7 +16,7 @@
16
16
  module Google
17
17
  module Cloud
18
18
  module Bigquery
19
- VERSION = "1.10.0".freeze
19
+ VERSION = "1.11.0".freeze
20
20
  end
21
21
  end
22
22
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: google-cloud-bigquery
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.10.0
4
+ version: 1.11.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Mike Moore
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2018-12-07 00:00:00.000000000 Z
12
+ date: 2019-02-01 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: google-cloud-core
@@ -43,16 +43,22 @@ dependencies:
43
43
  name: googleauth
44
44
  requirement: !ruby/object:Gem::Requirement
45
45
  requirements:
46
- - - "~>"
46
+ - - ">="
47
47
  - !ruby/object:Gem::Version
48
48
  version: 0.6.2
49
+ - - "<"
50
+ - !ruby/object:Gem::Version
51
+ version: 0.10.0
49
52
  type: :runtime
50
53
  prerelease: false
51
54
  version_requirements: !ruby/object:Gem::Requirement
52
55
  requirements:
53
- - - "~>"
56
+ - - ">="
54
57
  - !ruby/object:Gem::Version
55
58
  version: 0.6.2
59
+ - - "<"
60
+ - !ruby/object:Gem::Version
61
+ version: 0.10.0
56
62
  - !ruby/object:Gem::Dependency
57
63
  name: concurrent-ruby
58
64
  requirement: !ruby/object:Gem::Requirement
@@ -157,14 +163,14 @@ dependencies:
157
163
  requirements:
158
164
  - - "~>"
159
165
  - !ruby/object:Gem::Version
160
- version: 0.59.2
166
+ version: 0.61.0
161
167
  type: :development
162
168
  prerelease: false
163
169
  version_requirements: !ruby/object:Gem::Requirement
164
170
  requirements:
165
171
  - - "~>"
166
172
  - !ruby/object:Gem::Version
167
- version: 0.59.2
173
+ version: 0.61.0
168
174
  - !ruby/object:Gem::Dependency
169
175
  name: simplecov
170
176
  requirement: !ruby/object:Gem::Requirement
@@ -271,7 +277,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
271
277
  version: '0'
272
278
  requirements: []
273
279
  rubyforge_project:
274
- rubygems_version: 2.7.7
280
+ rubygems_version: 2.7.6
275
281
  signing_key:
276
282
  specification_version: 4
277
283
  summary: API Client library for Google BigQuery