google-cloud-bigquery 1.16.0 → 1.17.0

Files changed (33)
  1. checksums.yaml +4 -4
  2. data/AUTHENTICATION.md +7 -27
  3. data/CHANGELOG.md +8 -0
  4. data/CONTRIBUTING.md +1 -1
  5. data/OVERVIEW.md +5 -5
  6. data/lib/google-cloud-bigquery.rb +5 -11
  7. data/lib/google/cloud/bigquery.rb +3 -5
  8. data/lib/google/cloud/bigquery/convert.rb +23 -46
  9. data/lib/google/cloud/bigquery/copy_job.rb +6 -16
  10. data/lib/google/cloud/bigquery/credentials.rb +5 -12
  11. data/lib/google/cloud/bigquery/data.rb +10 -16
  12. data/lib/google/cloud/bigquery/dataset.rb +58 -118
  13. data/lib/google/cloud/bigquery/dataset/access.rb +5 -13
  14. data/lib/google/cloud/bigquery/dataset/list.rb +4 -9
  15. data/lib/google/cloud/bigquery/external.rb +14 -35
  16. data/lib/google/cloud/bigquery/extract_job.rb +2 -5
  17. data/lib/google/cloud/bigquery/insert_response.rb +1 -3
  18. data/lib/google/cloud/bigquery/job.rb +5 -9
  19. data/lib/google/cloud/bigquery/job/list.rb +3 -7
  20. data/lib/google/cloud/bigquery/load_job.rb +18 -33
  21. data/lib/google/cloud/bigquery/model.rb +1 -4
  22. data/lib/google/cloud/bigquery/model/list.rb +3 -7
  23. data/lib/google/cloud/bigquery/project.rb +27 -56
  24. data/lib/google/cloud/bigquery/project/list.rb +3 -7
  25. data/lib/google/cloud/bigquery/query_job.rb +40 -79
  26. data/lib/google/cloud/bigquery/schema.rb +3 -8
  27. data/lib/google/cloud/bigquery/schema/field.rb +6 -11
  28. data/lib/google/cloud/bigquery/service.rb +26 -58
  29. data/lib/google/cloud/bigquery/table.rb +58 -112
  30. data/lib/google/cloud/bigquery/table/async_inserter.rb +10 -18
  31. data/lib/google/cloud/bigquery/table/list.rb +3 -7
  32. data/lib/google/cloud/bigquery/version.rb +1 -1
  33. metadata +36 -42
@@ -209,8 +209,7 @@ module Google
   #
   def time_partitioning_type= type
     reload! unless resource_full?
-    @gapi.time_partitioning ||= \
-      Google::Apis::BigqueryV2::TimePartitioning.new
+    @gapi.time_partitioning ||= Google::Apis::BigqueryV2::TimePartitioning.new
     @gapi.time_partitioning.type = type
     patch_gapi! :time_partitioning
   end
@@ -267,8 +266,7 @@ module Google
   #
   def time_partitioning_field= field
     reload! unless resource_full?
-    @gapi.time_partitioning ||= \
-      Google::Apis::BigqueryV2::TimePartitioning.new
+    @gapi.time_partitioning ||= Google::Apis::BigqueryV2::TimePartitioning.new
     @gapi.time_partitioning.field = field
     patch_gapi! :time_partitioning
   end
@@ -286,9 +284,9 @@ module Google
   def time_partitioning_expiration
     return nil if reference?
     ensure_full_data!
-    @gapi.time_partitioning.expiration_ms / 1_000 if
-      time_partitioning? &&
-      !@gapi.time_partitioning.expiration_ms.nil?
+    return nil unless time_partitioning?
+    return nil if @gapi.time_partitioning.expiration_ms.nil?
+    @gapi.time_partitioning.expiration_ms / 1_000
   end

   ##
@@ -319,8 +317,7 @@ module Google
   #
   def time_partitioning_expiration= expiration
     reload! unless resource_full?
-    @gapi.time_partitioning ||= \
-      Google::Apis::BigqueryV2::TimePartitioning.new
+    @gapi.time_partitioning ||= Google::Apis::BigqueryV2::TimePartitioning.new
     @gapi.time_partitioning.expiration_ms = expiration * 1000
     patch_gapi! :time_partitioning
   end
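
Note: as the two hunks above show, the expiration setter takes seconds and stores expiration_ms, while the rewritten reader divides back by 1_000 and returns nil unless partitioning is configured. A minimal round-trip sketch, assuming `table` is a Google::Cloud::Bigquery::Table (column name is illustrative):

    table.time_partitioning_type = "DAY"
    table.time_partitioning_field = "created_at"   # illustrative column
    table.time_partitioning_expiration = 86_400    # seconds; stored as expiration_ms = 86_400_000
    table.time_partitioning_expiration              #=> 86400, or nil if partitioning is unset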
@@ -891,8 +888,7 @@ module Google
     return nil if reference?
     ensure_full_data!
     return nil if @gapi.encryption_configuration.nil?
-    EncryptionConfiguration.from_gapi(@gapi.encryption_configuration)
-      .freeze
+    EncryptionConfiguration.from_gapi(@gapi.encryption_configuration).freeze
   end

   ##
@@ -985,7 +981,7 @@ module Google
   def buffer_bytes
     return nil if reference?
     ensure_full_data!
-    @gapi.streaming_buffer.estimated_bytes if @gapi.streaming_buffer
+    @gapi.streaming_buffer&.estimated_bytes
   end

   ##
@@ -1003,7 +999,7 @@ module Google
   def buffer_rows
     return nil if reference?
     ensure_full_data!
-    @gapi.streaming_buffer.estimated_rows if @gapi.streaming_buffer
+    @gapi.streaming_buffer&.estimated_rows
   end

   ##
@@ -1032,7 +1028,7 @@ module Google
   # @!group Attributes
   #
   def query
-    @gapi.view.query if @gapi.view
+    @gapi.view&.query
   end

   ##
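
Note: this release replaces several `x.y if x` guards with Ruby 2.3's safe navigation operator; `x&.y` returns nil when `x` is nil and otherwise calls `x.y`, so behavior is unchanged for non-false receivers. A quick sketch (OpenStruct stands in for the GAPI view object):

    require "ostruct"

    view = nil
    view&.query                               #=> nil, no NoMethodError
    view = OpenStruct.new(query: "SELECT 1")
    view&.query                               #=> "SELECT 1"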
@@ -1102,10 +1098,11 @@ module Google
   #
   def set_query query, standard_sql: nil, legacy_sql: nil, udfs: nil
     use_legacy_sql = Convert.resolve_legacy_sql standard_sql, legacy_sql
-    @gapi.view = Google::Apis::BigqueryV2::ViewDefinition.new \
+    @gapi.view = Google::Apis::BigqueryV2::ViewDefinition.new(
       query: query,
       use_legacy_sql: use_legacy_sql,
       user_defined_function_resources: udfs_gapi(udfs)
+    )
     patch_gapi! :view
   end

@@ -1201,8 +1198,7 @@ module Google
     ensure_service!
     reload! unless resource_full?
     options = { token: token, max: max, start: start }
-    data_json = service.list_tabledata \
-      dataset_id, table_id, options
+    data_json = service.list_tabledata dataset_id, table_id, options
     Data.from_gapi_json data_json, gapi, nil, service
   end

@@ -1301,11 +1297,9 @@ module Google
   #
   # @!group Data
   #
-  def copy_job destination_table, create: nil, write: nil, job_id: nil,
-               prefix: nil, labels: nil, dryrun: nil
+  def copy_job destination_table, create: nil, write: nil, job_id: nil, prefix: nil, labels: nil, dryrun: nil
     ensure_service!
-    options = { create: create, write: write, dryrun: dryrun,
-                labels: labels, job_id: job_id, prefix: prefix }
+    options = { create: create, write: write, dryrun: dryrun, labels: labels, job_id: job_id, prefix: prefix }
     updater = CopyJob::Updater.from_options(
       service,
       table_ref,
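
Note: only the signature and options hash are reflowed onto longer lines; the accepted keywords are unchanged. A hedged usage sketch (table names are illustrative):

    job = table.copy_job other_table, write: "truncate", labels: { "env" => "test" }
    job.wait_until_done!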
@@ -1474,15 +1468,12 @@ module Google
   #
   # @!group Data
   #
-  def extract_job extract_url, format: nil, compression: nil,
-                  delimiter: nil, header: nil, job_id: nil, prefix: nil,
-                  labels: nil, dryrun: nil
+  def extract_job extract_url, format: nil, compression: nil, delimiter: nil, header: nil, job_id: nil,
+                  prefix: nil, labels: nil, dryrun: nil
     ensure_service!
-    options = { format: format, compression: compression,
-                delimiter: delimiter, header: header, dryrun: dryrun,
+    options = { format: format, compression: compression, delimiter: delimiter, header: header, dryrun: dryrun,
                 job_id: job_id, prefix: prefix, labels: labels }
-    updater = ExtractJob::Updater.from_options service, table_ref,
-                                               extract_url, options
+    updater = ExtractJob::Updater.from_options service, table_ref, extract_url, options
     updater.location = location if location # may be table reference

     yield updater if block_given?
@@ -1553,8 +1544,7 @@ module Google
   #
   # @!group Data
   #
-  def extract extract_url, format: nil, compression: nil, delimiter: nil,
-              header: nil, &block
+  def extract extract_url, format: nil, compression: nil, delimiter: nil, header: nil, &block
     job = extract_job extract_url,
                       format: format,
                       compression: compression,
@@ -1750,28 +1740,18 @@ module Google
   #
   # @!group Data
   #
-  def load_job files, format: nil, create: nil, write: nil,
-               projection_fields: nil, jagged_rows: nil,
-               quoted_newlines: nil, encoding: nil, delimiter: nil,
-               ignore_unknown: nil, max_bad_records: nil, quote: nil,
-               skip_leading: nil, job_id: nil, prefix: nil, labels: nil,
-               autodetect: nil, null_marker: nil, dryrun: nil
+  def load_job files, format: nil, create: nil, write: nil, projection_fields: nil, jagged_rows: nil,
+               quoted_newlines: nil, encoding: nil, delimiter: nil, ignore_unknown: nil, max_bad_records: nil,
+               quote: nil, skip_leading: nil, job_id: nil, prefix: nil, labels: nil, autodetect: nil,
+               null_marker: nil, dryrun: nil
     ensure_service!

-    updater = load_job_updater format: format, create: create,
-                               write: write,
-                               projection_fields: projection_fields,
-                               jagged_rows: jagged_rows,
-                               quoted_newlines: quoted_newlines,
-                               encoding: encoding,
-                               delimiter: delimiter,
-                               ignore_unknown: ignore_unknown,
-                               max_bad_records: max_bad_records,
-                               quote: quote, skip_leading: skip_leading,
-                               dryrun: dryrun, job_id: job_id,
-                               prefix: prefix, schema: schema,
-                               labels: labels, autodetect: autodetect,
-                               null_marker: null_marker
+    updater = load_job_updater format: format, create: create, write: write, projection_fields: projection_fields,
+                               jagged_rows: jagged_rows, quoted_newlines: quoted_newlines, encoding: encoding,
+                               delimiter: delimiter, ignore_unknown: ignore_unknown,
+                               max_bad_records: max_bad_records, quote: quote, skip_leading: skip_leading,
+                               dryrun: dryrun, job_id: job_id, prefix: prefix, schema: schema, labels: labels,
+                               autodetect: autodetect, null_marker: null_marker

     yield updater if block_given?

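Note: the long keyword list is rewrapped but identical. A hedged call sketch (bucket and file names assumed):

    job = table.load_job "gs://my-bucket/data.csv",
                         format: "csv", skip_leading: 1, autodetect: true
    job.wait_until_done!
    raise job.error["message"] if job.failed?
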
@@ -1945,20 +1925,13 @@ module Google
   #
   # @!group Data
   #
-  def load files, format: nil, create: nil, write: nil,
-           projection_fields: nil, jagged_rows: nil, quoted_newlines: nil,
-           encoding: nil, delimiter: nil, ignore_unknown: nil,
-           max_bad_records: nil, quote: nil, skip_leading: nil,
-           autodetect: nil, null_marker: nil, &block
-    job = load_job files, format: format, create: create, write: write,
-                   projection_fields: projection_fields,
-                   jagged_rows: jagged_rows,
-                   quoted_newlines: quoted_newlines,
-                   encoding: encoding, delimiter: delimiter,
-                   ignore_unknown: ignore_unknown,
-                   max_bad_records: max_bad_records,
-                   quote: quote, skip_leading: skip_leading,
-                   autodetect: autodetect,
+  def load files, format: nil, create: nil, write: nil, projection_fields: nil, jagged_rows: nil,
+           quoted_newlines: nil, encoding: nil, delimiter: nil, ignore_unknown: nil, max_bad_records: nil,
+           quote: nil, skip_leading: nil, autodetect: nil, null_marker: nil, &block
+    job = load_job files, format: format, create: create, write: write, projection_fields: projection_fields,
+                   jagged_rows: jagged_rows, quoted_newlines: quoted_newlines, encoding: encoding,
+                   delimiter: delimiter, ignore_unknown: ignore_unknown, max_bad_records: max_bad_records,
+                   quote: quote, skip_leading: skip_leading, autodetect: autodetect,
                    null_marker: null_marker, &block

    job.wait_until_done!
@@ -2031,15 +2004,13 @@ module Google
   def insert rows, insert_ids: nil, skip_invalid: nil, ignore_unknown: nil
     rows = [rows] if rows.is_a? Hash
     insert_ids = Array insert_ids
-    if insert_ids.count > 0 && insert_ids.count != rows.count
+    if insert_ids.count.positive? && insert_ids.count != rows.count
       raise ArgumentError, "insert_ids must be the same size as rows"
     end
     rows = [rows] if rows.is_a? Hash
     raise ArgumentError, "No rows provided" if rows.empty?
     ensure_service!
-    options = { skip_invalid: skip_invalid,
-                ignore_unknown: ignore_unknown,
-                insert_ids: insert_ids }
+    options = { skip_invalid: skip_invalid, ignore_unknown: ignore_unknown, insert_ids: insert_ids }
     gapi = service.insert_tabledata dataset_id, table_id, rows, options
     InsertResponse.from_gapi rows, gapi
   end
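
Note: `count.positive?` is the RuboCop-preferred spelling of `count > 0`; the size check itself is unchanged. An illustrative streaming insert with matching insert_ids:

    rows = [{ "name" => "Alice" }, { "name" => "Bob" }]
    response = table.insert rows, insert_ids: ["id-1", "id-2"]  # counts must match
    response.success?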
@@ -2091,15 +2062,12 @@ module Google
   #
   # inserter.stop.wait!
   #
-  def insert_async skip_invalid: nil, ignore_unknown: nil,
-                   max_bytes: 10000000, max_rows: 500, interval: 10,
+  def insert_async skip_invalid: nil, ignore_unknown: nil, max_bytes: 10_000_000, max_rows: 500, interval: 10,
                    threads: 4, &block
     ensure_service!

-    AsyncInserter.new self, skip_invalid: skip_invalid,
-                      ignore_unknown: ignore_unknown,
-                      max_bytes: max_bytes, max_rows: max_rows,
-                      interval: interval, threads: threads, &block
+    AsyncInserter.new self, skip_invalid: skip_invalid, ignore_unknown: ignore_unknown, max_bytes: max_bytes,
+                      max_rows: max_rows, interval: interval, threads: threads, &block
   end

   ##
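
Note: Ruby ignores underscores in integer literals, so `max_bytes: 10_000_000` is the same default as before, just easier to read. Usage follows the pattern already documented above (rows are illustrative):

    10_000_000 == 10000000   #=> true

    inserter = table.insert_async do |result|
      # result wraps an InsertResponse or an error
    end
    inserter.insert rows   # rows as in the insert example above
    inserter.stop.wait!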
@@ -2341,9 +2309,7 @@ module Google
   def patch_gapi! *attributes
     return if attributes.empty?
     ensure_service!
-    patch_args = Hash[attributes.map do |attr|
-      [attr, @gapi.send(attr)]
-    end]
+    patch_args = Hash[attributes.map { |attr| [attr, @gapi.send(attr)] }]
     patch_gapi = Google::Apis::BigqueryV2::Table.new patch_args
     patch_gapi.etag = etag if etag
     @gapi = service.patch_table dataset_id, table_id, patch_gapi
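
Note: `Hash[]` builds a hash from an array of [key, value] pairs, so the one-line block is equivalent to the old do...end form. For example:

    attrs = [:schema, :labels]
    Hash[attrs.map { |attr| [attr, attr.to_s] }]   #=> {:schema=>"schema", :labels=>"labels"}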
@@ -2381,11 +2347,8 @@ module Google
     )
   end

-  def load_job_csv_options! job, jagged_rows: nil,
-                            quoted_newlines: nil,
-                            delimiter: nil,
-                            quote: nil, skip_leading: nil,
-                            null_marker: nil
+  def load_job_csv_options! job, jagged_rows: nil, quoted_newlines: nil, delimiter: nil, quote: nil,
+                            skip_leading: nil, null_marker: nil
     job.jagged_rows = jagged_rows unless jagged_rows.nil?
     job.quoted_newlines = quoted_newlines unless quoted_newlines.nil?
     job.delimiter = delimiter unless delimiter.nil?
@@ -2394,17 +2357,11 @@ module Google
     job.skip_leading = skip_leading unless skip_leading.nil?
   end

-  def load_job_file_options! job, format: nil,
-                             projection_fields: nil,
-                             jagged_rows: nil, quoted_newlines: nil,
-                             encoding: nil, delimiter: nil,
-                             ignore_unknown: nil, max_bad_records: nil,
-                             quote: nil, skip_leading: nil,
-                             null_marker: nil
+  def load_job_file_options! job, format: nil, projection_fields: nil, jagged_rows: nil, quoted_newlines: nil,
+                             encoding: nil, delimiter: nil, ignore_unknown: nil, max_bad_records: nil, quote: nil,
+                             skip_leading: nil, null_marker: nil
     job.format = format unless format.nil?
-    unless projection_fields.nil?
-      job.projection_fields = projection_fields
-    end
+    job.projection_fields = projection_fields unless projection_fields.nil?
     job.encoding = encoding unless encoding.nil?
     job.ignore_unknown = ignore_unknown unless ignore_unknown.nil?
     job.max_bad_records = max_bad_records unless max_bad_records.nil?
@@ -2416,16 +2373,11 @@ module Google
     null_marker: null_marker
   end

-  def load_job_updater format: nil, create: nil,
-                       write: nil, projection_fields: nil,
-                       jagged_rows: nil, quoted_newlines: nil,
-                       encoding: nil, delimiter: nil,
-                       ignore_unknown: nil, max_bad_records: nil,
-                       quote: nil, skip_leading: nil, dryrun: nil,
-                       schema: nil, job_id: nil, prefix: nil, labels: nil,
-                       autodetect: nil, null_marker: nil
-    new_job = load_job_gapi table_id, dryrun, job_id: job_id,
-                            prefix: prefix
+  def load_job_updater format: nil, create: nil, write: nil, projection_fields: nil, jagged_rows: nil,
+                       quoted_newlines: nil, encoding: nil, delimiter: nil, ignore_unknown: nil,
+                       max_bad_records: nil, quote: nil, skip_leading: nil, dryrun: nil, schema: nil, job_id: nil,
+                       prefix: nil, labels: nil, autodetect: nil, null_marker: nil
+    new_job = load_job_gapi table_id, dryrun, job_id: job_id, prefix: prefix
     LoadJob::Updater.new(new_job).tap do |job|
       job.location = location if location # may be table reference
       job.create = create unless create.nil?
@@ -2463,9 +2415,7 @@ module Google
     job_gapi.configuration.load.update! source_uris: urls
     if job_gapi.configuration.load.source_format.nil?
       source_format = Convert.derive_source_format_from_list urls
-      unless source_format.nil?
-        job_gapi.configuration.load.source_format = source_format
-      end
+      job_gapi.configuration.load.source_format = source_format unless source_format.nil?
     end
   end

@@ -2477,9 +2427,7 @@ module Google
     path = Pathname(file).to_path
     if job_gapi.configuration.load.source_format.nil?
       source_format = Convert.derive_source_format path
-      unless source_format.nil?
-        job_gapi.configuration.load.source_format = source_format
-      end
+      job_gapi.configuration.load.source_format = source_format unless source_format.nil?
     end

     gapi = service.load_table_file file, job_gapi
@@ -2499,10 +2447,8 @@ module Google
   def storage_url? files
     [files].flatten.all? do |file|
       file.respond_to?(:to_gs_url) ||
-        (file.respond_to?(:to_str) &&
-          file.to_str.downcase.start_with?("gs://")) ||
-        (file.is_a?(URI) &&
-          file.to_s.downcase.start_with?("gs://"))
+        (file.respond_to?(:to_str) && file.to_str.downcase.start_with?("gs://")) ||
+        (file.is_a?(URI) && file.to_s.downcase.start_with?("gs://"))
     end
   end

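Note: the condensed predicate still accepts Storage file objects (anything with to_gs_url), strings, and URIs, matched case-insensitively. A quick sketch of the string/URI branches:

    require "uri"

    "gs://bucket/data.csv".downcase.start_with?("gs://")            #=> true
    URI("GS://bucket/data.csv").to_s.downcase.start_with?("gs://")  #=> true
    "/local/data.csv".start_with?("gs://")                          #=> false
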
@@ -71,9 +71,8 @@ module Google

   ##
   # @private
-  def initialize table, skip_invalid: nil, ignore_unknown: nil,
-                 max_bytes: 10000000, max_rows: 500, interval: 10,
-                 threads: 4, &block
+  def initialize table, skip_invalid: nil, ignore_unknown: nil, max_bytes: 10_000_000, max_rows: 500,
+                 interval: 10, threads: 4, &block
     @table = table
     @skip_invalid = skip_invalid
     @ignore_unknown = ignore_unknown
@@ -86,8 +85,7 @@ module Google

     @batch = nil

-    @thread_pool = Concurrent::ThreadPoolExecutor.new \
-      max_threads: @threads
+    @thread_pool = Concurrent::ThreadPoolExecutor.new max_threads: @threads

     @cond = new_cond

@@ -136,8 +134,7 @@ module Google
     unless @batch.try_insert row, insert_id
       push_batch_request!

-      @batch = Batch.new max_bytes: @max_bytes,
-                         max_rows: @max_rows
+      @batch = Batch.new max_bytes: @max_bytes, max_rows: @max_rows
       @batch.insert row, insert_id
     end
   end
@@ -228,7 +225,7 @@ module Google
   def validate_insert_args rows, insert_ids
     rows = [rows] if rows.is_a? Hash
     insert_ids = Array insert_ids
-    if insert_ids.count > 0 && insert_ids.count != rows.count
+    if insert_ids.count.positive? && insert_ids.count != rows.count
       raise ArgumentError, "insert_ids must be the same size as rows"
     end
     [rows, insert_ids]
@@ -265,19 +262,15 @@ module Google
     Concurrent::Future.new executor: @thread_pool do
       begin
         raise ArgumentError, "No rows provided" if json_rows.empty?
-        options = { skip_invalid: @skip_invalid,
-                    ignore_unknown: @ignore_unknown,
-                    insert_ids: insert_ids }
+        options = { skip_invalid: @skip_invalid, ignore_unknown: @ignore_unknown, insert_ids: insert_ids }
         insert_resp = @table.service.insert_tabledata_json_rows(
           @table.dataset_id, @table.table_id, json_rows, options
         )
-        result = Result.new(
-          InsertResponse.from_gapi(orig_rows, insert_resp)
-        )
+        result = Result.new InsertResponse.from_gapi(orig_rows, insert_resp)
       rescue StandardError => e
         result = Result.new nil, e
       ensure
-        @callback.call result if @callback
+        @callback&.call result
       end
     end.execute

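Note: each batch is pushed on a Concurrent::Future backed by the inserter's thread pool, and `@callback&.call result` is simply a no-op when no callback block was given. A minimal sketch of the concurrent-ruby pattern, assuming the concurrent-ruby gem (the block here just stands in for the insert call):

    require "concurrent"

    pool = Concurrent::ThreadPoolExecutor.new max_threads: 4
    future = Concurrent::Future.new(executor: pool) { 1 + 1 }
    future.execute
    future.value   #=> 2, blocking until the pool has run the block
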
@@ -290,7 +283,7 @@ module Google
 class Batch
   attr_reader :max_bytes, :max_rows, :rows, :json_rows, :insert_ids

-  def initialize max_bytes: 10000000, max_rows: 500
+  def initialize max_bytes: 10_000_000, max_rows: 500
     @max_bytes = max_bytes
     @max_rows = max_rows
     @rows = []
@@ -306,8 +299,7 @@ module Google
     insert_id ||= SecureRandom.uuid
     json_row = to_json_row row

-    insert_rows_bytes \
-      row, json_row, insert_id, addl_bytes_for(json_row, insert_id)
+    insert_rows_bytes row, json_row, insert_id, addl_bytes_for(json_row, insert_id)
   end

   def try_insert row, insert_id
@@ -133,15 +133,13 @@ module Google
   #
   def all request_limit: nil
     request_limit = request_limit.to_i if request_limit
-    unless block_given?
-      return enum_for :all, request_limit: request_limit
-    end
+    return enum_for :all, request_limit: request_limit unless block_given?
     results = self
     loop do
       results.each { |r| yield r }
       if request_limit
         request_limit -= 1
-        break if request_limit < 0
+        break if request_limit.negative?
       end
       break unless results.next?
       results = results.next
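
Note: without a block, `all` now returns its Enumerator via a guard clause, and `request_limit.negative?` replaces the `< 0` comparison; paging behavior is unchanged. Illustrative use (dataset name assumed):

    dataset.tables.all(request_limit: 2).map(&:table_id)  # caps the follow-up page fetches
    dataset.tables.all { |table| puts table.table_id }     # yields across all pages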
@@ -151,9 +149,7 @@ module Google
   ##
   # @private New Table::List from a response object.
   def self.from_gapi gapi_list, service, dataset_id = nil, max = nil
-    tables = List.new(Array(gapi_list.tables).map do |gapi_object|
-      Table.from_gapi gapi_object, service
-    end)
+    tables = List.new(Array(gapi_list.tables).map { |gapi_object| Table.from_gapi gapi_object, service })
     tables.instance_variable_set :@token, gapi_list.next_page_token
     tables.instance_variable_set :@etag, gapi_list.etag
     tables.instance_variable_set :@total, gapi_list.total_items