google-cloud-bigquery 1.29.0 → 1.30.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +15 -0
- data/CONTRIBUTING.md +2 -3
- data/OVERVIEW.md +15 -14
- data/lib/google/cloud/bigquery/convert.rb +72 -72
- data/lib/google/cloud/bigquery/dataset.rb +75 -41
- data/lib/google/cloud/bigquery/load_job.rb +51 -3
- data/lib/google/cloud/bigquery/project.rb +44 -40
- data/lib/google/cloud/bigquery/query_job.rb +37 -34
- data/lib/google/cloud/bigquery/schema.rb +39 -3
- data/lib/google/cloud/bigquery/schema/field.rb +63 -13
- data/lib/google/cloud/bigquery/standard_sql.rb +11 -0
- data/lib/google/cloud/bigquery/table.rb +87 -7
- data/lib/google/cloud/bigquery/table/async_inserter.rb +20 -2
- data/lib/google/cloud/bigquery/version.rb +1 -1
- metadata +2 -2
data/lib/google/cloud/bigquery/standard_sql.rb CHANGED
@@ -273,6 +273,17 @@ module Google
             type_kind == "NUMERIC".freeze
           end

+          ##
+          # Checks if the {#type_kind} of the field is `BIGNUMERIC`.
+          #
+          # @return [Boolean] `true` when `BIGNUMERIC`, `false` otherwise.
+          #
+          # @!group Helpers
+          #
+          def bignumeric?
+            type_kind == "BIGNUMERIC".freeze
+          end
+
           ##
           # Checks if the {#type_kind} of the field is `BOOL`.
           #
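The new `bignumeric?` predicate joins the existing `numeric?`, `bool?`, and related helpers on `StandardSql::DataType`. A minimal usage sketch, assuming a dataset `my_dataset` with a routine `my_routine` whose arguments are inspected via `data_type` (both names are illustrative, not part of this diff):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
dataset  = bigquery.dataset "my_dataset"   # illustrative dataset
routine  = dataset.routine "my_routine"    # illustrative routine

routine.arguments.each do |argument|
  data_type = argument.data_type
  if data_type.bignumeric?
    # BIGNUMERIC values should be passed as String to keep scale beyond 9
    puts "#{argument.name}: BIGNUMERIC"
  elsif data_type.numeric?
    # NUMERIC values passed as BigDecimal are rounded to scale 9
    puts "#{argument.name}: NUMERIC"
  end
end
```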
data/lib/google/cloud/bigquery/table.rb CHANGED
@@ -395,8 +395,8 @@ module Google
         # {#resource_full?}), the full representation will be retrieved before
         # the update to comply with ETag-based optimistic concurrency control.
         #
-        # @param [Integer] expiration An expiration time, in seconds,
-        #   for data in time partitions.
+        # @param [Integer, nil] expiration An expiration time, in seconds,
+        #   for data in time partitions, or `nil` to indicate no expiration time.
         #
         # @example
         #   require "google/cloud/bigquery"
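This parameter change pairs with the setter change in the following hunk: assigning `nil` now clears the partition expiration instead of failing when the value is multiplied. A short sketch of the resulting usage (dataset and table names are illustrative):

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
table = bigquery.dataset("my_dataset").table "my_table"

table.time_partitioning_expiration = 86_400  # expire partition data after one day
table.time_partitioning_expiration = nil     # remove the expiration entirely
```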
@@ -416,8 +416,9 @@ module Google
         #
         def time_partitioning_expiration= expiration
           reload! unless resource_full?
+          expiration_ms = expiration * 1000 if expiration
           @gapi.time_partitioning ||= Google::Apis::BigqueryV2::TimePartitioning.new
-          @gapi.time_partitioning.expiration_ms = expiration * 1000
+          @gapi.time_partitioning.expiration_ms = expiration_ms
           patch_gapi! :time_partitioning
         end

@@ -2305,6 +2306,21 @@ module Google
         # need to complete a load operation before the data can appear in query
         # results.
         #
+        # Simple Ruby types are generally accepted per JSON rules, along with the following support for BigQuery's more
+        # complex types:
+        #
+        # | BigQuery     | Ruby                                 | Notes                                              |
+        # |--------------|--------------------------------------|----------------------------------------------------|
+        # | `NUMERIC`    | `BigDecimal`                         | `BigDecimal` values will be rounded to scale 9.    |
+        # | `BIGNUMERIC` | `String`                             | Pass as `String` to avoid rounding to scale 9.     |
+        # | `DATETIME`   | `DateTime`                           | `DATETIME` does not support time zone.             |
+        # | `DATE`       | `Date`                               |                                                    |
+        # | `TIMESTAMP`  | `Time`                               |                                                    |
+        # | `TIME`       | `Google::Cloud::BigQuery::Time`      |                                                    |
+        # | `BYTES`      | `File`, `IO`, `StringIO`, or similar |                                                    |
+        # | `ARRAY`      | `Array`                              | Nested arrays, `nil` values are not supported.     |
+        # | `STRUCT`     | `Hash`                               | Hash keys may be strings or symbols.               |
+        #
         # Because BigQuery's streaming API is designed for high insertion rates,
         # modifications to the underlying table metadata are eventually
         # consistent when interacting with the streaming system. In most cases
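As a rough illustration of the mapping table above, one streamed row can mix several of these Ruby types. The column names and table layout below are assumptions for the sketch, not part of the gem:

```ruby
require "google/cloud/bigquery"
require "bigdecimal"
require "date"

bigquery = Google::Cloud::Bigquery.new
table = bigquery.dataset("my_dataset").table "my_table"  # illustrative names

row = {
  "price"        => BigDecimal("19.990000001"),    # NUMERIC: rounded to scale 9
  "balance"      => "0.12345678901234567890",      # BIGNUMERIC: String avoids rounding
  "purchased_at" => Time.now,                      # TIMESTAMP
  "ship_date"    => Date.today,                    # DATE
  "tags"         => ["sale", "clearance"],         # ARRAY<STRING>
  "buyer"        => { name: "Alice", id: 7 }       # STRUCT: keys may be strings or symbols
}

table.insert row
```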
@@ -2318,7 +2334,10 @@ module Google
         #   BigQuery Troubleshooting: Metadata errors for streaming inserts
         #
         # @param [Hash, Array<Hash>] rows A hash object or array of hash objects
-        #   containing the data. Required.
+        #   containing the data. Required. `BigDecimal` values will be rounded to
+        #   scale 9 to conform with the BigQuery `NUMERIC` data type. To avoid
+        #   rounding `BIGNUMERIC` type values with scale greater than 9, use `String`
+        #   instead of `BigDecimal`.
         # @param [Array<String|Symbol>, Symbol] insert_ids A unique ID for each row. BigQuery uses this property to
         #   detect duplicate insertion requests on a best-effort basis. For more information, see [data
         #   consistency](https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataconsistency). Optional. If
@@ -2361,6 +2380,19 @@ module Google
         #   ]
         #   table.insert rows
         #
+        # @example Pass `BIGNUMERIC` value as a string to avoid rounding to scale 9 in the conversion from `BigDecimal`:
+        #   require "google/cloud/bigquery"
+        #
+        #   bigquery = Google::Cloud::Bigquery.new
+        #   dataset = bigquery.dataset "my_dataset"
+        #   table = dataset.table "my_table"
+        #
+        #   row = {
+        #     "my_numeric" => BigDecimal("123456798.987654321"),
+        #     "my_bignumeric" => "123456798.98765432100001" # BigDecimal would be rounded, use String instead!
+        #   }
+        #   table.insert row
+        #
         # @!group Data
         #
         def insert rows, insert_ids: nil, skip_invalid: nil, ignore_unknown: nil
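The reason for the `String` recommendation can be reproduced in plain Ruby, without any BigQuery call: rounding a `BigDecimal` to scale 9 (which, per the docs above, is what happens for `NUMERIC` conversion) discards exactly the digits that `BIGNUMERIC` is meant to preserve.

```ruby
require "bigdecimal"

value = BigDecimal("123456798.98765432100001")

puts value.round(9).to_s("F")    # => "123456798.987654321" (trailing digits lost)
puts "123456798.98765432100001"  # the String form keeps every digit
```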
@@ -3229,9 +3261,18 @@ module Google
           end

           ##
-          # Adds a numeric number field to the schema. Numeric is a
-          # fixed-precision numeric type with 38 decimal digits, 9 that follow
-          # the decimal point.
+          # Adds a numeric number field to the schema. `NUMERIC` is a decimal
+          # type with fixed precision and scale. Precision is the number of
+          # digits that the number contains. Scale is how many of these
+          # digits appear after the decimal point. It supports:
+          #
+          # Precision: 38
+          # Scale: 9
+          # Min: -9.9999999999999999999999999999999999999E+28
+          # Max: 9.9999999999999999999999999999999999999E+28
+          #
+          # This type can represent decimal fractions exactly, and is suitable
+          # for financial calculations.
           #
           # See {Schema#numeric}
           #
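For deciding between the two decimal types, the limits above amount to at most 29 integer digits and 9 fractional digits for `NUMERIC`. A rough, hypothetical helper (not part of the gem) that applies that rule to a `BigDecimal`:

```ruby
require "bigdecimal"

# True when the value stays within NUMERIC's documented limits
# (precision 38 / scale 9, i.e. up to 29 integer and 9 fractional digits).
def fits_numeric? decimal
  _sign, digits, _base, exponent = decimal.split
  integer_digits    = [exponent, 0].max
  fractional_digits = [digits.length - exponent, 0].max
  integer_digits <= 29 && fractional_digits <= 9
end

puts fits_numeric?(BigDecimal("123456798.987654321"))       # => true  (NUMERIC is enough)
puts fits_numeric?(BigDecimal("123456798.98765432100001"))  # => false (prefer BIGNUMERIC as String)
```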
@@ -3258,6 +3299,45 @@ module Google
             schema.numeric name, description: description, mode: mode
           end

+          ##
+          # Adds a bignumeric number field to the schema. `BIGNUMERIC` is a
+          # decimal type with fixed precision and scale. Precision is the
+          # number of digits that the number contains. Scale is how many of
+          # these digits appear after the decimal point. It supports:
+          #
+          # Precision: 76.76 (the 77th digit is partial)
+          # Scale: 38
+          # Min: -5.7896044618658097711785492504343953926634992332820282019728792003956564819968E+38
+          # Max: 5.7896044618658097711785492504343953926634992332820282019728792003956564819967E+38
+          #
+          # This type can represent decimal fractions exactly, and is suitable
+          # for financial calculations.
+          #
+          # See {Schema#bignumeric}
+          #
+          # @param [String] name The field name. The name must contain only
+          #   letters (a-z, A-Z), numbers (0-9), or underscores (_), and must
+          #   start with a letter or underscore. The maximum length is 128
+          #   characters.
+          # @param [String] description A description of the field.
+          # @param [Symbol] mode The field's mode. The possible values are
+          #   `:nullable`, `:required`, and `:repeated`. The default value is
+          #   `:nullable`.
+          #
+          # @example
+          #   require "google/cloud/bigquery"
+          #
+          #   bigquery = Google::Cloud::Bigquery.new
+          #   dataset = bigquery.dataset "my_dataset"
+          #   table = dataset.create_table "my_table" do |schema|
+          #     schema.bignumeric "total_cost", mode: :required
+          #   end
+          #
+          # @!group Schema
+          def bignumeric name, description: nil, mode: :nullable
+            schema.bignumeric name, description: description, mode: mode
+          end
+
           ##
           # Adds a boolean field to the schema.
           #
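Since `Schema` gains the same method in this release (see the `schema.rb` entry in the file list), an existing table can also be extended with a `BIGNUMERIC` column through the usual schema updater block; the column name and description here are illustrative:

```ruby
require "google/cloud/bigquery"

bigquery = Google::Cloud::Bigquery.new
table = bigquery.dataset("my_dataset").table "my_table"

table.schema do |schema|
  schema.bignumeric "exact_balance", mode: :nullable, description: "Balance kept at full scale"
end
```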
data/lib/google/cloud/bigquery/table/async_inserter.rb CHANGED
@@ -101,6 +101,21 @@ module Google
           # collected in batches and inserted together.
           # See {Google::Cloud::Bigquery::Table#insert_async}.
           #
+          # Simple Ruby types are generally accepted per JSON rules, along with the following support for BigQuery's
+          # more complex types:
+          #
+          # | BigQuery     | Ruby                                 | Notes                                              |
+          # |--------------|--------------------------------------|----------------------------------------------------|
+          # | `NUMERIC`    | `BigDecimal`                         | `BigDecimal` values will be rounded to scale 9.    |
+          # | `BIGNUMERIC` | `String`                             | Pass as `String` to avoid rounding to scale 9.     |
+          # | `DATETIME`   | `DateTime`                           | `DATETIME` does not support time zone.             |
+          # | `DATE`       | `Date`                               |                                                    |
+          # | `TIMESTAMP`  | `Time`                               |                                                    |
+          # | `TIME`       | `Google::Cloud::BigQuery::Time`      |                                                    |
+          # | `BYTES`      | `File`, `IO`, `StringIO`, or similar |                                                    |
+          # | `ARRAY`      | `Array`                              | Nested arrays, `nil` values are not supported.     |
+          # | `STRUCT`     | `Hash`                               | Hash keys may be strings or symbols.               |
+          #
           # Because BigQuery's streaming API is designed for high insertion
           # rates, modifications to the underlying table metadata are eventually
           # consistent when interacting with the streaming system. In most cases
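The same `String`-for-`BIGNUMERIC` guidance applies to rows handed to the asynchronous inserter. A sketch that assumes `my_table` has `my_numeric` and `my_bignumeric` columns, mirroring the synchronous example earlier in this diff:

```ruby
require "google/cloud/bigquery"
require "bigdecimal"

bigquery = Google::Cloud::Bigquery.new
table = bigquery.dataset("my_dataset").table "my_table"

inserter = table.insert_async do |result|
  if result.error?
    puts "insert failed: #{result.error}"
  else
    puts "inserted #{result.insert_count} rows (#{result.error_count} row errors)"
  end
end

inserter.insert [
  {
    "my_numeric"    => BigDecimal("3.14159265358979"),  # rounded to scale 9 on conversion
    "my_bignumeric" => "3.14159265358979323846"         # String keeps the full scale
  }
]

inserter.stop.wait!  # flush pending rows and wait for the background batches to finish
```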
@@ -114,8 +129,11 @@ module Google
           # @see https://cloud.google.com/bigquery/troubleshooting-errors#metadata-errors-for-streaming-inserts
           #   BigQuery Troubleshooting: Metadata errors for streaming inserts
           #
-          # @param [Hash, Array<Hash>] rows A hash object or array of hash
-          #   objects containing the data. Required.
+          # @param [Hash, Array<Hash>] rows A hash object or array of hash objects
+          #   containing the data. Required. `BigDecimal` values will be rounded to
+          #   scale 9 to conform with the BigQuery `NUMERIC` data type. To avoid
+          #   rounding `BIGNUMERIC` type values with scale greater than 9, use `String`
+          #   instead of `BigDecimal`.
           # @param [Array<String|Symbol>, Symbol] insert_ids A unique ID for each row. BigQuery uses this property to
           #   detect duplicate insertion requests on a best-effort basis. For more information, see [data
           #   consistency](https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataconsistency). Optional. If
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: google-cloud-bigquery
 version: !ruby/object:Gem::Version
-  version: 1.29.0
+  version: 1.30.0
 platform: ruby
 authors:
 - Mike Moore
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-
+date: 2021-04-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: concurrent-ruby