google-cloud-bigquery-storage-v1 0.10.0 → 0.11.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 3a95d34616ffc1455f203b27be3825854e709526a5cfe3e4c72db03faa5351b2
- data.tar.gz: 8af44f131742eaf8ce8f843d85509b7f89edfd5e4098a85770e78a537d1ab13a
+ metadata.gz: 93523ae607e50f73b09408fdb11c67f28b18a7b23585d65876e3613a71c80393
+ data.tar.gz: 186c06d4d9ec840c84472b7e8cb69680713d28457b81d009ba8bec85bb346ce2
  SHA512:
- metadata.gz: 77d2465381c89df36bfebfb685ff9ba73f7f7f099c61996277e44d8ed37b0f922f11cebfd52cf6e143923d0935d39f62a24d0e473718faa27c8c0d489e1955ff
- data.tar.gz: fe39b9210c5843f6e3ad6fc5f5aaa068139dff0f941681b0b0003defdd566a13ac773fb0e66e7fbfa351acbec535a0d65a2904f0ba29e87ff7a59fde9fdf87b4
+ metadata.gz: 30d6f2151678722ff459a97691e99c310d0428e4717ca2123457903ae39cc4c24ef1d4dde6db9225079aad77d8c956a80cf35c9afe96b6c537cb765e717aa53f
+ data.tar.gz: 85b427659ebd8058d344feb0f79eb770e861bc570cc55a0c4711dce350933b82fc5650e83c0aeb9da28fa3f295df5e227f3708f98ea102bf965685082ac9116f
data/README.md CHANGED
@@ -69,6 +69,11 @@ module GRPC
  end
  ```
 
+
+ ## Google Cloud Samples
+
+ To browse ready-to-use code samples, check out [Google Cloud Samples](https://cloud.google.com/docs/samples).
+
  ## Supported Ruby Versions
 
  This library is supported on Ruby 2.5+.
@@ -207,11 +207,13 @@ module Google
  # Max initial number of streams. If unset or zero, the server will
  # provide a value of streams so as to produce reasonable throughput. Must be
  # non-negative. The number of streams may be lower than the requested number,
- # depending on the amount parallelism that is reasonable for the table. Error
- # will be returned if the max count is greater than the current system
- # max limit of 1,000.
+ # depending on the amount of parallelism that is reasonable for the table.
+ # There is a default system max limit of 1,000.
  #
- # Streams must be read starting from offset 0.
+ # This must be greater than or equal to preferred_min_stream_count.
+ # Typically, clients should either leave this unset to let the system
+ # determine an upper bound, or set it to the maximum number of "units of
+ # work" the client can gracefully handle.
  #
  # @yield [response, operation] Access the result along with the RPC operation
  # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::ReadSession]
@@ -70,9 +70,9 @@ module Google
  end
  default_config = Client::Configuration.new parent_config
 
- default_config.rpcs.create_write_stream.timeout = 600.0
+ default_config.rpcs.create_write_stream.timeout = 1200.0
  default_config.rpcs.create_write_stream.retry_policy = {
- initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
+ initial_delay: 10.0, max_delay: 120.0, multiplier: 1.3, retry_codes: [4, 14, 8]
  }
 
  default_config.rpcs.append_rows.timeout = 86_400.0
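
The new defaults (a 20-minute timeout and retries on DEADLINE_EXCEEDED, UNAVAILABLE, and now RESOURCE_EXHAUSTED) can still be overridden per client; a sketch of reverting to something closer to the old behavior:

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
  config.rpcs.create_write_stream.timeout = 600.0 # old 10-minute default
  config.rpcs.create_write_stream.retry_policy = {
    initial_delay: 1.0, max_delay: 60.0, multiplier: 1.3,
    retry_codes: [4, 14, 8] # DEADLINE_EXCEEDED, UNAVAILABLE, RESOURCE_EXHAUSTED
  }
end
```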
@@ -1,9 +1,10 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/protobuf.proto
 
- require 'google/protobuf/descriptor_pb'
  require 'google/protobuf'
 
+ require 'google/protobuf/descriptor_pb'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/bigquery/storage/v1/protobuf.proto", :syntax => :proto3) do
  add_message "google.cloud.bigquery.storage.v1.ProtoSchema" do
@@ -1,6 +1,8 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/storage.proto
 
+ require 'google/protobuf'
+
  require 'google/api/annotations_pb'
  require 'google/api/client_pb'
  require 'google/api/field_behavior_pb'
@@ -13,7 +15,6 @@ require 'google/cloud/bigquery/storage/v1/table_pb'
  require 'google/protobuf/timestamp_pb'
  require 'google/protobuf/wrappers_pb'
  require 'google/rpc/status_pb'
- require 'google/protobuf'
 
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/bigquery/storage/v1/storage.proto", :syntax => :proto3) do
@@ -75,6 +76,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  end
  add_message "google.cloud.bigquery.storage.v1.AppendRowsResponse" do
  optional :updated_schema, :message, 3, "google.cloud.bigquery.storage.v1.TableSchema"
+ repeated :row_errors, :message, 4, "google.cloud.bigquery.storage.v1.RowError"
  oneof :response do
  optional :append_result, :message, 1, "google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult"
  optional :error, :message, 2, "google.rpc.Status"
@@ -124,6 +126,15 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  value :OFFSET_ALREADY_EXISTS, 8
  value :OFFSET_OUT_OF_RANGE, 9
  end
+ add_message "google.cloud.bigquery.storage.v1.RowError" do
+ optional :index, :int64, 1
+ optional :code, :enum, 2, "google.cloud.bigquery.storage.v1.RowError.RowErrorCode"
+ optional :message, :string, 3
+ end
+ add_enum "google.cloud.bigquery.storage.v1.RowError.RowErrorCode" do
+ value :ROW_ERROR_CODE_UNSPECIFIED, 0
+ value :FIELDS_ERROR, 1
+ end
  end
  end
 
@@ -154,6 +165,8 @@ module Google
  FlushRowsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.FlushRowsResponse").msgclass
  StorageError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.StorageError").msgclass
  StorageError::StorageErrorCode = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode").enummodule
+ RowError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.RowError").msgclass
+ RowError::RowErrorCode = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.RowError.RowErrorCode").enummodule
  end
  end
  end
@@ -1,13 +1,14 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/stream.proto
 
+ require 'google/protobuf'
+
  require 'google/api/field_behavior_pb'
  require 'google/api/resource_pb'
  require 'google/cloud/bigquery/storage/v1/arrow_pb'
  require 'google/cloud/bigquery/storage/v1/avro_pb'
  require 'google/cloud/bigquery/storage/v1/table_pb'
  require 'google/protobuf/timestamp_pb'
- require 'google/protobuf'
 
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/bigquery/storage/v1/stream.proto", :syntax => :proto3) do
@@ -1,9 +1,10 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/table.proto
 
- require 'google/api/field_behavior_pb'
  require 'google/protobuf'
 
+ require 'google/api/field_behavior_pb'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/bigquery/storage/v1/table.proto", :syntax => :proto3) do
  add_message "google.cloud.bigquery.storage.v1.TableSchema" do
@@ -22,7 +22,7 @@ module Google
  module Bigquery
  module Storage
  module V1
- VERSION = "0.10.0"
+ VERSION = "0.11.1"
  end
  end
  end
@@ -42,7 +42,8 @@ module Google
  # IPC-serialized Arrow RecordBatch.
  # @!attribute [rw] row_count
  # @return [::Integer]
- # The count of rows in `serialized_record_batch`.
+ # [Deprecated] The count of rows in `serialized_record_batch`.
+ # Please use the format-independent ReadRowsResponse.row_count instead.
  class ArrowRecordBatch
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -38,7 +38,8 @@ module Google
  # Binary serialized rows in a block.
  # @!attribute [rw] row_count
  # @return [::Integer]
- # The count of rows in the returning block.
+ # [Deprecated] The count of rows in the returning block.
+ # Please use the format-independent ReadRowsResponse.row_count instead.
  class AvroRows
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
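
Both the Arrow and Avro per-format counts are deprecated in favor of the format-independent `ReadRowsResponse.row_count`; a sketch of the recommended usage (the stream name is hypothetical):

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
stream_name = "projects/my-project/locations/us/sessions/SESSION_ID/streams/STREAM_ID"

total_rows = 0
client.read_rows(read_stream: stream_name, offset: 0).each do |response|
  # Prefer response.row_count over response.avro_rows.row_count or
  # response.arrow_record_batch.row_count, which are now deprecated.
  total_rows += response.row_count
end
puts "Read #{total_rows} rows"
```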
@@ -35,11 +35,13 @@ module Google
  # Max initial number of streams. If unset or zero, the server will
  # provide a value of streams so as to produce reasonable throughput. Must be
  # non-negative. The number of streams may be lower than the requested number,
- # depending on the amount parallelism that is reasonable for the table. Error
- # will be returned if the max count is greater than the current system
- # max limit of 1,000.
+ # depending on the amount of parallelism that is reasonable for the table.
+ # There is a default system max limit of 1,000.
  #
- # Streams must be read starting from offset 0.
+ # This must be greater than or equal to preferred_min_stream_count.
+ # Typically, clients should either leave this unset to let the system
+ # determine an upper bound, or set it to the maximum number of "units of
+ # work" the client can gracefully handle.
  class CreateReadSessionRequest
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -262,6 +264,11 @@ module Google
  # If backend detects a schema update, pass it to user so that user can
  # use it to input new type of message. It will be empty when no schema
  # updates have occurred.
+ # @!attribute [rw] row_errors
+ # @return [::Array<::Google::Cloud::Bigquery::Storage::V1::RowError>]
+ # If a request failed due to corrupted rows, no rows in the batch will be
+ # appended. The API will return row-level error info, so that the caller can
+ # remove the bad rows and retry the request.
  class AppendRowsResponse
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -413,6 +420,30 @@ module Google
  OFFSET_OUT_OF_RANGE = 9
  end
  end
+
+ # The message that presents row-level error info in a request.
+ # @!attribute [rw] index
+ # @return [::Integer]
+ # Index of the malformed row in the request.
+ # @!attribute [rw] code
+ # @return [::Google::Cloud::Bigquery::Storage::V1::RowError::RowErrorCode]
+ # Structured error reason for a row error.
+ # @!attribute [rw] message
+ # @return [::String]
+ # Description of the issue encountered when processing the row.
+ class RowError
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Error code for `RowError`.
+ module RowErrorCode
+ # Default error.
+ ROW_ERROR_CODE_UNSPECIFIED = 0
+
+ # One or more fields in the row have errors.
+ FIELDS_ERROR = 1
+ end
+ end
  end
  end
  end
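
A sketch of how a writer might consume the new field; `requests` stands for an Enumerable of AppendRowsRequest messages built elsewhere (hypothetical):

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new

client.append_rows(requests).each do |response|
  next if response.row_errors.empty?
  # None of the rows in this batch were appended; log the malformed rows so
  # the caller can drop them and retry the request without them.
  response.row_errors.each do |row_error|
    warn "row #{row_error.index}: #{row_error.code} - #{row_error.message}"
  end
end
```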
@@ -34,7 +34,7 @@ module Google
  # automatically assigned and currently cannot be specified or updated.
  # @!attribute [rw] data_format
  # @return [::Google::Cloud::Bigquery::Storage::V1::DataFormat]
- # Immutable. Data format of the output data.
+ # Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED is not supported.
  # @!attribute [r] avro_schema
  # @return [::Google::Cloud::Bigquery::Storage::V1::AvroSchema]
  # Output only. Avro schema.
@@ -185,6 +185,7 @@ module Google
 
  # Data format for input or output data.
  module DataFormat
+ # Data format is unspecified.
  DATA_FORMAT_UNSPECIFIED = 0
 
  # Avro is a standard open source row based file format.
@@ -22,7 +22,9 @@ module Google
  module Bigquery
  module Storage
  module V1
- # Schema of a table.
+ # Schema of a table. This schema is a subset of
+ # google.cloud.bigquery.v2.TableSchema containing information necessary to
+ # generate a valid message to write to BigQuery.
  # @!attribute [rw] fields
  # @return [::Array<::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema>]
  # Describes the fields in a table.
@@ -44,7 +44,7 @@ module Google
  # foo = any.unpack(Foo.class);
  # }
  #
- # Example 3: Pack and unpack a message in Python.
+ # Example 3: Pack and unpack a message in Python.
  #
  # foo = Foo(...)
  # any = Any()
@@ -54,7 +54,7 @@ module Google
  # any.Unpack(foo)
  # ...
  #
- # Example 4: Pack and unpack a message in Go
+ # Example 4: Pack and unpack a message in Go
  #
  # foo := &pb.Foo{...}
  # any, err := anypb.New(foo)
@@ -75,7 +75,7 @@ module Google
  #
  #
  # JSON
- # ====
+ #
  # The JSON representation of an `Any` value uses the regular
  # representation of the deserialized, embedded message, with an
  # additional field `@type` which contains the type URL. Example:
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: google-cloud-bigquery-storage-v1
  version: !ruby/object:Gem::Version
- version: 0.10.0
+ version: 0.11.1
  platform: ruby
  authors:
  - Google LLC
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-03-04 00:00:00.000000000 Z
+ date: 2022-06-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gapic-common
@@ -219,7 +219,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.3.5
+ rubygems_version: 3.3.14
  signing_key:
  specification_version: 4
  summary: API Client library for the BigQuery Storage V1 API