google-cloud-bigquery-storage-v1 0.16.0 → 0.17.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 3ba278f72eb260018a4fa28c33c050f3c101994e3bd1ac72ec0fb91fb1a417ad
- data.tar.gz: 58ceab94ac0e90fa8f3197b3853472778f133b08eeabab01674e35c07e44cfec
+ metadata.gz: 0c72d5f9b1dfa4079678a8e73a1d5d3df8d71e195609aa3de08d66b9f0f56287
+ data.tar.gz: 10f4a52344cc7755e7ccb7d49c005024c8a1815af11f5ecbc060525bb1ceb8a7
  SHA512:
- metadata.gz: adb7300aa88a8ea63a809183d017e3d14b7f4ba3c51b066abdf838c112080c2718a0634760d39d20ef8f084850f9572abb3971a7217aa1cb703f277e5f63e7da
- data.tar.gz: 1112e6845a4f4c2fa78336e582c4783b420a7e9afe9746c36977310152f16a8b450f996393da2d77b4c9ed311a89214a6ff262e710231aa50afaf714020890f9
+ metadata.gz: bdef2e5107c751723afbea592e9c327030dcb5cfd88376aca15dfacb398d472c6054ff89e7fd7766575077af4fbaa45031b0a99fd36d178ef2f19a9e0382278a
+ data.tar.gz: 0f76040930bcab5235a00f70c467d9bef79bb65437f2cc422cc117287a6ec03ecaa8bbe5dc237a18b47612f2e59bb5d6b49a516be08008d39834f4836d61c6c8
data/README.md CHANGED
@@ -46,7 +46,7 @@ for general usage information.
  ## Enabling Logging

  To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library.
- The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/stdlib/libdoc/logger/rdoc/Logger.html) as shown below,
+ The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/current/stdlibs/logger/Logger.html) as shown below,
  or a [`Google::Cloud::Logging::Logger`](https://googleapis.dev/ruby/google-cloud-logging/latest)
  that will write logs to [Cloud Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb)
  and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information.
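
The stdlib-`Logger` example the README alludes to ("as shown below") is not part of this hunk; a minimal sketch of the pattern follows, with an illustrative module name and log level rather than anything taken from the gem:

```ruby
require "logger"

# Route gRPC's internal logging to a Ruby stdlib Logger.
# The module name and WARN level are illustrative choices.
module MyAppGrpcLogger
  LOGGER = Logger.new $stderr, level: Logger::WARN

  def logger
    LOGGER
  end
end

# gRPC looks for a `logger` method on the GRPC module (see grpc/logconfig.rb).
module GRPC
  extend MyAppGrpcLogger
end
```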
@@ -581,8 +581,8 @@ module Google
  # the default parameter values, pass an empty Hash as a request object (see above).
  #
  # @param parent [::String]
- # Required. Parent table that all the streams should belong to, in the form of
- # `projects/{project}/datasets/{dataset}/tables/{table}`.
+ # Required. Parent table that all the streams should belong to, in the form
+ # of `projects/{project}/datasets/{dataset}/tables/{table}`.
  # @param write_streams [::Array<::String>]
  # Required. The group of streams that will be committed atomically.
  #
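
The rewrapped doc comment above documents the generated `BigQueryWrite` client's `batch_commit_write_streams` call. A hedged usage sketch; the project, dataset, table, and stream names are placeholders:

```ruby
require "google/cloud/bigquery/storage/v1"

# Commit a group of pending write streams atomically (resource names are placeholders).
client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new

response = client.batch_commit_write_streams(
  parent: "projects/my-project/datasets/my_dataset/tables/my_table",
  write_streams: ["projects/my-project/datasets/my_dataset/tables/my_table/streams/stream-1"]
)

# On success, commit_time is populated; otherwise stream_errors lists the failures.
puts response.commit_time
puts response.stream_errors
```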
@@ -21,6 +21,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :read_options, :message, 8, "google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"
  repeated :streams, :message, 10, "google.cloud.bigquery.storage.v1.ReadStream"
  optional :estimated_total_bytes_scanned, :int64, 12
+ optional :estimated_row_count, :int64, 14
  optional :trace_id, :string, 13
  oneof :schema do
  optional :avro_schema, :message, 4, "google.cloud.bigquery.storage.v1.AvroSchema"
@@ -22,7 +22,7 @@ module Google
  module Bigquery
  module Storage
  module V1
- VERSION = "0.16.0"
+ VERSION = "0.17.0"
  end
  end
  end
@@ -198,10 +198,10 @@ module Google
  # Requests larger than this return an error, typically `INVALID_ARGUMENT`.
  # @!attribute [rw] write_stream
  # @return [::String]
- # Required. The write_stream identifies the target of the append operation, and only
- # needs to be specified as part of the first request on the gRPC connection.
- # If provided for subsequent requests, it must match the value of the first
- # request.
+ # Required. The write_stream identifies the target of the append operation,
+ # and only needs to be specified as part of the first request on the gRPC
+ # connection. If provided for subsequent requests, it must match the value of
+ # the first request.
  #
  # For explicitly created write streams, the format is:
  #
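
In practice the rule reads: within one `append_rows` stream, set `write_stream` on the first request; later requests may omit it or must repeat the same value. A rough sketch, assuming `writer_schema` (a `ProtoSchema` for the row message) and `row_bytes` / `more_row_bytes` (arrays of serialized row messages) already exist; those names and the stream name are placeholders:

```ruby
require "google/cloud/bigquery/storage/v1"

client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
stream = "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default"

requests = [
  # First request on the connection: write_stream is required here.
  { write_stream: stream,
    proto_rows: { writer_schema: writer_schema, rows: { serialized_rows: row_bytes } } },
  # Subsequent requests may omit write_stream (if set, it must match the first request).
  { proto_rows: { rows: { serialized_rows: more_row_bytes } } }
]

client.append_rows(requests).each do |response|
  puts response.append_result&.offset&.value
end
```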
@@ -364,8 +364,8 @@ module Google
  # Request message for `BatchCommitWriteStreams`.
  # @!attribute [rw] parent
  # @return [::String]
- # Required. Parent table that all the streams should belong to, in the form of
- # `projects/{project}/datasets/{dataset}/tables/{table}`.
+ # Required. Parent table that all the streams should belong to, in the form
+ # of `projects/{project}/datasets/{dataset}/tables/{table}`.
  # @!attribute [rw] write_streams
  # @return [::Array<::String>]
  # Required. The group of streams that will be committed atomically.
@@ -29,12 +29,14 @@ module Google
  # `projects/{project_id}/locations/{location}/sessions/{session_id}`.
  # @!attribute [r] expire_time
  # @return [::Google::Protobuf::Timestamp]
- # Output only. Time at which the session becomes invalid. After this time, subsequent
- # requests to read this Session will return errors. The expire_time is
- # automatically assigned and currently cannot be specified or updated.
+ # Output only. Time at which the session becomes invalid. After this time,
+ # subsequent requests to read this Session will return errors. The
+ # expire_time is automatically assigned and currently cannot be specified or
+ # updated.
  # @!attribute [rw] data_format
  # @return [::Google::Cloud::Bigquery::Storage::V1::DataFormat]
- # Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not supported.
+ # Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not
+ # supported.
  # @!attribute [r] avro_schema
  # @return [::Google::Cloud::Bigquery::Storage::V1::AvroSchema]
  # Output only. Avro schema.
@@ -47,7 +49,8 @@ module Google
  # `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`
  # @!attribute [rw] table_modifiers
  # @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession::TableModifiers]
- # Optional. Any modifiers which are applied when reading from the specified table.
+ # Optional. Any modifiers which are applied when reading from the specified
+ # table.
  # @!attribute [rw] read_options
  # @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession::TableReadOptions]
  # Optional. Read options for this session (e.g. column selection, filters).
@@ -64,10 +67,15 @@ module Google
  # Output only. An estimate on the number of bytes this session will scan when
  # all streams are completely consumed. This estimate is based on
  # metadata from the table which might be incomplete or stale.
+ # @!attribute [r] estimated_row_count
+ # @return [::Integer]
+ # Output only. An estimate on the number of rows present in this session's
+ # streams. This estimate is based on metadata from the table which might be
+ # incomplete or stale.
  # @!attribute [rw] trace_id
  # @return [::String]
- # Optional. ID set by client to annotate a session identity. This does not need
- # to be strictly unique, but instead the same ID should be used to group
+ # Optional. ID set by client to annotate a session identity. This does not
+ # need to be strictly unique, but instead the same ID should be used to group
  # logically connected sessions (e.g. All using the same ID for all sessions
  # needed to complete a Spark SQL query is reasonable).
  #
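
The `estimated_row_count` field added above is the functional addition in this release; it is returned on the `ReadSession` created by the `BigQueryRead` client. A hedged sketch with placeholder resource names:

```ruby
require "google/cloud/bigquery/storage/v1"

# Create a read session and inspect the new row-count estimate.
# Project, dataset, and table names below are placeholders.
client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

session = client.create_read_session(
  parent: "projects/my-project",
  read_session: {
    table: "projects/my-project/datasets/my_dataset/tables/my_table",
    data_format: :AVRO
  },
  max_stream_count: 1
)

# New in 0.17.0: a row estimate alongside the existing byte estimate.
puts session.estimated_row_count
puts session.estimated_total_bytes_scanned
```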
@@ -181,8 +189,8 @@ module Google
  # Immutable. Type of the stream.
  # @!attribute [r] create_time
  # @return [::Google::Protobuf::Timestamp]
- # Output only. Create time of the stream. For the _default stream, this is the
- # creation_time of the table.
+ # Output only. Create time of the stream. For the _default stream, this is
+ # the creation_time of the table.
  # @!attribute [r] commit_time
  # @return [::Google::Protobuf::Timestamp]
  # Output only. Commit time of the stream.
@@ -47,7 +47,8 @@ module Google
  # Optional. The field mode. The default value is NULLABLE.
  # @!attribute [rw] fields
  # @return [::Array<::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema>]
- # Optional. Describes the nested schema fields if the type property is set to STRUCT.
+ # Optional. Describes the nested schema fields if the type property is set to
+ # STRUCT.
  # @!attribute [rw] description
  # @return [::String]
  # Optional. The field description. The maximum length is 1,024 characters.
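
For context, nested STRUCT columns are expressed by populating `fields` on the parent `TableFieldSchema`, as the doc comment above describes. A small illustrative sketch; the field names are made up:

```ruby
require "google/cloud/bigquery/storage/v1"

# A STRUCT field whose sub-fields live in `fields`.
address = Google::Cloud::Bigquery::Storage::V1::TableFieldSchema.new(
  name: "address",
  type: :STRUCT,
  mode: :NULLABLE,
  fields: [
    { name: "city", type: :STRING, mode: :NULLABLE },
    { name: "zip",  type: :STRING, mode: :NULLABLE }
  ]
)

schema = Google::Cloud::Bigquery::Storage::V1::TableSchema.new(fields: [address])
```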
@@ -28,12 +28,14 @@ module Google
  # [API Design Guide](https://cloud.google.com/apis/design/errors).
  # @!attribute [rw] code
  # @return [::Integer]
- # The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].
+ # The status code, which should be an enum value of
+ # [google.rpc.Code][google.rpc.Code].
  # @!attribute [rw] message
  # @return [::String]
  # A developer-facing error message, which should be in English. Any
  # user-facing error message should be localized and sent in the
- # {::Google::Rpc::Status#details google.rpc.Status.details} field, or localized by the client.
+ # {::Google::Rpc::Status#details google.rpc.Status.details} field, or localized
+ # by the client.
  # @!attribute [rw] details
  # @return [::Array<::Google::Protobuf::Any>]
  # A list of messages that carry the error details. There is a common set of
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: google-cloud-bigquery-storage-v1
  version: !ruby/object:Gem::Version
- version: 0.16.0
+ version: 0.17.0
  platform: ruby
  authors:
  - Google LLC
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-11-16 00:00:00.000000000 Z
+ date: 2022-12-14 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gapic-common