google-cloud-bigquery-storage-v1 0.16.0 → 0.18.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 3ba278f72eb260018a4fa28c33c050f3c101994e3bd1ac72ec0fb91fb1a417ad
- data.tar.gz: 58ceab94ac0e90fa8f3197b3853472778f133b08eeabab01674e35c07e44cfec
+ metadata.gz: c754ac4db39e1112d5d4bc913247f32e7485c1a61fcaa8c648854d901e7dbebc
+ data.tar.gz: 17313c366341a1f7268c44e0600a62a99c1d462664a3a3b4b53986854e42c88c
  SHA512:
- metadata.gz: adb7300aa88a8ea63a809183d017e3d14b7f4ba3c51b066abdf838c112080c2718a0634760d39d20ef8f084850f9572abb3971a7217aa1cb703f277e5f63e7da
- data.tar.gz: 1112e6845a4f4c2fa78336e582c4783b420a7e9afe9746c36977310152f16a8b450f996393da2d77b4c9ed311a89214a6ff262e710231aa50afaf714020890f9
+ metadata.gz: 6d840cc71fb207f1baab314707ee42a60183501b335695b1517f0da99f7a9164ff5f6f3b014bff79f113c95aae422b5537f5bc9aab22ad7d6a3e9c704a163646
+ data.tar.gz: 3d2b217346af0d50d23a79839f9eff981d5cac8c29ef16a5210f38f33ddee70819017a3a1c8a0b053cdb33806ae153d2df5f3ad89a7f0e77a97a2f2b55e69f95
data/README.md CHANGED
@@ -46,7 +46,7 @@ for general usage information.
  ## Enabling Logging
 
  To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library.
- The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/stdlib/libdoc/logger/rdoc/Logger.html) as shown below,
+ The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/current/stdlibs/logger/Logger.html) as shown below,
  or a [`Google::Cloud::Logging::Logger`](https://googleapis.dev/ruby/google-cloud-logging/latest)
  that will write logs to [Cloud Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb)
  and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information.
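The stdlib `Logger` pattern this README passage refers to looks roughly like the sketch below. The module name `MyLogger` and the log level are arbitrary choices; the key point is defining a `GRPC.logger` method before `grpc/logconfig.rb` installs its default no-op logger.

```ruby
require "logger"

# An arbitrary module that exposes a #logger method for gRPC to call.
module MyLogger
  LOGGER = Logger.new $stderr, level: Logger::WARN
  def logger
    LOGGER
  end
end

# Give the GRPC module a logger method before grpc/logconfig.rb loads.
module GRPC
  extend MyLogger
end
```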
@@ -36,7 +36,7 @@ module Google
  #
  # The Read API can be used to read data from BigQuery.
  #
- # To load this service and instantiate a client:
+ # @example Load this service and instantiate a gRPC client
  #
  # require "google/cloud/bigquery/storage/v1/big_query_read"
  # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
@@ -581,8 +581,8 @@ module Google
  # the default parameter values, pass an empty Hash as a request object (see above).
  #
  # @param parent [::String]
- # Required. Parent table that all the streams should belong to, in the form of
- # `projects/{project}/datasets/{dataset}/tables/{table}`.
+ # Required. Parent table that all the streams should belong to, in the form
+ # of `projects/{project}/datasets/{dataset}/tables/{table}`.
  # @param write_streams [::Array<::String>]
  # Required. The group of streams that will be committed atomically.
  #
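For context, this hunk rewraps the `batch_commit_write_streams` parameter docs. A minimal call sketch, assuming placeholder project, dataset, table, and stream names:

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new

# "my-project", "my_dataset", "my_table" and the stream name are placeholders.
parent = "projects/my-project/datasets/my_dataset/tables/my_table"

response = client.batch_commit_write_streams(
  parent:        parent,
  write_streams: ["#{parent}/streams/my-pending-stream"]
)

# The response carries the commit time and any per-stream errors.
puts response.commit_time
```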
@@ -39,7 +39,7 @@ module Google
  # For supplementary information about the Write API, see:
  # https://cloud.google.com/bigquery/docs/write-api
  #
- # To load this service and instantiate a client:
+ # @example Load this service and instantiate a gRPC client
  #
  # require "google/cloud/bigquery/storage/v1/big_query_write"
  # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
@@ -21,6 +21,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :read_options, :message, 8, "google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"
  repeated :streams, :message, 10, "google.cloud.bigquery.storage.v1.ReadStream"
  optional :estimated_total_bytes_scanned, :int64, 12
+ optional :estimated_row_count, :int64, 14
  optional :trace_id, :string, 13
  oneof :schema do
  optional :avro_schema, :message, 4, "google.cloud.bigquery.storage.v1.AvroSchema"
@@ -19,6 +19,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :max_length, :int64, 7
  optional :precision, :int64, 8
  optional :scale, :int64, 9
+ optional :default_value_expression, :string, 10
  end
  add_enum "google.cloud.bigquery.storage.v1.TableFieldSchema.Type" do
  value :TYPE_UNSPECIFIED, 0
@@ -22,7 +22,7 @@ module Google
  module Bigquery
  module Storage
  module V1
- VERSION = "0.16.0"
+ VERSION = "0.18.0"
  end
  end
  end
@@ -25,9 +25,9 @@ module Google
  module Bigquery
  module Storage
  ##
- # To load this package, including all its services, and instantiate a client:
+ # API client module.
  #
- # @example
+ # @example Load this package, including all its services, and instantiate a gRPC client
  #
  # require "google/cloud/bigquery/storage/v1"
  # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
@@ -198,10 +198,10 @@ module Google
  # Requests larger than this return an error, typically `INVALID_ARGUMENT`.
  # @!attribute [rw] write_stream
  # @return [::String]
- # Required. The write_stream identifies the target of the append operation, and only
- # needs to be specified as part of the first request on the gRPC connection.
- # If provided for subsequent requests, it must match the value of the first
- # request.
+ # Required. The write_stream identifies the target of the append operation,
+ # and only needs to be specified as part of the first request on the gRPC
+ # connection. If provided for subsequent requests, it must match the value of
+ # the first request.
  #
  # For explicitly created write streams, the format is:
  #
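The rewrapped comment above describes the bidi-streaming `append_rows` call, where `write_stream` only needs to appear in the first request on a connection. A rough sketch of that shape, with a placeholder stream name and a hypothetical `build_proto_data` helper (row payload construction omitted):

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new

# Placeholder stream name; build_proto_data, first_rows and more_rows stand in
# for application code that wraps serialized rows in a ProtoData message.
stream = "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default"

requests = [
  # Only the first request on the connection must name the write_stream.
  { write_stream: stream, proto_rows: build_proto_data(first_rows) },
  # Later requests on the same connection may omit it or repeat the same value.
  { proto_rows: build_proto_data(more_rows) }
]

# append_rows takes an Enumerable of requests and yields streamed responses.
client.append_rows(requests).each do |response|
  puts response.append_result&.offset
end
```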
@@ -364,8 +364,8 @@ module Google
  # Request message for `BatchCommitWriteStreams`.
  # @!attribute [rw] parent
  # @return [::String]
- # Required. Parent table that all the streams should belong to, in the form of
- # `projects/{project}/datasets/{dataset}/tables/{table}`.
+ # Required. Parent table that all the streams should belong to, in the form
+ # of `projects/{project}/datasets/{dataset}/tables/{table}`.
  # @!attribute [rw] write_streams
  # @return [::Array<::String>]
  # Required. The group of streams that will be committed atomically.
@@ -29,12 +29,14 @@ module Google
  # `projects/{project_id}/locations/{location}/sessions/{session_id}`.
  # @!attribute [r] expire_time
  # @return [::Google::Protobuf::Timestamp]
- # Output only. Time at which the session becomes invalid. After this time, subsequent
- # requests to read this Session will return errors. The expire_time is
- # automatically assigned and currently cannot be specified or updated.
+ # Output only. Time at which the session becomes invalid. After this time,
+ # subsequent requests to read this Session will return errors. The
+ # expire_time is automatically assigned and currently cannot be specified or
+ # updated.
  # @!attribute [rw] data_format
  # @return [::Google::Cloud::Bigquery::Storage::V1::DataFormat]
- # Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not supported.
+ # Immutable. Data format of the output data. DATA_FORMAT_UNSPECIFIED not
+ # supported.
  # @!attribute [r] avro_schema
  # @return [::Google::Cloud::Bigquery::Storage::V1::AvroSchema]
  # Output only. Avro schema.
@@ -47,7 +49,8 @@ module Google
  # `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`
  # @!attribute [rw] table_modifiers
  # @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession::TableModifiers]
- # Optional. Any modifiers which are applied when reading from the specified table.
+ # Optional. Any modifiers which are applied when reading from the specified
+ # table.
  # @!attribute [rw] read_options
  # @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession::TableReadOptions]
  # Optional. Read options for this session (e.g. column selection, filters).
@@ -64,10 +67,15 @@ module Google
  # Output only. An estimate on the number of bytes this session will scan when
  # all streams are completely consumed. This estimate is based on
  # metadata from the table which might be incomplete or stale.
+ # @!attribute [r] estimated_row_count
+ # @return [::Integer]
+ # Output only. An estimate on the number of rows present in this session's
+ # streams. This estimate is based on metadata from the table which might be
+ # incomplete or stale.
  # @!attribute [rw] trace_id
  # @return [::String]
- # Optional. ID set by client to annotate a session identity. This does not need
- # to be strictly unique, but instead the same ID should be used to group
+ # Optional. ID set by client to annotate a session identity. This does not
+ # need to be strictly unique, but instead the same ID should be used to group
  # logically connected sessions (e.g. All using the same ID for all sessions
  # needed to complete a Spark SQL query is reasonable).
  #
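This hunk documents the new output-only `estimated_row_count` field on `ReadSession`. A minimal sketch of reading it after creating a session, assuming placeholder project, dataset, and table names:

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

# Project, dataset, and table identifiers are placeholders.
session = client.create_read_session(
  parent: "projects/my-project",
  read_session: {
    table:       "projects/my-project/datasets/my_dataset/tables/my_table",
    data_format: :AVRO
  },
  max_stream_count: 1
)

# Both estimates come from table metadata and may be incomplete or stale.
puts session.estimated_total_bytes_scanned
puts session.estimated_row_count
```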
@@ -181,8 +189,8 @@ module Google
  # Immutable. Type of the stream.
  # @!attribute [r] create_time
  # @return [::Google::Protobuf::Timestamp]
- # Output only. Create time of the stream. For the _default stream, this is the
- # creation_time of the table.
+ # Output only. Create time of the stream. For the _default stream, this is
+ # the creation_time of the table.
  # @!attribute [r] commit_time
  # @return [::Google::Protobuf::Timestamp]
  # Output only. Commit time of the stream.
@@ -47,7 +47,8 @@ module Google
  # Optional. The field mode. The default value is NULLABLE.
  # @!attribute [rw] fields
  # @return [::Array<::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema>]
- # Optional. Describes the nested schema fields if the type property is set to STRUCT.
+ # Optional. Describes the nested schema fields if the type property is set to
+ # STRUCT.
  # @!attribute [rw] description
  # @return [::String]
  # Optional. The field description. The maximum length is 1,024 characters.
@@ -102,6 +103,10 @@ module Google
  # @!attribute [rw] scale
  # @return [::Integer]
  # Optional. See documentation for precision.
+ # @!attribute [rw] default_value_expression
+ # @return [::String]
+ # Optional. A SQL expression to specify the [default value]
+ # (https://cloud.google.com/bigquery/docs/default-values) for this field.
  class TableFieldSchema
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
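This hunk adds the `default_value_expression` attribute to `TableFieldSchema`. A small sketch of constructing a field with it; the field name, type, and expression are illustrative values only:

```ruby
require "google/cloud/bigquery/storage/v1"

# Illustrative schema field: a nullable TIMESTAMP column whose default is
# computed by a SQL expression when no value is supplied.
field = ::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema.new(
  name:                     "created_at",
  type:                     :TIMESTAMP,
  mode:                     :NULLABLE,
  default_value_expression: "CURRENT_TIMESTAMP()"
)

puts field.default_value_expression
```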
@@ -28,12 +28,14 @@ module Google
  # [API Design Guide](https://cloud.google.com/apis/design/errors).
  # @!attribute [rw] code
  # @return [::Integer]
- # The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code].
+ # The status code, which should be an enum value of
+ # [google.rpc.Code][google.rpc.Code].
  # @!attribute [rw] message
  # @return [::String]
  # A developer-facing error message, which should be in English. Any
  # user-facing error message should be localized and sent in the
- # {::Google::Rpc::Status#details google.rpc.Status.details} field, or localized by the client.
+ # {::Google::Rpc::Status#details google.rpc.Status.details} field, or localized
+ # by the client.
  # @!attribute [rw] details
  # @return [::Array<::Google::Protobuf::Any>]
  # A list of messages that carry the error details. There is a common set of
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: google-cloud-bigquery-storage-v1
  version: !ruby/object:Gem::Version
- version: 0.16.0
+ version: 0.18.0
  platform: ruby
  authors:
  - Google LLC
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-11-16 00:00:00.000000000 Z
+ date: 2023-02-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gapic-common
@@ -16,7 +16,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0.12'
+ version: 0.16.0
  - - "<"
  - !ruby/object:Gem::Version
  version: 2.a
@@ -26,7 +26,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0.12'
+ version: 0.16.0
  - - "<"
  - !ruby/object:Gem::Version
  version: 2.a
@@ -223,7 +223,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.3.14
+ rubygems_version: 3.4.2
  signing_key:
  specification_version: 4
  summary: API Client library for the BigQuery Storage V1 API