google-cloud-bigquery-storage-v1 0.1.3 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/AUTHENTICATION.md +6 -6
- data/README.md +1 -1
- data/lib/google/cloud/bigquery/storage/v1.rb +1 -1
- data/lib/google/cloud/bigquery/storage/v1/big_query_read.rb +1 -1
- data/lib/google/cloud/bigquery/storage/v1/big_query_read/client.rb +88 -88
- data/lib/google/cloud/bigquery/storage/v1/big_query_read/credentials.rb +1 -1
- data/lib/google/cloud/bigquery/storage/v1/big_query_read/paths.rb +11 -11
- data/lib/google/cloud/bigquery/storage/v1/version.rb +1 -1
- data/proto_docs/google/api/resource.rb +12 -12
- data/proto_docs/google/cloud/bigquery/storage/v1/arrow.rb +7 -7
- data/proto_docs/google/cloud/bigquery/storage/v1/avro.rb +7 -7
- data/proto_docs/google/cloud/bigquery/storage/v1/storage.rb +34 -34
- data/proto_docs/google/cloud/bigquery/storage/v1/stream.rb +21 -21
- data/proto_docs/google/protobuf/timestamp.rb +4 -4
- metadata +2 -2
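
The substantive change in this release is mechanical: the generated code and documentation now refer to every constant by its fully qualified, root-scoped name (`::Google::Cloud::Bigquery::Storage::V1::...` instead of `Google::Cloud::...`), and the documented configuration examples gain concrete timeout values. A minimal sketch of the documented usage after this change follows; the project, dataset, and table identifiers are placeholders, `:AVRO` assumes the `DataFormat` enum value, and passing plain Hashes relies on the request-coercion behavior described in the diffed client docs below.

```ruby
require "google/cloud/bigquery/storage/v1"

# Root-scoped constant reference, as used throughout the 0.1.4 documentation.
client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
  config.timeout = 10.0 # seconds, matching the updated Configuration examples
end

# "my-project", "my_dataset", and "my_table" are placeholder identifiers.
session = client.create_read_session(
  parent: "projects/my-project",
  read_session: {
    table: "projects/my-project/datasets/my_dataset/tables/my_table",
    data_format: :AVRO
  },
  max_stream_count: 1
)

# Each stream in the session can then be read with the server-streaming read_rows call.
session.streams.each do |stream|
  client.read_rows(read_stream: stream.name).each do |response|
    puts response.row_count
  end
end
```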
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 7b84c403757e584333803750cbf49460fb123cb8b70627d85992a93fe0f4e095
+data.tar.gz: 7788fa7a7327e87e6f1d7f4b945c52521fd2be7d1b32f2f069790e09ba9e7a05
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 3425cabdb828f4e27190b279ebd5dd7b49c468299468eb845f8ac1bc4b91430acaee5743a8209bf40f83cd88c67ec43b15df631dadc72aef8d9ac611dadee4fa
+data.tar.gz: 6c0cf67ab16da69dc2aa8fceb58acbc50480fa3037fc6e11bb6780c76d56936c2fb7144f654554710250a7bad06a6770fe10a9d99c7560bf6fd1f2c2eededa2b
data/AUTHENTICATION.md
CHANGED
@@ -27,7 +27,7 @@ export BIGQUERY_STORAGE_CREDENTIALS=path/to/keyfile.json
 ```ruby
 require "google/cloud/bigquery/storage/v1"

-client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 ```

 ## Credential Lookup
@@ -64,7 +64,7 @@ containers where writing files is difficult or not encouraged.

 The environment variables that google-cloud-bigquery-storage-v1
 checks for credentials are configured on the service Credentials class (such as
-{Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Credentials}):
+{::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Credentials}):

 1. `BIGQUERY_STORAGE_CREDENTIALS` - Path to JSON file, or JSON contents
 2. `BIGQUERY_STORAGE_KEYFILE` - Path to JSON file, or JSON contents
@@ -77,7 +77,7 @@ require "google/cloud/bigquery/storage/v1"

 ENV["BIGQUERY_STORAGE_CREDENTIALS"] = "path/to/keyfile.json"

-client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 ```

 ### Configuration
@@ -88,7 +88,7 @@ environment variables. Either on an individual client initialization:
 ```ruby
 require "google/cloud/bigquery/storage/v1"

-client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
+client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
 config.credentials = "path/to/keyfile.json"
 end
 ```
@@ -98,11 +98,11 @@ Or configured globally for all clients:
 ```ruby
 require "google/cloud/bigquery/storage/v1"

-Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
+::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
 config.credentials = "path/to/keyfile.json"
 end

-client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 ```

 ### Cloud SDK
data/README.md
CHANGED
@@ -25,7 +25,7 @@ In order to use this library, you first need to go through the following steps:
 ```ruby
 require "google/cloud/bigquery/storage/v1"

-client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 request = my_create_request
 response = client.create_read_session request
 ```
data/lib/google/cloud/bigquery/storage/v1.rb
CHANGED
@@ -27,7 +27,7 @@ module Google
 # To load this package, including all its services, and instantiate a client:
 #
 # require "google/cloud/bigquery/storage/v1"
-# client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+# client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 #
 module V1
 end
data/lib/google/cloud/bigquery/storage/v1/big_query_read.rb
CHANGED
@@ -39,7 +39,7 @@ module Google
 # To load this service and instantiate a client:
 #
 # require "google/cloud/bigquery/storage/v1/big_query_read"
-# client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+# client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 #
 module BigQueryRead
 end
data/lib/google/cloud/bigquery/storage/v1/big_query_read/client.rb
CHANGED
@@ -41,15 +41,15 @@ module Google
 ##
 # Configure the BigQueryRead Client class.
 #
-# See {Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration}
+# See {::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration}
 # for a description of the configuration fields.
 #
 # ## Example
 #
 # To modify the configuration for all BigQueryRead clients:
 #
-# Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
-# config.timeout =
+# ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
+# config.timeout = 10.0
 # end
 #
 # @yield [config] Configure the Client client.
@@ -105,7 +105,7 @@ module Google
 # but structural changes (adding new fields, etc.) are not allowed. Structural changes
 # should be made on {Client.configure}.
 #
-# See {Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration}
+# See {::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration}
 # for a description of the configuration fields.
 #
 # @yield [config] Configure the Client client.
@@ -126,13 +126,13 @@ module Google
 # To create a new BigQueryRead client with the default
 # configuration:
 #
-# client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+# client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 #
 # To create a new BigQueryRead client with a custom
 # configuration:
 #
-# client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
-# config.timeout =
+# client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
+# config.timeout = 10.0
 # end
 #
 # @yield [config] Configure the BigQueryRead client.
@@ -159,8 +159,8 @@ module Google
 end
 @quota_project_id = credentials.respond_to?(:quota_project_id) ? credentials.quota_project_id : nil

-@big_query_read_stub = Gapic::ServiceStub.new(
-Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Stub,
+@big_query_read_stub = ::Gapic::ServiceStub.new(
+::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Stub,
 credentials: credentials,
 endpoint: @config.endpoint,
 channel_args: @config.channel_args,
@@ -193,12 +193,12 @@ module Google
 #
 # @overload create_read_session(request, options = nil)
 # Pass arguments to `create_read_session` via a request object, either of type
-# {Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest} or an equivalent Hash.
+# {::Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest} or an equivalent Hash.
 #
-# @param request [Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest, Hash]
+# @param request [::Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest, ::Hash]
 # A request object representing the call parameters. Required. To specify no
 # parameters, or to keep all the default parameter values, pass an empty Hash.
-# @param options [Gapic::CallOptions, Hash]
+# @param options [::Gapic::CallOptions, ::Hash]
 # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
 #
 # @overload create_read_session(parent: nil, read_session: nil, max_stream_count: nil)
@@ -206,12 +206,12 @@ module Google
 # least one keyword argument is required. To specify no parameters, or to keep all
 # the default parameter values, pass an empty Hash as a request object (see above).
 #
-# @param parent [String]
+# @param parent [::String]
 # Required. The request project that owns the session, in the form of
 # `projects/{project_id}`.
-# @param read_session [Google::Cloud::Bigquery::Storage::V1::ReadSession, Hash]
+# @param read_session [::Google::Cloud::Bigquery::Storage::V1::ReadSession, ::Hash]
 # Required. Session to be created.
-# @param max_stream_count [Integer]
+# @param max_stream_count [::Integer]
 # Max initial number of streams. If unset or zero, the server will
 # provide a value of streams so as to produce reasonable throughput. Must be
 # non-negative. The number of streams may be lower than the requested number,
@@ -222,26 +222,26 @@ module Google
 # Streams must be read starting from offset 0.
 #
 # @yield [response, operation] Access the result along with the RPC operation
-# @yieldparam response [Google::Cloud::Bigquery::Storage::V1::ReadSession]
-# @yieldparam operation [GRPC::ActiveCall::Operation]
+# @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::ReadSession]
+# @yieldparam operation [::GRPC::ActiveCall::Operation]
 #
-# @return [Google::Cloud::Bigquery::Storage::V1::ReadSession]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession]
 #
-# @raise [Google::Cloud::Error] if the RPC is aborted.
+# @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
 def create_read_session request, options = nil
-raise ArgumentError, "request must be provided" if request.nil?
+raise ::ArgumentError, "request must be provided" if request.nil?

-request = Gapic::Protobuf.coerce request, to: Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest
+request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest

 # Converts hash and nil to an options object
-options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
+options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

 # Customize the options with defaults
 metadata = @config.rpcs.create_read_session.metadata.to_h

 # Set x-goog-api-client and x-goog-user-project headers
-metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
+metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
 lib_name: @config.lib_name, lib_version: @config.lib_version,
 gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
@@ -262,8 +262,8 @@ module Google
 yield response, operation if block_given?
 return response
 end
-rescue GRPC::BadStatus => e
-raise Google::Cloud::Error.from_error(e)
+rescue ::GRPC::BadStatus => e
+raise ::Google::Cloud::Error.from_error(e)
 end

 ##
@@ -277,12 +277,12 @@ module Google
 #
 # @overload read_rows(request, options = nil)
 # Pass arguments to `read_rows` via a request object, either of type
-# {Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest} or an equivalent Hash.
+# {::Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest} or an equivalent Hash.
 #
-# @param request [Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest, Hash]
+# @param request [::Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest, ::Hash]
 # A request object representing the call parameters. Required. To specify no
 # parameters, or to keep all the default parameter values, pass an empty Hash.
-# @param options [Gapic::CallOptions, Hash]
+# @param options [::Gapic::CallOptions, ::Hash]
 # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
 #
 # @overload read_rows(read_stream: nil, offset: nil)
@@ -290,34 +290,34 @@ module Google
 # least one keyword argument is required. To specify no parameters, or to keep all
 # the default parameter values, pass an empty Hash as a request object (see above).
 #
-# @param read_stream [String]
+# @param read_stream [::String]
 # Required. Stream to read rows from.
-# @param offset [Integer]
+# @param offset [::Integer]
 # The offset requested must be less than the last row read from Read.
 # Requesting a larger offset is undefined. If not specified, start reading
 # from offset zero.
 #
 # @yield [response, operation] Access the result along with the RPC operation
-# @yieldparam response [Enumerable
-# @yieldparam operation [GRPC::ActiveCall::Operation]
+# @yieldparam response [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::ReadRowsResponse>]
+# @yieldparam operation [::GRPC::ActiveCall::Operation]
 #
-# @return [Enumerable
+# @return [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::ReadRowsResponse>]
 #
-# @raise [Google::Cloud::Error] if the RPC is aborted.
+# @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
 def read_rows request, options = nil
-raise ArgumentError, "request must be provided" if request.nil?
+raise ::ArgumentError, "request must be provided" if request.nil?

-request = Gapic::Protobuf.coerce request, to: Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest
+request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest

 # Converts hash and nil to an options object
-options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
+options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

 # Customize the options with defaults
 metadata = @config.rpcs.read_rows.metadata.to_h

 # Set x-goog-api-client and x-goog-user-project headers
-metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
+metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
 lib_name: @config.lib_name, lib_version: @config.lib_version,
 gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
@@ -338,8 +338,8 @@ module Google
 yield response, operation if block_given?
 return response
 end
-rescue GRPC::BadStatus => e
-raise Google::Cloud::Error.from_error(e)
+rescue ::GRPC::BadStatus => e
+raise ::Google::Cloud::Error.from_error(e)
 end

 ##
@@ -358,12 +358,12 @@ module Google
 #
 # @overload split_read_stream(request, options = nil)
 # Pass arguments to `split_read_stream` via a request object, either of type
-# {Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest} or an equivalent Hash.
+# {::Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest} or an equivalent Hash.
 #
-# @param request [Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest, Hash]
+# @param request [::Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest, ::Hash]
 # A request object representing the call parameters. Required. To specify no
 # parameters, or to keep all the default parameter values, pass an empty Hash.
-# @param options [Gapic::CallOptions, Hash]
+# @param options [::Gapic::CallOptions, ::Hash]
 # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
 #
 # @overload split_read_stream(name: nil, fraction: nil)
@@ -371,9 +371,9 @@ module Google
 # least one keyword argument is required. To specify no parameters, or to keep all
 # the default parameter values, pass an empty Hash as a request object (see above).
 #
-# @param name [String]
+# @param name [::String]
 # Required. Name of the stream to split.
-# @param fraction [Float]
+# @param fraction [::Float]
 # A value in the range (0.0, 1.0) that specifies the fractional point at
 # which the original stream should be split. The actual split point is
 # evaluated on pre-filtered rows, so if a filter is provided, then there is
@@ -383,26 +383,26 @@ module Google
 # will always map to a data storage boundary on the server side.
 #
 # @yield [response, operation] Access the result along with the RPC operation
-# @yieldparam response [Google::Cloud::Bigquery::Storage::V1::SplitReadStreamResponse]
-# @yieldparam operation [GRPC::ActiveCall::Operation]
+# @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::SplitReadStreamResponse]
+# @yieldparam operation [::GRPC::ActiveCall::Operation]
 #
-# @return [Google::Cloud::Bigquery::Storage::V1::SplitReadStreamResponse]
+# @return [::Google::Cloud::Bigquery::Storage::V1::SplitReadStreamResponse]
 #
-# @raise [Google::Cloud::Error] if the RPC is aborted.
+# @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
 def split_read_stream request, options = nil
-raise ArgumentError, "request must be provided" if request.nil?
+raise ::ArgumentError, "request must be provided" if request.nil?

-request = Gapic::Protobuf.coerce request, to: Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest
+request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest

 # Converts hash and nil to an options object
-options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
+options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

 # Customize the options with defaults
 metadata = @config.rpcs.split_read_stream.metadata.to_h

 # Set x-goog-api-client and x-goog-user-project headers
-metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
+metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
 lib_name: @config.lib_name, lib_version: @config.lib_version,
 gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
@@ -423,8 +423,8 @@ module Google
 yield response, operation if block_given?
 return response
 end
-rescue GRPC::BadStatus => e
-raise Google::Cloud::Error.from_error(e)
+rescue ::GRPC::BadStatus => e
+raise ::Google::Cloud::Error.from_error(e)
 end

 ##
@@ -434,7 +434,7 @@ module Google
 # providing control over timeouts, retry behavior, logging, transport
 # parameters, and other low-level controls. Certain parameters can also be
 # applied individually to specific RPCs. See
-# {Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration::Rpcs}
+# {::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration::Rpcs}
 # for a list of RPCs that can be configured independently.
 #
 # Configuration can be applied globally to all clients, or to a single client
@@ -445,22 +445,22 @@ module Google
 # To modify the global config, setting the timeout for create_read_session
 # to 20 seconds, and all remaining timeouts to 10 seconds:
 #
-# Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
-# config.timeout =
-# config.rpcs.create_read_session.timeout =
+# ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
+# config.timeout = 10.0
+# config.rpcs.create_read_session.timeout = 20.0
 # end
 #
 # To apply the above configuration only to a new client:
 #
-# client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
-# config.timeout =
-# config.rpcs.create_read_session.timeout =
+# client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
+# config.timeout = 10.0
+# config.rpcs.create_read_session.timeout = 20.0
 # end
 #
 # @!attribute [rw] endpoint
 # The hostname or hostname:port of the service endpoint.
 # Defaults to `"bigquerystorage.googleapis.com"`.
-# @return [String]
+# @return [::String]
 # @!attribute [rw] credentials
 # Credentials to send with calls. You may provide any of the following types:
 # * (`String`) The path to a service account key file in JSON format
@@ -472,29 +472,29 @@ module Google
 # * (`GRPC::Core::Channel`) a gRPC channel with included credentials
 # * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object
 # * (`nil`) indicating no credentials
-# @return [Object]
+# @return [::Object]
 # @!attribute [rw] scope
 # The OAuth scopes
-# @return [Array
+# @return [::Array<::String>]
 # @!attribute [rw] lib_name
 # The library name as recorded in instrumentation and logging
-# @return [String]
+# @return [::String]
 # @!attribute [rw] lib_version
 # The library version as recorded in instrumentation and logging
-# @return [String]
+# @return [::String]
 # @!attribute [rw] channel_args
 # Extra parameters passed to the gRPC channel. Note: this is ignored if a
 # `GRPC::Core::Channel` object is provided as the credential.
-# @return [Hash]
+# @return [::Hash]
 # @!attribute [rw] interceptors
 # An array of interceptors that are run before calls are executed.
-# @return [Array
+# @return [::Array<::GRPC::ClientInterceptor>]
 # @!attribute [rw] timeout
-# The call timeout in
-# @return [Numeric]
+# The call timeout in seconds.
+# @return [::Numeric]
 # @!attribute [rw] metadata
 # Additional gRPC headers to be sent with the call.
-# @return [Hash{Symbol
+# @return [::Hash{::Symbol=>::String}]
 # @!attribute [rw] retry_policy
 # The retry policy. The value is a hash with the following keys:
 # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
@@ -502,10 +502,10 @@ module Google
 # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
 # * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
 # trigger a retry.
-# @return [Hash]
+# @return [::Hash]
 #
 class Configuration
-extend Gapic::Config
+extend ::Gapic::Config

 config_attr :endpoint, "bigquerystorage.googleapis.com", String
 config_attr :credentials, nil do |value|
@@ -513,14 +513,14 @@ module Google
 allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
 allowed.any? { |klass| klass === value }
 end
-config_attr :scope, nil, String, Array, nil
-config_attr :lib_name, nil, String, nil
-config_attr :lib_version, nil, String, nil
-config_attr(:channel_args, { "grpc.service_config_disable_resolution"=>1 }, Hash, nil)
-config_attr :interceptors, nil, Array, nil
-config_attr :timeout, nil, Numeric, nil
-config_attr :metadata, nil, Hash, nil
-config_attr :retry_policy, nil, Hash, Proc, nil
+config_attr :scope, nil, ::String, ::Array, nil
+config_attr :lib_name, nil, ::String, nil
+config_attr :lib_version, nil, ::String, nil
+config_attr(:channel_args, { "grpc.service_config_disable_resolution"=>1 }, ::Hash, nil)
+config_attr :interceptors, nil, ::Array, nil
+config_attr :timeout, nil, ::Numeric, nil
+config_attr :metadata, nil, ::Hash, nil
+config_attr :retry_policy, nil, ::Hash, Proc, nil

 # @private
 def initialize parent_config = nil
@@ -561,28 +561,28 @@ module Google
 class Rpcs
 ##
 # RPC-specific configuration for `create_read_session`
-# @return [Gapic::Config::Method]
+# @return [::Gapic::Config::Method]
 #
 attr_reader :create_read_session
 ##
 # RPC-specific configuration for `read_rows`
-# @return [Gapic::Config::Method]
+# @return [::Gapic::Config::Method]
 #
 attr_reader :read_rows
 ##
 # RPC-specific configuration for `split_read_stream`
-# @return [Gapic::Config::Method]
+# @return [::Gapic::Config::Method]
 #
 attr_reader :split_read_stream

 # @private
 def initialize parent_rpcs = nil
 create_read_session_config = parent_rpcs&.create_read_session if parent_rpcs&.respond_to? :create_read_session
-@create_read_session = Gapic::Config::Method.new create_read_session_config
+@create_read_session = ::Gapic::Config::Method.new create_read_session_config
 read_rows_config = parent_rpcs&.read_rows if parent_rpcs&.respond_to? :read_rows
-@read_rows = Gapic::Config::Method.new read_rows_config
+@read_rows = ::Gapic::Config::Method.new read_rows_config
 split_read_stream_config = parent_rpcs&.split_read_stream if parent_rpcs&.respond_to? :split_read_stream
-@split_read_stream = Gapic::Config::Method.new split_read_stream_config
+@split_read_stream = ::Gapic::Config::Method.new split_read_stream_config

 yield self if block_given?
 end
data/lib/google/cloud/bigquery/storage/v1/big_query_read/credentials.rb
CHANGED
@@ -25,7 +25,7 @@ module Google
 module V1
 module BigQueryRead
 # Credentials for the BigQueryRead API.
-class Credentials < Google::Auth::Credentials
+class Credentials < ::Google::Auth::Credentials
 self.scope = [
 "https://www.googleapis.com/auth/bigquery",
 "https://www.googleapis.com/auth/bigquery.readonly",
data/lib/google/cloud/bigquery/storage/v1/big_query_read/paths.rb
CHANGED
@@ -34,7 +34,7 @@ module Google
 #
 # @param project [String]
 #
-# @return [String]
+# @return [::String]
 def project_path project:
 "projects/#{project}"
 end
@@ -50,10 +50,10 @@ module Google
 # @param location [String]
 # @param session [String]
 #
-# @return [String]
+# @return [::String]
 def read_session_path project:, location:, session:
-raise ArgumentError, "project cannot contain /" if project.to_s.include? "/"
-raise ArgumentError, "location cannot contain /" if location.to_s.include? "/"
+raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"

 "projects/#{project}/locations/#{location}/sessions/#{session}"
 end
@@ -70,11 +70,11 @@ module Google
 # @param session [String]
 # @param stream [String]
 #
-# @return [String]
+# @return [::String]
 def read_stream_path project:, location:, session:, stream:
-raise ArgumentError, "project cannot contain /" if project.to_s.include? "/"
-raise ArgumentError, "location cannot contain /" if location.to_s.include? "/"
-raise ArgumentError, "session cannot contain /" if session.to_s.include? "/"
+raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
+raise ::ArgumentError, "session cannot contain /" if session.to_s.include? "/"

 "projects/#{project}/locations/#{location}/sessions/#{session}/streams/#{stream}"
 end
@@ -90,10 +90,10 @@ module Google
 # @param dataset [String]
 # @param table [String]
 #
-# @return [String]
+# @return [::String]
 def table_path project:, dataset:, table:
-raise ArgumentError, "project cannot contain /" if project.to_s.include? "/"
-raise ArgumentError, "dataset cannot contain /" if dataset.to_s.include? "/"
+raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+raise ::ArgumentError, "dataset cannot contain /" if dataset.to_s.include? "/"

 "projects/#{project}/datasets/#{dataset}/tables/#{table}"
 end
data/proto_docs/google/api/resource.rb
CHANGED
@@ -128,7 +128,7 @@ module Google
 # - pattern: "shelves/{shelf}"
 # parent_type: "cloudresourcemanager.googleapis.com/Folder"
 # @!attribute [rw] type
-# @return [String]
+# @return [::String]
 # The resource type. It must be in the format of
 # \\{service_name}/\\{resource_type_kind}. The `resource_type_kind` must be
 # singular and must not include version numbers.
@@ -140,7 +140,7 @@ module Google
 # should use PascalCase (UpperCamelCase). The maximum number of
 # characters allowed for the `resource_type_kind` is 100.
 # @!attribute [rw] pattern
-# @return [Array
+# @return [::Array<::String>]
 # Optional. The relative resource name pattern associated with this resource
 # type. The DNS prefix of the full resource name shouldn't be specified here.
 #
@@ -161,11 +161,11 @@ module Google
 # the same component name (e.g. "project") refers to IDs of the same
 # type of resource.
 # @!attribute [rw] name_field
-# @return [String]
+# @return [::String]
 # Optional. The field on the resource that designates the resource name
 # field. If omitted, this is assumed to be "name".
 # @!attribute [rw] history
-# @return [Google::Api::ResourceDescriptor::History]
+# @return [::Google::Api::ResourceDescriptor::History]
 # Optional. The historical or future-looking state of the resource pattern.
 #
 # Example:
@@ -182,19 +182,19 @@ module Google
 # };
 # }
 # @!attribute [rw] plural
-# @return [String]
+# @return [::String]
 # The plural name used in the resource name, such as 'projects' for
 # the name of 'projects/\\{project}'. It is the same concept of the `plural`
 # field in k8s CRD spec
 # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/
 # @!attribute [rw] singular
-# @return [String]
+# @return [::String]
 # The same concept of the `singular` field in k8s CRD spec
 # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/
 # Such as "project" for the `resourcemanager.googleapis.com/Project` type.
 class ResourceDescriptor
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods

 # A description of the historical or future-looking state of the
 # resource pattern.
@@ -216,7 +216,7 @@ module Google
 # Defines a proto annotation that describes a string field that refers to
 # an API resource.
 # @!attribute [rw] type
-# @return [String]
+# @return [::String]
 # The resource type that the annotated field references.
 #
 # Example:
@@ -227,7 +227,7 @@ module Google
 # }];
 # }
 # @!attribute [rw] child_type
-# @return [String]
+# @return [::String]
 # The resource type of a child collection that the annotated field
 # references. This is useful for annotating the `parent` field that
 # doesn't have a fixed resource type.
@@ -240,8 +240,8 @@ module Google
 # };
 # }
 class ResourceReference
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 end
 end
data/proto_docs/google/cloud/bigquery/storage/v1/arrow.rb
CHANGED
@@ -29,23 +29,23 @@ module Google
 #
 # See code samples on how this message can be deserialized.
 # @!attribute [rw] serialized_schema
-# @return [String]
+# @return [::String]
 # IPC serialized Arrow schema.
 class ArrowSchema
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Arrow RecordBatch.
 # @!attribute [rw] serialized_record_batch
-# @return [String]
+# @return [::String]
 # IPC-serialized Arrow RecordBatch.
 # @!attribute [rw] row_count
-# @return [Integer]
+# @return [::Integer]
 # The count of rows in `serialized_record_batch`.
 class ArrowRecordBatch
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 end
 end
data/proto_docs/google/cloud/bigquery/storage/v1/avro.rb
CHANGED
@@ -24,24 +24,24 @@ module Google
 module V1
 # Avro schema.
 # @!attribute [rw] schema
-# @return [String]
+# @return [::String]
 # Json serialized schema, as described at
 # https://avro.apache.org/docs/1.8.1/spec.html.
 class AvroSchema
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Avro rows.
 # @!attribute [rw] serialized_binary_rows
-# @return [String]
+# @return [::String]
 # Binary serialized rows in a block.
 # @!attribute [rw] row_count
-# @return [Integer]
+# @return [::Integer]
 # The count of rows in the returning block.
 class AvroRows
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 end
 end
data/proto_docs/google/cloud/bigquery/storage/v1/storage.rb
CHANGED
@@ -24,14 +24,14 @@ module Google
 module V1
 # Request message for `CreateReadSession`.
 # @!attribute [rw] parent
-# @return [String]
+# @return [::String]
 # Required. The request project that owns the session, in the form of
 # `projects/{project_id}`.
 # @!attribute [rw] read_session
-# @return [Google::Cloud::Bigquery::Storage::V1::ReadSession]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession]
 # Required. Session to be created.
 # @!attribute [rw] max_stream_count
-# @return [Integer]
+# @return [::Integer]
 # Max initial number of streams. If unset or zero, the server will
 # provide a value of streams so as to produce reasonable throughput. Must be
 # non-negative. The number of streams may be lower than the requested number,
@@ -41,44 +41,44 @@ module Google
 #
 # Streams must be read starting from offset 0.
 class CreateReadSessionRequest
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Request message for `ReadRows`.
 # @!attribute [rw] read_stream
-# @return [String]
+# @return [::String]
 # Required. Stream to read rows from.
 # @!attribute [rw] offset
-# @return [Integer]
+# @return [::Integer]
 # The offset requested must be less than the last row read from Read.
 # Requesting a larger offset is undefined. If not specified, start reading
 # from offset zero.
 class ReadRowsRequest
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Information on if the current connection is being throttled.
 # @!attribute [rw] throttle_percent
-# @return [Integer]
+# @return [::Integer]
 # How much this connection is being throttled. Zero means no throttling,
 # 100 means fully throttled.
 class ThrottleState
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Estimated stream statistics for a given Stream.
 # @!attribute [rw] progress
-# @return [Google::Cloud::Bigquery::Storage::V1::StreamStats::Progress]
+# @return [::Google::Cloud::Bigquery::Storage::V1::StreamStats::Progress]
 # Represents the progress of the current stream.
 class StreamStats
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods

 # @!attribute [rw] at_response_start
-# @return [Float]
+# @return [::Float]
 # The fraction of rows assigned to the stream that have been processed by
 # the server so far, not including the rows in the current response
 # message.
@@ -92,44 +92,44 @@ module Google
 # previous response may not necessarily be equal to the
 # `at_response_start` value of the current response.
 # @!attribute [rw] at_response_end
-# @return [Float]
+# @return [::Float]
 # Similar to `at_response_start`, except that this value includes the
 # rows in the current response.
 class Progress
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 end

 # Response from calling `ReadRows` may include row data, progress and
 # throttling information.
 # @!attribute [rw] avro_rows
-# @return [Google::Cloud::Bigquery::Storage::V1::AvroRows]
+# @return [::Google::Cloud::Bigquery::Storage::V1::AvroRows]
 # Serialized row data in AVRO format.
 # @!attribute [rw] arrow_record_batch
-# @return [Google::Cloud::Bigquery::Storage::V1::ArrowRecordBatch]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ArrowRecordBatch]
 # Serialized row data in Arrow RecordBatch format.
 # @!attribute [rw] row_count
-# @return [Integer]
+# @return [::Integer]
 # Number of serialized rows in the rows block.
 # @!attribute [rw] stats
-# @return [Google::Cloud::Bigquery::Storage::V1::StreamStats]
+# @return [::Google::Cloud::Bigquery::Storage::V1::StreamStats]
 # Statistics for the stream.
 # @!attribute [rw] throttle_state
-# @return [Google::Cloud::Bigquery::Storage::V1::ThrottleState]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ThrottleState]
 # Throttling state. If unset, the latest response still describes
 # the current throttling status.
 class ReadRowsResponse
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Request message for `SplitReadStream`.
 # @!attribute [rw] name
-# @return [String]
+# @return [::String]
 # Required. Name of the stream to split.
 # @!attribute [rw] fraction
-# @return [Float]
+# @return [::Float]
 # A value in the range (0.0, 1.0) that specifies the fractional point at
 # which the original stream should be split. The actual split point is
 # evaluated on pre-filtered rows, so if a filter is provided, then there is
@@ -138,23 +138,23 @@ module Google
 # server-side unit for assigning data is collections of rows, this fraction
 # will always map to a data storage boundary on the server side.
 class SplitReadStreamRequest
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Response message for `SplitReadStream`.
 # @!attribute [rw] primary_stream
-# @return [Google::Cloud::Bigquery::Storage::V1::ReadStream]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ReadStream]
 # Primary stream, which contains the beginning portion of
 # |original_stream|. An empty value indicates that the original stream can no
 # longer be split.
 # @!attribute [rw] remainder_stream
-# @return [Google::Cloud::Bigquery::Storage::V1::ReadStream]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ReadStream]
 # Remainder stream, which contains the tail of |original_stream|. An empty
 # value indicates that the original stream can no longer be split.
 class SplitReadStreamResponse
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 end
 end
data/proto_docs/google/cloud/bigquery/storage/v1/stream.rb
CHANGED
@@ -24,35 +24,35 @@ module Google
 module V1
 # Information about the ReadSession.
 # @!attribute [r] name
-# @return [String]
+# @return [::String]
 # Output only. Unique identifier for the session, in the form
 # `projects/{project_id}/locations/{location}/sessions/{session_id}`.
 # @!attribute [r] expire_time
-# @return [Google::Protobuf::Timestamp]
+# @return [::Google::Protobuf::Timestamp]
 # Output only. Time at which the session becomes invalid. After this time, subsequent
 # requests to read this Session will return errors. The expire_time is
 # automatically assigned and currently cannot be specified or updated.
 # @!attribute [rw] data_format
-# @return [Google::Cloud::Bigquery::Storage::V1::DataFormat]
+# @return [::Google::Cloud::Bigquery::Storage::V1::DataFormat]
 # Immutable. Data format of the output data.
 # @!attribute [r] avro_schema
-# @return [Google::Cloud::Bigquery::Storage::V1::AvroSchema]
+# @return [::Google::Cloud::Bigquery::Storage::V1::AvroSchema]
 # Output only. Avro schema.
 # @!attribute [r] arrow_schema
-# @return [Google::Cloud::Bigquery::Storage::V1::ArrowSchema]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ArrowSchema]
 # Output only. Arrow schema.
 # @!attribute [rw] table
-# @return [String]
+# @return [::String]
 # Immutable. Table that this ReadSession is reading from, in the form
 # `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`
 # @!attribute [rw] table_modifiers
-# @return [Google::Cloud::Bigquery::Storage::V1::ReadSession::TableModifiers]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession::TableModifiers]
 # Optional. Any modifiers which are applied when reading from the specified table.
 # @!attribute [rw] read_options
-# @return [Google::Cloud::Bigquery::Storage::V1::ReadSession::TableReadOptions]
+# @return [::Google::Cloud::Bigquery::Storage::V1::ReadSession::TableReadOptions]
 # Optional. Read options for this session (e.g. column selection, filters).
 # @!attribute [r] streams
-# @return [Array
+# @return [::Array<::Google::Cloud::Bigquery::Storage::V1::ReadStream>]
 # Output only. A list of streams created with the session.
 #
 # At least one stream is created with the session. In the future, larger
@@ -60,27 +60,27 @@ module Google
 # in that case, the user will need to use a List method to get the streams
 # instead, which is not yet available.
 class ReadSession
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods

 # Additional attributes when reading a table.
 # @!attribute [rw] snapshot_time
-# @return [Google::Protobuf::Timestamp]
+# @return [::Google::Protobuf::Timestamp]
 # The snapshot time of the table. If not set, interpreted as now.
 class TableModifiers
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Options dictating how we read a table.
 # @!attribute [rw] selected_fields
-# @return [Array
+# @return [::Array<::String>]
 # Names of the fields in the table that should be read. If empty, all
 # fields will be read. If the specified field is a nested field, all
 # the sub-fields in the field will be selected. The output field order is
 # unrelated to the order of fields in selected_fields.
 # @!attribute [rw] row_restriction
-# @return [String]
+# @return [::String]
 # SQL text filtering statement, similar to a WHERE clause in a query.
 # Aggregates are not supported.
 #
@@ -90,8 +90,8 @@ module Google
 # "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))"
 # "numeric_field BETWEEN 1.0 AND 5.0"
 class TableReadOptions
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 end

@@ -99,12 +99,12 @@ module Google
 # Most of the information about `ReadStream` instances is aggregated, making
 # `ReadStream` lightweight.
 # @!attribute [r] name
-# @return [String]
+# @return [::String]
 # Output only. Name of the stream, in the form
 # `projects/{project_id}/locations/{location}/sessions/{session_id}/streams/{stream_id}`.
 class ReadStream
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end

 # Data format for input or output data.
data/proto_docs/google/protobuf/timestamp.rb
CHANGED
@@ -102,19 +102,19 @@ module Google
 # http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
 # ) to obtain a formatter capable of generating timestamps in this format.
 # @!attribute [rw] seconds
-# @return [Integer]
+# @return [::Integer]
 # Represents seconds of UTC time since Unix epoch
 # 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
 # 9999-12-31T23:59:59Z inclusive.
 # @!attribute [rw] nanos
-# @return [Integer]
+# @return [::Integer]
 # Non-negative fractions of a second at nanosecond resolution. Negative
 # second values with fractions must still have non-negative nanos values
 # that count forward in time. Must be from 0 to 999,999,999
 # inclusive.
 class Timestamp
-include Google::Protobuf::MessageExts
-extend Google::Protobuf::MessageExts::ClassMethods
+include ::Google::Protobuf::MessageExts
+extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: google-cloud-bigquery-storage-v1
 version: !ruby/object:Gem::Version
-version: 0.1.3
+version: 0.1.4
 platform: ruby
 authors:
 - Google LLC
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-
+date: 2020-05-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: gapic-common