google-cloud-bigquery-storage-v1 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,29 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/bigquery/storage/v1/avro.proto

require 'google/protobuf'

# Register the Avro message descriptors for the BigQuery Storage v1 API
# in the shared generated descriptor pool, so the message classes can be
# resolved by full name below.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/bigquery/storage/v1/avro.proto", :syntax => :proto3) do
    add_message "google.cloud.bigquery.storage.v1.AvroSchema" do
      optional :schema, :string, 1
    end
    add_message "google.cloud.bigquery.storage.v1.AvroRows" do
      optional :serialized_binary_rows, :bytes, 1
      optional :row_count, :int64, 2
    end
  end
end

module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          # Resolve the descriptors registered above into Ruby message classes.
          pool = ::Google::Protobuf::DescriptorPool.generated_pool
          AvroSchema = pool.lookup("google.cloud.bigquery.storage.v1.AvroSchema").msgclass
          AvroRows = pool.lookup("google.cloud.bigquery.storage.v1.AvroRows").msgclass
        end
      end
    end
  end
end
@@ -0,0 +1,20 @@
# frozen_string_literal: true

# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!

# Entry point for the BigQueryRead service: loads the gRPC client class
# and the credentials helper used to authenticate it.
require "google/cloud/bigquery/storage/v1/big_query_read/client"
require "google/cloud/bigquery/storage/v1/big_query_read/credentials"
@@ -0,0 +1,628 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2020 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+ require "gapic/common"
20
+ require "gapic/config"
21
+ require "gapic/config/method"
22
+
23
+ require "google/cloud/errors"
24
+ require "google/cloud/bigquery/storage/v1/version"
25
+ require "google/cloud/bigquery/storage/v1/storage_pb"
26
+ require "google/cloud/bigquery/storage/v1/big_query_read/credentials"
27
+ require "google/cloud/bigquery/storage/v1/big_query_read/paths"
28
+
29
+ module Google
30
+ module Cloud
31
+ module Bigquery
32
+ module Storage
33
+ module V1
34
+ module BigQueryRead
35
+ ##
36
+ # Client for the BigQueryRead service.
37
+ #
38
+ # BigQuery Read API.
39
+ #
40
+ # The Read API can be used to read data from BigQuery.
41
+ #
42
+ class Client
43
+ include Paths
44
+
45
+ # @private
46
+ attr_reader :big_query_read_stub
47
+
48
+ ##
49
+ # Configure the BigQueryRead Client class.
50
+ #
51
+ # See {Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration}
52
+ # for a description of the configuration fields.
53
+ #
54
+ # ## Example
55
+ #
56
+ # To modify the configuration for all BigQueryRead clients:
57
+ #
58
+ # Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
59
+ # config.timeout = 10_000
60
+ # end
61
+ #
62
+ # @yield [config] Configure the Client client.
63
+ # @yieldparam config [Client::Configuration]
64
+ #
65
+ # @return [Client::Configuration]
66
+ #
67
def self.configure
  @configure ||= begin
    # Inherit defaults from the nearest enclosing namespace that exposes a
    # +configure+ method (e.g. a higher-level wrapper gem), walking the
    # namespace outward until one is found or the list is exhausted.
    namespace = ["Google", "Cloud", "Bigquery", "Storage", "V1"]
    parent_config = nil
    until namespace.empty?
      candidate = const_get namespace.join("::")
      if candidate&.respond_to? :configure
        parent_config = candidate.configure
        break
      end
      namespace.pop
    end
    default_config = Client::Configuration.new parent_config

    # Per-RPC default timeouts and retry policies for the BigQuery
    # Storage Read API.
    default_config.rpcs.create_read_session.timeout = 600.0
    default_config.rpcs.create_read_session.retry_policy = {
      initial_delay: 0.1,
      max_delay: 60.0,
      multiplier: 1.3,
      retry_codes: ["DEADLINE_EXCEEDED", "UNAVAILABLE"]
    }

    # read_rows is a long-lived server stream; allow up to a full day.
    default_config.rpcs.read_rows.timeout = 86_400.0
    default_config.rpcs.read_rows.retry_policy = {
      initial_delay: 0.1,
      max_delay: 60.0,
      multiplier: 1.3,
      retry_codes: ["UNAVAILABLE"]
    }

    default_config.rpcs.split_read_stream.timeout = 600.0
    default_config.rpcs.split_read_stream.retry_policy = {
      initial_delay: 0.1,
      max_delay: 60.0,
      multiplier: 1.3,
      retry_codes: ["DEADLINE_EXCEEDED", "UNAVAILABLE"]
    }

    default_config
  end
  yield @configure if block_given?
  @configure
end
107
+
108
+ ##
109
+ # Configure the BigQueryRead Client instance.
110
+ #
111
+ # The configuration is set to the derived mode, meaning that values can be changed,
112
+ # but structural changes (adding new fields, etc.) are not allowed. Structural changes
113
+ # should be made on {Client.configure}.
114
+ #
115
+ # See {Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration}
116
+ # for a description of the configuration fields.
117
+ #
118
+ # @yield [config] Configure the Client client.
119
+ # @yieldparam config [Client::Configuration]
120
+ #
121
+ # @return [Client::Configuration]
122
+ #
123
def configure
  # Hand this instance's configuration to the caller's block (if any),
  # then return it so settings can also be adjusted directly.
  @config.tap { |c| yield c if block_given? }
end
127
+
128
+ ##
129
+ # Create a new BigQueryRead client object.
130
+ #
131
+ # ## Examples
132
+ #
133
+ # To create a new BigQueryRead client with the default
134
+ # configuration:
135
+ #
136
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
137
+ #
138
+ # To create a new BigQueryRead client with a custom
139
+ # configuration:
140
+ #
141
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
142
+ # config.timeout = 10_000
143
+ # end
144
+ #
145
+ # @yield [config] Configure the BigQueryRead client.
146
+ # @yieldparam config [Client::Configuration]
147
+ #
148
def initialize
  # gRPC is loaded lazily, here rather than at file load time, so that
  # requiring this file does not pull in the grpc native extension.
  # See https://github.com/googleapis/toolkit/issues/446
  require "gapic/grpc"
  require "google/cloud/bigquery/storage/v1/storage_services_pb"

  # Derive this instance's configuration from the class-level defaults
  # and let the caller customize it.
  @config = Configuration.new Client.configure
  yield @config if block_given?

  # Resolve credentials: an explicitly configured object wins; otherwise
  # fall back to application default credentials. Keyfile paths (String)
  # and keyfile contents (Hash) are coerced into a Credentials object.
  creds = @config.credentials
  creds ||= Credentials.default scope: @config.scope
  creds = Credentials.new creds, scope: @config.scope if creds.is_a?(String) || creds.is_a?(Hash)

  # Billing project for the x-goog-user-project header, when available.
  @quota_project_id = creds.respond_to?(:quota_project_id) ? creds.quota_project_id : nil

  @big_query_read_stub = Gapic::ServiceStub.new(
    Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Stub,
    credentials: creds,
    endpoint: @config.endpoint,
    channel_args: @config.channel_args,
    interceptors: @config.interceptors
  )
end
177
+
178
+ # Service calls
179
+
180
+ ##
181
+ # Creates a new read session. A read session divides the contents of a
182
+ # BigQuery table into one or more streams, which can then be used to read
183
+ # data from the table. The read session also specifies properties of the
184
+ # data to be read, such as a list of columns or a push-down filter describing
185
+ # the rows to be returned.
186
+ #
187
+ # A particular row can be read by at most one stream. When the caller has
188
+ # reached the end of each stream in the session, then all the data in the
189
+ # table has been read.
190
+ #
191
+ # Data is assigned to each stream such that roughly the same number of
192
+ # rows can be read from each stream. Because the server-side unit for
193
+ # assigning data is collections of rows, the API does not guarantee that
194
+ # each stream will return the same number or rows. Additionally, the
195
+ # limits are enforced based on the number of pre-filtered rows, so some
196
+ # filters can lead to lopsided assignments.
197
+ #
198
+ # Read sessions automatically expire 24 hours after they are created and do
199
+ # not require manual clean-up by the caller.
200
+ #
201
+ # @overload create_read_session(request, options = nil)
202
+ # @param request [Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest | Hash]
203
+ # Creates a new read session. A read session divides the contents of a
204
+ # BigQuery table into one or more streams, which can then be used to read
205
+ # data from the table. The read session also specifies properties of the
206
+ # data to be read, such as a list of columns or a push-down filter describing
207
+ # the rows to be returned.
208
+ #
209
+ # A particular row can be read by at most one stream. When the caller has
210
+ # reached the end of each stream in the session, then all the data in the
211
+ # table has been read.
212
+ #
213
+ # Data is assigned to each stream such that roughly the same number of
214
+ # rows can be read from each stream. Because the server-side unit for
215
+ # assigning data is collections of rows, the API does not guarantee that
216
+ # each stream will return the same number or rows. Additionally, the
217
+ # limits are enforced based on the number of pre-filtered rows, so some
218
+ # filters can lead to lopsided assignments.
219
+ #
220
+ # Read sessions automatically expire 24 hours after they are created and do
221
+ # not require manual clean-up by the caller.
222
+ # @param options [Gapic::CallOptions, Hash]
223
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
224
+ #
225
+ # @overload create_read_session(parent: nil, read_session: nil, max_stream_count: nil)
226
+ # @param parent [String]
227
+ # Required. The request project that owns the session, in the form of
228
+ # `projects/{project_id}`.
229
+ # @param read_session [Google::Cloud::Bigquery::Storage::V1::ReadSession | Hash]
230
+ # Required. Session to be created.
231
+ # @param max_stream_count [Integer]
232
+ # Max initial number of streams. If unset or zero, the server will
233
+ # provide a value of streams so as to produce reasonable throughput. Must be
234
+ # non-negative. The number of streams may be lower than the requested number,
235
+ # depending on the amount parallelism that is reasonable for the table. Error
236
+ # will be returned if the max count is greater than the current system
237
+ # max limit of 1,000.
238
+ #
239
+ # Streams must be read starting from offset 0.
240
+ #
241
+ #
242
+ # @yield [response, operation] Access the result along with the RPC operation
243
+ # @yieldparam response [Google::Cloud::Bigquery::Storage::V1::ReadSession]
244
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
245
+ #
246
+ # @return [Google::Cloud::Bigquery::Storage::V1::ReadSession]
247
+ #
248
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
249
+ #
250
# Creates a read session for the given table and returns it, after
# coercing the request, applying per-RPC defaults, and attaching the
# standard client-identification, billing-project, and routing headers.
def create_read_session request, options = nil
  raise ArgumentError, "request must be provided" if request.nil?

  request = Gapic::Protobuf.coerce request, to: Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest

  # Converts hash and nil to an options object
  options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Customize the options with defaults
  metadata = @config.rpcs.create_read_session.metadata.to_h

  # Set x-goog-api-client and x-goog-user-project headers
  metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header. Guard against a request without a
  # read_session: the original unconditional `request.read_session.table`
  # raised NoMethodError on nil before the RPC was even attempted, hiding
  # the server's INVALID_ARGUMENT response from the caller.
  header_params = {}
  header_params["read_session.table"] = request.read_session.table if request.read_session&.table
  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
  metadata[:"x-goog-request-params"] ||= request_params_header

  options.apply_defaults timeout: @config.rpcs.create_read_session.timeout,
                         metadata: metadata,
                         retry_policy: @config.rpcs.create_read_session.retry_policy
  options.apply_defaults metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_read_stub.call_rpc :create_read_session, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue GRPC::BadStatus => e
  raise Google::Cloud::Error.from_error(e)
end
286
+
287
+ ##
288
+ # Reads rows from the stream in the format prescribed by the ReadSession.
289
+ # Each response contains one or more table rows, up to a maximum of 100 MiB
290
+ # per response; read requests which attempt to read individual rows larger
291
+ # than 100 MiB will fail.
292
+ #
293
+ # Each request also returns a set of stream statistics reflecting the current
294
+ # state of the stream.
295
+ #
296
+ # @overload read_rows(request, options = nil)
297
+ # @param request [Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest | Hash]
298
+ # Reads rows from the stream in the format prescribed by the ReadSession.
299
+ # Each response contains one or more table rows, up to a maximum of 100 MiB
300
+ # per response; read requests which attempt to read individual rows larger
301
+ # than 100 MiB will fail.
302
+ #
303
+ # Each request also returns a set of stream statistics reflecting the current
304
+ # state of the stream.
305
+ # @param options [Gapic::CallOptions, Hash]
306
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
307
+ #
308
+ # @overload read_rows(read_stream: nil, offset: nil)
309
+ # @param read_stream [String]
310
+ # Required. Stream to read rows from.
311
+ # @param offset [Integer]
312
+ # The offset requested must be less than the last row read from Read.
313
+ # Requesting a larger offset is undefined. If not specified, start reading
314
+ # from offset zero.
315
+ #
316
+ #
317
+ # @yield [response, operation] Access the result along with the RPC operation
318
+ # @yieldparam response [Enumerable<Google::Cloud::Bigquery::Storage::V1::ReadRowsResponse>]
319
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
320
+ #
321
+ # @return [Enumerable<Google::Cloud::Bigquery::Storage::V1::ReadRowsResponse>]
322
+ #
323
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
324
+ #
325
# Streams rows from the given read stream, returning the enumerable of
# ReadRowsResponse messages produced by the server-streaming RPC.
def read_rows request, options = nil
  raise ArgumentError, "request must be provided" if request.nil?

  request = Gapic::Protobuf.coerce request, to: Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest

  # Normalize a hash (or nil) into a CallOptions object.
  options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the RPC-specific default metadata, then layer on the
  # standard client-identification and billing-project headers.
  metadata = @config.rpcs.read_rows.metadata.to_h
  metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Implicit routing header derived from the request's stream name.
  routing = { "read_stream" => request.read_stream }
  metadata[:"x-goog-request-params"] ||= routing.map { |k, v| "#{k}=#{v}" }.join("&")

  options.apply_defaults timeout: @config.rpcs.read_rows.timeout,
                         metadata: metadata,
                         retry_policy: @config.rpcs.read_rows.retry_policy
  options.apply_defaults metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_read_stub.call_rpc :read_rows, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue GRPC::BadStatus => e
  raise Google::Cloud::Error.from_error(e)
end
361
+
362
+ ##
363
+ # Splits a given `ReadStream` into two `ReadStream` objects. These
364
+ # `ReadStream` objects are referred to as the primary and the residual
365
+ # streams of the split. The original `ReadStream` can still be read from in
366
+ # the same manner as before. Both of the returned `ReadStream` objects can
367
+ # also be read from, and the rows returned by both child streams will be
368
+ # the same as the rows read from the original stream.
369
+ #
370
+ # Moreover, the two child streams will be allocated back-to-back in the
371
+ # original `ReadStream`. Concretely, it is guaranteed that for streams
372
+ # original, primary, and residual, that original[0-j] = primary[0-j] and
373
+ # original[j-n] = residual[0-m] once the streams have been read to
374
+ # completion.
375
+ #
376
+ # @overload split_read_stream(request, options = nil)
377
+ # @param request [Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest | Hash]
378
+ # Splits a given `ReadStream` into two `ReadStream` objects. These
379
+ # `ReadStream` objects are referred to as the primary and the residual
380
+ # streams of the split. The original `ReadStream` can still be read from in
381
+ # the same manner as before. Both of the returned `ReadStream` objects can
382
+ # also be read from, and the rows returned by both child streams will be
383
+ # the same as the rows read from the original stream.
384
+ #
385
+ # Moreover, the two child streams will be allocated back-to-back in the
386
+ # original `ReadStream`. Concretely, it is guaranteed that for streams
387
+ # original, primary, and residual, that original[0-j] = primary[0-j] and
388
+ # original[j-n] = residual[0-m] once the streams have been read to
389
+ # completion.
390
+ # @param options [Gapic::CallOptions, Hash]
391
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
392
+ #
393
+ # @overload split_read_stream(name: nil, fraction: nil)
394
+ # @param name [String]
395
+ # Required. Name of the stream to split.
396
+ # @param fraction [Float]
397
+ # A value in the range (0.0, 1.0) that specifies the fractional point at
398
+ # which the original stream should be split. The actual split point is
399
+ # evaluated on pre-filtered rows, so if a filter is provided, then there is
400
+ # no guarantee that the division of the rows between the new child streams
401
+ # will be proportional to this fractional value. Additionally, because the
402
+ # server-side unit for assigning data is collections of rows, this fraction
403
+ # will always map to a data storage boundary on the server side.
404
+ #
405
+ #
406
+ # @yield [response, operation] Access the result along with the RPC operation
407
+ # @yieldparam response [Google::Cloud::Bigquery::Storage::V1::SplitReadStreamResponse]
408
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
409
+ #
410
+ # @return [Google::Cloud::Bigquery::Storage::V1::SplitReadStreamResponse]
411
+ #
412
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
413
+ #
414
# Splits the named read stream into primary and residual child streams
# and returns the SplitReadStreamResponse describing them.
def split_read_stream request, options = nil
  raise ArgumentError, "request must be provided" if request.nil?

  request = Gapic::Protobuf.coerce request, to: Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest

  # Normalize a hash (or nil) into a CallOptions object.
  options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the RPC-specific default metadata, then layer on the
  # standard client-identification and billing-project headers.
  metadata = @config.rpcs.split_read_stream.metadata.to_h
  metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Implicit routing header derived from the request's stream name.
  routing = { "name" => request.name }
  metadata[:"x-goog-request-params"] ||= routing.map { |k, v| "#{k}=#{v}" }.join("&")

  options.apply_defaults timeout: @config.rpcs.split_read_stream.timeout,
                         metadata: metadata,
                         retry_policy: @config.rpcs.split_read_stream.retry_policy
  options.apply_defaults metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_read_stub.call_rpc :split_read_stream, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue GRPC::BadStatus => e
  raise Google::Cloud::Error.from_error(e)
end
450
+
451
+ ##
452
+ # Configuration class for the BigQueryRead API.
453
+ #
454
+ # This class represents the configuration for BigQueryRead,
455
+ # providing control over timeouts, retry behavior, logging, transport
456
+ # parameters, and other low-level controls. Certain parameters can also be
457
+ # applied individually to specific RPCs. See
458
+ # {Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration::Rpcs}
459
+ # for a list of RPCs that can be configured independently.
460
+ #
461
+ # Configuration can be applied globally to all clients, or to a single client
462
+ # on construction.
463
+ #
464
+ # # Examples
465
+ #
466
+ # To modify the global config, setting the timeout for create_read_session
467
+ # to 20 seconds, and all remaining timeouts to 10 seconds:
468
+ #
469
+ # Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
470
+ # config.timeout = 10_000
471
+ # config.rpcs.create_read_session.timeout = 20_000
472
+ # end
473
+ #
474
+ # To apply the above configuration only to a new client:
475
+ #
476
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
477
+ # config.timeout = 10_000
478
+ # config.rpcs.create_read_session.timeout = 20_000
479
+ # end
480
+ #
481
+ # @!attribute [rw] endpoint
482
+ # The hostname or hostname:port of the service endpoint.
483
+ # Defaults to `"bigquerystorage.googleapis.com"`.
484
+ # @return [String]
485
+ # @!attribute [rw] credentials
486
+ # Credentials to send with calls. You may provide any of the following types:
487
+ # * (`String`) The path to a service account key file in JSON format
488
+ # * (`Hash`) A service account key as a Hash
489
+ # * (`Google::Auth::Credentials`) A googleauth credentials object
490
+ # (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
491
+ # * (`Signet::OAuth2::Client`) A signet oauth2 client object
492
+ # (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
493
+ # * (`GRPC::Core::Channel`) a gRPC channel with included credentials
494
+ # * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object
495
+ # * (`nil`) indicating no credentials
496
+ # @return [Object]
497
+ # @!attribute [rw] scope
498
+ # The OAuth scopes
499
+ # @return [Array<String>]
500
+ # @!attribute [rw] lib_name
501
+ # The library name as recorded in instrumentation and logging
502
+ # @return [String]
503
+ # @!attribute [rw] lib_version
504
+ # The library version as recorded in instrumentation and logging
505
+ # @return [String]
506
+ # @!attribute [rw] channel_args
507
+ # Extra parameters passed to the gRPC channel. Note: this is ignored if a
508
+ # `GRPC::Core::Channel` object is provided as the credential.
509
+ # @return [Hash]
510
+ # @!attribute [rw] interceptors
511
+ # An array of interceptors that are run before calls are executed.
512
+ # @return [Array<GRPC::ClientInterceptor>]
513
+ # @!attribute [rw] timeout
514
+ # The call timeout in milliseconds.
515
+ # @return [Numeric]
516
+ # @!attribute [rw] metadata
517
+ # Additional gRPC headers to be sent with the call.
518
+ # @return [Hash{Symbol=>String}]
519
+ # @!attribute [rw] retry_policy
520
+ # The retry policy. The value is a hash with the following keys:
521
+ # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
522
+ # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
523
+ # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
524
+ # * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
525
+ # trigger a retry.
526
+ # @return [Hash]
527
+ #
528
class Configuration
  extend Gapic::Config

  config_attr :endpoint, "bigquerystorage.googleapis.com", String
  config_attr :credentials, nil do |value|
    # Accept keyfile paths (String), keyfile contents (Hash), auth
    # objects, procs, raw gRPC channels/credentials, or nil (anonymous).
    permitted = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
    permitted += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
    permitted.any? { |klass| klass === value }
  end
  config_attr :scope, nil, String, Array, nil
  config_attr :lib_name, nil, String, nil
  config_attr :lib_version, nil, String, nil
  config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, Hash, nil)
  config_attr :interceptors, nil, Array, nil
  config_attr :timeout, nil, Numeric, nil
  config_attr :metadata, nil, Hash, nil
  config_attr :retry_policy, nil, Hash, Proc, nil

  # @private
  def initialize parent_config = nil
    @parent_config = parent_config unless parent_config.nil?
    yield self if block_given?
  end

  ##
  # Configurations for individual RPCs
  # @return [Rpcs]
  #
  def rpcs
    # nil.respond_to?(:rpcs) is false, so an absent parent yields no
    # parent RPC configs.
    @rpcs ||= Rpcs.new(@parent_config.respond_to?(:rpcs) ? @parent_config.rpcs : nil)
  end

  ##
  # Configuration RPC class for the BigQueryRead API.
  #
  # Includes fields providing the configuration for each RPC in this service.
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
  # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
  #   include `:initial_delay`, `:max_delay`, `:multiplier`, and
  #   `:retry_codes`.
  #
  class Rpcs
    ##
    # RPC-specific configuration for `create_read_session`
    # @return [Gapic::Config::Method]
    #
    attr_reader :create_read_session
    ##
    # RPC-specific configuration for `read_rows`
    # @return [Gapic::Config::Method]
    #
    attr_reader :read_rows
    ##
    # RPC-specific configuration for `split_read_stream`
    # @return [Gapic::Config::Method]
    #
    attr_reader :split_read_stream

    # @private
    def initialize parent_rpcs = nil
      # Each per-RPC config inherits from the parent's matching config
      # when the parent exposes one.
      @create_read_session = Gapic::Config::Method.new(
        parent_rpcs.respond_to?(:create_read_session) ? parent_rpcs.create_read_session : nil
      )
      @read_rows = Gapic::Config::Method.new(
        parent_rpcs.respond_to?(:read_rows) ? parent_rpcs.read_rows : nil
      )
      @split_read_stream = Gapic::Config::Method.new(
        parent_rpcs.respond_to?(:split_read_stream) ? parent_rpcs.split_read_stream : nil
      )
      yield self if block_given?
    end
  end
end
612
+ end
613
+ end
614
+ end
615
+ end
616
+ end
617
+ end
618
+ end
619
+
620
# rubocop:disable Lint/HandleExceptions

# Once client is loaded, load helpers.rb if it exists.
begin
  require "google/cloud/bigquery/storage/v1/big_query_read/helpers"
rescue LoadError
  # The helpers file is optional; its absence is expected and harmless.
end

# rubocop:enable Lint/HandleExceptions