google-cloud-bigquery-storage-v1 0.6.1 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,827 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2021 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+ require "google/cloud/errors"
20
+ require "google/cloud/bigquery/storage/v1/storage_pb"
21
+
22
+ module Google
23
+ module Cloud
24
+ module Bigquery
25
+ module Storage
26
+ module V1
27
+ module BigQueryWrite
28
+ ##
29
+ # Client for the BigQueryWrite service.
30
+ #
31
+ # BigQuery Write API.
32
+ #
33
+ # The Write API can be used to write data to BigQuery.
34
+ #
35
+ # For supplementary information about the Write API, see:
36
+ # https://cloud.google.com/bigquery/docs/write-api
37
+ #
38
+ class Client
39
+ include Paths
40
+
41
+ # @private
42
+ attr_reader :big_query_write_stub
43
+
44
+ ##
45
+ # Configure the BigQueryWrite Client class.
46
+ #
47
+ # See {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration}
48
+ # for a description of the configuration fields.
49
+ #
50
+ # @example
51
+ #
52
+ # # Modify the configuration for all BigQueryWrite clients
53
+ # ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.configure do |config|
54
+ # config.timeout = 10.0
55
+ # end
56
+ #
57
+ # @yield [config] Configure the Client client.
58
+ # @yieldparam config [Client::Configuration]
59
+ #
60
+ # @return [Client::Configuration]
61
+ #
62
+ def self.configure
63
+ @configure ||= begin
64
+ namespace = ["Google", "Cloud", "Bigquery", "Storage", "V1"]
65
+ parent_config = while namespace.any?
66
+ parent_name = namespace.join "::"
67
+ parent_const = const_get parent_name
68
+ break parent_const.configure if parent_const.respond_to? :configure
69
+ namespace.pop
70
+ end
71
+ default_config = Client::Configuration.new parent_config
72
+
73
+ default_config.rpcs.create_write_stream.timeout = 600.0
74
+ default_config.rpcs.create_write_stream.retry_policy = {
75
+ initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
76
+ }
77
+
78
+ default_config.rpcs.append_rows.timeout = 86_400.0
79
+ default_config.rpcs.append_rows.retry_policy = {
80
+ initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14]
81
+ }
82
+
83
+ default_config.rpcs.get_write_stream.timeout = 600.0
84
+ default_config.rpcs.get_write_stream.retry_policy = {
85
+ initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
86
+ }
87
+
88
+ default_config.rpcs.finalize_write_stream.timeout = 600.0
89
+ default_config.rpcs.finalize_write_stream.retry_policy = {
90
+ initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
91
+ }
92
+
93
+ default_config.rpcs.batch_commit_write_streams.timeout = 600.0
94
+ default_config.rpcs.batch_commit_write_streams.retry_policy = {
95
+ initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
96
+ }
97
+
98
+ default_config.rpcs.flush_rows.timeout = 600.0
99
+ default_config.rpcs.flush_rows.retry_policy = {
100
+ initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
101
+ }
102
+
103
+ default_config
104
+ end
105
+ yield @configure if block_given?
106
+ @configure
107
+ end
108
+
109
+ ##
110
+ # Configure the BigQueryWrite Client instance.
111
+ #
112
+ # The configuration is set to the derived mode, meaning that values can be changed,
113
+ # but structural changes (adding new fields, etc.) are not allowed. Structural changes
114
+ # should be made on {Client.configure}.
115
+ #
116
+ # See {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration}
117
+ # for a description of the configuration fields.
118
+ #
119
+ # @yield [config] Configure the Client client.
120
+ # @yieldparam config [Client::Configuration]
121
+ #
122
+ # @return [Client::Configuration]
123
+ #
124
+ def configure
125
+ yield @config if block_given?
126
+ @config
127
+ end
128
+
129
+ ##
130
+ # Create a new BigQueryWrite client object.
131
+ #
132
+ # @example
133
+ #
134
+ # # Create a client using the default configuration
135
+ # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
136
+ #
137
+ # # Create a client using a custom configuration
138
+ # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
139
+ # config.timeout = 10.0
140
+ # end
141
+ #
142
+ # @yield [config] Configure the BigQueryWrite client.
143
+ # @yieldparam config [Client::Configuration]
144
+ #
145
+ def initialize
146
+ # These require statements are intentionally placed here to initialize
147
+ # the gRPC module only when it's required.
148
+ # See https://github.com/googleapis/toolkit/issues/446
149
+ require "gapic/grpc"
150
+ require "google/cloud/bigquery/storage/v1/storage_services_pb"
151
+
152
+ # Create the configuration object
153
+ @config = Configuration.new Client.configure
154
+
155
+ # Yield the configuration if needed
156
+ yield @config if block_given?
157
+
158
+ # Create credentials
159
+ credentials = @config.credentials
160
+ # Use self-signed JWT if the endpoint is unchanged from default,
161
+ # but only if the default endpoint does not have a region prefix.
162
+ enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
163
+ !@config.endpoint.split(".").first.include?("-")
164
+ credentials ||= Credentials.default scope: @config.scope,
165
+ enable_self_signed_jwt: enable_self_signed_jwt
166
+ if credentials.is_a?(::String) || credentials.is_a?(::Hash)
167
+ credentials = Credentials.new credentials, scope: @config.scope
168
+ end
169
+ @quota_project_id = @config.quota_project
170
+ @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
171
+
172
+ @big_query_write_stub = ::Gapic::ServiceStub.new(
173
+ ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Stub,
174
+ credentials: credentials,
175
+ endpoint: @config.endpoint,
176
+ channel_args: @config.channel_args,
177
+ interceptors: @config.interceptors
178
+ )
179
+ end
180
+
181
+ # Service calls
182
+
183
+ ##
184
+ # Creates a write stream to the given table.
185
+ # Additionally, every table has a special stream named '_default'
186
+ # to which data can be written. This stream doesn't need to be created using
187
+ # CreateWriteStream. It is a stream that can be used simultaneously by any
188
+ # number of clients. Data written to this stream is considered committed as
189
+ # soon as an acknowledgement is received.
190
+ #
191
+ # @overload create_write_stream(request, options = nil)
192
+ # Pass arguments to `create_write_stream` via a request object, either of type
193
+ # {::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest} or an equivalent Hash.
194
+ #
195
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest, ::Hash]
196
+ # A request object representing the call parameters. Required. To specify no
197
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
198
+ # @param options [::Gapic::CallOptions, ::Hash]
199
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
200
+ #
201
+ # @overload create_write_stream(parent: nil, write_stream: nil)
202
+ # Pass arguments to `create_write_stream` via keyword arguments. Note that at
203
+ # least one keyword argument is required. To specify no parameters, or to keep all
204
+ # the default parameter values, pass an empty Hash as a request object (see above).
205
+ #
206
+ # @param parent [::String]
207
+ # Required. Reference to the table to which the stream belongs, in the format
208
+ # of `projects/{project}/datasets/{dataset}/tables/{table}`.
209
+ # @param write_stream [::Google::Cloud::Bigquery::Storage::V1::WriteStream, ::Hash]
210
+ # Required. Stream to be created.
211
+ #
212
+ # @yield [response, operation] Access the result along with the RPC operation
213
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
214
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
215
+ #
216
+ # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
217
+ #
218
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
219
+ #
220
+ def create_write_stream request, options = nil
221
+ raise ::ArgumentError, "request must be provided" if request.nil?
222
+
223
+ request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest
224
+
225
+ # Converts hash and nil to an options object
226
+ options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
227
+
228
+ # Customize the options with defaults
229
+ metadata = @config.rpcs.create_write_stream.metadata.to_h
230
+
231
+ # Set x-goog-api-client and x-goog-user-project headers
232
+ metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
233
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
234
+ gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
235
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
236
+
237
+ header_params = {
238
+ "parent" => request.parent
239
+ }
240
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
241
+ metadata[:"x-goog-request-params"] ||= request_params_header
242
+
243
+ options.apply_defaults timeout: @config.rpcs.create_write_stream.timeout,
244
+ metadata: metadata,
245
+ retry_policy: @config.rpcs.create_write_stream.retry_policy
246
+
247
+ options.apply_defaults timeout: @config.timeout,
248
+ metadata: @config.metadata,
249
+ retry_policy: @config.retry_policy
250
+
251
+ @big_query_write_stub.call_rpc :create_write_stream, request, options: options do |response, operation|
252
+ yield response, operation if block_given?
253
+ return response
254
+ end
255
+ rescue ::GRPC::BadStatus => e
256
+ raise ::Google::Cloud::Error.from_error(e)
257
+ end
258
+
259
+ ##
260
+ # Appends data to the given stream.
261
+ #
262
+ # If `offset` is specified, the `offset` is checked against the end of
263
+ # stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
264
+ # attempt is made to append to an offset beyond the current end of the stream
265
+ # or `ALREADY_EXISTS` if user provides an `offset` that has already been
266
+ # written to. User can retry with adjusted offset within the same RPC
267
+ # connection. If `offset` is not specified, append happens at the end of the
268
+ # stream.
269
+ #
270
+ # The response contains an optional offset at which the append
271
+ # happened. No offset information will be returned for appends to a
272
+ # default stream.
273
+ #
274
+ # Responses are received in the same order in which requests are sent.
275
+ # There will be one response for each successful inserted request. Responses
276
+ # may optionally embed error information if the originating AppendRequest was
277
+ # not successfully processed.
278
+ #
279
+ # The specifics of when successfully appended data is made visible to the
280
+ # table are governed by the type of stream:
281
+ #
282
+ # * For COMMITTED streams (which includes the default stream), data is
283
+ # visible immediately upon successful append.
284
+ #
285
+ # * For BUFFERED streams, data is made visible via a subsequent `FlushRows`
286
+ # rpc which advances a cursor to a newer offset in the stream.
287
+ #
288
+ # * For PENDING streams, data is not made visible until the stream itself is
289
+ # finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly
290
+ # committed via the `BatchCommitWriteStreams` rpc.
291
+ #
292
+ # @param request [::Gapic::StreamInput, ::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest, ::Hash>]
293
+ # An enumerable of {::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest} instances.
294
+ # @param options [::Gapic::CallOptions, ::Hash]
295
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
296
+ #
297
+ # @yield [response, operation] Access the result along with the RPC operation
298
+ # @yieldparam response [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse>]
299
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
300
+ #
301
+ # @return [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse>]
302
+ #
303
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
304
+ #
305
+ def append_rows request, options = nil
306
+ unless request.is_a? ::Enumerable
307
+ raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum
308
+ request = request.to_enum
309
+ end
310
+
311
+ request = request.lazy.map do |req|
312
+ ::Gapic::Protobuf.coerce req, to: ::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest
313
+ end
314
+
315
+ # Converts hash and nil to an options object
316
+ options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
317
+
318
+ # Customize the options with defaults
319
+ metadata = @config.rpcs.append_rows.metadata.to_h
320
+
321
+ # Set x-goog-api-client and x-goog-user-project headers
322
+ metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
323
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
324
+ gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
325
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
326
+
327
+ options.apply_defaults timeout: @config.rpcs.append_rows.timeout,
328
+ metadata: metadata,
329
+ retry_policy: @config.rpcs.append_rows.retry_policy
330
+
331
+ options.apply_defaults timeout: @config.timeout,
332
+ metadata: @config.metadata,
333
+ retry_policy: @config.retry_policy
334
+
335
+ @big_query_write_stub.call_rpc :append_rows, request, options: options do |response, operation|
336
+ yield response, operation if block_given?
337
+ return response
338
+ end
339
+ rescue ::GRPC::BadStatus => e
340
+ raise ::Google::Cloud::Error.from_error(e)
341
+ end
342
+
343
+ ##
344
+ # Gets information about a write stream.
345
+ #
346
+ # @overload get_write_stream(request, options = nil)
347
+ # Pass arguments to `get_write_stream` via a request object, either of type
348
+ # {::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest} or an equivalent Hash.
349
+ #
350
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest, ::Hash]
351
+ # A request object representing the call parameters. Required. To specify no
352
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
353
+ # @param options [::Gapic::CallOptions, ::Hash]
354
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
355
+ #
356
+ # @overload get_write_stream(name: nil)
357
+ # Pass arguments to `get_write_stream` via keyword arguments. Note that at
358
+ # least one keyword argument is required. To specify no parameters, or to keep all
359
+ # the default parameter values, pass an empty Hash as a request object (see above).
360
+ #
361
+ # @param name [::String]
362
+ # Required. Name of the stream to get, in the form of
363
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
364
+ #
365
+ # @yield [response, operation] Access the result along with the RPC operation
366
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
367
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
368
+ #
369
+ # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
370
+ #
371
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
372
+ #
373
+ def get_write_stream request, options = nil
374
+ raise ::ArgumentError, "request must be provided" if request.nil?
375
+
376
+ request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest
377
+
378
+ # Converts hash and nil to an options object
379
+ options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
380
+
381
+ # Customize the options with defaults
382
+ metadata = @config.rpcs.get_write_stream.metadata.to_h
383
+
384
+ # Set x-goog-api-client and x-goog-user-project headers
385
+ metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
386
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
387
+ gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
388
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
389
+
390
+ header_params = {
391
+ "name" => request.name
392
+ }
393
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
394
+ metadata[:"x-goog-request-params"] ||= request_params_header
395
+
396
+ options.apply_defaults timeout: @config.rpcs.get_write_stream.timeout,
397
+ metadata: metadata,
398
+ retry_policy: @config.rpcs.get_write_stream.retry_policy
399
+
400
+ options.apply_defaults timeout: @config.timeout,
401
+ metadata: @config.metadata,
402
+ retry_policy: @config.retry_policy
403
+
404
+ @big_query_write_stub.call_rpc :get_write_stream, request, options: options do |response, operation|
405
+ yield response, operation if block_given?
406
+ return response
407
+ end
408
+ rescue ::GRPC::BadStatus => e
409
+ raise ::Google::Cloud::Error.from_error(e)
410
+ end
411
+
412
+ ##
413
+ # Finalize a write stream so that no new data can be appended to the
414
+ # stream. Finalize is not supported on the '_default' stream.
415
+ #
416
+ # @overload finalize_write_stream(request, options = nil)
417
+ # Pass arguments to `finalize_write_stream` via a request object, either of type
418
+ # {::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest} or an equivalent Hash.
419
+ #
420
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest, ::Hash]
421
+ # A request object representing the call parameters. Required. To specify no
422
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
423
+ # @param options [::Gapic::CallOptions, ::Hash]
424
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
425
+ #
426
+ # @overload finalize_write_stream(name: nil)
427
+ # Pass arguments to `finalize_write_stream` via keyword arguments. Note that at
428
+ # least one keyword argument is required. To specify no parameters, or to keep all
429
+ # the default parameter values, pass an empty Hash as a request object (see above).
430
+ #
431
+ # @param name [::String]
432
+ # Required. Name of the stream to finalize, in the form of
433
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
434
+ #
435
+ # @yield [response, operation] Access the result along with the RPC operation
436
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse]
437
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
438
+ #
439
+ # @return [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse]
440
+ #
441
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
442
+ #
443
+ def finalize_write_stream request, options = nil
444
+ raise ::ArgumentError, "request must be provided" if request.nil?
445
+
446
+ request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest
447
+
448
+ # Converts hash and nil to an options object
449
+ options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
450
+
451
+ # Customize the options with defaults
452
+ metadata = @config.rpcs.finalize_write_stream.metadata.to_h
453
+
454
+ # Set x-goog-api-client and x-goog-user-project headers
455
+ metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
456
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
457
+ gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
458
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
459
+
460
+ header_params = {
461
+ "name" => request.name
462
+ }
463
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
464
+ metadata[:"x-goog-request-params"] ||= request_params_header
465
+
466
+ options.apply_defaults timeout: @config.rpcs.finalize_write_stream.timeout,
467
+ metadata: metadata,
468
+ retry_policy: @config.rpcs.finalize_write_stream.retry_policy
469
+
470
+ options.apply_defaults timeout: @config.timeout,
471
+ metadata: @config.metadata,
472
+ retry_policy: @config.retry_policy
473
+
474
+ @big_query_write_stub.call_rpc :finalize_write_stream, request, options: options do |response, operation|
475
+ yield response, operation if block_given?
476
+ return response
477
+ end
478
+ rescue ::GRPC::BadStatus => e
479
+ raise ::Google::Cloud::Error.from_error(e)
480
+ end
481
+
482
+ ##
483
+ # Atomically commits a group of `PENDING` streams that belong to the same
484
+ # `parent` table.
485
+ #
486
+ # Streams must be finalized before commit and cannot be committed multiple
487
+ # times. Once a stream is committed, data in the stream becomes available
488
+ # for read operations.
489
+ #
490
+ # @overload batch_commit_write_streams(request, options = nil)
491
+ # Pass arguments to `batch_commit_write_streams` via a request object, either of type
492
+ # {::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest} or an equivalent Hash.
493
+ #
494
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest, ::Hash]
495
+ # A request object representing the call parameters. Required. To specify no
496
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
497
+ # @param options [::Gapic::CallOptions, ::Hash]
498
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
499
+ #
500
+ # @overload batch_commit_write_streams(parent: nil, write_streams: nil)
501
+ # Pass arguments to `batch_commit_write_streams` via keyword arguments. Note that at
502
+ # least one keyword argument is required. To specify no parameters, or to keep all
503
+ # the default parameter values, pass an empty Hash as a request object (see above).
504
+ #
505
+ # @param parent [::String]
506
+ # Required. Parent table that all the streams should belong to, in the form of
507
+ # `projects/{project}/datasets/{dataset}/tables/{table}`.
508
+ # @param write_streams [::Array<::String>]
509
+ # Required. The group of streams that will be committed atomically.
510
+ #
511
+ # @yield [response, operation] Access the result along with the RPC operation
512
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse]
513
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
514
+ #
515
+ # @return [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse]
516
+ #
517
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
518
+ #
519
+ def batch_commit_write_streams request, options = nil
520
+ raise ::ArgumentError, "request must be provided" if request.nil?
521
+
522
+ request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest
523
+
524
+ # Converts hash and nil to an options object
525
+ options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
526
+
527
+ # Customize the options with defaults
528
+ metadata = @config.rpcs.batch_commit_write_streams.metadata.to_h
529
+
530
+ # Set x-goog-api-client and x-goog-user-project headers
531
+ metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
532
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
533
+ gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
534
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
535
+
536
+ header_params = {
537
+ "parent" => request.parent
538
+ }
539
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
540
+ metadata[:"x-goog-request-params"] ||= request_params_header
541
+
542
+ options.apply_defaults timeout: @config.rpcs.batch_commit_write_streams.timeout,
543
+ metadata: metadata,
544
+ retry_policy: @config.rpcs.batch_commit_write_streams.retry_policy
545
+
546
+ options.apply_defaults timeout: @config.timeout,
547
+ metadata: @config.metadata,
548
+ retry_policy: @config.retry_policy
549
+
550
+ @big_query_write_stub.call_rpc :batch_commit_write_streams, request, options: options do |response, operation|
551
+ yield response, operation if block_given?
552
+ return response
553
+ end
554
+ rescue ::GRPC::BadStatus => e
555
+ raise ::Google::Cloud::Error.from_error(e)
556
+ end
557
+
558
+ ##
559
+ # Flushes rows to a BUFFERED stream.
560
+ #
561
+ # If users are appending rows to BUFFERED stream, flush operation is
562
+ # required in order for the rows to become available for reading. A
563
+ # Flush operation flushes up to any previously flushed offset in a BUFFERED
564
+ # stream, to the offset specified in the request.
565
+ #
566
+ # Flush is not supported on the _default stream, since it is not BUFFERED.
567
+ #
568
+ # @overload flush_rows(request, options = nil)
569
+ # Pass arguments to `flush_rows` via a request object, either of type
570
+ # {::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest} or an equivalent Hash.
571
+ #
572
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest, ::Hash]
573
+ # A request object representing the call parameters. Required. To specify no
574
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
575
+ # @param options [::Gapic::CallOptions, ::Hash]
576
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
577
+ #
578
+ # @overload flush_rows(write_stream: nil, offset: nil)
579
+ # Pass arguments to `flush_rows` via keyword arguments. Note that at
580
+ # least one keyword argument is required. To specify no parameters, or to keep all
581
+ # the default parameter values, pass an empty Hash as a request object (see above).
582
+ #
583
+ # @param write_stream [::String]
584
+ # Required. The stream that is the target of the flush operation.
585
+ # @param offset [::Google::Protobuf::Int64Value, ::Hash]
586
+ # Ending offset of the flush operation. Rows before this offset(including
587
+ # this offset) will be flushed.
588
+ #
589
+ # @yield [response, operation] Access the result along with the RPC operation
590
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse]
591
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
592
+ #
593
+ # @return [::Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse]
594
+ #
595
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
596
+ #
597
+ def flush_rows request, options = nil
598
+ raise ::ArgumentError, "request must be provided" if request.nil?
599
+
600
+ request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest
601
+
602
+ # Converts hash and nil to an options object
603
+ options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
604
+
605
+ # Customize the options with defaults
606
+ metadata = @config.rpcs.flush_rows.metadata.to_h
607
+
608
+ # Set x-goog-api-client and x-goog-user-project headers
609
+ metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
610
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
611
+ gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
612
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
613
+
614
+ header_params = {
615
+ "write_stream" => request.write_stream
616
+ }
617
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
618
+ metadata[:"x-goog-request-params"] ||= request_params_header
619
+
620
+ options.apply_defaults timeout: @config.rpcs.flush_rows.timeout,
621
+ metadata: metadata,
622
+ retry_policy: @config.rpcs.flush_rows.retry_policy
623
+
624
+ options.apply_defaults timeout: @config.timeout,
625
+ metadata: @config.metadata,
626
+ retry_policy: @config.retry_policy
627
+
628
+ @big_query_write_stub.call_rpc :flush_rows, request, options: options do |response, operation|
629
+ yield response, operation if block_given?
630
+ return response
631
+ end
632
+ rescue ::GRPC::BadStatus => e
633
+ raise ::Google::Cloud::Error.from_error(e)
634
+ end
635
+
636
+ ##
637
+ # Configuration class for the BigQueryWrite API.
638
+ #
639
+ # This class represents the configuration for BigQueryWrite,
640
+ # providing control over timeouts, retry behavior, logging, transport
641
+ # parameters, and other low-level controls. Certain parameters can also be
642
+ # applied individually to specific RPCs. See
643
+ # {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration::Rpcs}
644
+ # for a list of RPCs that can be configured independently.
645
+ #
646
+ # Configuration can be applied globally to all clients, or to a single client
647
+ # on construction.
648
+ #
649
+ # @example
650
+ #
651
+ # # Modify the global config, setting the timeout for
652
+ # # create_write_stream to 20 seconds,
653
+ # # and all remaining timeouts to 10 seconds.
654
+ # ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.configure do |config|
655
+ # config.timeout = 10.0
656
+ # config.rpcs.create_write_stream.timeout = 20.0
657
+ # end
658
+ #
659
+ # # Apply the above configuration only to a new client.
660
+ # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
661
+ # config.timeout = 10.0
662
+ # config.rpcs.create_write_stream.timeout = 20.0
663
+ # end
664
+ #
665
+ # @!attribute [rw] endpoint
666
+ # The hostname or hostname:port of the service endpoint.
667
+ # Defaults to `"bigquerystorage.googleapis.com"`.
668
+ # @return [::String]
669
+ # @!attribute [rw] credentials
670
+ # Credentials to send with calls. You may provide any of the following types:
671
+ # * (`String`) The path to a service account key file in JSON format
672
+ # * (`Hash`) A service account key as a Hash
673
+ # * (`Google::Auth::Credentials`) A googleauth credentials object
674
+ # (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
675
+ # * (`Signet::OAuth2::Client`) A signet oauth2 client object
676
+ # (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
677
+ # * (`GRPC::Core::Channel`) a gRPC channel with included credentials
678
+ # * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object
679
+ # * (`nil`) indicating no credentials
680
+ # @return [::Object]
681
+ # @!attribute [rw] scope
682
+ # The OAuth scopes
683
+ # @return [::Array<::String>]
684
+ # @!attribute [rw] lib_name
685
+ # The library name as recorded in instrumentation and logging
686
+ # @return [::String]
687
+ # @!attribute [rw] lib_version
688
+ # The library version as recorded in instrumentation and logging
689
+ # @return [::String]
690
+ # @!attribute [rw] channel_args
691
+ # Extra parameters passed to the gRPC channel. Note: this is ignored if a
692
+ # `GRPC::Core::Channel` object is provided as the credential.
693
+ # @return [::Hash]
694
+ # @!attribute [rw] interceptors
695
+ # An array of interceptors that are run before calls are executed.
696
+ # @return [::Array<::GRPC::ClientInterceptor>]
697
+ # @!attribute [rw] timeout
698
+ # The call timeout in seconds.
699
+ # @return [::Numeric]
700
+ # @!attribute [rw] metadata
701
+ # Additional gRPC headers to be sent with the call.
702
+ # @return [::Hash{::Symbol=>::String}]
703
+ # @!attribute [rw] retry_policy
704
+ # The retry policy. The value is a hash with the following keys:
705
+ # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
706
+ # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
707
+ # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
708
+ # * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
709
+ # trigger a retry.
710
+ # @return [::Hash]
711
+ # @!attribute [rw] quota_project
712
+ # A separate project against which to charge quota.
713
+ # @return [::String]
714
+ #
715
class Configuration
  extend ::Gapic::Config

  # Declarative configuration fields. Each `config_attr` registers a
  # setting with its default value and the types a caller may assign.
  config_attr :endpoint, "bigquerystorage.googleapis.com", ::String
  config_attr :credentials, nil do |value|
    # Accept the documented credential forms; gRPC channel types are
    # only permitted when gRPC itself has been loaded.
    permitted = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
    permitted.concat [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
    permitted.any? { |type| type === value }
  end
  config_attr :scope, nil, ::String, ::Array, nil
  config_attr :lib_name, nil, ::String, nil
  config_attr :lib_version, nil, ::String, nil
  config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
  config_attr :interceptors, nil, ::Array, nil
  config_attr :timeout, nil, ::Numeric, nil
  config_attr :metadata, nil, ::Hash, nil
  config_attr :retry_policy, nil, ::Hash, ::Proc, nil
  config_attr :quota_project, nil, ::String, nil

  # @private
  # Builds a configuration, optionally chained to a parent configuration
  # whose values act as fallbacks for unset fields.
  def initialize parent_config = nil
    @parent_config = parent_config unless parent_config.nil?

    yield self if block_given?
  end

  ##
  # Configurations for individual RPCs
  # @return [Rpcs]
  #
  def rpcs
    @rpcs ||= begin
      # Inherit per-RPC settings from the parent configuration when one
      # is present and exposes its own rpcs.
      inherited = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
      Rpcs.new inherited
    end
  end

  ##
  # Configuration RPC class for the BigQueryWrite API.
  #
  # Includes fields providing the configuration for each RPC in this service.
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
  # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
  #   include the following keys:
  #     * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
  #     * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
  #     * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
  #     * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
  #       trigger a retry.
  #
  class Rpcs
    ##
    # RPC-specific configuration for `create_write_stream`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :create_write_stream
    ##
    # RPC-specific configuration for `append_rows`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :append_rows
    ##
    # RPC-specific configuration for `get_write_stream`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_write_stream
    ##
    # RPC-specific configuration for `finalize_write_stream`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :finalize_write_stream
    ##
    # RPC-specific configuration for `batch_commit_write_streams`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :batch_commit_write_streams
    ##
    # RPC-specific configuration for `flush_rows`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :flush_rows

    # @private
    # Builds one `Gapic::Config::Method` per RPC, seeding each from the
    # matching entry on `parent_rpcs` when that entry is available.
    def initialize parent_rpcs = nil
      %i[
        create_write_stream
        append_rows
        get_write_stream
        finalize_write_stream
        batch_commit_write_streams
        flush_rows
      ].each do |rpc_name|
        # `respond_to?` is false for nil parent_rpcs, so `inherited`
        # stays nil and the RPC config starts from defaults.
        inherited = parent_rpcs.public_send(rpc_name) if parent_rpcs.respond_to?(rpc_name)
        instance_variable_set :"@#{rpc_name}", ::Gapic::Config::Method.new(inherited)
      end

      yield self if block_given?
    end
  end
end
821
+ end
822
+ end
823
+ end
824
+ end
825
+ end
826
+ end
827
+ end