google-cloud-bigquery-storage-v1 0.6.3 → 0.9.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,936 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2021 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+ require "google/cloud/errors"
20
+ require "google/cloud/bigquery/storage/v1/storage_pb"
21
+
22
+ module Google
23
+ module Cloud
24
+ module Bigquery
25
+ module Storage
26
+ module V1
27
+ module BigQueryWrite
28
+ ##
29
+ # Client for the BigQueryWrite service.
30
+ #
31
+ # BigQuery Write API.
32
+ #
33
+ # The Write API can be used to write data to BigQuery.
34
+ #
35
+ # For supplementary information about the Write API, see:
36
+ # https://cloud.google.com/bigquery/docs/write-api
37
+ #
38
+ class Client
39
+ include Paths
40
+
41
+ # @private
42
+ attr_reader :big_query_write_stub
43
+
44
+ ##
45
+ # Configure the BigQueryWrite Client class.
46
+ #
47
+ # See {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration}
48
+ # for a description of the configuration fields.
49
+ #
50
+ # @example
51
+ #
52
+ # # Modify the configuration for all BigQueryWrite clients
53
+ # ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.configure do |config|
54
+ # config.timeout = 10.0
55
+ # end
56
+ #
57
+ # @yield [config] Configure the Client client.
58
+ # @yieldparam config [Client::Configuration]
59
+ #
60
+ # @return [Client::Configuration]
61
+ #
62
def self.configure
  @configure ||= begin
    # Walk outward through the enclosing namespaces and inherit defaults
    # from the nearest module that exposes a +configure+ method.
    namespace = ["Google", "Cloud", "Bigquery", "Storage", "V1"]
    parent_config = while namespace.any?
                      parent_name = namespace.join "::"
                      parent_const = const_get parent_name
                      break parent_const.configure if parent_const.respond_to? :configure
                      namespace.pop
                    end
    default_config = Client::Configuration.new parent_config

    # Every unary RPC shares a 10-minute timeout and retries on gRPC
    # codes 4 (DEADLINE_EXCEEDED) and 14 (UNAVAILABLE).
    [:create_write_stream, :get_write_stream, :finalize_write_stream,
     :batch_commit_write_streams, :flush_rows].each do |rpc_name|
      rpc_config = default_config.rpcs.send rpc_name
      rpc_config.timeout = 600.0
      rpc_config.retry_policy = {
        initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
      }
    end

    # The bidi-streaming append_rows RPC may stay open for up to a day
    # and retries only on UNAVAILABLE (14).
    default_config.rpcs.append_rows.timeout = 86_400.0
    default_config.rpcs.append_rows.retry_policy = {
      initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14]
    }

    default_config
  end
  yield @configure if block_given?
  @configure
end
108
+
109
+ ##
110
+ # Configure the BigQueryWrite Client instance.
111
+ #
112
+ # The configuration is set to the derived mode, meaning that values can be changed,
113
+ # but structural changes (adding new fields, etc.) are not allowed. Structural changes
114
+ # should be made on {Client.configure}.
115
+ #
116
+ # See {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration}
117
+ # for a description of the configuration fields.
118
+ #
119
+ # @yield [config] Configure the Client client.
120
+ # @yieldparam config [Client::Configuration]
121
+ #
122
+ # @return [Client::Configuration]
123
+ #
124
# Yields this client's derived {Client::Configuration} (when a block is
# given) and returns it.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
def configure
  @config.tap do |config|
    yield config if block_given?
  end
end
128
+
129
+ ##
130
+ # Create a new BigQueryWrite client object.
131
+ #
132
+ # @example
133
+ #
134
+ # # Create a client using the default configuration
135
+ # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
136
+ #
137
+ # # Create a client using a custom configuration
138
+ # client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
139
+ # config.timeout = 10.0
140
+ # end
141
+ #
142
+ # @yield [config] Configure the BigQueryWrite client.
143
+ # @yieldparam config [Client::Configuration]
144
+ #
145
def initialize
  # gRPC is loaded lazily so that merely requiring this file does not pull
  # in the grpc gem. See https://github.com/googleapis/toolkit/issues/446
  require "gapic/grpc"
  require "google/cloud/bigquery/storage/v1/storage_services_pb"

  # Build the per-instance configuration from the class-level defaults and
  # let the caller tweak it before the stub is created.
  @config = Configuration.new Client.configure
  yield @config if block_given?

  # Resolve credentials. A self-signed JWT is permitted only when the
  # endpoint is unchanged from the default and has no regional ("-")
  # prefix in its first label.
  credentials = @config.credentials
  endpoint_is_default = @config.endpoint == Client.configure.endpoint
  endpoint_is_regional = @config.endpoint.split(".").first.include? "-"
  credentials ||= Credentials.default scope: @config.scope,
                                      enable_self_signed_jwt: endpoint_is_default && !endpoint_is_regional
  if credentials.is_a?(::String) || credentials.is_a?(::Hash)
    credentials = Credentials.new credentials, scope: @config.scope
  end

  # Quota project: explicit configuration wins over the credentials' own.
  @quota_project_id = @config.quota_project
  @quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id

  @big_query_write_stub = ::Gapic::ServiceStub.new(
    ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Stub,
    credentials: credentials,
    endpoint: @config.endpoint,
    channel_args: @config.channel_args,
    interceptors: @config.interceptors
  )
end
180
+
181
+ # Service calls
182
+
183
+ ##
184
+ # Creates a write stream to the given table.
185
+ # Additionally, every table has a special stream named '_default'
186
+ # to which data can be written. This stream doesn't need to be created using
187
+ # CreateWriteStream. It is a stream that can be used simultaneously by any
188
+ # number of clients. Data written to this stream is considered committed as
189
+ # soon as an acknowledgement is received.
190
+ #
191
+ # @overload create_write_stream(request, options = nil)
192
+ # Pass arguments to `create_write_stream` via a request object, either of type
193
+ # {::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest} or an equivalent Hash.
194
+ #
195
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest, ::Hash]
196
+ # A request object representing the call parameters. Required. To specify no
197
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
198
+ # @param options [::Gapic::CallOptions, ::Hash]
199
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
200
+ #
201
+ # @overload create_write_stream(parent: nil, write_stream: nil)
202
+ # Pass arguments to `create_write_stream` via keyword arguments. Note that at
203
+ # least one keyword argument is required. To specify no parameters, or to keep all
204
+ # the default parameter values, pass an empty Hash as a request object (see above).
205
+ #
206
+ # @param parent [::String]
207
+ # Required. Reference to the table to which the stream belongs, in the format
208
+ # of `projects/{project}/datasets/{dataset}/tables/{table}`.
209
+ # @param write_stream [::Google::Cloud::Bigquery::Storage::V1::WriteStream, ::Hash]
210
+ # Required. Stream to be created.
211
+ #
212
+ # @yield [response, operation] Access the result along with the RPC operation
213
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
214
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
215
+ #
216
+ # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
217
+ #
218
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
219
+ #
220
+ # @example Basic example
221
+ # require "google/cloud/bigquery/storage/v1"
222
+ #
223
+ # # Create a client object. The client can be reused for multiple calls.
224
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
225
+ #
226
+ # # Create a request. To set request fields, pass in keyword arguments.
227
+ # request = Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest.new
228
+ #
229
+ # # Call the create_write_stream method.
230
+ # result = client.create_write_stream request
231
+ #
232
+ # # The returned object is of type Google::Cloud::Bigquery::Storage::V1::WriteStream.
233
+ # p result
234
+ #
235
def create_write_stream request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest

  # Accept a plain Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Per-RPC default metadata plus the standard API headers.
  call_metadata = @config.rpcs.create_write_stream.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  header_params = {}
  header_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= header_params.map { |k, v| "#{k}=#{v}" }.join "&"

  # Method-level defaults take precedence over client-level defaults.
  options.apply_defaults timeout: @config.rpcs.create_write_stream.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.create_write_stream.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_write_stub.call_rpc :create_write_stream, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
275
+
276
+ ##
277
+ # Appends data to the given stream.
278
+ #
279
+ # If `offset` is specified, the `offset` is checked against the end of
280
+ # stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
281
+ # attempt is made to append to an offset beyond the current end of the stream
282
+ # or `ALREADY_EXISTS` if user provides an `offset` that has already been
283
+ # written to. User can retry with adjusted offset within the same RPC
284
+ # connection. If `offset` is not specified, append happens at the end of the
285
+ # stream.
286
+ #
287
+ # The response contains an optional offset at which the append
288
+ # happened. No offset information will be returned for appends to a
289
+ # default stream.
290
+ #
291
+ # Responses are received in the same order in which requests are sent.
292
+ # There will be one response for each successful inserted request. Responses
293
+ # may optionally embed error information if the originating AppendRequest was
294
+ # not successfully processed.
295
+ #
296
+ # The specifics of when successfully appended data is made visible to the
297
+ # table are governed by the type of stream:
298
+ #
299
+ # * For COMMITTED streams (which includes the default stream), data is
300
+ # visible immediately upon successful append.
301
+ #
302
+ # * For BUFFERED streams, data is made visible via a subsequent `FlushRows`
303
+ # rpc which advances a cursor to a newer offset in the stream.
304
+ #
305
+ # * For PENDING streams, data is not made visible until the stream itself is
306
+ # finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly
307
+ # committed via the `BatchCommitWriteStreams` rpc.
308
+ #
309
+ # @param request [::Gapic::StreamInput, ::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest, ::Hash>]
310
+ # An enumerable of {::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest} instances.
311
+ # @param options [::Gapic::CallOptions, ::Hash]
312
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
313
+ #
314
+ # @yield [response, operation] Access the result along with the RPC operation
315
+ # @yieldparam response [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse>]
316
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
317
+ #
318
+ # @return [::Enumerable<::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse>]
319
+ #
320
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
321
+ #
322
+ # @example Basic example
323
+ # require "google/cloud/bigquery/storage/v1"
324
+ #
325
+ # # Create a client object. The client can be reused for multiple calls.
326
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
327
+ #
328
+ # # Create an input stream
329
+ # input = Gapic::StreamInput.new
330
+ #
331
+ # # Call the append_rows method to start streaming.
332
+ # output = client.append_rows input
333
+ #
334
+ # # Send requests on the stream. For each request, pass in keyword
335
+ # # arguments to set fields. Be sure to close the stream when done.
336
+ # input << Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new
337
+ # input << Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new
338
+ # input.close
339
+ #
340
+ # # Handle streamed responses. These may be interleaved with inputs.
341
+ # # Each response is of type ::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse.
342
+ # output.each do |response|
343
+ # p response
344
+ # end
345
+ #
346
def append_rows request, options = nil
  # The input must be an enumerable stream of requests; coerce anything
  # that merely responds to #to_enum.
  unless request.is_a? ::Enumerable
    raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum
    request = request.to_enum
  end

  # Coerce each element lazily so hashes are converted as they are sent.
  request = request.lazy.map { |req| ::Gapic::Protobuf.coerce req, to: ::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest }

  # Accept a plain Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Per-RPC default metadata plus the standard API headers.
  call_metadata = @config.rpcs.append_rows.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Method-level defaults take precedence over client-level defaults.
  options.apply_defaults timeout: @config.rpcs.append_rows.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.append_rows.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_write_stub.call_rpc :append_rows, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
383
+
384
+ ##
385
+ # Gets information about a write stream.
386
+ #
387
+ # @overload get_write_stream(request, options = nil)
388
+ # Pass arguments to `get_write_stream` via a request object, either of type
389
+ # {::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest} or an equivalent Hash.
390
+ #
391
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest, ::Hash]
392
+ # A request object representing the call parameters. Required. To specify no
393
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
394
+ # @param options [::Gapic::CallOptions, ::Hash]
395
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
396
+ #
397
+ # @overload get_write_stream(name: nil)
398
+ # Pass arguments to `get_write_stream` via keyword arguments. Note that at
399
+ # least one keyword argument is required. To specify no parameters, or to keep all
400
+ # the default parameter values, pass an empty Hash as a request object (see above).
401
+ #
402
+ # @param name [::String]
403
+ # Required. Name of the stream to get, in the form of
404
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
405
+ #
406
+ # @yield [response, operation] Access the result along with the RPC operation
407
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
408
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
409
+ #
410
+ # @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream]
411
+ #
412
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
413
+ #
414
+ # @example Basic example
415
+ # require "google/cloud/bigquery/storage/v1"
416
+ #
417
+ # # Create a client object. The client can be reused for multiple calls.
418
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
419
+ #
420
+ # # Create a request. To set request fields, pass in keyword arguments.
421
+ # request = Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest.new
422
+ #
423
+ # # Call the get_write_stream method.
424
+ # result = client.get_write_stream request
425
+ #
426
+ # # The returned object is of type Google::Cloud::Bigquery::Storage::V1::WriteStream.
427
+ # p result
428
+ #
429
def get_write_stream request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest

  # Accept a plain Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Per-RPC default metadata plus the standard API headers.
  call_metadata = @config.rpcs.get_write_stream.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  header_params = {}
  header_params["name"] = request.name if request.name
  call_metadata[:"x-goog-request-params"] ||= header_params.map { |k, v| "#{k}=#{v}" }.join "&"

  # Method-level defaults take precedence over client-level defaults.
  options.apply_defaults timeout: @config.rpcs.get_write_stream.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.get_write_stream.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_write_stub.call_rpc :get_write_stream, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
469
+
470
+ ##
471
+ # Finalize a write stream so that no new data can be appended to the
472
+ # stream. Finalize is not supported on the '_default' stream.
473
+ #
474
+ # @overload finalize_write_stream(request, options = nil)
475
+ # Pass arguments to `finalize_write_stream` via a request object, either of type
476
+ # {::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest} or an equivalent Hash.
477
+ #
478
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest, ::Hash]
479
+ # A request object representing the call parameters. Required. To specify no
480
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
481
+ # @param options [::Gapic::CallOptions, ::Hash]
482
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
483
+ #
484
+ # @overload finalize_write_stream(name: nil)
485
+ # Pass arguments to `finalize_write_stream` via keyword arguments. Note that at
486
+ # least one keyword argument is required. To specify no parameters, or to keep all
487
+ # the default parameter values, pass an empty Hash as a request object (see above).
488
+ #
489
+ # @param name [::String]
490
+ # Required. Name of the stream to finalize, in the form of
491
+ # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
492
+ #
493
+ # @yield [response, operation] Access the result along with the RPC operation
494
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse]
495
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
496
+ #
497
+ # @return [::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse]
498
+ #
499
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
500
+ #
501
+ # @example Basic example
502
+ # require "google/cloud/bigquery/storage/v1"
503
+ #
504
+ # # Create a client object. The client can be reused for multiple calls.
505
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
506
+ #
507
+ # # Create a request. To set request fields, pass in keyword arguments.
508
+ # request = Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest.new
509
+ #
510
+ # # Call the finalize_write_stream method.
511
+ # result = client.finalize_write_stream request
512
+ #
513
+ # # The returned object is of type Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse.
514
+ # p result
515
+ #
516
def finalize_write_stream request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest

  # Accept a plain Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Per-RPC default metadata plus the standard API headers.
  call_metadata = @config.rpcs.finalize_write_stream.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  header_params = {}
  header_params["name"] = request.name if request.name
  call_metadata[:"x-goog-request-params"] ||= header_params.map { |k, v| "#{k}=#{v}" }.join "&"

  # Method-level defaults take precedence over client-level defaults.
  options.apply_defaults timeout: @config.rpcs.finalize_write_stream.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.finalize_write_stream.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_write_stub.call_rpc :finalize_write_stream, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
556
+
557
+ ##
558
+ # Atomically commits a group of `PENDING` streams that belong to the same
559
+ # `parent` table.
560
+ #
561
+ # Streams must be finalized before commit and cannot be committed multiple
562
+ # times. Once a stream is committed, data in the stream becomes available
563
+ # for read operations.
564
+ #
565
+ # @overload batch_commit_write_streams(request, options = nil)
566
+ # Pass arguments to `batch_commit_write_streams` via a request object, either of type
567
+ # {::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest} or an equivalent Hash.
568
+ #
569
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest, ::Hash]
570
+ # A request object representing the call parameters. Required. To specify no
571
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
572
+ # @param options [::Gapic::CallOptions, ::Hash]
573
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
574
+ #
575
+ # @overload batch_commit_write_streams(parent: nil, write_streams: nil)
576
+ # Pass arguments to `batch_commit_write_streams` via keyword arguments. Note that at
577
+ # least one keyword argument is required. To specify no parameters, or to keep all
578
+ # the default parameter values, pass an empty Hash as a request object (see above).
579
+ #
580
+ # @param parent [::String]
581
+ # Required. Parent table that all the streams should belong to, in the form of
582
+ # `projects/{project}/datasets/{dataset}/tables/{table}`.
583
+ # @param write_streams [::Array<::String>]
584
+ # Required. The group of streams that will be committed atomically.
585
+ #
586
+ # @yield [response, operation] Access the result along with the RPC operation
587
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse]
588
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
589
+ #
590
+ # @return [::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse]
591
+ #
592
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
593
+ #
594
+ # @example Basic example
595
+ # require "google/cloud/bigquery/storage/v1"
596
+ #
597
+ # # Create a client object. The client can be reused for multiple calls.
598
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
599
+ #
600
+ # # Create a request. To set request fields, pass in keyword arguments.
601
+ # request = Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest.new
602
+ #
603
+ # # Call the batch_commit_write_streams method.
604
+ # result = client.batch_commit_write_streams request
605
+ #
606
+ # # The returned object is of type Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse.
607
+ # p result
608
+ #
609
def batch_commit_write_streams request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest

  # Accept a plain Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Per-RPC default metadata plus the standard API headers.
  call_metadata = @config.rpcs.batch_commit_write_streams.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  header_params = {}
  header_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= header_params.map { |k, v| "#{k}=#{v}" }.join "&"

  # Method-level defaults take precedence over client-level defaults.
  options.apply_defaults timeout: @config.rpcs.batch_commit_write_streams.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.batch_commit_write_streams.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_write_stub.call_rpc :batch_commit_write_streams, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
649
+
650
+ ##
651
+ # Flushes rows to a BUFFERED stream.
652
+ #
653
+ # If users are appending rows to BUFFERED stream, flush operation is
654
+ # required in order for the rows to become available for reading. A
655
+ # Flush operation flushes up to any previously flushed offset in a BUFFERED
656
+ # stream, to the offset specified in the request.
657
+ #
658
+ # Flush is not supported on the _default stream, since it is not BUFFERED.
659
+ #
660
+ # @overload flush_rows(request, options = nil)
661
+ # Pass arguments to `flush_rows` via a request object, either of type
662
+ # {::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest} or an equivalent Hash.
663
+ #
664
+ # @param request [::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest, ::Hash]
665
+ # A request object representing the call parameters. Required. To specify no
666
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
667
+ # @param options [::Gapic::CallOptions, ::Hash]
668
+ # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
669
+ #
670
+ # @overload flush_rows(write_stream: nil, offset: nil)
671
+ # Pass arguments to `flush_rows` via keyword arguments. Note that at
672
+ # least one keyword argument is required. To specify no parameters, or to keep all
673
+ # the default parameter values, pass an empty Hash as a request object (see above).
674
+ #
675
+ # @param write_stream [::String]
676
+ # Required. The stream that is the target of the flush operation.
677
+ # @param offset [::Google::Protobuf::Int64Value, ::Hash]
678
+ # Ending offset of the flush operation. Rows before this offset(including
679
+ # this offset) will be flushed.
680
+ #
681
+ # @yield [response, operation] Access the result along with the RPC operation
682
+ # @yieldparam response [::Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse]
683
+ # @yieldparam operation [::GRPC::ActiveCall::Operation]
684
+ #
685
+ # @return [::Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse]
686
+ #
687
+ # @raise [::Google::Cloud::Error] if the RPC is aborted.
688
+ #
689
+ # @example Basic example
690
+ # require "google/cloud/bigquery/storage/v1"
691
+ #
692
+ # # Create a client object. The client can be reused for multiple calls.
693
+ # client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
694
+ #
695
+ # # Create a request. To set request fields, pass in keyword arguments.
696
+ # request = Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest.new
697
+ #
698
+ # # Call the flush_rows method.
699
+ # result = client.flush_rows request
700
+ #
701
+ # # The returned object is of type Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse.
702
+ # p result
703
+ #
704
def flush_rows request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest

  # Accept a plain Hash (or nil) in place of a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Per-RPC default metadata plus the standard API headers.
  call_metadata = @config.rpcs.flush_rows.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigquery::Storage::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's target stream.
  header_params = {}
  header_params["write_stream"] = request.write_stream if request.write_stream
  call_metadata[:"x-goog-request-params"] ||= header_params.map { |k, v| "#{k}=#{v}" }.join "&"

  # Method-level defaults take precedence over client-level defaults.
  options.apply_defaults timeout: @config.rpcs.flush_rows.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.flush_rows.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @big_query_write_stub.call_rpc :flush_rows, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
744
+
745
##
# Configuration class for the BigQueryWrite API.
#
# This class represents the configuration for BigQueryWrite,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# @example
#
#   # Modify the global config, setting the timeout for
#   # create_write_stream to 20 seconds,
#   # and all remaining timeouts to 10 seconds.
#   ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.configure do |config|
#     config.timeout = 10.0
#     config.rpcs.create_write_stream.timeout = 20.0
#   end
#
#   # Apply the above configuration only to a new client.
#   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
#     config.timeout = 10.0
#     config.rpcs.create_write_stream.timeout = 20.0
#   end
#
# @!attribute [rw] endpoint
#   The hostname or hostname:port of the service endpoint.
#   Defaults to `"bigquerystorage.googleapis.com"`.
#   @return [::String]
# @!attribute [rw] credentials
#   Credentials to send with calls. You may provide any of the following types:
#    *  (`String`) The path to a service account key file in JSON format
#    *  (`Hash`) A service account key as a Hash
#    *  (`Google::Auth::Credentials`) A googleauth credentials object
#       (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
#    *  (`Signet::OAuth2::Client`) A signet oauth2 client object
#       (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
#    *  (`GRPC::Core::Channel`) a gRPC channel with included credentials
#    *  (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
#    *  (`nil`) indicating no credentials
#   @return [::Object]
# @!attribute [rw] scope
#   The OAuth scopes
#   @return [::Array<::String>]
# @!attribute [rw] lib_name
#   The library name as recorded in instrumentation and logging
#   @return [::String]
# @!attribute [rw] lib_version
#   The library version as recorded in instrumentation and logging
#   @return [::String]
# @!attribute [rw] channel_args
#   Extra parameters passed to the gRPC channel. Note: this is ignored if a
#   `GRPC::Core::Channel` object is provided as the credential.
#   @return [::Hash]
# @!attribute [rw] interceptors
#   An array of interceptors that are run before calls are executed.
#   @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
#   The call timeout in seconds.
#   @return [::Numeric]
# @!attribute [rw] metadata
#   Additional gRPC headers to be sent with the call.
#   @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
#   The retry policy. The value is a hash with the following keys:
#    *  `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
#    *  `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
#    *  `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
#    *  `:retry_codes` (*type:* `Array<String>`) - The error codes that should
#       trigger a retry.
#   @return [::Hash]
# @!attribute [rw] quota_project
#   A separate project against which to charge quota.
#   @return [::String]
#
class Configuration
  extend ::Gapic::Config

  config_attr :endpoint, "bigquerystorage.googleapis.com", ::String
  # Credentials accept several shapes; the block validates the assigned
  # value against the allowed types (gRPC channel types only when gRPC
  # is actually loaded).
  config_attr :credentials, nil do |value|
    allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
    allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
    allowed.any? { |klass| klass === value }
  end
  config_attr :scope, nil, ::String, ::Array, nil
  config_attr :lib_name, nil, ::String, nil
  config_attr :lib_version, nil, ::String, nil
  config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
  config_attr :interceptors, nil, ::Array, nil
  config_attr :timeout, nil, ::Numeric, nil
  config_attr :metadata, nil, ::Hash, nil
  config_attr :retry_policy, nil, ::Hash, ::Proc, nil
  config_attr :quota_project, nil, ::String, nil

  # @private
  # Builds a configuration, optionally inheriting defaults from a parent
  # configuration (e.g. the class-level config for a per-client config).
  def initialize parent_config = nil
    @parent_config = parent_config unless parent_config.nil?

    yield self if block_given?
  end

  ##
  # Configurations for individual RPCs
  # @return [Rpcs]
  #
  def rpcs
    # Memoized; inherits per-RPC settings from the parent config when present.
    @rpcs ||= begin
      parent_rpcs = nil
      parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
      Rpcs.new parent_rpcs
    end
  end

  ##
  # Configuration RPC class for the BigQueryWrite API.
  #
  # Includes fields providing the configuration for each RPC in this service.
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
  #  *  `timeout` (*type:* `Numeric`) - The call timeout in seconds
  #  *  `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  #  *  `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
  #     include the following keys:
  #      *  `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
  #      *  `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
  #      *  `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
  #      *  `:retry_codes` (*type:* `Array<String>`) - The error codes that should
  #         trigger a retry.
  #
  class Rpcs
    ##
    # RPC-specific configuration for `create_write_stream`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :create_write_stream
    ##
    # RPC-specific configuration for `append_rows`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :append_rows
    ##
    # RPC-specific configuration for `get_write_stream`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_write_stream
    ##
    # RPC-specific configuration for `finalize_write_stream`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :finalize_write_stream
    ##
    # RPC-specific configuration for `batch_commit_write_streams`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :batch_commit_write_streams
    ##
    # RPC-specific configuration for `flush_rows`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :flush_rows

    # @private
    # Wires each per-RPC config, inheriting from the parent's matching
    # RPC config when the parent responds to that RPC name.
    def initialize parent_rpcs = nil
      create_write_stream_config = parent_rpcs.create_write_stream if parent_rpcs.respond_to? :create_write_stream
      @create_write_stream = ::Gapic::Config::Method.new create_write_stream_config
      append_rows_config = parent_rpcs.append_rows if parent_rpcs.respond_to? :append_rows
      @append_rows = ::Gapic::Config::Method.new append_rows_config
      get_write_stream_config = parent_rpcs.get_write_stream if parent_rpcs.respond_to? :get_write_stream
      @get_write_stream = ::Gapic::Config::Method.new get_write_stream_config
      finalize_write_stream_config = parent_rpcs.finalize_write_stream if parent_rpcs.respond_to? :finalize_write_stream
      @finalize_write_stream = ::Gapic::Config::Method.new finalize_write_stream_config
      batch_commit_write_streams_config = parent_rpcs.batch_commit_write_streams if parent_rpcs.respond_to? :batch_commit_write_streams
      @batch_commit_write_streams = ::Gapic::Config::Method.new batch_commit_write_streams_config
      flush_rows_config = parent_rpcs.flush_rows if parent_rpcs.respond_to? :flush_rows
      @flush_rows = ::Gapic::Config::Method.new flush_rows_config

      yield self if block_given?
    end
  end
end
930
+ end
931
+ end
932
+ end
933
+ end
934
+ end
935
+ end
936
+ end