google-cloud-bigquery-data_transfer-v1 0.4.2 → 0.4.6
- checksums.yaml +4 -4
- data/.yardopts +1 -1
- data/AUTHENTICATION.md +7 -25
- data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb +379 -93
- data/lib/google/cloud/bigquery/data_transfer/v1/version.rb +1 -1
- data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb +2 -2
- data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb +3 -2
- data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb +3 -2
- data/proto_docs/google/api/resource.rb +10 -71
- data/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb +5 -7
- data/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb +25 -14
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0dc98a0216f9b79b237f5c2df3b6f0de5ede88ca709f8cd4e994327a227d29c8
+  data.tar.gz: 634ac33b77c2d07a4d2064c7fff482d488e596bd087ca6855ed1fae442416f83
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fda0deecc390eb39f5abd52c160dbefb67d23740c3a769650b27b356a665f46e92928ad3ccfe38a1278781ee45b75e1f6ac217d54d3b0973f9b1926f7c4e32a9
+  data.tar.gz: 937354981b887fd9649ea6b57ce7936686cbb13a095ef5a7a2e2fa49c106603d99e7cfb804d7858a60b6a414d8b822ece743b4449c6862a97b78d255b292871a
data/.yardopts
CHANGED
data/AUTHENTICATION.md
CHANGED
@@ -120,15 +120,6 @@ To configure your system for this, simply:
 **NOTE:** This is _not_ recommended for running in production. The Cloud SDK
 *should* only be used during development.
 
-[gce-how-to]: https://cloud.google.com/compute/docs/authentication#using
-[dev-console]: https://console.cloud.google.com/project
-
-[enable-apis]: https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/authentication/enable-apis.png
-
-[create-new-service-account]: https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/authentication/create-new-service-account.png
-[create-new-service-account-existing-keys]: https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/authentication/create-new-service-account-existing-keys.png
-[reuse-service-account]: https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/authentication/reuse-service-account.png
-
 ## Creating a Service Account
 
 Google Cloud requires **Service Account Credentials** to
@@ -139,31 +130,22 @@ If you are not running this client within
 [Google Cloud Platform environments](#google-cloud-platform-environments), you
 need a Google Developers service account.
 
-1. Visit the [Google
+1. Visit the [Google Cloud Console](https://console.cloud.google.com/project).
 2. Create a new project or click on an existing project.
-3. Activate the
+3. Activate the menu in the upper left and select **APIs & Services**. From
    here, you will enable the APIs that your application requires.
 
-   ![Enable the APIs that your application requires][enable-apis]
-
   *Note: You may need to enable billing in order to use these services.*
 
 4. Select **Credentials** from the side navigation.
 
-
-   ![Create a new service account][create-new-service-account]
-
-   ![Create a new service account With Existing Keys][create-new-service-account-existing-keys]
-
-   Find the "Add credentials" drop down and select "Service account" to be
-   guided through downloading a new JSON key file.
+   Find the "Create credentials" drop down near the top of the page, and select
+   "Service account" to be guided through downloading a new JSON key file.
 
 If you want to re-use an existing service account, you can easily generate a
-new key file. Just select the account you wish to re-use,
-
-
-![Re-use an existing service account][reuse-service-account]
+new key file. Just select the account you wish to re-use, click the pencil
+tool on the right side to edit the service account, select the **Keys** tab,
+and then select **Add Key**.
 
 The key file you download will be used by this library to authenticate API
 requests and should be stored in a secure location.
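The downloaded JSON key file is what the updated instructions above feed into the client. A minimal sketch of wiring it up in Ruby — the keyfile path is a placeholder, not a value from this diff:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

# Point the client at a service account key file downloaded as described above.
# "path/to/keyfile.json" is a placeholder; any credentials accepted by the
# client configuration (for example, Application Default Credentials) also work.
client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
  config.credentials = "path/to/keyfile.json"
end
```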
data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb
CHANGED
@@ -45,13 +45,12 @@ module Google
 # See {::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client::Configuration}
 # for a description of the configuration fields.
 #
-#
+# @example
 #
-#
-#
-#
-#
-# end
+#   # Modify the configuration for all DataTransferService clients
+#   ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
+#     config.timeout = 10.0
+#   end
 #
 # @yield [config] Configure the Client client.
 # @yieldparam config [Client::Configuration]
@@ -154,19 +153,15 @@ module Google
 ##
 # Create a new DataTransferService client object.
 #
-#
-#
-# To create a new DataTransferService client with the default
-# configuration:
+# @example
 #
-#
+#   # Create a client using the default configuration
+#   client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
 #
-#
-#
-#
-#
-# config.timeout = 10.0
-# end
+#   # Create a client using a custom configuration
+#   client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
+#     config.timeout = 10.0
+#   end
 #
 # @yield [config] Configure the DataTransferService client.
 # @yieldparam config [Client::Configuration]
@@ -186,10 +181,9 @@ module Google
 
 # Create credentials
 credentials = @config.credentials
-# Use self-signed JWT if the
+# Use self-signed JWT if the endpoint is unchanged from default,
 # but only if the default endpoint does not have a region prefix.
-enable_self_signed_jwt = @config.
-  @config.endpoint == Client.configure.endpoint &&
+enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
   !@config.endpoint.split(".").first.include?("-")
 credentials ||= Credentials.default scope: @config.scope,
   enable_self_signed_jwt: enable_self_signed_jwt
@@ -242,6 +236,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new
+#
+#   # Call the get_data_source method.
+#   result = client.get_data_source request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::DataSource.
+#   p result
+#
 def get_data_source request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -259,16 +268,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.name
+  header_params["name"] = request.name
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.get_data_source.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.get_data_source.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :get_data_source, request, options: options do |response, operation|
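The new guards mean the `x-goog-request-params` routing header is only populated when the field is actually set. A rough sketch of what the construction above produces — the resource name is illustrative, not from this diff:

```ruby
# Illustrative only: mirrors the header_params logic shown in the hunk above.
request = Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(
  name: "projects/my-project/locations/us/dataSources/scheduled_query" # placeholder
)

header_params = {}
header_params["name"] = request.name if request.name

# Joined into the x-goog-request-params metadata entry:
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
# => "name=projects/my-project/locations/us/dataSources/scheduled_query"
```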
@@ -319,6 +332,27 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new
+#
+#   # Call the list_data_sources method.
+#   result = client.list_data_sources request
+#
+#   # The returned object is of type Gapic::PagedEnumerable. You can
+#   # iterate over all elements by calling #each, and the enumerable
+#   # will lazily make API calls to fetch subsequent pages. Other
+#   # methods are also available for managing paging directly.
+#   result.each do |response|
+#     # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource.
+#     p response
+#   end
+#
 def list_data_sources request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -336,16 +370,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.parent
+  header_params["parent"] = request.parent
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.list_data_sources.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.list_data_sources.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :list_data_sources, request, options: options do |response, operation|
@@ -420,6 +458,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest.new
+#
+#   # Call the create_transfer_config method.
+#   result = client.create_transfer_config request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
+#   p result
+#
 def create_transfer_config request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -437,16 +490,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.parent
+  header_params["parent"] = request.parent
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.create_transfer_config.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.create_transfer_config.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :create_transfer_config, request, options: options do |response, operation|
@@ -519,6 +576,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest.new
+#
+#   # Call the update_transfer_config method.
+#   result = client.update_transfer_config request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
+#   p result
+#
 def update_transfer_config request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -536,16 +608,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.transfer_config&.name
+  header_params["transfer_config.name"] = request.transfer_config.name
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.update_transfer_config.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.update_transfer_config.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :update_transfer_config, request, options: options do |response, operation|
@@ -588,6 +664,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest.new
+#
+#   # Call the delete_transfer_config method.
+#   result = client.delete_transfer_config request
+#
+#   # The returned object is of type Google::Protobuf::Empty.
+#   p result
+#
 def delete_transfer_config request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -605,16 +696,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.name
+  header_params["name"] = request.name
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.delete_transfer_config.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.delete_transfer_config.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :delete_transfer_config, request, options: options do |response, operation|
@@ -656,6 +751,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest.new
+#
+#   # Call the get_transfer_config method.
+#   result = client.get_transfer_config request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
+#   p result
+#
 def get_transfer_config request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -673,16 +783,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.name
+  header_params["name"] = request.name
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.get_transfer_config.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.get_transfer_config.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :get_transfer_config, request, options: options do |response, operation|
@@ -694,7 +808,8 @@ module Google
 end
 
 ##
-# Returns information about all
+# Returns information about all transfer configs owned by a project in the
+# specified location.
 #
 # @overload list_transfer_configs(request, options = nil)
 #   Pass arguments to `list_transfer_configs` via a request object, either of type
@@ -734,6 +849,27 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest.new
+#
+#   # Call the list_transfer_configs method.
+#   result = client.list_transfer_configs request
+#
+#   # The returned object is of type Gapic::PagedEnumerable. You can
+#   # iterate over all elements by calling #each, and the enumerable
+#   # will lazily make API calls to fetch subsequent pages. Other
+#   # methods are also available for managing paging directly.
+#   result.each do |response|
+#     # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.
+#     p response
+#   end
+#
 def list_transfer_configs request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -751,16 +887,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.parent
+  header_params["parent"] = request.parent
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.list_transfer_configs.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.list_transfer_configs.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :list_transfer_configs, request, options: options do |response, operation|
@@ -779,6 +919,8 @@ module Google
 # Note that runs are created per UTC time in the time range.
 # DEPRECATED: use StartManualTransferRuns instead.
 #
+# @deprecated This method is deprecated and may be removed in the next major version update.
+#
 # @overload schedule_transfer_runs(request, options = nil)
 #   Pass arguments to `schedule_transfer_runs` via a request object, either of type
 #   {::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest} or an equivalent Hash.
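Since `schedule_transfer_runs` now carries a `@deprecated` tag pointing at `StartManualTransferRuns`, here is a hedged sketch of the suggested replacement call. The parent name and run time are placeholders, and passing a plain hash for the `Timestamp` field is an assumption about protobuf coercion, not something shown in this diff:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Instead of schedule_transfer_runs, trigger a run explicitly at one point in time.
request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new(
  parent: "projects/my-project/locations/us/transferConfigs/my-config", # placeholder
  requested_run_time: { seconds: Time.now.to_i }                        # assumed field
)
response = client.start_manual_transfer_runs request
```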
@@ -813,6 +955,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new
+#
+#   # Call the schedule_transfer_runs method.
+#   result = client.schedule_transfer_runs request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse.
+#   p result
+#
 def schedule_transfer_runs request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -830,16 +987,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.parent
+  header_params["parent"] = request.parent
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.schedule_transfer_runs.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.schedule_transfer_runs.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :schedule_transfer_runs, request, options: options do |response, operation|
@@ -889,6 +1050,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsRequest.new
+#
+#   # Call the start_manual_transfer_runs method.
+#   result = client.start_manual_transfer_runs request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::StartManualTransferRunsResponse.
+#   p result
+#
 def start_manual_transfer_runs request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -906,16 +1082,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.parent
+  header_params["parent"] = request.parent
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.start_manual_transfer_runs.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.start_manual_transfer_runs.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :start_manual_transfer_runs, request, options: options do |response, operation|
@@ -957,6 +1137,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest.new
+#
+#   # Call the get_transfer_run method.
+#   result = client.get_transfer_run request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.
+#   p result
+#
 def get_transfer_run request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -974,16 +1169,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.name
+  header_params["name"] = request.name
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.get_transfer_run.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.get_transfer_run.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :get_transfer_run, request, options: options do |response, operation|
@@ -1025,6 +1224,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest.new
+#
+#   # Call the delete_transfer_run method.
+#   result = client.delete_transfer_run request
+#
+#   # The returned object is of type Google::Protobuf::Empty.
+#   p result
+#
 def delete_transfer_run request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -1042,16 +1256,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.name
+  header_params["name"] = request.name
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.delete_transfer_run.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.delete_transfer_run.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :delete_transfer_run, request, options: options do |response, operation|
@@ -1106,6 +1324,27 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest.new
+#
+#   # Call the list_transfer_runs method.
+#   result = client.list_transfer_runs request
+#
+#   # The returned object is of type Gapic::PagedEnumerable. You can
+#   # iterate over all elements by calling #each, and the enumerable
+#   # will lazily make API calls to fetch subsequent pages. Other
+#   # methods are also available for managing paging directly.
+#   result.each do |response|
+#     # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.
+#     p response
+#   end
+#
 def list_transfer_runs request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -1123,16 +1362,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.parent
+  header_params["parent"] = request.parent
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.list_transfer_runs.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.list_transfer_runs.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :list_transfer_runs, request, options: options do |response, operation|
@@ -1186,6 +1429,27 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest.new
+#
+#   # Call the list_transfer_logs method.
+#   result = client.list_transfer_logs request
+#
+#   # The returned object is of type Gapic::PagedEnumerable. You can
+#   # iterate over all elements by calling #each, and the enumerable
+#   # will lazily make API calls to fetch subsequent pages. Other
+#   # methods are also available for managing paging directly.
+#   result.each do |response|
+#     # Each element is of type ::Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage.
+#     p response
+#   end
+#
 def list_transfer_logs request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -1203,16 +1467,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.parent
+  header_params["parent"] = request.parent
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.list_transfer_logs.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.list_transfer_logs.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :list_transfer_logs, request, options: options do |response, operation|
@@ -1260,6 +1528,21 @@ module Google
 #
 # @raise [::Google::Cloud::Error] if the RPC is aborted.
 #
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest.new
+#
+#   # Call the check_valid_creds method.
+#   result = client.check_valid_creds request
+#
+#   # The returned object is of type Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse.
+#   p result
+#
 def check_valid_creds request, options = nil
   raise ::ArgumentError, "request must be provided" if request.nil?
 
@@ -1277,16 +1560,20 @@ module Google
   gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
 metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
 
-header_params = {
-
-
+header_params = {}
+if request.name
+  header_params["name"] = request.name
+end
+
 request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
 metadata[:"x-goog-request-params"] ||= request_params_header
 
 options.apply_defaults timeout: @config.rpcs.check_valid_creds.timeout,
   metadata: metadata,
   retry_policy: @config.rpcs.check_valid_creds.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+  metadata: @config.metadata,
   retry_policy: @config.retry_policy
 
 @data_transfer_service_stub.call_rpc :check_valid_creds, request, options: options do |response, operation|
@@ -1310,22 +1597,21 @@ module Google
 # Configuration can be applied globally to all clients, or to a single client
 # on construction.
 #
-#
-#
-#
-# to 20 seconds,
-#
-#
-#
-#
-#
-#
-#
-#
-#
-#
-#
-# end
+# @example
+#
+#   # Modify the global config, setting the timeout for
+#   # get_data_source to 20 seconds,
+#   # and all remaining timeouts to 10 seconds.
+#   ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.configure do |config|
+#     config.timeout = 10.0
+#     config.rpcs.get_data_source.timeout = 20.0
+#   end
+#
+#   # Apply the above configuration only to a new client.
+#   client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
+#     config.timeout = 10.0
+#     config.rpcs.get_data_source.timeout = 20.0
+#   end
 #
 # @!attribute [rw] endpoint
 #   The hostname or hostname:port of the service endpoint.
data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb
CHANGED
@@ -1,8 +1,6 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto
 
-require 'google/protobuf'
-
 require 'google/api/annotations_pb'
 require 'google/api/client_pb'
 require 'google/api/field_behavior_pb'
@@ -13,6 +11,8 @@ require 'google/protobuf/empty_pb'
 require 'google/protobuf/field_mask_pb'
 require 'google/protobuf/timestamp_pb'
 require 'google/protobuf/wrappers_pb'
+require 'google/protobuf'
+
 Google::Protobuf::DescriptorPool.generated_pool.build do
   add_file("google/cloud/bigquery/datatransfer/v1/datatransfer.proto", :syntax => :proto3) do
     add_message "google.cloud.bigquery.datatransfer.v1.DataSourceParameter" do
data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb
CHANGED
@@ -31,7 +31,7 @@ module Google
 # up the frontend.
 class Service
 
-  include GRPC::GenericService
+  include ::GRPC::GenericService
 
   self.marshal_class_method = :encode
   self.unmarshal_class_method = :decode
@@ -53,7 +53,8 @@ module Google
 rpc :DeleteTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Google::Protobuf::Empty
 # Returns information about a data transfer config.
 rpc :GetTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
-# Returns information about all
+# Returns information about all transfer configs owned by a project in the
+# specified location.
 rpc :ListTransferConfigs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse
 # Creates transfer runs for a time range [start_time, end_time].
 # For each date - or whatever granularity the data source supports - in the
data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb
CHANGED
@@ -1,13 +1,14 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/datatransfer/v1/transfer.proto
 
-require 'google/protobuf'
-
 require 'google/api/field_behavior_pb'
 require 'google/api/resource_pb'
+require 'google/protobuf/duration_pb'
 require 'google/protobuf/struct_pb'
 require 'google/protobuf/timestamp_pb'
 require 'google/rpc/status_pb'
+require 'google/protobuf'
+
 Google::Protobuf::DescriptorPool.generated_pool.build do
   add_file("google/cloud/bigquery/datatransfer/v1/transfer.proto", :syntax => :proto3) do
     add_message "google.cloud.bigquery.datatransfer.v1.EmailPreferences" do
data/proto_docs/google/api/resource.rb
CHANGED
@@ -33,11 +33,7 @@ module Google
 # // For Kubernetes resources, the format is {api group}/{kind}.
 # option (google.api.resource) = {
 #   type: "pubsub.googleapis.com/Topic"
-#
-#   pattern: "projects/{project}/topics/{topic}"
-#   parent_type: "cloudresourcemanager.googleapis.com/Project"
-#   parent_name_extractor: "projects/{project}"
-# }
+#   pattern: "projects/{project}/topics/{topic}"
 # };
 # }
 #
@@ -45,10 +41,7 @@ module Google
 #
 # resources:
 # - type: "pubsub.googleapis.com/Topic"
-#
-#   - pattern: "projects/{project}/topics/{topic}"
-#     parent_type: "cloudresourcemanager.googleapis.com/Project"
-#     parent_name_extractor: "projects/{project}"
+#   pattern: "projects/{project}/topics/{topic}"
 #
 # Sometimes, resources have multiple patterns, typically because they can
 # live under multiple parents.
@@ -58,26 +51,10 @@ module Google
 # message LogEntry {
 #   option (google.api.resource) = {
 #     type: "logging.googleapis.com/LogEntry"
-#
-#
-#
-#
-#     }
-#     name_descriptor: {
-#       pattern: "folders/{folder}/logs/{log}"
-#       parent_type: "cloudresourcemanager.googleapis.com/Folder"
-#       parent_name_extractor: "folders/{folder}"
-#     }
-#     name_descriptor: {
-#       pattern: "organizations/{organization}/logs/{log}"
-#       parent_type: "cloudresourcemanager.googleapis.com/Organization"
-#       parent_name_extractor: "organizations/{organization}"
-#     }
-#     name_descriptor: {
-#       pattern: "billingAccounts/{billing_account}/logs/{log}"
-#       parent_type: "billing.googleapis.com/BillingAccount"
-#       parent_name_extractor: "billingAccounts/{billing_account}"
-#     }
+#     pattern: "projects/{project}/logs/{log}"
+#     pattern: "folders/{folder}/logs/{log}"
+#     pattern: "organizations/{organization}/logs/{log}"
+#     pattern: "billingAccounts/{billing_account}/logs/{log}"
 #   };
 # }
 #
@@ -85,48 +62,10 @@ module Google
 #
 # resources:
 # - type: 'logging.googleapis.com/LogEntry'
-#
-#
-#
-#
-#   - pattern: "folders/{folder}/logs/{log}"
-#     parent_type: "cloudresourcemanager.googleapis.com/Folder"
-#     parent_name_extractor: "folders/{folder}"
-#   - pattern: "organizations/{organization}/logs/{log}"
-#     parent_type: "cloudresourcemanager.googleapis.com/Organization"
-#     parent_name_extractor: "organizations/{organization}"
-#   - pattern: "billingAccounts/{billing_account}/logs/{log}"
-#     parent_type: "billing.googleapis.com/BillingAccount"
-#     parent_name_extractor: "billingAccounts/{billing_account}"
-#
-# For flexible resources, the resource name doesn't contain parent names, but
-# the resource itself has parents for policy evaluation.
-#
-# Example:
-#
-# message Shelf {
-#   option (google.api.resource) = {
-#     type: "library.googleapis.com/Shelf"
-#     name_descriptor: {
-#       pattern: "shelves/{shelf}"
-#       parent_type: "cloudresourcemanager.googleapis.com/Project"
-#     }
-#     name_descriptor: {
-#       pattern: "shelves/{shelf}"
-#       parent_type: "cloudresourcemanager.googleapis.com/Folder"
-#     }
-#   };
-# }
-#
-# The ResourceDescriptor Yaml config will look like:
-#
-# resources:
-# - type: 'library.googleapis.com/Shelf'
-#   name_descriptor:
-#     - pattern: "shelves/{shelf}"
-#       parent_type: "cloudresourcemanager.googleapis.com/Project"
-#     - pattern: "shelves/{shelf}"
-#       parent_type: "cloudresourcemanager.googleapis.com/Folder"
+#   pattern: "projects/{project}/logs/{log}"
+#   pattern: "folders/{folder}/logs/{log}"
+#   pattern: "organizations/{organization}/logs/{log}"
+#   pattern: "billingAccounts/{billing_account}/logs/{log}"
 # @!attribute [rw] type
 # @return [::String]
 #   The resource type. It must be in the format of
data/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb
CHANGED
@@ -195,9 +195,7 @@ module Google
 # exchanged for a refresh token on the backend.
 GOOGLE_PLUS_AUTHORIZATION_CODE = 2
 
-# Use First Party
-# refresh token to get an offline access token. Instead, it uses a
-# client-signed JWT assertion to retrieve an access token.
+# Use First Party OAuth.
 FIRST_PARTY_OAUTH = 3
 end
 
@@ -617,14 +615,14 @@ module Google
 # @return [::Google::Protobuf::Timestamp]
 #   Start time of the range of transfer runs. For example,
 #   `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than
-#   the end_time. Creates transfer runs where run_time is in the range
-#   start_time (inclusive) and end_time (
+#   the end_time. Creates transfer runs where run_time is in the range
+#   between start_time (inclusive) and end_time (exclusive).
 # @!attribute [rw] end_time
 # @return [::Google::Protobuf::Timestamp]
 #   End time of the range of transfer runs. For example,
 #   `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future.
-#   Creates transfer runs where run_time is in the range
-#   (inclusive) and end_time (
+#   Creates transfer runs where run_time is in the range between start_time
+#   (inclusive) and end_time (exclusive).
 class TimeRange
   include ::Google::Protobuf::MessageExts
   extend ::Google::Protobuf::MessageExts::ClassMethods
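The clarified wording pins down the half-open interval: run_time falls in [start_time, end_time). A hedged sketch of a request using such a range — the parent is a placeholder, the dates come from the doc examples above, and passing plain hashes for the `Timestamp` fields relies on protobuf hash coercion:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

# Runs are created for run_time >= start_time and run_time < end_time (UTC).
request = Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest.new(
  parent:     "projects/my-project/locations/us/transferConfigs/my-config", # placeholder
  start_time: { seconds: Time.utc(2017, 5, 25).to_i },  # inclusive
  end_time:   { seconds: Time.utc(2017, 5, 30).to_i }   # exclusive
)
```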
data/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb
CHANGED
@@ -66,12 +66,11 @@ module Google
 # @!attribute [rw] name
 # @return [::String]
 #   The resource name of the transfer config.
-#   Transfer config names have the form
+#   Transfer config names have the form
 #   `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
-#
-#
-#
-#   required, will be generated for config_id.
+#   Where `config_id` is usually a uuid, even though it is not
+#   guaranteed or required. The name is ignored when creating a transfer
+#   config.
 # @!attribute [rw] destination_dataset_id
 # @return [::String]
 #   The BigQuery target dataset id.
@@ -83,7 +82,10 @@ module Google
 #   Data source id. Cannot be changed once data transfer is created.
 # @!attribute [rw] params
 # @return [::Google::Protobuf::Struct]
-#
+#   Parameters specific to each data source. For more information see the
+#   bq tab in the 'Setting up a data transfer' section for each data source.
+#   For example the parameters for Cloud Storage transfers are listed here:
+#   https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
 # @!attribute [rw] schedule
 # @return [::String]
 #   Data transfer schedule.
@@ -132,6 +134,9 @@ module Google
 # @return [::String]
 #   Pub/Sub topic where notifications will be sent after transfer runs
 #   associated with this transfer config finish.
+#
+#   The format for specifying a pubsub topic is:
+#   `projects/{project}/topics/{topic}`
 # @!attribute [rw] email_preferences
 # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
 #   Email notifications will be sent according to these preferences
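The documented topic format can be set directly on a transfer config. A hedged sketch — the display name, data source id, project, and topic are placeholders, not values from this diff:

```ruby
# notification_pubsub_topic must use the documented format:
#   projects/{project}/topics/{topic}
config = Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new(
  display_name:              "My transfer",                                   # placeholder
  data_source_id:            "scheduled_query",                               # placeholder
  notification_pubsub_topic: "projects/my-project/topics/transfer-run-events" # placeholder
)
```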
@@ -171,7 +176,10 @@ module Google
 #   Output only. Last time the data transfer run state was updated.
 # @!attribute [r] params
 # @return [::Google::Protobuf::Struct]
-#   Output only.
+#   Output only. Parameters specific to each data source. For more information see the
+#   bq tab in the 'Setting up a data transfer' section for each data source.
+#   For example the parameters for Cloud Storage transfers are listed here:
+#   https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
 # @!attribute [r] destination_dataset_id
 # @return [::String]
 #   Output only. The BigQuery target dataset id.
@@ -194,7 +202,10 @@ module Google
 # @!attribute [r] notification_pubsub_topic
 # @return [::String]
 #   Output only. Pub/Sub topic where a notification will be sent after this
-#   transfer run finishes
+#   transfer run finishes.
+#
+#   The format for specifying a pubsub topic is:
+#   `projects/{project}/topics/{topic}`
 # @!attribute [r] email_preferences
 # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
 #   Output only. Email notifications will be sent according to these
@@ -250,23 +261,23 @@ module Google
 
 # Represents data transfer run state.
 module TransferState
-  # State placeholder.
+  # State placeholder (0).
   TRANSFER_STATE_UNSPECIFIED = 0
 
   # Data transfer is scheduled and is waiting to be picked up by
-  # data transfer backend.
+  # data transfer backend (2).
   PENDING = 2
 
-  # Data transfer is in progress.
+  # Data transfer is in progress (3).
   RUNNING = 3
 
-  # Data transfer completed successfully.
+  # Data transfer completed successfully (4).
   SUCCEEDED = 4
 
-  # Data transfer failed.
+  # Data transfer failed (5).
   FAILED = 5
 
-  # Data transfer is cancelled.
+  # Data transfer is cancelled (6).
   CANCELLED = 6
 end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: google-cloud-bigquery-data_transfer-v1
 version: !ruby/object:Gem::Version
-  version: 0.4.2
+  version: 0.4.6
 platform: ruby
 authors:
 - Google LLC
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2022-01-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: gapic-common
@@ -16,7 +16,7 @@ dependencies:
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '0.
+      version: '0.7'
   - - "<"
     - !ruby/object:Gem::Version
       version: 2.a
@@ -26,7 +26,7 @@ dependencies:
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '0.
+      version: '0.7'
  - - "<"
    - !ruby/object:Gem::Version
      version: 2.a
@@ -212,7 +212,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.3.4
 signing_key:
 specification_version: 4
 summary: API Client library for the BigQuery Data Transfer Service V1 API