google-cloud-bigquery-data_transfer-v1 0.4.6 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +11 -6
- data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb +108 -16
- data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb +1 -4
- data/lib/google/cloud/bigquery/data_transfer/v1/version.rb +1 -1
- data/lib/google/cloud/bigquery/data_transfer/v1.rb +2 -0
- data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb +7 -1
- data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb +14 -16
- data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb +7 -2
- data/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb +21 -14
- data/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb +17 -1
- data/proto_docs/google/protobuf/any.rb +3 -3
- data/proto_docs/google/protobuf/struct.rb +2 -2
- metadata +12 -12
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-metadata.gz:
-data.tar.gz:
+metadata.gz: 0d3f6c8c15ca8faff1299d946c2a6ca716bc17b770eb2412765a3074d9089b7f
+data.tar.gz: 393a5f7da248e86dc0b85174295c300838faaeaba5f06cd6221fea22cd2c556a
 SHA512:
-metadata.gz:
-data.tar.gz:
+metadata.gz: e745340a4b582f05927a65c8717b936831cc58f436ce61b5bfba2bfc4e4a928174da082f1c407f59bbbbb07accab236492403ee3e97cf4c1fdfbb1580dc882c4
+data.tar.gz: bf8fab631237c803b71212bf01f0375a8e078dc592d0eb49ed9ea3f5d2fdb11e363db67ec3804e8f8cc06e091ecc8bab2f60eeb90d5a93441a9927d1c89d1dfb
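These checksums are for the `metadata.gz` and `data.tar.gz` entries inside the published `.gem` archive. A minimal verification sketch, assuming the gem has already been downloaded (for example with `gem fetch google-cloud-bigquery-data_transfer-v1 -v 0.6.0`) and that the filename below matches what RubyGems produced:

```ruby
require "digest"
require "rubygems/package"

# SHA256 values recorded above in checksums.yaml for the 0.6.0 release.
EXPECTED = {
  "metadata.gz" => "0d3f6c8c15ca8faff1299d946c2a6ca716bc17b770eb2412765a3074d9089b7f",
  "data.tar.gz" => "393a5f7da248e86dc0b85174295c300838faaeaba5f06cd6221fea22cd2c556a"
}.freeze

# A .gem file is a plain tar archive whose entries include metadata.gz and
# data.tar.gz; hash each entry and compare it with the recorded checksum.
File.open("google-cloud-bigquery-data_transfer-v1-0.6.0.gem", "rb") do |io|
  Gem::Package::TarReader.new(io) do |tar|
    tar.each do |entry|
      next unless EXPECTED.key?(entry.full_name)
      actual = Digest::SHA256.hexdigest(entry.read)
      puts "#{entry.full_name}: #{actual == EXPECTED[entry.full_name] ? 'OK' : 'MISMATCH'}"
    end
  end
end
```

A mismatch would indicate that the local file does not correspond to the release whose checksums are listed above.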
data/README.md
CHANGED
@@ -37,7 +37,7 @@ request = ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new
 response = client.get_data_source request
 ```
 
-View the [Client Library Documentation](https://
+View the [Client Library Documentation](https://cloud.google.com/ruby/docs/reference/google-cloud-bigquery-data_transfer-v1/latest)
 for class and method documentation.
 
 See also the [Product Documentation](https://cloud.google.com/bigquery/transfer)
@@ -69,16 +69,21 @@ module GRPC
 end
 ```
 
+
+## Google Cloud Samples
+
+To browse ready to use code samples check [Google Cloud Samples](https://cloud.google.com/docs/samples).
+
 ## Supported Ruby Versions
 
-This library is supported on Ruby 2.
+This library is supported on Ruby 2.6+.
 
 Google provides official support for Ruby versions that are actively supported
 by Ruby Core—that is, Ruby versions that are either in normal maintenance or
-in security maintenance, and not end of life.
-
-
-
+in security maintenance, and not end of life. Older versions of Ruby _may_
+still work, but are unsupported and not recommended. See
+https://www.ruby-lang.org/en/downloads/branches/ for details about the Ruby
+support schedule.
 
 ## Which client should I use?
 
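For reference, the quickstart that the README points at reduces to roughly the following sketch; the project and data source names are placeholder values, and credentials are assumed to come from Application Default Credentials:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

# Create a client; credentials come from the environment (ADC).
client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Build the request from the README example; the resource name is a placeholder.
request = ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest.new(
  name: "projects/my-project/dataSources/scheduled_query"
)

response = client.get_data_source request
puts response.display_name
```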
data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service/client.rb
CHANGED
@@ -28,10 +28,7 @@ module Google
 ##
 # Client for the DataTransferService service.
 #
-#
-# configure the transfer of their data from other Google Products into
-# BigQuery. This service contains methods that are end user exposed. It backs
-# up the frontend.
+# This API allows users to manage their data transfers into BigQuery.
 #
 class Client
 include Paths
@@ -205,8 +202,7 @@ module Google
 # Service calls
 
 ##
-# Retrieves a supported data source and returns its settings
-# which can be used for UI rendering.
+# Retrieves a supported data source and returns its settings.
 #
 # @overload get_data_source(request, options = nil)
 #   Pass arguments to `get_data_source` via a request object, either of type
@@ -293,8 +289,7 @@ module Google
 end
 
 ##
-# Lists supported data sources and returns their settings
-# which can be used for UI rendering.
+# Lists supported data sources and returns their settings.
 #
 # @overload list_data_sources(request, options = nil)
 #   Pass arguments to `list_data_sources` via a request object, either of type
@@ -633,8 +628,8 @@ module Google
 end
 
 ##
-# Deletes a data transfer configuration,
-#
+# Deletes a data transfer configuration, including any associated transfer
+# runs and logs.
 #
 # @overload delete_transfer_config(request, options = nil)
 #   Pass arguments to `delete_transfer_config` via a request object, either of type
@@ -1281,7 +1276,7 @@ module Google
 end
 
 ##
-# Returns information about running and completed
+# Returns information about running and completed transfer runs.
 #
 # @overload list_transfer_runs(request, options = nil)
 #   Pass arguments to `list_transfer_runs` via a request object, either of type
@@ -1388,7 +1383,7 @@ module Google
 end
 
 ##
-# Returns
+# Returns log messages for the transfer run.
 #
 # @overload list_transfer_logs(request, options = nil)
 #   Pass arguments to `list_transfer_logs` via a request object, either of type
@@ -1495,10 +1490,6 @@ module Google
 ##
 # Returns true if valid credentials exist for the given data source and
 # requesting user.
-# Some data sources doesn't support service account, so we need to talk to
-# them on behalf of the end user. This API just checks whether we have OAuth
-# token for the particular user, which is a pre-requisite before user can
-# create a transfer config.
 #
 # @overload check_valid_creds(request, options = nil)
 #   Pass arguments to `check_valid_creds` via a request object, either of type
@@ -1584,6 +1575,100 @@ module Google
 raise ::Google::Cloud::Error.from_error(e)
 end
 
+##
+# Enroll data sources in a user project. This allows users to create transfer
+# configurations for these data sources. They will also appear in the
+# ListDataSources RPC and as such, will appear in the BigQuery UI
+# 'https://bigquery.cloud.google.com' (and the documents can be found at
+# https://cloud.google.com/bigquery/bigquery-web-ui and
+# https://cloud.google.com/bigquery/docs/working-with-transfers).
+#
+# @overload enroll_data_sources(request, options = nil)
+#   Pass arguments to `enroll_data_sources` via a request object, either of type
+#   {::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest} or an equivalent Hash.
+#
+#   @param request [::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Hash]
+#     A request object representing the call parameters. Required. To specify no
+#     parameters, or to keep all the default parameter values, pass an empty Hash.
+#   @param options [::Gapic::CallOptions, ::Hash]
+#     Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
+#
+# @overload enroll_data_sources(name: nil, data_source_ids: nil)
+#   Pass arguments to `enroll_data_sources` via keyword arguments. Note that at
+#   least one keyword argument is required. To specify no parameters, or to keep all
+#   the default parameter values, pass an empty Hash as a request object (see above).
+#
+#   @param name [::String]
+#     The name of the project resource in the form:
+#     `projects/{project_id}`
+#   @param data_source_ids [::Array<::String>]
+#     Data sources that are enrolled. It is required to provide at least one
+#     data source id.
+#
+# @yield [response, operation] Access the result along with the RPC operation
+# @yieldparam response [::Google::Protobuf::Empty]
+# @yieldparam operation [::GRPC::ActiveCall::Operation]
+#
+# @return [::Google::Protobuf::Empty]
+#
+# @raise [::Google::Cloud::Error] if the RPC is aborted.
+#
+# @example Basic example
+#   require "google/cloud/bigquery/data_transfer/v1"
+#
+#   # Create a client object. The client can be reused for multiple calls.
+#   client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
+#
+#   # Create a request. To set request fields, pass in keyword arguments.
+#   request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new
+#
+#   # Call the enroll_data_sources method.
+#   result = client.enroll_data_sources request
+#
+#   # The returned object is of type Google::Protobuf::Empty.
+#   p result
+#
+def enroll_data_sources request, options = nil
+  raise ::ArgumentError, "request must be provided" if request.nil?
+
+  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest
+
+  # Converts hash and nil to an options object
+  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
+
+  # Customize the options with defaults
+  metadata = @config.rpcs.enroll_data_sources.metadata.to_h
+
+  # Set x-goog-api-client and x-goog-user-project headers
+  metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
+    lib_name: @config.lib_name, lib_version: @config.lib_version,
+    gapic_version: ::Google::Cloud::Bigquery::DataTransfer::V1::VERSION
+  metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
+
+  header_params = {}
+  if request.name
+    header_params["name"] = request.name
+  end
+
+  request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
+  metadata[:"x-goog-request-params"] ||= request_params_header
+
+  options.apply_defaults timeout: @config.rpcs.enroll_data_sources.timeout,
+                         metadata: metadata,
+                         retry_policy: @config.rpcs.enroll_data_sources.retry_policy
+
+  options.apply_defaults timeout: @config.timeout,
+                         metadata: @config.metadata,
+                         retry_policy: @config.retry_policy
+
+  @data_transfer_service_stub.call_rpc :enroll_data_sources, request, options: options do |response, operation|
+    yield response, operation if block_given?
+    return response
+  end
+rescue ::GRPC::BadStatus => e
+  raise ::Google::Cloud::Error.from_error(e)
+end
+
 ##
 # Configuration class for the DataTransferService API.
 #
@@ -1789,6 +1874,11 @@ module Google
 # @return [::Gapic::Config::Method]
 #
 attr_reader :check_valid_creds
+##
+# RPC-specific configuration for `enroll_data_sources`
+# @return [::Gapic::Config::Method]
+#
+attr_reader :enroll_data_sources
 
 # @private
 def initialize parent_rpcs = nil
@@ -1820,6 +1910,8 @@ module Google
 @list_transfer_logs = ::Gapic::Config::Method.new list_transfer_logs_config
 check_valid_creds_config = parent_rpcs.check_valid_creds if parent_rpcs.respond_to? :check_valid_creds
 @check_valid_creds = ::Gapic::Config::Method.new check_valid_creds_config
+enroll_data_sources_config = parent_rpcs.enroll_data_sources if parent_rpcs.respond_to? :enroll_data_sources
+@enroll_data_sources = ::Gapic::Config::Method.new enroll_data_sources_config
 
 yield self if block_given?
 end
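A minimal sketch of calling the new RPC through its keyword-argument form, using placeholder project and data source IDs:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Keyword-argument form of the new RPC; the call returns Google::Protobuf::Empty.
client.enroll_data_sources name: "projects/my-project",
                           data_source_ids: ["scheduled_query"]
```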
data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service.rb
CHANGED
@@ -32,10 +32,7 @@ module Google
 module DataTransfer
 module V1
 ##
-#
-# configure the transfer of their data from other Google Products into
-# BigQuery. This service contains methods that are end user exposed. It backs
-# up the frontend.
+# This API allows users to manage their data transfers into BigQuery.
 #
 # To load this service and instantiate a client:
 #
data/lib/google/cloud/bigquery/data_transfer/v1.rb
CHANGED
@@ -26,6 +26,8 @@ module Google
 ##
 # To load this package, including all its services, and instantiate a client:
 #
+# @example
+#
 #   require "google/cloud/bigquery/data_transfer/v1"
 #   client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new
 #
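Beyond the bare `Client.new` added to the `@example`, gapic-generated clients also accept a configuration block at construction time; a small sketch (the 60-second timeout is an arbitrary example value):

```ruby
require "google/cloud/bigquery/data_transfer/v1"

# The constructor yields the client's configuration before any RPC is made.
client = ::Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new do |config|
  config.timeout = 60.0
end
```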
data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb
CHANGED
@@ -1,6 +1,8 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto
 
+require 'google/protobuf'
+
 require 'google/api/annotations_pb'
 require 'google/api/client_pb'
 require 'google/api/field_behavior_pb'
@@ -11,7 +13,6 @@ require 'google/protobuf/empty_pb'
 require 'google/protobuf/field_mask_pb'
 require 'google/protobuf/timestamp_pb'
 require 'google/protobuf/wrappers_pb'
-require 'google/protobuf'
 
 Google::Protobuf::DescriptorPool.generated_pool.build do
   add_file("google/cloud/bigquery/datatransfer/v1/datatransfer.proto", :syntax => :proto3) do
@@ -174,6 +175,10 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
     add_message "google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse" do
       repeated :runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
     end
+    add_message "google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest" do
+      optional :name, :string, 1
+      repeated :data_source_ids, :string, 2
+    end
   end
 end
 
@@ -210,6 +215,7 @@ module Google
 StartManualTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest").msgclass
 StartManualTransferRunsRequest::TimeRange = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange").msgclass
 StartManualTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse").msgclass
+EnrollDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest").msgclass
 end
 end
 end
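The message class registered above behaves like any other generated protobuf message; a quick round-trip sketch with placeholder field values:

```ruby
require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb"

klass = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest

msg = klass.new name: "projects/my-project", data_source_ids: ["scheduled_query"]

# Round-trip the message through the binary wire format.
bytes   = klass.encode msg
decoded = klass.decode bytes
puts decoded.data_source_ids.to_a.inspect
```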
data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb
CHANGED
@@ -25,10 +25,7 @@ module Google
 module DataTransfer
 module V1
 module DataTransferService
-#
-# configure the transfer of their data from other Google Products into
-# BigQuery. This service contains methods that are end user exposed. It backs
-# up the frontend.
+# This API allows users to manage their data transfers into BigQuery.
 class Service
 
 include ::GRPC::GenericService
@@ -37,19 +34,17 @@ module Google
 self.unmarshal_class_method = :decode
 self.service_name = 'google.cloud.bigquery.datatransfer.v1.DataTransferService'
 
-# Retrieves a supported data source and returns its settings
-# which can be used for UI rendering.
+# Retrieves a supported data source and returns its settings.
 rpc :GetDataSource, ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource
-# Lists supported data sources and returns their settings
-# which can be used for UI rendering.
+# Lists supported data sources and returns their settings.
 rpc :ListDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse
 # Creates a new data transfer configuration.
 rpc :CreateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
 # Updates a data transfer configuration.
 # All fields must be set, even if they are not updated.
 rpc :UpdateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
-# Deletes a data transfer configuration,
-#
+# Deletes a data transfer configuration, including any associated transfer
+# runs and logs.
 rpc :DeleteTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Google::Protobuf::Empty
 # Returns information about a data transfer config.
 rpc :GetTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
@@ -71,17 +66,20 @@ module Google
 rpc :GetTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun
 # Deletes the specified transfer run.
 rpc :DeleteTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Google::Protobuf::Empty
-# Returns information about running and completed
+# Returns information about running and completed transfer runs.
 rpc :ListTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse
-# Returns
+# Returns log messages for the transfer run.
 rpc :ListTransferLogs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse
 # Returns true if valid credentials exist for the given data source and
 # requesting user.
-# Some data sources doesn't support service account, so we need to talk to
-# them on behalf of the end user. This API just checks whether we have OAuth
-# token for the particular user, which is a pre-requisite before user can
-# create a transfer config.
 rpc :CheckValidCreds, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse
+# Enroll data sources in a user project. This allows users to create transfer
+# configurations for these data sources. They will also appear in the
+# ListDataSources RPC and as such, will appear in the BigQuery UI
+# 'https://bigquery.cloud.google.com' (and the documents can be found at
+# https://cloud.google.com/bigquery/bigquery-web-ui and
+# https://cloud.google.com/bigquery/docs/working-with-transfers).
+rpc :EnrollDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Google::Protobuf::Empty
 end
 
 Stub = Service.rpc_stub_class
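Callers that bypass the gapic wrapper can reach the same service through the raw gRPC stub generated here. A rough sketch, assuming `googleauth` for Application Default Credentials and the public `bigquerydatatransfer.googleapis.com:443` endpoint; project and data source IDs are placeholders:

```ruby
require "google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb"
require "googleauth"

scopes  = ["https://www.googleapis.com/auth/cloud-platform"]
updater = Google::Auth.get_application_default(scopes).updater_proc

# Compose TLS channel credentials with per-call OAuth credentials.
creds = GRPC::Core::ChannelCredentials.new.compose(
  GRPC::Core::CallCredentials.new(updater)
)

stub = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Stub.new(
  "bigquerydatatransfer.googleapis.com:443", creds
)

request = Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest.new(
  name: "projects/my-project", data_source_ids: ["scheduled_query"]
)
stub.enroll_data_sources request # => Google::Protobuf::Empty
```

The gapic `Client` shown earlier wires up this channel, credentials, and retry behavior automatically, so the stub is only needed for low-level integrations.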
data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb
CHANGED
@@ -1,13 +1,13 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/datatransfer/v1/transfer.proto
 
+require 'google/protobuf'
+
 require 'google/api/field_behavior_pb'
 require 'google/api/resource_pb'
-require 'google/protobuf/duration_pb'
 require 'google/protobuf/struct_pb'
 require 'google/protobuf/timestamp_pb'
 require 'google/rpc/status_pb'
-require 'google/protobuf'
 
 Google::Protobuf::DescriptorPool.generated_pool.build do
   add_file("google/cloud/bigquery/datatransfer/v1/transfer.proto", :syntax => :proto3) do
@@ -19,6 +19,9 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
     optional :start_time, :message, 1, "google.protobuf.Timestamp"
     optional :end_time, :message, 2, "google.protobuf.Timestamp"
   end
+  add_message "google.cloud.bigquery.datatransfer.v1.UserInfo" do
+    proto3_optional :email, :string, 1
+  end
   add_message "google.cloud.bigquery.datatransfer.v1.TransferConfig" do
     optional :name, :string, 1
     optional :display_name, :string, 3
@@ -35,6 +38,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
     optional :dataset_region, :string, 14
     optional :notification_pubsub_topic, :string, 15
     optional :email_preferences, :message, 18, "google.cloud.bigquery.datatransfer.v1.EmailPreferences"
+    proto3_optional :owner_info, :message, 27, "google.cloud.bigquery.datatransfer.v1.UserInfo"
     oneof :destination do
       optional :destination_dataset_id, :string, 2
     end
@@ -92,6 +96,7 @@ module Google
 module V1
 EmailPreferences = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EmailPreferences").msgclass
 ScheduleOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptions").msgclass
+UserInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UserInfo").msgclass
 TransferConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferConfig").msgclass
 TransferRun = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferRun").msgclass
 TransferMessage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferMessage").msgclass
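Because `owner_info` is a message field (declared `proto3_optional`), it reads back as `nil` until it is explicitly set; a short sketch with a placeholder e-mail address:

```ruby
require "google/cloud/bigquery/datatransfer/v1/transfer_pb"

config = Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig.new

# An unset message field reads back as nil.
puts config.owner_info.inspect # => nil

config.owner_info = Google::Cloud::Bigquery::DataTransfer::V1::UserInfo.new(email: "user@example.com")
puts config.owner_info.email   # => "user@example.com"
```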
data/proto_docs/google/cloud/bigquery/datatransfer/v1/datatransfer.rb
CHANGED
@@ -22,12 +22,7 @@ module Google
 module Bigquery
 module DataTransfer
 module V1
-#
-# parameters can be rendered in the UI. These parameters are given to us by
-# supported data sources, and include all needed information for rendering
-# and validation.
-# Thus, whoever uses this api can decide to generate either generic ui,
-# or custom data source specific forms.
+# A parameter used to define custom fields in a data source definition.
 # @!attribute [rw] param_id
 #   @return [::String]
 #     Parameter identifier.
@@ -108,8 +103,7 @@ module Google
 end
 end
 
-#
-# render UI and request proper OAuth tokens.
+# Defines the properties and custom parameters for a data source.
 # @!attribute [r] name
 #   @return [::String]
 #     Output only. Data source resource name.
@@ -266,9 +260,9 @@ module Google
 # A request to create a data transfer configuration. If new credentials are
 # needed for this transfer configuration, an authorization code must be
 # provided. If an authorization code is provided, the transfer configuration
-# will be associated with the user id corresponding to the
-#
-#
+# will be associated with the user id corresponding to the authorization code.
+# Otherwise, the transfer configuration will be associated with the calling
+# user.
 # @!attribute [rw] parent
 #   @return [::String]
 #     Required. The BigQuery project id where the transfer configuration should be created.
@@ -445,9 +439,7 @@ module Google
 extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 
-# A request to list data transfer runs.
-# specific data transfer runs. The data source can use this method to request
-# all scheduled transfer runs.
+# A request to list data transfer runs.
 # @!attribute [rw] parent
 #   @return [::String]
 #     Required. Name of transfer configuration for which transfer runs should be retrieved.
@@ -637,6 +629,21 @@ module Google
 include ::Google::Protobuf::MessageExts
 extend ::Google::Protobuf::MessageExts::ClassMethods
 end
+
+# A request to enroll a set of data sources so they are visible in the
+# BigQuery UI's `Transfer` tab.
+# @!attribute [rw] name
+#   @return [::String]
+#     The name of the project resource in the form:
+#     `projects/{project_id}`
+# @!attribute [rw] data_source_ids
+#   @return [::Array<::String>]
+#     Data sources that are enrolled. It is required to provide at least one
+#     data source id.
+class EnrollDataSourcesRequest
+  include ::Google::Protobuf::MessageExts
+  extend ::Google::Protobuf::MessageExts::ClassMethods
+end
 end
 end
 end
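For context, `CreateTransferConfigRequest` documented above is normally used through the client as sketched below; the project, dataset, schedule, and query are placeholders, and the `params` field is built as a `Google::Protobuf::Struct`:

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

# Scheduled-query parameters go into a google.protobuf.Struct.
params = Google::Protobuf::Struct.new(
  fields: { "query" => Google::Protobuf::Value.new(string_value: "SELECT 1") }
)

transfer_config = {
  destination_dataset_id: "my_dataset",
  display_name:           "Nightly example query",
  data_source_id:         "scheduled_query",
  schedule:               "every 24 hours",
  params:                 params
}

response = client.create_transfer_config parent: "projects/my-project",
                                         transfer_config: transfer_config
puts response.name
```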
data/proto_docs/google/cloud/bigquery/datatransfer/v1/transfer.rb
CHANGED
@@ -57,6 +57,15 @@ module Google
 extend ::Google::Protobuf::MessageExts::ClassMethods
 end
 
+# Information about a user.
+# @!attribute [rw] email
+#   @return [::String]
+#     E-mail address of the user.
+class UserInfo
+  include ::Google::Protobuf::MessageExts
+  extend ::Google::Protobuf::MessageExts::ClassMethods
+end
+
 # Represents a data transfer configuration. A transfer configuration
 # contains all metadata needed to perform a data transfer. For example,
 # `destination_dataset_id` specifies where data should be stored.
@@ -99,7 +108,9 @@ module Google
 #     `first sunday of quarter 00:00`.
 #     See more explanation about the format here:
 #     https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
-#
+#
+#     NOTE: The minimum interval time between recurring transfers depends on the
+#     data source; refer to the documentation for your data source.
 # @!attribute [rw] schedule_options
 #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptions]
 #     Options customizing the data transfer schedule.
@@ -141,6 +152,11 @@ module Google
 #   @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
 #     Email notifications will be sent according to these preferences
 #     to the email address of the user who owns this transfer config.
+# @!attribute [r] owner_info
+#   @return [::Google::Cloud::Bigquery::DataTransfer::V1::UserInfo]
+#     Output only. Information about the user whose credentials are used to transfer data.
+#     Populated only for `transferConfigs.get` requests. In case the user
+#     information is not available, this field will not be populated.
 class TransferConfig
 include ::Google::Protobuf::MessageExts
 extend ::Google::Protobuf::MessageExts::ClassMethods
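As the new `owner_info` documentation notes, the field is populated only on `transferConfigs.get`; a sketch of reading it (the transfer config name is a placeholder):

```ruby
require "google/cloud/bigquery/data_transfer/v1"

client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Client.new

config = client.get_transfer_config name: "projects/my-project/transferConfigs/1234"

# owner_info is output only and may be absent when user information
# is not available, so guard the read.
puts config.owner_info&.email
```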
data/proto_docs/google/protobuf/any.rb
CHANGED
@@ -44,7 +44,7 @@ module Google
 #       foo = any.unpack(Foo.class);
 #     }
 #
-#
+#  Example 3: Pack and unpack a message in Python.
 #
 #     foo = Foo(...)
 #     any = Any()
@@ -54,7 +54,7 @@ module Google
 #     any.Unpack(foo)
 #     ...
 #
-#
+#  Example 4: Pack and unpack a message in Go
 #
 #     foo := &pb.Foo{...}
 #     any, err := anypb.New(foo)
@@ -75,7 +75,7 @@ module Google
 #
 #
 # JSON
-#
+#
 # The JSON representation of an `Any` value uses the regular
 # representation of the deserialized, embedded message, with an
 # additional field `@type` which contains the type URL. Example:
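The examples above cover Java, Python, and Go; in Ruby, the `google-protobuf` well-known-type helpers provide an equivalent round trip. A sketch using a `TransferRun` message purely as an illustration, with a placeholder resource name:

```ruby
require "google/protobuf/well_known_types"
require "google/cloud/bigquery/datatransfer/v1/transfer_pb"

run = Google::Cloud::Bigquery::DataTransfer::V1::TransferRun.new(
  name: "projects/p/locations/us/transferConfigs/c/runs/r"
)

# Pack the message into an Any, then unpack it by message class.
any      = Google::Protobuf::Any.pack(run)
unpacked = any.unpack(Google::Cloud::Bigquery::DataTransfer::V1::TransferRun)
puts unpacked.name
```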
data/proto_docs/google/protobuf/struct.rb
CHANGED
@@ -46,8 +46,8 @@ module Google
 
 # `Value` represents a dynamically typed value which can be either
 # null, a number, a string, a boolean, a recursive struct value, or a
-# list of values. A producer of value is expected to set one of
-# variants
+# list of values. A producer of value is expected to set one of these
+# variants. Absence of any variant indicates an error.
 #
 # The JSON representation for `Value` is JSON value.
 # @!attribute [rw] null_value
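In Ruby these `Value` variants are usually produced from plain objects through the `google-protobuf` well-known-type helpers; a sketch (the keys and values are arbitrary examples):

```ruby
require "google/protobuf/well_known_types"

# Build a Struct (and its nested Values) from an ordinary Ruby Hash.
struct = Google::Protobuf::Struct.from_hash(
  "query"        => "SELECT 1",
  "partitioning" => false,
  "labels"       => { "team" => "analytics" }
)

puts struct.fields["query"].string_value # => "SELECT 1"
puts struct.to_h.inspect
```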
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: google-cloud-bigquery-data_transfer-v1
 version: !ruby/object:Gem::Version
-version: 0.
+version: 0.6.0
 platform: ruby
 authors:
 - Google LLC
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-01
+date: 2022-07-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 name: gapic-common
@@ -16,7 +16,7 @@ dependencies:
 requirements:
 - - ">="
 - !ruby/object:Gem::Version
-version: '0.
+version: '0.10'
 - - "<"
 - !ruby/object:Gem::Version
 version: 2.a
@@ -26,7 +26,7 @@ dependencies:
 requirements:
 - - ">="
 - !ruby/object:Gem::Version
-version: '0.
+version: '0.10'
 - - "<"
 - !ruby/object:Gem::Version
 version: 2.a
@@ -50,28 +50,28 @@ dependencies:
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: 1.
+version: 1.26.1
 type: :development
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: 1.
+version: 1.26.1
 - !ruby/object:Gem::Dependency
 name: minitest
 requirement: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '5.
+version: '5.16'
 type: :development
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
 - - "~>"
 - !ruby/object:Gem::Version
-version: '5.
+version: '5.16'
 - !ruby/object:Gem::Dependency
 name: minitest-focus
 requirement: !ruby/object:Gem::Requirement
@@ -106,14 +106,14 @@ dependencies:
 requirements:
 - - ">="
 - !ruby/object:Gem::Version
-version: '
+version: '13.0'
 type: :development
 prerelease: false
 version_requirements: !ruby/object:Gem::Requirement
 requirements:
 - - ">="
 - !ruby/object:Gem::Version
-version: '
+version: '13.0'
 - !ruby/object:Gem::Dependency
 name: redcarpet
 requirement: !ruby/object:Gem::Requirement
@@ -205,14 +205,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
 requirements:
 - - ">="
 - !ruby/object:Gem::Version
-version: '2.
+version: '2.6'
 required_rubygems_version: !ruby/object:Gem::Requirement
 requirements:
 - - ">="
 - !ruby/object:Gem::Version
 version: '0'
 requirements: []
-rubygems_version: 3.3.
+rubygems_version: 3.3.14
 signing_key:
 specification_version: 4
 summary: API Client library for the BigQuery Data Transfer Service V1 API