google-cloud-bigquery-data_transfer 0.1.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. checksums.yaml +7 -0
  2. data/.yardopts +8 -0
  3. data/LICENSE +201 -0
  4. data/README.md +54 -0
  5. data/lib/google/cloud/bigquery/data_transfer.rb +132 -0
  6. data/lib/google/cloud/bigquery/data_transfer/credentials.rb +32 -0
  7. data/lib/google/cloud/bigquery/data_transfer/v1.rb +125 -0
  8. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_pb.rb +189 -0
  9. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client.rb +860 -0
  10. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client_config.json +91 -0
  11. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb.rb +86 -0
  12. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/data_transfer.rb +500 -0
  13. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/transfer.rb +216 -0
  14. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/any.rb +124 -0
  15. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/duration.rb +90 -0
  16. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/field_mask.rb +223 -0
  17. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/struct.rb +73 -0
  18. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/timestamp.rb +106 -0
  19. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/wrappers.rb +89 -0
  20. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/rpc/status.rb +83 -0
  21. data/lib/google/cloud/bigquery/data_transfer/v1/doc/overview.rb +79 -0
  22. data/lib/google/cloud/bigquery/data_transfer/v1/transfer_pb.rb +82 -0
  23. metadata +149 -0
data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_pb.rb
@@ -0,0 +1,189 @@
1
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
2
+ # source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto
3
+
4
+ require 'google/protobuf'
5
+
6
+ require 'google/api/annotations_pb'
7
+ require 'google/cloud/bigquery/data_transfer/v1/transfer_pb'
8
+ require 'google/protobuf/duration_pb'
9
+ require 'google/protobuf/empty_pb'
10
+ require 'google/protobuf/field_mask_pb'
11
+ require 'google/protobuf/timestamp_pb'
12
+ require 'google/protobuf/wrappers_pb'
13
+ Google::Protobuf::DescriptorPool.generated_pool.build do
14
+ add_message "google.cloud.bigquery.datatransfer.v1.DataSourceParameter" do
15
+ optional :param_id, :string, 1
16
+ optional :display_name, :string, 2
17
+ optional :description, :string, 3
18
+ optional :type, :enum, 4, "google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type"
19
+ optional :required, :bool, 5
20
+ optional :repeated, :bool, 6
21
+ optional :validation_regex, :string, 7
22
+ repeated :allowed_values, :string, 8
23
+ optional :min_value, :message, 9, "google.protobuf.DoubleValue"
24
+ optional :max_value, :message, 10, "google.protobuf.DoubleValue"
25
+ repeated :fields, :message, 11, "google.cloud.bigquery.datatransfer.v1.DataSourceParameter"
26
+ optional :validation_description, :string, 12
27
+ optional :validation_help_url, :string, 13
28
+ optional :immutable, :bool, 14
29
+ optional :recurse, :bool, 15
30
+ end
31
+ add_enum "google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type" do
32
+ value :TYPE_UNSPECIFIED, 0
33
+ value :STRING, 1
34
+ value :INTEGER, 2
35
+ value :DOUBLE, 3
36
+ value :BOOLEAN, 4
37
+ value :RECORD, 5
38
+ value :PLUS_PAGE, 6
39
+ end
40
+ add_message "google.cloud.bigquery.datatransfer.v1.DataSource" do
41
+ optional :name, :string, 1
42
+ optional :data_source_id, :string, 2
43
+ optional :display_name, :string, 3
44
+ optional :description, :string, 4
45
+ optional :client_id, :string, 5
46
+ repeated :scopes, :string, 6
47
+ optional :transfer_type, :enum, 7, "google.cloud.bigquery.datatransfer.v1.TransferType"
48
+ optional :supports_multiple_transfers, :bool, 8
49
+ optional :update_deadline_seconds, :int32, 9
50
+ optional :default_schedule, :string, 10
51
+ optional :supports_custom_schedule, :bool, 11
52
+ repeated :parameters, :message, 12, "google.cloud.bigquery.datatransfer.v1.DataSourceParameter"
53
+ optional :help_url, :string, 13
54
+ optional :authorization_type, :enum, 14, "google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType"
55
+ optional :data_refresh_type, :enum, 15, "google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType"
56
+ optional :default_data_refresh_window_days, :int32, 16
57
+ optional :manual_runs_disabled, :bool, 17
58
+ optional :minimum_schedule_interval, :message, 18, "google.protobuf.Duration"
59
+ end
60
+ add_enum "google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType" do
61
+ value :AUTHORIZATION_TYPE_UNSPECIFIED, 0
62
+ value :AUTHORIZATION_CODE, 1
63
+ value :GOOGLE_PLUS_AUTHORIZATION_CODE, 2
64
+ end
65
+ add_enum "google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType" do
66
+ value :DATA_REFRESH_TYPE_UNSPECIFIED, 0
67
+ value :SLIDING_WINDOW, 1
68
+ value :CUSTOM_SLIDING_WINDOW, 2
69
+ end
70
+ add_message "google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest" do
71
+ optional :name, :string, 1
72
+ end
73
+ add_message "google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest" do
74
+ optional :parent, :string, 1
75
+ optional :page_token, :string, 3
76
+ optional :page_size, :int32, 4
77
+ end
78
+ add_message "google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse" do
79
+ repeated :data_sources, :message, 1, "google.cloud.bigquery.datatransfer.v1.DataSource"
80
+ optional :next_page_token, :string, 2
81
+ end
82
+ add_message "google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest" do
83
+ optional :parent, :string, 1
84
+ optional :transfer_config, :message, 2, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
85
+ optional :authorization_code, :string, 3
86
+ end
87
+ add_message "google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest" do
88
+ optional :transfer_config, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
89
+ optional :authorization_code, :string, 3
90
+ optional :update_mask, :message, 4, "google.protobuf.FieldMask"
91
+ end
92
+ add_message "google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest" do
93
+ optional :name, :string, 1
94
+ end
95
+ add_message "google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest" do
96
+ optional :name, :string, 1
97
+ end
98
+ add_message "google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest" do
99
+ optional :name, :string, 1
100
+ end
101
+ add_message "google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest" do
102
+ optional :name, :string, 1
103
+ end
104
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest" do
105
+ optional :parent, :string, 1
106
+ repeated :data_source_ids, :string, 2
107
+ optional :page_token, :string, 3
108
+ optional :page_size, :int32, 4
109
+ end
110
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse" do
111
+ repeated :transfer_configs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
112
+ optional :next_page_token, :string, 2
113
+ end
114
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest" do
115
+ optional :parent, :string, 1
116
+ repeated :states, :enum, 2, "google.cloud.bigquery.datatransfer.v1.TransferState"
117
+ optional :page_token, :string, 3
118
+ optional :page_size, :int32, 4
119
+ optional :run_attempt, :enum, 5, "google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"
120
+ end
121
+ add_enum "google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt" do
122
+ value :RUN_ATTEMPT_UNSPECIFIED, 0
123
+ value :LATEST, 1
124
+ end
125
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse" do
126
+ repeated :transfer_runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
127
+ optional :next_page_token, :string, 2
128
+ end
129
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest" do
130
+ optional :parent, :string, 1
131
+ optional :page_token, :string, 4
132
+ optional :page_size, :int32, 5
133
+ repeated :message_types, :enum, 6, "google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"
134
+ end
135
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse" do
136
+ repeated :transfer_messages, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferMessage"
137
+ optional :next_page_token, :string, 2
138
+ end
139
+ add_message "google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest" do
140
+ optional :name, :string, 1
141
+ end
142
+ add_message "google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse" do
143
+ optional :has_valid_creds, :bool, 1
144
+ end
145
+ add_message "google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest" do
146
+ optional :parent, :string, 1
147
+ optional :start_time, :message, 2, "google.protobuf.Timestamp"
148
+ optional :end_time, :message, 3, "google.protobuf.Timestamp"
149
+ end
150
+ add_message "google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse" do
151
+ repeated :runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
152
+ end
153
+ end
154
+
155
+ module Google
156
+ module Cloud
157
+ module Bigquery
158
+ module DataTransfer
159
+ module V1
160
+ DataSourceParameter = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter").msgclass
161
+ DataSourceParameter::Type = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type").enummodule
162
+ DataSource = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource").msgclass
163
+ DataSource::AuthorizationType = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType").enummodule
164
+ DataSource::DataRefreshType = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType").enummodule
165
+ GetDataSourceRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest").msgclass
166
+ ListDataSourcesRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest").msgclass
167
+ ListDataSourcesResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse").msgclass
168
+ CreateTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest").msgclass
169
+ UpdateTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest").msgclass
170
+ GetTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest").msgclass
171
+ DeleteTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest").msgclass
172
+ GetTransferRunRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest").msgclass
173
+ DeleteTransferRunRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest").msgclass
174
+ ListTransferConfigsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest").msgclass
175
+ ListTransferConfigsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse").msgclass
176
+ ListTransferRunsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest").msgclass
177
+ ListTransferRunsRequest::RunAttempt = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt").enummodule
178
+ ListTransferRunsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse").msgclass
179
+ ListTransferLogsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest").msgclass
180
+ ListTransferLogsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse").msgclass
181
+ CheckValidCredsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest").msgclass
182
+ CheckValidCredsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse").msgclass
183
+ ScheduleTransferRunsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest").msgclass
184
+ ScheduleTransferRunsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse").msgclass
185
+ end
186
+ end
187
+ end
188
+ end
189
+ end
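The classes registered above behave like ordinary google-protobuf messages. A minimal sketch of building and round-tripping one of the request types defined in this file; the project id is a placeholder:

    require "google/cloud/bigquery/data_transfer/v1/data_transfer_pb"

    # Fields match the descriptor above: parent, page_token, page_size.
    request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(
      parent: "projects/my-project", # placeholder project id
      page_size: 10
    )

    # Generated messages support the standard protobuf binary round trip.
    bytes   = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.encode(request)
    decoded = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.decode(bytes)
    decoded.parent # => "projects/my-project"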
data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client.rb
@@ -0,0 +1,860 @@
1
+ # Copyright 2018 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # https://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ #
15
+ # EDITING INSTRUCTIONS
16
+ # This file was generated from the file
17
+ # https://github.com/googleapis/googleapis/blob/master/google/cloud/bigquery/datatransfer/v1/datatransfer.proto,
18
+ # and updates to that file get reflected here through a refresh process.
19
+ # For the short term, the refresh process will only be runnable by Google
20
+ # engineers.
21
+
22
+ require "json"
23
+ require "pathname"
24
+
25
+ require "google/gax"
26
+
27
+ require "google/cloud/bigquery/data_transfer/v1/data_transfer_pb"
28
+ require "google/cloud/bigquery/data_transfer/credentials"
29
+
30
+ module Google
31
+ module Cloud
32
+ module Bigquery
33
+ module DataTransfer
34
+ module V1
35
+ # The Google BigQuery Data Transfer Service API enables BigQuery users to
+ # configure the transfer of their data from other Google products into BigQuery.
+ # This service contains the methods that are exposed to end users; it backs
+ # the frontend.
39
+ #
40
+ # @!attribute [r] data_transfer_service_stub
41
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Stub]
42
+ class DataTransferServiceClient
43
+ attr_reader :data_transfer_service_stub
44
+
45
+ # The default address of the service.
46
+ SERVICE_ADDRESS = "bigquerydatatransfer.googleapis.com".freeze
47
+
48
+ # The default port of the service.
49
+ DEFAULT_SERVICE_PORT = 443
50
+
51
+ DEFAULT_TIMEOUT = 30
52
+
53
+ PAGE_DESCRIPTORS = {
54
+ "list_data_sources" => Google::Gax::PageDescriptor.new(
55
+ "page_token",
56
+ "next_page_token",
57
+ "data_sources"),
58
+ "list_transfer_configs" => Google::Gax::PageDescriptor.new(
59
+ "page_token",
60
+ "next_page_token",
61
+ "transfer_configs"),
62
+ "list_transfer_runs" => Google::Gax::PageDescriptor.new(
63
+ "page_token",
64
+ "next_page_token",
65
+ "transfer_runs"),
66
+ "list_transfer_logs" => Google::Gax::PageDescriptor.new(
67
+ "page_token",
68
+ "next_page_token",
69
+ "transfer_messages")
70
+ }.freeze
71
+
72
+ private_constant :PAGE_DESCRIPTORS
73
+
74
+ # The scopes needed to make gRPC calls to all of the methods defined in
75
+ # this service.
76
+ ALL_SCOPES = [
77
+ "https://www.googleapis.com/auth/cloud-platform"
78
+ ].freeze
79
+
80
+
81
+ PROJECT_DATA_SOURCE_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
82
+ "projects/{project}/dataSources/{data_source}"
83
+ )
84
+
85
+ private_constant :PROJECT_DATA_SOURCE_PATH_TEMPLATE
86
+
87
+ PROJECT_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
88
+ "projects/{project}"
89
+ )
90
+
91
+ private_constant :PROJECT_PATH_TEMPLATE
92
+
93
+ PROJECT_TRANSFER_CONFIG_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
94
+ "projects/{project}/transferConfigs/{transfer_config}"
95
+ )
96
+
97
+ private_constant :PROJECT_TRANSFER_CONFIG_PATH_TEMPLATE
98
+
99
+ PROJECT_RUN_PATH_TEMPLATE = Google::Gax::PathTemplate.new(
100
+ "projects/{project}/transferConfigs/{transfer_config}/runs/{run}"
101
+ )
102
+
103
+ private_constant :PROJECT_RUN_PATH_TEMPLATE
104
+
105
+ # Returns a fully-qualified project_data_source resource name string.
106
+ # @param project [String]
107
+ # @param data_source [String]
108
+ # @return [String]
109
+ def self.project_data_source_path project, data_source
110
+ PROJECT_DATA_SOURCE_PATH_TEMPLATE.render(
111
+ :"project" => project,
112
+ :"data_source" => data_source
113
+ )
114
+ end
115
+
116
+ # Returns a fully-qualified project resource name string.
117
+ # @param project [String]
118
+ # @return [String]
119
+ def self.project_path project
120
+ PROJECT_PATH_TEMPLATE.render(
121
+ :"project" => project
122
+ )
123
+ end
124
+
125
+ # Returns a fully-qualified project_transfer_config resource name string.
126
+ # @param project [String]
127
+ # @param transfer_config [String]
128
+ # @return [String]
129
+ def self.project_transfer_config_path project, transfer_config
130
+ PROJECT_TRANSFER_CONFIG_PATH_TEMPLATE.render(
131
+ :"project" => project,
132
+ :"transfer_config" => transfer_config
133
+ )
134
+ end
135
+
136
+ # Returns a fully-qualified project_run resource name string.
137
+ # @param project [String]
138
+ # @param transfer_config [String]
139
+ # @param run [String]
140
+ # @return [String]
141
+ def self.project_run_path project, transfer_config, run
142
+ PROJECT_RUN_PATH_TEMPLATE.render(
143
+ :"project" => project,
144
+ :"transfer_config" => transfer_config,
145
+ :"run" => run
146
+ )
147
+ end
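A short sketch of the path helpers defined above; the ids are placeholders:

    client_class = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient

    client_class.project_path("my-project")
    # => "projects/my-project"
    client_class.project_data_source_path("my-project", "my_data_source")
    # => "projects/my-project/dataSources/my_data_source"
    client_class.project_run_path("my-project", "my-config", "my-run")
    # => "projects/my-project/transferConfigs/my-config/runs/my-run"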
148
+
149
+ # @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
150
+ # Provides the means for authenticating requests made by the client. This parameter can
+ # be one of several types.
+ # A `Google::Auth::Credentials` uses the properties of its represented keyfile for
+ # authenticating requests made by this client.
+ # A `String` will be treated as the path to the keyfile to use to construct
+ # credentials for this client.
+ # A `Hash` will be treated as the contents of a keyfile to use to construct
+ # credentials for this client.
+ # A `GRPC::Core::Channel` will be used to make calls through.
+ # A `GRPC::Core::ChannelCredentials` will be used to set up the RPC client. The channel
+ # credentials should already be composed with a `GRPC::Core::CallCredentials` object.
+ # A `Proc` will be used as an updater_proc for the gRPC channel. The proc transforms the
+ # request metadata, generally to supply OAuth credentials.
163
+ # @param scopes [Array<String>]
164
+ # The OAuth scopes for this service. This parameter is ignored if
165
+ # an updater_proc is supplied.
166
+ # @param client_config [Hash]
167
+ # A Hash for call options for each method. See
168
+ # Google::Gax#construct_settings for the structure of
169
+ # this data. Falls back to the default config if not specified
170
+ # or the specified config is missing data points.
171
+ # @param timeout [Numeric]
172
+ # The default timeout, in seconds, for calls made through this client.
173
+ def initialize \
174
+ credentials: nil,
175
+ scopes: ALL_SCOPES,
176
+ client_config: {},
177
+ timeout: DEFAULT_TIMEOUT,
178
+ lib_name: nil,
179
+ lib_version: ""
180
+ # These require statements are intentionally placed here to initialize
181
+ # the gRPC module only when it's required.
182
+ # See https://github.com/googleapis/toolkit/issues/446
183
+ require "google/gax/grpc"
184
+ require "google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb"
185
+
186
+ credentials ||= Google::Cloud::Bigquery::DataTransfer::Credentials.default
187
+
188
+ if credentials.is_a?(String) || credentials.is_a?(Hash)
189
+ updater_proc = Google::Cloud::Bigquery::DataTransfer::Credentials.new(credentials).updater_proc
190
+ end
191
+ if credentials.is_a?(GRPC::Core::Channel)
192
+ channel = credentials
193
+ end
194
+ if credentials.is_a?(GRPC::Core::ChannelCredentials)
195
+ chan_creds = credentials
196
+ end
197
+ if credentials.is_a?(Proc)
198
+ updater_proc = credentials
199
+ end
200
+ if credentials.is_a?(Google::Auth::Credentials)
201
+ updater_proc = credentials.updater_proc
202
+ end
203
+
204
+ package_version = Gem.loaded_specs['google-cloud-bigquery-data_transfer'].version.version
205
+
206
+ google_api_client = "gl-ruby/#{RUBY_VERSION}"
207
+ google_api_client << " #{lib_name}/#{lib_version}" if lib_name
208
+ google_api_client << " gapic/#{package_version} gax/#{Google::Gax::VERSION}"
209
+ google_api_client << " grpc/#{GRPC::VERSION}"
210
+ google_api_client.freeze
211
+
212
+ headers = { :"x-goog-api-client" => google_api_client }
213
+ client_config_file = Pathname.new(__dir__).join(
214
+ "data_transfer_service_client_config.json"
215
+ )
216
+ defaults = client_config_file.open do |f|
217
+ Google::Gax.construct_settings(
218
+ "google.cloud.bigquery.datatransfer.v1.DataTransferService",
219
+ JSON.parse(f.read),
220
+ client_config,
221
+ Google::Gax::Grpc::STATUS_CODE_NAMES,
222
+ timeout,
223
+ page_descriptors: PAGE_DESCRIPTORS,
224
+ errors: Google::Gax::Grpc::API_ERRORS,
225
+ kwargs: headers
226
+ )
227
+ end
228
+
229
+ # Allow overriding the service path/port in subclasses.
230
+ service_path = self.class::SERVICE_ADDRESS
231
+ port = self.class::DEFAULT_SERVICE_PORT
232
+ @data_transfer_service_stub = Google::Gax::Grpc.create_stub(
233
+ service_path,
234
+ port,
235
+ chan_creds: chan_creds,
236
+ channel: channel,
237
+ updater_proc: updater_proc,
238
+ scopes: scopes,
239
+ &Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Stub.method(:new)
240
+ )
241
+
242
+ @get_data_source = Google::Gax.create_api_call(
243
+ @data_transfer_service_stub.method(:get_data_source),
244
+ defaults["get_data_source"]
245
+ )
246
+ @list_data_sources = Google::Gax.create_api_call(
247
+ @data_transfer_service_stub.method(:list_data_sources),
248
+ defaults["list_data_sources"]
249
+ )
250
+ @create_transfer_config = Google::Gax.create_api_call(
251
+ @data_transfer_service_stub.method(:create_transfer_config),
252
+ defaults["create_transfer_config"]
253
+ )
254
+ @update_transfer_config = Google::Gax.create_api_call(
255
+ @data_transfer_service_stub.method(:update_transfer_config),
256
+ defaults["update_transfer_config"]
257
+ )
258
+ @delete_transfer_config = Google::Gax.create_api_call(
259
+ @data_transfer_service_stub.method(:delete_transfer_config),
260
+ defaults["delete_transfer_config"]
261
+ )
262
+ @get_transfer_config = Google::Gax.create_api_call(
263
+ @data_transfer_service_stub.method(:get_transfer_config),
264
+ defaults["get_transfer_config"]
265
+ )
266
+ @list_transfer_configs = Google::Gax.create_api_call(
267
+ @data_transfer_service_stub.method(:list_transfer_configs),
268
+ defaults["list_transfer_configs"]
269
+ )
270
+ @schedule_transfer_runs = Google::Gax.create_api_call(
271
+ @data_transfer_service_stub.method(:schedule_transfer_runs),
272
+ defaults["schedule_transfer_runs"]
273
+ )
274
+ @get_transfer_run = Google::Gax.create_api_call(
275
+ @data_transfer_service_stub.method(:get_transfer_run),
276
+ defaults["get_transfer_run"]
277
+ )
278
+ @delete_transfer_run = Google::Gax.create_api_call(
279
+ @data_transfer_service_stub.method(:delete_transfer_run),
280
+ defaults["delete_transfer_run"]
281
+ )
282
+ @list_transfer_runs = Google::Gax.create_api_call(
283
+ @data_transfer_service_stub.method(:list_transfer_runs),
284
+ defaults["list_transfer_runs"]
285
+ )
286
+ @list_transfer_logs = Google::Gax.create_api_call(
287
+ @data_transfer_service_stub.method(:list_transfer_logs),
288
+ defaults["list_transfer_logs"]
289
+ )
290
+ @check_valid_creds = Google::Gax.create_api_call(
291
+ @data_transfer_service_stub.method(:check_valid_creds),
292
+ defaults["check_valid_creds"]
293
+ )
294
+ end
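A minimal sketch of constructing the client with the keyword arguments accepted by the initializer above; the keyfile path is a placeholder, and leaving `credentials` out falls back to the default credential discovery shown in the code:

    require "google/cloud/bigquery/data_transfer/v1"

    client = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.new(
      credentials: "/path/to/keyfile.json", # placeholder path to a service account keyfile
      timeout: 20                           # default timeout, in seconds, for calls made through this client
    )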
295
+
296
+ # Service calls
297
+
298
+ # Retrieves a supported data source and returns its settings,
299
+ # which can be used for UI rendering.
300
+ #
301
+ # @param name [String]
302
+ # The name of the resource requested, for example:
303
+ # +projects/{project_id}/dataSources/{data_source_id}+
304
+ # @param options [Google::Gax::CallOptions]
305
+ # Overrides the default settings for this call, e.g., timeout,
306
+ # retries, etc.
307
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::DataSource]
308
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
309
+ # @example
310
+ # require "google/cloud/bigquery/data_transfer/v1"
311
+ #
312
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
313
+ # formatted_name = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_data_source_path("[PROJECT]", "[DATA_SOURCE]")
314
+ # response = data_transfer_service_client.get_data_source(formatted_name)
315
+
316
+ def get_data_source \
317
+ name,
318
+ options: nil
319
+ req = {
320
+ name: name
321
+ }.delete_if { |_, v| v.nil? }
322
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest)
323
+ @get_data_source.call(req, options)
324
+ end
325
+
326
+ # Lists supported data sources and returns their settings,
327
+ # which can be used for UI rendering.
328
+ #
329
+ # @param parent [String]
330
+ # The BigQuery project id for which data sources should be returned.
331
+ # Must be in the form: +projects/{project_id}+
332
+ # @param page_size [Integer]
333
+ # The maximum number of resources contained in the underlying API
334
+ # response. If page streaming is performed per-resource, this
335
+ # parameter does not affect the return value. If page streaming is
336
+ # performed per-page, this determines the maximum number of
337
+ # resources in a page.
338
+ # @param options [Google::Gax::CallOptions]
339
+ # Overrides the default settings for this call, e.g., timeout,
340
+ # retries, etc.
341
+ # @return [Google::Gax::PagedEnumerable<Google::Cloud::Bigquery::DataTransfer::V1::DataSource>]
342
+ # An enumerable of Google::Cloud::Bigquery::DataTransfer::V1::DataSource instances.
343
+ # See Google::Gax::PagedEnumerable documentation for other
344
+ # operations such as per-page iteration or access to the response
345
+ # object.
346
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
347
+ # @example
348
+ # require "google/cloud/bigquery/data_transfer/v1"
349
+ #
350
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
351
+ # formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path("[PROJECT]")
352
+ #
353
+ # # Iterate over all results.
354
+ # data_transfer_service_client.list_data_sources(formatted_parent).each do |element|
355
+ # # Process element.
356
+ # end
357
+ #
358
+ # # Or iterate over results one page at a time.
359
+ # data_transfer_service_client.list_data_sources(formatted_parent).each_page do |page|
360
+ # # Process each page at a time.
361
+ # page.each do |element|
362
+ # # Process element.
363
+ # end
364
+ # end
365
+
366
+ def list_data_sources \
367
+ parent,
368
+ page_size: nil,
369
+ options: nil
370
+ req = {
371
+ parent: parent,
372
+ page_size: page_size
373
+ }.delete_if { |_, v| v.nil? }
374
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest)
375
+ @list_data_sources.call(req, options)
376
+ end
377
+
378
+ # Creates a new data transfer configuration.
379
+ #
380
+ # @param parent [String]
381
+ # The BigQuery project id where the transfer configuration should be created.
382
+ # Must be in the format +projects/{project_id}/locations/{location_id}+.
+ # If the specified location does not match the location of the destination
+ # BigQuery dataset, the request will fail.
385
+ # @param transfer_config [Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig | Hash]
386
+ # Data transfer configuration to create.
387
+ # A hash of the same form as `Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig`
388
+ # can also be provided.
389
+ # @param authorization_code [String]
390
+ # Optional OAuth2 authorization code to use with this transfer configuration.
391
+ # This is required if new credentials are needed, as indicated by
392
+ # +CheckValidCreds+.
393
+ # In order to obtain authorization_code, please make a
394
+ # request to
395
+ # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
396
+ #
397
+ # * client_id should be OAuth client_id of BigQuery DTS API for the given
398
+ # data source returned by ListDataSources method.
399
+ # * data_source_scopes are the scopes returned by ListDataSources method.
400
+ # * redirect_uri is an optional parameter. If not specified, then
401
+ # authorization code is posted to the opener of authorization flow window.
402
+ # Otherwise it will be sent to the redirect uri. A special value of
403
+ # urn:ietf:wg:oauth:2.0:oob means that authorization code should be
404
+ # returned in the title bar of the browser, with the page text prompting
405
+ # the user to copy the code and paste it in the application.
406
+ # @param options [Google::Gax::CallOptions]
407
+ # Overrides the default settings for this call, e.g., timeout,
408
+ # retries, etc.
409
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig]
410
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
411
+ # @example
412
+ # require "google/cloud/bigquery/data_transfer/v1"
413
+ #
414
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
415
+ # formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path("[PROJECT]")
416
+ #
417
+ # # TODO: Initialize +transfer_config+:
418
+ # transfer_config = {}
419
+ # response = data_transfer_service_client.create_transfer_config(formatted_parent, transfer_config)
420
+
421
+ def create_transfer_config \
422
+ parent,
423
+ transfer_config,
424
+ authorization_code: nil,
425
+ options: nil
426
+ req = {
427
+ parent: parent,
428
+ transfer_config: transfer_config,
429
+ authorization_code: authorization_code
430
+ }.delete_if { |_, v| v.nil? }
431
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest)
432
+ @create_transfer_config.call(req, options)
433
+ end
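When +CheckValidCreds+ indicates that new credentials are needed, the code obtained from the OAuth flow described above is passed through the optional keyword argument. A sketch reusing +formatted_parent+ and +transfer_config+ from the example above; the code value is a placeholder:

    response = data_transfer_service_client.create_transfer_config(
      formatted_parent,
      transfer_config,
      authorization_code: "4/AAAh..." # placeholder code returned by the OAuth flow
    )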
434
+
435
+ # Updates a data transfer configuration.
436
+ # All fields must be set, even if they are not updated.
437
+ #
438
+ # @param transfer_config [Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig | Hash]
439
+ # Data transfer configuration to update.
440
+ # A hash of the same form as `Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig`
441
+ # can also be provided.
442
+ # @param update_mask [Google::Protobuf::FieldMask | Hash]
443
+ # Required list of fields to be updated in this request.
444
+ # A hash of the same form as `Google::Protobuf::FieldMask`
445
+ # can also be provided.
446
+ # @param authorization_code [String]
447
+ # Optional OAuth2 authorization code to use with this transfer configuration.
448
+ # If it is provided, the transfer configuration will be associated with the
449
+ # authorizing user.
450
+ # In order to obtain authorization_code, please make a
451
+ # request to
452
+ # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
453
+ #
454
+ # * client_id should be OAuth client_id of BigQuery DTS API for the given
455
+ # data source returned by ListDataSources method.
456
+ # * data_source_scopes are the scopes returned by ListDataSources method.
457
+ # * redirect_uri is an optional parameter. If not specified, then
458
+ # authorization code is posted to the opener of authorization flow window.
459
+ # Otherwise it will be sent to the redirect uri. A special value of
460
+ # urn:ietf:wg:oauth:2.0:oob means that authorization code should be
461
+ # returned in the title bar of the browser, with the page text prompting
462
+ # the user to copy the code and paste it in the application.
463
+ # @param options [Google::Gax::CallOptions]
464
+ # Overrides the default settings for this call, e.g., timeout,
465
+ # retries, etc.
466
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig]
467
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
468
+ # @example
469
+ # require "google/cloud/bigquery/data_transfer/v1"
470
+ #
471
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
472
+ #
473
+ # # TODO: Initialize +transfer_config+:
474
+ # transfer_config = {}
475
+ #
476
+ # # TODO: Initialize +update_mask+:
477
+ # update_mask = {}
478
+ # response = data_transfer_service_client.update_transfer_config(transfer_config, update_mask)
479
+
480
+ def update_transfer_config \
481
+ transfer_config,
482
+ update_mask,
483
+ authorization_code: nil,
484
+ options: nil
485
+ req = {
486
+ transfer_config: transfer_config,
487
+ update_mask: update_mask,
488
+ authorization_code: authorization_code
489
+ }.delete_if { |_, v| v.nil? }
490
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest)
491
+ @update_transfer_config.call(req, options)
492
+ end
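Both arguments accept plain hashes, as noted above. A sketch using the standard FieldMask +paths+ field; the +formatted_config_name+ variable and the +display_name+ field on TransferConfig are assumptions for illustration:

    transfer_config = {
      name: formatted_config_name,   # assumed: full transferConfigs/... resource name
      display_name: "Nightly import" # assumed TransferConfig field being changed
    }
    update_mask = { paths: ["display_name"] } # standard google.protobuf.FieldMask shape

    response = data_transfer_service_client.update_transfer_config(transfer_config, update_mask)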
493
+
494
+ # Deletes a data transfer configuration,
495
+ # including any associated transfer runs and logs.
496
+ #
497
+ # @param name [String]
498
+ # The name of the resource requested, for example:
499
+ # +projects/{project_id}/transferConfigs/{config_id}+
500
+ # @param options [Google::Gax::CallOptions]
501
+ # Overrides the default settings for this call, e.g., timeout,
502
+ # retries, etc.
503
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
504
+ # @example
505
+ # require "google/cloud/bigquery/data_transfer/v1"
506
+ #
507
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
508
+ # formatted_name = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]")
509
+ # data_transfer_service_client.delete_transfer_config(formatted_name)
510
+
511
+ def delete_transfer_config \
512
+ name,
513
+ options: nil
514
+ req = {
515
+ name: name
516
+ }.delete_if { |_, v| v.nil? }
517
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest)
518
+ @delete_transfer_config.call(req, options)
519
+ nil
520
+ end
521
+
522
+ # Returns information about a data transfer config.
523
+ #
524
+ # @param name [String]
525
+ # The name of the resource requested, for example:
526
+ # +projects/{project_id}/transferConfigs/{config_id}+
527
+ # @param options [Google::Gax::CallOptions]
528
+ # Overrides the default settings for this call, e.g., timeout,
529
+ # retries, etc.
530
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig]
531
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
532
+ # @example
533
+ # require "google/cloud/bigquery/data_transfer/v1"
534
+ #
535
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
536
+ # formatted_name = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]")
537
+ # response = data_transfer_service_client.get_transfer_config(formatted_name)
538
+
539
+ def get_transfer_config \
540
+ name,
541
+ options: nil
542
+ req = {
543
+ name: name
544
+ }.delete_if { |_, v| v.nil? }
545
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest)
546
+ @get_transfer_config.call(req, options)
547
+ end
548
+
549
+ # Returns information about all data transfers in the project.
550
+ #
551
+ # @param parent [String]
552
+ # The BigQuery project id for which transfer configurations
+ # should be returned: +projects/{project_id}+.
554
+ # @param data_source_ids [Array<String>]
555
+ # When specified, only configurations of requested data sources are returned.
556
+ # @param page_size [Integer]
557
+ # The maximum number of resources contained in the underlying API
558
+ # response. If page streaming is performed per-resource, this
559
+ # parameter does not affect the return value. If page streaming is
560
+ # performed per-page, this determines the maximum number of
561
+ # resources in a page.
562
+ # @param options [Google::Gax::CallOptions]
563
+ # Overrides the default settings for this call, e.g., timeout,
564
+ # retries, etc.
565
+ # @return [Google::Gax::PagedEnumerable<Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>]
566
+ # An enumerable of Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig instances.
567
+ # See Google::Gax::PagedEnumerable documentation for other
568
+ # operations such as per-page iteration or access to the response
569
+ # object.
570
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
571
+ # @example
572
+ # require "google/cloud/bigquery/data_transfer/v1"
573
+ #
574
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
575
+ # formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_path("[PROJECT]")
576
+ #
577
+ # # Iterate over all results.
578
+ # data_transfer_service_client.list_transfer_configs(formatted_parent).each do |element|
579
+ # # Process element.
580
+ # end
581
+ #
582
+ # # Or iterate over results one page at a time.
583
+ # data_transfer_service_client.list_transfer_configs(formatted_parent).each_page do |page|
584
+ # # Process each page at a time.
585
+ # page.each do |element|
586
+ # # Process element.
587
+ # end
588
+ # end
589
+
590
+ def list_transfer_configs \
591
+ parent,
592
+ data_source_ids: nil,
593
+ page_size: nil,
594
+ options: nil
595
+ req = {
596
+ parent: parent,
597
+ data_source_ids: data_source_ids,
598
+ page_size: page_size
599
+ }.delete_if { |_, v| v.nil? }
600
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest)
601
+ @list_transfer_configs.call(req, options)
602
+ end
603
+
604
+ # Creates transfer runs for a time range [start_time, end_time].
605
+ # For each date - or whatever granularity the data source supports - in the
606
+ # range, one transfer run is created.
607
+ # Note that runs are created per UTC time in the time range.
608
+ #
609
+ # @param parent [String]
610
+ # Transfer configuration name in the form:
611
+ # +projects/{project_id}/transferConfigs/{config_id}+.
612
+ # @param start_time [Google::Protobuf::Timestamp | Hash]
613
+ # Start time of the range of transfer runs. For example,
614
+ # +"2017-05-25T00:00:00+00:00"+.
615
+ # A hash of the same form as `Google::Protobuf::Timestamp`
616
+ # can also be provided.
617
+ # @param end_time [Google::Protobuf::Timestamp | Hash]
618
+ # End time of the range of transfer runs. For example,
619
+ # +"2017-05-30T00:00:00+00:00"+.
620
+ # A hash of the same form as `Google::Protobuf::Timestamp`
621
+ # can also be provided.
622
+ # @param options [Google::Gax::CallOptions]
623
+ # Overrides the default settings for this call, e.g., timeout,
624
+ # retries, etc.
625
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsResponse]
626
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
627
+ # @example
628
+ # require "google/cloud/bigquery/data_transfer/v1"
629
+ #
630
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
631
+ # formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]")
632
+ #
633
+ # # TODO: Initialize +start_time+:
634
+ # start_time = {}
635
+ #
636
+ # # TODO: Initialize +end_time+:
637
+ # end_time = {}
638
+ # response = data_transfer_service_client.schedule_transfer_runs(formatted_parent, start_time, end_time)
639
+
640
+ def schedule_transfer_runs \
641
+ parent,
642
+ start_time,
643
+ end_time,
644
+ options: nil
645
+ req = {
646
+ parent: parent,
647
+ start_time: start_time,
648
+ end_time: end_time
649
+ }.delete_if { |_, v| v.nil? }
650
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::ScheduleTransferRunsRequest)
651
+ @schedule_transfer_runs.call(req, options)
652
+ end
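The +start_time+ and +end_time+ hashes follow the standard +Google::Protobuf::Timestamp+ shape (+seconds+/+nanos+). A sketch reusing +formatted_parent+ from the example above:

    start_time = { seconds: Time.utc(2017, 5, 25).to_i }
    end_time   = { seconds: Time.utc(2017, 5, 30).to_i }

    response = data_transfer_service_client.schedule_transfer_runs(
      formatted_parent, start_time, end_time
    )
    response.runs.each do |run|
      # Each element is a TransferRun message, per the
      # ScheduleTransferRunsResponse descriptor in data_transfer_pb.rb.
    end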
653
+
654
+ # Returns information about a particular transfer run.
655
+ #
656
+ # @param name [String]
657
+ # The name of the resource requested, for example:
658
+ # +projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}+
659
+ # @param options [Google::Gax::CallOptions]
660
+ # Overrides the default settings for this call, e.g., timeout,
661
+ # retries, etc.
662
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferRun]
663
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
664
+ # @example
665
+ # require "google/cloud/bigquery/data_transfer/v1"
666
+ #
667
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
668
+ # formatted_name = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]")
669
+ # response = data_transfer_service_client.get_transfer_run(formatted_name)
670
+
671
+ def get_transfer_run \
672
+ name,
673
+ options: nil
674
+ req = {
675
+ name: name
676
+ }.delete_if { |_, v| v.nil? }
677
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest)
678
+ @get_transfer_run.call(req, options)
679
+ end
680
+
681
+ # Deletes the specified transfer run.
682
+ #
683
+ # @param name [String]
684
+ # The name of the resource requested, for example:
685
+ # +projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}+
686
+ # @param options [Google::Gax::CallOptions]
687
+ # Overrides the default settings for this call, e.g., timeout,
688
+ # retries, etc.
689
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
690
+ # @example
691
+ # require "google/cloud/bigquery/data_transfer/v1"
692
+ #
693
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
694
+ # formatted_name = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]")
695
+ # data_transfer_service_client.delete_transfer_run(formatted_name)
696
+
697
+ def delete_transfer_run \
698
+ name,
699
+ options: nil
700
+ req = {
701
+ name: name
702
+ }.delete_if { |_, v| v.nil? }
703
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest)
704
+ @delete_transfer_run.call(req, options)
705
+ nil
706
+ end
707
+
708
+ # Returns information about running and completed jobs.
709
+ #
710
+ # @param parent [String]
711
+ # Name of transfer configuration for which transfer runs should be retrieved.
712
+ # Format of transfer configuration resource name is:
713
+ # +projects/{project_id}/transferConfigs/{config_id}+.
714
+ # @param states [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferState>]
715
+ # When specified, only transfer runs with requested states are returned.
716
+ # @param page_size [Integer]
717
+ # The maximum number of resources contained in the underlying API
718
+ # response. If page streaming is performed per-resource, this
719
+ # parameter does not affect the return value. If page streaming is
720
+ # performed per-page, this determines the maximum number of
721
+ # resources in a page.
722
+ # @param run_attempt [Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt]
723
+ # Indicates how run attempts are to be pulled.
724
+ # @param options [Google::Gax::CallOptions]
725
+ # Overrides the default settings for this call, e.g., timeout,
726
+ # retries, etc.
727
+ # @return [Google::Gax::PagedEnumerable<Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>]
728
+ # An enumerable of Google::Cloud::Bigquery::DataTransfer::V1::TransferRun instances.
729
+ # See Google::Gax::PagedEnumerable documentation for other
730
+ # operations such as per-page iteration or access to the response
731
+ # object.
732
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
733
+ # @example
734
+ # require "google/cloud/bigquery/data_transfer/v1"
735
+ #
736
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
737
+ # formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_transfer_config_path("[PROJECT]", "[TRANSFER_CONFIG]")
738
+ #
739
+ # # Iterate over all results.
740
+ # data_transfer_service_client.list_transfer_runs(formatted_parent).each do |element|
741
+ # # Process element.
742
+ # end
743
+ #
744
+ # # Or iterate over results one page at a time.
745
+ # data_transfer_service_client.list_transfer_runs(formatted_parent).each_page do |page|
746
+ # # Process each page at a time.
747
+ # page.each do |element|
748
+ # # Process element.
749
+ # end
750
+ # end
751
+
752
+ def list_transfer_runs \
753
+ parent,
754
+ states: nil,
755
+ page_size: nil,
756
+ run_attempt: nil,
757
+ options: nil
758
+ req = {
759
+ parent: parent,
760
+ states: states,
761
+ page_size: page_size,
762
+ run_attempt: run_attempt
763
+ }.delete_if { |_, v| v.nil? }
764
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest)
765
+ @list_transfer_runs.call(req, options)
766
+ end
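The keyword arguments map to the request fields shown in data_transfer_pb.rb; +:LATEST+ is one of the RunAttempt values defined there, and enums are accepted in symbol form. A sketch reusing +formatted_parent+ from the example above:

    data_transfer_service_client
      .list_transfer_runs(formatted_parent, run_attempt: :LATEST)
      .each do |run|
        # Process each latest-attempt transfer run.
      end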
767
+
768
+ # Returns user-facing log messages for the data transfer run.
769
+ #
770
+ # @param parent [String]
771
+ # Transfer run name in the form:
772
+ # +projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}+.
773
+ # @param page_size [Integer]
774
+ # The maximum number of resources contained in the underlying API
775
+ # response. If page streaming is performed per-resource, this
776
+ # parameter does not affect the return value. If page streaming is
777
+ # performed per-page, this determines the maximum number of
778
+ # resources in a page.
779
+ # @param message_types [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>]
780
+ # Message types to return. If not populated - INFO, WARNING and ERROR
781
+ # messages are returned.
782
+ # @param options [Google::Gax::CallOptions]
783
+ # Overrides the default settings for this call, e.g., timeout,
784
+ # retries, etc.
785
+ # @return [Google::Gax::PagedEnumerable<Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>]
786
+ # An enumerable of Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage instances.
787
+ # See Google::Gax::PagedEnumerable documentation for other
788
+ # operations such as per-page iteration or access to the response
789
+ # object.
790
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
791
+ # @example
792
+ # require "google/cloud/bigquery/data_transfer/v1"
793
+ #
794
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
795
+ # formatted_parent = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_run_path("[PROJECT]", "[TRANSFER_CONFIG]", "[RUN]")
796
+ #
797
+ # # Iterate over all results.
798
+ # data_transfer_service_client.list_transfer_logs(formatted_parent).each do |element|
799
+ # # Process element.
800
+ # end
801
+ #
802
+ # # Or iterate over results one page at a time.
803
+ # data_transfer_service_client.list_transfer_logs(formatted_parent).each_page do |page|
804
+ # # Process each page at a time.
805
+ # page.each do |element|
806
+ # # Process element.
807
+ # end
808
+ # end
809
+
810
+ def list_transfer_logs \
811
+ parent,
812
+ page_size: nil,
813
+ message_types: nil,
814
+ options: nil
815
+ req = {
816
+ parent: parent,
817
+ page_size: page_size,
818
+ message_types: message_types
819
+ }.delete_if { |_, v| v.nil? }
820
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest)
821
+ @list_transfer_logs.call(req, options)
822
+ end
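A sketch restricting the listing to a subset of severities via the +message_types+ parameter documented above, reusing the run-scoped +formatted_parent+ from the example; enum values are accepted in symbol form:

    data_transfer_service_client
      .list_transfer_logs(formatted_parent, message_types: [:ERROR])
      .each do |message|
        # Process each error-severity TransferMessage.
      end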
823
+
824
+ # Returns true if valid credentials exist for the given data source and
825
+ # requesting user.
826
+ # Some data sources don't support service accounts, so we need to talk to
+ # them on behalf of the end user. This API just checks whether we have an
+ # OAuth token for the particular user, which is a prerequisite before the
+ # user can create a transfer config.
830
+ #
831
+ # @param name [String]
832
+ # The data source in the form:
833
+ # +projects/{project_id}/dataSources/{data_source_id}+
834
+ # @param options [Google::Gax::CallOptions]
835
+ # Overrides the default settings for this call, e.g., timeout,
836
+ # retries, etc.
837
+ # @return [Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse]
838
+ # @raise [Google::Gax::GaxError] if the RPC is aborted.
839
+ # @example
840
+ # require "google/cloud/bigquery/data_transfer/v1"
841
+ #
842
+ # data_transfer_service_client = Google::Cloud::Bigquery::DataTransfer::V1.new
843
+ # formatted_name = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient.project_data_source_path("[PROJECT]", "[DATA_SOURCE]")
844
+ # response = data_transfer_service_client.check_valid_creds(formatted_name)
845
+
846
+ def check_valid_creds \
847
+ name,
848
+ options: nil
849
+ req = {
850
+ name: name
851
+ }.delete_if { |_, v| v.nil? }
852
+ req = Google::Gax::to_proto(req, Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest)
853
+ @check_valid_creds.call(req, options)
854
+ end
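The response carries the single +has_valid_creds+ boolean defined in data_transfer_pb.rb, so a typical caller branches on it before creating a transfer configuration. A sketch reusing +formatted_name+ from the example above:

    response = data_transfer_service_client.check_valid_creds(formatted_name)
    unless response.has_valid_creds
      # Run the OAuth flow described under create_transfer_config and pass the
      # resulting authorization_code when creating the transfer configuration.
    end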
855
+ end
856
+ end
857
+ end
858
+ end
859
+ end
860
+ end