google-cloud-bigquery-data_transfer 0.1.0

Files changed (23)
  1. checksums.yaml +7 -0
  2. data/.yardopts +8 -0
  3. data/LICENSE +201 -0
  4. data/README.md +54 -0
  5. data/lib/google/cloud/bigquery/data_transfer.rb +132 -0
  6. data/lib/google/cloud/bigquery/data_transfer/credentials.rb +32 -0
  7. data/lib/google/cloud/bigquery/data_transfer/v1.rb +125 -0
  8. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_pb.rb +189 -0
  9. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client.rb +860 -0
  10. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client_config.json +91 -0
  11. data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb.rb +86 -0
  12. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/data_transfer.rb +500 -0
  13. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/transfer.rb +216 -0
  14. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/any.rb +124 -0
  15. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/duration.rb +90 -0
  16. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/field_mask.rb +223 -0
  17. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/struct.rb +73 -0
  18. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/timestamp.rb +106 -0
  19. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/wrappers.rb +89 -0
  20. data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/rpc/status.rb +83 -0
  21. data/lib/google/cloud/bigquery/data_transfer/v1/doc/overview.rb +79 -0
  22. data/lib/google/cloud/bigquery/data_transfer/v1/transfer_pb.rb +82 -0
  23. metadata +149 -0
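
Before the generated sources below, here is a minimal usage sketch. It assumes the standard entry point generated for 0.1.0 (`Google::Cloud::Bigquery::DataTransfer.new`, documented in data/README.md and data/lib/google/cloud/bigquery/data_transfer.rb above), uses a placeholder project id, and picks up credentials from the environment:

```ruby
require "google/cloud/bigquery/data_transfer"

# Instantiate the v1 client; credentials come from the environment
# (for example GOOGLE_APPLICATION_CREDENTIALS) unless passed explicitly.
data_transfer = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)

# "my-project" is a placeholder project id.
formatted_parent =
  Google::Cloud::Bigquery::DataTransfer::V1::DataTransferServiceClient
  .project_path("my-project")

# List the data sources that can feed transfers into BigQuery.
data_transfer.list_data_sources(formatted_parent).each do |data_source|
  puts "#{data_source.data_source_id}: #{data_source.display_name}"
end
```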
data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client_config.json
@@ -0,0 +1,91 @@
+{
+  "interfaces": {
+    "google.cloud.bigquery.datatransfer.v1.DataTransferService": {
+      "retry_codes": {
+        "idempotent": [
+          "DEADLINE_EXCEEDED",
+          "UNAVAILABLE"
+        ],
+        "non_idempotent": []
+      },
+      "retry_params": {
+        "default": {
+          "initial_retry_delay_millis": 100,
+          "retry_delay_multiplier": 1.3,
+          "max_retry_delay_millis": 60000,
+          "initial_rpc_timeout_millis": 20000,
+          "rpc_timeout_multiplier": 1.0,
+          "max_rpc_timeout_millis": 20000,
+          "total_timeout_millis": 600000
+        }
+      },
+      "methods": {
+        "GetDataSource": {
+          "timeout_millis": 20000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "ListDataSources": {
+          "timeout_millis": 20000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "CreateTransferConfig": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "non_idempotent",
+          "retry_params_name": "default"
+        },
+        "UpdateTransferConfig": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "non_idempotent",
+          "retry_params_name": "default"
+        },
+        "DeleteTransferConfig": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "GetTransferConfig": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "ListTransferConfigs": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "ScheduleTransferRuns": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "non_idempotent",
+          "retry_params_name": "default"
+        },
+        "GetTransferRun": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "DeleteTransferRun": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "ListTransferRuns": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "ListTransferLogs": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "CheckValidCreds": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        }
+      }
+    }
+  }
+}
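
This JSON supplies the per-method defaults (timeouts, retryable status codes, backoff parameters) that the generated `DataTransferServiceClient` loads at construction time. A hedged sketch of overriding them, assuming the GAX-era constructor options `timeout:` and `client_config:` are passed through by `Google::Cloud::Bigquery::DataTransfer.new`:

```ruby
require "google/cloud/bigquery/data_transfer"

# Override selected defaults from data_transfer_service_client_config.json.
# The hash mirrors the JSON structure above; only keys being changed are needed.
custom_config = {
  "interfaces" => {
    "google.cloud.bigquery.datatransfer.v1.DataTransferService" => {
      "methods" => {
        "ListTransferRuns" => { "timeout_millis" => 60_000 }
      }
    }
  }
}

data_transfer = Google::Cloud::Bigquery::DataTransfer.new(
  version: :v1,
  timeout: 45,                  # default per-call timeout, in seconds
  client_config: custom_config  # per-method overrides
)
```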
data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb.rb
@@ -0,0 +1,86 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# Source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto for package 'google.cloud.bigquery.datatransfer.v1'
+# Original file comments:
+# Copyright 2018 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+require 'grpc'
+require 'google/cloud/bigquery/data_transfer/v1/data_transfer_pb'
+
+module Google
+  module Cloud
+    module Bigquery
+      module DataTransfer
+        module V1
+          module DataTransferService
+            # The Google BigQuery Data Transfer Service API enables BigQuery users to
+            # configure the transfer of their data from other Google products into BigQuery.
+            # This service contains methods that are end-user exposed. It backs up the
+            # frontend.
+            class Service
+
+              include GRPC::GenericService
+
+              self.marshal_class_method = :encode
+              self.unmarshal_class_method = :decode
+              self.service_name = 'google.cloud.bigquery.datatransfer.v1.DataTransferService'
+
+              # Retrieves a supported data source and returns its settings,
+              # which can be used for UI rendering.
+              rpc :GetDataSource, GetDataSourceRequest, DataSource
+              # Lists supported data sources and returns their settings,
+              # which can be used for UI rendering.
+              rpc :ListDataSources, ListDataSourcesRequest, ListDataSourcesResponse
+              # Creates a new data transfer configuration.
+              rpc :CreateTransferConfig, CreateTransferConfigRequest, TransferConfig
+              # Updates a data transfer configuration.
+              # All fields must be set, even if they are not updated.
+              rpc :UpdateTransferConfig, UpdateTransferConfigRequest, TransferConfig
+              # Deletes a data transfer configuration,
+              # including any associated transfer runs and logs.
+              rpc :DeleteTransferConfig, DeleteTransferConfigRequest, Google::Protobuf::Empty
+              # Returns information about a data transfer config.
+              rpc :GetTransferConfig, GetTransferConfigRequest, TransferConfig
+              # Returns information about all data transfers in the project.
+              rpc :ListTransferConfigs, ListTransferConfigsRequest, ListTransferConfigsResponse
+              # Creates transfer runs for a time range [start_time, end_time].
+              # For each date - or whatever granularity the data source supports - in the
+              # range, one transfer run is created.
+              # Note that runs are created per UTC time in the time range.
+              rpc :ScheduleTransferRuns, ScheduleTransferRunsRequest, ScheduleTransferRunsResponse
+              # Returns information about the particular transfer run.
+              rpc :GetTransferRun, GetTransferRunRequest, TransferRun
+              # Deletes the specified transfer run.
+              rpc :DeleteTransferRun, DeleteTransferRunRequest, Google::Protobuf::Empty
+              # Returns information about running and completed jobs.
+              rpc :ListTransferRuns, ListTransferRunsRequest, ListTransferRunsResponse
+              # Returns user facing log messages for the data transfer run.
+              rpc :ListTransferLogs, ListTransferLogsRequest, ListTransferLogsResponse
+              # Returns true if valid credentials exist for the given data source and
+              # requesting user.
+              # Some data sources don't support service accounts, so we need to talk to
+              # them on behalf of the end user. This API just checks whether we have an
+              # OAuth token for the particular user, which is a prerequisite before the
+              # user can create a transfer config.
+              rpc :CheckValidCreds, CheckValidCredsRequest, CheckValidCredsResponse
+            end
+
+            Stub = Service.rpc_stub_class
+          end
+        end
+      end
+    end
+  end
+end
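
The higher-level `DataTransferServiceClient` wraps the `Stub` defined above, but the stub can also be called directly over gRPC. A sketch, assuming default TLS channel credentials are enough to reach the endpoint; a real request additionally needs per-call OAuth credentials:

```ruby
require "grpc"
require "google/cloud/bigquery/data_transfer/v1/data_transfer_pb"
require "google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb"

# Low-level call against the generated stub (transport wiring only;
# per-call OAuth credentials are omitted here).
stub = Google::Cloud::Bigquery::DataTransfer::V1::DataTransferService::Stub.new(
  "bigquerydatatransfer.googleapis.com:443",
  GRPC::Core::ChannelCredentials.new
)

request = Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest.new(
  parent: "projects/my-project" # placeholder project id
)

response = stub.list_data_sources(request)
response.data_sources.each { |ds| puts ds.data_source_id }
```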
data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/data_transfer.rb
@@ -0,0 +1,500 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+module Google
+  module Cloud
+    module Bigquery
+      module DataTransfer
+        ##
+        # # BigQuery Data Transfer API Contents
+        #
+        # | Class | Description |
+        # | ----- | ----------- |
+        # | [DataTransferServiceClient][] | The Google BigQuery Data Transfer Service API enables BigQuery users to configure the transfer of their data from other Google Products into BigQuery. |
+        # | [Data Types][] | Data types for Google::Cloud::Bigquery::DataTransfer::V1 |
+        #
+        # [DataTransferServiceClient]: https://googlecloudplatform.github.io/google-cloud-ruby/#/docs/google-cloud-bigquery-data_transfer/latest/google/cloud/bigquery/datatransfer/v1/datatransferserviceclient
+        # [Data Types]: https://googlecloudplatform.github.io/google-cloud-ruby/#/docs/google-cloud-bigquery-data_transfer/latest/google/cloud/bigquery/datatransfer/v1/datatypes
+        #
+        module V1
+          # Represents a data source parameter with validation rules, so that
+          # parameters can be rendered in the UI. These parameters are given to us by
+          # supported data sources, and include all needed information for rendering
+          # and validation.
+          # Thus, whoever uses this API can decide to generate either a generic UI,
+          # or custom data source specific forms.
+          # @!attribute [rw] param_id
+          # @return [String]
+          # Parameter identifier.
+          # @!attribute [rw] display_name
+          # @return [String]
+          # Parameter display name in the user interface.
+          # @!attribute [rw] description
+          # @return [String]
+          # Parameter description.
+          # @!attribute [rw] type
+          # @return [Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter::Type]
+          # Parameter type.
+          # @!attribute [rw] required
+          # @return [true, false]
+          # Is parameter required.
+          # @!attribute [rw] repeated
+          # @return [true, false]
+          # Can parameter have multiple values.
+          # @!attribute [rw] validation_regex
+          # @return [String]
+          # Regular expression which can be used for parameter validation.
+          # @!attribute [rw] allowed_values
+          # @return [Array<String>]
+          # All possible values for the parameter.
+          # @!attribute [rw] min_value
+          # @return [Google::Protobuf::DoubleValue]
+          # For integer and double values specifies minimum allowed value.
+          # @!attribute [rw] max_value
+          # @return [Google::Protobuf::DoubleValue]
+          # For integer and double values specifies maximum allowed value.
+          # @!attribute [rw] fields
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter>]
+          # When parameter is a record, describes child fields.
+          # @!attribute [rw] validation_description
+          # @return [String]
+          # Description of the requirements for this field, in case the user input does
+          # not fulfill the regex pattern or min/max values.
+          # @!attribute [rw] validation_help_url
+          # @return [String]
+          # URL to a help document to further explain the naming requirements.
+          # @!attribute [rw] immutable
+          # @return [true, false]
+          # Cannot be changed after initial creation.
+          # @!attribute [rw] recurse
+          # @return [true, false]
+          # If set to true, schema should be taken from the parent with the same
+          # parameter_id. Only applicable when parameter type is RECORD.
+          class DataSourceParameter
+            # Parameter type.
+            module Type
+              # Type unspecified.
+              TYPE_UNSPECIFIED = 0
+
+              # String parameter.
+              STRING = 1
+
+              # Integer parameter (64-bits).
+              # Will be serialized to json as string.
+              INTEGER = 2
+
+              # Double precision floating point parameter.
+              DOUBLE = 3
+
+              # Boolean parameter.
+              BOOLEAN = 4
+
+              # Record parameter.
+              RECORD = 5
+
+              # Page ID for a Google+ Page.
+              PLUS_PAGE = 6
+            end
+          end
+
+          # Represents data source metadata. Metadata is sufficient to
+          # render UI and request proper OAuth tokens.
+          # @!attribute [rw] name
+          # @return [String]
+          # Data source resource name.
+          # @!attribute [rw] data_source_id
+          # @return [String]
+          # Data source id.
+          # @!attribute [rw] display_name
+          # @return [String]
+          # User friendly data source name.
+          # @!attribute [rw] description
+          # @return [String]
+          # User friendly data source description string.
+          # @!attribute [rw] client_id
+          # @return [String]
+          # Data source client id which should be used to receive refresh token.
+          # When not supplied, no offline credentials are populated for data transfer.
+          # @!attribute [rw] scopes
+          # @return [Array<String>]
+          # API auth scopes for which refresh token needs to be obtained. Only valid
+          # when +client_id+ is specified. Ignored otherwise. These are scopes needed
+          # by a data source to prepare data and ingest them into BigQuery,
+          # e.g., https://www.googleapis.com/auth/bigquery
+          # @!attribute [rw] transfer_type
+          # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferType]
+          # Transfer type. Currently supports only batch transfers,
+          # which are transfers that use the BigQuery batch APIs (load or
+          # query) to ingest the data.
+          # @!attribute [rw] supports_multiple_transfers
+          # @return [true, false]
+          # Indicates whether the data source supports multiple transfers
+          # to different BigQuery targets.
+          # @!attribute [rw] update_deadline_seconds
+          # @return [Integer]
+          # The number of seconds to wait for an update from the data source
+          # before BigQuery marks the transfer as failed.
+          # @!attribute [rw] default_schedule
+          # @return [String]
+          # Default data transfer schedule.
+          # Examples of valid schedules include:
+          # +1st,3rd monday of month 15:30+,
+          # +every wed,fri of jan,jun 13:15+, and
+          # +first sunday of quarter 00:00+.
+          # @!attribute [rw] supports_custom_schedule
+          # @return [true, false]
+          # Specifies whether the data source supports a user defined schedule, or
+          # operates on the default schedule.
+          # When set to +true+, user can override default schedule.
+          # @!attribute [rw] parameters
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::DataSourceParameter>]
+          # Data source parameters.
+          # @!attribute [rw] help_url
+          # @return [String]
+          # URL for the help document for this data source.
+          # @!attribute [rw] authorization_type
+          # @return [Google::Cloud::Bigquery::DataTransfer::V1::DataSource::AuthorizationType]
+          # Indicates the type of authorization.
+          # @!attribute [rw] data_refresh_type
+          # @return [Google::Cloud::Bigquery::DataTransfer::V1::DataSource::DataRefreshType]
+          # Specifies whether the data source supports automatic data refresh for the
+          # past few days, and how it's supported.
+          # For some data sources, data might not be complete until a few days later,
+          # so it's useful to refresh data automatically.
+          # @!attribute [rw] default_data_refresh_window_days
+          # @return [Integer]
+          # Default data refresh window in days.
+          # Only meaningful when +data_refresh_type+ = +SLIDING_WINDOW+.
+          # @!attribute [rw] manual_runs_disabled
+          # @return [true, false]
+          # Disables backfilling and manual run scheduling
+          # for the data source.
+          # @!attribute [rw] minimum_schedule_interval
+          # @return [Google::Protobuf::Duration]
+          # The minimum interval for scheduler to schedule runs.
+          class DataSource
+            # The type of authorization needed for this data source.
+            module AuthorizationType
+              # Type unspecified.
+              AUTHORIZATION_TYPE_UNSPECIFIED = 0
+
+              # Use OAuth 2 authorization codes that can be exchanged
+              # for a refresh token on the backend.
+              AUTHORIZATION_CODE = 1
+
+              # Return an authorization code for a given Google+ page that can then be
+              # exchanged for a refresh token on the backend.
+              GOOGLE_PLUS_AUTHORIZATION_CODE = 2
+            end
+
+            # Represents how the data source supports data auto refresh.
+            module DataRefreshType
+              # The data source won't support data auto refresh, which is the default value.
+              DATA_REFRESH_TYPE_UNSPECIFIED = 0
+
+              # The data source supports data auto refresh, and runs will be scheduled
+              # for the past few days. Does not allow custom values to be set for each
+              # transfer config.
+              SLIDING_WINDOW = 1
+
+              # The data source supports data auto refresh, and runs will be scheduled
+              # for the past few days. Allows custom values to be set for each transfer
+              # config.
+              CUSTOM_SLIDING_WINDOW = 2
+            end
+          end
+
+          # A request to get data source info.
+          # @!attribute [rw] name
+          # @return [String]
+          # The field will contain the name of the resource requested, for example:
+          # +projects/{project_id}/dataSources/{data_source_id}+
+          class GetDataSourceRequest; end
+
+          # Request to list supported data sources and their data transfer settings.
+          # @!attribute [rw] parent
+          # @return [String]
+          # The BigQuery project id for which data sources should be returned.
+          # Must be in the form: +projects/{project_id}+
+          # @!attribute [rw] page_token
+          # @return [String]
+          # Pagination token, which can be used to request a specific page
+          # of +ListDataSourcesRequest+ list results. For multiple-page
+          # results, +ListDataSourcesResponse+ outputs
+          # a +next_page+ token, which can be used as the
+          # +page_token+ value to request the next page of list results.
+          # @!attribute [rw] page_size
+          # @return [Integer]
+          # Page size. The default page size is the maximum value of 1000 results.
+          class ListDataSourcesRequest; end
+
+          # Returns list of supported data sources and their metadata.
+          # @!attribute [rw] data_sources
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::DataSource>]
+          # List of supported data sources and their transfer settings.
+          # @!attribute [rw] next_page_token
+          # @return [String]
+          # Output only. The next-pagination token. For multiple-page list results,
+          # this token can be used as the
+          # +ListDataSourcesRequest.page_token+
+          # to request the next page of list results.
+          class ListDataSourcesResponse; end
+
+          # A request to create a data transfer configuration. If new credentials are
+          # needed for this transfer configuration, an authorization code must be
+          # provided. If an authorization code is provided, the transfer configuration
+          # will be associated with the user id corresponding to the
+          # authorization code. Otherwise, the transfer configuration will be associated
+          # with the calling user.
+          # @!attribute [rw] parent
+          # @return [String]
+          # The BigQuery project id where the transfer configuration should be created.
+          # Must be in the format /projects/{project_id}/locations/{location_id}
+          # If the specified location and the location of the destination BigQuery
+          # dataset do not match, the request will fail.
+          # @!attribute [rw] transfer_config
+          # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig]
+          # Data transfer configuration to create.
+          # @!attribute [rw] authorization_code
+          # @return [String]
+          # Optional OAuth2 authorization code to use with this transfer configuration.
+          # This is required if new credentials are needed, as indicated by
+          # +CheckValidCreds+.
+          # In order to obtain authorization_code, please make a
+          # request to
+          # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
+          #
+          # * client_id should be OAuth client_id of BigQuery DTS API for the given
+          #   data source returned by ListDataSources method.
+          # * data_source_scopes are the scopes returned by ListDataSources method.
+          # * redirect_uri is an optional parameter. If not specified, then
+          #   authorization code is posted to the opener of authorization flow window.
+          #   Otherwise it will be sent to the redirect uri. A special value of
+          #   urn:ietf:wg:oauth:2.0:oob means that authorization code should be
+          #   returned in the title bar of the browser, with the page text prompting
+          #   the user to copy the code and paste it in the application.
+          class CreateTransferConfigRequest; end
+
+          # A request to update a transfer configuration. To update the user id of the
+          # transfer configuration, an authorization code needs to be provided.
+          # @!attribute [rw] transfer_config
+          # @return [Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig]
+          # Data transfer configuration to update.
+          # @!attribute [rw] authorization_code
+          # @return [String]
+          # Optional OAuth2 authorization code to use with this transfer configuration.
+          # If it is provided, the transfer configuration will be associated with the
+          # authorizing user.
+          # In order to obtain authorization_code, please make a
+          # request to
+          # https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
+          #
+          # * client_id should be OAuth client_id of BigQuery DTS API for the given
+          #   data source returned by ListDataSources method.
+          # * data_source_scopes are the scopes returned by ListDataSources method.
+          # * redirect_uri is an optional parameter. If not specified, then
+          #   authorization code is posted to the opener of authorization flow window.
+          #   Otherwise it will be sent to the redirect uri. A special value of
+          #   urn:ietf:wg:oauth:2.0:oob means that authorization code should be
+          #   returned in the title bar of the browser, with the page text prompting
+          #   the user to copy the code and paste it in the application.
+          # @!attribute [rw] update_mask
+          # @return [Google::Protobuf::FieldMask]
+          # Required list of fields to be updated in this request.
+          class UpdateTransferConfigRequest; end
+
+          # A request to get data transfer information.
+          # @!attribute [rw] name
+          # @return [String]
+          # The field will contain the name of the resource requested, for example:
+          # +projects/{project_id}/transferConfigs/{config_id}+
+          class GetTransferConfigRequest; end
+
+          # A request to delete data transfer information. All associated transfer runs
+          # and log messages will be deleted as well.
+          # @!attribute [rw] name
+          # @return [String]
+          # The field will contain the name of the resource requested, for example:
+          # +projects/{project_id}/transferConfigs/{config_id}+
+          class DeleteTransferConfigRequest; end
+
+          # A request to get data transfer run information.
+          # @!attribute [rw] name
+          # @return [String]
+          # The field will contain the name of the resource requested, for example:
+          # +projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}+
+          class GetTransferRunRequest; end
+
+          # A request to delete data transfer run information.
+          # @!attribute [rw] name
+          # @return [String]
+          # The field will contain the name of the resource requested, for example:
+          # +projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}+
+          class DeleteTransferRunRequest; end
+
+          # A request to list data transfers configured for a BigQuery project.
+          # @!attribute [rw] parent
+          # @return [String]
+          # The BigQuery project id for which data sources
+          # should be returned: +projects/{project_id}+.
+          # @!attribute [rw] data_source_ids
+          # @return [Array<String>]
+          # When specified, only configurations of requested data sources are returned.
+          # @!attribute [rw] page_token
+          # @return [String]
+          # Pagination token, which can be used to request a specific page
+          # of +ListTransfersRequest+ list results. For multiple-page
+          # results, +ListTransfersResponse+ outputs
+          # a +next_page+ token, which can be used as the
+          # +page_token+ value to request the next page of list results.
+          # @!attribute [rw] page_size
+          # @return [Integer]
+          # Page size. The default page size is the maximum value of 1000 results.
+          class ListTransferConfigsRequest; end
+
+          # The returned list of pipelines in the project.
+          # @!attribute [rw] transfer_configs
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig>]
+          # Output only. The stored pipeline transfer configurations.
+          # @!attribute [rw] next_page_token
+          # @return [String]
+          # Output only. The next-pagination token. For multiple-page list results,
+          # this token can be used as the
+          # +ListTransferConfigsRequest.page_token+
+          # to request the next page of list results.
+          class ListTransferConfigsResponse; end
+
+          # A request to list data transfer runs. The UI can use this method to show or
+          # filter specific data transfer runs. The data source can use this method to
+          # request all scheduled transfer runs.
+          # @!attribute [rw] parent
+          # @return [String]
+          # Name of transfer configuration for which transfer runs should be retrieved.
+          # Format of transfer configuration resource name is:
+          # +projects/{project_id}/transferConfigs/{config_id}+.
+          # @!attribute [rw] states
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferState>]
+          # When specified, only transfer runs with requested states are returned.
+          # @!attribute [rw] page_token
+          # @return [String]
+          # Pagination token, which can be used to request a specific page
+          # of +ListTransferRunsRequest+ list results. For multiple-page
+          # results, +ListTransferRunsResponse+ outputs
+          # a +next_page+ token, which can be used as the
+          # +page_token+ value to request the next page of list results.
+          # @!attribute [rw] page_size
+          # @return [Integer]
+          # Page size. The default page size is the maximum value of 1000 results.
+          # @!attribute [rw] run_attempt
+          # @return [Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest::RunAttempt]
+          # Indicates how run attempts are to be pulled.
+          class ListTransferRunsRequest
+            # Represents which runs should be pulled.
+            module RunAttempt
+              # All runs should be returned.
+              RUN_ATTEMPT_UNSPECIFIED = 0
+
+              # Only latest run per day should be returned.
+              LATEST = 1
+            end
+          end
+
+          # The returned list of pipelines in the project.
+          # @!attribute [rw] transfer_runs
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>]
+          # Output only. The stored pipeline transfer runs.
+          # @!attribute [rw] next_page_token
+          # @return [String]
+          # Output only. The next-pagination token. For multiple-page list results,
+          # this token can be used as the
+          # +ListTransferRunsRequest.page_token+
+          # to request the next page of list results.
+          class ListTransferRunsResponse; end
+
+          # A request to get user facing log messages associated with a data transfer run.
+          # @!attribute [rw] parent
+          # @return [String]
+          # Transfer run name in the form:
+          # +projects/{project_id}/transferConfigs/{config_Id}/runs/{run_id}+.
+          # @!attribute [rw] page_token
+          # @return [String]
+          # Pagination token, which can be used to request a specific page
+          # of +ListTransferLogsRequest+ list results. For multiple-page
+          # results, +ListTransferLogsResponse+ outputs
+          # a +next_page+ token, which can be used as the
+          # +page_token+ value to request the next page of list results.
+          # @!attribute [rw] page_size
+          # @return [Integer]
+          # Page size. The default page size is the maximum value of 1000 results.
+          # @!attribute [rw] message_types
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage::MessageSeverity>]
+          # Message types to return. If not populated, INFO, WARNING, and ERROR
+          # messages are returned.
+          class ListTransferLogsRequest; end
+
+          # The returned list of transfer run messages.
+          # @!attribute [rw] transfer_messages
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferMessage>]
+          # Output only. The stored pipeline transfer messages.
+          # @!attribute [rw] next_page_token
+          # @return [String]
+          # Output only. The next-pagination token. For multiple-page list results,
+          # this token can be used as the
+          # +GetTransferRunLogRequest.page_token+
+          # to request the next page of list results.
+          class ListTransferLogsResponse; end
+
+          # A request to determine whether the user has valid credentials. This method
+          # is used to limit the number of OAuth popups in the user interface. The
+          # user id is inferred from the API call context.
+          # If the data source has the Google+ authorization type, this method
+          # returns false, as it cannot be determined whether the credentials are
+          # already valid merely based on the user id.
+          # @!attribute [rw] name
+          # @return [String]
+          # The data source in the form:
+          # +projects/{project_id}/dataSources/{data_source_id}+
+          class CheckValidCredsRequest; end
+
+          # A response indicating whether the credentials exist and are valid.
+          # @!attribute [rw] has_valid_creds
+          # @return [true, false]
+          # If set to +true+, the credentials exist and are valid.
+          class CheckValidCredsResponse; end
+
+          # A request to schedule transfer runs for a time range.
+          # @!attribute [rw] parent
+          # @return [String]
+          # Transfer configuration name in the form:
+          # +projects/{project_id}/transferConfigs/{config_id}+.
+          # @!attribute [rw] start_time
+          # @return [Google::Protobuf::Timestamp]
+          # Start time of the range of transfer runs. For example,
+          # +"2017-05-25T00:00:00+00:00"+.
+          # @!attribute [rw] end_time
+          # @return [Google::Protobuf::Timestamp]
+          # End time of the range of transfer runs. For example,
+          # +"2017-05-30T00:00:00+00:00"+.
+          class ScheduleTransferRunsRequest; end
+
+          # A response to schedule transfer runs for a time range.
+          # @!attribute [rw] runs
+          # @return [Array<Google::Cloud::Bigquery::DataTransfer::V1::TransferRun>]
+          # The transfer runs that were scheduled.
+          class ScheduleTransferRunsResponse; end
+        end
+      end
+    end
+  end
+end
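
As a closing example, the `ScheduleTransferRunsRequest`/`ScheduleTransferRunsResponse` pair documented above maps to a single client call. A sketch, assuming the generated client exposes `schedule_transfer_runs(parent, start_time, end_time)` with the positional arguments typical of GAPIC clients of this generation, and a placeholder transfer config name:

```ruby
require "google/cloud/bigquery/data_transfer"
require "google/protobuf/timestamp_pb"

data_transfer = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)

# Placeholder name; in practice it comes from create_transfer_config
# or list_transfer_configs.
parent = "projects/my-project/transferConfigs/my-config"

# Backfill the last three days; one run is created per UTC day in the range.
now        = Time.now.utc
start_time = Google::Protobuf::Timestamp.new(seconds: (now - 3 * 86_400).to_i)
end_time   = Google::Protobuf::Timestamp.new(seconds: now.to_i)

response = data_transfer.schedule_transfer_runs(parent, start_time, end_time)
response.runs.each { |run| puts "scheduled: #{run.name}" }
```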