google-cloud-bigquery-data_transfer 0.1.0 → 0.2.0

@@ -0,0 +1,214 @@
+ # Copyright 2018 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ module Google
+ module Cloud
+ module Bigquery
+ module Datatransfer
+ module V1
+ # Represents a data transfer configuration. A transfer configuration
+ # contains all metadata needed to perform a data transfer. For example,
+ # +destination_dataset_id+ specifies where data should be stored.
+ # When a new transfer configuration is created, the specified
+ # +destination_dataset_id+ is created when needed and shared with the
+ # appropriate data source service account.
+ # @!attribute [rw] name
+ # @return [String]
+ # The resource name of the transfer config.
+ # Transfer config names have the form
+ # +projects/{project_id}/transferConfigs/{config_id}+,
+ # where +config_id+ is usually a uuid, even though this is not
+ # guaranteed or required. The name is ignored when creating a transfer
+ # config.
+ # @!attribute [rw] destination_dataset_id
+ # @return [String]
+ # The BigQuery target dataset id.
+ # @!attribute [rw] display_name
+ # @return [String]
+ # User-specified display name for the data transfer.
+ # @!attribute [rw] data_source_id
+ # @return [String]
+ # Data source id. Cannot be changed once the data transfer is created.
+ # @!attribute [rw] params
+ # @return [Google::Protobuf::Struct]
+ # Data transfer specific parameters.
+ # @!attribute [rw] schedule
+ # @return [String]
+ # Data transfer schedule.
+ # If the data source does not support a custom schedule, this should be
+ # empty. If it is empty, the default value for the data source will be
+ # used.
+ # The specified times are in UTC.
+ # Examples of valid format:
+ # +1st,3rd monday of month 15:30+,
+ # +every wed,fri of jan,jun 13:15+, and
+ # +first sunday of quarter 00:00+.
+ # See more explanation about the format here:
+ # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
+ # NOTE: the granularity should be at least 8 hours, or less frequent.
+ # @!attribute [rw] data_refresh_window_days
+ # @return [Integer]
+ # The number of days to look back to automatically refresh the data.
+ # For example, if +data_refresh_window_days = 10+, then every day
+ # BigQuery reingests data for [today-10, today-1], rather than ingesting data
+ # for just [today-1].
+ # Only valid if the data source supports the feature. Set the value to 0
+ # to use the default value.
+ # @!attribute [rw] disabled
+ # @return [true, false]
+ # Is this config disabled? When set to true, no runs are scheduled
+ # for a given transfer.
+ # @!attribute [rw] update_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Data transfer modification time. Ignored by server on input.
+ # @!attribute [rw] next_run_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Next time when data transfer will run.
+ # @!attribute [rw] state
+ # @return [Google::Cloud::Bigquery::Datatransfer::V1::TransferState]
+ # Output only. State of the most recently updated transfer run.
+ # @!attribute [rw] user_id
+ # @return [Integer]
+ # Output only. Unique ID of the user on whose behalf the transfer is done.
+ # Applicable only to data sources that do not support service accounts.
+ # When set to 0, the data source service account credentials are used.
+ # May be negative. Note that this identifier is not stable.
+ # It may change over time even for the same user.
+ # @!attribute [rw] dataset_region
+ # @return [String]
+ # Output only. Region in which the BigQuery dataset is located.
+ class TransferConfig; end
+
+ # Represents a data transfer run.
+ # @!attribute [rw] name
+ # @return [String]
+ # The resource name of the transfer run.
+ # Transfer run names have the form
+ # +projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}+.
+ # The name is ignored when creating a transfer run.
+ # @!attribute [rw] schedule_time
+ # @return [Google::Protobuf::Timestamp]
+ # Minimum time after which a transfer run can be started.
+ # @!attribute [rw] run_time
+ # @return [Google::Protobuf::Timestamp]
+ # For batch transfer runs, specifies the date and time that
+ # data should be ingested.
+ # @!attribute [rw] error_status
+ # @return [Google::Rpc::Status]
+ # Status of the transfer run.
+ # @!attribute [rw] start_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Time when the transfer run was started.
+ # Parameter ignored by server for input requests.
+ # @!attribute [rw] end_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Time when the transfer run ended.
+ # Parameter ignored by server for input requests.
+ # @!attribute [rw] update_time
+ # @return [Google::Protobuf::Timestamp]
+ # Output only. Last time the data transfer run state was updated.
+ # @!attribute [rw] params
+ # @return [Google::Protobuf::Struct]
+ # Output only. Data transfer specific parameters.
+ # @!attribute [rw] destination_dataset_id
+ # @return [String]
+ # Output only. The BigQuery target dataset id.
+ # @!attribute [rw] data_source_id
+ # @return [String]
+ # Output only. Data source id.
+ # @!attribute [rw] state
+ # @return [Google::Cloud::Bigquery::Datatransfer::V1::TransferState]
+ # Data transfer run state. Ignored for input requests.
+ # @!attribute [rw] user_id
+ # @return [Integer]
+ # Output only. Unique ID of the user on whose behalf the transfer is done.
+ # Applicable only to data sources that do not support service accounts.
+ # When set to 0, the data source service account credentials are used.
+ # May be negative. Note that this identifier is not stable.
+ # It may change over time even for the same user.
+ # @!attribute [rw] schedule
+ # @return [String]
+ # Output only. Describes the schedule of this transfer run if it was
+ # created as part of a regular schedule. For batch transfer runs that are
+ # scheduled manually, this is empty.
+ # NOTE: the system might choose to delay the schedule depending on the
+ # current load, so +schedule_time+ doesn't always match this.
+ class TransferRun; end
+
+ # Represents a user facing message for a particular data transfer run.
+ # @!attribute [rw] message_time
+ # @return [Google::Protobuf::Timestamp]
+ # Time when the message was logged.
+ # @!attribute [rw] severity
+ # @return [Google::Cloud::Bigquery::Datatransfer::V1::TransferMessage::MessageSeverity]
+ # Message severity.
+ # @!attribute [rw] message_text
+ # @return [String]
+ # Message text.
+ class TransferMessage
+ # Represents data transfer user facing message severity.
+ module MessageSeverity
+ # No severity specified.
+ MESSAGE_SEVERITY_UNSPECIFIED = 0
+
+ # Informational message.
+ INFO = 1
+
+ # Warning message.
+ WARNING = 2
+
+ # Error message.
+ ERROR = 3
+ end
+ end
+
+ # DEPRECATED. Represents data transfer type.
+ module TransferType
+ # Invalid or Unknown transfer type placeholder.
+ TRANSFER_TYPE_UNSPECIFIED = 0
+
+ # Batch data transfer.
+ BATCH = 1
+
+ # Streaming data transfer. Streaming data source currently doesn't
+ # support multiple transfer configs per project.
+ STREAMING = 2
+ end
+
+ # Represents data transfer run state.
+ module TransferState
+ # State placeholder.
+ TRANSFER_STATE_UNSPECIFIED = 0
+
+ # Data transfer is scheduled and is waiting to be picked up by
+ # the data transfer backend.
+ PENDING = 2
+
+ # Data transfer is in progress.
+ RUNNING = 3
+
+ # Data transfer completed successfully.
+ SUCCEEDED = 4
+
+ # Data transfer failed.
+ FAILED = 5
+
+ # Data transfer is cancelled.
+ CANCELLED = 6
+ end
+ end
+ end
+ end
+ end
+ end
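
To make the attribute documentation above concrete, here is a minimal, hypothetical sketch of building a `TransferConfig` message from those fields. The require path and the `scheduled_query` data source id are assumptions for illustration; `name`, the output-only fields, and the data-source-specific `params` are omitted because the service assigns or interprets them.

```ruby
require "google/cloud/bigquery/data_transfer"

# Hypothetical values throughout; only the field names come from the docs above.
config = Google::Cloud::Bigquery::Datatransfer::V1::TransferConfig.new(
  destination_dataset_id:   "my_dataset",      # BigQuery target dataset id
  display_name:             "Nightly load",    # user-specified label
  data_source_id:           "scheduled_query", # cannot change after creation
  schedule:                 "every 24 hours",  # UTC, cron-yaml style, >= 8 hour granularity
  data_refresh_window_days: 0                  # 0 = use the data source default
)
```

A message like this is typically handed to the generated client's `create_transfer_config` call together with a `projects/{project_id}` parent path; the server fills in the resource `name`.
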
@@ -0,0 +1,28 @@
+ # Copyright 2018 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ module Google
+ module Protobuf
+ # A generic empty message that you can re-use to avoid defining duplicated
+ # empty messages in your APIs. A typical example is to use it as the request
+ # or the response type of an API method. For instance:
+ #
+ # service Foo {
+ # rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
+ # }
+ #
+ # The JSON representation for +Empty+ is an empty JSON object, +{}+.
+ class Empty; end
+ end
+ end
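
A quick illustration of the last point above: an `Empty` message has no fields, so its JSON encoding is the empty object and its binary encoding is zero bytes. `encode` and `encode_json` are the standard google-protobuf message-class methods.

```ruby
require "google/protobuf/empty_pb"

empty = Google::Protobuf::Empty.new
Google::Protobuf::Empty.encode_json(empty)     # => "{}"
Google::Protobuf::Empty.encode(empty).bytesize # => 0
```
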
@@ -69,6 +69,31 @@ module Google
  #
  # [Product Documentation]: https://cloud.google.com/bigquerydatatransfer
  #
+ # ## Enabling Logging
+ #
+ # To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library.
+ # The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/stdlib-2.5.0/libdoc/logger/rdoc/Logger.html) as shown below,
+ # or a [`Google::Cloud::Logging::Logger`](https://googlecloudplatform.github.io/google-cloud-ruby/#/docs/google-cloud-logging/latest/google/cloud/logging/logger)
+ # that will write logs to [Stackdriver Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb)
+ # and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information.
+ #
+ # Configuring a Ruby stdlib logger:
+ #
+ # ```ruby
+ # require "logger"
+ #
+ # module MyLogger
+ #   LOGGER = Logger.new $stderr, level: Logger::WARN
+ #   def logger
+ #     LOGGER
+ #   end
+ # end
+ #
+ # # Define a gRPC module-level logger method before grpc/logconfig.rb loads.
+ # module GRPC
+ #   extend MyLogger
+ # end
+ # ```
  #
  module DataTransfer
  module V1
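
The hunk above shows the stdlib variant; below is a hedged sketch of the `Google::Cloud::Logging::Logger` alternative it mentions, assuming the google-cloud-logging gem is installed and authenticated. The log name and the `"global"` monitored-resource type are placeholders, and as with the stdlib example, the `GRPC` module must gain its `logger` method before grpc/logconfig.rb loads.

```ruby
require "google/cloud/logging"

logging = Google::Cloud::Logging.new
# Placeholder log name and resource type; adjust for your deployment.
GRPC_LOGGER = logging.logger "grpc", logging.resource("global")

module StackdriverGrpcLogger
  def logger
    GRPC_LOGGER
  end
end

module GRPC
  extend StackdriverGrpcLogger
end
```
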
@@ -0,0 +1,189 @@
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto
+
+ require 'google/protobuf'
+
+ require 'google/api/annotations_pb'
+ require 'google/cloud/bigquery/datatransfer/v1/transfer_pb'
+ require 'google/protobuf/duration_pb'
+ require 'google/protobuf/empty_pb'
+ require 'google/protobuf/field_mask_pb'
+ require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf/wrappers_pb'
+ Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_message "google.cloud.bigquery.datatransfer.v1.DataSourceParameter" do
+ optional :param_id, :string, 1
+ optional :display_name, :string, 2
+ optional :description, :string, 3
+ optional :type, :enum, 4, "google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type"
+ optional :required, :bool, 5
+ optional :repeated, :bool, 6
+ optional :validation_regex, :string, 7
+ repeated :allowed_values, :string, 8
+ optional :min_value, :message, 9, "google.protobuf.DoubleValue"
+ optional :max_value, :message, 10, "google.protobuf.DoubleValue"
+ repeated :fields, :message, 11, "google.cloud.bigquery.datatransfer.v1.DataSourceParameter"
+ optional :validation_description, :string, 12
+ optional :validation_help_url, :string, 13
+ optional :immutable, :bool, 14
+ optional :recurse, :bool, 15
+ end
+ add_enum "google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type" do
+ value :TYPE_UNSPECIFIED, 0
+ value :STRING, 1
+ value :INTEGER, 2
+ value :DOUBLE, 3
+ value :BOOLEAN, 4
+ value :RECORD, 5
+ value :PLUS_PAGE, 6
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.DataSource" do
+ optional :name, :string, 1
+ optional :data_source_id, :string, 2
+ optional :display_name, :string, 3
+ optional :description, :string, 4
+ optional :client_id, :string, 5
+ repeated :scopes, :string, 6
+ optional :transfer_type, :enum, 7, "google.cloud.bigquery.datatransfer.v1.TransferType"
+ optional :supports_multiple_transfers, :bool, 8
+ optional :update_deadline_seconds, :int32, 9
+ optional :default_schedule, :string, 10
+ optional :supports_custom_schedule, :bool, 11
+ repeated :parameters, :message, 12, "google.cloud.bigquery.datatransfer.v1.DataSourceParameter"
+ optional :help_url, :string, 13
+ optional :authorization_type, :enum, 14, "google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType"
+ optional :data_refresh_type, :enum, 15, "google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType"
+ optional :default_data_refresh_window_days, :int32, 16
+ optional :manual_runs_disabled, :bool, 17
+ optional :minimum_schedule_interval, :message, 18, "google.protobuf.Duration"
+ end
+ add_enum "google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType" do
+ value :AUTHORIZATION_TYPE_UNSPECIFIED, 0
+ value :AUTHORIZATION_CODE, 1
+ value :GOOGLE_PLUS_AUTHORIZATION_CODE, 2
+ end
+ add_enum "google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType" do
+ value :DATA_REFRESH_TYPE_UNSPECIFIED, 0
+ value :SLIDING_WINDOW, 1
+ value :CUSTOM_SLIDING_WINDOW, 2
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest" do
+ optional :parent, :string, 1
+ optional :page_token, :string, 3
+ optional :page_size, :int32, 4
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse" do
+ repeated :data_sources, :message, 1, "google.cloud.bigquery.datatransfer.v1.DataSource"
+ optional :next_page_token, :string, 2
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest" do
+ optional :parent, :string, 1
+ optional :transfer_config, :message, 2, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
+ optional :authorization_code, :string, 3
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest" do
+ optional :transfer_config, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
+ optional :authorization_code, :string, 3
+ optional :update_mask, :message, 4, "google.protobuf.FieldMask"
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest" do
+ optional :parent, :string, 1
+ repeated :data_source_ids, :string, 2
+ optional :page_token, :string, 3
+ optional :page_size, :int32, 4
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse" do
+ repeated :transfer_configs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
+ optional :next_page_token, :string, 2
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest" do
+ optional :parent, :string, 1
+ repeated :states, :enum, 2, "google.cloud.bigquery.datatransfer.v1.TransferState"
+ optional :page_token, :string, 3
+ optional :page_size, :int32, 4
+ optional :run_attempt, :enum, 5, "google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"
+ end
+ add_enum "google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt" do
+ value :RUN_ATTEMPT_UNSPECIFIED, 0
+ value :LATEST, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse" do
+ repeated :transfer_runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
+ optional :next_page_token, :string, 2
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest" do
+ optional :parent, :string, 1
+ optional :page_token, :string, 4
+ optional :page_size, :int32, 5
+ repeated :message_types, :enum, 6, "google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse" do
+ repeated :transfer_messages, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferMessage"
+ optional :next_page_token, :string, 2
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse" do
+ optional :has_valid_creds, :bool, 1
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest" do
+ optional :parent, :string, 1
+ optional :start_time, :message, 2, "google.protobuf.Timestamp"
+ optional :end_time, :message, 3, "google.protobuf.Timestamp"
+ end
+ add_message "google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse" do
+ repeated :runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
+ end
+ end
+
+ module Google
+ module Cloud
+ module Bigquery
+ module Datatransfer
+ module V1
+ DataSourceParameter = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter").msgclass
+ DataSourceParameter::Type = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type").enummodule
+ DataSource = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource").msgclass
+ DataSource::AuthorizationType = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType").enummodule
+ DataSource::DataRefreshType = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType").enummodule
+ GetDataSourceRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest").msgclass
+ ListDataSourcesRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest").msgclass
+ ListDataSourcesResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse").msgclass
+ CreateTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest").msgclass
+ UpdateTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest").msgclass
+ GetTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest").msgclass
+ DeleteTransferConfigRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest").msgclass
+ GetTransferRunRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest").msgclass
+ DeleteTransferRunRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest").msgclass
+ ListTransferConfigsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest").msgclass
+ ListTransferConfigsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse").msgclass
+ ListTransferRunsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest").msgclass
+ ListTransferRunsRequest::RunAttempt = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt").enummodule
+ ListTransferRunsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse").msgclass
+ ListTransferLogsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest").msgclass
+ ListTransferLogsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse").msgclass
+ CheckValidCredsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest").msgclass
+ CheckValidCredsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse").msgclass
+ ScheduleTransferRunsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest").msgclass
+ ScheduleTransferRunsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse").msgclass
+ end
+ end
+ end
+ end
+ end
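
The constants registered above are ordinary google-protobuf message classes, so they can be built from keyword fields and round-tripped through the binary wire format without touching the service. A small sketch follows; the require path is assumed from the proto source path and the parent string is illustrative.

```ruby
require "google/cloud/bigquery/datatransfer/v1/datatransfer_pb"

klass = Google::Cloud::Bigquery::Datatransfer::V1::ScheduleTransferRunsRequest

request = klass.new(
  parent:     "projects/my-project/transferConfigs/my-config",
  start_time: Google::Protobuf::Timestamp.new(seconds: Time.utc(2018, 1, 1).to_i),
  end_time:   Google::Protobuf::Timestamp.new(seconds: Time.utc(2018, 1, 31).to_i)
)

bytes   = klass.encode(request) # binary protobuf wire format
decoded = klass.decode(bytes)   # back to a message object
decoded.parent # => "projects/my-project/transferConfigs/my-config"
```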