google-cloud-bigquery-data_transfer 0.2.5 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -41,7 +41,7 @@ module Google
  # Is parameter required.
  # @!attribute [rw] repeated
  # @return [true, false]
- # Can parameter have multiple values.
+ # Deprecated. This field has no effect.
  # @!attribute [rw] validation_regex
  # @return [String]
  # Regular expression which can be used for parameter validation.
@@ -56,7 +56,7 @@ module Google
  # For integer and double values specifies maximum allowed value.
  # @!attribute [rw] fields
  # @return [Array<Google::Cloud::Bigquery::Datatransfer::V1::DataSourceParameter>]
- # When parameter is a record, describes child fields.
+ # Deprecated. This field has no effect.
  # @!attribute [rw] validation_description
  # @return [String]
  # Description of the requirements for this field, in case the user input does
@@ -69,8 +69,11 @@ module Google
  # Cannot be changed after initial creation.
  # @!attribute [rw] recurse
  # @return [true, false]
- # If set to true, schema should be taken from the parent with the same
- # parameter_id. Only applicable when parameter type is RECORD.
+ # Deprecated. This field has no effect.
+ # @!attribute [rw] deprecated
+ # @return [true, false]
+ # If true, it should not be used in new transfers, and it should not be
+ # visible to users.
  class DataSourceParameter
  # Parameter type.
  module Type
@@ -90,7 +93,7 @@ module Google
  # Boolean parameter.
  BOOLEAN = 4

- # Record parameter.
+ # Deprecated. This field has no effect.
  RECORD = 5

  # Page ID for a Google+ Page.
@@ -115,24 +118,21 @@ module Google
  # @!attribute [rw] client_id
  # @return [String]
  # Data source client id which should be used to receive refresh token.
- # When not supplied, no offline credentials are populated for data transfer.
  # @!attribute [rw] scopes
  # @return [Array<String>]
- # Api auth scopes for which refresh token needs to be obtained. Only valid
- # when `client_id` is specified. Ignored otherwise. These are scopes needed
- # by a data source to prepare data and ingest them into BigQuery,
- # e.g., https://www.googleapis.com/auth/bigquery
+ # Api auth scopes for which refresh token needs to be obtained. These are
+ # scopes needed by a data source to prepare data and ingest them into
+ # BigQuery, e.g., https://www.googleapis.com/auth/bigquery
  # @!attribute [rw] transfer_type
  # @return [Google::Cloud::Bigquery::Datatransfer::V1::TransferType]
  # Deprecated. This field has no effect.
  # @!attribute [rw] supports_multiple_transfers
  # @return [true, false]
- # Indicates whether the data source supports multiple transfers
- # to different BigQuery targets.
+ # Deprecated. This field has no effect.
  # @!attribute [rw] update_deadline_seconds
  # @return [Integer]
  # The number of seconds to wait for an update from the data source
- # before BigQuery marks the transfer as failed.
+ # before the Data Transfer Service marks the transfer as FAILED.
  # @!attribute [rw] default_schedule
  # @return [String]
  # Default data transfer schedule.
@@ -248,7 +248,7 @@ module Google
  # @!attribute [rw] parent
  # @return [String]
  # The BigQuery project id where the transfer configuration should be created.
- # Must be in the format /projects/\\{project_id}/locations/\\{location_id}
+ # Must be in the format projects/\\{project_id}/locations/\\{location_id}
  # If specified location and location of the destination bigquery dataset
  # do not match - the request will fail.
  # @!attribute [rw] transfer_config
@@ -272,6 +272,14 @@ module Google
  # urn:ietf:wg:oauth:2.0:oob means that authorization code should be
  # returned in the title bar of the browser, with the page text prompting
  # the user to copy the code and paste it in the application.
+ # @!attribute [rw] version_info
+ # @return [String]
+ # Optional version info. If users want to find a very recent access token,
+ # that is, immediately after approving access, users have to set the
+ # version_info claim in the token request. To obtain the version_info,
+ # users must use the "none+gsession" response type, which returns a
+ # version_info in the authorization response; it is then put in a JWT
+ # claim in the token request.
  class CreateTransferConfigRequest; end

  # A request to update a transfer configuration. To update the user id of the
@@ -300,6 +308,14 @@ module Google
  # @!attribute [rw] update_mask
  # @return [Google::Protobuf::FieldMask]
  # Required list of fields to be updated in this request.
+ # @!attribute [rw] version_info
+ # @return [String]
+ # Optional version info. If users want to find a very recent access token,
+ # that is, immediately after approving access, users have to set the
+ # version_info claim in the token request. To obtain the version_info,
+ # users must use the "none+gsession" response type, which returns a
+ # version_info in the authorization response; it is then put in a JWT
+ # claim in the token request.
  class UpdateTransferConfigRequest; end

  # A request to get data transfer information.
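Both CreateTransferConfigRequest and UpdateTransferConfigRequest gain this optional version_info field. A minimal sketch of passing it through the generated Ruby client; the client entry point and the version_info keyword are assumptions about the 0.3.0 surface (not shown in this diff), and all ids are placeholders:

    require "google/cloud/bigquery/data_transfer"

    # Assumed 0.3.0 client surface; verify against the generated client.
    client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)

    parent = "projects/my-project/locations/us"  # placeholder
    transfer_config = {
      destination_dataset_id: "my_dataset",
      display_name: "nightly load",
      data_source_id: "scheduled_query"
    }

    # version_info comes from the "none+gsession" authorization response,
    # as described in the field docs above.
    client.create_transfer_config(parent, transfer_config,
                                  version_info: "<version-info-from-auth>")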
@@ -481,6 +497,42 @@ module Google
  # @return [Array<Google::Cloud::Bigquery::Datatransfer::V1::TransferRun>]
  # The transfer runs that were scheduled.
  class ScheduleTransferRunsResponse; end
+
+ # A request to start manual transfer runs.
+ # @!attribute [rw] parent
+ # @return [String]
+ # Transfer configuration name in the form:
+ # `projects/{project_id}/transferConfigs/{config_id}`.
+ # @!attribute [rw] requested_time_range
+ # @return [Google::Cloud::Bigquery::Datatransfer::V1::StartManualTransferRunsRequest::TimeRange]
+ # Time range for the transfer runs that should be started.
+ # @!attribute [rw] requested_run_time
+ # @return [Google::Protobuf::Timestamp]
+ # Specific run_time for a transfer run to be started. The
+ # requested_run_time must not be in the future.
+ class StartManualTransferRunsRequest
+ # A specification for a time range; this will request transfer runs with
+ # run_time between start_time (inclusive) and end_time (exclusive).
+ # @!attribute [rw] start_time
+ # @return [Google::Protobuf::Timestamp]
+ # Start time of the range of transfer runs. For example,
+ # `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than
+ # the end_time. Creates transfer runs where run_time is in the range between
+ # start_time (inclusive) and end_time (exclusive).
+ # @!attribute [rw] end_time
+ # @return [Google::Protobuf::Timestamp]
+ # End time of the range of transfer runs. For example,
+ # `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future.
+ # Creates transfer runs where run_time is in the range between start_time
+ # (inclusive) and end_time (exclusive).
+ class TimeRange; end
+ end
+
+ # A response to start manual transfer runs.
+ # @!attribute [rw] runs
+ # @return [Array<Google::Cloud::Bigquery::Datatransfer::V1::TransferRun>]
+ # The transfer runs that were created.
+ class StartManualTransferRunsResponse; end
  end
  end
  end
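From the Ruby side, the new RPC takes either a half-open run_time range or a single requested_run_time. A hedged sketch under the assumption that the generated 0.3.0 client exposes start_manual_transfer_runs with keyword arguments (only the request and response messages appear in this diff; ids are placeholders):

    require "google/cloud/bigquery/data_transfer"

    client = Google::Cloud::Bigquery::DataTransfer.new(version: :v1)
    parent = "projects/my-project/transferConfigs/my-config"  # placeholder

    # Backfill runs with run_time in [start_time, end_time), matching
    # TimeRange's inclusive/exclusive semantics documented above.
    time_range = {
      start_time: { seconds: Time.utc(2017, 5, 25).to_i },
      end_time:   { seconds: Time.utc(2017, 5, 30).to_i }
    }

    response = client.start_manual_transfer_runs(
      parent: parent, requested_time_range: time_range
    )
    response.runs.each { |run| puts run.name }  # the created TransferRuns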
@@ -18,6 +18,28 @@ module Google
  module Bigquery
  module Datatransfer
  module V1
+ # Options customizing the data transfer schedule.
+ # @!attribute [rw] disable_auto_scheduling
+ # @return [true, false]
+ # If true, automatic scheduling of data transfer runs for this configuration
+ # will be disabled. The runs can be started on an ad-hoc basis using the
+ # StartManualTransferRuns API. When automatic scheduling is disabled, the
+ # TransferConfig.schedule field will be ignored.
+ # @!attribute [rw] start_time
+ # @return [Google::Protobuf::Timestamp]
+ # Specifies time to start scheduling transfer runs. The first run will be
+ # scheduled at or after the start time according to a recurrence pattern
+ # defined in the schedule string. The start time can be changed at any
+ # moment. The time when a data transfer can be triggered manually is not
+ # limited by this option.
+ # @!attribute [rw] end_time
+ # @return [Google::Protobuf::Timestamp]
+ # Defines time to stop scheduling transfer runs. A transfer run cannot be
+ # scheduled at or after the end time. The end time can be changed at any
+ # moment. The time when a data transfer can be triggered manually is not
+ # limited by this option.
+ class ScheduleOptions; end
+
  # Represents a data transfer configuration. A transfer configuration
  # contains all metadata needed to perform a data transfer. For example,
  # `destination_dataset_id` specifies where data should be stored.
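A short sketch of how the new ScheduleOptions message composes into a transfer config. The field names come from this diff; the hash form (coerced to protos by the generated client) and the surrounding values are assumptions for illustration:

    # Disable cron-style scheduling entirely; runs are then created only
    # via the new StartManualTransferRuns RPC, and `schedule` is ignored.
    transfer_config = {
      display_name: "manual-only transfer",
      data_source_id: "scheduled_query",    # placeholder data source
      destination_dataset_id: "my_dataset",
      schedule_options: {
        disable_auto_scheduling: true
      }
    }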
@@ -27,11 +49,12 @@ module Google
  # @!attribute [rw] name
  # @return [String]
  # The resource name of the transfer config.
- # Transfer config names have the form
- # `projects/{project_id}/transferConfigs/{config_id}`.
- # Where `config_id` is usually a uuid, even though it is not
- # guaranteed or required. The name is ignored when creating a transfer
- # config.
+ # Transfer config names have the form
+ # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
+ # The name is automatically generated based on the config_id specified in
+ # CreateTransferConfigRequest along with project_id and region. If config_id
+ # is not provided, a uuid will usually be generated for it, even though this
+ # is not guaranteed or required.
  # @!attribute [rw] destination_dataset_id
  # @return [String]
  # The BigQuery target dataset id.
@@ -58,6 +81,9 @@ module Google
  # See more explanation about the format here:
  # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
  # NOTE: the granularity should be at least 8 hours, or less frequent.
+ # @!attribute [rw] schedule_options
+ # @return [Google::Cloud::Bigquery::Datatransfer::V1::ScheduleOptions]
+ # Options customizing the data transfer schedule.
  # @!attribute [rw] data_refresh_window_days
  # @return [Integer]
  # The number of days to look back to automatically refresh the data.
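For context on the schedule field that schedule_options modifies, these are examples of valid schedule strings in the cron-yaml format linked above (drawn, to the best of my knowledge, from the upstream field documentation):

    # Calendar-based recurrences accepted by TransferConfig#schedule:
    schedule = "1st,3rd monday of month 15:30"
    schedule = "first sunday of quarter 00:00"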
@@ -81,11 +107,7 @@ module Google
  # Output only. State of the most recently updated transfer run.
  # @!attribute [rw] user_id
  # @return [Integer]
- # Output only. Unique ID of the user on whose behalf transfer is done.
- # Applicable only to data sources that do not support service accounts.
- # When set to 0, the data source service account credentials are used.
- # May be negative. Note, that this identifier is not stable.
- # It may change over time even for the same user.
+ # Deprecated. Unique ID of the user on whose behalf transfer is done.
  # @!attribute [rw] dataset_region
  # @return [String]
  # Output only. Region in which BigQuery dataset is located.
@@ -103,8 +125,8 @@ module Google
  # Minimum time after which a transfer run can be started.
  # @!attribute [rw] run_time
  # @return [Google::Protobuf::Timestamp]
- # For batch transfer runs, specifies the date and time that
- # data should be ingested.
+ # For batch transfer runs, specifies the date and time of the data that
+ # should be ingested.
  # @!attribute [rw] error_status
  # @return [Google::Rpc::Status]
  # Status of the transfer run.
@@ -133,18 +155,14 @@ module Google
  # Data transfer run state. Ignored for input requests.
  # @!attribute [rw] user_id
  # @return [Integer]
- # Output only. Unique ID of the user on whose behalf transfer is done.
- # Applicable only to data sources that do not support service accounts.
- # When set to 0, the data source service account credentials are used.
- # May be negative. Note, that this identifier is not stable.
- # It may change over time even for the same user.
+ # Deprecated. Unique ID of the user on whose behalf transfer is done.
  # @!attribute [rw] schedule
  # @return [String]
  # Output only. Describes the schedule of this transfer run if it was
  # created as part of a regular schedule. For batch transfer runs that are
  # scheduled manually, this is empty.
  # NOTE: the system might choose to delay the schedule depending on the
- # current load, so `schedule_time` doesn't always matches this.
+ # current load, so `schedule_time` doesn't always match this.
  class TransferRun; end

  # Represents a user facing message for a particular data transfer run.
@@ -186,7 +204,7 @@ module Google
  # Data transfer is in progress.
  RUNNING = 3

- # Data transfer completed successsfully.
+ # Data transfer completed successfully.
  SUCCEEDED = 4

  # Data transfer failed.
@@ -1,4 +1,4 @@
- # Copyright 2018 Google LLC
+ # Copyright 2019 Google LLC
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
@@ -15,76 +15,12 @@

  module Google
  module Protobuf
- # Wrapper message for +double+.
+ # Wrapper message for `double`.
  #
- # The JSON representation for +DoubleValue+ is JSON number.
+ # The JSON representation for `DoubleValue` is JSON number.
  # @!attribute [rw] value
  # @return [Float]
  # The double value.
  class DoubleValue; end
-
- # Wrapper message for +float+.
- #
- # The JSON representation for +FloatValue+ is JSON number.
- # @!attribute [rw] value
- # @return [Float]
- # The float value.
- class FloatValue; end
-
- # Wrapper message for +int64+.
- #
- # The JSON representation for +Int64Value+ is JSON string.
- # @!attribute [rw] value
- # @return [Integer]
- # The int64 value.
- class Int64Value; end
-
- # Wrapper message for +uint64+.
- #
- # The JSON representation for +UInt64Value+ is JSON string.
- # @!attribute [rw] value
- # @return [Integer]
- # The uint64 value.
- class UInt64Value; end
-
- # Wrapper message for +int32+.
- #
- # The JSON representation for +Int32Value+ is JSON number.
- # @!attribute [rw] value
- # @return [Integer]
- # The int32 value.
- class Int32Value; end
-
- # Wrapper message for +uint32+.
- #
- # The JSON representation for +UInt32Value+ is JSON number.
- # @!attribute [rw] value
- # @return [Integer]
- # The uint32 value.
- class UInt32Value; end
-
- # Wrapper message for +bool+.
- #
- # The JSON representation for +BoolValue+ is JSON +true+ and +false+.
- # @!attribute [rw] value
- # @return [true, false]
- # The bool value.
- class BoolValue; end
-
- # Wrapper message for +string+.
- #
- # The JSON representation for +StringValue+ is JSON string.
- # @!attribute [rw] value
- # @return [String]
- # The string value.
- class StringValue; end
-
- # Wrapper message for +bytes+.
- #
- # The JSON representation for +BytesValue+ is JSON string.
- # @!attribute [rw] value
- # @return [String]
- # The bytes value.
- class BytesValue; end
  end
  end
@@ -17,7 +17,7 @@ module Google
  module Cloud
  module Bigquery
  module DataTransfer
- VERSION = "0.2.5".freeze
+ VERSION = "0.3.0".freeze
  end
  end
  end
@@ -0,0 +1,170 @@
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: google/cloud/bigquery/datatransfer/v1/datasource.proto
+
+
+ require 'google/protobuf'
+
+ require 'google/api/annotations_pb'
+ require 'google/cloud/bigquery/datatransfer/v1/datatransfer_pb'
+ require 'google/cloud/bigquery/datatransfer/v1/transfer_pb'
+ require 'google/protobuf/duration_pb'
+ require 'google/protobuf/empty_pb'
+ require 'google/protobuf/field_mask_pb'
+ require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf/wrappers_pb'
+ require 'google/api/client_pb'
+ Google::Protobuf::DescriptorPool.generated_pool.build do
+   add_message "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo" do
+     optional :sql, :string, 1
+     optional :destination_table_id, :string, 2
+     optional :destination_table_description, :string, 10
+     repeated :table_defs, :message, 3, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition"
+     repeated :user_defined_functions, :string, 4
+     optional :write_disposition, :enum, 6, "google.cloud.bigquery.datatransfer.v1.WriteDisposition"
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema" do
+     optional :field_name, :string, 1
+     optional :type, :enum, 2, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.Type"
+     optional :is_repeated, :bool, 3
+     optional :description, :string, 4
+     optional :schema, :message, 5, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema"
+   end
+   add_enum "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.Type" do
+     value :TYPE_UNSPECIFIED, 0
+     value :STRING, 1
+     value :INTEGER, 2
+     value :FLOAT, 3
+     value :RECORD, 4
+     value :BYTES, 5
+     value :BOOLEAN, 6
+     value :TIMESTAMP, 7
+     value :DATE, 8
+     value :TIME, 9
+     value :DATETIME, 10
+     value :NUMERIC, 11
+     value :GEOGRAPHY, 12
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema" do
+     repeated :fields, :message, 1, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema"
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition" do
+     optional :table_id, :string, 1
+     repeated :source_uris, :string, 2
+     optional :format, :enum, 3, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Format"
+     optional :max_bad_records, :int32, 4
+     optional :encoding, :enum, 5, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Encoding"
+     optional :csv_options, :message, 6, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions"
+     optional :schema, :message, 7, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema"
+     optional :ignore_unknown_values, :message, 10, "google.protobuf.BoolValue"
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions" do
+     optional :field_delimiter, :message, 1, "google.protobuf.StringValue"
+     optional :allow_quoted_newlines, :message, 2, "google.protobuf.BoolValue"
+     optional :quote_char, :message, 3, "google.protobuf.StringValue"
+     optional :skip_leading_rows, :message, 4, "google.protobuf.Int64Value"
+     optional :allow_jagged_rows, :message, 5, "google.protobuf.BoolValue"
+   end
+   add_enum "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Format" do
+     value :FORMAT_UNSPECIFIED, 0
+     value :CSV, 1
+     value :JSON, 2
+     value :AVRO, 3
+     value :RECORDIO, 4
+     value :COLUMNIO, 5
+     value :CAPACITOR, 6
+     value :PARQUET, 7
+     value :ORC, 8
+   end
+   add_enum "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Encoding" do
+     value :ENCODING_UNSPECIFIED, 0
+     value :ISO_8859_1, 1
+     value :UTF8, 2
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest" do
+     optional :transfer_run, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
+     optional :update_mask, :message, 2, "google.protobuf.FieldMask"
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest" do
+     optional :name, :string, 1
+     repeated :transfer_messages, :message, 2, "google.cloud.bigquery.datatransfer.v1.TransferMessage"
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest" do
+     optional :name, :string, 1
+     repeated :imported_data, :message, 2, "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo"
+     optional :user_credentials, :bytes, 3
+     optional :max_parallelism, :int32, 8
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.FinishRunRequest" do
+     optional :name, :string, 1
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest" do
+     optional :parent, :string, 1
+     optional :data_source_definition, :message, 2, "google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest" do
+     optional :data_source_definition, :message, 1, "google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"
+     optional :update_mask, :message, 2, "google.protobuf.FieldMask"
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest" do
+     optional :name, :string, 1
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest" do
+     optional :name, :string, 1
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest" do
+     optional :parent, :string, 1
+     optional :page_token, :string, 2
+     optional :page_size, :int32, 3
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse" do
+     repeated :data_source_definitions, :message, 1, "google.cloud.bigquery.datatransfer.v1.DataSourceDefinition"
+     optional :next_page_token, :string, 2
+   end
+   add_message "google.cloud.bigquery.datatransfer.v1.DataSourceDefinition" do
+     optional :name, :string, 21
+     optional :data_source, :message, 1, "google.cloud.bigquery.datatransfer.v1.DataSource"
+     optional :transfer_run_pubsub_topic, :string, 13
+     optional :run_time_offset, :message, 16, "google.protobuf.Duration"
+     optional :support_email, :string, 22
+     optional :service_account, :string, 2
+     optional :disabled, :bool, 5
+     optional :transfer_config_pubsub_topic, :string, 12
+     repeated :supported_location_ids, :string, 23
+   end
+   add_enum "google.cloud.bigquery.datatransfer.v1.WriteDisposition" do
+     value :WRITE_DISPOSITION_UNSPECIFIED, 0
+     value :WRITE_TRUNCATE, 1
+     value :WRITE_APPEND, 2
+   end
+ end
+
+ module Google
+   module Cloud
+     module Bigquery
+       module Datatransfer
+         module V1
+           ImportedDataInfo = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo").msgclass
+           ImportedDataInfo::FieldSchema = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema").msgclass
+           ImportedDataInfo::FieldSchema::Type = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema.Type").enummodule
+           ImportedDataInfo::RecordSchema = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema").msgclass
+           ImportedDataInfo::TableDefinition = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition").msgclass
+           ImportedDataInfo::TableDefinition::CsvOptions = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions").msgclass
+           ImportedDataInfo::Format = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Format").enummodule
+           ImportedDataInfo::Encoding = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.Encoding").enummodule
+           UpdateTransferRunRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest").msgclass
+           LogTransferRunMessagesRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest").msgclass
+           StartBigQueryJobsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest").msgclass
+           FinishRunRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.FinishRunRequest").msgclass
+           CreateDataSourceDefinitionRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest").msgclass
+           UpdateDataSourceDefinitionRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest").msgclass
+           DeleteDataSourceDefinitionRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest").msgclass
+           GetDataSourceDefinitionRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest").msgclass
+           ListDataSourceDefinitionsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest").msgclass
+           ListDataSourceDefinitionsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse").msgclass
+           DataSourceDefinition = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.DataSourceDefinition").msgclass
+           WriteDisposition = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.WriteDisposition").enummodule
+         end
+       end
+     end
+   end
+ end
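Since the new descriptors register in the shared generated pool, the resulting classes behave like ordinary protobuf messages. A small usage sketch; identifiers and URIs are placeholders, and the wrapper classes come from google/protobuf/wrappers_pb, which this file requires:

    require "google/cloud/bigquery/datatransfer/v1/datasource_pb"

    V1 = Google::Cloud::Bigquery::Datatransfer::V1

    # CSV options use wrapper types so an unset field differs from a default.
    csv = V1::ImportedDataInfo::TableDefinition::CsvOptions.new(
      field_delimiter: Google::Protobuf::StringValue.new(value: ","),
      skip_leading_rows: Google::Protobuf::Int64Value.new(value: 1)
    )

    table_def = V1::ImportedDataInfo::TableDefinition.new(
      table_id: "my_table",                        # placeholder
      source_uris: ["gs://my-bucket/data-*.csv"],  # placeholder
      format: :CSV,
      csv_options: csv
    )

    info = V1::ImportedDataInfo.new(
      destination_table_id: "my_table",
      table_defs: [table_def],
      write_disposition: :WRITE_APPEND
    )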