google-cloud-bigquery-data_transfer-v1 0.4.3 → 0.5.0

Sign up to get free protection for your applications and to gain access to all of the features.
@@ -32,10 +32,7 @@ module Google
32
32
  module DataTransfer
33
33
  module V1
34
34
  ##
35
- # The Google BigQuery Data Transfer Service API enables BigQuery users to
36
- # configure the transfer of their data from other Google Products into
37
- # BigQuery. This service contains methods that are end user exposed. It backs
38
- # up the frontend.
35
+ # This API allows users to manage their data transfers into BigQuery.
39
36
  #
40
37
  # To load this service and instantiate a client:
41
38
  #
@@ -22,7 +22,7 @@ module Google
22
22
  module Bigquery
23
23
  module DataTransfer
24
24
  module V1
25
- VERSION = "0.4.3"
25
+ VERSION = "0.5.0"
26
26
  end
27
27
  end
28
28
  end
@@ -1,8 +1,6 @@
1
1
  # Generated by the protocol buffer compiler. DO NOT EDIT!
2
2
  # source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto
3
3
 
4
- require 'google/protobuf'
5
-
6
4
  require 'google/api/annotations_pb'
7
5
  require 'google/api/client_pb'
8
6
  require 'google/api/field_behavior_pb'
@@ -13,6 +11,8 @@ require 'google/protobuf/empty_pb'
13
11
  require 'google/protobuf/field_mask_pb'
14
12
  require 'google/protobuf/timestamp_pb'
15
13
  require 'google/protobuf/wrappers_pb'
14
+ require 'google/protobuf'
15
+
16
16
  Google::Protobuf::DescriptorPool.generated_pool.build do
17
17
  add_file("google/cloud/bigquery/datatransfer/v1/datatransfer.proto", :syntax => :proto3) do
18
18
  add_message "google.cloud.bigquery.datatransfer.v1.DataSourceParameter" do
@@ -174,6 +174,10 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
174
174
  add_message "google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse" do
175
175
  repeated :runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
176
176
  end
177
+ add_message "google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest" do
178
+ optional :name, :string, 1
179
+ repeated :data_source_ids, :string, 2
180
+ end
177
181
  end
178
182
  end
179
183
 
@@ -210,6 +214,7 @@ module Google
210
214
  StartManualTransferRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest").msgclass
211
215
  StartManualTransferRunsRequest::TimeRange = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange").msgclass
212
216
  StartManualTransferRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse").msgclass
217
+ EnrollDataSourcesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EnrollDataSourcesRequest").msgclass
213
218
  end
214
219
  end
215
220
  end
@@ -25,35 +25,31 @@ module Google
25
25
  module DataTransfer
26
26
  module V1
27
27
  module DataTransferService
28
- # The Google BigQuery Data Transfer Service API enables BigQuery users to
29
- # configure the transfer of their data from other Google Products into
30
- # BigQuery. This service contains methods that are end user exposed. It backs
31
- # up the frontend.
28
+ # This API allows users to manage their data transfers into BigQuery.
32
29
  class Service
33
30
 
34
- include GRPC::GenericService
31
+ include ::GRPC::GenericService
35
32
 
36
33
  self.marshal_class_method = :encode
37
34
  self.unmarshal_class_method = :decode
38
35
  self.service_name = 'google.cloud.bigquery.datatransfer.v1.DataTransferService'
39
36
 
40
- # Retrieves a supported data source and returns its settings,
41
- # which can be used for UI rendering.
37
+ # Retrieves a supported data source and returns its settings.
42
38
  rpc :GetDataSource, ::Google::Cloud::Bigquery::DataTransfer::V1::GetDataSourceRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::DataSource
43
- # Lists supported data sources and returns their settings,
44
- # which can be used for UI rendering.
39
+ # Lists supported data sources and returns their settings.
45
40
  rpc :ListDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListDataSourcesResponse
46
41
  # Creates a new data transfer configuration.
47
42
  rpc :CreateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::CreateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
48
43
  # Updates a data transfer configuration.
49
44
  # All fields must be set, even if they are not updated.
50
45
  rpc :UpdateTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::UpdateTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
51
- # Deletes a data transfer configuration,
52
- # including any associated transfer runs and logs.
46
+ # Deletes a data transfer configuration, including any associated transfer
47
+ # runs and logs.
53
48
  rpc :DeleteTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferConfigRequest, ::Google::Protobuf::Empty
54
49
  # Returns information about a data transfer config.
55
50
  rpc :GetTransferConfig, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferConfigRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferConfig
56
- # Returns information about all data transfers in the project.
51
+ # Returns information about all transfer configs owned by a project in the
52
+ # specified location.
57
53
  rpc :ListTransferConfigs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferConfigsResponse
58
54
  # Creates transfer runs for a time range [start_time, end_time].
59
55
  # For each date - or whatever granularity the data source supports - in the
@@ -70,17 +66,20 @@ module Google
70
66
  rpc :GetTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::GetTransferRunRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::TransferRun
71
67
  # Deletes the specified transfer run.
72
68
  rpc :DeleteTransferRun, ::Google::Cloud::Bigquery::DataTransfer::V1::DeleteTransferRunRequest, ::Google::Protobuf::Empty
73
- # Returns information about running and completed jobs.
69
+ # Returns information about running and completed transfer runs.
74
70
  rpc :ListTransferRuns, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferRunsResponse
75
- # Returns user facing log messages for the data transfer run.
71
+ # Returns log messages for the transfer run.
76
72
  rpc :ListTransferLogs, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::ListTransferLogsResponse
77
73
  # Returns true if valid credentials exist for the given data source and
78
74
  # requesting user.
79
- # Some data sources doesn't support service account, so we need to talk to
80
- # them on behalf of the end user. This API just checks whether we have OAuth
81
- # token for the particular user, which is a pre-requisite before user can
82
- # create a transfer config.
83
75
  rpc :CheckValidCreds, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsRequest, ::Google::Cloud::Bigquery::DataTransfer::V1::CheckValidCredsResponse
76
+ # Enroll data sources in a user project. This allows users to create transfer
77
+ # configurations for these data sources. They will also appear in the
78
+ # ListDataSources RPC and as such, will appear in the BigQuery UI
79
+ # 'https://bigquery.cloud.google.com' (and the documents can be found at
80
+ # https://cloud.google.com/bigquery/bigquery-web-ui and
81
+ # https://cloud.google.com/bigquery/docs/working-with-transfers).
82
+ rpc :EnrollDataSources, ::Google::Cloud::Bigquery::DataTransfer::V1::EnrollDataSourcesRequest, ::Google::Protobuf::Empty
84
83
  end
85
84
 
86
85
  Stub = Service.rpc_stub_class
@@ -1,13 +1,14 @@
1
1
  # Generated by the protocol buffer compiler. DO NOT EDIT!
2
2
  # source: google/cloud/bigquery/datatransfer/v1/transfer.proto
3
3
 
4
- require 'google/protobuf'
5
-
6
4
  require 'google/api/field_behavior_pb'
7
5
  require 'google/api/resource_pb'
6
+ require 'google/protobuf/duration_pb'
8
7
  require 'google/protobuf/struct_pb'
9
8
  require 'google/protobuf/timestamp_pb'
10
9
  require 'google/rpc/status_pb'
10
+ require 'google/protobuf'
11
+
11
12
  Google::Protobuf::DescriptorPool.generated_pool.build do
12
13
  add_file("google/cloud/bigquery/datatransfer/v1/transfer.proto", :syntax => :proto3) do
13
14
  add_message "google.cloud.bigquery.datatransfer.v1.EmailPreferences" do
@@ -18,6 +19,9 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
18
19
  optional :start_time, :message, 1, "google.protobuf.Timestamp"
19
20
  optional :end_time, :message, 2, "google.protobuf.Timestamp"
20
21
  end
22
+ add_message "google.cloud.bigquery.datatransfer.v1.UserInfo" do
23
+ proto3_optional :email, :string, 1
24
+ end
21
25
  add_message "google.cloud.bigquery.datatransfer.v1.TransferConfig" do
22
26
  optional :name, :string, 1
23
27
  optional :display_name, :string, 3
@@ -34,6 +38,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
34
38
  optional :dataset_region, :string, 14
35
39
  optional :notification_pubsub_topic, :string, 15
36
40
  optional :email_preferences, :message, 18, "google.cloud.bigquery.datatransfer.v1.EmailPreferences"
41
+ proto3_optional :owner_info, :message, 27, "google.cloud.bigquery.datatransfer.v1.UserInfo"
37
42
  oneof :destination do
38
43
  optional :destination_dataset_id, :string, 2
39
44
  end
@@ -91,6 +96,7 @@ module Google
91
96
  module V1
92
97
  EmailPreferences = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.EmailPreferences").msgclass
93
98
  ScheduleOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptions").msgclass
99
+ UserInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.UserInfo").msgclass
94
100
  TransferConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferConfig").msgclass
95
101
  TransferRun = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferRun").msgclass
96
102
  TransferMessage = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferMessage").msgclass
@@ -33,11 +33,7 @@ module Google
33
33
  # // For Kubernetes resources, the format is {api group}/{kind}.
34
34
  # option (google.api.resource) = {
35
35
  # type: "pubsub.googleapis.com/Topic"
36
- # name_descriptor: {
37
- # pattern: "projects/{project}/topics/{topic}"
38
- # parent_type: "cloudresourcemanager.googleapis.com/Project"
39
- # parent_name_extractor: "projects/{project}"
40
- # }
36
+ # pattern: "projects/{project}/topics/{topic}"
41
37
  # };
42
38
  # }
43
39
  #
@@ -45,10 +41,7 @@ module Google
45
41
  #
46
42
  # resources:
47
43
  # - type: "pubsub.googleapis.com/Topic"
48
- # name_descriptor:
49
- # - pattern: "projects/{project}/topics/{topic}"
50
- # parent_type: "cloudresourcemanager.googleapis.com/Project"
51
- # parent_name_extractor: "projects/{project}"
44
+ # pattern: "projects/{project}/topics/{topic}"
52
45
  #
53
46
  # Sometimes, resources have multiple patterns, typically because they can
54
47
  # live under multiple parents.
@@ -58,26 +51,10 @@ module Google
58
51
  # message LogEntry {
59
52
  # option (google.api.resource) = {
60
53
  # type: "logging.googleapis.com/LogEntry"
61
- # name_descriptor: {
62
- # pattern: "projects/{project}/logs/{log}"
63
- # parent_type: "cloudresourcemanager.googleapis.com/Project"
64
- # parent_name_extractor: "projects/{project}"
65
- # }
66
- # name_descriptor: {
67
- # pattern: "folders/{folder}/logs/{log}"
68
- # parent_type: "cloudresourcemanager.googleapis.com/Folder"
69
- # parent_name_extractor: "folders/{folder}"
70
- # }
71
- # name_descriptor: {
72
- # pattern: "organizations/{organization}/logs/{log}"
73
- # parent_type: "cloudresourcemanager.googleapis.com/Organization"
74
- # parent_name_extractor: "organizations/{organization}"
75
- # }
76
- # name_descriptor: {
77
- # pattern: "billingAccounts/{billing_account}/logs/{log}"
78
- # parent_type: "billing.googleapis.com/BillingAccount"
79
- # parent_name_extractor: "billingAccounts/{billing_account}"
80
- # }
54
+ # pattern: "projects/{project}/logs/{log}"
55
+ # pattern: "folders/{folder}/logs/{log}"
56
+ # pattern: "organizations/{organization}/logs/{log}"
57
+ # pattern: "billingAccounts/{billing_account}/logs/{log}"
81
58
  # };
82
59
  # }
83
60
  #
@@ -85,48 +62,10 @@ module Google
85
62
  #
86
63
  # resources:
87
64
  # - type: 'logging.googleapis.com/LogEntry'
88
- # name_descriptor:
89
- # - pattern: "projects/{project}/logs/{log}"
90
- # parent_type: "cloudresourcemanager.googleapis.com/Project"
91
- # parent_name_extractor: "projects/{project}"
92
- # - pattern: "folders/{folder}/logs/{log}"
93
- # parent_type: "cloudresourcemanager.googleapis.com/Folder"
94
- # parent_name_extractor: "folders/{folder}"
95
- # - pattern: "organizations/{organization}/logs/{log}"
96
- # parent_type: "cloudresourcemanager.googleapis.com/Organization"
97
- # parent_name_extractor: "organizations/{organization}"
98
- # - pattern: "billingAccounts/{billing_account}/logs/{log}"
99
- # parent_type: "billing.googleapis.com/BillingAccount"
100
- # parent_name_extractor: "billingAccounts/{billing_account}"
101
- #
102
- # For flexible resources, the resource name doesn't contain parent names, but
103
- # the resource itself has parents for policy evaluation.
104
- #
105
- # Example:
106
- #
107
- # message Shelf {
108
- # option (google.api.resource) = {
109
- # type: "library.googleapis.com/Shelf"
110
- # name_descriptor: {
111
- # pattern: "shelves/{shelf}"
112
- # parent_type: "cloudresourcemanager.googleapis.com/Project"
113
- # }
114
- # name_descriptor: {
115
- # pattern: "shelves/{shelf}"
116
- # parent_type: "cloudresourcemanager.googleapis.com/Folder"
117
- # }
118
- # };
119
- # }
120
- #
121
- # The ResourceDescriptor Yaml config will look like:
122
- #
123
- # resources:
124
- # - type: 'library.googleapis.com/Shelf'
125
- # name_descriptor:
126
- # - pattern: "shelves/{shelf}"
127
- # parent_type: "cloudresourcemanager.googleapis.com/Project"
128
- # - pattern: "shelves/{shelf}"
129
- # parent_type: "cloudresourcemanager.googleapis.com/Folder"
65
+ # pattern: "projects/{project}/logs/{log}"
66
+ # pattern: "folders/{folder}/logs/{log}"
67
+ # pattern: "organizations/{organization}/logs/{log}"
68
+ # pattern: "billingAccounts/{billing_account}/logs/{log}"
130
69
  # @!attribute [rw] type
131
70
  # @return [::String]
132
71
  # The resource type. It must be in the format of
@@ -22,12 +22,7 @@ module Google
22
22
  module Bigquery
23
23
  module DataTransfer
24
24
  module V1
25
- # Represents a data source parameter with validation rules, so that
26
- # parameters can be rendered in the UI. These parameters are given to us by
27
- # supported data sources, and include all needed information for rendering
28
- # and validation.
29
- # Thus, whoever uses this api can decide to generate either generic ui,
30
- # or custom data source specific forms.
25
+ # A parameter used to define custom fields in a data source definition.
31
26
  # @!attribute [rw] param_id
32
27
  # @return [::String]
33
28
  # Parameter identifier.
@@ -108,8 +103,7 @@ module Google
108
103
  end
109
104
  end
110
105
 
111
- # Represents data source metadata. Metadata is sufficient to
112
- # render UI and request proper OAuth tokens.
106
+ # Defines the properties and custom parameters for a data source.
113
107
  # @!attribute [r] name
114
108
  # @return [::String]
115
109
  # Output only. Data source resource name.
@@ -195,9 +189,7 @@ module Google
195
189
  # exchanged for a refresh token on the backend.
196
190
  GOOGLE_PLUS_AUTHORIZATION_CODE = 2
197
191
 
198
- # Use First Party Client OAuth. First Party Client OAuth doesn't require a
199
- # refresh token to get an offline access token. Instead, it uses a
200
- # client-signed JWT assertion to retrieve an access token.
192
+ # Use First Party OAuth.
201
193
  FIRST_PARTY_OAUTH = 3
202
194
  end
203
195
 
@@ -268,9 +260,9 @@ module Google
268
260
  # A request to create a data transfer configuration. If new credentials are
269
261
  # needed for this transfer configuration, an authorization code must be
270
262
  # provided. If an authorization code is provided, the transfer configuration
271
- # will be associated with the user id corresponding to the
272
- # authorization code. Otherwise, the transfer configuration will be associated
273
- # with the calling user.
263
+ # will be associated with the user id corresponding to the authorization code.
264
+ # Otherwise, the transfer configuration will be associated with the calling
265
+ # user.
274
266
  # @!attribute [rw] parent
275
267
  # @return [::String]
276
268
  # Required. The BigQuery project id where the transfer configuration should be created.
@@ -447,9 +439,7 @@ module Google
447
439
  extend ::Google::Protobuf::MessageExts::ClassMethods
448
440
  end
449
441
 
450
- # A request to list data transfer runs. UI can use this method to show/filter
451
- # specific data transfer runs. The data source can use this method to request
452
- # all scheduled transfer runs.
442
+ # A request to list data transfer runs.
453
443
  # @!attribute [rw] parent
454
444
  # @return [::String]
455
445
  # Required. Name of transfer configuration for which transfer runs should be retrieved.
@@ -617,14 +607,14 @@ module Google
617
607
  # @return [::Google::Protobuf::Timestamp]
618
608
  # Start time of the range of transfer runs. For example,
619
609
  # `"2017-05-25T00:00:00+00:00"`. The start_time must be strictly less than
620
- # the end_time. Creates transfer runs where run_time is in the range betwen
621
- # start_time (inclusive) and end_time (exlusive).
610
+ # the end_time. Creates transfer runs where run_time is in the range
611
+ # between start_time (inclusive) and end_time (exclusive).
622
612
  # @!attribute [rw] end_time
623
613
  # @return [::Google::Protobuf::Timestamp]
624
614
  # End time of the range of transfer runs. For example,
625
615
  # `"2017-05-30T00:00:00+00:00"`. The end_time must not be in the future.
626
- # Creates transfer runs where run_time is in the range betwen start_time
627
- # (inclusive) and end_time (exlusive).
616
+ # Creates transfer runs where run_time is in the range between start_time
617
+ # (inclusive) and end_time (exclusive).
628
618
  class TimeRange
629
619
  include ::Google::Protobuf::MessageExts
630
620
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -639,6 +629,21 @@ module Google
639
629
  include ::Google::Protobuf::MessageExts
640
630
  extend ::Google::Protobuf::MessageExts::ClassMethods
641
631
  end
632
+
633
+ # A request to enroll a set of data sources so they are visible in the
634
+ # BigQuery UI's `Transfer` tab.
635
+ # @!attribute [rw] name
636
+ # @return [::String]
637
+ # The name of the project resource in the form:
638
+ # `projects/{project_id}`
639
+ # @!attribute [rw] data_source_ids
640
+ # @return [::Array<::String>]
641
+ # Data sources that are enrolled. It is required to provide at least one
642
+ # data source id.
643
+ class EnrollDataSourcesRequest
644
+ include ::Google::Protobuf::MessageExts
645
+ extend ::Google::Protobuf::MessageExts::ClassMethods
646
+ end
642
647
  end
643
648
  end
644
649
  end
@@ -57,6 +57,15 @@ module Google
57
57
  extend ::Google::Protobuf::MessageExts::ClassMethods
58
58
  end
59
59
 
60
+ # Information about a user.
61
+ # @!attribute [rw] email
62
+ # @return [::String]
63
+ # E-mail address of the user.
64
+ class UserInfo
65
+ include ::Google::Protobuf::MessageExts
66
+ extend ::Google::Protobuf::MessageExts::ClassMethods
67
+ end
68
+
60
69
  # Represents a data transfer configuration. A transfer configuration
61
70
  # contains all metadata needed to perform a data transfer. For example,
62
71
  # `destination_dataset_id` specifies where data should be stored.
@@ -66,12 +75,11 @@ module Google
66
75
  # @!attribute [rw] name
67
76
  # @return [::String]
68
77
  # The resource name of the transfer config.
69
- # Transfer config names have the form of
78
+ # Transfer config names have the form
70
79
  # `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`.
71
- # The name is automatically generated based on the config_id specified in
72
- # CreateTransferConfigRequest along with project_id and region. If config_id
73
- # is not provided, usually a uuid, even though it is not guaranteed or
74
- # required, will be generated for config_id.
80
+ # Where `config_id` is usually a uuid, even though it is not
81
+ # guaranteed or required. The name is ignored when creating a transfer
82
+ # config.
75
83
  # @!attribute [rw] destination_dataset_id
76
84
  # @return [::String]
77
85
  # The BigQuery target dataset id.
@@ -83,7 +91,10 @@ module Google
83
91
  # Data source id. Cannot be changed once data transfer is created.
84
92
  # @!attribute [rw] params
85
93
  # @return [::Google::Protobuf::Struct]
86
- # Data transfer specific parameters.
94
+ # Parameters specific to each data source. For more information see the
95
+ # bq tab in the 'Setting up a data transfer' section for each data source.
96
+ # For example the parameters for Cloud Storage transfers are listed here:
97
+ # https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
87
98
  # @!attribute [rw] schedule
88
99
  # @return [::String]
89
100
  # Data transfer schedule.
@@ -97,7 +108,9 @@ module Google
97
108
  # `first sunday of quarter 00:00`.
98
109
  # See more explanation about the format here:
99
110
  # https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
100
- # NOTE: the granularity should be at least 8 hours, or less frequent.
111
+ #
112
+ # NOTE: The minimum interval time between recurring transfers depends on the
113
+ # data source; refer to the documentation for your data source.
101
114
  # @!attribute [rw] schedule_options
102
115
  # @return [::Google::Cloud::Bigquery::DataTransfer::V1::ScheduleOptions]
103
116
  # Options customizing the data transfer schedule.
@@ -132,10 +145,18 @@ module Google
132
145
  # @return [::String]
133
146
  # Pub/Sub topic where notifications will be sent after transfer runs
134
147
  # associated with this transfer config finish.
148
+ #
149
+ # The format for specifying a pubsub topic is:
150
+ # `projects/{project}/topics/{topic}`
135
151
  # @!attribute [rw] email_preferences
136
152
  # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
137
153
  # Email notifications will be sent according to these preferences
138
154
  # to the email address of the user who owns this transfer config.
155
+ # @!attribute [r] owner_info
156
+ # @return [::Google::Cloud::Bigquery::DataTransfer::V1::UserInfo]
157
+ # Output only. Information about the user whose credentials are used to transfer data.
158
+ # Populated only for `transferConfigs.get` requests. In case the user
159
+ # information is not available, this field will not be populated.
139
160
  class TransferConfig
140
161
  include ::Google::Protobuf::MessageExts
141
162
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -171,7 +192,10 @@ module Google
171
192
  # Output only. Last time the data transfer run state was updated.
172
193
  # @!attribute [r] params
173
194
  # @return [::Google::Protobuf::Struct]
174
- # Output only. Data transfer specific parameters.
195
+ # Output only. Parameters specific to each data source. For more information see the
196
+ # bq tab in the 'Setting up a data transfer' section for each data source.
197
+ # For example the parameters for Cloud Storage transfers are listed here:
198
+ # https://cloud.google.com/bigquery-transfer/docs/cloud-storage-transfer#bq
175
199
  # @!attribute [r] destination_dataset_id
176
200
  # @return [::String]
177
201
  # Output only. The BigQuery target dataset id.
@@ -194,7 +218,10 @@ module Google
194
218
  # @!attribute [r] notification_pubsub_topic
195
219
  # @return [::String]
196
220
  # Output only. Pub/Sub topic where a notification will be sent after this
197
- # transfer run finishes
221
+ # transfer run finishes.
222
+ #
223
+ # The format for specifying a pubsub topic is:
224
+ # `projects/{project}/topics/{topic}`
198
225
  # @!attribute [r] email_preferences
199
226
  # @return [::Google::Cloud::Bigquery::DataTransfer::V1::EmailPreferences]
200
227
  # Output only. Email notifications will be sent according to these
@@ -250,23 +277,23 @@ module Google
250
277
 
251
278
  # Represents data transfer run state.
252
279
  module TransferState
253
- # State placeholder.
280
+ # State placeholder (0).
254
281
  TRANSFER_STATE_UNSPECIFIED = 0
255
282
 
256
283
  # Data transfer is scheduled and is waiting to be picked up by
257
- # data transfer backend.
284
+ # data transfer backend (2).
258
285
  PENDING = 2
259
286
 
260
- # Data transfer is in progress.
287
+ # Data transfer is in progress (3).
261
288
  RUNNING = 3
262
289
 
263
- # Data transfer completed successfully.
290
+ # Data transfer completed successfully (4).
264
291
  SUCCEEDED = 4
265
292
 
266
- # Data transfer failed.
293
+ # Data transfer failed (5).
267
294
  FAILED = 5
268
295
 
269
- # Data transfer is cancelled.
296
+ # Data transfer is cancelled (6).
270
297
  CANCELLED = 6
271
298
  end
272
299
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: google-cloud-bigquery-data_transfer-v1
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.4.3
4
+ version: 0.5.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Google LLC
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2021-08-11 00:00:00.000000000 Z
11
+ date: 2022-01-20 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: gapic-common
@@ -212,7 +212,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
212
212
  - !ruby/object:Gem::Version
213
213
  version: '0'
214
214
  requirements: []
215
- rubygems_version: 3.2.17
215
+ rubygems_version: 3.3.5
216
216
  signing_key:
217
217
  specification_version: 4
218
218
  summary: API Client library for the BigQuery Data Transfer Service V1 API