google-cloud-bigquery-data_transfer 0.2.5 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +4 -4
- data/lib/google/cloud/bigquery/data_transfer.rb +7 -3
- data/lib/google/cloud/bigquery/data_transfer/v1.rb +11 -3
- data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client.rb +160 -5
- data/lib/google/cloud/bigquery/data_transfer/v1/data_transfer_service_client_config.json +5 -0
- data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/data_transfer.rb +1 -2
- data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/datasource.rb +25 -0
- data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/datatransfer.rb +66 -14
- data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/transfer.rb +37 -19
- data/lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/wrappers.rb +3 -67
- data/lib/google/cloud/bigquery/data_transfer/version.rb +1 -1
- data/lib/google/cloud/bigquery/datatransfer/v1/datasource_pb.rb +170 -0
- data/lib/google/cloud/bigquery/datatransfer/v1/datasource_services_pb.rb +103 -0
- data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb +21 -0
- data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb +8 -1
- data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb +7 -0
- metadata +8 -5
data/lib/google/cloud/bigquery/datatransfer/v1/datasource_services_pb.rb ADDED
@@ -0,0 +1,103 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# Source: google/cloud/bigquery/datatransfer/v1/datasource.proto for package 'google.cloud.bigquery.datatransfer.v1'
+# Original file comments:
+# Copyright 2019 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+
+
+require 'grpc'
+require 'google/cloud/bigquery/datatransfer/v1/datasource_pb'
+
+module Google
+  module Cloud
+    module Bigquery
+      module Datatransfer
+        module V1
+          module DataSourceService
+            # The Google BigQuery Data Transfer API allows BigQuery users to
+            # configure transfer of their data from other Google Products into BigQuery.
+            # This service exposes methods that should be used by data source backend.
+            class Service
+
+              include GRPC::GenericService
+
+              self.marshal_class_method = :encode
+              self.unmarshal_class_method = :decode
+              self.service_name = 'google.cloud.bigquery.datatransfer.v1.DataSourceService'
+
+              # Update a transfer run. If successful, resets
+              # data_source.update_deadline_seconds timer.
+              rpc :UpdateTransferRun, UpdateTransferRunRequest, TransferRun
+              # Log messages for a transfer run. If successful (at least 1 message), resets
+              # data_source.update_deadline_seconds timer.
+              rpc :LogTransferRunMessages, LogTransferRunMessagesRequest, Google::Protobuf::Empty
+              # Notify the Data Transfer Service that data is ready for loading.
+              # The Data Transfer Service will start and monitor multiple BigQuery Load
+              # jobs for a transfer run. Monitored jobs will be automatically retried
+              # and produce log messages when starting and finishing a job.
+              # Can be called multiple times for the same transfer run.
+              rpc :StartBigQueryJobs, StartBigQueryJobsRequest, Google::Protobuf::Empty
+              # Notify the Data Transfer Service that the data source is done processing
+              # the run. No more status updates or requests to start/monitor jobs will be
+              # accepted. The run will be finalized by the Data Transfer Service when all
+              # monitored jobs are completed.
+              # Does not need to be called if the run is set to FAILED.
+              rpc :FinishRun, FinishRunRequest, Google::Protobuf::Empty
+              # Creates a data source definition. Calling this method will automatically
+              # use your credentials to create the following Google Cloud resources in
+              # YOUR Google Cloud project.
+              # 1. OAuth client
+              # 2. Pub/Sub Topics and Subscriptions in each supported_location_ids. e.g.,
+              # projects/\\{project_id}/{topics|subscriptions}/bigquerydatatransfer.\\{data_source_id}.\\{location_id}.run
+              # The field data_source.client_id should be left empty in the input request,
+              # as the API will create a new OAuth client on behalf of the caller. On the
+              # other hand data_source.scopes usually need to be set when there are OAuth
+              # scopes that need to be granted by end users.
+              # 3. We need a longer deadline due to the 60 seconds SLO from Pub/Sub admin
+              # Operations. This also applies to update and delete data source definition.
+              rpc :CreateDataSourceDefinition, CreateDataSourceDefinitionRequest, DataSourceDefinition
+              # Updates an existing data source definition. If changing
+              # supported_location_ids, triggers same effects as mentioned in "Create a
+              # data source definition."
+              rpc :UpdateDataSourceDefinition, UpdateDataSourceDefinitionRequest, DataSourceDefinition
+              # Deletes a data source definition, all of the transfer configs associated
+              # with this data source definition (if any) must be deleted first by the user
+              # in ALL regions, in order to delete the data source definition.
+              # This method is primarily meant for deleting data sources created during
+              # testing stage.
+              # If the data source is referenced by transfer configs in the region
+              # specified in the request URL, the method will fail immediately. If in the
+              # current region (e.g., US) it's not used by any transfer configs, but in
+              # another region (e.g., EU) it is, then although the method will succeed in
+              # region US, but it will fail when the deletion operation is replicated to
+              # region EU. And eventually, the system will replicate the data source
+              # definition back from EU to US, in order to bring all regions to
+              # consistency. The final effect is that the data source appears to be
+              # 'undeleted' in the US region.
+              rpc :DeleteDataSourceDefinition, DeleteDataSourceDefinitionRequest, Google::Protobuf::Empty
+              # Retrieves an existing data source definition.
+              rpc :GetDataSourceDefinition, GetDataSourceDefinitionRequest, DataSourceDefinition
+              # Lists supported data source definitions.
+              rpc :ListDataSourceDefinitions, ListDataSourceDefinitionsRequest, ListDataSourceDefinitionsResponse
+            end
+
+            Stub = Service.rpc_stub_class
+          end
+        end
+      end
+    end
+  end
+end
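The file above is a standard grpc-ruby service definition: `include GRPC::GenericService` plus the `rpc` declarations describe the service, and `Stub = Service.rpc_stub_class` produces a client class whose instance methods are the snake_cased RPC names. A minimal sketch of calling it directly follows; the endpoint, the credential setup, and the request/response field names are assumptions here, and the gem's hand-written clients remain the supported entry point.

```ruby
require 'grpc'
require 'google/cloud/bigquery/datatransfer/v1/datasource_services_pb'

# Raw generated stub; TLS only, OAuth call credentials omitted in this sketch.
stub = Google::Cloud::Bigquery::Datatransfer::V1::DataSourceService::Stub.new(
  'bigquerydatatransfer.googleapis.com:443',  # assumed endpoint
  GRPC::Core::ChannelCredentials.new
)

# Each `rpc :Name, Req, Resp` line becomes a snake_case method on the stub,
# e.g. :ListDataSourceDefinitions -> #list_data_source_definitions.
request = Google::Cloud::Bigquery::Datatransfer::V1::ListDataSourceDefinitionsRequest.new(
  parent: 'projects/my-project/locations/us'  # assumed field name and format
)
response = stub.list_data_source_definitions(request)
response.data_source_definitions.each { |d| puts d.name }  # assumed field names
```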
data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb CHANGED
@@ -11,6 +11,7 @@ require 'google/protobuf/empty_pb'
 require 'google/protobuf/field_mask_pb'
 require 'google/protobuf/timestamp_pb'
 require 'google/protobuf/wrappers_pb'
+require 'google/api/client_pb'
 Google::Protobuf::DescriptorPool.generated_pool.build do
   add_message "google.cloud.bigquery.datatransfer.v1.DataSourceParameter" do
     optional :param_id, :string, 1
@@ -28,6 +29,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
     optional :validation_help_url, :string, 13
     optional :immutable, :bool, 14
     optional :recurse, :bool, 15
+    optional :deprecated, :bool, 20
   end
   add_enum "google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type" do
     value :TYPE_UNSPECIFIED, 0
@@ -84,11 +86,13 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
     optional :parent, :string, 1
     optional :transfer_config, :message, 2, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
     optional :authorization_code, :string, 3
+    optional :version_info, :string, 5
   end
   add_message "google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest" do
     optional :transfer_config, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferConfig"
     optional :authorization_code, :string, 3
     optional :update_mask, :message, 4, "google.protobuf.FieldMask"
+    optional :version_info, :string, 5
   end
   add_message "google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest" do
     optional :name, :string, 1
@@ -151,6 +155,20 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
   add_message "google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse" do
     repeated :runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
   end
+  add_message "google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest" do
+    optional :parent, :string, 1
+    oneof :time do
+      optional :requested_time_range, :message, 3, "google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange"
+      optional :requested_run_time, :message, 4, "google.protobuf.Timestamp"
+    end
+  end
+  add_message "google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange" do
+    optional :start_time, :message, 1, "google.protobuf.Timestamp"
+    optional :end_time, :message, 2, "google.protobuf.Timestamp"
+  end
+  add_message "google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse" do
+    repeated :runs, :message, 1, "google.cloud.bigquery.datatransfer.v1.TransferRun"
+  end
 end
 
 module Google
@@ -183,6 +201,9 @@ module Google
           CheckValidCredsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse").msgclass
           ScheduleTransferRunsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest").msgclass
           ScheduleTransferRunsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse").msgclass
+          StartManualTransferRunsRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest").msgclass
+          StartManualTransferRunsRequest::TimeRange = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange").msgclass
+          StartManualTransferRunsResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse").msgclass
         end
       end
     end
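The new `StartManualTransferRuns` messages above carry a `parent` plus a `time` oneof: either a `requested_time_range` (with `start_time`/`end_time`) or a single `requested_run_time`. A small sketch of building the time-range form of the request from the descriptors shown here; the project/config names and times are placeholders, and the `parent` resource format is an assumption.

```ruby
require 'google/cloud/bigquery/datatransfer/v1/datatransfer_pb'

v1 = Google::Cloud::Bigquery::Datatransfer::V1

# Bounded range: runs are created for run_times in [start_time, end_time).
range = v1::StartManualTransferRunsRequest::TimeRange.new(
  start_time: Google::Protobuf::Timestamp.new(seconds: Time.utc(2019, 8, 1).to_i),
  end_time:   Google::Protobuf::Timestamp.new(seconds: Time.utc(2019, 8, 2).to_i)
)

# Setting requested_time_range selects that branch of the `time` oneof;
# the alternative is a single requested_run_time Timestamp.
request = v1::StartManualTransferRunsRequest.new(
  parent: 'projects/my-project/locations/us/transferConfigs/my-config',  # assumed format
  requested_time_range: range
)
```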
data/lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb CHANGED
@@ -1,7 +1,7 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # Source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto for package 'google.cloud.bigquery.datatransfer.v1'
 # Original file comments:
-# Copyright
+# Copyright 2019 Google LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,6 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+#
 
 
 require 'grpc'
@@ -60,7 +61,13 @@ module Google
               # For each date - or whatever granularity the data source supports - in the
               # range, one transfer run is created.
               # Note that runs are created per UTC time in the time range.
+              # DEPRECATED: use StartManualTransferRuns instead.
               rpc :ScheduleTransferRuns, ScheduleTransferRunsRequest, ScheduleTransferRunsResponse
+              # Start manual transfer runs to be executed now with schedule_time equal to
+              # current time. The transfer runs can be created for a time range where the
+              # run_time is between start_time (inclusive) and end_time (exclusive), or for
+              # a specific run_time.
+              rpc :StartManualTransferRuns, StartManualTransferRunsRequest, StartManualTransferRunsResponse
               # Returns information about the particular transfer run.
               rpc :GetTransferRun, GetTransferRunRequest, TransferRun
               # Deletes the specified transfer run.
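The service gains the matching `StartManualTransferRuns` RPC, with `ScheduleTransferRuns` now marked deprecated in its favor. Continuing the sketch above, a call through the raw generated stub would look roughly like this; the endpoint and credentials are assumptions, the snake_cased method name follows grpc-ruby's stub conventions, per-call auth metadata is omitted, and the updated hand-written `DataTransferServiceClient` in this release presumably wraps the same RPC.

```ruby
require 'google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb'

stub = Google::Cloud::Bigquery::Datatransfer::V1::DataTransferService::Stub.new(
  'bigquerydatatransfer.googleapis.com:443',  # assumed endpoint
  GRPC::Core::ChannelCredentials.new          # TLS only; auth omitted
)

# `request` is the StartManualTransferRunsRequest built in the previous sketch.
response = stub.start_manual_transfer_runs(request)
response.runs.each { |run| puts run.name }    # `runs` from the descriptor; `name` assumed
```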
data/lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb CHANGED
@@ -9,6 +9,11 @@ require 'google/protobuf/struct_pb'
 require 'google/protobuf/timestamp_pb'
 require 'google/rpc/status_pb'
 Google::Protobuf::DescriptorPool.generated_pool.build do
+  add_message "google.cloud.bigquery.datatransfer.v1.ScheduleOptions" do
+    optional :disable_auto_scheduling, :bool, 3
+    optional :start_time, :message, 1, "google.protobuf.Timestamp"
+    optional :end_time, :message, 2, "google.protobuf.Timestamp"
+  end
   add_message "google.cloud.bigquery.datatransfer.v1.TransferConfig" do
     optional :name, :string, 1
     optional :destination_dataset_id, :string, 2
@@ -16,6 +21,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
     optional :data_source_id, :string, 5
     optional :params, :message, 9, "google.protobuf.Struct"
     optional :schedule, :string, 7
+    optional :schedule_options, :message, 24, "google.cloud.bigquery.datatransfer.v1.ScheduleOptions"
     optional :data_refresh_window_days, :int32, 12
     optional :disabled, :bool, 13
     optional :update_time, :message, 4, "google.protobuf.Timestamp"
@@ -70,6 +76,7 @@ module Google
     module Bigquery
       module Datatransfer
        module V1
+          ScheduleOptions = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.ScheduleOptions").msgclass
          TransferConfig = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferConfig").msgclass
          TransferRun = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferRun").msgclass
          TransferMessage = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.datatransfer.v1.TransferMessage").msgclass
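transfer.proto gains a `ScheduleOptions` message (`disable_auto_scheduling`, `start_time`, `end_time`) and `TransferConfig` gains a `schedule_options` field referencing it. A sketch of attaching the new options to a config, using only the field names from the descriptors above; the other config values are placeholders.

```ruby
require 'google/cloud/bigquery/datatransfer/v1/transfer_pb'

v1 = Google::Cloud::Bigquery::Datatransfer::V1

options = v1::ScheduleOptions.new(
  disable_auto_scheduling: true,  # presumably leaves runs to StartManualTransferRuns
  start_time: Google::Protobuf::Timestamp.new(seconds: Time.utc(2019, 9, 1).to_i),
  end_time:   Google::Protobuf::Timestamp.new(seconds: Time.utc(2019, 12, 31).to_i)
)

config = v1::TransferConfig.new(
  destination_dataset_id: 'my_dataset',       # placeholder
  data_source_id:         'scheduled_query',  # placeholder
  schedule:               'every 24 hours',   # placeholder
  schedule_options:       options
)
```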
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: google-cloud-bigquery-data_transfer
 version: !ruby/object:Gem::Version
-  version: 0.2.5
+  version: 0.3.0
 platform: ruby
 authors:
 - Google LLC
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-
+date: 2019-08-23 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: google-gax
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.7'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.7'
 - !ruby/object:Gem::Dependency
   name: minitest
   requirement: !ruby/object:Gem::Requirement
@@ -115,6 +115,7 @@ files:
 - lib/google/cloud/bigquery/data_transfer/v1/data_transfer_services_pb.rb
 - lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/data_transfer.rb
 - lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/data_transfer/v1/transfer.rb
+- lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/datasource.rb
 - lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/datatransfer.rb
 - lib/google/cloud/bigquery/data_transfer/v1/doc/google/cloud/bigquery/datatransfer/v1/transfer.rb
 - lib/google/cloud/bigquery/data_transfer/v1/doc/google/protobuf/any.rb
@@ -127,6 +128,8 @@ files:
 - lib/google/cloud/bigquery/data_transfer/v1/doc/google/rpc/status.rb
 - lib/google/cloud/bigquery/data_transfer/v1/transfer_pb.rb
 - lib/google/cloud/bigquery/data_transfer/version.rb
+- lib/google/cloud/bigquery/datatransfer/v1/datasource_pb.rb
+- lib/google/cloud/bigquery/datatransfer/v1/datasource_services_pb.rb
 - lib/google/cloud/bigquery/datatransfer/v1/datatransfer_pb.rb
 - lib/google/cloud/bigquery/datatransfer/v1/datatransfer_services_pb.rb
 - lib/google/cloud/bigquery/datatransfer/v1/transfer_pb.rb
@@ -149,7 +152,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.0.
+rubygems_version: 3.0.4
 signing_key:
 specification_version: 4
 summary: API Client library for BigQuery Data Transfer API