google-cloud-bigquery-migration-v2 0.5.0 → 0.7.0
- checksums.yaml +4 -4
- data/AUTHENTICATION.md +1 -1
- data/README.md +3 -3
- data/lib/google/cloud/bigquery/migration/v2/migration_entities_pb.rb +29 -56
- data/lib/google/cloud/bigquery/migration/v2/migration_error_details_pb.rb +25 -14
- data/lib/google/cloud/bigquery/migration/v2/migration_metrics_pb.rb +26 -24
- data/lib/google/cloud/bigquery/migration/v2/migration_service/client.rb +21 -23
- data/lib/google/cloud/bigquery/migration/v2/migration_service.rb +1 -1
- data/lib/google/cloud/bigquery/migration/v2/migration_service_pb.rb +26 -40
- data/lib/google/cloud/bigquery/migration/v2/translation_config_pb.rb +24 -101
- data/lib/google/cloud/bigquery/migration/v2/version.rb +1 -1
- data/lib/google/cloud/bigquery/migration/v2.rb +2 -2
- data/proto_docs/google/api/client.rb +381 -0
- data/proto_docs/google/api/distribution.rb +2 -0
- data/proto_docs/google/api/launch_stage.rb +3 -3
- data/proto_docs/google/api/metric.rb +10 -6
- data/proto_docs/google/cloud/bigquery/migration/v2/migration_entities.rb +6 -1
- data/proto_docs/google/cloud/bigquery/migration/v2/migration_error_details.rb +6 -6
- data/proto_docs/google/cloud/bigquery/migration/v2/migration_metrics.rb +2 -2
- data/proto_docs/google/cloud/bigquery/migration/v2/migration_service.rb +7 -7
- data/proto_docs/google/cloud/bigquery/migration/v2/translation_config.rb +6 -3
- data/proto_docs/google/protobuf/any.rb +7 -4
- data/proto_docs/google/protobuf/timestamp.rb +1 -3
- data/proto_docs/google/rpc/error_details.rb +102 -66
- metadata +10 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2ed4c0744065f557071e7316e238a71570507ec3ad2f8ca86d5bbfb966936aef
+  data.tar.gz: 9baab5ad81e3422236855d6a8aa3339ff59378b1ee50d126461850b39e8b1a32
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d5c024f83f89547197f6b7aaa90247389d64e988d6c2f00117b83feb7fd1935ad40cace91d69e0f4139bdb2ef61cacb66f2cc551cfb84485c8c7b48ac53dc853
+  data.tar.gz: 86da2369cec346f571ffebbc33546233df423ff7ecb02b232db62398001b48e6364d96c2d5a263d74edaa2219fe658343e5a8ca071953a7bf33cff00c68a32ab
data/AUTHENTICATION.md
CHANGED
@@ -112,7 +112,7 @@ credentials are discovered.
 To configure your system for this, simply:
 
 1. [Download and install the Cloud SDK](https://cloud.google.com/sdk)
-2. Authenticate using OAuth 2.0 `$ gcloud auth login`
+2. Authenticate using OAuth 2.0 `$ gcloud auth application-default login`
 3. Write code as if already authenticated.
 
 **NOTE:** This is _not_ recommended for running in production. The Cloud SDK
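For local development the updated step is simply to run `gcloud auth application-default login`. As a hedged alternative (the key-file path below is a placeholder), credentials can also be passed explicitly when constructing the client, since the client config accepts a key-file path as documented later in this diff:

```ruby
require "google/cloud/bigquery/migration/v2"

# Minimal sketch: pass credentials explicitly instead of relying on gcloud
# Application Default Credentials. The key-file path is a placeholder.
client = Google::Cloud::Bigquery::Migration::V2::MigrationService::Client.new do |config|
  config.credentials = "/path/to/service-account.json"
end
```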
data/README.md
CHANGED
@@ -1,6 +1,6 @@
 # Ruby Client for the BigQuery Migration V2 API
 
-
+The migration service, exposing apis for migration jobs operations, and agent management.
 
 The BigQuery Migration Service is a comprehensive solution for migrating your data warehouse to BigQuery.
 
@@ -46,8 +46,8 @@ for general usage information.
 ## Enabling Logging
 
 To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library.
-The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/
-or a [`Google::Cloud::Logging::Logger`](https://
+The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/current/stdlibs/logger/Logger.html) as shown below,
+or a [`Google::Cloud::Logging::Logger`](https://cloud.google.com/ruby/docs/reference/google-cloud-logging/latest)
 that will write logs to [Cloud Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb)
 and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information.
 
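The corrected links point at the Ruby stdlib `Logger` and the google-cloud-logging logger. A minimal sketch of wiring a stdlib `Logger` into gRPC (the module name and WARN level are arbitrary choices, not part of this gem):

```ruby
require "logger"

# Minimal sketch: give the gRPC runtime a module-level logger before it
# installs its default (see grpc/logconfig.rb). Level choice is arbitrary.
module AppGrpcLogger
  LOGGER = Logger.new $stderr, level: Logger::WARN

  def logger
    LOGGER
  end
end

module GRPC
  extend AppGrpcLogger
end
```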
data/lib/google/cloud/bigquery/migration/v2/migration_entities_pb.rb
CHANGED
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/migration/v2/migration_entities.proto
 
@@ -11,64 +12,36 @@ require 'google/cloud/bigquery/migration/v2/translation_config_pb'
 require 'google/protobuf/timestamp_pb'
 require 'google/rpc/error_details_pb'
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-end
-end
-add_enum "google.cloud.bigquery.migration.v2.MigrationTask.State" do
-value :STATE_UNSPECIFIED, 0
-value :PENDING, 1
-value :ORCHESTRATING, 2
-value :RUNNING, 3
-value :PAUSED, 4
-value :SUCCEEDED, 5
-value :FAILED, 6
-end
-add_message "google.cloud.bigquery.migration.v2.MigrationSubtask" do
-optional :name, :string, 1
-optional :task_id, :string, 2
-optional :type, :string, 3
-optional :state, :enum, 5, "google.cloud.bigquery.migration.v2.MigrationSubtask.State"
-optional :processing_error, :message, 6, "google.rpc.ErrorInfo"
-repeated :resource_error_details, :message, 12, "google.cloud.bigquery.migration.v2.ResourceErrorDetail"
-optional :resource_error_count, :int32, 13
-optional :create_time, :message, 7, "google.protobuf.Timestamp"
-optional :last_update_time, :message, 8, "google.protobuf.Timestamp"
-repeated :metrics, :message, 11, "google.cloud.bigquery.migration.v2.TimeSeries"
-end
-add_enum "google.cloud.bigquery.migration.v2.MigrationSubtask.State" do
-value :STATE_UNSPECIFIED, 0
-value :ACTIVE, 1
-value :RUNNING, 2
-value :SUCCEEDED, 3
-value :FAILED, 4
-value :PAUSED, 5
+
descriptor_data = "\n;google/cloud/bigquery/migration/v2/migration_entities.proto\x12\"google.cloud.bigquery.migration.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a@google/cloud/bigquery/migration/v2/migration_error_details.proto\x1a:google/cloud/bigquery/migration/v2/migration_metrics.proto\x1a;google/cloud/bigquery/migration/v2/translation_config.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/rpc/error_details.proto\"\xf3\x04\n\x11MigrationWorkflow\x12\x14\n\x04name\x18\x01 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x05\x12\x14\n\x0c\x64isplay_name\x18\x06 \x01(\t\x12O\n\x05tasks\x18\x02 \x03(\x0b\x32@.google.cloud.bigquery.migration.v2.MigrationWorkflow.TasksEntry\x12O\n\x05state\x18\x03 \x01(\x0e\x32;.google.cloud.bigquery.migration.v2.MigrationWorkflow.StateB\x03\xe0\x41\x03\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a_\n\nTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12@\n\x05value\x18\x02 \x01(\x0b\x32\x31.google.cloud.bigquery.migration.v2.MigrationTask:\x02\x38\x01\"Q\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\t\n\x05\x44RAFT\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\n\n\x06PAUSED\x10\x03\x12\r\n\tCOMPLETED\x10\x04:u\xea\x41r\n2bigquerymigration.googleapis.com/MigrationWorkflow\x12<projects/{project}/locations/{location}/workflows/{workflow}\"\x83\x04\n\rMigrationTask\x12\x62\n\x1atranslation_config_details\x18\x0e \x01(\x0b\x32<.google.cloud.bigquery.migration.v2.TranslationConfigDetailsH\x00\x12\x12\n\x02id\x18\x01 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x05\x12\x0c\n\x04type\x18\x02 \x01(\t\x12K\n\x05state\x18\x04 \x01(\x0e\x32\x37.google.cloud.bigquery.migration.v2.MigrationTask.StateB\x03\xe0\x41\x03\x12\x34\n\x10processing_error\x18\x05 \x01(\x0b\x32\x15.google.rpc.ErrorInfoB\x03\xe0\x41\x03\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"r\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x11\n\rORCHESTRATING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\n\n\x06PAUSED\x10\x04\x12\r\n\tSUCCEEDED\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x42\x0e\n\x0ctask_details\"\xf4\x05\n\x10MigrationSubtask\x12\x14\n\x04name\x18\x01 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x05\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x0c\n\x04type\x18\x03 \x01(\t\x12N\n\x05state\x18\x05 \x01(\x0e\x32:.google.cloud.bigquery.migration.v2.MigrationSubtask.StateB\x03\xe0\x41\x03\x12\x34\n\x10processing_error\x18\x06 \x01(\x0b\x32\x15.google.rpc.ErrorInfoB\x03\xe0\x41\x03\x12\\\n\x16resource_error_details\x18\x0c \x03(\x0b\x32\x37.google.cloud.bigquery.migration.v2.ResourceErrorDetailB\x03\xe0\x41\x03\x12\x1c\n\x14resource_error_count\x18\r \x01(\x05\x12/\n\x0b\x63reate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12?\n\x07metrics\x18\x0b 
\x03(\x0b\x32..google.cloud.bigquery.migration.v2.TimeSeries\"v\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\r\n\tSUCCEEDED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\n\n\x06PAUSED\x10\x05\x12\x16\n\x12PENDING_DEPENDENCY\x10\x06:\x88\x01\xea\x41\x84\x01\n1bigquerymigration.googleapis.com/MigrationSubtask\x12Oprojects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}B\xd2\x01\n&com.google.cloud.bigquery.migration.v2B\x16MigrationEntitiesProtoP\x01ZDcloud.google.com/go/bigquery/migration/apiv2/migrationpb;migrationpb\xaa\x02\"Google.Cloud.BigQuery.Migration.V2\xca\x02\"Google\\Cloud\\BigQuery\\Migration\\V2b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+
+begin
+  pool.add_serialized_file(descriptor_data)
+rescue TypeError => e
+  # Compatibility code: will be removed in the next major version.
+  require 'google/protobuf/descriptor_pb'
+  parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+  parsed.clear_dependency
+  serialized = parsed.class.encode(parsed)
+  file = pool.add_serialized_file(serialized)
+  warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+  imports = [
+    ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"],
+    ["google.cloud.bigquery.migration.v2.TranslationConfigDetails", "google/cloud/bigquery/migration/v2/translation_config.proto"],
+    ["google.rpc.ErrorInfo", "google/rpc/error_details.proto"],
+    ["google.cloud.bigquery.migration.v2.ResourceErrorDetail", "google/cloud/bigquery/migration/v2/migration_error_details.proto"],
+    ["google.cloud.bigquery.migration.v2.TimeSeries", "google/cloud/bigquery/migration/v2/migration_metrics.proto"],
+  ]
+  imports.each do |type_name, expected_filename|
+    import_file = pool.lookup(type_name).file_descriptor
+    if import_file.name != expected_filename
+      warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
     end
   end
+  warn "Each proto file must use a consistent fully-qualified name."
+  warn "This will become an error in the next major version."
 end
 
 module Google
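The regenerated file replaces the per-message DSL (`add_message`/`add_enum`) with a single serialized file descriptor registered in the generated pool, plus a compatibility rescue for older protobuf runtimes with import-path mismatches. A minimal sketch of how message and enum classes are then resolved from the pool (a standard generated-code pattern; the display name is illustrative):

```ruby
require "google/cloud/bigquery/migration/v2/migration_entities_pb"

# Minimal sketch: once the serialized descriptor is registered, classes are
# looked up from the generated descriptor pool by fully-qualified proto name.
pool = Google::Protobuf::DescriptorPool.generated_pool
workflow_class = pool.lookup("google.cloud.bigquery.migration.v2.MigrationWorkflow").msgclass
state_module   = pool.lookup("google.cloud.bigquery.migration.v2.MigrationWorkflow.State").enummodule

workflow = workflow_class.new display_name: "example-workflow"
puts workflow.class
puts state_module.constants.inspect
```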
data/lib/google/cloud/bigquery/migration/v2/migration_error_details_pb.rb
CHANGED
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/migration/v2/migration_error_details.proto
 
@@ -6,22 +7,32 @@ require 'google/protobuf'
 require 'google/api/field_behavior_pb'
 require 'google/rpc/error_details_pb'
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
descriptor_data = "\n@google/cloud/bigquery/migration/v2/migration_error_details.proto\x12\"google.cloud.bigquery.migration.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/rpc/error_details.proto\"\xb2\x01\n\x13ResourceErrorDetail\x12\x34\n\rresource_info\x18\x01 \x01(\x0b\x32\x18.google.rpc.ResourceInfoB\x03\xe0\x41\x02\x12K\n\rerror_details\x18\x02 \x03(\x0b\x32/.google.cloud.bigquery.migration.v2.ErrorDetailB\x03\xe0\x41\x02\x12\x18\n\x0b\x65rror_count\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02\"\x87\x01\n\x0b\x45rrorDetail\x12H\n\x08location\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.migration.v2.ErrorLocationB\x03\xe0\x41\x01\x12.\n\nerror_info\x18\x02 \x01(\x0b\x32\x15.google.rpc.ErrorInfoB\x03\xe0\x41\x02\"7\n\rErrorLocation\x12\x11\n\x04line\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x13\n\x06\x63olumn\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x42\xd6\x01\n&com.google.cloud.bigquery.migration.v2B\x1aMigrationErrorDetailsProtoP\x01ZDcloud.google.com/go/bigquery/migration/apiv2/migrationpb;migrationpb\xaa\x02\"Google.Cloud.BigQuery.Migration.V2\xca\x02\"Google\\Cloud\\BigQuery\\Migration\\V2b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+
+begin
+  pool.add_serialized_file(descriptor_data)
+rescue TypeError => e
+  # Compatibility code: will be removed in the next major version.
+  require 'google/protobuf/descriptor_pb'
+  parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+  parsed.clear_dependency
+  serialized = parsed.class.encode(parsed)
+  file = pool.add_serialized_file(serialized)
+  warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+  imports = [
+    ["google.rpc.ResourceInfo", "google/rpc/error_details.proto"],
+  ]
+  imports.each do |type_name, expected_filename|
+    import_file = pool.lookup(type_name).file_descriptor
+    if import_file.name != expected_filename
+      warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
     end
   end
+  warn "Each proto file must use a consistent fully-qualified name."
+  warn "This will become an error in the next major version."
 end
 
 module Google
data/lib/google/cloud/bigquery/migration/v2/migration_metrics_pb.rb
CHANGED
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/migration/v2/migration_metrics.proto
 
@@ -8,32 +9,33 @@ require 'google/api/field_behavior_pb'
 require 'google/api/metric_pb'
 require 'google/protobuf/timestamp_pb'
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-end
+
descriptor_data = "\n:google/cloud/bigquery/migration/v2/migration_metrics.proto\x12\"google.cloud.bigquery.migration.v2\x1a\x1dgoogle/api/distribution.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x17google/api/metric.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xe5\x01\n\nTimeSeries\x12\x13\n\x06metric\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12?\n\nvalue_type\x18\x02 \x01(\x0e\x32&.google.api.MetricDescriptor.ValueTypeB\x03\xe0\x41\x02\x12\x41\n\x0bmetric_kind\x18\x03 \x01(\x0e\x32\'.google.api.MetricDescriptor.MetricKindB\x03\xe0\x41\x01\x12>\n\x06points\x18\x04 \x03(\x0b\x32).google.cloud.bigquery.migration.v2.PointB\x03\xe0\x41\x02\"\x8a\x01\n\x05Point\x12\x42\n\x08interval\x18\x01 \x01(\x0b\x32\x30.google.cloud.bigquery.migration.v2.TimeInterval\x12=\n\x05value\x18\x02 \x01(\x0b\x32..google.cloud.bigquery.migration.v2.TypedValue\"v\n\x0cTimeInterval\x12\x33\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01\x12\x31\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\"\xaa\x01\n\nTypedValue\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x15\n\x0bint64_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x04 \x01(\tH\x00\x12\x36\n\x12\x64istribution_value\x18\x05 \x01(\x0b\x32\x18.google.api.DistributionH\x00\x42\x07\n\x05valueB\xd1\x01\n&com.google.cloud.bigquery.migration.v2B\x15MigrationMetricsProtoP\x01ZDcloud.google.com/go/bigquery/migration/apiv2/migrationpb;migrationpb\xaa\x02\"Google.Cloud.BigQuery.Migration.V2\xca\x02\"Google\\Cloud\\BigQuery\\Migration\\V2b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+
+begin
+  pool.add_serialized_file(descriptor_data)
+rescue TypeError => e
+  # Compatibility code: will be removed in the next major version.
+  require 'google/protobuf/descriptor_pb'
+  parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+  parsed.clear_dependency
+  serialized = parsed.class.encode(parsed)
+  file = pool.add_serialized_file(serialized)
+  warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+  imports = [
+    ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"],
+    ["google.api.Distribution", "google/api/distribution.proto"],
+  ]
+  imports.each do |type_name, expected_filename|
+    import_file = pool.lookup(type_name).file_descriptor
+    if import_file.name != expected_filename
+      warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
     end
   end
+  warn "Each proto file must use a consistent fully-qualified name."
+  warn "This will become an error in the next major version."
 end
 
 module Google
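The metrics descriptor likewise keeps `TimeSeries`, `Point`, `TimeInterval`, and `TypedValue`. A hedged sketch of assembling a single point (the metric name and values are illustrative, not defined by the gem):

```ruby
require "google/cloud/bigquery/migration/v2/migration_metrics_pb"

# Minimal sketch: a TimeSeries with one int64 point, per the descriptor above.
point = Google::Cloud::Bigquery::Migration::V2::Point.new(
  interval: Google::Cloud::Bigquery::Migration::V2::TimeInterval.new(
    end_time: Google::Protobuf::Timestamp.new(seconds: Time.now.to_i)
  ),
  value: Google::Cloud::Bigquery::Migration::V2::TypedValue.new(int64_value: 42)
)

series = Google::Cloud::Bigquery::Migration::V2::TimeSeries.new(
  metric:      "example/processed_rows",
  value_type:  :INT64,
  metric_kind: :GAUGE,
  points:      [point]
)
puts series.points.size
```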
data/lib/google/cloud/bigquery/migration/v2/migration_service/client.rb
CHANGED
@@ -143,7 +143,7 @@ module Google
 credentials = @config.credentials
 # Use self-signed JWT if the endpoint is unchanged from default,
 # but only if the default endpoint does not have a region prefix.
-enable_self_signed_jwt = @config.endpoint ==
+enable_self_signed_jwt = @config.endpoint == Configuration::DEFAULT_ENDPOINT &&
 !@config.endpoint.split(".").first.include?("-")
 credentials ||= Credentials.default scope: @config.scope,
 enable_self_signed_jwt: enable_self_signed_jwt
@@ -393,13 +393,11 @@ module Google
 # # Call the list_migration_workflows method.
 # result = client.list_migration_workflows request
 #
-# # The returned object is of type Gapic::PagedEnumerable. You can
-# #
-#
-# # methods are also available for managing paging directly.
-# result.each do |response|
+# # The returned object is of type Gapic::PagedEnumerable. You can iterate
+# # over elements, and API calls will be issued to fetch pages as needed.
+# result.each do |item|
 # # Each element is of type ::Google::Cloud::Bigquery::Migration::V2::MigrationWorkflow.
-# p
+# p item
 # end
 #
 def list_migration_workflows request, options = nil
@@ -731,18 +729,18 @@ module Google
 # @param read_mask [::Google::Protobuf::FieldMask, ::Hash]
 # Optional. The list of fields to be retrieved.
 # @param page_size [::Integer]
-# Optional. The maximum number of migration tasks to return. The service may
-# fewer than this number.
+# Optional. The maximum number of migration tasks to return. The service may
+# return fewer than this number.
 # @param page_token [::String]
-# Optional. A page token, received from previous `ListMigrationSubtasks`
-# Provide this to retrieve the subsequent page.
+# Optional. A page token, received from previous `ListMigrationSubtasks`
+# call. Provide this to retrieve the subsequent page.
 #
 # When paginating, all other parameters provided to `ListMigrationSubtasks`
 # must match the call that provided the page token.
 # @param filter [::String]
-# Optional. The filter to apply. This can be used to get the subtasks of a
-# tasks in a workflow, e.g. `migration_task = "ab012"` where
-# task ID (not the name in the named map).
+# Optional. The filter to apply. This can be used to get the subtasks of a
+# specific tasks in a workflow, e.g. `migration_task = "ab012"` where
+# `"ab012"` is the task ID (not the name in the named map).
 #
 # @yield [response, operation] Access the result along with the RPC operation
 # @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigquery::Migration::V2::MigrationSubtask>]
@@ -764,13 +762,11 @@ module Google
 # # Call the list_migration_subtasks method.
 # result = client.list_migration_subtasks request
 #
-# # The returned object is of type Gapic::PagedEnumerable. You can
-# #
-#
-# # methods are also available for managing paging directly.
-# result.each do |response|
+# # The returned object is of type Gapic::PagedEnumerable. You can iterate
+# # over elements, and API calls will be issued to fetch pages as needed.
+# result.each do |item|
 # # Each element is of type ::Google::Cloud::Bigquery::Migration::V2::MigrationSubtask.
-# p
+# p item
 # end
 #
 def list_migration_subtasks request, options = nil
@@ -853,9 +849,9 @@ module Google
 # * (`String`) The path to a service account key file in JSON format
 # * (`Hash`) A service account key as a Hash
 # * (`Google::Auth::Credentials`) A googleauth credentials object
-# (see the [googleauth docs](https://
+# (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials))
 # * (`Signet::OAuth2::Client`) A signet oauth2 client object
-# (see the [signet docs](https://
+# (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client))
 # * (`GRPC::Core::Channel`) a gRPC channel with included credentials
 # * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object
 # * (`nil`) indicating no credentials
@@ -897,7 +893,9 @@ module Google
 class Configuration
 extend ::Gapic::Config
 
-
+DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com"
+
+config_attr :endpoint, DEFAULT_ENDPOINT, ::String
 config_attr :credentials, nil do |value|
 allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
 allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
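The documentation hunks above reflect the regenerated sample style (`result.each do |item|`) and the new `Configuration::DEFAULT_ENDPOINT` constant. A hedged end-to-end sketch of listing workflows with paging, following the updated sample comments (the parent value is a placeholder; credentials come from the environment):

```ruby
require "google/cloud/bigquery/migration/v2"

# Minimal sketch: list migration workflows and iterate the PagedEnumerable.
client = Google::Cloud::Bigquery::Migration::V2::MigrationService::Client.new

request = Google::Cloud::Bigquery::Migration::V2::ListMigrationWorkflowsRequest.new(
  parent: "projects/my-project/locations/us"
)

result = client.list_migration_workflows request

# Additional pages are fetched transparently as iteration proceeds.
result.each do |item|
  # Each element is a MigrationWorkflow.
  p item.name
end
```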
data/lib/google/cloud/bigquery/migration/v2/migration_service.rb
CHANGED
@@ -34,7 +34,7 @@ module Google
 ##
 # Service to handle EDW migrations.
 #
-#
+# @example Load this service and instantiate a gRPC client
 #
 # require "google/cloud/bigquery/migration/v2/migration_service"
 # client = ::Google::Cloud::Bigquery::Migration::V2::MigrationService::Client.new
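The fixed `@example` tag documents loading the service entry point and instantiating a gRPC client. A hedged sketch that follows it and also overrides one client setting (the timeout value is arbitrary):

```ruby
require "google/cloud/bigquery/migration/v2/migration_service"

# Minimal sketch matching the @example above; the timeout value is arbitrary.
client = ::Google::Cloud::Bigquery::Migration::V2::MigrationService::Client.new do |config|
  config.timeout = 30.0
end
```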
data/lib/google/cloud/bigquery/migration/v2/migration_service_pb.rb
CHANGED
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/migration/v2/migration_service.proto
 
@@ -11,48 +12,33 @@ require 'google/cloud/bigquery/migration/v2/migration_entities_pb'
 require 'google/protobuf/empty_pb'
 require 'google/protobuf/field_mask_pb'
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-add_message "google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest" do
-optional :name, :string, 1
-end
-add_message "google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest" do
-optional :name, :string, 1
-optional :read_mask, :message, 2, "google.protobuf.FieldMask"
-end
-add_message "google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest" do
-optional :parent, :string, 1
-optional :read_mask, :message, 2, "google.protobuf.FieldMask"
-optional :page_size, :int32, 3
-optional :page_token, :string, 4
-optional :filter, :string, 5
-end
-add_message "google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse" do
-repeated :migration_subtasks, :message, 1, "google.cloud.bigquery.migration.v2.MigrationSubtask"
-optional :next_page_token, :string, 2
+
descriptor_data = "\n:google/cloud/bigquery/migration/v2/migration_service.proto\x12\"google.cloud.bigquery.migration.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a;google/cloud/bigquery/migration/v2/migration_entities.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\"\xb3\x01\n\x1e\x43reateMigrationWorkflowRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12V\n\x12migration_workflow\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.migration.v2.MigrationWorkflowB\x03\xe0\x41\x02\"\x96\x01\n\x1bGetMigrationWorkflowRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerymigration.googleapis.com/MigrationWorkflow\x12-\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xb0\x01\n\x1dListMigrationWorkflowsRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12-\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\"\x8d\x01\n\x1eListMigrationWorkflowsResponse\x12R\n\x13migration_workflows\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.migration.v2.MigrationWorkflow\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"j\n\x1e\x44\x65leteMigrationWorkflowRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerymigration.googleapis.com/MigrationWorkflow\"i\n\x1dStartMigrationWorkflowRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerymigration.googleapis.com/MigrationWorkflow\"\x99\x01\n\x1aGetMigrationSubtaskRequest\x12G\n\x04name\x18\x01 \x01(\tB9\xe0\x41\x02\xfa\x41\x33\n1bigquerymigration.googleapis.com/MigrationSubtask\x12\x32\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x01\"\xe4\x01\n\x1cListMigrationSubtasksRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerymigration.googleapis.com/MigrationWorkflow\x12\x32\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\"\x8a\x01\n\x1dListMigrationSubtasksResponse\x12P\n\x12migration_subtasks\x18\x01 \x03(\x0b\x32\x34.google.cloud.bigquery.migration.v2.MigrationSubtask\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xcb\x0c\n\x10MigrationService\x12\xfb\x01\n\x17\x43reateMigrationWorkflow\x12\x42.google.cloud.bigquery.migration.v2.CreateMigrationWorkflowRequest\x1a\x35.google.cloud.bigquery.migration.v2.MigrationWorkflow\"e\x82\xd3\xe4\x93\x02\x43\"-/v2/{parent=projects/*/locations/*}/workflows:\x12migration_workflow\xda\x41\x19parent,migration_workflow\x12\xcc\x01\n\x14GetMigrationWorkflow\x12?.google.cloud.bigquery.migration.v2.GetMigrationWorkflowRequest\x1a\x35.google.cloud.bigquery.migration.v2.MigrationWorkflow\"<\x82\xd3\xe4\x93\x02/\x12-/v2/{name=projects/*/locations/*/workflows/*}\xda\x41\x04name\x12\xdf\x01\n\x16ListMigrationWorkflows\x12\x41.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsRequest\x1a\x42.google.cloud.bigquery.migration.v2.ListMigrationWorkflowsResponse\">\x82\xd3\xe4\x93\x02/\x12-/v2/{parent=projects/*/locations/*}/workflows\xda\x41\x06parent\x12\xb3\x01\n\x17\x44\x65leteMigrationWorkflow\x12\x42.google.cloud.bigquery.migration.v2.DeleteMigrationWorkflowRequest\x1a\x16.google.protobuf.Empty\"<\x82\xd3\xe4\x93\x02/*-/v2/{name=projects/*/locations/*/workflows/*}\xda\x41\x04name\x12\xba\x01\n\x16StartMigrationWorkflow\x12\x41.google.cloud.bigquery.migration.v2.StartMigrationWorkflowRequest\x1a\x16.google.protobuf.Empty\"E\x82\xd3\xe4\x93\x02\x38\"3/v2/{name=projects/*/locations/*/workflows/*}:start:\x01*\xda\x41\x04name\x12\xd4\x01\n\x13GetMigrationSubtask\x12>.google.cloud.bigquery.migration.v2.GetMigrationSubtaskRequest\x1a\x34.google.cloud.bigquery.migration.v2.MigrationSubtask\"G\x82\xd3\xe4\x93\x02:\x12\x38/v2/{name=projects/*/locations/*/workflows/*/subtasks/*}\xda\x41\x04name\x12\xe7\x01\n\x15ListMigrationSubtasks\x12@.google.cloud.bigquery.migration.v2.ListMigrationSubtasksRequest\x1a\x41.google.cloud.bigquery.migration.v2.ListMigrationSubtasksResponse\"I\x82\xd3\xe4\x93\x02:\x12\x38/v2/{parent=projects/*/locations/*/workflows/*}/subtasks\xda\x41\x06parent\x1aT\xca\x41 bigquerymigration.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xd1\x01\n&com.google.cloud.bigquery.migration.v2B\x15MigrationServiceProtoP\x01ZDcloud.google.com/go/bigquery/migration/apiv2/migrationpb;migrationpb\xaa\x02\"Google.Cloud.BigQuery.Migration.V2\xca\x02\"Google\\Cloud\\BigQuery\\Migration\\V2b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+
+begin
+  pool.add_serialized_file(descriptor_data)
+rescue TypeError => e
+  # Compatibility code: will be removed in the next major version.
+  require 'google/protobuf/descriptor_pb'
+  parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+  parsed.clear_dependency
+  serialized = parsed.class.encode(parsed)
+  file = pool.add_serialized_file(serialized)
+  warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+  imports = [
+    ["google.cloud.bigquery.migration.v2.MigrationWorkflow", "google/cloud/bigquery/migration/v2/migration_entities.proto"],
+    ["google.protobuf.FieldMask", "google/protobuf/field_mask.proto"],
+  ]
+  imports.each do |type_name, expected_filename|
+    import_file = pool.lookup(type_name).file_descriptor
+    if import_file.name != expected_filename
+      warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
     end
   end
+  warn "Each proto file must use a consistent fully-qualified name."
+  warn "This will become an error in the next major version."
 end
 
 module Google
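The regenerated service descriptor marks `read_mask`, `page_size`, `page_token`, and `filter` on `ListMigrationSubtasksRequest` as optional. A hedged sketch of building that request message (resource names and values are placeholders):

```ruby
require "google/cloud/bigquery/migration/v2/migration_service_pb"

# Minimal sketch: a ListMigrationSubtasksRequest using the optional paging and
# filter fields from the descriptor above. Names are placeholders.
request = Google::Cloud::Bigquery::Migration::V2::ListMigrationSubtasksRequest.new(
  parent:    "projects/my-project/locations/us/workflows/my-workflow",
  page_size: 50,
  filter:    'migration_task = "ab012"'
)
puts request.filter
```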
data/lib/google/cloud/bigquery/migration/v2/translation_config_pb.rb
CHANGED
@@ -1,111 +1,34 @@
+# frozen_string_literal: true
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/bigquery/migration/v2/translation_config.proto
 
 require 'google/protobuf'
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-optional :teradata_dialect, :message, 4, "google.cloud.bigquery.migration.v2.TeradataDialect"
-optional :oracle_dialect, :message, 5, "google.cloud.bigquery.migration.v2.OracleDialect"
-optional :sparksql_dialect, :message, 6, "google.cloud.bigquery.migration.v2.SparkSQLDialect"
-optional :snowflake_dialect, :message, 7, "google.cloud.bigquery.migration.v2.SnowflakeDialect"
-optional :netezza_dialect, :message, 8, "google.cloud.bigquery.migration.v2.NetezzaDialect"
-optional :azure_synapse_dialect, :message, 9, "google.cloud.bigquery.migration.v2.AzureSynapseDialect"
-optional :vertica_dialect, :message, 10, "google.cloud.bigquery.migration.v2.VerticaDialect"
-optional :sql_server_dialect, :message, 11, "google.cloud.bigquery.migration.v2.SQLServerDialect"
-optional :postgresql_dialect, :message, 12, "google.cloud.bigquery.migration.v2.PostgresqlDialect"
-optional :presto_dialect, :message, 13, "google.cloud.bigquery.migration.v2.PrestoDialect"
-optional :mysql_dialect, :message, 14, "google.cloud.bigquery.migration.v2.MySQLDialect"
-end
-end
-add_message "google.cloud.bigquery.migration.v2.BigQueryDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.HiveQLDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.RedshiftDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.TeradataDialect" do
-optional :mode, :enum, 1, "google.cloud.bigquery.migration.v2.TeradataDialect.Mode"
-end
-add_enum "google.cloud.bigquery.migration.v2.TeradataDialect.Mode" do
-value :MODE_UNSPECIFIED, 0
-value :SQL, 1
-value :BTEQ, 2
-end
-add_message "google.cloud.bigquery.migration.v2.OracleDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.SparkSQLDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.SnowflakeDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.NetezzaDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.AzureSynapseDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.VerticaDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.SQLServerDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.PostgresqlDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.PrestoDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.MySQLDialect" do
-end
-add_message "google.cloud.bigquery.migration.v2.ObjectNameMappingList" do
-repeated :name_map, :message, 1, "google.cloud.bigquery.migration.v2.ObjectNameMapping"
-end
-add_message "google.cloud.bigquery.migration.v2.ObjectNameMapping" do
-optional :source, :message, 1, "google.cloud.bigquery.migration.v2.NameMappingKey"
-optional :target, :message, 2, "google.cloud.bigquery.migration.v2.NameMappingValue"
-end
-add_message "google.cloud.bigquery.migration.v2.NameMappingKey" do
-optional :type, :enum, 1, "google.cloud.bigquery.migration.v2.NameMappingKey.Type"
-optional :database, :string, 2
-optional :schema, :string, 3
-optional :relation, :string, 4
-optional :attribute, :string, 5
-end
-add_enum "google.cloud.bigquery.migration.v2.NameMappingKey.Type" do
-value :TYPE_UNSPECIFIED, 0
-value :DATABASE, 1
-value :SCHEMA, 2
-value :RELATION, 3
-value :ATTRIBUTE, 4
-value :RELATION_ALIAS, 5
-value :ATTRIBUTE_ALIAS, 6
-value :FUNCTION, 7
-end
-add_message "google.cloud.bigquery.migration.v2.NameMappingValue" do
-optional :database, :string, 1
-optional :schema, :string, 2
-optional :relation, :string, 3
-optional :attribute, :string, 4
-end
-add_message "google.cloud.bigquery.migration.v2.SourceEnv" do
-optional :default_database, :string, 1
-repeated :schema_search_path, :string, 2
+
descriptor_data = "\n;google/cloud/bigquery/migration/v2/translation_config.proto\x12\"google.cloud.bigquery.migration.v2\"\xca\x03\n\x18TranslationConfigDetails\x12\x19\n\x0fgcs_source_path\x18\x01 \x01(\tH\x00\x12\x19\n\x0fgcs_target_path\x18\x02 \x01(\tH\x01\x12V\n\x11name_mapping_list\x18\x05 \x01(\x0b\x32\x39.google.cloud.bigquery.migration.v2.ObjectNameMappingListH\x02\x12\x43\n\x0esource_dialect\x18\x03 \x01(\x0b\x32+.google.cloud.bigquery.migration.v2.Dialect\x12\x43\n\x0etarget_dialect\x18\x04 \x01(\x0b\x32+.google.cloud.bigquery.migration.v2.Dialect\x12\x41\n\nsource_env\x18\x06 \x01(\x0b\x32-.google.cloud.bigquery.migration.v2.SourceEnv\x12\x16\n\x0erequest_source\x18\x08 \x01(\tB\x11\n\x0fsource_locationB\x11\n\x0ftarget_locationB\x15\n\x13output_name_mapping\"\x84\t\n\x07\x44ialect\x12O\n\x10\x62igquery_dialect\x18\x01 \x01(\x0b\x32\x33.google.cloud.bigquery.migration.v2.BigQueryDialectH\x00\x12K\n\x0ehiveql_dialect\x18\x02 \x01(\x0b\x32\x31.google.cloud.bigquery.migration.v2.HiveQLDialectH\x00\x12O\n\x10redshift_dialect\x18\x03 \x01(\x0b\x32\x33.google.cloud.bigquery.migration.v2.RedshiftDialectH\x00\x12O\n\x10teradata_dialect\x18\x04 \x01(\x0b\x32\x33.google.cloud.bigquery.migration.v2.TeradataDialectH\x00\x12K\n\x0eoracle_dialect\x18\x05 \x01(\x0b\x32\x31.google.cloud.bigquery.migration.v2.OracleDialectH\x00\x12O\n\x10sparksql_dialect\x18\x06 \x01(\x0b\x32\x33.google.cloud.bigquery.migration.v2.SparkSQLDialectH\x00\x12Q\n\x11snowflake_dialect\x18\x07 \x01(\x0b\x32\x34.google.cloud.bigquery.migration.v2.SnowflakeDialectH\x00\x12M\n\x0fnetezza_dialect\x18\x08 \x01(\x0b\x32\x32.google.cloud.bigquery.migration.v2.NetezzaDialectH\x00\x12X\n\x15\x61zure_synapse_dialect\x18\t \x01(\x0b\x32\x37.google.cloud.bigquery.migration.v2.AzureSynapseDialectH\x00\x12M\n\x0fvertica_dialect\x18\n \x01(\x0b\x32\x32.google.cloud.bigquery.migration.v2.VerticaDialectH\x00\x12R\n\x12sql_server_dialect\x18\x0b \x01(\x0b\x32\x34.google.cloud.bigquery.migration.v2.SQLServerDialectH\x00\x12S\n\x12postgresql_dialect\x18\x0c \x01(\x0b\x32\x35.google.cloud.bigquery.migration.v2.PostgresqlDialectH\x00\x12K\n\x0epresto_dialect\x18\r \x01(\x0b\x32\x31.google.cloud.bigquery.migration.v2.PrestoDialectH\x00\x12I\n\rmysql_dialect\x18\x0e \x01(\x0b\x32\x30.google.cloud.bigquery.migration.v2.MySQLDialectH\x00\x42\x0f\n\rdialect_value\"\x11\n\x0f\x42igQueryDialect\"\x0f\n\rHiveQLDialect\"\x11\n\x0fRedshiftDialect\"\x8a\x01\n\x0fTeradataDialect\x12\x46\n\x04mode\x18\x01 \x01(\x0e\x32\x38.google.cloud.bigquery.migration.v2.TeradataDialect.Mode\"/\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x07\n\x03SQL\x10\x01\x12\x08\n\x04\x42TEQ\x10\x02\"\x0f\n\rOracleDialect\"\x11\n\x0fSparkSQLDialect\"\x12\n\x10SnowflakeDialect\"\x10\n\x0eNetezzaDialect\"\x15\n\x13\x41zureSynapseDialect\"\x10\n\x0eVerticaDialect\"\x12\n\x10SQLServerDialect\"\x13\n\x11PostgresqlDialect\"\x0f\n\rPrestoDialect\"\x0e\n\x0cMySQLDialect\"`\n\x15ObjectNameMappingList\x12G\n\x08name_map\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.migration.v2.ObjectNameMapping\"\x9d\x01\n\x11ObjectNameMapping\x12\x42\n\x06source\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.migration.v2.NameMappingKey\x12\x44\n\x06target\x18\x02 \x01(\x0b\x32\x34.google.cloud.bigquery.migration.v2.NameMappingValue\"\xab\x02\n\x0eNameMappingKey\x12\x45\n\x04type\x18\x01 \x01(\x0e\x32\x37.google.cloud.bigquery.migration.v2.NameMappingKey.Type\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x10\n\x08relation\x18\x04 
\x01(\t\x12\x11\n\tattribute\x18\x05 \x01(\t\"\x8a\x01\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x44\x41TABASE\x10\x01\x12\n\n\x06SCHEMA\x10\x02\x12\x0c\n\x08RELATION\x10\x03\x12\r\n\tATTRIBUTE\x10\x04\x12\x12\n\x0eRELATION_ALIAS\x10\x05\x12\x13\n\x0f\x41TTRIBUTE_ALIAS\x10\x06\x12\x0c\n\x08\x46UNCTION\x10\x07\"Y\n\x10NameMappingValue\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x10\n\x08relation\x18\x03 \x01(\t\x12\x11\n\tattribute\x18\x04 \x01(\t\"A\n\tSourceEnv\x12\x18\n\x10\x64\x65\x66\x61ult_database\x18\x01 \x01(\t\x12\x1a\n\x12schema_search_path\x18\x02 \x03(\tB\xd2\x01\n&com.google.cloud.bigquery.migration.v2B\x16TranslationConfigProtoP\x01ZDcloud.google.com/go/bigquery/migration/apiv2/migrationpb;migrationpb\xaa\x02\"Google.Cloud.BigQuery.Migration.V2\xca\x02\"Google\\Cloud\\BigQuery\\Migration\\V2b\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+
+begin
+  pool.add_serialized_file(descriptor_data)
+rescue TypeError => e
+  # Compatibility code: will be removed in the next major version.
+  require 'google/protobuf/descriptor_pb'
+  parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+  parsed.clear_dependency
+  serialized = parsed.class.encode(parsed)
+  file = pool.add_serialized_file(serialized)
+  warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+  imports = [
+  ]
+  imports.each do |type_name, expected_filename|
+    import_file = pool.lookup(type_name).file_descriptor
+    if import_file.name != expected_filename
+      warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
     end
   end
+  warn "Each proto file must use a consistent fully-qualified name."
+  warn "This will become an error in the next major version."
 end
 
 module Google
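The translation-config descriptor keeps the full dialect oneof (`Dialect`), the object name mapping types, and `SourceEnv`. A hedged sketch of a Teradata SQL to BigQuery `TranslationConfigDetails` built from those messages (the GCS paths are placeholders):

```ruby
require "google/cloud/bigquery/migration/v2/translation_config_pb"

# Minimal sketch: Teradata SQL -> BigQuery translation config, using the
# messages from the descriptor above. GCS paths are placeholders.
v2 = Google::Cloud::Bigquery::Migration::V2

config = v2::TranslationConfigDetails.new(
  gcs_source_path: "gs://my-bucket/input",
  gcs_target_path: "gs://my-bucket/output",
  source_dialect:  v2::Dialect.new(teradata_dialect: v2::TeradataDialect.new(mode: :SQL)),
  target_dialect:  v2::Dialect.new(bigquery_dialect: v2::BigQueryDialect.new)
)
puts config.source_dialect.teradata_dialect.mode
```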