google-cloud-dataproc-v1 0.6.3 → 0.7.0

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (30)
  1. checksums.yaml +4 -4
  2. data/lib/google/cloud/dataproc/v1/autoscaling_policies_pb.rb +3 -2
  3. data/lib/google/cloud/dataproc/v1/autoscaling_policies_services_pb.rb +1 -1
  4. data/lib/google/cloud/dataproc/v1/batch_controller/client.rb +637 -0
  5. data/lib/google/cloud/dataproc/v1/batch_controller/credentials.rb +51 -0
  6. data/lib/google/cloud/dataproc/v1/batch_controller/operations.rb +664 -0
  7. data/lib/google/cloud/dataproc/v1/batch_controller/paths.rb +69 -0
  8. data/lib/google/cloud/dataproc/v1/batch_controller.rb +50 -0
  9. data/lib/google/cloud/dataproc/v1/batches_pb.rb +123 -0
  10. data/lib/google/cloud/dataproc/v1/batches_services_pb.rb +52 -0
  11. data/lib/google/cloud/dataproc/v1/cluster_controller/client.rb +15 -11
  12. data/lib/google/cloud/dataproc/v1/clusters_pb.rb +9 -2
  13. data/lib/google/cloud/dataproc/v1/clusters_services_pb.rb +3 -1
  14. data/lib/google/cloud/dataproc/v1/jobs_pb.rb +2 -2
  15. data/lib/google/cloud/dataproc/v1/jobs_services_pb.rb +1 -1
  16. data/lib/google/cloud/dataproc/v1/operations_pb.rb +18 -3
  17. data/lib/google/cloud/dataproc/v1/shared_pb.rb +40 -2
  18. data/lib/google/cloud/dataproc/v1/version.rb +1 -1
  19. data/lib/google/cloud/dataproc/v1/workflow_template_service/client.rb +1 -2
  20. data/lib/google/cloud/dataproc/v1/workflow_templates_pb.rb +2 -2
  21. data/lib/google/cloud/dataproc/v1/workflow_templates_services_pb.rb +2 -3
  22. data/lib/google/cloud/dataproc/v1.rb +1 -0
  23. data/proto_docs/google/cloud/dataproc/v1/autoscaling_policies.rb +18 -0
  24. data/proto_docs/google/cloud/dataproc/v1/batches.rb +339 -0
  25. data/proto_docs/google/cloud/dataproc/v1/clusters.rb +45 -22
  26. data/proto_docs/google/cloud/dataproc/v1/jobs.rb +8 -9
  27. data/proto_docs/google/cloud/dataproc/v1/operations.rb +48 -0
  28. data/proto_docs/google/cloud/dataproc/v1/shared.rb +117 -1
  29. data/proto_docs/google/cloud/dataproc/v1/workflow_templates.rb +11 -14
  30. metadata +10 -2
@@ -0,0 +1,69 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module Dataproc
+ module V1
+ module BatchController
+ # Path helper methods for the BatchController API.
+ module Paths
+ ##
+ # Create a fully-qualified Batch resource string.
+ #
+ # The resource will be in the following format:
+ #
+ # `projects/{project}/locations/{location}/batches/{batch}`
+ #
+ # @param project [String]
+ # @param location [String]
+ # @param batch [String]
+ #
+ # @return [::String]
+ def batch_path project:, location:, batch:
+ raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+ raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
+
+ "projects/#{project}/locations/#{location}/batches/#{batch}"
+ end
+
+ ##
+ # Create a fully-qualified Location resource string.
+ #
+ # The resource will be in the following format:
+ #
+ # `projects/{project}/locations/{location}`
+ #
+ # @param project [String]
+ # @param location [String]
+ #
+ # @return [::String]
+ def location_path project:, location:
+ raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+
+ "projects/#{project}/locations/#{location}"
+ end
+
+ extend self
+ end
+ end
+ end
+ end
+ end
+ end
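
For orientation, a minimal sketch of how these new path helpers can be used; because the module calls `extend self`, the helpers are callable directly on `Paths`. The project, location, and batch values below are placeholders:

  require "google/cloud/dataproc/v1/batch_controller"

  # Build the resource name expected by GetBatch / DeleteBatch requests.
  name = ::Google::Cloud::Dataproc::V1::BatchController::Paths.batch_path(
    project: "my-project", location: "us-central1", batch: "my-batch"
  )
  # => "projects/my-project/locations/us-central1/batches/my-batch"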
@@ -0,0 +1,50 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+ require "gapic/common"
+ require "gapic/config"
+ require "gapic/config/method"
+
+ require "google/cloud/dataproc/v1/version"
+
+ require "google/cloud/dataproc/v1/batch_controller/credentials"
+ require "google/cloud/dataproc/v1/batch_controller/paths"
+ require "google/cloud/dataproc/v1/batch_controller/operations"
+ require "google/cloud/dataproc/v1/batch_controller/client"
+
+ module Google
+ module Cloud
+ module Dataproc
+ module V1
+ ##
+ # The BatchController provides methods to manage batch workloads.
+ #
+ # To load this service and instantiate a client:
+ #
+ # require "google/cloud/dataproc/v1/batch_controller"
+ # client = ::Google::Cloud::Dataproc::V1::BatchController::Client.new
+ #
+ module BatchController
+ end
+ end
+ end
+ end
+ end
+
+ helper_path = ::File.join __dir__, "batch_controller", "helpers.rb"
+ require "google/cloud/dataproc/v1/batch_controller/helpers" if ::File.file? helper_path
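
The new BatchController::Client itself (batch_controller/client.rb, +637 lines) is not reproduced in this diff. As a hedged sketch based on the usual gapic-generator-ruby client surface, creating a batch workload would look roughly like the following; the resource names and file URI are placeholders, and the exact client method signature lives in the generated client rather than in the hunks shown here:

  require "google/cloud/dataproc/v1/batch_controller"

  client = ::Google::Cloud::Dataproc::V1::BatchController::Client.new

  # Field names follow CreateBatchRequest in batches_pb.rb below.
  request = ::Google::Cloud::Dataproc::V1::CreateBatchRequest.new(
    parent:   "projects/my-project/locations/us-central1",
    batch_id: "my-batch",
    batch:    { pyspark_batch: { main_python_file_uri: "gs://my-bucket/job.py" } }
  )

  # CreateBatch is a long-running operation; the gapic client wraps it in a ::Gapic::Operation.
  operation = client.create_batch request
  operation.wait_until_done!
  batch = operation.response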
@@ -0,0 +1,123 @@
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: google/cloud/dataproc/v1/batches.proto
+
+ require 'google/api/annotations_pb'
+ require 'google/api/client_pb'
+ require 'google/api/field_behavior_pb'
+ require 'google/api/resource_pb'
+ require 'google/cloud/dataproc/v1/shared_pb'
+ require 'google/longrunning/operations_pb'
+ require 'google/protobuf/empty_pb'
+ require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf'
+
+ Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_file("google/cloud/dataproc/v1/batches.proto", :syntax => :proto3) do
+ add_message "google.cloud.dataproc.v1.CreateBatchRequest" do
+ optional :parent, :string, 1
+ optional :batch, :message, 2, "google.cloud.dataproc.v1.Batch"
+ optional :batch_id, :string, 3
+ optional :request_id, :string, 4
+ end
+ add_message "google.cloud.dataproc.v1.GetBatchRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.dataproc.v1.ListBatchesRequest" do
+ optional :parent, :string, 1
+ optional :page_size, :int32, 2
+ optional :page_token, :string, 3
+ end
+ add_message "google.cloud.dataproc.v1.ListBatchesResponse" do
+ repeated :batches, :message, 1, "google.cloud.dataproc.v1.Batch"
+ optional :next_page_token, :string, 2
+ end
+ add_message "google.cloud.dataproc.v1.DeleteBatchRequest" do
+ optional :name, :string, 1
+ end
+ add_message "google.cloud.dataproc.v1.Batch" do
+ optional :name, :string, 1
+ optional :uuid, :string, 2
+ optional :create_time, :message, 3, "google.protobuf.Timestamp"
+ optional :runtime_info, :message, 8, "google.cloud.dataproc.v1.RuntimeInfo"
+ optional :state, :enum, 9, "google.cloud.dataproc.v1.Batch.State"
+ optional :state_message, :string, 10
+ optional :state_time, :message, 11, "google.protobuf.Timestamp"
+ optional :creator, :string, 12
+ map :labels, :string, :string, 13
+ optional :runtime_config, :message, 14, "google.cloud.dataproc.v1.RuntimeConfig"
+ optional :environment_config, :message, 15, "google.cloud.dataproc.v1.EnvironmentConfig"
+ optional :operation, :string, 16
+ repeated :state_history, :message, 17, "google.cloud.dataproc.v1.Batch.StateHistory"
+ oneof :batch_config do
+ optional :pyspark_batch, :message, 4, "google.cloud.dataproc.v1.PySparkBatch"
+ optional :spark_batch, :message, 5, "google.cloud.dataproc.v1.SparkBatch"
+ optional :spark_r_batch, :message, 6, "google.cloud.dataproc.v1.SparkRBatch"
+ optional :spark_sql_batch, :message, 7, "google.cloud.dataproc.v1.SparkSqlBatch"
+ end
+ end
+ add_message "google.cloud.dataproc.v1.Batch.StateHistory" do
+ optional :state, :enum, 1, "google.cloud.dataproc.v1.Batch.State"
+ optional :state_message, :string, 2
+ optional :state_start_time, :message, 3, "google.protobuf.Timestamp"
+ end
+ add_enum "google.cloud.dataproc.v1.Batch.State" do
+ value :STATE_UNSPECIFIED, 0
+ value :PENDING, 1
+ value :RUNNING, 2
+ value :CANCELLING, 3
+ value :CANCELLED, 4
+ value :SUCCEEDED, 5
+ value :FAILED, 6
+ end
+ add_message "google.cloud.dataproc.v1.PySparkBatch" do
+ optional :main_python_file_uri, :string, 1
+ repeated :args, :string, 2
+ repeated :python_file_uris, :string, 3
+ repeated :jar_file_uris, :string, 4
+ repeated :file_uris, :string, 5
+ repeated :archive_uris, :string, 6
+ end
+ add_message "google.cloud.dataproc.v1.SparkBatch" do
+ repeated :args, :string, 3
+ repeated :jar_file_uris, :string, 4
+ repeated :file_uris, :string, 5
+ repeated :archive_uris, :string, 6
+ oneof :driver do
+ optional :main_jar_file_uri, :string, 1
+ optional :main_class, :string, 2
+ end
+ end
+ add_message "google.cloud.dataproc.v1.SparkRBatch" do
+ optional :main_r_file_uri, :string, 1
+ repeated :args, :string, 2
+ repeated :file_uris, :string, 3
+ repeated :archive_uris, :string, 4
+ end
+ add_message "google.cloud.dataproc.v1.SparkSqlBatch" do
+ optional :query_file_uri, :string, 1
+ map :query_variables, :string, :string, 2
+ repeated :jar_file_uris, :string, 3
+ end
+ end
+ end
+
+ module Google
+ module Cloud
+ module Dataproc
+ module V1
+ CreateBatchRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.CreateBatchRequest").msgclass
+ GetBatchRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GetBatchRequest").msgclass
+ ListBatchesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ListBatchesRequest").msgclass
+ ListBatchesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ListBatchesResponse").msgclass
+ DeleteBatchRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.DeleteBatchRequest").msgclass
+ Batch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Batch").msgclass
+ Batch::StateHistory = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Batch.StateHistory").msgclass
+ Batch::State = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Batch.State").enummodule
+ PySparkBatch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.PySparkBatch").msgclass
+ SparkBatch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SparkBatch").msgclass
+ SparkRBatch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SparkRBatch").msgclass
+ SparkSqlBatch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SparkSqlBatch").msgclass
+ end
+ end
+ end
+ end
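
These generated message classes can also be built directly. A small sketch with placeholder values; the oneof accessor behavior is standard google-protobuf Ruby:

  pyspark = ::Google::Cloud::Dataproc::V1::PySparkBatch.new(
    main_python_file_uri: "gs://my-bucket/job.py",
    args: ["--input", "gs://my-bucket/data.csv"]
  )
  batch = ::Google::Cloud::Dataproc::V1::Batch.new(pyspark_batch: pyspark)

  batch.batch_config # => :pyspark_batch  (which member of the oneof is set)
  batch.state        # => :STATE_UNSPECIFIED (enum default)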
@@ -0,0 +1,52 @@
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # Source: google/cloud/dataproc/v1/batches.proto for package 'google.cloud.dataproc.v1'
+ # Original file comments:
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+
+ require 'grpc'
+ require 'google/cloud/dataproc/v1/batches_pb'
+
+ module Google
+ module Cloud
+ module Dataproc
+ module V1
+ module BatchController
+ # The BatchController provides methods to manage batch workloads.
+ class Service
+
+ include ::GRPC::GenericService
+
+ self.marshal_class_method = :encode
+ self.unmarshal_class_method = :decode
+ self.service_name = 'google.cloud.dataproc.v1.BatchController'
+
+ # Creates a batch workload that executes asynchronously.
+ rpc :CreateBatch, ::Google::Cloud::Dataproc::V1::CreateBatchRequest, ::Google::Longrunning::Operation
+ # Gets the batch workload resource representation.
+ rpc :GetBatch, ::Google::Cloud::Dataproc::V1::GetBatchRequest, ::Google::Cloud::Dataproc::V1::Batch
+ # Lists batch workloads.
+ rpc :ListBatches, ::Google::Cloud::Dataproc::V1::ListBatchesRequest, ::Google::Cloud::Dataproc::V1::ListBatchesResponse
+ # Deletes the batch workload resource. If the batch is not in terminal state,
+ # the delete fails and the response returns `FAILED_PRECONDITION`.
+ rpc :DeleteBatch, ::Google::Cloud::Dataproc::V1::DeleteBatchRequest, ::Google::Protobuf::Empty
+ end
+
+ Stub = Service.rpc_stub_class
+ end
+ end
+ end
+ end
+ end
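
Most callers will not use this low-level Service/Stub directly; the generated client wraps each RPC, and list methods return a paged enumerable. A hedged sketch based on standard gapic-generator-ruby behavior, with a placeholder parent:

  client = ::Google::Cloud::Dataproc::V1::BatchController::Client.new

  # list_batches wraps the ListBatches RPC and pages through results transparently.
  client.list_batches(parent: "projects/my-project/locations/us-central1").each do |batch|
    puts "#{batch.name}: #{batch.state}"
  end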
@@ -202,7 +202,7 @@ module Google
  # @param options [::Gapic::CallOptions, ::Hash]
  # Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
  #
- # @overload create_cluster(project_id: nil, region: nil, cluster: nil, request_id: nil)
+ # @overload create_cluster(project_id: nil, region: nil, cluster: nil, request_id: nil, action_on_failed_primary_workers: nil)
  # Pass arguments to `create_cluster` via keyword arguments. Note that at
  # least one keyword argument is required. To specify no parameters, or to keep all
  # the default parameter values, pass an empty Hash as a request object (see above).
@@ -215,7 +215,7 @@ module Google
  # @param cluster [::Google::Cloud::Dataproc::V1::Cluster, ::Hash]
  # Required. The cluster to create.
  # @param request_id [::String]
- # Optional. A unique id used to identify the request. If the server receives two
+ # Optional. A unique ID used to identify the request. If the server receives two
  # [CreateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s
  # with the same id, then the second request will be ignored and the
  # first {::Google::Longrunning::Operation google.longrunning.Operation} created and stored in the backend
@@ -224,8 +224,10 @@ module Google
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
+ # @param action_on_failed_primary_workers [::Google::Cloud::Dataproc::V1::FailureAction]
+ # Optional. Failure action when primary worker creation fails.
  #
  # @yield [response, operation] Access the result along with the RPC operation
  # @yieldparam response [::Gapic::Operation]
@@ -280,6 +282,8 @@ module Google
  # Updates a cluster in a project. The returned
  # {::Google::Longrunning::Operation#metadata Operation.metadata} will be
  # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+ # The cluster must be in a {::Google::Cloud::Dataproc::V1::ClusterStatus::State `RUNNING`} state or an error
+ # is returned.
  #
  # @overload update_cluster(request, options = nil)
  # Pass arguments to `update_cluster` via a request object, either of type
@@ -368,7 +372,7 @@ module Google
  # </tbody>
  # </table>
  # @param request_id [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [UpdateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -378,7 +382,7 @@ module Google
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  #
  # @yield [response, operation] Access the result along with the RPC operation
@@ -460,7 +464,7 @@ module Google
  # Optional. Specifying the `cluster_uuid` means the RPC will fail
  # (with error NOT_FOUND) if a cluster with the specified UUID does not exist.
  # @param request_id [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [StopClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -470,7 +474,7 @@ module Google
  # Recommendation: Set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  #
  # @yield [response, operation] Access the result along with the RPC operation
@@ -552,7 +556,7 @@ module Google
  # Optional. Specifying the `cluster_uuid` means the RPC will fail
  # (with error NOT_FOUND) if a cluster with the specified UUID does not exist.
  # @param request_id [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [StartClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -562,7 +566,7 @@ module Google
  # Recommendation: Set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  #
  # @yield [response, operation] Access the result along with the RPC operation
@@ -646,7 +650,7 @@ module Google
  # Optional. Specifying the `cluster_uuid` means the RPC should fail
  # (with error NOT_FOUND) if cluster with specified UUID does not exist.
  # @param request_id [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [DeleteClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -656,7 +660,7 @@ module Google
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  #
  # @yield [response, operation] Access the result along with the RPC operation
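
The new action_on_failed_primary_workers keyword on create_cluster takes one of the FailureAction values added in shared_pb.rb (:NO_ACTION or :DELETE). A hedged sketch with placeholder values; the cluster hash fields come from the Cluster message, which is not shown in full in this diff:

  client = ::Google::Cloud::Dataproc::V1::ClusterController::Client.new

  # Delete the cluster automatically if its primary workers fail to be created.
  operation = client.create_cluster(
    project_id: "my-project",
    region: "us-central1",
    cluster: { project_id: "my-project", cluster_name: "my-cluster" },
    action_on_failed_primary_workers: :DELETE
  )
  operation.wait_until_done!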
@@ -1,8 +1,6 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/dataproc/v1/clusters.proto

- require 'google/protobuf'
-
  require 'google/api/annotations_pb'
  require 'google/api/client_pb'
  require 'google/api/field_behavior_pb'
@@ -12,6 +10,8 @@ require 'google/longrunning/operations_pb'
  require 'google/protobuf/duration_pb'
  require 'google/protobuf/field_mask_pb'
  require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/dataproc/v1/clusters.proto", :syntax => :proto3) do
  add_message "google.cloud.dataproc.v1.Cluster" do
@@ -71,6 +71,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :reservation_affinity, :message, 11, "google.cloud.dataproc.v1.ReservationAffinity"
  optional :node_group_affinity, :message, 13, "google.cloud.dataproc.v1.NodeGroupAffinity"
  optional :shielded_instance_config, :message, 14, "google.cloud.dataproc.v1.ShieldedInstanceConfig"
+ optional :confidential_instance_config, :message, 15, "google.cloud.dataproc.v1.ConfidentialInstanceConfig"
  end
  add_enum "google.cloud.dataproc.v1.GceClusterConfig.PrivateIpv6GoogleAccess" do
  value :PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED, 0
@@ -86,6 +87,9 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :enable_vtpm, :bool, 2
  optional :enable_integrity_monitoring, :bool, 3
  end
+ add_message "google.cloud.dataproc.v1.ConfidentialInstanceConfig" do
+ optional :enable_confidential_compute, :bool, 1
+ end
  add_message "google.cloud.dataproc.v1.InstanceGroupConfig" do
  optional :num_instances, :int32, 1
  repeated :instance_names, :string, 2
@@ -131,6 +135,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  value :CREATING, 1
  value :RUNNING, 2
  value :ERROR, 3
+ value :ERROR_DUE_TO_UPDATE, 9
  value :DELETING, 4
  value :UPDATING, 5
  value :STOPPING, 6
@@ -191,6 +196,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :region, :string, 3
  optional :cluster, :message, 2, "google.cloud.dataproc.v1.Cluster"
  optional :request_id, :string, 4
+ optional :action_on_failed_primary_workers, :enum, 5, "google.cloud.dataproc.v1.FailureAction"
  end
  add_message "google.cloud.dataproc.v1.UpdateClusterRequest" do
  optional :project_id, :string, 1
@@ -275,6 +281,7 @@ module Google
  GceClusterConfig::PrivateIpv6GoogleAccess = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GceClusterConfig.PrivateIpv6GoogleAccess").enummodule
  NodeGroupAffinity = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.NodeGroupAffinity").msgclass
  ShieldedInstanceConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ShieldedInstanceConfig").msgclass
+ ConfidentialInstanceConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ConfidentialInstanceConfig").msgclass
  InstanceGroupConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.InstanceGroupConfig").msgclass
  InstanceGroupConfig::Preemptibility = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.InstanceGroupConfig.Preemptibility").enummodule
  ManagedGroupConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ManagedGroupConfig").msgclass
@@ -28,7 +28,7 @@ module Google
  # of Compute Engine instances.
  class Service

- include GRPC::GenericService
+ include ::GRPC::GenericService

  self.marshal_class_method = :encode
  self.unmarshal_class_method = :decode
@@ -41,6 +41,8 @@ module Google
  # Updates a cluster in a project. The returned
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
  # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+ # The cluster must be in a [`RUNNING`][google.cloud.dataproc.v1.ClusterStatus.State] state or an error
+ # is returned.
  rpc :UpdateCluster, ::Google::Cloud::Dataproc::V1::UpdateClusterRequest, ::Google::Longrunning::Operation
  # Stops a cluster in a project.
  rpc :StopCluster, ::Google::Cloud::Dataproc::V1::StopClusterRequest, ::Google::Longrunning::Operation
@@ -1,8 +1,6 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/dataproc/v1/jobs.proto

- require 'google/protobuf'
-
  require 'google/api/annotations_pb'
  require 'google/api/client_pb'
  require 'google/api/field_behavior_pb'
@@ -10,6 +8,8 @@ require 'google/longrunning/operations_pb'
  require 'google/protobuf/empty_pb'
  require 'google/protobuf/field_mask_pb'
  require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/dataproc/v1/jobs.proto", :syntax => :proto3) do
  add_message "google.cloud.dataproc.v1.LoggingConfig" do
@@ -27,7 +27,7 @@ module Google
  # The JobController provides methods to manage jobs.
  class Service

- include GRPC::GenericService
+ include ::GRPC::GenericService

  self.marshal_class_method = :encode
  self.unmarshal_class_method = :decode
@@ -1,13 +1,26 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/dataproc/v1/operations.proto

- require 'google/protobuf'
-
  require 'google/api/field_behavior_pb'
  require 'google/protobuf/timestamp_pb'
- require 'google/api/annotations_pb'
+ require 'google/protobuf'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/dataproc/v1/operations.proto", :syntax => :proto3) do
+ add_message "google.cloud.dataproc.v1.BatchOperationMetadata" do
+ optional :batch, :string, 1
+ optional :batch_uuid, :string, 2
+ optional :create_time, :message, 3, "google.protobuf.Timestamp"
+ optional :done_time, :message, 4, "google.protobuf.Timestamp"
+ optional :operation_type, :enum, 6, "google.cloud.dataproc.v1.BatchOperationMetadata.BatchOperationType"
+ optional :description, :string, 7
+ map :labels, :string, :string, 8
+ repeated :warnings, :string, 9
+ end
+ add_enum "google.cloud.dataproc.v1.BatchOperationMetadata.BatchOperationType" do
+ value :BATCH_OPERATION_TYPE_UNSPECIFIED, 0
+ value :BATCH, 1
+ end
  add_message "google.cloud.dataproc.v1.ClusterOperationStatus" do
  optional :state, :enum, 1, "google.cloud.dataproc.v1.ClusterOperationStatus.State"
  optional :inner_state, :string, 2
@@ -37,6 +50,8 @@ module Google
  module Cloud
  module Dataproc
  module V1
+ BatchOperationMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.BatchOperationMetadata").msgclass
+ BatchOperationMetadata::BatchOperationType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.BatchOperationMetadata.BatchOperationType").enummodule
  ClusterOperationStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ClusterOperationStatus").msgclass
  ClusterOperationStatus::State = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ClusterOperationStatus.State").enummodule
  ClusterOperationMetadata = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ClusterOperationMetadata").msgclass
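
BatchOperationMetadata is the metadata type attached to the CreateBatch long-running operation. A hedged sketch, continuing the BatchController example above and assuming the gapic operation wrapper decodes the registered metadata type:

  operation = client.create_batch request
  metadata = operation.metadata   # a ::Google::Cloud::Dataproc::V1::BatchOperationMetadata
  puts metadata.batch             # resource name of the batch being created
  puts metadata.operation_type    # => :BATCH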
@@ -1,12 +1,38 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/dataproc/v1/shared.proto

+ require 'google/api/field_behavior_pb'
  require 'google/protobuf'

- require 'google/api/annotations_pb'
- require 'google/api/field_behavior_pb'
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/dataproc/v1/shared.proto", :syntax => :proto3) do
+ add_message "google.cloud.dataproc.v1.RuntimeConfig" do
+ map :properties, :string, :string, 3
+ end
+ add_message "google.cloud.dataproc.v1.EnvironmentConfig" do
+ optional :execution_config, :message, 1, "google.cloud.dataproc.v1.ExecutionConfig"
+ optional :peripherals_config, :message, 2, "google.cloud.dataproc.v1.PeripheralsConfig"
+ end
+ add_message "google.cloud.dataproc.v1.ExecutionConfig" do
+ optional :service_account, :string, 2
+ repeated :network_tags, :string, 6
+ optional :kms_key, :string, 7
+ oneof :network do
+ optional :network_uri, :string, 4
+ optional :subnetwork_uri, :string, 5
+ end
+ end
+ add_message "google.cloud.dataproc.v1.SparkHistoryServerConfig" do
+ optional :dataproc_cluster, :string, 1
+ end
+ add_message "google.cloud.dataproc.v1.PeripheralsConfig" do
+ optional :metastore_service, :string, 1
+ optional :spark_history_server_config, :message, 2, "google.cloud.dataproc.v1.SparkHistoryServerConfig"
+ end
+ add_message "google.cloud.dataproc.v1.RuntimeInfo" do
+ map :endpoints, :string, :string, 1
+ optional :output_uri, :string, 2
+ end
  add_enum "google.cloud.dataproc.v1.Component" do
  value :COMPONENT_UNSPECIFIED, 0
  value :ANACONDA, 5
@@ -22,6 +48,11 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  value :ZEPPELIN, 4
  value :ZOOKEEPER, 8
  end
+ add_enum "google.cloud.dataproc.v1.FailureAction" do
+ value :FAILURE_ACTION_UNSPECIFIED, 0
+ value :NO_ACTION, 1
+ value :DELETE, 2
+ end
  end
  end

@@ -29,7 +60,14 @@ module Google
  module Cloud
  module Dataproc
  module V1
+ RuntimeConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.RuntimeConfig").msgclass
+ EnvironmentConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.EnvironmentConfig").msgclass
+ ExecutionConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ExecutionConfig").msgclass
+ SparkHistoryServerConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SparkHistoryServerConfig").msgclass
+ PeripheralsConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.PeripheralsConfig").msgclass
+ RuntimeInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.RuntimeInfo").msgclass
  Component = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Component").enummodule
+ FailureAction = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.FailureAction").enummodule
  end
  end
  end
@@ -21,7 +21,7 @@ module Google
  module Cloud
  module Dataproc
  module V1
- VERSION = "0.6.3"
+ VERSION = "0.7.0"
  end
  end
  end
@@ -477,8 +477,7 @@ module Google
  # Instantiates a template and begins execution.
  #
  # This method is equivalent to executing the sequence
- # {::Google::Cloud::Dataproc::V1::WorkflowTemplateService::Client#create_workflow_template CreateWorkflowTemplate},
- # {::Google::Cloud::Dataproc::V1::WorkflowTemplateService::Client#instantiate_workflow_template InstantiateWorkflowTemplate},
+ # {::Google::Cloud::Dataproc::V1::WorkflowTemplateService::Client#create_workflow_template CreateWorkflowTemplate}, {::Google::Cloud::Dataproc::V1::WorkflowTemplateService::Client#instantiate_workflow_template InstantiateWorkflowTemplate},
  # {::Google::Cloud::Dataproc::V1::WorkflowTemplateService::Client#delete_workflow_template DeleteWorkflowTemplate}.
  #
  # The returned Operation can be used to track execution of
@@ -1,8 +1,6 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/dataproc/v1/workflow_templates.proto

- require 'google/protobuf'
-
  require 'google/api/annotations_pb'
  require 'google/api/client_pb'
  require 'google/api/field_behavior_pb'
@@ -13,6 +11,8 @@ require 'google/longrunning/operations_pb'
  require 'google/protobuf/duration_pb'
  require 'google/protobuf/empty_pb'
  require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/dataproc/v1/workflow_templates.proto", :syntax => :proto3) do
  add_message "google.cloud.dataproc.v1.WorkflowTemplate" do