google-cloud-dataproc-v1 0.6.3 → 0.7.0

Files changed (30)
  1. checksums.yaml +4 -4
  2. data/lib/google/cloud/dataproc/v1/autoscaling_policies_pb.rb +3 -2
  3. data/lib/google/cloud/dataproc/v1/autoscaling_policies_services_pb.rb +1 -1
  4. data/lib/google/cloud/dataproc/v1/batch_controller/client.rb +637 -0
  5. data/lib/google/cloud/dataproc/v1/batch_controller/credentials.rb +51 -0
  6. data/lib/google/cloud/dataproc/v1/batch_controller/operations.rb +664 -0
  7. data/lib/google/cloud/dataproc/v1/batch_controller/paths.rb +69 -0
  8. data/lib/google/cloud/dataproc/v1/batch_controller.rb +50 -0
  9. data/lib/google/cloud/dataproc/v1/batches_pb.rb +123 -0
  10. data/lib/google/cloud/dataproc/v1/batches_services_pb.rb +52 -0
  11. data/lib/google/cloud/dataproc/v1/cluster_controller/client.rb +15 -11
  12. data/lib/google/cloud/dataproc/v1/clusters_pb.rb +9 -2
  13. data/lib/google/cloud/dataproc/v1/clusters_services_pb.rb +3 -1
  14. data/lib/google/cloud/dataproc/v1/jobs_pb.rb +2 -2
  15. data/lib/google/cloud/dataproc/v1/jobs_services_pb.rb +1 -1
  16. data/lib/google/cloud/dataproc/v1/operations_pb.rb +18 -3
  17. data/lib/google/cloud/dataproc/v1/shared_pb.rb +40 -2
  18. data/lib/google/cloud/dataproc/v1/version.rb +1 -1
  19. data/lib/google/cloud/dataproc/v1/workflow_template_service/client.rb +1 -2
  20. data/lib/google/cloud/dataproc/v1/workflow_templates_pb.rb +2 -2
  21. data/lib/google/cloud/dataproc/v1/workflow_templates_services_pb.rb +2 -3
  22. data/lib/google/cloud/dataproc/v1.rb +1 -0
  23. data/proto_docs/google/cloud/dataproc/v1/autoscaling_policies.rb +18 -0
  24. data/proto_docs/google/cloud/dataproc/v1/batches.rb +339 -0
  25. data/proto_docs/google/cloud/dataproc/v1/clusters.rb +45 -22
  26. data/proto_docs/google/cloud/dataproc/v1/jobs.rb +8 -9
  27. data/proto_docs/google/cloud/dataproc/v1/operations.rb +48 -0
  28. data/proto_docs/google/cloud/dataproc/v1/shared.rb +117 -1
  29. data/proto_docs/google/cloud/dataproc/v1/workflow_templates.rb +11 -14
  30. metadata +10 -2
@@ -28,7 +28,7 @@ module Google
  # Dataproc API.
  class Service

- include GRPC::GenericService
+ include ::GRPC::GenericService

  self.marshal_class_method = :encode
  self.unmarshal_class_method = :decode
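
The only change here is the leading `::`. In Ruby, an unqualified constant reference inside a nested module walks the enclosing lexical scopes before reaching the top level, so a sibling constant could shadow the grpc gem. A minimal sketch of the hazard (the nested `GRPC` module below is hypothetical, defined only to illustrate):

module Google
  module GRPC; end # hypothetical sibling that shadows the top-level grpc gem

  module Cloud
    module Dataproc
      # GRPC::GenericService   -- lexical lookup finds Google::GRPC here and
      #                           raises NameError (our stub has no GenericService)
      # ::GRPC::GenericService -- always resolves to the top-level grpc gem
    end
  end
end
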
@@ -65,8 +65,7 @@ module Google
  # Instantiates a template and begins execution.
  #
  # This method is equivalent to executing the sequence
- # [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate],
- # [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+ # [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
  # [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
  #
  # The returned Operation can be used to track execution of
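
For context, a hedged sketch of the call this doc comment belongs to, the inline-instantiate RPC; the parent path is a placeholder and `template` is a WorkflowTemplate message built elsewhere:

wt_client = ::Google::Cloud::Dataproc::V1::WorkflowTemplateService::Client.new

operation = wt_client.instantiate_inline_workflow_template(
  parent:   "projects/my-project/regions/us-central1", # hypothetical
  template: template
)
operation.wait_until_done! # the returned Operation tracks workflow execution
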
@@ -17,6 +17,7 @@
  # Auto-generated by gapic-generator-ruby. DO NOT EDIT!

  require "google/cloud/dataproc/v1/autoscaling_policy_service"
+ require "google/cloud/dataproc/v1/batch_controller"
  require "google/cloud/dataproc/v1/cluster_controller"
  require "google/cloud/dataproc/v1/job_controller"
  require "google/cloud/dataproc/v1/workflow_template_service"
@@ -49,9 +49,27 @@ module Google
  # @!attribute [rw] secondary_worker_config
  # @return [::Google::Cloud::Dataproc::V1::InstanceGroupAutoscalingPolicyConfig]
  # Optional. Describes how the autoscaler will operate for secondary workers.
+ # @!attribute [rw] labels
+ # @return [::Google::Protobuf::Map{::String => ::String}]
+ # Optional. The labels to associate with this autoscaling policy.
+ # Label **keys** must contain 1 to 63 characters, and must conform to
+ # [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+ # Label **values** may be empty, but, if present, must contain 1 to 63
+ # characters, and must conform to [RFC
+ # 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
+ # associated with an autoscaling policy.
  class AutoscalingPolicy
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # @!attribute [rw] key
+ # @return [::String]
+ # @!attribute [rw] value
+ # @return [::String]
+ class LabelsEntry
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
  end

  # Basic algorithm for autoscaling.
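
A sketch of the new field, assuming standard protobuf message construction: `labels` is a plain String-to-String map subject to the RFC 1035 rules quoted above. A real policy also needs its algorithm and worker configs, omitted here:

policy = ::Google::Cloud::Dataproc::V1::AutoscalingPolicy.new(
  id:     "example-policy",                     # hypothetical policy ID
  labels: { "env" => "dev", "owner" => "data" } # at most 32 labels per policy
)
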
@@ -0,0 +1,339 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+ module Cloud
+ module Dataproc
+ module V1
+ # A request to create a batch workload.
+ # @!attribute [rw] parent
+ # @return [::String]
+ # Required. The parent resource where this batch will be created.
+ # @!attribute [rw] batch
+ # @return [::Google::Cloud::Dataproc::V1::Batch]
+ # Required. The batch to create.
+ # @!attribute [rw] batch_id
+ # @return [::String]
+ # Optional. The ID to use for the batch, which will become the final component of
+ # the batch's resource name.
+ #
+ # This value must be 4-63 characters. Valid characters are `/[a-z][0-9]-/`.
+ # @!attribute [rw] request_id
+ # @return [::String]
+ # Optional. A unique ID used to identify the request. If the service
+ # receives two
+ # [CreateBatchRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s
+ # with the same request_id, the second request is ignored and the
+ # Operation that corresponds to the first Batch created and stored
+ # in the backend is returned.
+ #
+ # Recommendation: Set this value to a
+ # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+ #
+ # The value must contain only letters (a-z, A-Z), numbers (0-9),
+ # underscores (_), and hyphens (-). The maximum length is 40 characters.
+ class CreateBatchRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A request to get the resource representation for a batch workload.
+ # @!attribute [rw] name
+ # @return [::String]
+ # Required. The name of the batch to retrieve.
+ class GetBatchRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A request to list batch workloads in a project.
+ # @!attribute [rw] parent
+ # @return [::String]
+ # Required. The parent, which owns this collection of batches.
+ # @!attribute [rw] page_size
+ # @return [::Integer]
+ # Optional. The maximum number of batches to return in each response.
+ # The service may return fewer than this value.
+ # The default page size is 20; the maximum page size is 1000.
+ # @!attribute [rw] page_token
+ # @return [::String]
+ # Optional. A page token received from a previous `ListBatches` call.
+ # Provide this token to retrieve the subsequent page.
+ class ListBatchesRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A list of batch workloads.
+ # @!attribute [rw] batches
+ # @return [::Array<::Google::Cloud::Dataproc::V1::Batch>]
+ # The batches from the specified collection.
+ # @!attribute [rw] next_page_token
+ # @return [::String]
+ # A token, which can be sent as `page_token` to retrieve the next page.
+ # If this field is omitted, there are no subsequent pages.
+ class ListBatchesResponse
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A request to delete a batch workload.
+ # @!attribute [rw] name
+ # @return [::String]
+ # Required. The name of the batch resource to delete.
+ class DeleteBatchRequest
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A representation of a batch workload in the service.
+ # @!attribute [r] name
+ # @return [::String]
+ # Output only. The resource name of the batch.
+ # @!attribute [r] uuid
+ # @return [::String]
+ # Output only. A batch UUID (Unique Universal Identifier). The service
+ # generates this value when it creates the batch.
+ # @!attribute [r] create_time
+ # @return [::Google::Protobuf::Timestamp]
+ # Output only. The time when the batch was created.
+ # @!attribute [rw] pyspark_batch
+ # @return [::Google::Cloud::Dataproc::V1::PySparkBatch]
+ # Optional. PySpark batch config.
+ # @!attribute [rw] spark_batch
+ # @return [::Google::Cloud::Dataproc::V1::SparkBatch]
+ # Optional. Spark batch config.
+ # @!attribute [rw] spark_r_batch
+ # @return [::Google::Cloud::Dataproc::V1::SparkRBatch]
+ # Optional. SparkR batch config.
+ # @!attribute [rw] spark_sql_batch
+ # @return [::Google::Cloud::Dataproc::V1::SparkSqlBatch]
+ # Optional. SparkSql batch config.
+ # @!attribute [r] runtime_info
+ # @return [::Google::Cloud::Dataproc::V1::RuntimeInfo]
+ # Output only. Runtime information about batch execution.
+ # @!attribute [r] state
+ # @return [::Google::Cloud::Dataproc::V1::Batch::State]
+ # Output only. The state of the batch.
+ # @!attribute [r] state_message
+ # @return [::String]
+ # Output only. Batch state details, such as a failure
+ # description if the state is `FAILED`.
+ # @!attribute [r] state_time
+ # @return [::Google::Protobuf::Timestamp]
+ # Output only. The time when the batch entered a current state.
+ # @!attribute [r] creator
+ # @return [::String]
+ # Output only. The email address of the user who created the batch.
+ # @!attribute [rw] labels
+ # @return [::Google::Protobuf::Map{::String => ::String}]
+ # Optional. The labels to associate with this batch.
+ # Label **keys** must contain 1 to 63 characters, and must conform to
+ # [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+ # Label **values** may be empty, but, if present, must contain 1 to 63
+ # characters, and must conform to [RFC
+ # 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
+ # associated with a batch.
+ # @!attribute [rw] runtime_config
+ # @return [::Google::Cloud::Dataproc::V1::RuntimeConfig]
+ # Optional. Runtime configuration for the batch execution.
+ # @!attribute [rw] environment_config
+ # @return [::Google::Cloud::Dataproc::V1::EnvironmentConfig]
+ # Optional. Environment configuration for the batch execution.
+ # @!attribute [r] operation
+ # @return [::String]
+ # Output only. The resource name of the operation associated with this batch.
+ # @!attribute [r] state_history
+ # @return [::Array<::Google::Cloud::Dataproc::V1::Batch::StateHistory>]
+ # Output only. Historical state information for the batch.
+ class Batch
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Historical state information.
+ # @!attribute [r] state
+ # @return [::Google::Cloud::Dataproc::V1::Batch::State]
+ # Output only. The state of the batch at this point in history.
+ # @!attribute [r] state_message
+ # @return [::String]
+ # Output only. Details about the state at this point in history.
+ # @!attribute [r] state_start_time
+ # @return [::Google::Protobuf::Timestamp]
+ # Output only. The time when the batch entered the historical state.
+ class StateHistory
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # @!attribute [rw] key
+ # @return [::String]
+ # @!attribute [rw] value
+ # @return [::String]
+ class LabelsEntry
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # The batch state.
+ module State
+ # The batch state is unknown.
+ STATE_UNSPECIFIED = 0
+
+ # The batch is created before running.
+ PENDING = 1
+
+ # The batch is running.
+ RUNNING = 2
+
+ # The batch is cancelling.
+ CANCELLING = 3
+
+ # The batch cancellation was successful.
+ CANCELLED = 4
+
+ # The batch completed successfully.
+ SUCCEEDED = 5
+
+ # The batch is no longer running due to an error.
+ FAILED = 6
+ end
+ end
+
+ # A configuration for running an
+ # [Apache
+ # PySpark](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart.html)
+ # batch workload.
+ # @!attribute [rw] main_python_file_uri
+ # @return [::String]
+ # Required. The HCFS URI of the main Python file to use as the Spark driver. Must
+ # be a .py file.
+ # @!attribute [rw] args
+ # @return [::Array<::String>]
+ # Optional. The arguments to pass to the driver. Do not include arguments
+ # that can be set as batch properties, such as `--conf`, since a collision
+ # can occur that causes an incorrect batch submission.
+ # @!attribute [rw] python_file_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS file URIs of Python files to pass to the PySpark
+ # framework. Supported file types: `.py`, `.egg`, and `.zip`.
+ # @!attribute [rw] jar_file_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of jar files to add to the classpath of the
+ # Spark driver and tasks.
+ # @!attribute [rw] file_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of files to be placed in the working directory of
+ # each executor.
+ # @!attribute [rw] archive_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of archives to be extracted into the working directory
+ # of each executor. Supported file types:
+ # `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+ class PySparkBatch
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A configuration for running an [Apache Spark](http://spark.apache.org/)
+ # batch workload.
+ # @!attribute [rw] main_jar_file_uri
+ # @return [::String]
+ # Optional. The HCFS URI of the jar file that contains the main class.
+ # @!attribute [rw] main_class
+ # @return [::String]
+ # Optional. The name of the driver main class. The jar file that contains the class
+ # must be in the classpath or specified in `jar_file_uris`.
+ # @!attribute [rw] args
+ # @return [::Array<::String>]
+ # Optional. The arguments to pass to the driver. Do not include arguments
+ # that can be set as batch properties, such as `--conf`, since a collision
+ # can occur that causes an incorrect batch submission.
+ # @!attribute [rw] jar_file_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of jar files to add to the classpath of the
+ # Spark driver and tasks.
+ # @!attribute [rw] file_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of files to be placed in the working directory of
+ # each executor.
+ # @!attribute [rw] archive_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of archives to be extracted into the working directory
+ # of each executor. Supported file types:
+ # `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+ class SparkBatch
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A configuration for running an
+ # [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html)
+ # batch workload.
+ # @!attribute [rw] main_r_file_uri
+ # @return [::String]
+ # Required. The HCFS URI of the main R file to use as the driver.
+ # Must be a `.R` or `.r` file.
+ # @!attribute [rw] args
+ # @return [::Array<::String>]
+ # Optional. The arguments to pass to the Spark driver. Do not include arguments
+ # that can be set as batch properties, such as `--conf`, since a collision
+ # can occur that causes an incorrect batch submission.
+ # @!attribute [rw] file_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of files to be placed in the working directory of
+ # each executor.
+ # @!attribute [rw] archive_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of archives to be extracted into the working directory
+ # of each executor. Supported file types:
+ # `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+ class SparkRBatch
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
+ # A configuration for running
+ # [Apache Spark SQL](http://spark.apache.org/sql/) queries as a batch workload.
+ # @!attribute [rw] query_file_uri
+ # @return [::String]
+ # Required. The HCFS URI of the script that contains Spark SQL queries to execute.
+ # @!attribute [rw] query_variables
+ # @return [::Google::Protobuf::Map{::String => ::String}]
+ # Optional. Mapping of query variable names to values (equivalent to the
+ # Spark SQL command: `SET name="value";`).
+ # @!attribute [rw] jar_file_uris
+ # @return [::Array<::String>]
+ # Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
+ class SparkSqlBatch
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # @!attribute [rw] key
+ # @return [::String]
+ # @!attribute [rw] value
+ # @return [::String]
+ class QueryVariablesEntry
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+ end
+ end
+ end
+ end
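
A hedged end-to-end sketch tying the new messages together, reusing the `batch_client` from the earlier sketch; the project, location, batch ID, and gs:// URI are placeholders:

request = ::Google::Cloud::Dataproc::V1::CreateBatchRequest.new(
  parent:   "projects/my-project/locations/us-central1", # hypothetical
  batch_id: "example-batch-0001",                        # 4-63 chars
  batch:    ::Google::Cloud::Dataproc::V1::Batch.new(
    pyspark_batch: ::Google::Cloud::Dataproc::V1::PySparkBatch.new(
      main_python_file_uri: "gs://my-bucket/job.py"      # must be a .py file
    )
  )
)

operation = batch_client.create_batch request
operation.wait_until_done!    # create_batch is long-running
puts operation.response.state # a Batch; e.g. :SUCCEEDED or :FAILED
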
@@ -22,7 +22,7 @@ module Google
  module Dataproc
  module V1
  # Describes the identifying information, config, and status of
- # a cluster of Compute Engine instances.
+ # a Dataproc cluster
  # @!attribute [rw] project_id
  # @return [::String]
  # Required. The Google Cloud Platform project ID that the cluster belongs to.
@@ -83,10 +83,10 @@ module Google
  # ASIA, or EU) for your cluster's staging bucket according to the
  # Compute Engine zone where your cluster is deployed, and then create
  # and manage this project-level, per-location bucket (see
- # [Dataproc staging
- # bucket](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
- # **This field requires a Cloud Storage bucket name, not a URI to a Cloud
- # Storage bucket.**
+ # [Dataproc staging and temp
+ # buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
+ # **This field requires a Cloud Storage bucket name, not a `gs://...` URI to
+ # a Cloud Storage bucket.**
  # @!attribute [rw] temp_bucket
  # @return [::String]
  # Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data,
@@ -97,9 +97,11 @@ module Google
  # Compute Engine zone where your cluster is deployed, and then create
  # and manage this project-level, per-location bucket. The default bucket has
  # a TTL of 90 days, but you can use any TTL (or none) if you specify a
- # bucket.
- # **This field requires a Cloud Storage bucket name, not a URI to a Cloud
- # Storage bucket.**
+ # bucket (see
+ # [Dataproc staging and temp
+ # buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
+ # **This field requires a Cloud Storage bucket name, not a `gs://...` URI to
+ # a Cloud Storage bucket.**
  # @!attribute [rw] gce_cluster_config
  # @return [::Google::Cloud::Dataproc::V1::GceClusterConfig]
  # Optional. The shared Compute Engine config settings for
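
A short sketch of the rule both bucket hunks stress: pass bare bucket names, never gs:// URIs. The bucket names are placeholders:

config = ::Google::Cloud::Dataproc::V1::ClusterConfig.new(
  config_bucket: "my-staging-bucket", # correct: bucket name only
  temp_bucket:   "my-temp-bucket"     # not "gs://my-temp-bucket"
)
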
@@ -107,18 +109,18 @@ module Google
  # @!attribute [rw] master_config
  # @return [::Google::Cloud::Dataproc::V1::InstanceGroupConfig]
  # Optional. The Compute Engine config settings for
- # the master instance in a cluster.
+ # the cluster's master instance.
  # @!attribute [rw] worker_config
  # @return [::Google::Cloud::Dataproc::V1::InstanceGroupConfig]
  # Optional. The Compute Engine config settings for
- # worker instances in a cluster.
+ # the cluster's worker instances.
  # @!attribute [rw] secondary_worker_config
  # @return [::Google::Cloud::Dataproc::V1::InstanceGroupConfig]
  # Optional. The Compute Engine config settings for
- # additional worker instances in a cluster.
+ # a cluster's secondary worker instances
  # @!attribute [rw] software_config
  # @return [::Google::Cloud::Dataproc::V1::SoftwareConfig]
- # Optional. The config settings for software inside the cluster.
+ # Optional. The config settings for cluster software.
  # @!attribute [rw] initialization_actions
  # @return [::Array<::Google::Cloud::Dataproc::V1::NodeInitializationAction>]
  # Optional. Commands to execute on each node after config is
@@ -334,6 +336,10 @@ module Google
  # @return [::Google::Cloud::Dataproc::V1::ShieldedInstanceConfig]
  # Optional. Shielded Instance Config for clusters using [Compute Engine Shielded
  # VMs](https://cloud.google.com/security/shielded-cloud/shielded-vm).
+ # @!attribute [rw] confidential_instance_config
+ # @return [::Google::Cloud::Dataproc::V1::ConfidentialInstanceConfig]
+ # Optional. Confidential Instance Config for clusters using [Confidential
+ # VMs](https://cloud.google.com/compute/confidential-vm/docs).
  class GceClusterConfig
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -406,6 +412,16 @@ module Google
  extend ::Google::Protobuf::MessageExts::ClassMethods
  end

+ # Confidential Instance Config for clusters using [Confidential
+ # VMs](https://cloud.google.com/compute/confidential-vm/docs)
+ # @!attribute [rw] enable_confidential_compute
+ # @return [::Boolean]
+ # Optional. Defines whether the instance should have confidential compute enabled.
+ class ConfidentialInstanceConfig
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+
  # The config settings for Compute Engine resources in
  # an instance group, such as a master or worker group.
  # @!attribute [rw] num_instances
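
A sketch combining the two hunks above: the new message hangs off GceClusterConfig, with all other fields omitted for brevity:

gce = ::Google::Cloud::Dataproc::V1::GceClusterConfig.new(
  confidential_instance_config:
    ::Google::Cloud::Dataproc::V1::ConfidentialInstanceConfig.new(
      enable_confidential_compute: true # run the cluster's VMs as Confidential VMs
    )
)
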
@@ -629,6 +645,10 @@ module Google
  # The cluster encountered an error. It is not ready for use.
  ERROR = 3

+ # The cluster has encountered an error while being updated. Jobs can
+ # be submitted to the cluster, but the cluster cannot be updated.
+ ERROR_DUE_TO_UPDATE = 9
+
  # The cluster is being deleted. It cannot be used.
  DELETING = 4

@@ -905,7 +925,7 @@ module Google
  # Required. The cluster to create.
  # @!attribute [rw] request_id
  # @return [::String]
- # Optional. A unique id used to identify the request. If the server receives two
+ # Optional. A unique ID used to identify the request. If the server receives two
  # [CreateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s
  # with the same id, then the second request will be ignored and the
  # first {::Google::Longrunning::Operation google.longrunning.Operation} created and stored in the backend
@@ -914,8 +934,11 @@ module Google
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
+ # @!attribute [rw] action_on_failed_primary_workers
+ # @return [::Google::Cloud::Dataproc::V1::FailureAction]
+ # Optional. Failure action when primary worker creation fails.
  class CreateClusterRequest
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
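
A sketch of the deduplication pattern the request_id doc describes: reusing one UUID across retries makes a repeated CreateClusterRequest a server-side no-op. Project, region, and cluster name are placeholders:

require "securerandom"

cluster = ::Google::Cloud::Dataproc::V1::Cluster.new(
  project_id:   "my-project",      # hypothetical
  cluster_name: "example-cluster"
)

req = ::Google::Cloud::Dataproc::V1::CreateClusterRequest.new(
  project_id: "my-project",
  region:     "us-central1",
  cluster:    cluster,
  request_id: SecureRandom.uuid # 36 chars of letters, digits, and hyphens
)
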
@@ -1001,7 +1024,7 @@ module Google
  # </table>
  # @!attribute [rw] request_id
  # @return [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [UpdateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -1011,7 +1034,7 @@ module Google
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  class UpdateClusterRequest
  include ::Google::Protobuf::MessageExts
@@ -1035,7 +1058,7 @@ module Google
  # (with error NOT_FOUND) if a cluster with the specified UUID does not exist.
  # @!attribute [rw] request_id
  # @return [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [StopClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -1045,7 +1068,7 @@ module Google
  # Recommendation: Set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  class StopClusterRequest
  include ::Google::Protobuf::MessageExts
@@ -1069,7 +1092,7 @@ module Google
  # (with error NOT_FOUND) if a cluster with the specified UUID does not exist.
  # @!attribute [rw] request_id
  # @return [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [StartClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -1079,7 +1102,7 @@ module Google
  # Recommendation: Set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  class StartClusterRequest
  include ::Google::Protobuf::MessageExts
@@ -1103,7 +1126,7 @@ module Google
  # (with error NOT_FOUND) if cluster with specified UUID does not exist.
  # @!attribute [rw] request_id
  # @return [::String]
- # Optional. A unique id used to identify the request. If the server
+ # Optional. A unique ID used to identify the request. If the server
  # receives two
  # [DeleteClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s
  # with the same id, then the second request will be ignored and the
@@ -1113,7 +1136,7 @@ module Google
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
  #
- # The id must contain only letters (a-z, A-Z), numbers (0-9),
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). The maximum length is 40 characters.
  class DeleteClusterRequest
  include ::Google::Protobuf::MessageExts
@@ -42,7 +42,7 @@ module Google
  end

  # The Log4j level for job execution. When running an
- # [Apache Hive](http://hive.apache.org/) job, Cloud
+ # [Apache Hive](https://hive.apache.org/) job, Cloud
  # Dataproc configures the Hive client to an equivalent verbosity level.
  module Level
  # Level is unspecified. Use default level for log4j.
@@ -507,8 +507,7 @@ module Google
  # the job is submitted.
  # @!attribute [rw] cluster_labels
  # @return [::Google::Protobuf::Map{::String => ::String}]
- # Optional. Cluster labels to identify a cluster where the job will be
- # submitted.
+ # Optional. Cluster labels to identify a cluster where the job will be submitted.
  class JobPlacement
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -609,8 +608,8 @@ module Google
  # Encapsulates the full scoping used to reference a job.
  # @!attribute [rw] project_id
  # @return [::String]
- # Optional. The ID of the Google Cloud Platform project that the job belongs
- # to. If specified, must match the request project ID.
+ # Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+ # specified, must match the request project ID.
  # @!attribute [rw] job_id
  # @return [::String]
  # Optional. The job ID, which must be unique within the project.
@@ -757,8 +756,8 @@ module Google
  # may be reused over time.
  # @!attribute [r] done
  # @return [::Boolean]
- # Output only. Indicates whether the job is completed. If the value is
- # `false`, the job is still in progress. If `true`, the job is completed, and
+ # Output only. Indicates whether the job is completed. If the value is `false`,
+ # the job is still in progress. If `true`, the job is completed, and
  # `status.state` field will indicate if it was successful, failed,
  # or cancelled.
  class Job
@@ -788,8 +787,8 @@ module Google
  # Maximum value is 10.
  # @!attribute [rw] max_failures_total
  # @return [::Integer]
- # Optional. Maximum number of times in total a driver may be restarted as a
- # result of driver exiting with non-zero code before job is reported failed.
+ # Optional. Maximum number of times in total a driver may be restarted as a result of
+ # driver exiting with non-zero code before job is reported failed.
  # Maximum value is 240.
  class JobScheduling
  include ::Google::Protobuf::MessageExts