google-cloud-dataproc-v1 0.6.3 → 0.9.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (41):
  1. checksums.yaml +4 -4
  2. data/.yardopts +1 -1
  3. data/AUTHENTICATION.md +7 -25
  4. data/README.md +1 -1
  5. data/lib/google/cloud/dataproc/v1/autoscaling_policies_pb.rb +3 -2
  6. data/lib/google/cloud/dataproc/v1/autoscaling_policies_services_pb.rb +1 -1
  7. data/lib/google/cloud/dataproc/v1/autoscaling_policy_service/client.rb +106 -15
  8. data/lib/google/cloud/dataproc/v1/batch_controller/client.rb +719 -0
  9. data/lib/google/cloud/dataproc/v1/batch_controller/credentials.rb +51 -0
  10. data/lib/google/cloud/dataproc/v1/batch_controller/operations.rb +767 -0
  11. data/lib/google/cloud/dataproc/v1/batch_controller/paths.rb +69 -0
  12. data/lib/google/cloud/dataproc/v1/batch_controller.rb +50 -0
  13. data/lib/google/cloud/dataproc/v1/batches_pb.rb +123 -0
  14. data/lib/google/cloud/dataproc/v1/batches_services_pb.rb +52 -0
  15. data/lib/google/cloud/dataproc/v1/cluster_controller/client.rb +266 -49
  16. data/lib/google/cloud/dataproc/v1/cluster_controller/operations.rb +115 -12
  17. data/lib/google/cloud/dataproc/v1/cluster_controller/paths.rb +0 -19
  18. data/lib/google/cloud/dataproc/v1/clusters_pb.rb +23 -10
  19. data/lib/google/cloud/dataproc/v1/clusters_services_pb.rb +3 -1
  20. data/lib/google/cloud/dataproc/v1/job_controller/client.rb +187 -32
  21. data/lib/google/cloud/dataproc/v1/job_controller/operations.rb +115 -12
  22. data/lib/google/cloud/dataproc/v1/jobs_pb.rb +2 -2
  23. data/lib/google/cloud/dataproc/v1/jobs_services_pb.rb +1 -1
  24. data/lib/google/cloud/dataproc/v1/operations_pb.rb +18 -3
  25. data/lib/google/cloud/dataproc/v1/shared_pb.rb +99 -2
  26. data/lib/google/cloud/dataproc/v1/version.rb +1 -1
  27. data/lib/google/cloud/dataproc/v1/workflow_template_service/client.rb +163 -24
  28. data/lib/google/cloud/dataproc/v1/workflow_template_service/operations.rb +115 -12
  29. data/lib/google/cloud/dataproc/v1/workflow_template_service/paths.rb +0 -19
  30. data/lib/google/cloud/dataproc/v1/workflow_templates_pb.rb +2 -2
  31. data/lib/google/cloud/dataproc/v1/workflow_templates_services_pb.rb +2 -3
  32. data/lib/google/cloud/dataproc/v1.rb +1 -0
  33. data/proto_docs/google/api/resource.rb +10 -71
  34. data/proto_docs/google/cloud/dataproc/v1/autoscaling_policies.rb +18 -0
  35. data/proto_docs/google/cloud/dataproc/v1/batches.rb +339 -0
  36. data/proto_docs/google/cloud/dataproc/v1/clusters.rb +117 -47
  37. data/proto_docs/google/cloud/dataproc/v1/jobs.rb +20 -9
  38. data/proto_docs/google/cloud/dataproc/v1/operations.rb +48 -0
  39. data/proto_docs/google/cloud/dataproc/v1/shared.rb +336 -1
  40. data/proto_docs/google/cloud/dataproc/v1/workflow_templates.rb +12 -15
  41. metadata +11 -3
@@ -49,9 +49,27 @@ module Google
49
49
  # @!attribute [rw] secondary_worker_config
50
50
  # @return [::Google::Cloud::Dataproc::V1::InstanceGroupAutoscalingPolicyConfig]
51
51
  # Optional. Describes how the autoscaler will operate for secondary workers.
52
+ # @!attribute [rw] labels
53
+ # @return [::Google::Protobuf::Map{::String => ::String}]
54
+ # Optional. The labels to associate with this autoscaling policy.
55
+ # Label **keys** must contain 1 to 63 characters, and must conform to
56
+ # [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
57
+ # Label **values** may be empty, but, if present, must contain 1 to 63
58
+ # characters, and must conform to [RFC
59
+ # 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
60
+ # associated with an autoscaling policy.
52
61
  class AutoscalingPolicy
53
62
  include ::Google::Protobuf::MessageExts
54
63
  extend ::Google::Protobuf::MessageExts::ClassMethods
64
+
65
+ # @!attribute [rw] key
66
+ # @return [::String]
67
+ # @!attribute [rw] value
68
+ # @return [::String]
69
+ class LabelsEntry
70
+ include ::Google::Protobuf::MessageExts
71
+ extend ::Google::Protobuf::MessageExts::ClassMethods
72
+ end
55
73
  end
56
74
 
57
75
  # Basic algorithm for autoscaling.
@@ -0,0 +1,339 @@
1
# frozen_string_literal: true

# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Auto-generated by gapic-generator-ruby. DO NOT EDIT!


module Google
  module Cloud
    module Dataproc
      module V1
        # A request to create a batch workload.
        # @!attribute [rw] parent
        #   @return [::String]
        #     Required. The parent resource where this batch will be created.
        # @!attribute [rw] batch
        #   @return [::Google::Cloud::Dataproc::V1::Batch]
        #     Required. The batch to create.
        # @!attribute [rw] batch_id
        #   @return [::String]
        #     Optional. The ID to use for the batch, which will become the final component of
        #     the batch's resource name.
        #
        #     This value must be 4-63 characters. Valid characters are `/[a-z][0-9]-/`.
        # @!attribute [rw] request_id
        #   @return [::String]
        #     Optional. A unique ID used to identify the request. If the service
        #     receives two
        #     [CreateBatchRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s
        #     with the same request_id, the second request is ignored and the
        #     Operation that corresponds to the first Batch created and stored
        #     in the backend is returned.
        #
        #     Recommendation: Set this value to a
        #     [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
        #
        #     The value must contain only letters (a-z, A-Z), numbers (0-9),
        #     underscores (_), and hyphens (-). The maximum length is 40 characters.
        class CreateBatchRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A request to get the resource representation for a batch workload.
        # @!attribute [rw] name
        #   @return [::String]
        #     Required. The name of the batch to retrieve.
        class GetBatchRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A request to list batch workloads in a project.
        # @!attribute [rw] parent
        #   @return [::String]
        #     Required. The parent, which owns this collection of batches.
        # @!attribute [rw] page_size
        #   @return [::Integer]
        #     Optional. The maximum number of batches to return in each response.
        #     The service may return fewer than this value.
        #     The default page size is 20; the maximum page size is 1000.
        # @!attribute [rw] page_token
        #   @return [::String]
        #     Optional. A page token received from a previous `ListBatches` call.
        #     Provide this token to retrieve the subsequent page.
        class ListBatchesRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A list of batch workloads.
        # @!attribute [rw] batches
        #   @return [::Array<::Google::Cloud::Dataproc::V1::Batch>]
        #     The batches from the specified collection.
        # @!attribute [rw] next_page_token
        #   @return [::String]
        #     A token, which can be sent as `page_token` to retrieve the next page.
        #     If this field is omitted, there are no subsequent pages.
        class ListBatchesResponse
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A request to delete a batch workload.
        # @!attribute [rw] name
        #   @return [::String]
        #     Required. The name of the batch resource to delete.
        class DeleteBatchRequest
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A representation of a batch workload in the service.
        # @!attribute [r] name
        #   @return [::String]
        #     Output only. The resource name of the batch.
        # @!attribute [r] uuid
        #   @return [::String]
        #     Output only. A batch UUID (Unique Universal Identifier). The service
        #     generates this value when it creates the batch.
        # @!attribute [r] create_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Output only. The time when the batch was created.
        # @!attribute [rw] pyspark_batch
        #   @return [::Google::Cloud::Dataproc::V1::PySparkBatch]
        #     Optional. PySpark batch config.
        # @!attribute [rw] spark_batch
        #   @return [::Google::Cloud::Dataproc::V1::SparkBatch]
        #     Optional. Spark batch config.
        # @!attribute [rw] spark_r_batch
        #   @return [::Google::Cloud::Dataproc::V1::SparkRBatch]
        #     Optional. SparkR batch config.
        # @!attribute [rw] spark_sql_batch
        #   @return [::Google::Cloud::Dataproc::V1::SparkSqlBatch]
        #     Optional. SparkSql batch config.
        # @!attribute [r] runtime_info
        #   @return [::Google::Cloud::Dataproc::V1::RuntimeInfo]
        #     Output only. Runtime information about batch execution.
        # @!attribute [r] state
        #   @return [::Google::Cloud::Dataproc::V1::Batch::State]
        #     Output only. The state of the batch.
        # @!attribute [r] state_message
        #   @return [::String]
        #     Output only. Batch state details, such as a failure
        #     description if the state is `FAILED`.
        # @!attribute [r] state_time
        #   @return [::Google::Protobuf::Timestamp]
        #     Output only. The time when the batch entered a current state.
        # @!attribute [r] creator
        #   @return [::String]
        #     Output only. The email address of the user who created the batch.
        # @!attribute [rw] labels
        #   @return [::Google::Protobuf::Map{::String => ::String}]
        #     Optional. The labels to associate with this batch.
        #     Label **keys** must contain 1 to 63 characters, and must conform to
        #     [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
        #     Label **values** may be empty, but, if present, must contain 1 to 63
        #     characters, and must conform to [RFC
        #     1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
        #     associated with a batch.
        # @!attribute [rw] runtime_config
        #   @return [::Google::Cloud::Dataproc::V1::RuntimeConfig]
        #     Optional. Runtime configuration for the batch execution.
        # @!attribute [rw] environment_config
        #   @return [::Google::Cloud::Dataproc::V1::EnvironmentConfig]
        #     Optional. Environment configuration for the batch execution.
        # @!attribute [r] operation
        #   @return [::String]
        #     Output only. The resource name of the operation associated with this batch.
        # @!attribute [r] state_history
        #   @return [::Array<::Google::Cloud::Dataproc::V1::Batch::StateHistory>]
        #     Output only. Historical state information for the batch.
        class Batch
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # Historical state information.
          # @!attribute [r] state
          #   @return [::Google::Cloud::Dataproc::V1::Batch::State]
          #     Output only. The state of the batch at this point in history.
          # @!attribute [r] state_message
          #   @return [::String]
          #     Output only. Details about the state at this point in history.
          # @!attribute [r] state_start_time
          #   @return [::Google::Protobuf::Timestamp]
          #     Output only. The time when the batch entered the historical state.
          class StateHistory
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # @!attribute [rw] key
          #   @return [::String]
          # @!attribute [rw] value
          #   @return [::String]
          class LabelsEntry
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end

          # The batch state.
          module State
            # The batch state is unknown.
            STATE_UNSPECIFIED = 0

            # The batch is created before running.
            PENDING = 1

            # The batch is running.
            RUNNING = 2

            # The batch is cancelling.
            CANCELLING = 3

            # The batch cancellation was successful.
            CANCELLED = 4

            # The batch completed successfully.
            SUCCEEDED = 5

            # The batch is no longer running due to an error.
            FAILED = 6
          end
        end

        # A configuration for running an
        # [Apache
        # PySpark](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart.html)
        # batch workload.
        # @!attribute [rw] main_python_file_uri
        #   @return [::String]
        #     Required. The HCFS URI of the main Python file to use as the Spark driver. Must
        #     be a .py file.
        # @!attribute [rw] args
        #   @return [::Array<::String>]
        #     Optional. The arguments to pass to the driver. Do not include arguments
        #     that can be set as batch properties, such as `--conf`, since a collision
        #     can occur that causes an incorrect batch submission.
        # @!attribute [rw] python_file_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS file URIs of Python files to pass to the PySpark
        #     framework. Supported file types: `.py`, `.egg`, and `.zip`.
        # @!attribute [rw] jar_file_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of jar files to add to the classpath of the
        #     Spark driver and tasks.
        # @!attribute [rw] file_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of files to be placed in the working directory of
        #     each executor.
        # @!attribute [rw] archive_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of archives to be extracted into the working directory
        #     of each executor. Supported file types:
        #     `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
        class PySparkBatch
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A configuration for running an [Apache Spark](http://spark.apache.org/)
        # batch workload.
        # @!attribute [rw] main_jar_file_uri
        #   @return [::String]
        #     Optional. The HCFS URI of the jar file that contains the main class.
        # @!attribute [rw] main_class
        #   @return [::String]
        #     Optional. The name of the driver main class. The jar file that contains the class
        #     must be in the classpath or specified in `jar_file_uris`.
        # @!attribute [rw] args
        #   @return [::Array<::String>]
        #     Optional. The arguments to pass to the driver. Do not include arguments
        #     that can be set as batch properties, such as `--conf`, since a collision
        #     can occur that causes an incorrect batch submission.
        # @!attribute [rw] jar_file_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of jar files to add to the classpath of the
        #     Spark driver and tasks.
        # @!attribute [rw] file_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of files to be placed in the working directory of
        #     each executor.
        # @!attribute [rw] archive_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of archives to be extracted into the working directory
        #     of each executor. Supported file types:
        #     `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
        class SparkBatch
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A configuration for running an
        # [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html)
        # batch workload.
        # @!attribute [rw] main_r_file_uri
        #   @return [::String]
        #     Required. The HCFS URI of the main R file to use as the driver.
        #     Must be a `.R` or `.r` file.
        # @!attribute [rw] args
        #   @return [::Array<::String>]
        #     Optional. The arguments to pass to the Spark driver. Do not include arguments
        #     that can be set as batch properties, such as `--conf`, since a collision
        #     can occur that causes an incorrect batch submission.
        # @!attribute [rw] file_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of files to be placed in the working directory of
        #     each executor.
        # @!attribute [rw] archive_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of archives to be extracted into the working directory
        #     of each executor. Supported file types:
        #     `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
        class SparkRBatch
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods
        end

        # A configuration for running
        # [Apache Spark SQL](http://spark.apache.org/sql/) queries as a batch workload.
        # @!attribute [rw] query_file_uri
        #   @return [::String]
        #     Required. The HCFS URI of the script that contains Spark SQL queries to execute.
        # @!attribute [rw] query_variables
        #   @return [::Google::Protobuf::Map{::String => ::String}]
        #     Optional. Mapping of query variable names to values (equivalent to the
        #     Spark SQL command: `SET name="value";`).
        # @!attribute [rw] jar_file_uris
        #   @return [::Array<::String>]
        #     Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
        class SparkSqlBatch
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # @!attribute [rw] key
          #   @return [::String]
          # @!attribute [rw] value
          #   @return [::String]
          class QueryVariablesEntry
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end
      end
    end
  end
end