google-cloud-dataproc-v1beta2 0.2.3 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/google/cloud/dataproc/v1beta2.rb +3 -0
- data/lib/google/cloud/dataproc/v1beta2/autoscaling_policies_services_pb.rb +5 -5
- data/lib/google/cloud/dataproc/v1beta2/autoscaling_policy_service/client.rb +5 -5
- data/lib/google/cloud/dataproc/v1beta2/cluster_controller.rb +1 -0
- data/lib/google/cloud/dataproc/v1beta2/cluster_controller/client.rb +25 -20
- data/lib/google/cloud/dataproc/v1beta2/cluster_controller/operations.rb +2 -2
- data/lib/google/cloud/dataproc/v1beta2/cluster_controller/paths.rb +52 -0
- data/lib/google/cloud/dataproc/v1beta2/clusters_pb.rb +8 -0
- data/lib/google/cloud/dataproc/v1beta2/clusters_services_pb.rb +6 -6
- data/lib/google/cloud/dataproc/v1beta2/job_controller/client.rb +9 -9
- data/lib/google/cloud/dataproc/v1beta2/job_controller/operations.rb +2 -2
- data/lib/google/cloud/dataproc/v1beta2/jobs_services_pb.rb +7 -7
- data/lib/google/cloud/dataproc/v1beta2/shared_pb.rb +1 -2
- data/lib/google/cloud/dataproc/v1beta2/version.rb +1 -1
- data/lib/google/cloud/dataproc/v1beta2/workflow_template_service/client.rb +11 -10
- data/lib/google/cloud/dataproc/v1beta2/workflow_template_service/operations.rb +2 -2
- data/lib/google/cloud/dataproc/v1beta2/workflow_template_service/paths.rb +19 -0
- data/lib/google/cloud/dataproc/v1beta2/workflow_templates_pb.rb +5 -0
- data/lib/google/cloud/dataproc/v1beta2/workflow_templates_services_pb.rb +9 -8
- data/proto_docs/google/api/resource.rb +50 -14
- data/proto_docs/google/cloud/dataproc/v1beta2/autoscaling_policies.rb +13 -7
- data/proto_docs/google/cloud/dataproc/v1beta2/clusters.rb +64 -21
- data/proto_docs/google/cloud/dataproc/v1beta2/jobs.rb +14 -13
- data/proto_docs/google/cloud/dataproc/v1beta2/shared.rb +3 -6
- data/proto_docs/google/cloud/dataproc/v1beta2/workflow_templates.rb +42 -8
- metadata +6 -5
data/proto_docs/google/cloud/dataproc/v1beta2/jobs.rb CHANGED
@@ -158,12 +158,12 @@ module Google
         # Spark driver and tasks.
         # @!attribute [rw] file_uris
         # @return [::Array<::String>]
-        # Optional. HCFS URIs of files to be
-        #
+        # Optional. HCFS URIs of files to be placed in the working directory of
+        # each executor. Useful for naively parallel tasks.
         # @!attribute [rw] archive_uris
         # @return [::Array<::String>]
-        # Optional. HCFS URIs of archives to be extracted
-        # of
+        # Optional. HCFS URIs of archives to be extracted into the working directory
+        # of each executor. Supported file types:
         # .jar, .tar, .tar.gz, .tgz, and .zip.
         # @!attribute [rw] properties
         # @return [::Google::Protobuf::Map{::String => ::String}]
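The reworded `file_uris` and `archive_uris` descriptions above (mirrored for `PySparkJob` and `SparkRJob` in the hunks below) cover files staged into each executor's working directory. A minimal sketch of setting these fields when submitting a Spark job through the generated client; the project, region, cluster, bucket paths, and class name are placeholders:

```ruby
require "google/cloud/dataproc/v1beta2"

# Placeholder project, region, cluster, and bucket -- replace with real values.
client = ::Google::Cloud::Dataproc::V1beta2::JobController::Client.new

job = {
  placement: { cluster_name: "example-cluster" },
  spark_job: {
    main_class: "org.example.SparkWordCount",
    # Files placed in the working directory of each executor.
    file_uris: ["gs://example-bucket/lookup-table.csv"],
    # Archives extracted into the working directory of each executor
    # (.jar, .tar, .tar.gz, .tgz, and .zip are supported).
    archive_uris: ["gs://example-bucket/native-libs.tar.gz"]
  }
}

client.submit_job project_id: "example-project", region: "us-central1", job: job
```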
@@ -211,11 +211,12 @@ module Google
         # Python driver and tasks.
         # @!attribute [rw] file_uris
         # @return [::Array<::String>]
-        # Optional. HCFS URIs of files to be
-        #
+        # Optional. HCFS URIs of files to be placed in the working directory of
+        # each executor. Useful for naively parallel tasks.
         # @!attribute [rw] archive_uris
         # @return [::Array<::String>]
-        # Optional. HCFS URIs of archives to be extracted
+        # Optional. HCFS URIs of archives to be extracted into the working directory
+        # of each executor. Supported file types:
         # .jar, .tar, .tar.gz, .tgz, and .zip.
         # @!attribute [rw] properties
         # @return [::Google::Protobuf::Map{::String => ::String}]
@@ -426,12 +427,12 @@ module Google
         # occur that causes an incorrect job submission.
         # @!attribute [rw] file_uris
         # @return [::Array<::String>]
-        # Optional. HCFS URIs of files to be
-        #
+        # Optional. HCFS URIs of files to be placed in the working directory of
+        # each executor. Useful for naively parallel tasks.
         # @!attribute [rw] archive_uris
         # @return [::Array<::String>]
-        # Optional. HCFS URIs of archives to be extracted
-        #
+        # Optional. HCFS URIs of archives to be extracted into the working directory
+        # of each executor. Supported file types:
         # .jar, .tar, .tar.gz, .tgz, and .zip.
         # @!attribute [rw] properties
         # @return [::Google::Protobuf::Map{::String => ::String}]
@@ -600,8 +601,8 @@ module Google
         # Encapsulates the full scoping used to reference a job.
         # @!attribute [rw] project_id
         # @return [::String]
-        #
-        #
+        # Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+        # specified, must match the request project ID.
         # @!attribute [rw] job_id
         # @return [::String]
         # Optional. The job ID, which must be unique within the project.
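`JobReference` lets a caller pre-assign the job ID; if `project_id` is set it must match the request's project ID. A hedged sketch with placeholder IDs and URIs:

```ruby
require "google/cloud/dataproc/v1beta2"
require "securerandom"

client = ::Google::Cloud::Dataproc::V1beta2::JobController::Client.new

# reference.project_id, if set, must match the request's project_id;
# job_id must be unique within the project. All values are placeholders.
job = {
  reference: { project_id: "example-project", job_id: "wordcount-#{SecureRandom.hex 4}" },
  placement: { cluster_name: "example-cluster" },
  hadoop_job: { main_jar_file_uri: "gs://example-bucket/wordcount.jar" }
}

client.submit_job project_id: "example-project", region: "us-central1", job: job
```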
data/proto_docs/google/cloud/dataproc/v1beta2/shared.rb CHANGED
@@ -23,20 +23,17 @@ module Google
       module V1beta2
         # Cluster components that can be activated.
         module Component
-          # Unspecified component.
+          # Unspecified component. Specifying this will cause Cluster creation to fail.
           COMPONENT_UNSPECIFIED = 0

           # The Anaconda python distribution.
           ANACONDA = 5

-          # Docker
-          DOCKER = 13
-
           # The Druid query engine.
           DRUID = 9

-          #
-
+          # HBase.
+          HBASE = 11

           # The Hive Web HCatalog (the REST service for accessing HCatalog).
           HIVE_WEBHCAT = 3
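Components from this enum are enabled through a cluster's `SoftwareConfig#optional_components`; since `COMPONENT_UNSPECIFIED` now fails cluster creation and `DOCKER` was dropped, the sketch below (placeholder project, region, and cluster names) sticks to components still present in 0.4.1:

```ruby
require "google/cloud/dataproc/v1beta2"

client = ::Google::Cloud::Dataproc::V1beta2::ClusterController::Client.new

cluster = {
  project_id:   "example-project",
  cluster_name: "example-cluster",
  config: {
    # COMPONENT_UNSPECIFIED now fails cluster creation, so name concrete
    # components; ANACONDA and DRUID remain in the enum.
    software_config: { optional_components: [:ANACONDA, :DRUID] }
  }
}

# create_cluster returns a long-running operation.
operation = client.create_cluster project_id: "example-project",
                                  region: "us-central1",
                                  cluster: cluster
operation.wait_until_done!
```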
data/proto_docs/google/cloud/dataproc/v1beta2/workflow_templates.rb CHANGED
@@ -85,6 +85,17 @@ module Google
         # Optional. Template parameters whose values are substituted into the
         # template. Values for parameters must be provided when the template is
         # instantiated.
+        # @!attribute [rw] dag_timeout
+        # @return [::Google::Protobuf::Duration]
+        # Optional. Timeout duration for the DAG of jobs. You can use "s", "m", "h",
+        # and "d" suffixes for second, minute, hour, and day duration values,
+        # respectively. The timeout duration must be from 10 minutes ("10m") to 24
+        # hours ("24h" or "1d"). The timer begins when the first job is submitted. If
+        # the workflow is running at the end of the timeout period, any remaining
+        # jobs are cancelled, the workflow is terminated, and if the workflow was
+        # running on a [managed
+        # cluster](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster),
+        # the cluster is deleted.
         class WorkflowTemplate
           include ::Google::Protobuf::MessageExts
           extend ::Google::Protobuf::MessageExts::ClassMethods
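The new `dag_timeout` field is a `google.protobuf.Duration`; a hedged sketch (placeholder template ID, project, cluster, and job) of creating a template whose job DAG is cut off after 30 minutes:

```ruby
require "google/cloud/dataproc/v1beta2"

client = ::Google::Cloud::Dataproc::V1beta2::WorkflowTemplateService::Client.new

template = {
  id: "nightly-report",
  placement: {
    # Default cluster config; real templates would configure the cluster.
    managed_cluster: { cluster_name: "nightly-report-cluster", config: {} }
  },
  jobs: [
    { step_id: "extract", hadoop_job: { main_jar_file_uri: "gs://example-bucket/extract.jar" } }
  ],
  # 30 minutes, inside the allowed 10m..24h range; if the DAG is still
  # running when the timer fires, remaining jobs are cancelled and the
  # managed cluster is deleted.
  dag_timeout: { seconds: 30 * 60 }
}

client.create_workflow_template parent: "projects/example-project/regions/us-central1",
                                template: template
```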
@@ -188,30 +199,36 @@ module Google
         #
         # The step id is used as prefix for job id, as job
         # `goog-dataproc-workflow-step-id` label, and in
-        # {::Google::Cloud::Dataproc::V1beta2::OrderedJob#prerequisite_step_ids prerequisiteStepIds}
-        # steps.
+        # {::Google::Cloud::Dataproc::V1beta2::OrderedJob#prerequisite_step_ids prerequisiteStepIds}
+        # field from other steps.
         #
         # The id must contain only letters (a-z, A-Z), numbers (0-9),
         # underscores (_), and hyphens (-). Cannot begin or end with underscore
         # or hyphen. Must consist of between 3 and 50 characters.
         # @!attribute [rw] hadoop_job
         # @return [::Google::Cloud::Dataproc::V1beta2::HadoopJob]
+        # Optional. Job is a Hadoop job.
         # @!attribute [rw] spark_job
         # @return [::Google::Cloud::Dataproc::V1beta2::SparkJob]
+        # Optional. Job is a Spark job.
         # @!attribute [rw] pyspark_job
         # @return [::Google::Cloud::Dataproc::V1beta2::PySparkJob]
+        # Optional. Job is a PySpark job.
         # @!attribute [rw] hive_job
         # @return [::Google::Cloud::Dataproc::V1beta2::HiveJob]
+        # Optional. Job is a Hive job.
         # @!attribute [rw] pig_job
         # @return [::Google::Cloud::Dataproc::V1beta2::PigJob]
+        # Optional. Job is a Pig job.
         # @!attribute [rw] spark_r_job
         # @return [::Google::Cloud::Dataproc::V1beta2::SparkRJob]
-        #
+        # Optional. Job is a SparkR job.
         # @!attribute [rw] spark_sql_job
         # @return [::Google::Cloud::Dataproc::V1beta2::SparkSqlJob]
+        # Optional. Job is a SparkSql job.
         # @!attribute [rw] presto_job
         # @return [::Google::Cloud::Dataproc::V1beta2::PrestoJob]
-        # Presto job
+        # Optional. Job is a Presto job.
         # @!attribute [rw] labels
         # @return [::Google::Protobuf::Map{::String => ::String}]
         # Optional. The labels to associate with this job.
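Each `OrderedJob` pairs a `step_id` with exactly one of the job types documented above; a small sketch (placeholder ids and URIs) of two steps ordered via `prerequisite_step_ids`:

```ruby
# Placeholder jobs list for a WorkflowTemplate: each step sets exactly one
# job type, and step ids are 3-50 chars of letters, digits, _ and -.
jobs = [
  {
    step_id: "ingest",
    pyspark_job: { main_python_file_uri: "gs://example-bucket/ingest.py" }
  },
  {
    step_id: "aggregate",
    # Runs only after the "ingest" step completes.
    prerequisite_step_ids: ["ingest"],
    spark_sql_job: { query_file_uri: "gs://example-bucket/aggregate.sql" }
  }
]
```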
@@ -269,10 +286,10 @@ module Google
         # A field is allowed to appear in at most one parameter's list of field
         # paths.
         #
-        # A field path is similar in syntax to a
-        # For example, a
-        #
-        # `placement.clusterSelector.zone`.
+        # A field path is similar in syntax to a
+        # {::Google::Protobuf::FieldMask google.protobuf.FieldMask}. For example, a
+        # field path that references the zone field of a workflow template's cluster
+        # selector would be specified as `placement.clusterSelector.zone`.
         #
         # Also, field paths can reference fields using the following syntax:
         #
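A `TemplateParameter` names the fields it substitutes using these paths; a short sketch (placeholder parameter name and zone) that parameterizes the cluster-selector zone from the example above:

```ruby
# Placeholder parameter declaration on a WorkflowTemplate.
parameters = [
  {
    name: "ZONE",
    description: "Zone used by the workflow's cluster selector.",
    fields: ["placement.clusterSelector.zone"]
  }
]

# Supplied at instantiation time via the request's `parameters` map.
parameter_values = { "ZONE" => "us-central1-a" }
```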
@@ -397,6 +414,23 @@ module Google
         # @!attribute [r] cluster_uuid
         # @return [::String]
         # Output only. The UUID of target cluster.
+        # @!attribute [r] dag_timeout
+        # @return [::Google::Protobuf::Duration]
+        # Output only. The timeout duration for the DAG of jobs.
+        # Minimum timeout duration is 10 minutes and maximum is 24 hours, expressed
+        # as a
+        # [google.protobuf.Duration][https://developers.google.com/protocol-buffers/docs/proto3#json_mapping].
+        # For example, "1800" = 1800 seconds/30 minutes duration.
+        # @!attribute [r] dag_start_time
+        # @return [::Google::Protobuf::Timestamp]
+        # Output only. DAG start time, which is only set for workflows with
+        # {::Google::Cloud::Dataproc::V1beta2::WorkflowMetadata#dag_timeout dag_timeout}
+        # when the DAG begins.
+        # @!attribute [r] dag_end_time
+        # @return [::Google::Protobuf::Timestamp]
+        # Output only. DAG end time, which is only set for workflows with
+        # {::Google::Cloud::Dataproc::V1beta2::WorkflowMetadata#dag_timeout dag_timeout}
+        # when the DAG ends.
         class WorkflowMetadata
           include ::Google::Protobuf::MessageExts
           extend ::Google::Protobuf::MessageExts::ClassMethods
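The new `dag_*` fields surface on the long-running operation's metadata during template instantiation; a hedged sketch (placeholder template name) of reading them — whether each field is populated depends on the template's `dag_timeout` and the workflow's progress:

```ruby
require "google/cloud/dataproc/v1beta2"

client = ::Google::Cloud::Dataproc::V1beta2::WorkflowTemplateService::Client.new

operation = client.instantiate_workflow_template(
  name: "projects/example-project/regions/us-central1/workflowTemplates/nightly-report"
)

# The operation metadata is a WorkflowMetadata message; the dag_* fields
# are only populated for templates that set dag_timeout.
metadata = operation.metadata
puts "DAG timeout:    #{metadata.dag_timeout&.seconds}s"
puts "DAG started at: #{metadata.dag_start_time&.to_time}"

operation.wait_until_done!
puts "DAG ended at:   #{operation.metadata.dag_end_time&.to_time}"
```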
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: google-cloud-dataproc-v1beta2
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.4.1
 platform: ruby
 authors:
 - Google LLC
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-01-20 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: gapic-common
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.3'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '0.
+        version: '0.3'
 - !ruby/object:Gem::Dependency
   name: google-cloud-errors
   requirement: !ruby/object:Gem::Requirement
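To pick up these changes, the gem can be bumped in an application's Gemfile; a minimal, assumed snippet (the comment mirrors the `gapic-common ~> 0.3` requirement shown above):

```ruby
# Gemfile
source "https://rubygems.org"

# 0.4.x of the generated v1beta2 client; it pulls in gapic-common ~> 0.3.
gem "google-cloud-dataproc-v1beta2", "~> 0.4"
```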
@@ -172,6 +172,7 @@ files:
 - lib/google/cloud/dataproc/v1beta2/cluster_controller/client.rb
 - lib/google/cloud/dataproc/v1beta2/cluster_controller/credentials.rb
 - lib/google/cloud/dataproc/v1beta2/cluster_controller/operations.rb
+- lib/google/cloud/dataproc/v1beta2/cluster_controller/paths.rb
 - lib/google/cloud/dataproc/v1beta2/clusters_pb.rb
 - lib/google/cloud/dataproc/v1beta2/clusters_services_pb.rb
 - lib/google/cloud/dataproc/v1beta2/job_controller.rb
@@ -225,7 +226,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.2.6
 signing_key:
 specification_version: 4
 summary: API Client library for the Cloud Dataproc V1beta2 API