google-cloud-dataproc 0.8.0 → 0.9.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 50ba4599348472de65875bad49dc3ae2d958123cc77468ac1cfb0c142117f8b0
4
- data.tar.gz: 3d6b2e61d53912b68f2119f01bcd34bbae0b30fb08c54c23b8d1636fafac4c1d
3
+ metadata.gz: 73149665558ed4fef80dcabc2baf92983137ac4bde65d74448dbc26865f47107
4
+ data.tar.gz: d5b0938c6bdd27df5172550f5e635f58fbed0276ec1d3668f61adce7e01c5767
5
5
  SHA512:
6
- metadata.gz: e358cdbe94e7e9762b19b57d1c3f9666d85a4836ee18dfdafc41f3c2e199557c07a5099a6589c05124bd6080a224fd1797b9277af836fdf38621a0be53f4a720
7
- data.tar.gz: 31d4f8955fc9c2f3aaa5961af6a07b5bc54dcae0a14233e7876ef1a875161733fd906eeb06a7d8096ea823b94b1c0c6a7d8617749ecc07773a4ed960818a0cb2
6
+ metadata.gz: bf6e7328361ba5d9e64d47732230f288d451e728328a3e3058e378f53e47f2b796e7f9c75d8212d826f24cc72f117eb5b95119a9ce392e86889e1b056757735f
7
+ data.tar.gz: b8404d746ddae8bdbfb87acae3f1109e9813d68a0f4604e85c7f1ee71af38d1bdc26ab4f5345505cdc91858f46db86011de784e503b1faa83c839bd2585f8939
data/README.md CHANGED
@@ -1,4 +1,4 @@
1
- # Ruby Client for Google Cloud Dataproc API ([Alpha](https://github.com/googleapis/google-cloud-ruby#versioning))
1
+ # Ruby Client for Google Cloud Dataproc API
2
2
 
3
3
  [Google Cloud Dataproc API][Product Documentation]:
4
4
  Manages Hadoop-based clusters and jobs on Google Cloud Platform.
@@ -21,7 +21,7 @@ module Google
21
21
  # rubocop:disable LineLength
22
22
 
23
23
  ##
24
- # # Ruby Client for Google Cloud Dataproc API ([Alpha](https://github.com/googleapis/google-cloud-ruby#versioning))
24
+ # # Ruby Client for Google Cloud Dataproc API
25
25
  #
26
26
  # [Google Cloud Dataproc API][Product Documentation]:
27
27
  # Manages Hadoop-based clusters and jobs on Google Cloud Platform.
@@ -27,7 +27,7 @@ module Google
27
27
  # rubocop:disable LineLength
28
28
 
29
29
  ##
30
- # # Ruby Client for Google Cloud Dataproc API ([Alpha](https://github.com/googleapis/google-cloud-ruby#versioning))
30
+ # # Ruby Client for Google Cloud Dataproc API
31
31
  #
32
32
  # [Google Cloud Dataproc API][Product Documentation]:
33
33
  # Manages Hadoop-based clusters and jobs on Google Cloud Platform.
@@ -1,7 +1,7 @@
1
1
  # Generated by the protocol buffer compiler. DO NOT EDIT!
2
2
  # Source: google/cloud/dataproc/v1/autoscaling_policies.proto for package 'google.cloud.dataproc.v1'
3
3
  # Original file comments:
4
- # Copyright 2019 Google LLC.
4
+ # Copyright 2020 Google LLC
5
5
  #
6
6
  # Licensed under the Apache License, Version 2.0 (the "License");
7
7
  # you may not use this file except in compliance with the License.
@@ -15,7 +15,6 @@
15
15
  # See the License for the specific language governing permissions and
16
16
  # limitations under the License.
17
17
  #
18
- #
19
18
 
20
19
 
21
20
  require 'grpc'
@@ -401,7 +401,8 @@ module Google
401
401
  # interrupting jobs in progress. Timeout specifies how long to wait for jobs
402
402
  # in progress to finish before forcefully removing nodes (and potentially
403
403
  # interrupting jobs). Default timeout is 0 (for forceful decommission), and
404
- # the maximum allowed timeout is 1 day.
404
+ # the maximum allowed timeout is 1 day. (see JSON representation of
405
+ # [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
405
406
  #
406
407
  # Only supported on Dataproc image versions 1.2 and higher.
407
408
  # A hash of the same form as `Google::Protobuf::Duration`
@@ -7,7 +7,6 @@ require 'google/protobuf'
7
7
  require 'google/api/annotations_pb'
8
8
  require 'google/api/client_pb'
9
9
  require 'google/api/field_behavior_pb'
10
- require 'google/cloud/dataproc/v1/operations_pb'
11
10
  require 'google/cloud/dataproc/v1/shared_pb'
12
11
  require 'google/longrunning/operations_pb'
13
12
  require 'google/protobuf/duration_pb'
@@ -35,6 +34,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
35
34
  optional :encryption_config, :message, 15, "google.cloud.dataproc.v1.EncryptionConfig"
36
35
  optional :autoscaling_config, :message, 18, "google.cloud.dataproc.v1.AutoscalingConfig"
37
36
  optional :security_config, :message, 16, "google.cloud.dataproc.v1.SecurityConfig"
37
+ optional :lifecycle_config, :message, 17, "google.cloud.dataproc.v1.LifecycleConfig"
38
38
  end
39
39
  add_message "google.cloud.dataproc.v1.AutoscalingConfig" do
40
40
  optional :policy_uri, :string, 1
@@ -51,6 +51,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
51
51
  repeated :service_account_scopes, :string, 3
52
52
  repeated :tags, :string, 4
53
53
  map :metadata, :string, :string, 5
54
+ optional :reservation_affinity, :message, 11, "google.cloud.dataproc.v1.ReservationAffinity"
54
55
  end
55
56
  add_message "google.cloud.dataproc.v1.InstanceGroupConfig" do
56
57
  optional :num_instances, :int32, 1
@@ -124,6 +125,14 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
124
125
  map :properties, :string, :string, 2
125
126
  repeated :optional_components, :enum, 3, "google.cloud.dataproc.v1.Component"
126
127
  end
128
+ add_message "google.cloud.dataproc.v1.LifecycleConfig" do
129
+ optional :idle_delete_ttl, :message, 1, "google.protobuf.Duration"
130
+ optional :idle_start_time, :message, 4, "google.protobuf.Timestamp"
131
+ oneof :ttl do
132
+ optional :auto_delete_time, :message, 2, "google.protobuf.Timestamp"
133
+ optional :auto_delete_ttl, :message, 3, "google.protobuf.Duration"
134
+ end
135
+ end
127
136
  add_message "google.cloud.dataproc.v1.ClusterMetrics" do
128
137
  map :hdfs_metrics, :string, :int64, 1
129
138
  map :yarn_metrics, :string, :int64, 2
@@ -174,6 +183,17 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
174
183
  add_message "google.cloud.dataproc.v1.DiagnoseClusterResults" do
175
184
  optional :output_uri, :string, 1
176
185
  end
186
+ add_message "google.cloud.dataproc.v1.ReservationAffinity" do
187
+ optional :consume_reservation_type, :enum, 1, "google.cloud.dataproc.v1.ReservationAffinity.Type"
188
+ optional :key, :string, 2
189
+ repeated :values, :string, 3
190
+ end
191
+ add_enum "google.cloud.dataproc.v1.ReservationAffinity.Type" do
192
+ value :TYPE_UNSPECIFIED, 0
193
+ value :NO_RESERVATION, 1
194
+ value :ANY_RESERVATION, 2
195
+ value :SPECIFIC_RESERVATION, 3
196
+ end
177
197
  end
178
198
 
179
199
  module Google
@@ -196,6 +216,7 @@ module Google
196
216
  SecurityConfig = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SecurityConfig").msgclass
197
217
  KerberosConfig = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.KerberosConfig").msgclass
198
218
  SoftwareConfig = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SoftwareConfig").msgclass
219
+ LifecycleConfig = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.LifecycleConfig").msgclass
199
220
  ClusterMetrics = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ClusterMetrics").msgclass
200
221
  CreateClusterRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.CreateClusterRequest").msgclass
201
222
  UpdateClusterRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.UpdateClusterRequest").msgclass
@@ -205,6 +226,8 @@ module Google
205
226
  ListClustersResponse = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ListClustersResponse").msgclass
206
227
  DiagnoseClusterRequest = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.DiagnoseClusterRequest").msgclass
207
228
  DiagnoseClusterResults = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.DiagnoseClusterResults").msgclass
229
+ ReservationAffinity = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ReservationAffinity").msgclass
230
+ ReservationAffinity::Type = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ReservationAffinity.Type").enummodule
208
231
  end
209
232
  end
210
233
  end
@@ -1,7 +1,7 @@
1
1
  # Generated by the protocol buffer compiler. DO NOT EDIT!
2
2
  # Source: google/cloud/dataproc/v1/clusters.proto for package 'google.cloud.dataproc.v1'
3
3
  # Original file comments:
4
- # Copyright 2019 Google LLC.
4
+ # Copyright 2020 Google LLC
5
5
  #
6
6
  # Licensed under the Apache License, Version 2.0 (the "License");
7
7
  # you may not use this file except in compliance with the License.
@@ -15,7 +15,6 @@
15
15
  # See the License for the specific language governing permissions and
16
16
  # limitations under the License.
17
17
  #
18
- #
19
18
 
20
19
 
21
20
  require 'grpc'
@@ -38,15 +37,15 @@ module Google
38
37
 
39
38
  # Creates a cluster in a project. The returned
40
39
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
41
- # [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
40
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
42
41
  rpc :CreateCluster, CreateClusterRequest, Google::Longrunning::Operation
43
42
  # Updates a cluster in a project. The returned
44
43
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
45
- # [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
44
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
46
45
  rpc :UpdateCluster, UpdateClusterRequest, Google::Longrunning::Operation
47
46
  # Deletes a cluster in a project. The returned
48
47
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
49
- # [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
48
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
50
49
  rpc :DeleteCluster, DeleteClusterRequest, Google::Longrunning::Operation
51
50
  # Gets the resource representation for a cluster in a project.
52
51
  rpc :GetCluster, GetClusterRequest, Cluster
@@ -54,11 +53,11 @@ module Google
54
53
  rpc :ListClusters, ListClustersRequest, ListClustersResponse
55
54
  # Gets cluster diagnostic information. The returned
56
55
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
57
- # [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
56
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
58
57
  # After the operation completes,
59
58
  # [Operation.response][google.longrunning.Operation.response]
60
59
  # contains
61
- # [DiagnoseClusterResults](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
60
+ # [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
62
61
  rpc :DiagnoseCluster, DiagnoseClusterRequest, Google::Longrunning::Operation
63
62
  end
64
63
 
@@ -113,6 +113,9 @@ module Google
113
113
  # @!attribute [rw] security_config
114
114
  # @return [Google::Cloud::Dataproc::V1::SecurityConfig]
115
115
  # Optional. Security settings for the cluster.
116
+ # @!attribute [rw] lifecycle_config
117
+ # @return [Google::Cloud::Dataproc::V1::LifecycleConfig]
118
+ # Optional. Lifecycle setting for the cluster.
116
119
  class ClusterConfig; end
117
120
 
118
121
  # Autoscaling Policy config associated with the cluster.
@@ -220,9 +223,12 @@ module Google
220
223
  # The Compute Engine metadata entries to add to all instances (see
221
224
  # [Project and instance
222
225
  # metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
226
+ # @!attribute [rw] reservation_affinity
227
+ # @return [Google::Cloud::Dataproc::V1::ReservationAffinity]
228
+ # Optional. Reservation Affinity for consuming Zonal reservation.
223
229
  class GceClusterConfig; end
224
230
 
225
- # Optional. The config settings for Compute Engine resources in
231
+ # The config settings for Compute Engine resources in
226
232
  # an instance group, such as a master or worker group.
227
233
  # @!attribute [rw] num_instances
228
234
  # @return [Integer]
@@ -337,7 +343,10 @@ module Google
337
343
  # @!attribute [rw] execution_timeout
338
344
  # @return [Google::Protobuf::Duration]
339
345
  # Optional. Amount of time executable has to complete. Default is
340
- # 10 minutes. Cluster creation fails with an explanatory error message (the
346
+ # 10 minutes (see JSON representation of
347
+ # [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
348
+ #
349
+ # Cluster creation fails with an explanatory error message (the
341
350
  # name of the executable that caused the error and the exceeded timeout
342
351
  # period) if the executable is not completed at end of the timeout period.
343
352
  class NodeInitializationAction; end
@@ -351,7 +360,8 @@ module Google
351
360
  # Optional. Output only. Details of cluster's state.
352
361
  # @!attribute [rw] state_start_time
353
362
  # @return [Google::Protobuf::Timestamp]
354
- # Output only. Time when this state was entered.
363
+ # Output only. Time when this state was entered (see JSON representation of
364
+ # [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
355
365
  # @!attribute [rw] substate
356
366
  # @return [Google::Cloud::Dataproc::V1::ClusterStatus::Substate]
357
367
  # Output only. Additional state information that includes
@@ -508,6 +518,32 @@ module Google
508
518
  # Optional. The set of components to activate on the cluster.
509
519
  class SoftwareConfig; end
510
520
 
521
+ # Specifies the cluster auto-delete schedule configuration.
522
+ # @!attribute [rw] idle_delete_ttl
523
+ # @return [Google::Protobuf::Duration]
524
+ # Optional. The duration to keep the cluster alive while idling (when no jobs
525
+ # are running). Passing this threshold will cause the cluster to be
526
+ # deleted. Minimum value is 10 minutes; maximum value is 14 days (see JSON
527
+ # representation of
528
+ [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
529
+ # @!attribute [rw] auto_delete_time
530
+ # @return [Google::Protobuf::Timestamp]
531
+ # Optional. The time when cluster will be auto-deleted (see JSON representation of
532
+ # [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
533
+ # @!attribute [rw] auto_delete_ttl
534
+ # @return [Google::Protobuf::Duration]
535
+ # Optional. The lifetime duration of cluster. The cluster will be
536
+ # auto-deleted at the end of this period. Minimum value is 10 minutes;
537
+ # maximum value is 14 days (see JSON representation of
538
+ # [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
539
+ # @!attribute [rw] idle_start_time
540
+ # @return [Google::Protobuf::Timestamp]
541
+ # Output only. The time when cluster became idle (most recent job finished)
542
+ # and became eligible for deletion due to idleness (see JSON representation
543
+ # of
544
+ # [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
545
+ class LifecycleConfig; end
546
+
511
547
  # Contains cluster daemon metrics, such as HDFS and YARN stats.
512
548
  #
513
549
  # **Beta Feature**: This report is available for testing purposes only. It may
@@ -567,7 +603,8 @@ module Google
567
603
  # interrupting jobs in progress. Timeout specifies how long to wait for jobs
568
604
  # in progress to finish before forcefully removing nodes (and potentially
569
605
  # interrupting jobs). Default timeout is 0 (for forceful decommission), and
570
- # the maximum allowed timeout is 1 day.
606
+ # the maximum allowed timeout is 1 day. (see JSON representation of
607
+ # [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
571
608
  #
572
609
  # Only supported on Dataproc image versions 1.2 and higher.
573
610
  # @!attribute [rw] update_mask
@@ -749,6 +786,33 @@ module Google
749
786
  # The output report is a plain text file with a summary of collected
750
787
  # diagnostics.
751
788
  class DiagnoseClusterResults; end
789
+
790
+ # Reservation Affinity for consuming Zonal reservation.
791
+ # @!attribute [rw] consume_reservation_type
792
+ # @return [Google::Cloud::Dataproc::V1::ReservationAffinity::Type]
793
+ # Optional. Type of reservation to consume.
794
+ # @!attribute [rw] key
795
+ # @return [String]
796
+ # Optional. Corresponds to the label key of reservation resource.
797
+ # @!attribute [rw] values
798
+ # @return [Array<String>]
799
+ # Optional. Corresponds to the label values of reservation resource.
800
+ class ReservationAffinity
801
+ # Indicates whether to consume capacity from a reservation or not.
802
+ module Type
803
+ TYPE_UNSPECIFIED = 0
804
+
805
+ # Do not consume from any allocated capacity.
806
+ NO_RESERVATION = 1
807
+
808
+ # Consume any reservation available.
809
+ ANY_RESERVATION = 2
810
+
811
+ # Must consume from a specific reservation. Must specify key value fields
812
+ # for specifying the reservations.
813
+ SPECIFIC_RESERVATION = 3
814
+ end
815
+ end
752
816
  end
753
817
  end
754
818
  end
@@ -289,6 +289,67 @@ module Google
289
289
  # Optional. The runtime log config for job execution.
290
290
  class PigJob; end
291
291
 
292
+ # A Dataproc job for running
293
+ # [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html)
294
+ # applications on YARN.
295
+ # @!attribute [rw] main_r_file_uri
296
+ # @return [String]
297
+ # Required. The HCFS URI of the main R file to use as the driver.
298
+ # Must be a .R file.
299
+ # @!attribute [rw] args
300
+ # @return [Array<String>]
301
+ # Optional. The arguments to pass to the driver. Do not include arguments,
302
+ # such as `--conf`, that can be set as job properties, since a collision may
303
+ # occur that causes an incorrect job submission.
304
+ # @!attribute [rw] file_uris
305
+ # @return [Array<String>]
306
+ # Optional. HCFS URIs of files to be copied to the working directory of
307
+ # R drivers and distributed tasks. Useful for naively parallel tasks.
308
+ # @!attribute [rw] archive_uris
309
+ # @return [Array<String>]
310
+ # Optional. HCFS URIs of archives to be extracted in the working directory of
311
+ # Spark drivers and tasks. Supported file types:
312
+ # .jar, .tar, .tar.gz, .tgz, and .zip.
313
+ # @!attribute [rw] properties
314
+ # @return [Hash{String => String}]
315
+ # Optional. A mapping of property names to values, used to configure SparkR.
316
+ # Properties that conflict with values set by the Dataproc API may be
317
+ # overwritten. Can include properties set in
318
+ # /etc/spark/conf/spark-defaults.conf and classes in user code.
319
+ # @!attribute [rw] logging_config
320
+ # @return [Google::Cloud::Dataproc::V1::LoggingConfig]
321
+ # Optional. The runtime log config for job execution.
322
+ class SparkRJob; end
323
+
324
+ # A Dataproc job for running [Presto](https://prestosql.io/) queries
325
+ # @!attribute [rw] query_file_uri
326
+ # @return [String]
327
+ # The HCFS URI of the script that contains SQL queries.
328
+ # @!attribute [rw] query_list
329
+ # @return [Google::Cloud::Dataproc::V1::QueryList]
330
+ # A list of queries.
331
+ # @!attribute [rw] continue_on_failure
332
+ # @return [true, false]
333
+ # Optional. Whether to continue executing queries if a query fails.
334
+ # The default value is `false`. Setting to `true` can be useful when
335
+ # executing independent parallel queries.
336
+ # @!attribute [rw] output_format
337
+ # @return [String]
338
+ # Optional. The format in which query output will be displayed. See the
339
+ # Presto documentation for supported output formats
340
+ # @!attribute [rw] client_tags
341
+ # @return [Array<String>]
342
+ # Optional. Presto client tags to attach to this query
343
+ # @!attribute [rw] properties
344
+ # @return [Hash{String => String}]
345
+ # Optional. A mapping of property names to values. Used to set Presto
346
+ # [session properties](https://prestodb.io/docs/current/sql/set-session.html)
347
+ # Equivalent to using the --session flag in the Presto CLI
348
+ # @!attribute [rw] logging_config
349
+ # @return [Google::Cloud::Dataproc::V1::LoggingConfig]
350
+ # Optional. The runtime log config for job execution.
351
+ class PrestoJob; end
352
+
292
353
  # Dataproc job config.
293
354
  # @!attribute [rw] cluster_name
294
355
  # @return [String]
@@ -460,22 +521,28 @@ module Google
460
521
  # run the job.
461
522
  # @!attribute [rw] hadoop_job
462
523
  # @return [Google::Cloud::Dataproc::V1::HadoopJob]
463
- # Job is a Hadoop job.
524
+ # Optional. Job is a Hadoop job.
464
525
  # @!attribute [rw] spark_job
465
526
  # @return [Google::Cloud::Dataproc::V1::SparkJob]
466
- # Job is a Spark job.
527
+ # Optional. Job is a Spark job.
467
528
  # @!attribute [rw] pyspark_job
468
529
  # @return [Google::Cloud::Dataproc::V1::PySparkJob]
469
- # Job is a Pyspark job.
530
+ # Optional. Job is a PySpark job.
470
531
  # @!attribute [rw] hive_job
471
532
  # @return [Google::Cloud::Dataproc::V1::HiveJob]
472
- # Job is a Hive job.
533
+ # Optional. Job is a Hive job.
473
534
  # @!attribute [rw] pig_job
474
535
  # @return [Google::Cloud::Dataproc::V1::PigJob]
475
- # Job is a Pig job.
536
+ # Optional. Job is a Pig job.
537
+ # @!attribute [rw] spark_r_job
538
+ # @return [Google::Cloud::Dataproc::V1::SparkRJob]
539
+ # Optional. Job is a SparkR job.
476
540
  # @!attribute [rw] spark_sql_job
477
541
  # @return [Google::Cloud::Dataproc::V1::SparkSqlJob]
478
- # Job is a SparkSql job.
542
+ # Optional. Job is a SparkSql job.
543
+ # @!attribute [rw] presto_job
544
+ # @return [Google::Cloud::Dataproc::V1::PrestoJob]
545
+ # Optional. Job is a Presto job.
479
546
  # @!attribute [rw] status
480
547
  # @return [Google::Cloud::Dataproc::V1::JobStatus]
481
548
  # Output only. The job status. Additional application-specific
@@ -146,22 +146,16 @@ module Google
146
146
  # or hyphen. Must consist of between 3 and 50 characters.
147
147
  # @!attribute [rw] hadoop_job
148
148
  # @return [Google::Cloud::Dataproc::V1::HadoopJob]
149
- # Job is a Hadoop job.
150
149
  # @!attribute [rw] spark_job
151
150
  # @return [Google::Cloud::Dataproc::V1::SparkJob]
152
- # Job is a Spark job.
153
151
  # @!attribute [rw] pyspark_job
154
152
  # @return [Google::Cloud::Dataproc::V1::PySparkJob]
155
- # Job is a Pyspark job.
156
153
  # @!attribute [rw] hive_job
157
154
  # @return [Google::Cloud::Dataproc::V1::HiveJob]
158
- # Job is a Hive job.
159
155
  # @!attribute [rw] pig_job
160
156
  # @return [Google::Cloud::Dataproc::V1::PigJob]
161
- # Job is a Pig job.
162
157
  # @!attribute [rw] spark_sql_job
163
158
  # @return [Google::Cloud::Dataproc::V1::SparkSqlJob]
164
- # Job is a SparkSql job.
165
159
  # @!attribute [rw] labels
166
160
  # @return [Hash{String => String}]
167
161
  # Optional. The labels to associate with this job.
@@ -7,6 +7,7 @@ require 'google/protobuf'
7
7
  require 'google/api/annotations_pb'
8
8
  require 'google/api/client_pb'
9
9
  require 'google/api/field_behavior_pb'
10
+ require 'google/longrunning/operations_pb'
10
11
  require 'google/protobuf/empty_pb'
11
12
  require 'google/protobuf/field_mask_pb'
12
13
  require 'google/protobuf/timestamp_pb'
@@ -93,6 +94,25 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
93
94
  optional :query_list, :message, 2, "google.cloud.dataproc.v1.QueryList"
94
95
  end
95
96
  end
97
+ add_message "google.cloud.dataproc.v1.SparkRJob" do
98
+ optional :main_r_file_uri, :string, 1
99
+ repeated :args, :string, 2
100
+ repeated :file_uris, :string, 3
101
+ repeated :archive_uris, :string, 4
102
+ map :properties, :string, :string, 5
103
+ optional :logging_config, :message, 6, "google.cloud.dataproc.v1.LoggingConfig"
104
+ end
105
+ add_message "google.cloud.dataproc.v1.PrestoJob" do
106
+ optional :continue_on_failure, :bool, 3
107
+ optional :output_format, :string, 4
108
+ repeated :client_tags, :string, 5
109
+ map :properties, :string, :string, 6
110
+ optional :logging_config, :message, 7, "google.cloud.dataproc.v1.LoggingConfig"
111
+ oneof :queries do
112
+ optional :query_file_uri, :string, 1
113
+ optional :query_list, :message, 2, "google.cloud.dataproc.v1.QueryList"
114
+ end
115
+ end
96
116
  add_message "google.cloud.dataproc.v1.JobPlacement" do
97
117
  optional :cluster_name, :string, 1
98
118
  optional :cluster_uuid, :string, 2
@@ -159,7 +179,9 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
159
179
  optional :pyspark_job, :message, 5, "google.cloud.dataproc.v1.PySparkJob"
160
180
  optional :hive_job, :message, 6, "google.cloud.dataproc.v1.HiveJob"
161
181
  optional :pig_job, :message, 7, "google.cloud.dataproc.v1.PigJob"
182
+ optional :spark_r_job, :message, 21, "google.cloud.dataproc.v1.SparkRJob"
162
183
  optional :spark_sql_job, :message, 12, "google.cloud.dataproc.v1.SparkSqlJob"
184
+ optional :presto_job, :message, 23, "google.cloud.dataproc.v1.PrestoJob"
163
185
  end
164
186
  end
165
187
  add_message "google.cloud.dataproc.v1.JobScheduling" do
@@ -226,6 +248,8 @@ module Google
226
248
  HiveJob = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.HiveJob").msgclass
227
249
  SparkSqlJob = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SparkSqlJob").msgclass
228
250
  PigJob = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.PigJob").msgclass
251
+ SparkRJob = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SparkRJob").msgclass
252
+ PrestoJob = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.PrestoJob").msgclass
229
253
  JobPlacement = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.JobPlacement").msgclass
230
254
  JobStatus = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.JobStatus").msgclass
231
255
  JobStatus::State = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.JobStatus.State").enummodule
@@ -1,7 +1,7 @@
1
1
  # Generated by the protocol buffer compiler. DO NOT EDIT!
2
2
  # Source: google/cloud/dataproc/v1/jobs.proto for package 'google.cloud.dataproc.v1'
3
3
  # Original file comments:
4
- # Copyright 2019 Google LLC.
4
+ # Copyright 2020 Google LLC
5
5
  #
6
6
  # Licensed under the Apache License, Version 2.0 (the "License");
7
7
  # you may not use this file except in compliance with the License.
@@ -44,9 +44,9 @@ module Google
44
44
  rpc :UpdateJob, UpdateJobRequest, Job
45
45
  # Starts a job cancellation request. To access the job resource
46
46
  # after cancellation, call
47
- # [regions/\\{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
47
+ # [regions/\\{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
48
48
  # or
49
- # [regions/\\{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
49
+ # [regions/\\{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
50
50
  rpc :CancelJob, CancelJobRequest, Job
51
51
  # Deletes the job from the project. If the job is active, the delete fails,
52
52
  # and the response returns `FAILED_PRECONDITION`.
@@ -404,7 +404,7 @@ module Google
404
404
  # The {Google::Longrunning::Operation#metadata Operation#metadata} will be
405
405
  # [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
406
406
  # Also see [Using
407
- # WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
407
+ # WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
408
408
  #
409
409
  # On successful completion,
410
410
  # {Google::Longrunning::Operation#response Operation#response} will be
@@ -522,7 +522,7 @@ module Google
522
522
  # The {Google::Longrunning::Operation#metadata Operation#metadata} will be
523
523
  # [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
524
524
  # Also see [Using
525
- # WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
525
+ # WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
526
526
  #
527
527
  # On successful completion,
528
528
  # {Google::Longrunning::Operation#response Operation#response} will be
@@ -1,7 +1,7 @@
1
1
  # Generated by the protocol buffer compiler. DO NOT EDIT!
2
2
  # Source: google/cloud/dataproc/v1/workflow_templates.proto for package 'google.cloud.dataproc.v1'
3
3
  # Original file comments:
4
- # Copyright 2019 Google LLC.
4
+ # Copyright 2020 Google LLC
5
5
  #
6
6
  # Licensed under the Apache License, Version 2.0 (the "License");
7
7
  # you may not use this file except in compliance with the License.
@@ -15,7 +15,6 @@
15
15
  # See the License for the specific language governing permissions and
16
16
  # limitations under the License.
17
17
  #
18
- #
19
18
 
20
19
 
21
20
  require 'grpc'
@@ -56,9 +55,9 @@ module Google
56
55
  # clusters to be deleted.
57
56
  #
58
57
  # The [Operation.metadata][google.longrunning.Operation.metadata] will be
59
- # [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
58
+ # [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
60
59
  # Also see [Using
61
- # WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
60
+ # WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
62
61
  #
63
62
  # On successful completion,
64
63
  # [Operation.response][google.longrunning.Operation.response] will be
@@ -81,9 +80,9 @@ module Google
81
80
  # clusters to be deleted.
82
81
  #
83
82
  # The [Operation.metadata][google.longrunning.Operation.metadata] will be
84
- # [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
83
+ # [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
85
84
  # Also see [Using
86
- # WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
85
+ # WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
87
86
  #
88
87
  # On successful completion,
89
88
  # [Operation.response][google.longrunning.Operation.response] will be
@@ -27,7 +27,7 @@ module Google
27
27
  # rubocop:disable LineLength
28
28
 
29
29
  ##
30
- # # Ruby Client for Google Cloud Dataproc API ([Alpha](https://github.com/googleapis/google-cloud-ruby#versioning))
30
+ # # Ruby Client for Google Cloud Dataproc API
31
31
  #
32
32
  # [Google Cloud Dataproc API][Product Documentation]:
33
33
  # Manages Hadoop-based clusters and jobs on Google Cloud Platform.
@@ -16,7 +16,7 @@
16
16
  module Google
17
17
  module Cloud
18
18
  module Dataproc
19
- VERSION = "0.8.0".freeze
19
+ VERSION = "0.9.0".freeze
20
20
  end
21
21
  end
22
22
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: google-cloud-dataproc
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.8.0
4
+ version: 0.9.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Google LLC
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2020-02-05 00:00:00.000000000 Z
11
+ date: 2020-02-24 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: google-gax