google-cloud-dataproc 0.3.1 → 0.4.0

Files changed (33)
  1. checksums.yaml +4 -4
  2. data/lib/google/cloud/dataproc.rb +53 -0
  3. data/lib/google/cloud/dataproc/v1/cluster_controller_client.rb +14 -16
  4. data/lib/google/cloud/dataproc/v1/clusters_pb.rb +2 -0
  5. data/lib/google/cloud/dataproc/v1/clusters_services_pb.rb +1 -1
  6. data/lib/google/cloud/dataproc/v1/doc/google/cloud/dataproc/v1/clusters.rb +29 -26
  7. data/lib/google/cloud/dataproc/v1/doc/google/cloud/dataproc/v1/jobs.rb +8 -7
  8. data/lib/google/cloud/dataproc/v1/doc/google/cloud/dataproc/v1/workflow_templates.rb +6 -6
  9. data/lib/google/cloud/dataproc/v1/job_controller_client.rb +4 -3
  10. data/lib/google/cloud/dataproc/v1/jobs_services_pb.rb +1 -1
  11. data/lib/google/cloud/dataproc/v1/shared_pb.rb +26 -0
  12. data/lib/google/cloud/dataproc/v1/workflow_template_service_client.rb +3 -3
  13. data/lib/google/cloud/dataproc/v1/workflow_templates_services_pb.rb +2 -3
  14. data/lib/google/cloud/dataproc/v1beta2.rb +58 -0
  15. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policies_pb.rb +81 -0
  16. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policies_services_pb.rb +60 -0
  17. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policy_service_client.rb +457 -0
  18. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policy_service_client_config.json +51 -0
  19. data/lib/google/cloud/dataproc/v1beta2/cluster_controller_client.rb +18 -16
  20. data/lib/google/cloud/dataproc/v1beta2/clusters_pb.rb +48 -0
  21. data/lib/google/cloud/dataproc/v1beta2/clusters_services_pb.rb +1 -1
  22. data/lib/google/cloud/dataproc/v1beta2/doc/google/cloud/dataproc/v1beta2/autoscaling_policies.rb +202 -0
  23. data/lib/google/cloud/dataproc/v1beta2/doc/google/cloud/dataproc/v1beta2/clusters.rb +172 -28
  24. data/lib/google/cloud/dataproc/v1beta2/doc/google/cloud/dataproc/v1beta2/jobs.rb +44 -9
  25. data/lib/google/cloud/dataproc/v1beta2/doc/google/cloud/dataproc/v1beta2/workflow_templates.rb +8 -6
  26. data/lib/google/cloud/dataproc/v1beta2/job_controller_client.rb +5 -5
  27. data/lib/google/cloud/dataproc/v1beta2/jobs_pb.rb +10 -0
  28. data/lib/google/cloud/dataproc/v1beta2/jobs_services_pb.rb +1 -1
  29. data/lib/google/cloud/dataproc/v1beta2/shared_pb.rb +12 -0
  30. data/lib/google/cloud/dataproc/v1beta2/workflow_template_service_client.rb +3 -3
  31. data/lib/google/cloud/dataproc/v1beta2/workflow_templates_services_pb.rb +2 -3
  32. data/lib/google/cloud/dataproc/version.rb +22 -0
  33. metadata +9 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 433b67960fde33d581be68d4316747f4ee70009aabb1c6e89029410a627a947a
- data.tar.gz: e10c766af7e68ea6e1ab8d5471c0153bcbb32487f8240ff5bbe7bbc34e4b1849
+ metadata.gz: 3a048c44622e0c2237869aa31e2a28e8b5cc795481106efad007a0fb9ca697b6
+ data.tar.gz: ca20bc3634851b4ff6897b12506e9372aa219b99a687ee51c82a77b45c401015
  SHA512:
- metadata.gz: 4688350b720a26778e6751f65408f08a76848504eb5c7c2f729841736cb8004c104b274fbd4e31d47b34ee073a771a38500416da149e558cccc03d3d42018b4a
- data.tar.gz: 9edfe8721650d1dab30ab294f5174be7d37037198bc1b37de3e56d7b2114a317e38ae03c4d7a03cf716b5659f8ab45e88075616f8fe06a55fea14b2e22b478bd
+ metadata.gz: b8e9ee2a0774229f9507678d92303a9966ffd7d967fd84fbdb271ec682bcb6db9ff1fb53c9c8b5aebc429bbd37e5dfced3a240534fbc034fd3c6d63893c72dca
+ data.tar.gz: b150beeac6e207686a0f556776ddebdc554950df3ffed53b4034a26fc95ff6e2506a624bebb866afba03c50179b8004551f2dc9346520074d92ee35a32997f99
data/lib/google/cloud/dataproc.rb CHANGED
@@ -109,6 +109,59 @@ module Google
  .select { |dir| File.exist?(dir + ".rb") }
  .map { |dir| File.basename(dir) }
 
+ module AutoscalingPolicyService
+ ##
+ # The API interface for managing autoscaling policies in the
+ # Google Cloud Dataproc API.
+ #
+ # @param version [Symbol, String]
+ # The major version of the service to be used. By default :v1
+ # is used.
+ # @overload new(version:, credentials:, scopes:, client_config:, timeout:)
+ # @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
+ # Provides the means for authenticating requests made by the client. This parameter can
+ # be many types.
+ # A `Google::Auth::Credentials` uses a the properties of its represented keyfile for
+ # authenticating requests made by this client.
+ # A `String` will be treated as the path to the keyfile to be used for the construction of
+ # credentials for this client.
+ # A `Hash` will be treated as the contents of a keyfile to be used for the construction of
+ # credentials for this client.
+ # A `GRPC::Core::Channel` will be used to make calls through.
+ # A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. The channel credentials
+ # should already be composed with a `GRPC::Core::CallCredentials` object.
+ # A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the
+ # metadata for requests, generally, to give OAuth credentials.
+ # @param scopes [Array<String>]
+ # The OAuth scopes for this service. This parameter is ignored if
+ # an updater_proc is supplied.
+ # @param client_config [Hash]
+ # A Hash for call options for each method. See
+ # Google::Gax#construct_settings for the structure of
+ # this data. Falls back to the default config if not specified
+ # or the specified config is missing data points.
+ # @param timeout [Numeric]
+ # The default timeout, in seconds, for calls made through this client.
+ # @param metadata [Hash]
+ # Default metadata to be sent with each request. This can be overridden on a per call basis.
+ # @param exception_transformer [Proc]
+ # An optional proc that intercepts any exceptions raised during an API call to inject
+ # custom error handling.
+ def self.new(*args, version: :v1, **kwargs)
+ unless AVAILABLE_VERSIONS.include?(version.to_s.downcase)
+ raise "The version: #{version} is not available. The available versions " \
+ "are: [#{AVAILABLE_VERSIONS.join(", ")}]"
+ end
+
+ require "#{FILE_DIR}/#{version.to_s.downcase}"
+ version_module = Google::Cloud::Dataproc
+ .constants
+ .select {|sym| sym.to_s.downcase == version.to_s.downcase}
+ .first
+ Google::Cloud::Dataproc.const_get(version_module)::AutoscalingPolicyService.new(*args, **kwargs)
+ end
+ end
+
  module ClusterController
  ##
  # The ClusterControllerService provides methods to manage clusters
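The new top-level AutoscalingPolicyService factory added in this hunk simply resolves the requested version module and forwards to its client. A minimal usage sketch; note that only the v1beta2 surface ships an autoscaling policy client in this release, so :v1beta2 is passed explicitly, and all values are placeholders:

require "google/cloud/dataproc"

# Sketch only: construct the autoscaling policy client through the new factory.
autoscaling_client = Google::Cloud::Dataproc::AutoscalingPolicyService.new(
  version: :v1beta2,
  timeout: 30 # seconds; optional
)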
data/lib/google/cloud/dataproc/v1/cluster_controller_client.rb CHANGED
@@ -29,6 +29,7 @@ require "google/longrunning/operations_client"
 
  require "google/cloud/dataproc/v1/clusters_pb"
  require "google/cloud/dataproc/v1/credentials"
+ require "google/cloud/dataproc/version"
 
  module Google
  module Cloud
@@ -146,7 +147,7 @@ module Google
  updater_proc = credentials.updater_proc
  end
 
- package_version = Gem.loaded_specs['google-cloud-dataproc'].version.version
+ package_version = Google::Cloud::Dataproc::VERSION
 
  google_api_client = "gl-ruby/#{RUBY_VERSION}"
  google_api_client << " #{lib_name}/#{lib_version}" if lib_name
@@ -234,11 +235,10 @@ module Google
  # can also be provided.
  # @param request_id [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two
- # {Google::Cloud::Dataproc::V1::CreateClusterRequest CreateClusterRequest}
- # requests with the same id, then the second request will be ignored and the
- # first {Google::Longrunning::Operation} created
- # and stored in the backend is returned.
+ # receives two {Google::Cloud::Dataproc::V1::CreateClusterRequest CreateClusterRequest} requests with the same
+ # id, then the second request will be ignored and the
+ # first {Google::Longrunning::Operation} created and stored in the backend
+ # is returned.
  #
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -391,11 +391,10 @@ module Google
  # can also be provided.
  # @param request_id [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two
- # {Google::Cloud::Dataproc::V1::UpdateClusterRequest UpdateClusterRequest}
- # requests with the same id, then the second request will be ignored and the
- # first {Google::Longrunning::Operation} created
- # and stored in the backend is returned.
+ # receives two {Google::Cloud::Dataproc::V1::UpdateClusterRequest UpdateClusterRequest} requests with the same
+ # id, then the second request will be ignored and the
+ # first {Google::Longrunning::Operation} created and stored in the
+ # backend is returned.
  #
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -498,11 +497,10 @@ module Google
  # (with error NOT_FOUND) if cluster with specified UUID does not exist.
  # @param request_id [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two
- # {Google::Cloud::Dataproc::V1::DeleteClusterRequest DeleteClusterRequest}
- # requests with the same id, then the second request will be ignored and the
- # first {Google::Longrunning::Operation} created
- # and stored in the backend is returned.
+ # receives two {Google::Cloud::Dataproc::V1::DeleteClusterRequest DeleteClusterRequest} requests with the same
+ # id, then the second request will be ignored and the
+ # first {Google::Longrunning::Operation} created and stored in the
+ # backend is returned.
  #
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
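The reflowed comments above keep the same recommendation: send a client-generated UUID as request_id so a call can be retried safely. A hedged sketch against the v1 cluster controller; project, region, and cluster values are placeholders:

require "google/cloud/dataproc"
require "securerandom"

# Sketch only: retry-safe cluster creation with a UUID request_id.
cluster_controller = Google::Cloud::Dataproc::ClusterController.new(version: :v1)
cluster = { cluster_name: "example-cluster", config: {} } # placeholder cluster resource
operation = cluster_controller.create_cluster(
  "my-project-id",   # project_id (placeholder)
  "us-central1",     # region (placeholder)
  cluster,
  request_id: SecureRandom.uuid
)
operation.wait_until_done! # long-running operation helper from google-gax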
data/lib/google/cloud/dataproc/v1/clusters_pb.rb CHANGED
@@ -6,6 +6,7 @@ require 'google/protobuf'
 
  require 'google/api/annotations_pb'
  require 'google/cloud/dataproc/v1/operations_pb'
+ require 'google/cloud/dataproc/v1/shared_pb'
  require 'google/longrunning/operations_pb'
  require 'google/protobuf/duration_pb'
  require 'google/protobuf/field_mask_pb'
@@ -93,6 +94,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  add_message "google.cloud.dataproc.v1.SoftwareConfig" do
  optional :image_version, :string, 1
  map :properties, :string, :string, 2
+ repeated :optional_components, :enum, 3, "google.cloud.dataproc.v1.Component"
  end
  add_message "google.cloud.dataproc.v1.ClusterMetrics" do
  map :hdfs_metrics, :string, :int64, 1
data/lib/google/cloud/dataproc/v1/clusters_services_pb.rb CHANGED
@@ -1,7 +1,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # Source: google/cloud/dataproc/v1/clusters.proto for package 'google.cloud.dataproc.v1'
  # Original file comments:
- # Copyright 2018 Google LLC.
+ # Copyright 2019 Google LLC.
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
data/lib/google/cloud/dataproc/v1/doc/google/cloud/dataproc/v1/clusters.rb CHANGED
@@ -60,15 +60,18 @@ module Google
  # The cluster config.
  # @!attribute [rw] config_bucket
  # @return [String]
- # Optional. A Cloud Storage staging bucket used for sharing generated
- # SSH keys and config. If you do not specify a staging bucket, Cloud
- # Dataproc will determine an appropriate Cloud Storage location (US,
+ # Optional. A Google Cloud Storage bucket used to stage job
+ # dependencies, config files, and job driver console output.
+ # If you do not specify a staging bucket, Cloud
+ # Dataproc will determine a Cloud Storage location (US,
  # ASIA, or EU) for your cluster's staging bucket according to the Google
- # Compute Engine zone where your cluster is deployed, and then it will create
- # and manage this project-level, per-location bucket for you.
+ # Compute Engine zone where your cluster is deployed, and then create
+ # and manage this project-level, per-location bucket (see
+ # [Cloud Dataproc staging
+ # bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
  # @!attribute [rw] gce_cluster_config
  # @return [Google::Cloud::Dataproc::V1::GceClusterConfig]
- # Required. The shared Compute Engine config settings for
+ # Optional. The shared Compute Engine config settings for
  # all instances in a cluster.
  # @!attribute [rw] master_config
  # @return [Google::Cloud::Dataproc::V1::InstanceGroupConfig]
@@ -147,8 +150,8 @@ module Google
  #
  # A full URL, partial URI, or short name are valid. Examples:
  #
- # * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
- # * `projects/[project_id]/regions/us-east1/sub0`
+ # * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0`
+ # * `projects/[project_id]/regions/us-east1/subnetworks/sub0`
  # * `sub0`
  # @!attribute [rw] internal_ip_only
  # @return [true, false]
@@ -381,13 +384,13 @@ module Google
  # such as "1.2" (including a subminor version, such as "1.2.29"), or the
  # ["preview"
  # version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
- # If unspecified, it defaults to the latest version.
+ # If unspecified, it defaults to the latest Debian version.
  # @!attribute [rw] properties
  # @return [Hash{String => String}]
  # Optional. The properties to set on daemon config files.
  #
- # Property keys are specified in `prefix:property` format, such as
- # `core:fs.defaultFS`. The following are supported prefixes
+ # Property keys are specified in `prefix:property` format, for example
+ # `core:hadoop.tmp.dir`. The following are supported prefixes
  # and their mappings:
  #
  # * capacity-scheduler: `capacity-scheduler.xml`
@@ -402,6 +405,9 @@ module Google
  #
  # For more information, see
  # [Cluster properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
+ # @!attribute [rw] optional_components
+ # @return [Array<Google::Cloud::Dataproc::V1::Component>]
+ # The set of optional components to activate on the cluster.
  class SoftwareConfig; end
 
  # Contains cluster daemon metrics, such as HDFS and YARN stats.
@@ -430,11 +436,10 @@ module Google
  # @!attribute [rw] request_id
  # @return [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two
- # {Google::Cloud::Dataproc::V1::CreateClusterRequest CreateClusterRequest}
- # requests with the same id, then the second request will be ignored and the
- # first {Google::Longrunning::Operation} created
- # and stored in the backend is returned.
+ # receives two {Google::Cloud::Dataproc::V1::CreateClusterRequest CreateClusterRequest} requests with the same
+ # id, then the second request will be ignored and the
+ # first {Google::Longrunning::Operation} created and stored in the backend
+ # is returned.
  #
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -519,11 +524,10 @@ module Google
  # @!attribute [rw] request_id
  # @return [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two
- # {Google::Cloud::Dataproc::V1::UpdateClusterRequest UpdateClusterRequest}
- # requests with the same id, then the second request will be ignored and the
- # first {Google::Longrunning::Operation} created
- # and stored in the backend is returned.
+ # receives two {Google::Cloud::Dataproc::V1::UpdateClusterRequest UpdateClusterRequest} requests with the same
+ # id, then the second request will be ignored and the
+ # first {Google::Longrunning::Operation} created and stored in the
+ # backend is returned.
  #
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -550,11 +554,10 @@ module Google
  # @!attribute [rw] request_id
  # @return [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two
- # {Google::Cloud::Dataproc::V1::DeleteClusterRequest DeleteClusterRequest}
- # requests with the same id, then the second request will be ignored and the
- # first {Google::Longrunning::Operation} created
- # and stored in the backend is returned.
+ # receives two {Google::Cloud::Dataproc::V1::DeleteClusterRequest DeleteClusterRequest} requests with the same
+ # id, then the second request will be ignored and the
+ # first {Google::Longrunning::Operation} created and stored in the
+ # backend is returned.
  #
  # It is recommended to always set this value to a
  # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
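Several of the documented fields above come together in the cluster hash a caller passes to create_cluster. A hypothetical sketch; bucket, subnetwork, and property values are placeholders, and optional_components uses the enum introduced in this release:

# Illustrative only: a cluster hash matching the documented ClusterConfig fields.
cluster = {
  cluster_name: "example-cluster",
  config: {
    config_bucket: "my-staging-bucket", # optional; Cloud Dataproc creates one if omitted
    gce_cluster_config: {
      subnetwork_uri: "projects/my-project/regions/us-east1/subnetworks/sub0"
    },
    software_config: {
      image_version: "1.2",
      properties: { "core:hadoop.tmp.dir" => "/tmp/hadoop" },
      optional_components: [:JUPYTER, :ANACONDA] # new in 0.4.0
    }
  }
}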
data/lib/google/cloud/dataproc/v1/doc/google/cloud/dataproc/v1/jobs.rb CHANGED
@@ -386,11 +386,12 @@ module Google
  # belongs to.
  # @!attribute [rw] job_id
  # @return [String]
- # Optional. The job ID, which must be unique within the project. The job ID
- # is generated by the server upon job submission or provided by the user as a
- # means to perform retries without creating duplicate jobs. The ID must
- # contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
- # hyphens (-). The maximum length is 100 characters.
+ # Optional. The job ID, which must be unique within the project.
+ #
+ # The ID must contain only letters (a-z, A-Z), numbers (0-9),
+ # underscores (_), or hyphens (-). The maximum length is 100 characters.
+ #
+ # If not specified by the caller, the job ID will be provided by the server.
  class JobReference; end
 
  # A YARN application created by a job. Application information is a subset of
@@ -544,8 +545,8 @@ module Google
  # @!attribute [rw] request_id
  # @return [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two {Google::Cloud::Dataproc::V1::SubmitJobRequest SubmitJobRequest}
- # requests with the same id, then the second request will be ignored and the
+ # receives two {Google::Cloud::Dataproc::V1::SubmitJobRequest SubmitJobRequest} requests with the same
+ # id, then the second request will be ignored and the
  # first {Google::Cloud::Dataproc::V1::Job Job} created and stored in the backend
  # is returned.
  #
data/lib/google/cloud/dataproc/v1/doc/google/cloud/dataproc/v1/workflow_templates.rb CHANGED
@@ -136,8 +136,8 @@ module Google
  #
  # The step id is used as prefix for job id, as job
  # `goog-dataproc-workflow-step-id` label, and in
- # {Google::Cloud::Dataproc::V1::OrderedJob#prerequisite_step_ids prerequisiteStepIds}
- # field from other steps.
+ # {Google::Cloud::Dataproc::V1::OrderedJob#prerequisite_step_ids prerequisiteStepIds} field from other
+ # steps.
  #
  # The id must contain only letters (a-z, A-Z), numbers (0-9),
  # underscores (_), and hyphens (-). Cannot begin or end with underscore
@@ -205,10 +205,10 @@ module Google
  # A field is allowed to appear in at most one parameter's list of field
  # paths.
  #
- # A field path is similar in syntax to a
- # {Google::Protobuf::FieldMask}. For example, a
- # field path that references the zone field of a workflow template's cluster
- # selector would be specified as `placement.clusterSelector.zone`.
+ # A field path is similar in syntax to a {Google::Protobuf::FieldMask}.
+ # For example, a field path that references the zone field of a workflow
+ # template's cluster selector would be specified as
+ # `placement.clusterSelector.zone`.
  #
  # Also, field paths can reference fields using the following syntax:
  #
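The field-path wording above maps directly onto a template parameter hash. A hypothetical sketch; the parameter name and description are illustrative:

# Sketch only: a TemplateParameter whose single field path targets the
# cluster selector zone, as described above.
parameter = {
  name: "ZONE",
  fields: ["placement.clusterSelector.zone"],
  description: "Zone used by the workflow's cluster selector"
}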
data/lib/google/cloud/dataproc/v1/job_controller_client.rb CHANGED
@@ -27,6 +27,7 @@ require "google/gax"
 
  require "google/cloud/dataproc/v1/jobs_pb"
  require "google/cloud/dataproc/v1/credentials"
+ require "google/cloud/dataproc/version"
 
  module Google
  module Cloud
@@ -129,7 +130,7 @@ module Google
  updater_proc = credentials.updater_proc
  end
 
- package_version = Gem.loaded_specs['google-cloud-dataproc'].version.version
+ package_version = Google::Cloud::Dataproc::VERSION
 
  google_api_client = "gl-ruby/#{RUBY_VERSION}"
  google_api_client << " #{lib_name}/#{lib_version}" if lib_name
@@ -217,8 +218,8 @@ module Google
  # can also be provided.
  # @param request_id [String]
  # Optional. A unique id used to identify the request. If the server
- # receives two {Google::Cloud::Dataproc::V1::SubmitJobRequest SubmitJobRequest}
- # requests with the same id, then the second request will be ignored and the
+ # receives two {Google::Cloud::Dataproc::V1::SubmitJobRequest SubmitJobRequest} requests with the same
+ # id, then the second request will be ignored and the
  # first {Google::Cloud::Dataproc::V1::Job Job} created and stored in the backend
  # is returned.
  #
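As with clusters, the retry guidance above applies to job submission. A hedged sketch of submit_job with a caller-chosen job id and a UUID request_id; all identifiers are placeholders:

require "google/cloud/dataproc"
require "securerandom"

# Sketch only: submit a Hadoop job with a caller-supplied job id and a
# retry-safe UUID request_id.
job_controller = Google::Cloud::Dataproc::JobController.new(version: :v1)
job = {
  reference: { job_id: "word-count-#{SecureRandom.hex(4)}" },
  placement: { cluster_name: "example-cluster" },
  hadoop_job: { main_class: "org.apache.hadoop.examples.WordCount" }
}
submitted = job_controller.submit_job(
  "my-project-id", "us-central1", job,
  request_id: SecureRandom.uuid
)
puts submitted.status.state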
data/lib/google/cloud/dataproc/v1/jobs_services_pb.rb CHANGED
@@ -1,7 +1,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # Source: google/cloud/dataproc/v1/jobs.proto for package 'google.cloud.dataproc.v1'
  # Original file comments:
- # Copyright 2018 Google LLC.
+ # Copyright 2019 Google LLC.
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
data/lib/google/cloud/dataproc/v1/shared_pb.rb ADDED
@@ -0,0 +1,26 @@
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: google/cloud/dataproc/v1/shared.proto
+
+
+ require 'google/protobuf'
+
+ require 'google/api/annotations_pb'
+ Google::Protobuf::DescriptorPool.generated_pool.build do
+ add_enum "google.cloud.dataproc.v1.Component" do
+ value :COMPONENT_UNSPECIFIED, 0
+ value :ANACONDA, 5
+ value :HIVE_WEBHCAT, 3
+ value :JUPYTER, 1
+ value :ZEPPELIN, 4
+ end
+ end
+
+ module Google
+ module Cloud
+ module Dataproc
+ module V1
+ Component = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Component").enummodule
+ end
+ end
+ end
+ end
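The generated Component constant behaves like any google-protobuf enum module, so the usual symbol/number lookups apply. A small sketch:

require "google/cloud/dataproc/v1/shared_pb"

# Convert between enum symbols and wire numbers with the standard
# google-protobuf enum helpers.
Google::Cloud::Dataproc::V1::Component.resolve(:JUPYTER) #=> 1
Google::Cloud::Dataproc::V1::Component.lookup(5)         #=> :ANACONDA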
data/lib/google/cloud/dataproc/v1/workflow_template_service_client.rb CHANGED
@@ -29,6 +29,7 @@ require "google/longrunning/operations_client"
 
  require "google/cloud/dataproc/v1/workflow_templates_pb"
  require "google/cloud/dataproc/v1/credentials"
+ require "google/cloud/dataproc/version"
 
  module Google
  module Cloud
@@ -181,7 +182,7 @@ module Google
  updater_proc = credentials.updater_proc
  end
 
- package_version = Gem.loaded_specs['google-cloud-dataproc'].version.version
+ package_version = Google::Cloud::Dataproc::VERSION
 
  google_api_client = "gl-ruby/#{RUBY_VERSION}"
  google_api_client << " #{lib_name}/#{lib_version}" if lib_name
@@ -473,8 +474,7 @@ module Google
  # Instantiates a template and begins execution.
  #
  # This method is equivalent to executing the sequence
- # {Google::Cloud::Dataproc::V1::WorkflowTemplateService::CreateWorkflowTemplate CreateWorkflowTemplate},
- # {Google::Cloud::Dataproc::V1::WorkflowTemplateService::InstantiateWorkflowTemplate InstantiateWorkflowTemplate},
+ # {Google::Cloud::Dataproc::V1::WorkflowTemplateService::CreateWorkflowTemplate CreateWorkflowTemplate}, {Google::Cloud::Dataproc::V1::WorkflowTemplateService::InstantiateWorkflowTemplate InstantiateWorkflowTemplate},
  # {Google::Cloud::Dataproc::V1::WorkflowTemplateService::DeleteWorkflowTemplate DeleteWorkflowTemplate}.
  #
  # The returned Operation can be used to track execution of
data/lib/google/cloud/dataproc/v1/workflow_templates_services_pb.rb CHANGED
@@ -1,7 +1,7 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # Source: google/cloud/dataproc/v1/workflow_templates.proto for package 'google.cloud.dataproc.v1'
  # Original file comments:
- # Copyright 2018 Google LLC.
+ # Copyright 2019 Google LLC.
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
@@ -65,8 +65,7 @@ module Google
  # Instantiates a template and begins execution.
  #
  # This method is equivalent to executing the sequence
- # [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate],
- # [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+ # [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
  # [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
  #
  # The returned Operation can be used to track execution of
data/lib/google/cloud/dataproc/v1beta2.rb CHANGED
@@ -13,6 +13,7 @@
  # limitations under the License.
 
 
+ require "google/cloud/dataproc/v1beta2/autoscaling_policy_service_client"
  require "google/cloud/dataproc/v1beta2/cluster_controller_client"
  require "google/cloud/dataproc/v1beta2/job_controller_client"
  require "google/cloud/dataproc/v1beta2/workflow_template_service_client"
@@ -106,6 +107,63 @@ module Google
  module V1beta2
  # rubocop:enable LineLength
 
+ module AutoscalingPolicyService
+ ##
+ # The API interface for managing autoscaling policies in the
+ # Google Cloud Dataproc API.
+ #
+ # @param credentials [Google::Auth::Credentials, String, Hash, GRPC::Core::Channel, GRPC::Core::ChannelCredentials, Proc]
+ # Provides the means for authenticating requests made by the client. This parameter can
+ # be many types.
+ # A `Google::Auth::Credentials` uses a the properties of its represented keyfile for
+ # authenticating requests made by this client.
+ # A `String` will be treated as the path to the keyfile to be used for the construction of
+ # credentials for this client.
+ # A `Hash` will be treated as the contents of a keyfile to be used for the construction of
+ # credentials for this client.
+ # A `GRPC::Core::Channel` will be used to make calls through.
+ # A `GRPC::Core::ChannelCredentials` for the setting up the RPC client. The channel credentials
+ # should already be composed with a `GRPC::Core::CallCredentials` object.
+ # A `Proc` will be used as an updater_proc for the Grpc channel. The proc transforms the
+ # metadata for requests, generally, to give OAuth credentials.
+ # @param scopes [Array<String>]
+ # The OAuth scopes for this service. This parameter is ignored if
+ # an updater_proc is supplied.
+ # @param client_config [Hash]
+ # A Hash for call options for each method. See
+ # Google::Gax#construct_settings for the structure of
+ # this data. Falls back to the default config if not specified
+ # or the specified config is missing data points.
+ # @param timeout [Numeric]
+ # The default timeout, in seconds, for calls made through this client.
+ # @param metadata [Hash]
+ # Default metadata to be sent with each request. This can be overridden on a per call basis.
+ # @param exception_transformer [Proc]
+ # An optional proc that intercepts any exceptions raised during an API call to inject
+ # custom error handling.
+ def self.new \
+ credentials: nil,
+ scopes: nil,
+ client_config: nil,
+ timeout: nil,
+ metadata: nil,
+ exception_transformer: nil,
+ lib_name: nil,
+ lib_version: nil
+ kwargs = {
+ credentials: credentials,
+ scopes: scopes,
+ client_config: client_config,
+ timeout: timeout,
+ metadata: metadata,
+ exception_transformer: exception_transformer,
+ lib_name: lib_name,
+ lib_version: lib_version
+ }.select { |_, v| v != nil }
+ Google::Cloud::Dataproc::V1beta2::AutoscalingPolicyServiceClient.new(**kwargs)
+ end
+ end
+
  module ClusterController
  ##
  # The ClusterControllerService provides methods to manage clusters
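The v1beta2 factory above simply drops nil keyword arguments and delegates to AutoscalingPolicyServiceClient. A hypothetical end-to-end sketch; the list_autoscaling_policies call, its argument order, and the parent path format follow the usual GAPIC pattern for this service and should be checked against the generated client, and the project/region values are placeholders:

require "google/cloud/dataproc/v1beta2"

# Sketch only: construct the v1beta2 autoscaling policy client and list
# policies in a region.
client = Google::Cloud::Dataproc::V1beta2::AutoscalingPolicyService.new(timeout: 20)
parent = "projects/my-project-id/regions/us-central1"
client.list_autoscaling_policies(parent).each do |policy|
  puts policy.id
end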