google-cloud-dataproc-v1beta2 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. checksums.yaml +7 -0
  2. data/.yardopts +12 -0
  3. data/AUTHENTICATION.md +169 -0
  4. data/LICENSE.md +203 -0
  5. data/README.md +71 -0
  6. data/lib/google-cloud-dataproc-v1beta2.rb +21 -0
  7. data/lib/google/cloud/common_resources_pb.rb +15 -0
  8. data/lib/google/cloud/dataproc/v1beta2.rb +38 -0
  9. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policies_pb.rb +81 -0
  10. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policies_services_pb.rb +58 -0
  11. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policy_service.rb +50 -0
  12. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policy_service/client.rb +734 -0
  13. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policy_service/credentials.rb +51 -0
  14. data/lib/google/cloud/dataproc/v1beta2/autoscaling_policy_service/paths.rb +110 -0
  15. data/lib/google/cloud/dataproc/v1beta2/cluster_controller.rb +50 -0
  16. data/lib/google/cloud/dataproc/v1beta2/cluster_controller/client.rb +979 -0
  17. data/lib/google/cloud/dataproc/v1beta2/cluster_controller/credentials.rb +51 -0
  18. data/lib/google/cloud/dataproc/v1beta2/cluster_controller/operations.rb +564 -0
  19. data/lib/google/cloud/dataproc/v1beta2/clusters_pb.rb +255 -0
  20. data/lib/google/cloud/dataproc/v1beta2/clusters_services_pb.rb +68 -0
  21. data/lib/google/cloud/dataproc/v1beta2/job_controller.rb +49 -0
  22. data/lib/google/cloud/dataproc/v1beta2/job_controller/client.rb +980 -0
  23. data/lib/google/cloud/dataproc/v1beta2/job_controller/credentials.rb +51 -0
  24. data/lib/google/cloud/dataproc/v1beta2/job_controller/operations.rb +564 -0
  25. data/lib/google/cloud/dataproc/v1beta2/jobs_pb.rb +283 -0
  26. data/lib/google/cloud/dataproc/v1beta2/jobs_services_pb.rb +62 -0
  27. data/lib/google/cloud/dataproc/v1beta2/operations_pb.rb +45 -0
  28. data/lib/google/cloud/dataproc/v1beta2/shared_pb.rb +35 -0
  29. data/lib/google/cloud/dataproc/v1beta2/version.rb +28 -0
  30. data/lib/google/cloud/dataproc/v1beta2/workflow_template_service.rb +51 -0
  31. data/lib/google/cloud/dataproc/v1beta2/workflow_template_service/client.rb +1028 -0
  32. data/lib/google/cloud/dataproc/v1beta2/workflow_template_service/credentials.rb +51 -0
  33. data/lib/google/cloud/dataproc/v1beta2/workflow_template_service/operations.rb +564 -0
  34. data/lib/google/cloud/dataproc/v1beta2/workflow_template_service/paths.rb +110 -0
  35. data/lib/google/cloud/dataproc/v1beta2/workflow_templates_pb.rb +189 -0
  36. data/lib/google/cloud/dataproc/v1beta2/workflow_templates_services_pb.rb +104 -0
  37. data/proto_docs/README.md +4 -0
  38. data/proto_docs/google/api/field_behavior.rb +59 -0
  39. data/proto_docs/google/api/resource.rb +247 -0
  40. data/proto_docs/google/cloud/dataproc/v1beta2/autoscaling_policies.rb +272 -0
  41. data/proto_docs/google/cloud/dataproc/v1beta2/clusters.rb +1032 -0
  42. data/proto_docs/google/cloud/dataproc/v1beta2/jobs.rb +984 -0
  43. data/proto_docs/google/cloud/dataproc/v1beta2/operations.rb +98 -0
  44. data/proto_docs/google/cloud/dataproc/v1beta2/shared.rb +68 -0
  45. data/proto_docs/google/cloud/dataproc/v1beta2/workflow_templates.rb +706 -0
  46. data/proto_docs/google/longrunning/operations.rb +150 -0
  47. data/proto_docs/google/protobuf/any.rb +138 -0
  48. data/proto_docs/google/protobuf/duration.rb +98 -0
  49. data/proto_docs/google/protobuf/empty.rb +36 -0
  50. data/proto_docs/google/protobuf/field_mask.rb +229 -0
  51. data/proto_docs/google/protobuf/timestamp.rb +120 -0
  52. data/proto_docs/google/rpc/status.rb +46 -0
  53. metadata +205 -0
@@ -0,0 +1,51 @@
+ # frozen_string_literal: true
+
+ # Copyright 2020 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+ require "googleauth"
+
+ module Google
+   module Cloud
+     module Dataproc
+       module V1beta2
+         module AutoscalingPolicyService
+           # Credentials for the AutoscalingPolicyService API.
+           class Credentials < Google::Auth::Credentials
+             self.scope = [
+               "https://www.googleapis.com/auth/cloud-platform"
+             ]
+             self.env_vars = [
+               "DATAPROC_CREDENTIALS",
+               "DATAPROC_KEYFILE",
+               "GOOGLE_CLOUD_CREDENTIALS",
+               "GOOGLE_CLOUD_KEYFILE",
+               "GCLOUD_KEYFILE",
+               "DATAPROC_CREDENTIALS_JSON",
+               "DATAPROC_KEYFILE_JSON",
+               "GOOGLE_CLOUD_CREDENTIALS_JSON",
+               "GOOGLE_CLOUD_KEYFILE_JSON",
+               "GCLOUD_KEYFILE_JSON"
+             ]
+             self.paths = [
+               "~/.config/google_cloud/application_default_credentials.json"
+             ]
+           end
+         end
+       end
+     end
+   end
+ end
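
The Credentials class above (autoscaling_policy_service/credentials.rb) resolves a service account key from the listed environment variables or default paths before falling back to application default credentials. A minimal sketch of wiring it up; the keyfile path is a placeholder, and `Credentials.default` is inherited from the googleauth base class:

    # Sketch: resolve Dataproc credentials from an environment variable.
    # "/path/to/keyfile.json" is a placeholder path.
    require "google/cloud/dataproc/v1beta2/autoscaling_policy_service/credentials"

    ENV["DATAPROC_CREDENTIALS"] = "/path/to/keyfile.json"

    credentials = Google::Cloud::Dataproc::V1beta2::AutoscalingPolicyService::Credentials.default(
      scope: "https://www.googleapis.com/auth/cloud-platform"
    )
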
@@ -0,0 +1,110 @@
+ # frozen_string_literal: true
+
+ # Copyright 2020 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+   module Cloud
+     module Dataproc
+       module V1beta2
+         module AutoscalingPolicyService
+           # Path helper methods for the AutoscalingPolicyService API.
+           module Paths
+             ##
+             # Create a fully-qualified AutoscalingPolicy resource string.
+             #
+             # @overload autoscaling_policy_path(project:, location:, autoscaling_policy:)
+             #   The resource will be in the following format:
+             #
+             #   `projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}`
+             #
+             #   @param project [String]
+             #   @param location [String]
+             #   @param autoscaling_policy [String]
+             #
+             # @overload autoscaling_policy_path(project:, region:, autoscaling_policy:)
+             #   The resource will be in the following format:
+             #
+             #   `projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}`
+             #
+             #   @param project [String]
+             #   @param region [String]
+             #   @param autoscaling_policy [String]
+             #
+             # @return [String]
+             def autoscaling_policy_path **args
+               resources = {
+                 "autoscaling_policy:location:project" => (proc do |project:, location:, autoscaling_policy:|
+                   raise ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+                   raise ArgumentError, "location cannot contain /" if location.to_s.include? "/"
+
+                   "projects/#{project}/locations/#{location}/autoscalingPolicies/#{autoscaling_policy}"
+                 end),
+                 "autoscaling_policy:project:region" => (proc do |project:, region:, autoscaling_policy:|
+                   raise ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+                   raise ArgumentError, "region cannot contain /" if region.to_s.include? "/"
+
+                   "projects/#{project}/regions/#{region}/autoscalingPolicies/#{autoscaling_policy}"
+                 end)
+               }
+
+               resource = resources[args.keys.sort.join(":")]
+               raise ArgumentError, "no resource found for values #{args.keys}" if resource.nil?
+               resource.call(**args)
+             end
+
+             ##
+             # Create a fully-qualified Location resource string.
+             #
+             # The resource will be in the following format:
+             #
+             # `projects/{project}/locations/{location}`
+             #
+             # @param project [String]
+             # @param location [String]
+             #
+             # @return [String]
+             def location_path project:, location:
+               raise ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+
+               "projects/#{project}/locations/#{location}"
+             end
+
+             ##
+             # Create a fully-qualified Region resource string.
+             #
+             # The resource will be in the following format:
+             #
+             # `projects/{project}/regions/{region}`
+             #
+             # @param project [String]
+             # @param region [String]
+             #
+             # @return [String]
+             def region_path project:, region:
+               raise ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+
+               "projects/#{project}/regions/#{region}"
+             end
+
+             extend self
+           end
+         end
+       end
+     end
+   end
+ end
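
Because `Paths` ends with `extend self`, the helpers in paths.rb can be called directly on the module as well as from a client that includes it. A quick sketch with placeholder IDs:

    require "google/cloud/dataproc/v1beta2/autoscaling_policy_service/paths"

    paths = Google::Cloud::Dataproc::V1beta2::AutoscalingPolicyService::Paths

    # The keyword set {project, location, autoscaling_policy} selects the
    # locations-based pattern; {project, region, autoscaling_policy} selects
    # the regions-based pattern.
    paths.autoscaling_policy_path project: "my-project",
                                  location: "us-central1",
                                  autoscaling_policy: "my-policy"
    # => "projects/my-project/locations/us-central1/autoscalingPolicies/my-policy"

    paths.region_path project: "my-project", region: "us-central1"
    # => "projects/my-project/regions/us-central1"
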
@@ -0,0 +1,50 @@
+ # frozen_string_literal: true
+
+ # Copyright 2020 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+ require "gapic/common"
+ require "gapic/config"
+ require "gapic/config/method"
+
+ require "google/cloud/dataproc/v1beta2/version"
+
+ require "google/cloud/dataproc/v1beta2/cluster_controller/credentials"
+ require "google/cloud/dataproc/v1beta2/cluster_controller/operations"
+ require "google/cloud/dataproc/v1beta2/cluster_controller/client"
+
+ module Google
+   module Cloud
+     module Dataproc
+       module V1beta2
+         ##
+         # The ClusterControllerService provides methods to manage clusters
+         # of Compute Engine instances.
+         #
+         # To load this service and instantiate a client:
+         #
+         #     require "google/cloud/dataproc/v1beta2/cluster_controller"
+         #     client = Google::Cloud::Dataproc::V1beta2::ClusterController::Client.new
+         #
+         module ClusterController
+         end
+       end
+     end
+   end
+ end
+
+ helper_path = ::File.join __dir__, "cluster_controller", "helpers.rb"
+ require "google/cloud/dataproc/v1beta2/cluster_controller/helpers" if ::File.file? helper_path
@@ -0,0 +1,979 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2020 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+ require "google/cloud/errors"
20
+ require "google/cloud/dataproc/v1beta2/clusters_pb"
21
+
22
+ module Google
23
+ module Cloud
24
+ module Dataproc
25
+ module V1beta2
26
+ module ClusterController
27
+ ##
28
+ # Client for the ClusterController service.
29
+ #
30
+ # The ClusterControllerService provides methods to manage clusters
31
+ # of Compute Engine instances.
32
+ #
33
+ class Client
34
+ # @private
35
+ attr_reader :cluster_controller_stub
36
+
37
+ ##
38
+ # Configure the ClusterController Client class.
39
+ #
40
+ # See {Google::Cloud::Dataproc::V1beta2::ClusterController::Client::Configuration}
41
+ # for a description of the configuration fields.
42
+ #
43
+ # ## Example
44
+ #
45
+ # To modify the configuration for all ClusterController clients:
46
+ #
47
+ # Google::Cloud::Dataproc::V1beta2::ClusterController::Client.configure do |config|
48
+ # config.timeout = 10_000
49
+ # end
50
+ #
51
+ # @yield [config] Configure the Client client.
52
+ # @yieldparam config [Client::Configuration]
53
+ #
54
+ # @return [Client::Configuration]
55
+ #
56
+ def self.configure
57
+ @configure ||= begin
58
+ namespace = ["Google", "Cloud", "Dataproc", "V1beta2"]
59
+ parent_config = while namespace.any?
60
+ parent_name = namespace.join "::"
61
+ parent_const = const_get parent_name
62
+ break parent_const.configure if parent_const&.respond_to? :configure
63
+ namespace.pop
64
+ end
65
+ default_config = Client::Configuration.new parent_config
66
+
67
+ default_config.rpcs.create_cluster.timeout = 300.0
68
+ default_config.rpcs.create_cluster.retry_policy = {
69
+ initial_delay: 0.1,
70
+ max_delay: 60.0,
71
+ multiplier: 1.3,
72
+ retry_codes: ["UNAVAILABLE"]
73
+ }
74
+
75
+ default_config.rpcs.update_cluster.timeout = 300.0
76
+ default_config.rpcs.update_cluster.retry_policy = {
77
+ initial_delay: 0.1,
78
+ max_delay: 60.0,
79
+ multiplier: 1.3,
80
+ retry_codes: ["UNAVAILABLE"]
81
+ }
82
+
83
+ default_config.rpcs.delete_cluster.timeout = 300.0
84
+ default_config.rpcs.delete_cluster.retry_policy = {
85
+ initial_delay: 0.1,
86
+ max_delay: 60.0,
87
+ multiplier: 1.3,
88
+ retry_codes: ["UNAVAILABLE"]
89
+ }
90
+
91
+ default_config.rpcs.get_cluster.timeout = 300.0
92
+ default_config.rpcs.get_cluster.retry_policy = {
93
+ initial_delay: 0.1,
94
+ max_delay: 60.0,
95
+ multiplier: 1.3,
96
+ retry_codes: ["INTERNAL", "DEADLINE_EXCEEDED", "UNAVAILABLE"]
97
+ }
98
+
99
+ default_config.rpcs.list_clusters.timeout = 300.0
100
+ default_config.rpcs.list_clusters.retry_policy = {
101
+ initial_delay: 0.1,
102
+ max_delay: 60.0,
103
+ multiplier: 1.3,
104
+ retry_codes: ["INTERNAL", "DEADLINE_EXCEEDED", "UNAVAILABLE"]
105
+ }
106
+
107
+ default_config.rpcs.diagnose_cluster.timeout = 300.0
108
+ default_config.rpcs.diagnose_cluster.retry_policy = {
109
+ initial_delay: 0.1,
110
+ max_delay: 60.0,
111
+ multiplier: 1.3,
112
+ retry_codes: ["UNAVAILABLE"]
113
+ }
114
+
115
+ default_config
116
+ end
117
+ yield @configure if block_given?
118
+ @configure
119
+ end
120
+
121
+ ##
122
+ # Configure the ClusterController Client instance.
123
+ #
124
+ # The configuration is set to the derived mode, meaning that values can be changed,
125
+ # but structural changes (adding new fields, etc.) are not allowed. Structural changes
126
+ # should be made on {Client.configure}.
127
+ #
128
+ # See {Google::Cloud::Dataproc::V1beta2::ClusterController::Client::Configuration}
129
+ # for a description of the configuration fields.
130
+ #
131
+ # @yield [config] Configure the Client client.
132
+ # @yieldparam config [Client::Configuration]
133
+ #
134
+ # @return [Client::Configuration]
135
+ #
136
+ def configure
137
+ yield @config if block_given?
138
+ @config
139
+ end
140
+
141
+ ##
142
+ # Create a new ClusterController client object.
143
+ #
144
+ # ## Examples
145
+ #
146
+ # To create a new ClusterController client with the default
147
+ # configuration:
148
+ #
149
+ # client = Google::Cloud::Dataproc::V1beta2::ClusterController::Client.new
150
+ #
151
+ # To create a new ClusterController client with a custom
152
+ # configuration:
153
+ #
154
+ # client = Google::Cloud::Dataproc::V1beta2::ClusterController::Client.new do |config|
155
+ # config.timeout = 10_000
156
+ # end
157
+ #
158
+ # @yield [config] Configure the ClusterController client.
159
+ # @yieldparam config [Client::Configuration]
160
+ #
161
+ def initialize
162
+ # These require statements are intentionally placed here to initialize
163
+ # the gRPC module only when it's required.
164
+ # See https://github.com/googleapis/toolkit/issues/446
165
+ require "gapic/grpc"
166
+ require "google/cloud/dataproc/v1beta2/clusters_services_pb"
167
+
168
+ # Create the configuration object
169
+ @config = Configuration.new Client.configure
170
+
171
+ # Yield the configuration if needed
172
+ yield @config if block_given?
173
+
174
+ # Create credentials
175
+ credentials = @config.credentials
176
+ credentials ||= Credentials.default scope: @config.scope
177
+ if credentials.is_a?(String) || credentials.is_a?(Hash)
178
+ credentials = Credentials.new credentials, scope: @config.scope
179
+ end
180
+ @quota_project_id = credentials.respond_to?(:quota_project_id) ? credentials.quota_project_id : nil
181
+
182
+ @operations_client = Operations.new do |config|
183
+ config.credentials = credentials
184
+ config.endpoint = @config.endpoint
185
+ end
186
+
187
+ @cluster_controller_stub = Gapic::ServiceStub.new(
188
+ Google::Cloud::Dataproc::V1beta2::ClusterController::Stub,
189
+ credentials: credentials,
190
+ endpoint: @config.endpoint,
191
+ channel_args: @config.channel_args,
192
+ interceptors: @config.interceptors
193
+ )
194
+ end
195
+
196
+ ##
197
+ # Get the associated client for long-running operations.
198
+ #
199
+ # @return [Google::Cloud::Dataproc::V1beta2::ClusterController::Operations]
200
+ #
201
+ attr_reader :operations_client
202
+
203
+ # Service calls
204
+
205
+ ##
206
+ # Creates a cluster in a project. The returned
207
+ # {Google::Longrunning::Operation#metadata Operation.metadata} will be
208
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
209
+ #
210
+ # @overload create_cluster(request, options = nil)
211
+ # Pass arguments to `create_cluster` via a request object, either of type
212
+ # {Google::Cloud::Dataproc::V1beta2::CreateClusterRequest} or an equivalent Hash.
213
+ #
214
+ # @param request [Google::Cloud::Dataproc::V1beta2::CreateClusterRequest, Hash]
215
+ # A request object representing the call parameters. Required. To specify no
216
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
217
+ # @param options [Gapic::CallOptions, Hash]
218
+ # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
219
+ #
220
+ # @overload create_cluster(project_id: nil, region: nil, cluster: nil, request_id: nil)
221
+ # Pass arguments to `create_cluster` via keyword arguments. Note that at
222
+ # least one keyword argument is required. To specify no parameters, or to keep all
223
+ # the default parameter values, pass an empty Hash as a request object (see above).
224
+ #
225
+ # @param project_id [String]
226
+ # Required. The ID of the Google Cloud Platform project that the cluster
227
+ # belongs to.
228
+ # @param region [String]
229
+ # Required. The Dataproc region in which to handle the request.
230
+ # @param cluster [Google::Cloud::Dataproc::V1beta2::Cluster, Hash]
231
+ # Required. The cluster to create.
232
+ # @param request_id [String]
233
+ # Optional. A unique id used to identify the request. If the server
234
+ # receives two {Google::Cloud::Dataproc::V1beta2::CreateClusterRequest CreateClusterRequest} requests with the same
235
+ # id, then the second request will be ignored and the
236
+ # first {Google::Longrunning::Operation google.longrunning.Operation} created and stored in the backend
237
+ # is returned.
238
+ #
239
+ # It is recommended to always set this value to a
240
+ # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
241
+ #
242
+ # The id must contain only letters (a-z, A-Z), numbers (0-9),
243
+ # underscores (_), and hyphens (-). The maximum length is 40 characters.
244
+ #
245
+ # @yield [response, operation] Access the result along with the RPC operation
246
+ # @yieldparam response [Gapic::Operation]
247
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
248
+ #
249
+ # @return [Gapic::Operation]
250
+ #
251
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
252
+ #
253
+ def create_cluster request, options = nil
254
+ raise ArgumentError, "request must be provided" if request.nil?
255
+
256
+ request = Gapic::Protobuf.coerce request, to: Google::Cloud::Dataproc::V1beta2::CreateClusterRequest
257
+
258
+ # Converts hash and nil to an options object
259
+ options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
260
+
261
+ # Customize the options with defaults
262
+ metadata = @config.rpcs.create_cluster.metadata.to_h
263
+
264
+ # Set x-goog-api-client and x-goog-user-project headers
265
+ metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
266
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
267
+ gapic_version: ::Google::Cloud::Dataproc::V1beta2::VERSION
268
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
269
+
270
+ header_params = {
271
+ "project_id" => request.project_id,
272
+ "region" => request.region
273
+ }
274
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
275
+ metadata[:"x-goog-request-params"] ||= request_params_header
276
+
277
+ options.apply_defaults timeout: @config.rpcs.create_cluster.timeout,
278
+ metadata: metadata,
279
+ retry_policy: @config.rpcs.create_cluster.retry_policy
280
+ options.apply_defaults metadata: @config.metadata,
281
+ retry_policy: @config.retry_policy
282
+
283
+ @cluster_controller_stub.call_rpc :create_cluster, request, options: options do |response, operation|
284
+ response = Gapic::Operation.new response, @operations_client, options: options
285
+ yield response, operation if block_given?
286
+ return response
287
+ end
288
+ rescue GRPC::BadStatus => e
289
+ raise Google::Cloud::Error.from_error(e)
290
+ end
291
+
292
+ ##
293
+ # Updates a cluster in a project. The returned
294
+ # {Google::Longrunning::Operation#metadata Operation.metadata} will be
295
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
296
+ #
297
+ # @overload update_cluster(request, options = nil)
298
+ # Pass arguments to `update_cluster` via a request object, either of type
299
+ # {Google::Cloud::Dataproc::V1beta2::UpdateClusterRequest} or an equivalent Hash.
300
+ #
301
+ # @param request [Google::Cloud::Dataproc::V1beta2::UpdateClusterRequest, Hash]
302
+ # A request object representing the call parameters. Required. To specify no
303
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
304
+ # @param options [Gapic::CallOptions, Hash]
305
+ # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
306
+ #
307
+ # @overload update_cluster(project_id: nil, region: nil, cluster_name: nil, cluster: nil, graceful_decommission_timeout: nil, update_mask: nil, request_id: nil)
308
+ # Pass arguments to `update_cluster` via keyword arguments. Note that at
309
+ # least one keyword argument is required. To specify no parameters, or to keep all
310
+ # the default parameter values, pass an empty Hash as a request object (see above).
311
+ #
312
+ # @param project_id [String]
313
+ # Required. The ID of the Google Cloud Platform project the
314
+ # cluster belongs to.
315
+ # @param region [String]
316
+ # Required. The Dataproc region in which to handle the request.
317
+ # @param cluster_name [String]
318
+ # Required. The cluster name.
319
+ # @param cluster [Google::Cloud::Dataproc::V1beta2::Cluster, Hash]
320
+ # Required. The changes to the cluster.
321
+ # @param graceful_decommission_timeout [Google::Protobuf::Duration, Hash]
322
+ # Optional. Timeout for graceful YARN decommissioning. Graceful
323
+ # decommissioning allows removing nodes from the cluster without
324
+ # interrupting jobs in progress. Timeout specifies how long to wait for jobs
325
+ # in progress to finish before forcefully removing nodes (and potentially
326
+ # interrupting jobs). Default timeout is 0 (for forceful decommission), and
327
+ # the maximum allowed timeout is 1 day (see JSON representation of
328
+ # [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
329
+ #
330
+ # Only supported on Dataproc image versions 1.2 and higher.
331
+ # @param update_mask [Google::Protobuf::FieldMask, Hash]
332
+ # Required. Specifies the path, relative to `Cluster`, of
333
+ # the field to update. For example, to change the number of workers
334
+ # in a cluster to 5, the `update_mask` parameter would be
335
+ # specified as `config.worker_config.num_instances`,
336
+ # and the `PATCH` request body would specify the new value, as follows:
337
+ #
338
+ # {
339
+ # "config":{
340
+ # "workerConfig":{
341
+ # "numInstances":"5"
342
+ # }
343
+ # }
344
+ # }
345
+ #
346
+ # Similarly, to change the number of preemptible workers in a cluster to 5,
347
+ # the `update_mask` parameter would be
348
+ # `config.secondary_worker_config.num_instances`, and the `PATCH` request
349
+ # body would be set as follows:
350
+ #
351
+ # {
352
+ # "config":{
353
+ # "secondaryWorkerConfig":{
354
+ # "numInstances":"5"
355
+ # }
356
+ # }
357
+ # }
358
+ # <strong>Note:</strong> currently only the following fields can be updated:
359
+ #
360
+ # <table>
361
+ # <tr>
362
+ # <td><strong>Mask</strong></td><td><strong>Purpose</strong></td>
363
+ # </tr>
364
+ # <tr>
365
+ # <td>labels</td><td>Updates labels</td>
366
+ # </tr>
367
+ # <tr>
368
+ # <td>config.worker_config.num_instances</td><td>Resize primary worker
369
+ # group</td>
370
+ # </tr>
371
+ # <tr>
372
+ # <td>config.secondary_worker_config.num_instances</td><td>Resize secondary
373
+ # worker group</td>
374
+ # </tr>
375
+ # <tr>
376
+ # <td>config.lifecycle_config.auto_delete_ttl</td><td>Reset MAX TTL
377
+ # duration</td>
378
+ # </tr>
379
+ # <tr>
380
+ # <td>config.lifecycle_config.auto_delete_time</td><td>Update MAX TTL
381
+ # deletion timestamp</td>
382
+ # </tr>
383
+ # <tr>
384
+ # <td>config.lifecycle_config.idle_delete_ttl</td><td>Update Idle TTL
385
+ # duration</td>
386
+ # </tr>
387
+ # <tr>
388
+ # <td>config.autoscaling_config.policy_uri</td><td>Use, stop using, or change
389
+ # autoscaling policies</td>
390
+ # </tr>
391
+ # </table>
392
+ # @param request_id [String]
393
+ # Optional. A unique id used to identify the request. If the server
394
+ # receives two {Google::Cloud::Dataproc::V1beta2::UpdateClusterRequest UpdateClusterRequest} requests with the same
395
+ # id, then the second request will be ignored and the
396
+ # first {Google::Longrunning::Operation google.longrunning.Operation} created and stored in the
397
+ # backend is returned.
398
+ #
399
+ # It is recommended to always set this value to a
400
+ # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
401
+ #
402
+ # The id must contain only letters (a-z, A-Z), numbers (0-9),
403
+ # underscores (_), and hyphens (-). The maximum length is 40 characters.
404
+ #
405
+ # @yield [response, operation] Access the result along with the RPC operation
406
+ # @yieldparam response [Gapic::Operation]
407
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
408
+ #
409
+ # @return [Gapic::Operation]
410
+ #
411
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
412
+ #
413
+ def update_cluster request, options = nil
414
+ raise ArgumentError, "request must be provided" if request.nil?
415
+
416
+ request = Gapic::Protobuf.coerce request, to: Google::Cloud::Dataproc::V1beta2::UpdateClusterRequest
417
+
418
+ # Converts hash and nil to an options object
419
+ options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
420
+
421
+ # Customize the options with defaults
422
+ metadata = @config.rpcs.update_cluster.metadata.to_h
423
+
424
+ # Set x-goog-api-client and x-goog-user-project headers
425
+ metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
426
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
427
+ gapic_version: ::Google::Cloud::Dataproc::V1beta2::VERSION
428
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
429
+
430
+ header_params = {
431
+ "project_id" => request.project_id,
432
+ "region" => request.region,
433
+ "cluster_name" => request.cluster_name
434
+ }
435
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
436
+ metadata[:"x-goog-request-params"] ||= request_params_header
437
+
438
+ options.apply_defaults timeout: @config.rpcs.update_cluster.timeout,
439
+ metadata: metadata,
440
+ retry_policy: @config.rpcs.update_cluster.retry_policy
441
+ options.apply_defaults metadata: @config.metadata,
442
+ retry_policy: @config.retry_policy
443
+
444
+ @cluster_controller_stub.call_rpc :update_cluster, request, options: options do |response, operation|
445
+ response = Gapic::Operation.new response, @operations_client, options: options
446
+ yield response, operation if block_given?
447
+ return response
448
+ end
449
+ rescue GRPC::BadStatus => e
450
+ raise Google::Cloud::Error.from_error(e)
451
+ end
452
+
453
+ ##
454
+ # Deletes a cluster in a project. The returned
455
+ # {Google::Longrunning::Operation#metadata Operation.metadata} will be
456
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
457
+ #
458
+ # @overload delete_cluster(request, options = nil)
459
+ # Pass arguments to `delete_cluster` via a request object, either of type
460
+ # {Google::Cloud::Dataproc::V1beta2::DeleteClusterRequest} or an equivalent Hash.
461
+ #
462
+ # @param request [Google::Cloud::Dataproc::V1beta2::DeleteClusterRequest, Hash]
463
+ # A request object representing the call parameters. Required. To specify no
464
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
465
+ # @param options [Gapic::CallOptions, Hash]
466
+ # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
467
+ #
468
+ # @overload delete_cluster(project_id: nil, region: nil, cluster_name: nil, cluster_uuid: nil, request_id: nil)
469
+ # Pass arguments to `delete_cluster` via keyword arguments. Note that at
470
+ # least one keyword argument is required. To specify no parameters, or to keep all
471
+ # the default parameter values, pass an empty Hash as a request object (see above).
472
+ #
473
+ # @param project_id [String]
474
+ # Required. The ID of the Google Cloud Platform project that the cluster
475
+ # belongs to.
476
+ # @param region [String]
477
+ # Required. The Dataproc region in which to handle the request.
478
+ # @param cluster_name [String]
479
+ # Required. The cluster name.
480
+ # @param cluster_uuid [String]
481
+ # Optional. Specifying the `cluster_uuid` means the RPC should fail
482
+ # (with error NOT_FOUND) if cluster with specified UUID does not exist.
483
+ # @param request_id [String]
484
+ # Optional. A unique id used to identify the request. If the server
485
+ # receives two {Google::Cloud::Dataproc::V1beta2::DeleteClusterRequest DeleteClusterRequest} requests with the same
486
+ # id, then the second request will be ignored and the
487
+ # first {Google::Longrunning::Operation google.longrunning.Operation} created and stored in the
488
+ # backend is returned.
489
+ #
490
+ # It is recommended to always set this value to a
491
+ # [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
492
+ #
493
+ # The id must contain only letters (a-z, A-Z), numbers (0-9),
494
+ # underscores (_), and hyphens (-). The maximum length is 40 characters.
495
+ #
496
+ # @yield [response, operation] Access the result along with the RPC operation
497
+ # @yieldparam response [Gapic::Operation]
498
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
499
+ #
500
+ # @return [Gapic::Operation]
501
+ #
502
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
503
+ #
504
+ def delete_cluster request, options = nil
505
+ raise ArgumentError, "request must be provided" if request.nil?
506
+
507
+ request = Gapic::Protobuf.coerce request, to: Google::Cloud::Dataproc::V1beta2::DeleteClusterRequest
508
+
509
+ # Converts hash and nil to an options object
510
+ options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
511
+
512
+ # Customize the options with defaults
513
+ metadata = @config.rpcs.delete_cluster.metadata.to_h
514
+
515
+ # Set x-goog-api-client and x-goog-user-project headers
516
+ metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
517
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
518
+ gapic_version: ::Google::Cloud::Dataproc::V1beta2::VERSION
519
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
520
+
521
+ header_params = {
522
+ "project_id" => request.project_id,
523
+ "region" => request.region,
524
+ "cluster_name" => request.cluster_name
525
+ }
526
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
527
+ metadata[:"x-goog-request-params"] ||= request_params_header
528
+
529
+ options.apply_defaults timeout: @config.rpcs.delete_cluster.timeout,
530
+ metadata: metadata,
531
+ retry_policy: @config.rpcs.delete_cluster.retry_policy
532
+ options.apply_defaults metadata: @config.metadata,
533
+ retry_policy: @config.retry_policy
534
+
535
+ @cluster_controller_stub.call_rpc :delete_cluster, request, options: options do |response, operation|
536
+ response = Gapic::Operation.new response, @operations_client, options: options
537
+ yield response, operation if block_given?
538
+ return response
539
+ end
540
+ rescue GRPC::BadStatus => e
541
+ raise Google::Cloud::Error.from_error(e)
542
+ end
543
+
544
+ ##
545
+ # Gets the resource representation for a cluster in a project.
546
+ #
547
+ # @overload get_cluster(request, options = nil)
548
+ # Pass arguments to `get_cluster` via a request object, either of type
549
+ # {Google::Cloud::Dataproc::V1beta2::GetClusterRequest} or an equivalent Hash.
550
+ #
551
+ # @param request [Google::Cloud::Dataproc::V1beta2::GetClusterRequest, Hash]
552
+ # A request object representing the call parameters. Required. To specify no
553
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
554
+ # @param options [Gapic::CallOptions, Hash]
555
+ # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
556
+ #
557
+ # @overload get_cluster(project_id: nil, region: nil, cluster_name: nil)
558
+ # Pass arguments to `get_cluster` via keyword arguments. Note that at
559
+ # least one keyword argument is required. To specify no parameters, or to keep all
560
+ # the default parameter values, pass an empty Hash as a request object (see above).
561
+ #
562
+ # @param project_id [String]
563
+ # Required. The ID of the Google Cloud Platform project that the cluster
564
+ # belongs to.
565
+ # @param region [String]
566
+ # Required. The Dataproc region in which to handle the request.
567
+ # @param cluster_name [String]
568
+ # Required. The cluster name.
569
+ #
570
+ # @yield [response, operation] Access the result along with the RPC operation
571
+ # @yieldparam response [Google::Cloud::Dataproc::V1beta2::Cluster]
572
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
573
+ #
574
+ # @return [Google::Cloud::Dataproc::V1beta2::Cluster]
575
+ #
576
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
577
+ #
578
+ def get_cluster request, options = nil
579
+ raise ArgumentError, "request must be provided" if request.nil?
580
+
581
+ request = Gapic::Protobuf.coerce request, to: Google::Cloud::Dataproc::V1beta2::GetClusterRequest
582
+
583
+ # Converts hash and nil to an options object
584
+ options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
585
+
586
+ # Customize the options with defaults
587
+ metadata = @config.rpcs.get_cluster.metadata.to_h
588
+
589
+ # Set x-goog-api-client and x-goog-user-project headers
590
+ metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
591
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
592
+ gapic_version: ::Google::Cloud::Dataproc::V1beta2::VERSION
593
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
594
+
595
+ header_params = {
596
+ "project_id" => request.project_id,
597
+ "region" => request.region,
598
+ "cluster_name" => request.cluster_name
599
+ }
600
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
601
+ metadata[:"x-goog-request-params"] ||= request_params_header
602
+
603
+ options.apply_defaults timeout: @config.rpcs.get_cluster.timeout,
604
+ metadata: metadata,
605
+ retry_policy: @config.rpcs.get_cluster.retry_policy
606
+ options.apply_defaults metadata: @config.metadata,
607
+ retry_policy: @config.retry_policy
608
+
609
+ @cluster_controller_stub.call_rpc :get_cluster, request, options: options do |response, operation|
610
+ yield response, operation if block_given?
611
+ return response
612
+ end
613
+ rescue GRPC::BadStatus => e
614
+ raise Google::Cloud::Error.from_error(e)
615
+ end
616
+
617
+ ##
618
+ # Lists all regions/\\{region}/clusters in a project alphabetically.
619
+ #
620
+ # @overload list_clusters(request, options = nil)
621
+ # Pass arguments to `list_clusters` via a request object, either of type
622
+ # {Google::Cloud::Dataproc::V1beta2::ListClustersRequest} or an equivalent Hash.
623
+ #
624
+ # @param request [Google::Cloud::Dataproc::V1beta2::ListClustersRequest, Hash]
625
+ # A request object representing the call parameters. Required. To specify no
626
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
627
+ # @param options [Gapic::CallOptions, Hash]
628
+ # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
629
+ #
630
+ # @overload list_clusters(project_id: nil, region: nil, filter: nil, page_size: nil, page_token: nil)
631
+ # Pass arguments to `list_clusters` via keyword arguments. Note that at
632
+ # least one keyword argument is required. To specify no parameters, or to keep all
633
+ # the default parameter values, pass an empty Hash as a request object (see above).
634
+ #
635
+ # @param project_id [String]
636
+ # Required. The ID of the Google Cloud Platform project that the cluster
637
+ # belongs to.
638
+ # @param region [String]
639
+ # Required. The Dataproc region in which to handle the request.
640
+ # @param filter [String]
641
+ # Optional. A filter constraining the clusters to list. Filters are
642
+ # case-sensitive and have the following syntax:
643
+ #
644
+ # field = value [AND [field = value]] ...
645
+ #
646
+ # where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`,
647
+ # and `[KEY]` is a label key. **value** can be `*` to match all values.
648
+ # `status.state` can be one of the following: `ACTIVE`, `INACTIVE`,
649
+ # `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE`
650
+ # contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE`
651
+ # contains the `DELETING` and `ERROR` states.
652
+ # `clusterName` is the name of the cluster provided at creation time.
653
+ # Only the logical `AND` operator is supported; space-separated items are
654
+ # treated as having an implicit `AND` operator.
655
+ #
656
+ # Example filter:
657
+ #
658
+ # status.state = ACTIVE AND clusterName = mycluster
659
+ # AND labels.env = staging AND labels.starred = *
660
+ # @param page_size [Integer]
661
+ # Optional. The standard List page size.
662
+ # @param page_token [String]
663
+ # Optional. The standard List page token.
664
+ #
665
+ # @yield [response, operation] Access the result along with the RPC operation
666
+ # @yieldparam response [Gapic::PagedEnumerable<Google::Cloud::Dataproc::V1beta2::Cluster>]
667
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
668
+ #
669
+ # @return [Gapic::PagedEnumerable<Google::Cloud::Dataproc::V1beta2::Cluster>]
670
+ #
671
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
672
+ #
673
+ def list_clusters request, options = nil
674
+ raise ArgumentError, "request must be provided" if request.nil?
675
+
676
+ request = Gapic::Protobuf.coerce request, to: Google::Cloud::Dataproc::V1beta2::ListClustersRequest
677
+
678
+ # Converts hash and nil to an options object
679
+ options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
680
+
681
+ # Customize the options with defaults
682
+ metadata = @config.rpcs.list_clusters.metadata.to_h
683
+
684
+ # Set x-goog-api-client and x-goog-user-project headers
685
+ metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
686
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
687
+ gapic_version: ::Google::Cloud::Dataproc::V1beta2::VERSION
688
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
689
+
690
+ header_params = {
691
+ "project_id" => request.project_id,
692
+ "region" => request.region
693
+ }
694
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
695
+ metadata[:"x-goog-request-params"] ||= request_params_header
696
+
697
+ options.apply_defaults timeout: @config.rpcs.list_clusters.timeout,
698
+ metadata: metadata,
699
+ retry_policy: @config.rpcs.list_clusters.retry_policy
700
+ options.apply_defaults metadata: @config.metadata,
701
+ retry_policy: @config.retry_policy
702
+
703
+ @cluster_controller_stub.call_rpc :list_clusters, request, options: options do |response, operation|
704
+ response = Gapic::PagedEnumerable.new @cluster_controller_stub, :list_clusters, request, response, operation, options
705
+ yield response, operation if block_given?
706
+ return response
707
+ end
708
+ rescue GRPC::BadStatus => e
709
+ raise Google::Cloud::Error.from_error(e)
710
+ end
711
+
712
+ ##
713
+ # Gets cluster diagnostic information. The returned
714
+ # {Google::Longrunning::Operation#metadata Operation.metadata} will be
715
+ # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
716
+ # After the operation completes,
717
+ # {Google::Longrunning::Operation#response Operation.response}
718
+ # contains
719
+ # {Google::Protobuf::Empty Empty}.
720
+ #
721
+ # @overload diagnose_cluster(request, options = nil)
722
+ # Pass arguments to `diagnose_cluster` via a request object, either of type
723
+ # {Google::Cloud::Dataproc::V1beta2::DiagnoseClusterRequest} or an equivalent Hash.
724
+ #
725
+ # @param request [Google::Cloud::Dataproc::V1beta2::DiagnoseClusterRequest, Hash]
726
+ # A request object representing the call parameters. Required. To specify no
727
+ # parameters, or to keep all the default parameter values, pass an empty Hash.
728
+ # @param options [Gapic::CallOptions, Hash]
729
+ # Overrides the default settings for this call, e.g., timeout, retries, etc. Optional.
730
+ #
731
+ # @overload diagnose_cluster(project_id: nil, region: nil, cluster_name: nil)
732
+ # Pass arguments to `diagnose_cluster` via keyword arguments. Note that at
733
+ # least one keyword argument is required. To specify no parameters, or to keep all
734
+ # the default parameter values, pass an empty Hash as a request object (see above).
735
+ #
736
+ # @param project_id [String]
737
+ # Required. The ID of the Google Cloud Platform project that the cluster
738
+ # belongs to.
739
+ # @param region [String]
740
+ # Required. The Dataproc region in which to handle the request.
741
+ # @param cluster_name [String]
742
+ # Required. The cluster name.
743
+ #
744
+ # @yield [response, operation] Access the result along with the RPC operation
745
+ # @yieldparam response [Gapic::Operation]
746
+ # @yieldparam operation [GRPC::ActiveCall::Operation]
747
+ #
748
+ # @return [Gapic::Operation]
749
+ #
750
+ # @raise [Google::Cloud::Error] if the RPC is aborted.
751
+ #
752
+ def diagnose_cluster request, options = nil
753
+ raise ArgumentError, "request must be provided" if request.nil?
754
+
755
+ request = Gapic::Protobuf.coerce request, to: Google::Cloud::Dataproc::V1beta2::DiagnoseClusterRequest
756
+
757
+ # Converts hash and nil to an options object
758
+ options = Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
759
+
760
+ # Customize the options with defaults
761
+ metadata = @config.rpcs.diagnose_cluster.metadata.to_h
762
+
763
+ # Set x-goog-api-client and x-goog-user-project headers
764
+ metadata[:"x-goog-api-client"] ||= Gapic::Headers.x_goog_api_client \
765
+ lib_name: @config.lib_name, lib_version: @config.lib_version,
766
+ gapic_version: ::Google::Cloud::Dataproc::V1beta2::VERSION
767
+ metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
768
+
769
+ header_params = {
770
+ "project_id" => request.project_id,
771
+ "region" => request.region,
772
+ "cluster_name" => request.cluster_name
773
+ }
774
+ request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
775
+ metadata[:"x-goog-request-params"] ||= request_params_header
776
+
777
+ options.apply_defaults timeout: @config.rpcs.diagnose_cluster.timeout,
778
+ metadata: metadata,
779
+ retry_policy: @config.rpcs.diagnose_cluster.retry_policy
780
+ options.apply_defaults metadata: @config.metadata,
781
+ retry_policy: @config.retry_policy
782
+
783
+ @cluster_controller_stub.call_rpc :diagnose_cluster, request, options: options do |response, operation|
784
+ response = Gapic::Operation.new response, @operations_client, options: options
785
+ yield response, operation if block_given?
786
+ return response
787
+ end
788
+ rescue GRPC::BadStatus => e
789
+ raise Google::Cloud::Error.from_error(e)
790
+ end
791
+
792
+ ##
793
+ # Configuration class for the ClusterController API.
794
+ #
795
+ # This class represents the configuration for ClusterController,
796
+ # providing control over timeouts, retry behavior, logging, transport
797
+ # parameters, and other low-level controls. Certain parameters can also be
798
+ # applied individually to specific RPCs. See
799
+ # {Google::Cloud::Dataproc::V1beta2::ClusterController::Client::Configuration::Rpcs}
800
+ # for a list of RPCs that can be configured independently.
801
+ #
802
+ # Configuration can be applied globally to all clients, or to a single client
803
+ # on construction.
804
+ #
805
+ # # Examples
806
+ #
807
+ # To modify the global config, setting the timeout for create_cluster
808
+ # to 20 seconds, and all remaining timeouts to 10 seconds:
809
+ #
810
+ # Google::Cloud::Dataproc::V1beta2::ClusterController::Client.configure do |config|
811
+ # config.timeout = 10_000
812
+ # config.rpcs.create_cluster.timeout = 20_000
813
+ # end
814
+ #
815
+ # To apply the above configuration only to a new client:
816
+ #
817
+ # client = Google::Cloud::Dataproc::V1beta2::ClusterController::Client.new do |config|
818
+ # config.timeout = 10_000
819
+ # config.rpcs.create_cluster.timeout = 20_000
820
+ # end
821
+ #
822
+ # @!attribute [rw] endpoint
823
+ # The hostname or hostname:port of the service endpoint.
824
+ # Defaults to `"dataproc.googleapis.com"`.
825
+ # @return [String]
826
+ # @!attribute [rw] credentials
827
+ # Credentials to send with calls. You may provide any of the following types:
828
+ # * (`String`) The path to a service account key file in JSON format
829
+ # * (`Hash`) A service account key as a Hash
830
+ # * (`Google::Auth::Credentials`) A googleauth credentials object
831
+ # (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
832
+ # * (`Signet::OAuth2::Client`) A signet oauth2 client object
833
+ # (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
834
+ # * (`GRPC::Core::Channel`) a gRPC channel with included credentials
835
+ # * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
836
+ # * (`nil`) indicating no credentials
837
+ # @return [Object]
838
+ # @!attribute [rw] scope
839
+ # The OAuth scopes
840
+ # @return [Array<String>]
841
+ # @!attribute [rw] lib_name
842
+ # The library name as recorded in instrumentation and logging
843
+ # @return [String]
844
+ # @!attribute [rw] lib_version
845
+ # The library version as recorded in instrumentation and logging
846
+ # @return [String]
847
+ # @!attribute [rw] channel_args
848
+ # Extra parameters passed to the gRPC channel. Note: this is ignored if a
849
+ # `GRPC::Core::Channel` object is provided as the credential.
850
+ # @return [Hash]
851
+ # @!attribute [rw] interceptors
852
+ # An array of interceptors that are run before calls are executed.
853
+ # @return [Array<GRPC::ClientInterceptor>]
854
+ # @!attribute [rw] timeout
855
+ # The call timeout in milliseconds.
856
+ # @return [Numeric]
857
+ # @!attribute [rw] metadata
858
+ # Additional gRPC headers to be sent with the call.
859
+ # @return [Hash{Symbol=>String}]
860
+ # @!attribute [rw] retry_policy
861
+ # The retry policy. The value is a hash with the following keys:
862
+ # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
863
+ # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
864
+ # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
865
+ # * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
866
+ # trigger a retry.
867
+ # @return [Hash]
868
+ #
869
+ class Configuration
870
+ extend Gapic::Config
871
+
872
+ config_attr :endpoint, "dataproc.googleapis.com", String
873
+ config_attr :credentials, nil do |value|
874
+ allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
875
+ allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
876
+ allowed.any? { |klass| klass === value }
877
+ end
878
+ config_attr :scope, nil, String, Array, nil
879
+ config_attr :lib_name, nil, String, nil
880
+ config_attr :lib_version, nil, String, nil
881
+ config_attr(:channel_args, { "grpc.service_config_disable_resolution"=>1 }, Hash, nil)
882
+ config_attr :interceptors, nil, Array, nil
883
+ config_attr :timeout, nil, Numeric, nil
884
+ config_attr :metadata, nil, Hash, nil
885
+ config_attr :retry_policy, nil, Hash, Proc, nil
886
+
887
+ # @private
888
+ def initialize parent_config = nil
889
+ @parent_config = parent_config unless parent_config.nil?
890
+
891
+ yield self if block_given?
892
+ end
893
+
894
+ ##
895
+ # Configurations for individual RPCs
896
+ # @return [Rpcs]
897
+ #
898
+ def rpcs
899
+ @rpcs ||= begin
900
+ parent_rpcs = nil
901
+ parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
902
+ Rpcs.new parent_rpcs
903
+ end
904
+ end
905
+
906
+ ##
907
+ # Configuration RPC class for the ClusterController API.
908
+ #
909
+ # Includes fields providing the configuration for each RPC in this service.
910
+ # Each configuration object is of type `Gapic::Config::Method` and includes
911
+ # the following configuration fields:
912
+ #
913
+ # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
914
+ # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
915
+ # * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
916
+ # include the following keys:
917
+ # * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
918
+ # * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
919
+ # * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
920
+ # * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
921
+ # trigger a retry.
922
+ #
923
+ class Rpcs
924
+ ##
925
+ # RPC-specific configuration for `create_cluster`
926
+ # @return [Gapic::Config::Method]
927
+ #
928
+ attr_reader :create_cluster
929
+ ##
930
+ # RPC-specific configuration for `update_cluster`
931
+ # @return [Gapic::Config::Method]
932
+ #
933
+ attr_reader :update_cluster
934
+ ##
935
+ # RPC-specific configuration for `delete_cluster`
936
+ # @return [Gapic::Config::Method]
937
+ #
938
+ attr_reader :delete_cluster
939
+ ##
940
+ # RPC-specific configuration for `get_cluster`
941
+ # @return [Gapic::Config::Method]
942
+ #
943
+ attr_reader :get_cluster
944
+ ##
945
+ # RPC-specific configuration for `list_clusters`
946
+ # @return [Gapic::Config::Method]
947
+ #
948
+ attr_reader :list_clusters
949
+ ##
950
+ # RPC-specific configuration for `diagnose_cluster`
951
+ # @return [Gapic::Config::Method]
952
+ #
953
+ attr_reader :diagnose_cluster
954
+
955
+ # @private
956
+ def initialize parent_rpcs = nil
957
+ create_cluster_config = parent_rpcs&.create_cluster if parent_rpcs&.respond_to? :create_cluster
958
+ @create_cluster = Gapic::Config::Method.new create_cluster_config
959
+ update_cluster_config = parent_rpcs&.update_cluster if parent_rpcs&.respond_to? :update_cluster
960
+ @update_cluster = Gapic::Config::Method.new update_cluster_config
961
+ delete_cluster_config = parent_rpcs&.delete_cluster if parent_rpcs&.respond_to? :delete_cluster
962
+ @delete_cluster = Gapic::Config::Method.new delete_cluster_config
963
+ get_cluster_config = parent_rpcs&.get_cluster if parent_rpcs&.respond_to? :get_cluster
964
+ @get_cluster = Gapic::Config::Method.new get_cluster_config
965
+ list_clusters_config = parent_rpcs&.list_clusters if parent_rpcs&.respond_to? :list_clusters
966
+ @list_clusters = Gapic::Config::Method.new list_clusters_config
967
+ diagnose_cluster_config = parent_rpcs&.diagnose_cluster if parent_rpcs&.respond_to? :diagnose_cluster
968
+ @diagnose_cluster = Gapic::Config::Method.new diagnose_cluster_config
969
+
970
+ yield self if block_given?
971
+ end
972
+ end
973
+ end
974
+ end
975
+ end
976
+ end
977
+ end
978
+ end
979
+ end
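
To tie the pieces of client.rb together, here is a hedged sketch of creating a cluster and waiting on the returned long-running operation. The project, region, and cluster values are placeholders; the nested hash follows the cluster fields defined in clusters_pb.rb, and `wait_until_done!`, `error?`, and `response` come from `Gapic::Operation` in gapic-common.

    require "google/cloud/dataproc/v1beta2/cluster_controller"

    client = Google::Cloud::Dataproc::V1beta2::ClusterController::Client.new

    # create_cluster returns a Gapic::Operation wrapping the long-running operation.
    operation = client.create_cluster project_id: "my-project",
                                      region: "us-central1",
                                      cluster: {
                                        cluster_name: "my-cluster",
                                        config: {
                                          gce_cluster_config: { zone_uri: "us-central1-a" }
                                        }
                                      }

    # Block until the backend marks the operation done, then read the result.
    operation.wait_until_done!
    raise operation.error.message if operation.error?
    cluster = operation.response  # expected to be a Google::Cloud::Dataproc::V1beta2::Cluster
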