google-cloud-dataproc-v1 0.6.3 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. checksums.yaml +4 -4
  2. data/.yardopts +1 -1
  3. data/AUTHENTICATION.md +7 -25
  4. data/README.md +1 -1
  5. data/lib/google/cloud/dataproc/v1/autoscaling_policies_pb.rb +3 -2
  6. data/lib/google/cloud/dataproc/v1/autoscaling_policies_services_pb.rb +1 -1
  7. data/lib/google/cloud/dataproc/v1/autoscaling_policy_service/client.rb +106 -15
  8. data/lib/google/cloud/dataproc/v1/batch_controller/client.rb +719 -0
  9. data/lib/google/cloud/dataproc/v1/batch_controller/credentials.rb +51 -0
  10. data/lib/google/cloud/dataproc/v1/batch_controller/operations.rb +767 -0
  11. data/lib/google/cloud/dataproc/v1/batch_controller/paths.rb +69 -0
  12. data/lib/google/cloud/dataproc/v1/batch_controller.rb +50 -0
  13. data/lib/google/cloud/dataproc/v1/batches_pb.rb +123 -0
  14. data/lib/google/cloud/dataproc/v1/batches_services_pb.rb +52 -0
  15. data/lib/google/cloud/dataproc/v1/cluster_controller/client.rb +266 -49
  16. data/lib/google/cloud/dataproc/v1/cluster_controller/operations.rb +115 -12
  17. data/lib/google/cloud/dataproc/v1/cluster_controller/paths.rb +0 -19
  18. data/lib/google/cloud/dataproc/v1/clusters_pb.rb +23 -10
  19. data/lib/google/cloud/dataproc/v1/clusters_services_pb.rb +3 -1
  20. data/lib/google/cloud/dataproc/v1/job_controller/client.rb +187 -32
  21. data/lib/google/cloud/dataproc/v1/job_controller/operations.rb +115 -12
  22. data/lib/google/cloud/dataproc/v1/jobs_pb.rb +2 -2
  23. data/lib/google/cloud/dataproc/v1/jobs_services_pb.rb +1 -1
  24. data/lib/google/cloud/dataproc/v1/operations_pb.rb +18 -3
  25. data/lib/google/cloud/dataproc/v1/shared_pb.rb +99 -2
  26. data/lib/google/cloud/dataproc/v1/version.rb +1 -1
  27. data/lib/google/cloud/dataproc/v1/workflow_template_service/client.rb +163 -24
  28. data/lib/google/cloud/dataproc/v1/workflow_template_service/operations.rb +115 -12
  29. data/lib/google/cloud/dataproc/v1/workflow_template_service/paths.rb +0 -19
  30. data/lib/google/cloud/dataproc/v1/workflow_templates_pb.rb +2 -2
  31. data/lib/google/cloud/dataproc/v1/workflow_templates_services_pb.rb +2 -3
  32. data/lib/google/cloud/dataproc/v1.rb +1 -0
  33. data/proto_docs/google/api/resource.rb +10 -71
  34. data/proto_docs/google/cloud/dataproc/v1/autoscaling_policies.rb +18 -0
  35. data/proto_docs/google/cloud/dataproc/v1/batches.rb +339 -0
  36. data/proto_docs/google/cloud/dataproc/v1/clusters.rb +117 -47
  37. data/proto_docs/google/cloud/dataproc/v1/jobs.rb +20 -9
  38. data/proto_docs/google/cloud/dataproc/v1/operations.rb +48 -0
  39. data/proto_docs/google/cloud/dataproc/v1/shared.rb +336 -1
  40. data/proto_docs/google/cloud/dataproc/v1/workflow_templates.rb +12 -15
  41. metadata +11 -3
@@ -21,8 +21,330 @@ module Google
21
21
  module Cloud
22
22
  module Dataproc
23
23
  module V1
24
+ # Runtime configuration for a workload.
25
+ # @!attribute [rw] version
26
+ # @return [::String]
27
+ # Optional. Version of the batch runtime.
28
+ # @!attribute [rw] container_image
29
+ # @return [::String]
30
+ # Optional. Custom container image for the job runtime environment. If
31
+ # not specified, a default container image will be used.
32
+ # @!attribute [rw] properties
33
+ # @return [::Google::Protobuf::Map{::String => ::String}]
34
+ # Optional. A mapping of property names to values, which are used to configure workload
35
+ # execution.
36
+ class RuntimeConfig
37
+ include ::Google::Protobuf::MessageExts
38
+ extend ::Google::Protobuf::MessageExts::ClassMethods
39
+
40
+ # @!attribute [rw] key
41
+ # @return [::String]
42
+ # @!attribute [rw] value
43
+ # @return [::String]
44
+ class PropertiesEntry
45
+ include ::Google::Protobuf::MessageExts
46
+ extend ::Google::Protobuf::MessageExts::ClassMethods
47
+ end
48
+ end
49
+
50
+ # Environment configuration for a workload.
51
+ # @!attribute [rw] execution_config
52
+ # @return [::Google::Cloud::Dataproc::V1::ExecutionConfig]
53
+ # Optional. Execution configuration for a workload.
54
+ # @!attribute [rw] peripherals_config
55
+ # @return [::Google::Cloud::Dataproc::V1::PeripheralsConfig]
56
+ # Optional. Peripherals configuration that workload has access to.
57
+ class EnvironmentConfig
58
+ include ::Google::Protobuf::MessageExts
59
+ extend ::Google::Protobuf::MessageExts::ClassMethods
60
+ end
61
+
62
+ # Execution configuration for a workload.
63
+ # @!attribute [rw] service_account
64
+ # @return [::String]
65
+ # Optional. Service account that is used to execute the workload.
66
+ # @!attribute [rw] network_uri
67
+ # @return [::String]
68
+ # Optional. Network URI to connect workload to.
69
+ # @!attribute [rw] subnetwork_uri
70
+ # @return [::String]
71
+ # Optional. Subnetwork URI to connect workload to.
72
+ # @!attribute [rw] network_tags
73
+ # @return [::Array<::String>]
74
+ # Optional. Tags used for network traffic control.
75
+ # @!attribute [rw] kms_key
76
+ # @return [::String]
77
+ # Optional. The Cloud KMS key to use for encryption.
78
+ class ExecutionConfig
79
+ include ::Google::Protobuf::MessageExts
80
+ extend ::Google::Protobuf::MessageExts::ClassMethods
81
+ end
82
+
83
+ # Spark History Server configuration for the workload.
84
+ # @!attribute [rw] dataproc_cluster
85
+ # @return [::String]
86
+ # Optional. Resource name of an existing Dataproc Cluster to act as a Spark History
87
+ # Server for the workload.
88
+ #
89
+ # Example:
90
+ #
91
+ # * `projects/[project_id]/regions/[region]/clusters/[cluster_name]`
92
+ class SparkHistoryServerConfig
93
+ include ::Google::Protobuf::MessageExts
94
+ extend ::Google::Protobuf::MessageExts::ClassMethods
95
+ end
96
+
97
+ # Auxiliary services configuration for a workload.
98
+ # @!attribute [rw] metastore_service
99
+ # @return [::String]
100
+ # Optional. Resource name of an existing Dataproc Metastore service.
101
+ #
102
+ # Example:
103
+ #
104
+ # * `projects/[project_id]/locations/[region]/services/[service_id]`
105
+ # @!attribute [rw] spark_history_server_config
106
+ # @return [::Google::Cloud::Dataproc::V1::SparkHistoryServerConfig]
107
+ # Optional. The Spark History Server configuration for the workload.
108
+ class PeripheralsConfig
109
+ include ::Google::Protobuf::MessageExts
110
+ extend ::Google::Protobuf::MessageExts::ClassMethods
111
+ end
112
+
113
+ # Runtime information about workload execution.
114
+ # @!attribute [r] endpoints
115
+ # @return [::Google::Protobuf::Map{::String => ::String}]
116
+ # Output only. Map of remote access endpoints (such as web interfaces and APIs) to their
117
+ # URIs.
118
+ # @!attribute [r] output_uri
119
+ # @return [::String]
120
+ # Output only. A URI pointing to the location of the stdout and stderr of the workload.
121
+ # @!attribute [r] diagnostic_output_uri
122
+ # @return [::String]
123
+ # Output only. A URI pointing to the location of the diagnostics tarball.
124
+ class RuntimeInfo
125
+ include ::Google::Protobuf::MessageExts
126
+ extend ::Google::Protobuf::MessageExts::ClassMethods
127
+
128
+ # @!attribute [rw] key
129
+ # @return [::String]
130
+ # @!attribute [rw] value
131
+ # @return [::String]
132
+ class EndpointsEntry
133
+ include ::Google::Protobuf::MessageExts
134
+ extend ::Google::Protobuf::MessageExts::ClassMethods
135
+ end
136
+ end
137
+
138
+ # The cluster's GKE config.
139
+ # @!attribute [rw] gke_cluster_target
140
+ # @return [::String]
141
+ # Optional. A target GKE cluster to deploy to. It must be in the same project and
142
+ # region as the Dataproc cluster (the GKE cluster can be zonal or regional).
143
+ # Format: 'projects/\\{project}/locations/\\{location}/clusters/\\{cluster_id}'
144
+ # @!attribute [rw] node_pool_target
145
+ # @return [::Array<::Google::Cloud::Dataproc::V1::GkeNodePoolTarget>]
146
+ # Optional. GKE NodePools where workloads will be scheduled. At least one node pool
147
+ # must be assigned the 'default' role. Each role can be given to only a
148
+ # single NodePoolTarget. All NodePools must have the same location settings.
149
+ # If a nodePoolTarget is not specified, Dataproc constructs a default
150
+ # nodePoolTarget.
151
+ class GkeClusterConfig
152
+ include ::Google::Protobuf::MessageExts
153
+ extend ::Google::Protobuf::MessageExts::ClassMethods
154
+ end
155
+
156
+ # The configuration for running the Dataproc cluster on Kubernetes.
157
+ # @!attribute [rw] kubernetes_namespace
158
+ # @return [::String]
159
+ # Optional. A namespace within the Kubernetes cluster to deploy into. If this namespace
160
+ # does not exist, it is created. If it exists, Dataproc
161
+ # verifies that another Dataproc VirtualCluster is not installed
162
+ # into it. If not specified, the name of the Dataproc Cluster is used.
163
+ # @!attribute [rw] gke_cluster_config
164
+ # @return [::Google::Cloud::Dataproc::V1::GkeClusterConfig]
165
+ # Required. The configuration for running the Dataproc cluster on GKE.
166
+ # @!attribute [rw] kubernetes_software_config
167
+ # @return [::Google::Cloud::Dataproc::V1::KubernetesSoftwareConfig]
168
+ # Optional. The software configuration for this Dataproc cluster running on Kubernetes.
169
+ class KubernetesClusterConfig
170
+ include ::Google::Protobuf::MessageExts
171
+ extend ::Google::Protobuf::MessageExts::ClassMethods
172
+ end
173
+
174
+ # The software configuration for this Dataproc cluster running on Kubernetes.
175
+ # @!attribute [rw] component_version
176
+ # @return [::Google::Protobuf::Map{::String => ::String}]
177
+ # The components that should be installed in this Dataproc cluster. The key
178
+ # must be a string from the KubernetesComponent enumeration. The value is
179
+ # the version of the software to be installed.
180
+ # At least one entry must be specified.
181
+ # @!attribute [rw] properties
182
+ # @return [::Google::Protobuf::Map{::String => ::String}]
183
+ # The properties to set on daemon config files.
184
+ #
185
+ # Property keys are specified in `prefix:property` format, for example
186
+ # `spark:spark.kubernetes.container.image`. The following are supported
187
+ # prefixes and their mappings:
188
+ #
189
+ # * spark: `spark-defaults.conf`
190
+ #
191
+ # For more information, see [Cluster
192
+ # properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
193
+ class KubernetesSoftwareConfig
194
+ include ::Google::Protobuf::MessageExts
195
+ extend ::Google::Protobuf::MessageExts::ClassMethods
196
+
197
+ # @!attribute [rw] key
198
+ # @return [::String]
199
+ # @!attribute [rw] value
200
+ # @return [::String]
201
+ class ComponentVersionEntry
202
+ include ::Google::Protobuf::MessageExts
203
+ extend ::Google::Protobuf::MessageExts::ClassMethods
204
+ end
205
+
206
+ # @!attribute [rw] key
207
+ # @return [::String]
208
+ # @!attribute [rw] value
209
+ # @return [::String]
210
+ class PropertiesEntry
211
+ include ::Google::Protobuf::MessageExts
212
+ extend ::Google::Protobuf::MessageExts::ClassMethods
213
+ end
214
+ end
215
+
216
+ # GKE NodePools that Dataproc workloads run on.
217
+ # @!attribute [rw] node_pool
218
+ # @return [::String]
219
+ # Required. The target GKE NodePool.
220
+ # Format:
221
+ # 'projects/\\{project}/locations/\\{location}/clusters/\\{cluster}/nodePools/\\{node_pool}'
222
+ # @!attribute [rw] roles
223
+ # @return [::Array<::Google::Cloud::Dataproc::V1::GkeNodePoolTarget::Role>]
224
+ # Required. The types of role for a GKE NodePool.
225
+ # @!attribute [rw] node_pool_config
226
+ # @return [::Google::Cloud::Dataproc::V1::GkeNodePoolConfig]
227
+ # Optional. The configuration for the GKE NodePool.
228
+ #
229
+ # If specified, Dataproc attempts to create a NodePool with the
230
+ # specified shape. If one with the same name already exists, it is
231
+ # verified against all specified fields. If a field differs, the
232
+ # virtual cluster creation will fail.
233
+ #
234
+ # If omitted, any NodePool with the specified name is used. If a
235
+ # NodePool with the specified name does not exist, Dataproc creates a NodePool
236
+ # with default values.
237
+ class GkeNodePoolTarget
238
+ include ::Google::Protobuf::MessageExts
239
+ extend ::Google::Protobuf::MessageExts::ClassMethods
240
+
241
+ # `Role` specifies whose tasks will run on the NodePool. The roles can be
242
+ # specific to workloads. Exactly one GkeNodePoolTarget within the
243
+ # VirtualCluster must have 'default' role, which is used to run all workloads
244
+ # that are not associated with a NodePool.
245
+ module Role
246
+ # Role is unspecified.
247
+ ROLE_UNSPECIFIED = 0
248
+
249
+ # Any roles that are not directly assigned to a NodePool run on the
250
+ # `default` role's NodePool.
251
+ DEFAULT = 1
252
+
253
+ # Run controllers and webhooks.
254
+ CONTROLLER = 2
255
+
256
+ # Run spark driver.
257
+ SPARK_DRIVER = 3
258
+
259
+ # Run spark executors.
260
+ SPARK_EXECUTOR = 4
261
+ end
262
+ end
263
+
264
+ # The configuration of a GKE NodePool used by a [Dataproc-on-GKE
265
+ # cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster).
266
+ # @!attribute [rw] config
267
+ # @return [::Google::Cloud::Dataproc::V1::GkeNodePoolConfig::GkeNodeConfig]
268
+ # Optional. The node pool configuration.
269
+ # @!attribute [rw] locations
270
+ # @return [::Array<::String>]
271
+ # Optional. The list of Compute Engine
272
+ # [zones](https://cloud.google.com/compute/docs/zones#available) where
273
+ # NodePool's nodes will be located.
274
+ #
275
+ # **Note:** Currently, only one zone may be specified.
276
+ #
277
+ # If a location is not specified during NodePool creation, Dataproc will
278
+ # choose a location.
279
+ # @!attribute [rw] autoscaling
280
+ # @return [::Google::Cloud::Dataproc::V1::GkeNodePoolConfig::GkeNodePoolAutoscalingConfig]
281
+ # Optional. The autoscaler configuration for this NodePool. The autoscaler is enabled
282
+ # only when a valid configuration is present.
283
+ class GkeNodePoolConfig
284
+ include ::Google::Protobuf::MessageExts
285
+ extend ::Google::Protobuf::MessageExts::ClassMethods
286
+
287
+ # Parameters that describe cluster nodes.
288
+ # @!attribute [rw] machine_type
289
+ # @return [::String]
290
+ # Optional. The name of a Compute Engine [machine
291
+ # type](https://cloud.google.com/compute/docs/machine-types).
292
+ # @!attribute [rw] preemptible
293
+ # @return [::Boolean]
294
+ # Optional. Whether the nodes are created as [preemptible VM
295
+ # instances](https://cloud.google.com/compute/docs/instances/preemptible).
296
+ # @!attribute [rw] local_ssd_count
297
+ # @return [::Integer]
298
+ # Optional. The number of local SSD disks to attach to the node, which is limited by
299
+ # the maximum number of disks allowable per zone (see [Adding Local
300
+ # SSDs](https://cloud.google.com/compute/docs/disks/local-ssd)).
301
+ # @!attribute [rw] accelerators
302
+ # @return [::Array<::Google::Cloud::Dataproc::V1::GkeNodePoolConfig::GkeNodePoolAcceleratorConfig>]
303
+ # Optional. A list of [hardware
304
+ # accelerators](https://cloud.google.com/compute/docs/gpus) to attach to
305
+ # each node.
306
+ # @!attribute [rw] min_cpu_platform
307
+ # @return [::String]
308
+ # Optional. [Minimum CPU
309
+ # platform](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
310
+ # to be used by this instance. The instance may be scheduled on the
311
+ # specified or a newer CPU platform. Specify the friendly names of CPU
312
+ # platforms, such as "Intel Haswell" or "Intel Sandy Bridge".
313
+ class GkeNodeConfig
314
+ include ::Google::Protobuf::MessageExts
315
+ extend ::Google::Protobuf::MessageExts::ClassMethods
316
+ end
317
+
318
+ # A GkeNodePoolAcceleratorConfig represents a Hardware Accelerator request
319
+ # for a NodePool.
320
+ # @!attribute [rw] accelerator_count
321
+ # @return [::Integer]
322
+ # The number of accelerator cards exposed to an instance.
323
+ # @!attribute [rw] accelerator_type
324
+ # @return [::String]
325
+ # The accelerator type resource name (see GPUs on Compute Engine).
326
+ class GkeNodePoolAcceleratorConfig
327
+ include ::Google::Protobuf::MessageExts
328
+ extend ::Google::Protobuf::MessageExts::ClassMethods
329
+ end
330
+
331
+ # GkeNodePoolAutoscalingConfig contains information that the cluster autoscaler needs to
332
+ # adjust the size of the node pool to the current cluster usage.
333
+ # @!attribute [rw] min_node_count
334
+ # @return [::Integer]
335
+ # The minimum number of nodes in the NodePool. Must be >= 0 and <=
336
+ # max_node_count.
337
+ # @!attribute [rw] max_node_count
338
+ # @return [::Integer]
339
+ # The maximum number of nodes in the NodePool. Must be >= min_node_count.
340
+ # **Note:** Quota must be sufficient to scale up the cluster.
341
+ class GkeNodePoolAutoscalingConfig
342
+ include ::Google::Protobuf::MessageExts
343
+ extend ::Google::Protobuf::MessageExts::ClassMethods
344
+ end
345
+ end
346
+
24
347
  # Cluster components that can be activated.
25
- # Next ID: 16.
26
348
  module Component
27
349
  # Unspecified component. Specifying this will cause Cluster creation to fail.
28
350
  COMPONENT_UNSPECIFIED = 0
@@ -67,6 +389,19 @@ module Google
67
389
  # The Zookeeper service.
68
390
  ZOOKEEPER = 8
69
391
  end
392
+
393
+ # Actions in response to failure of a resource associated with a cluster.
394
+ module FailureAction
395
+ # When FailureAction is unspecified, failure action defaults to NO_ACTION.
396
+ FAILURE_ACTION_UNSPECIFIED = 0
397
+
398
+ # Take no action on failure to create a cluster resource. NO_ACTION is the
399
+ # default.
400
+ NO_ACTION = 1
401
+
402
+ # Delete the failed cluster resource.
403
+ DELETE = 2
404
+ end
70
405
  end
71
406
  end
72
407
  end
@@ -193,8 +193,8 @@ module Google
193
193
  #
194
194
  # The step id is used as prefix for job id, as job
195
195
  # `goog-dataproc-workflow-step-id` label, and in
196
- # {::Google::Cloud::Dataproc::V1::OrderedJob#prerequisite_step_ids prerequisiteStepIds}
197
- # field from other steps.
196
+ # {::Google::Cloud::Dataproc::V1::OrderedJob#prerequisite_step_ids prerequisiteStepIds} field from other
197
+ # steps.
198
198
  #
199
199
  # The id must contain only letters (a-z, A-Z), numbers (0-9),
200
200
  # underscores (_), and hyphens (-). Cannot begin or end with underscore
@@ -280,10 +280,10 @@ module Google
280
280
  # A field is allowed to appear in at most one parameter's list of field
281
281
  # paths.
282
282
  #
283
- # A field path is similar in syntax to a
284
- # {::Google::Protobuf::FieldMask google.protobuf.FieldMask}. For example, a
285
- # field path that references the zone field of a workflow template's cluster
286
- # selector would be specified as `placement.clusterSelector.zone`.
283
+ # A field path is similar in syntax to a {::Google::Protobuf::FieldMask google.protobuf.FieldMask}.
284
+ # For example, a field path that references the zone field of a workflow
285
+ # template's cluster selector would be specified as
286
+ # `placement.clusterSelector.zone`.
287
287
  #
288
288
  # Also, field paths can reference fields using the following syntax:
289
289
  #
@@ -410,19 +410,16 @@ module Google
410
410
  # Output only. The UUID of target cluster.
411
411
  # @!attribute [r] dag_timeout
412
412
  # @return [::Google::Protobuf::Duration]
413
- # Output only. The timeout duration for the DAG of jobs, expressed in seconds
414
- # (see [JSON representation of
413
+ # Output only. The timeout duration for the DAG of jobs, expressed in seconds (see
414
+ # [JSON representation of
415
415
  # duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
416
416
  # @!attribute [r] dag_start_time
417
417
  # @return [::Google::Protobuf::Timestamp]
418
- # Output only. DAG start time, only set for workflows with
419
- # {::Google::Cloud::Dataproc::V1::WorkflowMetadata#dag_timeout dag_timeout} when
420
- # DAG begins.
418
+ # Output only. DAG start time, only set for workflows with {::Google::Cloud::Dataproc::V1::WorkflowMetadata#dag_timeout dag_timeout} when DAG
419
+ # begins.
421
420
  # @!attribute [r] dag_end_time
422
421
  # @return [::Google::Protobuf::Timestamp]
423
- # Output only. DAG end time, only set for workflows with
424
- # {::Google::Cloud::Dataproc::V1::WorkflowMetadata#dag_timeout dag_timeout} when
425
- # DAG ends.
422
+ # Output only. DAG end time, only set for workflows with {::Google::Cloud::Dataproc::V1::WorkflowMetadata#dag_timeout dag_timeout} when DAG ends.
426
423
  class WorkflowMetadata
427
424
  include ::Google::Protobuf::MessageExts
428
425
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -525,7 +522,7 @@ module Google
525
522
  # Required. The resource name of the region or location, as described
526
523
  # in https://cloud.google.com/apis/design/resource_names.
527
524
  #
528
- # * For `projects.regions.workflowTemplates,create`, the resource name of the
525
+ # * For `projects.regions.workflowTemplates.create`, the resource name of the
529
526
  # region has the following format:
530
527
  # `projects/{project_id}/regions/{region}`
531
528
  #
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: google-cloud-dataproc-v1
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.6.3
4
+ version: 0.9.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Google LLC
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2021-08-11 00:00:00.000000000 Z
11
+ date: 2022-02-17 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: gapic-common
@@ -177,6 +177,13 @@ files:
177
177
  - lib/google/cloud/dataproc/v1/autoscaling_policy_service/client.rb
178
178
  - lib/google/cloud/dataproc/v1/autoscaling_policy_service/credentials.rb
179
179
  - lib/google/cloud/dataproc/v1/autoscaling_policy_service/paths.rb
180
+ - lib/google/cloud/dataproc/v1/batch_controller.rb
181
+ - lib/google/cloud/dataproc/v1/batch_controller/client.rb
182
+ - lib/google/cloud/dataproc/v1/batch_controller/credentials.rb
183
+ - lib/google/cloud/dataproc/v1/batch_controller/operations.rb
184
+ - lib/google/cloud/dataproc/v1/batch_controller/paths.rb
185
+ - lib/google/cloud/dataproc/v1/batches_pb.rb
186
+ - lib/google/cloud/dataproc/v1/batches_services_pb.rb
180
187
  - lib/google/cloud/dataproc/v1/cluster_controller.rb
181
188
  - lib/google/cloud/dataproc/v1/cluster_controller/client.rb
182
189
  - lib/google/cloud/dataproc/v1/cluster_controller/credentials.rb
@@ -204,6 +211,7 @@ files:
204
211
  - proto_docs/google/api/field_behavior.rb
205
212
  - proto_docs/google/api/resource.rb
206
213
  - proto_docs/google/cloud/dataproc/v1/autoscaling_policies.rb
214
+ - proto_docs/google/cloud/dataproc/v1/batches.rb
207
215
  - proto_docs/google/cloud/dataproc/v1/clusters.rb
208
216
  - proto_docs/google/cloud/dataproc/v1/jobs.rb
209
217
  - proto_docs/google/cloud/dataproc/v1/operations.rb
@@ -235,7 +243,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
235
243
  - !ruby/object:Gem::Version
236
244
  version: '0'
237
245
  requirements: []
238
- rubygems_version: 3.2.17
246
+ rubygems_version: 3.3.5
239
247
  signing_key:
240
248
  specification_version: 4
241
249
  summary: API Client library for the Cloud Dataproc V1 API