google-cloud-dataproc-v1 0.8.0 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 75b10ffcc3efa847d380e239d97ac92298df724a70e6f76e161a4357b31e2464
4
- data.tar.gz: ff2d2916e09c5503d8cb4ee6f74058770b64254ecb962aa4e71f453f4bae6401
3
+ metadata.gz: '096fe11db7fcb9a7ae4b9abd2deaf16fd8c1e0226c8705c40c7d70ef7bf175f5'
4
+ data.tar.gz: ff186e05094eee5034c48b4af192bea38e0e047b2d6ebb936ca52cbb1f66a9f1
5
5
  SHA512:
6
- metadata.gz: d2f533ae3bc23fa57971f8fe1faf5ba3abf1bec75d25935cb3e3de5229e9b11bcf4e9dd8d2c7c887f6a0fb54ccfdc21764aea94e5f077c6cf5fa5738648a0b6a
7
- data.tar.gz: 73a70eb049fc046aa5ece71bf473adad5df91872e7b83172c1439c1766901b5af6df17fff54dd561fa48e68c70cfb83282cd417a44a3a127058a36adaf547357
6
+ metadata.gz: c7c3e6d902a3c78603c7fa7b5bc92a5737dbe1e8e9b553874979914c9668a95d8366f621c4e26ea2c0d81ca1f8fa1ec32afd8b8c30244c7769299854f77cb83c
7
+ data.tar.gz: 8590a833f18fce759b1d040684ac0a43de995cce5a4b0f1174cbb209fddb6032be8b42f54eb910a2094aed8a405e0074cb7bbb1ee0d4d3189621fd2b00d097df
data/README.md CHANGED
@@ -37,7 +37,7 @@ request = ::Google::Cloud::Dataproc::V1::CreateAutoscalingPolicyRequest.new # (r
37
37
  response = client.create_autoscaling_policy request
38
38
  ```
39
39
 
40
- View the [Client Library Documentation](https://googleapis.dev/ruby/google-cloud-dataproc-v1/latest)
40
+ View the [Client Library Documentation](https://cloud.google.com/ruby/docs/reference/google-cloud-dataproc-v1/latest)
41
41
  for class and method documentation.
42
42
 
43
43
  See also the [Product Documentation](https://cloud.google.com/dataproc)
@@ -24,25 +24,6 @@ module Google
24
24
  module ClusterController
25
25
  # Path helper methods for the ClusterController API.
26
26
  module Paths
27
- ##
28
- # Create a fully-qualified Cluster resource string.
29
- #
30
- # The resource will be in the following format:
31
- #
32
- # `projects/{project}/locations/{location}/clusters/{cluster}`
33
- #
34
- # @param project [String]
35
- # @param location [String]
36
- # @param cluster [String]
37
- #
38
- # @return [::String]
39
- def cluster_path project:, location:, cluster:
40
- raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
41
- raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
42
-
43
- "projects/#{project}/locations/#{location}/clusters/#{cluster}"
44
- end
45
-
46
27
  ##
47
28
  # Create a fully-qualified Service resource string.
48
29
  #
@@ -18,6 +18,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
18
18
  optional :project_id, :string, 1
19
19
  optional :cluster_name, :string, 2
20
20
  optional :config, :message, 3, "google.cloud.dataproc.v1.ClusterConfig"
21
+ optional :virtual_cluster_config, :message, 10, "google.cloud.dataproc.v1.VirtualClusterConfig"
21
22
  map :labels, :string, :string, 8
22
23
  optional :status, :message, 4, "google.cloud.dataproc.v1.ClusterStatus"
23
24
  repeated :status_history, :message, 7, "google.cloud.dataproc.v1.ClusterStatus"
@@ -39,14 +40,18 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
39
40
  optional :lifecycle_config, :message, 17, "google.cloud.dataproc.v1.LifecycleConfig"
40
41
  optional :endpoint_config, :message, 19, "google.cloud.dataproc.v1.EndpointConfig"
41
42
  optional :metastore_config, :message, 20, "google.cloud.dataproc.v1.MetastoreConfig"
42
- optional :gke_cluster_config, :message, 21, "google.cloud.dataproc.v1.GkeClusterConfig"
43
43
  end
44
- add_message "google.cloud.dataproc.v1.GkeClusterConfig" do
45
- optional :namespaced_gke_deployment_target, :message, 1, "google.cloud.dataproc.v1.GkeClusterConfig.NamespacedGkeDeploymentTarget"
44
+ add_message "google.cloud.dataproc.v1.VirtualClusterConfig" do
45
+ optional :staging_bucket, :string, 1
46
+ optional :temp_bucket, :string, 2
47
+ optional :auxiliary_services_config, :message, 7, "google.cloud.dataproc.v1.AuxiliaryServicesConfig"
48
+ oneof :infrastructure_config do
49
+ optional :kubernetes_cluster_config, :message, 6, "google.cloud.dataproc.v1.KubernetesClusterConfig"
50
+ end
46
51
  end
47
- add_message "google.cloud.dataproc.v1.GkeClusterConfig.NamespacedGkeDeploymentTarget" do
48
- optional :target_gke_cluster, :string, 1
49
- optional :cluster_namespace, :string, 2
52
+ add_message "google.cloud.dataproc.v1.AuxiliaryServicesConfig" do
53
+ optional :metastore_config, :message, 1, "google.cloud.dataproc.v1.MetastoreConfig"
54
+ optional :spark_history_server_config, :message, 2, "google.cloud.dataproc.v1.SparkHistoryServerConfig"
50
55
  end
51
56
  add_message "google.cloud.dataproc.v1.EndpointConfig" do
52
57
  map :http_ports, :string, :string, 1
@@ -273,8 +278,8 @@ module Google
273
278
  module V1
274
279
  Cluster = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Cluster").msgclass
275
280
  ClusterConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ClusterConfig").msgclass
276
- GkeClusterConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeClusterConfig").msgclass
277
- GkeClusterConfig::NamespacedGkeDeploymentTarget = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeClusterConfig.NamespacedGkeDeploymentTarget").msgclass
281
+ VirtualClusterConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.VirtualClusterConfig").msgclass
282
+ AuxiliaryServicesConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.AuxiliaryServicesConfig").msgclass
278
283
  EndpointConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.EndpointConfig").msgclass
279
284
  AutoscalingConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.AutoscalingConfig").msgclass
280
285
  EncryptionConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.EncryptionConfig").msgclass
@@ -36,6 +36,53 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
36
36
  optional :output_uri, :string, 2
37
37
  optional :diagnostic_output_uri, :string, 3
38
38
  end
39
+ add_message "google.cloud.dataproc.v1.GkeClusterConfig" do
40
+ optional :gke_cluster_target, :string, 2
41
+ repeated :node_pool_target, :message, 3, "google.cloud.dataproc.v1.GkeNodePoolTarget"
42
+ end
43
+ add_message "google.cloud.dataproc.v1.KubernetesClusterConfig" do
44
+ optional :kubernetes_namespace, :string, 1
45
+ optional :kubernetes_software_config, :message, 3, "google.cloud.dataproc.v1.KubernetesSoftwareConfig"
46
+ oneof :config do
47
+ optional :gke_cluster_config, :message, 2, "google.cloud.dataproc.v1.GkeClusterConfig"
48
+ end
49
+ end
50
+ add_message "google.cloud.dataproc.v1.KubernetesSoftwareConfig" do
51
+ map :component_version, :string, :string, 1
52
+ map :properties, :string, :string, 2
53
+ end
54
+ add_message "google.cloud.dataproc.v1.GkeNodePoolTarget" do
55
+ optional :node_pool, :string, 1
56
+ repeated :roles, :enum, 2, "google.cloud.dataproc.v1.GkeNodePoolTarget.Role"
57
+ optional :node_pool_config, :message, 3, "google.cloud.dataproc.v1.GkeNodePoolConfig"
58
+ end
59
+ add_enum "google.cloud.dataproc.v1.GkeNodePoolTarget.Role" do
60
+ value :ROLE_UNSPECIFIED, 0
61
+ value :DEFAULT, 1
62
+ value :CONTROLLER, 2
63
+ value :SPARK_DRIVER, 3
64
+ value :SPARK_EXECUTOR, 4
65
+ end
66
+ add_message "google.cloud.dataproc.v1.GkeNodePoolConfig" do
67
+ optional :config, :message, 2, "google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodeConfig"
68
+ repeated :locations, :string, 13
69
+ optional :autoscaling, :message, 4, "google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodePoolAutoscalingConfig"
70
+ end
71
+ add_message "google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodeConfig" do
72
+ optional :machine_type, :string, 1
73
+ optional :preemptible, :bool, 10
74
+ optional :local_ssd_count, :int32, 7
75
+ repeated :accelerators, :message, 11, "google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodePoolAcceleratorConfig"
76
+ optional :min_cpu_platform, :string, 13
77
+ end
78
+ add_message "google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodePoolAcceleratorConfig" do
79
+ optional :accelerator_count, :int64, 1
80
+ optional :accelerator_type, :string, 2
81
+ end
82
+ add_message "google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodePoolAutoscalingConfig" do
83
+ optional :min_node_count, :int32, 2
84
+ optional :max_node_count, :int32, 3
85
+ end
39
86
  add_enum "google.cloud.dataproc.v1.Component" do
40
87
  value :COMPONENT_UNSPECIFIED, 0
41
88
  value :ANACONDA, 5
@@ -69,6 +116,15 @@ module Google
69
116
  SparkHistoryServerConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.SparkHistoryServerConfig").msgclass
70
117
  PeripheralsConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.PeripheralsConfig").msgclass
71
118
  RuntimeInfo = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.RuntimeInfo").msgclass
119
+ GkeClusterConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeClusterConfig").msgclass
120
+ KubernetesClusterConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.KubernetesClusterConfig").msgclass
121
+ KubernetesSoftwareConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.KubernetesSoftwareConfig").msgclass
122
+ GkeNodePoolTarget = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeNodePoolTarget").msgclass
123
+ GkeNodePoolTarget::Role = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeNodePoolTarget.Role").enummodule
124
+ GkeNodePoolConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeNodePoolConfig").msgclass
125
+ GkeNodePoolConfig::GkeNodeConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodeConfig").msgclass
126
+ GkeNodePoolConfig::GkeNodePoolAcceleratorConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodePoolAcceleratorConfig").msgclass
127
+ GkeNodePoolConfig::GkeNodePoolAutoscalingConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GkeNodePoolConfig.GkeNodePoolAutoscalingConfig").msgclass
72
128
  Component = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Component").enummodule
73
129
  FailureAction = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.FailureAction").enummodule
74
130
  end
@@ -21,7 +21,7 @@ module Google
21
21
  module Cloud
22
22
  module Dataproc
23
23
  module V1
24
- VERSION = "0.8.0"
24
+ VERSION = "0.9.0"
25
25
  end
26
26
  end
27
27
  end
@@ -24,25 +24,6 @@ module Google
24
24
  module WorkflowTemplateService
25
25
  # Path helper methods for the WorkflowTemplateService API.
26
26
  module Paths
27
- ##
28
- # Create a fully-qualified Cluster resource string.
29
- #
30
- # The resource will be in the following format:
31
- #
32
- # `projects/{project}/locations/{location}/clusters/{cluster}`
33
- #
34
- # @param project [String]
35
- # @param location [String]
36
- # @param cluster [String]
37
- #
38
- # @return [::String]
39
- def cluster_path project:, location:, cluster:
40
- raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
41
- raise ::ArgumentError, "location cannot contain /" if location.to_s.include? "/"
42
-
43
- "projects/#{project}/locations/#{location}/clusters/#{cluster}"
44
- end
45
-
46
27
  ##
47
28
  # Create a fully-qualified Location resource string.
48
29
  #
@@ -35,6 +35,15 @@ module Google
35
35
  # Optional. The cluster config for a cluster of Compute Engine Instances.
36
36
  # Note that Dataproc may set default values, and values may change
37
37
  # when clusters are updated.
38
+ # @!attribute [rw] virtual_cluster_config
39
+ # @return [::Google::Cloud::Dataproc::V1::VirtualClusterConfig]
40
+ # Optional. The virtual cluster config, used when creating a Dataproc cluster that
41
+ # does not directly control the underlying compute resources, for example,
42
+ # when creating a [Dataproc-on-GKE
43
+ # cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster).
44
+ # Note that Dataproc may set default values, and values may change when
45
+ # clusters are updated. Exactly one of config or virtualClusterConfig must be
46
+ # specified.
38
47
  # @!attribute [rw] labels
39
48
  # @return [::Google::Protobuf::Map{::String => ::String}]
40
49
  # Optional. The labels to associate with this cluster.
@@ -156,37 +165,63 @@ module Google
156
165
  # @!attribute [rw] metastore_config
157
166
  # @return [::Google::Cloud::Dataproc::V1::MetastoreConfig]
158
167
  # Optional. Metastore configuration.
159
- # @!attribute [rw] gke_cluster_config
160
- # @return [::Google::Cloud::Dataproc::V1::GkeClusterConfig]
161
- # Optional. BETA. The Kubernetes Engine config for Dataproc clusters deployed to
162
- # Kubernetes. Setting this is considered mutually exclusive with Compute
163
- # Engine-based options such as `gce_cluster_config`, `master_config`,
164
- # `worker_config`, `secondary_worker_config`, and `autoscaling_config`.
165
168
  class ClusterConfig
166
169
  include ::Google::Protobuf::MessageExts
167
170
  extend ::Google::Protobuf::MessageExts::ClassMethods
168
171
  end
169
172
 
170
- # The GKE config for this cluster.
171
- # @!attribute [rw] namespaced_gke_deployment_target
172
- # @return [::Google::Cloud::Dataproc::V1::GkeClusterConfig::NamespacedGkeDeploymentTarget]
173
- # Optional. A target for the deployment.
174
- class GkeClusterConfig
173
+ # Dataproc cluster config for a cluster that does not directly control the
174
+ # underlying compute resources, such as a [Dataproc-on-GKE
175
+ # cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster).
176
+ # @!attribute [rw] staging_bucket
177
+ # @return [::String]
178
+ # Optional. A Storage bucket used to stage job
179
+ # dependencies, config files, and job driver console output.
180
+ # If you do not specify a staging bucket, Cloud
181
+ # Dataproc will determine a Cloud Storage location (US,
182
+ # ASIA, or EU) for your cluster's staging bucket according to the
183
+ # Compute Engine zone where your cluster is deployed, and then create
184
+ # and manage this project-level, per-location bucket (see
185
+ # [Dataproc staging and temp
186
+ # buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
187
+ # **This field requires a Cloud Storage bucket name, not a `gs://...` URI to
188
+ # a Cloud Storage bucket.**
189
+ # @!attribute [rw] temp_bucket
190
+ # @return [::String]
191
+ # Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data,
192
+ # such as Spark and MapReduce history files.
193
+ # If you do not specify a temp bucket,
194
+ # Dataproc will determine a Cloud Storage location (US,
195
+ # ASIA, or EU) for your cluster's temp bucket according to the
196
+ # Compute Engine zone where your cluster is deployed, and then create
197
+ # and manage this project-level, per-location bucket. The default bucket has
198
+ # a TTL of 90 days, but you can use any TTL (or none) if you specify a
199
+ # bucket (see
200
+ # [Dataproc staging and temp
201
+ # buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
202
+ # **This field requires a Cloud Storage bucket name, not a `gs://...` URI to
203
+ # a Cloud Storage bucket.**
204
+ # @!attribute [rw] kubernetes_cluster_config
205
+ # @return [::Google::Cloud::Dataproc::V1::KubernetesClusterConfig]
206
+ # Required. The configuration for running the Dataproc cluster on Kubernetes.
207
+ # @!attribute [rw] auxiliary_services_config
208
+ # @return [::Google::Cloud::Dataproc::V1::AuxiliaryServicesConfig]
209
+ # Optional. Configuration of auxiliary services used by this cluster.
210
+ class VirtualClusterConfig
175
211
  include ::Google::Protobuf::MessageExts
176
212
  extend ::Google::Protobuf::MessageExts::ClassMethods
213
+ end
177
214
 
178
- # A full, namespace-isolated deployment target for an existing GKE cluster.
179
- # @!attribute [rw] target_gke_cluster
180
- # @return [::String]
181
- # Optional. The target GKE cluster to deploy to.
182
- # Format: 'projects/\\{project}/locations/\\{location}/clusters/\\{cluster_id}'
183
- # @!attribute [rw] cluster_namespace
184
- # @return [::String]
185
- # Optional. A namespace within the GKE cluster to deploy into.
186
- class NamespacedGkeDeploymentTarget
187
- include ::Google::Protobuf::MessageExts
188
- extend ::Google::Protobuf::MessageExts::ClassMethods
189
- end
215
+ # Auxiliary services configuration for a Cluster.
216
+ # @!attribute [rw] metastore_config
217
+ # @return [::Google::Cloud::Dataproc::V1::MetastoreConfig]
218
+ # Optional. The Hive Metastore configuration for this workload.
219
+ # @!attribute [rw] spark_history_server_config
220
+ # @return [::Google::Cloud::Dataproc::V1::SparkHistoryServerConfig]
221
+ # Optional. The Spark History Server configuration for the workload.
222
+ class AuxiliaryServicesConfig
223
+ include ::Google::Protobuf::MessageExts
224
+ extend ::Google::Protobuf::MessageExts::ClassMethods
190
225
  end
191
226
 
192
227
  # Endpoint config for this cluster
@@ -594,8 +629,8 @@ module Google
594
629
  # Optional. Interface type of local SSDs (default is "scsi").
595
630
  # Valid values: "scsi" (Small Computer System Interface),
596
631
  # "nvme" (Non-Volatile Memory Express).
597
- # See [SSD Interface
598
- # types](https://cloud.google.com/compute/docs/disks/local-ssd#performance).
632
+ # See [local SSD
633
+ # performance](https://cloud.google.com/compute/docs/disks/local-ssd#performance).
599
634
  class DiskConfig
600
635
  include ::Google::Protobuf::MessageExts
601
636
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -648,6 +683,10 @@ module Google
648
683
  CREATING = 1
649
684
 
650
685
  # The cluster is currently running and healthy. It is ready for use.
686
+ #
687
+ # **Note:** The cluster state changes from "creating" to "running" status
688
+ # after the master node(s), first two primary worker nodes (and the last
689
+ # primary worker node if primary workers > 2) are running.
651
690
  RUNNING = 2
652
691
 
653
692
  # The cluster encountered an error. It is not ready for use.
@@ -135,6 +135,215 @@ module Google
135
135
  end
136
136
  end
137
137
 
138
+ # The cluster's GKE config.
139
+ # @!attribute [rw] gke_cluster_target
140
+ # @return [::String]
141
+ # Optional. A target GKE cluster to deploy to. It must be in the same project and
142
+ # region as the Dataproc cluster (the GKE cluster can be zonal or regional).
143
+ # Format: 'projects/\\{project}/locations/\\{location}/clusters/\\{cluster_id}'
144
+ # @!attribute [rw] node_pool_target
145
+ # @return [::Array<::Google::Cloud::Dataproc::V1::GkeNodePoolTarget>]
146
+ # Optional. GKE NodePools where workloads will be scheduled. At least one node pool
147
+ # must be assigned the 'default' role. Each role can be given to only a
148
+ # single NodePoolTarget. All NodePools must have the same location settings.
149
+ # If a nodePoolTarget is not specified, Dataproc constructs a default
150
+ # nodePoolTarget.
151
+ class GkeClusterConfig
152
+ include ::Google::Protobuf::MessageExts
153
+ extend ::Google::Protobuf::MessageExts::ClassMethods
154
+ end
155
+
156
+ # The configuration for running the Dataproc cluster on Kubernetes.
157
+ # @!attribute [rw] kubernetes_namespace
158
+ # @return [::String]
159
+ # Optional. A namespace within the Kubernetes cluster to deploy into. If this namespace
160
+ # does not exist, it is created. If it exists, Dataproc
161
+ # verifies that another Dataproc VirtualCluster is not installed
162
+ # into it. If not specified, the name of the Dataproc Cluster is used.
163
+ # @!attribute [rw] gke_cluster_config
164
+ # @return [::Google::Cloud::Dataproc::V1::GkeClusterConfig]
165
+ # Required. The configuration for running the Dataproc cluster on GKE.
166
+ # @!attribute [rw] kubernetes_software_config
167
+ # @return [::Google::Cloud::Dataproc::V1::KubernetesSoftwareConfig]
168
+ # Optional. The software configuration for this Dataproc cluster running on Kubernetes.
169
+ class KubernetesClusterConfig
170
+ include ::Google::Protobuf::MessageExts
171
+ extend ::Google::Protobuf::MessageExts::ClassMethods
172
+ end
173
+
174
+ # The software configuration for this Dataproc cluster running on Kubernetes.
175
+ # @!attribute [rw] component_version
176
+ # @return [::Google::Protobuf::Map{::String => ::String}]
177
+ # The components that should be installed in this Dataproc cluster. The key
178
+ # must be a string from the KubernetesComponent enumeration. The value is
179
+ # the version of the software to be installed.
180
+ # At least one entry must be specified.
181
+ # @!attribute [rw] properties
182
+ # @return [::Google::Protobuf::Map{::String => ::String}]
183
+ # The properties to set on daemon config files.
184
+ #
185
+ # Property keys are specified in `prefix:property` format, for example
186
+ # `spark:spark.kubernetes.container.image`. The following are supported
187
+ # prefixes and their mappings:
188
+ #
189
+ # * spark: `spark-defaults.conf`
190
+ #
191
+ # For more information, see [Cluster
192
+ # properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties).
193
+ class KubernetesSoftwareConfig
194
+ include ::Google::Protobuf::MessageExts
195
+ extend ::Google::Protobuf::MessageExts::ClassMethods
196
+
197
+ # @!attribute [rw] key
198
+ # @return [::String]
199
+ # @!attribute [rw] value
200
+ # @return [::String]
201
+ class ComponentVersionEntry
202
+ include ::Google::Protobuf::MessageExts
203
+ extend ::Google::Protobuf::MessageExts::ClassMethods
204
+ end
205
+
206
+ # @!attribute [rw] key
207
+ # @return [::String]
208
+ # @!attribute [rw] value
209
+ # @return [::String]
210
+ class PropertiesEntry
211
+ include ::Google::Protobuf::MessageExts
212
+ extend ::Google::Protobuf::MessageExts::ClassMethods
213
+ end
214
+ end
215
+
216
+ # GKE NodePools that Dataproc workloads run on.
217
+ # @!attribute [rw] node_pool
218
+ # @return [::String]
219
+ # Required. The target GKE NodePool.
220
+ # Format:
221
+ # 'projects/\\{project}/locations/\\{location}/clusters/\\{cluster}/nodePools/\\{node_pool}'
222
+ # @!attribute [rw] roles
223
+ # @return [::Array<::Google::Cloud::Dataproc::V1::GkeNodePoolTarget::Role>]
224
+ # Required. The types of role for a GKE NodePool
225
+ # @!attribute [rw] node_pool_config
226
+ # @return [::Google::Cloud::Dataproc::V1::GkeNodePoolConfig]
227
+ # Optional. The configuration for the GKE NodePool.
228
+ #
229
+ # If specified, Dataproc attempts to create a NodePool with the
230
+ # specified shape. If one with the same name already exists, it is
231
+ # verified against all specified fields. If a field differs, the
232
+ # virtual cluster creation will fail.
233
+ #
234
+ # If omitted, any NodePool with the specified name is used. If a
235
+ # NodePool with the specified name does not exist, Dataproc creates a NodePool
236
+ # with default values.
237
+ class GkeNodePoolTarget
238
+ include ::Google::Protobuf::MessageExts
239
+ extend ::Google::Protobuf::MessageExts::ClassMethods
240
+
241
+ # `Role` specifies whose tasks will run on the NodePool. The roles can be
242
+ # specific to workloads. Exactly one GkeNodePoolTarget within the
243
+ # VirtualCluster must have 'default' role, which is used to run all workloads
244
+ # that are not associated with a NodePool.
245
+ module Role
246
+ # Role is unspecified.
247
+ ROLE_UNSPECIFIED = 0
248
+
249
+ # Any roles that are not directly assigned to a NodePool run on the
250
+ # `default` role's NodePool.
251
+ DEFAULT = 1
252
+
253
+ # Run controllers and webhooks.
254
+ CONTROLLER = 2
255
+
256
+ # Run spark driver.
257
+ SPARK_DRIVER = 3
258
+
259
+ # Run spark executors.
260
+ SPARK_EXECUTOR = 4
261
+ end
262
+ end
263
+
264
+ # The configuration of a GKE NodePool used by a [Dataproc-on-GKE
265
+ # cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster).
266
+ # @!attribute [rw] config
267
+ # @return [::Google::Cloud::Dataproc::V1::GkeNodePoolConfig::GkeNodeConfig]
268
+ # Optional. The node pool configuration.
269
+ # @!attribute [rw] locations
270
+ # @return [::Array<::String>]
271
+ # Optional. The list of Compute Engine
272
+ # [zones](https://cloud.google.com/compute/docs/zones#available) where
273
+ # NodePool's nodes will be located.
274
+ #
275
+ # **Note:** Currently, only one zone may be specified.
276
+ #
277
+ # If a location is not specified during NodePool creation, Dataproc will
278
+ # choose a location.
279
+ # @!attribute [rw] autoscaling
280
+ # @return [::Google::Cloud::Dataproc::V1::GkeNodePoolConfig::GkeNodePoolAutoscalingConfig]
281
+ # Optional. The autoscaler configuration for this NodePool. The autoscaler is enabled
282
+ # only when a valid configuration is present.
283
+ class GkeNodePoolConfig
284
+ include ::Google::Protobuf::MessageExts
285
+ extend ::Google::Protobuf::MessageExts::ClassMethods
286
+
287
+ # Parameters that describe cluster nodes.
288
+ # @!attribute [rw] machine_type
289
+ # @return [::String]
290
+ # Optional. The name of a Compute Engine [machine
291
+ # type](https://cloud.google.com/compute/docs/machine-types).
292
+ # @!attribute [rw] preemptible
293
+ # @return [::Boolean]
294
+ # Optional. Whether the nodes are created as [preemptible VM
295
+ # instances](https://cloud.google.com/compute/docs/instances/preemptible).
296
+ # @!attribute [rw] local_ssd_count
297
+ # @return [::Integer]
298
+ # Optional. The number of local SSD disks to attach to the node, which is limited by
299
+ # the maximum number of disks allowable per zone (see [Adding Local
300
+ # SSDs](https://cloud.google.com/compute/docs/disks/local-ssd)).
301
+ # @!attribute [rw] accelerators
302
+ # @return [::Array<::Google::Cloud::Dataproc::V1::GkeNodePoolConfig::GkeNodePoolAcceleratorConfig>]
303
+ # Optional. A list of [hardware
304
+ # accelerators](https://cloud.google.com/compute/docs/gpus) to attach to
305
+ # each node.
306
+ # @!attribute [rw] min_cpu_platform
307
+ # @return [::String]
308
+ # Optional. [Minimum CPU
309
+ # platform](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
310
+ # to be used by this instance. The instance may be scheduled on the
311
+ # specified or a newer CPU platform. Specify the friendly names of CPU
312
+ # platforms, such as "Intel Haswell" or "Intel Sandy Bridge".
313
+ class GkeNodeConfig
314
+ include ::Google::Protobuf::MessageExts
315
+ extend ::Google::Protobuf::MessageExts::ClassMethods
316
+ end
317
+
318
+ # A GkeNodeConfigAcceleratorConfig represents a Hardware Accelerator request
319
+ # for a NodePool.
320
+ # @!attribute [rw] accelerator_count
321
+ # @return [::Integer]
322
+ # The number of accelerator cards exposed to an instance.
323
+ # @!attribute [rw] accelerator_type
324
+ # @return [::String]
325
+ # The accelerator type resource name (see GPUs on Compute Engine).
326
+ class GkeNodePoolAcceleratorConfig
327
+ include ::Google::Protobuf::MessageExts
328
+ extend ::Google::Protobuf::MessageExts::ClassMethods
329
+ end
330
+
331
+ # GkeNodePoolAutoscaling contains information the cluster autoscaler needs to
332
+ # adjust the size of the node pool to the current cluster usage.
333
+ # @!attribute [rw] min_node_count
334
+ # @return [::Integer]
335
+ # The minimum number of nodes in the NodePool. Must be >= 0 and <=
336
+ # max_node_count.
337
+ # @!attribute [rw] max_node_count
338
+ # @return [::Integer]
339
+ # The maximum number of nodes in the NodePool. Must be >= min_node_count.
340
+ # **Note:** Quota must be sufficient to scale up the cluster.
341
+ class GkeNodePoolAutoscalingConfig
342
+ include ::Google::Protobuf::MessageExts
343
+ extend ::Google::Protobuf::MessageExts::ClassMethods
344
+ end
345
+ end
346
+
138
347
  # Cluster components that can be activated.
139
348
  module Component
140
349
  # Unspecified component. Specifying this will cause Cluster creation to fail.
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: google-cloud-dataproc-v1
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.8.0
4
+ version: 0.9.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Google LLC
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2022-01-11 00:00:00.000000000 Z
11
+ date: 2022-02-17 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: gapic-common
@@ -243,7 +243,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
243
243
  - !ruby/object:Gem::Version
244
244
  version: '0'
245
245
  requirements: []
246
- rubygems_version: 3.3.4
246
+ rubygems_version: 3.3.5
247
247
  signing_key:
248
248
  specification_version: 4
249
249
  summary: API Client library for the Cloud Dataproc V1 API