google-cloud-dataproc-v1 0.2.1 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: d57b680ce9132d46310f96ebd8a911c9717f23dfd7af248244e523d42edabeaa
- data.tar.gz: e9ec515cff136dfdcbeb8d134d1c8495b1b77eed4e7a3634e88b40039e2e489b
+ metadata.gz: 0567d2842abcad9b95fc3eeb90344f036b91853d3f340627573a0e92c24115c3
+ data.tar.gz: fc01da53cb50094ffdf26036b7c39e7ed8801755a7093cd1ee6c09c988c6a214
  SHA512:
- metadata.gz: 670363dfd41123ca6c05a757c50073b6c7762f5a178eefaeeffc32333c6141b48ffdae8dba88bdc97b65499269b53c0c09e886ca44048285558f19f12506f9ac
- data.tar.gz: 1cc4aee4dc286a1a0c03bbddf4879edd857d9b7a52b077ac0c3bcd78420dfeeccbf45b9b498e7c5bf242e145b4c221ccc9796f7434fc326e6d93a25f59ce531d
+ metadata.gz: d7cfd12eef38acea43bdfa13142bb7aeda91d123a45dbfdda445961b4ce9f3048436e23ceff0da5ce3523da44108ff443bd5135ed247eb0a72ef23d9bf1af4f1
+ data.tar.gz: 59a089563ef1ef3ab1e8f511cb9116fdf62a14ba9ac2c8461626c89266ed5fad1c929d415d6bf9e21a4c387592d23f06b430696f967e90b71bcf229ab65fe0cd
data/README.md CHANGED
@@ -18,6 +18,7 @@ In order to use this library, you first need to go through the following steps:

  1. [Select or create a Cloud Platform project.](https://console.cloud.google.com/project)
  1. [Enable billing for your project.](https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project)
+ 1. [Enable the API.](https://console.cloud.google.com/apis/library/dataproc.googleapis.com)
  1. {file:AUTHENTICATION.md Set up authentication.}

  ## Quick Start
@@ -33,6 +34,9 @@ response = client.create_autoscaling_policy request
  View the [Client Library Documentation](https://googleapis.dev/ruby/google-cloud-dataproc-v1/latest)
  for class and method documentation.

+ See also the [Product Documentation](https://cloud.google.com/dataproc)
+ for general usage information.
+
  ## Enabling Logging

  To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library.
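A minimal sketch of that gRPC logger setup, assuming logs should go to `$stderr` at `WARN` level (the module name `MyLogger` is arbitrary):

```ruby
require "logger"

# Provide a logger for gRPC to pick up. Defining this before gRPC loads its
# default log configuration routes the library's internal logging here.
module MyLogger
  LOGGER = Logger.new $stderr, level: Logger::WARN
  def logger
    LOGGER
  end
end

module GRPC
  extend MyLogger
end
```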
@@ -36,3 +36,6 @@ module Google
  end
  end
  end
+
+ helper_path = ::File.join __dir__, "v1", "_helpers.rb"
+ require "google/cloud/dataproc/v1/_helpers" if ::File.file? helper_path
@@ -35,19 +35,19 @@ module Google
  self.service_name = 'google.cloud.dataproc.v1.AutoscalingPolicyService'

  # Creates new autoscaling policy.
- rpc :CreateAutoscalingPolicy, CreateAutoscalingPolicyRequest, AutoscalingPolicy
+ rpc :CreateAutoscalingPolicy, ::Google::Cloud::Dataproc::V1::CreateAutoscalingPolicyRequest, ::Google::Cloud::Dataproc::V1::AutoscalingPolicy
  # Updates (replaces) autoscaling policy.
  #
  # Disabled check for update_mask, because all updates will be full
  # replacements.
- rpc :UpdateAutoscalingPolicy, UpdateAutoscalingPolicyRequest, AutoscalingPolicy
+ rpc :UpdateAutoscalingPolicy, ::Google::Cloud::Dataproc::V1::UpdateAutoscalingPolicyRequest, ::Google::Cloud::Dataproc::V1::AutoscalingPolicy
  # Retrieves autoscaling policy.
- rpc :GetAutoscalingPolicy, GetAutoscalingPolicyRequest, AutoscalingPolicy
+ rpc :GetAutoscalingPolicy, ::Google::Cloud::Dataproc::V1::GetAutoscalingPolicyRequest, ::Google::Cloud::Dataproc::V1::AutoscalingPolicy
  # Lists autoscaling policies in the project.
- rpc :ListAutoscalingPolicies, ListAutoscalingPoliciesRequest, ListAutoscalingPoliciesResponse
+ rpc :ListAutoscalingPolicies, ::Google::Cloud::Dataproc::V1::ListAutoscalingPoliciesRequest, ::Google::Cloud::Dataproc::V1::ListAutoscalingPoliciesResponse
  # Deletes an autoscaling policy. It is an error to delete an autoscaling
  # policy that is in use by one or more clusters.
- rpc :DeleteAutoscalingPolicy, DeleteAutoscalingPolicyRequest, Google::Protobuf::Empty
+ rpc :DeleteAutoscalingPolicy, ::Google::Cloud::Dataproc::V1::DeleteAutoscalingPolicyRequest, ::Google::Protobuf::Empty
  end

  Stub = Service.rpc_stub_class
@@ -73,7 +73,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "UNAVAILABLE"]
+ retry_codes: [4, 14]
  }

  default_config.rpcs.get_autoscaling_policy.timeout = 600.0
@@ -81,7 +81,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "UNAVAILABLE"]
+ retry_codes: [4, 14]
  }

  default_config.rpcs.list_autoscaling_policies.timeout = 600.0
@@ -89,7 +89,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "UNAVAILABLE"]
+ retry_codes: [4, 14]
  }

  default_config.rpcs.delete_autoscaling_policy.timeout = 600.0
@@ -637,7 +637,7 @@ module Google

  config_attr :endpoint, "dataproc.googleapis.com", ::String
  config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
  allowed.any? { |klass| klass === value }
  end
@@ -665,7 +665,7 @@ module Google
  def rpcs
  @rpcs ||= begin
  parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
  Rpcs.new parent_rpcs
  end
  end
@@ -677,7 +677,7 @@ module Google
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
- # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  # include the following keys:
@@ -69,7 +69,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.update_cluster.timeout = 300.0
@@ -77,7 +77,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.delete_cluster.timeout = 300.0
@@ -85,7 +85,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.get_cluster.timeout = 300.0
@@ -93,7 +93,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["INTERNAL", "DEADLINE_EXCEEDED", "UNAVAILABLE"]
+ retry_codes: [13, 4, 14]
  }

  default_config.rpcs.list_clusters.timeout = 300.0
@@ -101,7 +101,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["INTERNAL", "DEADLINE_EXCEEDED", "UNAVAILABLE"]
+ retry_codes: [13, 4, 14]
  }

  default_config.rpcs.diagnose_cluster.timeout = 300.0
@@ -109,7 +109,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config
@@ -866,7 +866,7 @@ module Google

  config_attr :endpoint, "dataproc.googleapis.com", ::String
  config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
  allowed.any? { |klass| klass === value }
  end
@@ -894,7 +894,7 @@ module Google
  def rpcs
  @rpcs ||= begin
  parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
  Rpcs.new parent_rpcs
  end
  end
@@ -906,7 +906,7 @@ module Google
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
- # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  # include the following keys:
@@ -475,7 +475,7 @@ module Google

  config_attr :endpoint, "dataproc.googleapis.com", ::String
  config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
  allowed.any? { |klass| klass === value }
  end
@@ -503,7 +503,7 @@ module Google
  def rpcs
  @rpcs ||= begin
  parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
  Rpcs.new parent_rpcs
  end
  end
@@ -515,7 +515,7 @@ module Google
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
- # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  # include the following keys:
@@ -25,6 +25,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  end
  add_message "google.cloud.dataproc.v1.ClusterConfig" do
  optional :config_bucket, :string, 1
+ optional :temp_bucket, :string, 2
  optional :gce_cluster_config, :message, 8, "google.cloud.dataproc.v1.GceClusterConfig"
  optional :master_config, :message, 9, "google.cloud.dataproc.v1.InstanceGroupConfig"
  optional :worker_config, :message, 10, "google.cloud.dataproc.v1.InstanceGroupConfig"
@@ -35,6 +36,11 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :autoscaling_config, :message, 18, "google.cloud.dataproc.v1.AutoscalingConfig"
  optional :security_config, :message, 16, "google.cloud.dataproc.v1.SecurityConfig"
  optional :lifecycle_config, :message, 17, "google.cloud.dataproc.v1.LifecycleConfig"
+ optional :endpoint_config, :message, 19, "google.cloud.dataproc.v1.EndpointConfig"
+ end
+ add_message "google.cloud.dataproc.v1.EndpointConfig" do
+ map :http_ports, :string, :string, 1
+ optional :enable_http_port_access, :bool, 2
  end
  add_message "google.cloud.dataproc.v1.AutoscalingConfig" do
  optional :policy_uri, :string, 1
@@ -60,10 +66,16 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
  optional :machine_type_uri, :string, 4
  optional :disk_config, :message, 5, "google.cloud.dataproc.v1.DiskConfig"
  optional :is_preemptible, :bool, 6
+ optional :preemptibility, :enum, 10, "google.cloud.dataproc.v1.InstanceGroupConfig.Preemptibility"
  optional :managed_group_config, :message, 7, "google.cloud.dataproc.v1.ManagedGroupConfig"
  repeated :accelerators, :message, 8, "google.cloud.dataproc.v1.AcceleratorConfig"
  optional :min_cpu_platform, :string, 9
  end
+ add_enum "google.cloud.dataproc.v1.InstanceGroupConfig.Preemptibility" do
+ value :PREEMPTIBILITY_UNSPECIFIED, 0
+ value :NON_PREEMPTIBLE, 1
+ value :PREEMPTIBLE, 2
+ end
  add_message "google.cloud.dataproc.v1.ManagedGroupConfig" do
  optional :instance_template_name, :string, 1
  optional :instance_group_manager_name, :string, 2
@@ -203,10 +215,12 @@ module Google
  module V1
  Cluster = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.Cluster").msgclass
  ClusterConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ClusterConfig").msgclass
+ EndpointConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.EndpointConfig").msgclass
  AutoscalingConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.AutoscalingConfig").msgclass
  EncryptionConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.EncryptionConfig").msgclass
  GceClusterConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.GceClusterConfig").msgclass
  InstanceGroupConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.InstanceGroupConfig").msgclass
+ InstanceGroupConfig::Preemptibility = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.InstanceGroupConfig.Preemptibility").enummodule
  ManagedGroupConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.ManagedGroupConfig").msgclass
  AcceleratorConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.AcceleratorConfig").msgclass
  DiskConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataproc.v1.DiskConfig").msgclass
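A hedged sketch of building a `ClusterConfig` that uses the newly added fields (`temp_bucket`, `endpoint_config`, `preemptibility`); the bucket name and instance counts are placeholders, and `num_instances`/`secondary_worker_config` come from the wider message definition rather than this hunk:

```ruby
require "google/cloud/dataproc/v1"

cluster_config = ::Google::Cloud::Dataproc::V1::ClusterConfig.new(
  temp_bucket: "my-dataproc-temp-bucket",            # placeholder bucket name
  endpoint_config: ::Google::Cloud::Dataproc::V1::EndpointConfig.new(
    enable_http_port_access: true                    # expose component web UIs over HTTP
  ),
  worker_config: ::Google::Cloud::Dataproc::V1::InstanceGroupConfig.new(
    num_instances: 2,
    preemptibility: :NON_PREEMPTIBLE                 # only valid value for primary workers
  ),
  secondary_worker_config: ::Google::Cloud::Dataproc::V1::InstanceGroupConfig.new(
    num_instances: 2,
    preemptibility: :PREEMPTIBLE                     # allowed for secondary workers
  )
)
```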
@@ -37,19 +37,19 @@ module Google
  # Creates a cluster in a project. The returned
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
  # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
- rpc :CreateCluster, CreateClusterRequest, Google::Longrunning::Operation
+ rpc :CreateCluster, ::Google::Cloud::Dataproc::V1::CreateClusterRequest, ::Google::Longrunning::Operation
  # Updates a cluster in a project. The returned
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
  # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
- rpc :UpdateCluster, UpdateClusterRequest, Google::Longrunning::Operation
+ rpc :UpdateCluster, ::Google::Cloud::Dataproc::V1::UpdateClusterRequest, ::Google::Longrunning::Operation
  # Deletes a cluster in a project. The returned
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
  # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
- rpc :DeleteCluster, DeleteClusterRequest, Google::Longrunning::Operation
+ rpc :DeleteCluster, ::Google::Cloud::Dataproc::V1::DeleteClusterRequest, ::Google::Longrunning::Operation
  # Gets the resource representation for a cluster in a project.
- rpc :GetCluster, GetClusterRequest, Cluster
+ rpc :GetCluster, ::Google::Cloud::Dataproc::V1::GetClusterRequest, ::Google::Cloud::Dataproc::V1::Cluster
  # Lists all regions/{region}/clusters in a project alphabetically.
- rpc :ListClusters, ListClustersRequest, ListClustersResponse
+ rpc :ListClusters, ::Google::Cloud::Dataproc::V1::ListClustersRequest, ::Google::Cloud::Dataproc::V1::ListClustersResponse
  # Gets cluster diagnostic information. The returned
  # [Operation.metadata][google.longrunning.Operation.metadata] will be
  # [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
@@ -57,7 +57,7 @@ module Google
  # [Operation.response][google.longrunning.Operation.response]
  # contains
  # [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
- rpc :DiagnoseCluster, DiagnoseClusterRequest, Google::Longrunning::Operation
+ rpc :DiagnoseCluster, ::Google::Cloud::Dataproc::V1::DiagnoseClusterRequest, ::Google::Longrunning::Operation
  end

  Stub = Service.rpc_stub_class
@@ -68,7 +68,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.submit_job_as_operation.timeout = 900.0
@@ -76,7 +76,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.get_job.timeout = 900.0
@@ -84,7 +84,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"]
+ retry_codes: [4, 13, 14]
  }

  default_config.rpcs.list_jobs.timeout = 900.0
@@ -92,7 +92,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"]
+ retry_codes: [4, 13, 14]
  }

  default_config.rpcs.update_job.timeout = 900.0
@@ -100,7 +100,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.cancel_job.timeout = 900.0
@@ -108,7 +108,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"]
+ retry_codes: [4, 13, 14]
  }

  default_config.rpcs.delete_job.timeout = 900.0
@@ -116,7 +116,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config
@@ -869,7 +869,7 @@ module Google

  config_attr :endpoint, "dataproc.googleapis.com", ::String
  config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
  allowed.any? { |klass| klass === value }
  end
@@ -897,7 +897,7 @@ module Google
  def rpcs
  @rpcs ||= begin
  parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
  Rpcs.new parent_rpcs
  end
  end
@@ -909,7 +909,7 @@ module Google
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
- # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  # include the following keys:
@@ -475,7 +475,7 @@ module Google

  config_attr :endpoint, "dataproc.googleapis.com", ::String
  config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
  allowed.any? { |klass| klass === value }
  end
@@ -503,7 +503,7 @@ module Google
  def rpcs
  @rpcs ||= begin
  parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
  Rpcs.new parent_rpcs
  end
  end
@@ -515,7 +515,7 @@ module Google
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
- # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  # include the following keys:
@@ -34,24 +34,24 @@ module Google
  self.service_name = 'google.cloud.dataproc.v1.JobController'

  # Submits a job to a cluster.
- rpc :SubmitJob, SubmitJobRequest, Job
+ rpc :SubmitJob, ::Google::Cloud::Dataproc::V1::SubmitJobRequest, ::Google::Cloud::Dataproc::V1::Job
  # Submits job to a cluster.
- rpc :SubmitJobAsOperation, SubmitJobRequest, Google::Longrunning::Operation
+ rpc :SubmitJobAsOperation, ::Google::Cloud::Dataproc::V1::SubmitJobRequest, ::Google::Longrunning::Operation
  # Gets the resource representation for a job in a project.
- rpc :GetJob, GetJobRequest, Job
+ rpc :GetJob, ::Google::Cloud::Dataproc::V1::GetJobRequest, ::Google::Cloud::Dataproc::V1::Job
  # Lists regions/{region}/jobs in a project.
- rpc :ListJobs, ListJobsRequest, ListJobsResponse
+ rpc :ListJobs, ::Google::Cloud::Dataproc::V1::ListJobsRequest, ::Google::Cloud::Dataproc::V1::ListJobsResponse
  # Updates a job in a project.
- rpc :UpdateJob, UpdateJobRequest, Job
+ rpc :UpdateJob, ::Google::Cloud::Dataproc::V1::UpdateJobRequest, ::Google::Cloud::Dataproc::V1::Job
  # Starts a job cancellation request. To access the job resource
  # after cancellation, call
  # [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
  # or
  # [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
- rpc :CancelJob, CancelJobRequest, Job
+ rpc :CancelJob, ::Google::Cloud::Dataproc::V1::CancelJobRequest, ::Google::Cloud::Dataproc::V1::Job
  # Deletes the job from the project. If the job is active, the delete fails,
  # and the response returns `FAILED_PRECONDITION`.
- rpc :DeleteJob, DeleteJobRequest, Google::Protobuf::Empty
+ rpc :DeleteJob, ::Google::Cloud::Dataproc::V1::DeleteJobRequest, ::Google::Protobuf::Empty
  end

  Stub = Service.rpc_stub_class
@@ -21,7 +21,7 @@ module Google
  module Cloud
  module Dataproc
  module V1
- VERSION = "0.2.1"
+ VERSION = "0.3.2"
  end
  end
  end
@@ -71,7 +71,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.get_workflow_template.timeout = 600.0
@@ -79,7 +79,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"]
+ retry_codes: [4, 13, 14]
  }

  default_config.rpcs.instantiate_workflow_template.timeout = 600.0
@@ -87,7 +87,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.instantiate_inline_workflow_template.timeout = 600.0
@@ -95,7 +95,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.update_workflow_template.timeout = 600.0
@@ -103,7 +103,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config.rpcs.list_workflow_templates.timeout = 600.0
@@ -111,7 +111,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"]
+ retry_codes: [4, 13, 14]
  }

  default_config.rpcs.delete_workflow_template.timeout = 600.0
@@ -119,7 +119,7 @@ module Google
  initial_delay: 0.1,
  max_delay: 60.0,
  multiplier: 1.3,
- retry_codes: ["UNAVAILABLE"]
+ retry_codes: [14]
  }

  default_config
@@ -913,7 +913,7 @@ module Google

  config_attr :endpoint, "dataproc.googleapis.com", ::String
  config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
  allowed.any? { |klass| klass === value }
  end
@@ -941,7 +941,7 @@ module Google
  def rpcs
  @rpcs ||= begin
  parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
  Rpcs.new parent_rpcs
  end
  end
@@ -953,7 +953,7 @@ module Google
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
- # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  # include the following keys:
@@ -475,7 +475,7 @@ module Google

  config_attr :endpoint, "dataproc.googleapis.com", ::String
  config_attr :credentials, nil do |value|
- allowed = [::String, ::Hash, ::Proc, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
+ allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
  allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
  allowed.any? { |klass| klass === value }
  end
@@ -503,7 +503,7 @@ module Google
  def rpcs
  @rpcs ||= begin
  parent_rpcs = nil
- parent_rpcs = @parent_config.rpcs if @parent_config&.respond_to? :rpcs
+ parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
  Rpcs.new parent_rpcs
  end
  end
@@ -515,7 +515,7 @@ module Google
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
- # * `timeout` (*type:* `Numeric`) - The call timeout in milliseconds
+ # * `timeout` (*type:* `Numeric`) - The call timeout in seconds
  # * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  # * `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  # include the following keys:
@@ -35,12 +35,12 @@ module Google
  self.service_name = 'google.cloud.dataproc.v1.WorkflowTemplateService'

  # Creates new workflow template.
- rpc :CreateWorkflowTemplate, CreateWorkflowTemplateRequest, WorkflowTemplate
+ rpc :CreateWorkflowTemplate, ::Google::Cloud::Dataproc::V1::CreateWorkflowTemplateRequest, ::Google::Cloud::Dataproc::V1::WorkflowTemplate
  # Retrieves the latest workflow template.
  #
  # Can retrieve previously instantiated template by specifying optional
  # version parameter.
- rpc :GetWorkflowTemplate, GetWorkflowTemplateRequest, WorkflowTemplate
+ rpc :GetWorkflowTemplate, ::Google::Cloud::Dataproc::V1::GetWorkflowTemplateRequest, ::Google::Cloud::Dataproc::V1::WorkflowTemplate
  # Instantiates a template and begins execution.
  #
  # The returned Operation can be used to track execution of
@@ -61,7 +61,7 @@ module Google
  # On successful completion,
  # [Operation.response][google.longrunning.Operation.response] will be
  # [Empty][google.protobuf.Empty].
- rpc :InstantiateWorkflowTemplate, InstantiateWorkflowTemplateRequest, Google::Longrunning::Operation
+ rpc :InstantiateWorkflowTemplate, ::Google::Cloud::Dataproc::V1::InstantiateWorkflowTemplateRequest, ::Google::Longrunning::Operation
  # Instantiates a template and begins execution.
  #
  # This method is equivalent to executing the sequence
@@ -86,14 +86,14 @@ module Google
  # On successful completion,
  # [Operation.response][google.longrunning.Operation.response] will be
  # [Empty][google.protobuf.Empty].
- rpc :InstantiateInlineWorkflowTemplate, InstantiateInlineWorkflowTemplateRequest, Google::Longrunning::Operation
+ rpc :InstantiateInlineWorkflowTemplate, ::Google::Cloud::Dataproc::V1::InstantiateInlineWorkflowTemplateRequest, ::Google::Longrunning::Operation
  # Updates (replaces) workflow template. The updated template
  # must contain version that matches the current server version.
- rpc :UpdateWorkflowTemplate, UpdateWorkflowTemplateRequest, WorkflowTemplate
+ rpc :UpdateWorkflowTemplate, ::Google::Cloud::Dataproc::V1::UpdateWorkflowTemplateRequest, ::Google::Cloud::Dataproc::V1::WorkflowTemplate
  # Lists workflows that match the specified filter in the request.
- rpc :ListWorkflowTemplates, ListWorkflowTemplatesRequest, ListWorkflowTemplatesResponse
+ rpc :ListWorkflowTemplates, ::Google::Cloud::Dataproc::V1::ListWorkflowTemplatesRequest, ::Google::Cloud::Dataproc::V1::ListWorkflowTemplatesResponse
  # Deletes a workflow template. It does not cancel in-progress workflows.
- rpc :DeleteWorkflowTemplate, DeleteWorkflowTemplateRequest, Google::Protobuf::Empty
+ rpc :DeleteWorkflowTemplate, ::Google::Cloud::Dataproc::V1::DeleteWorkflowTemplateRequest, ::Google::Protobuf::Empty
  end

  Stub = Service.rpc_stub_class
@@ -43,12 +43,12 @@ module Google
  #
  # The ResourceDescriptor Yaml config will look like:
  #
- # resources:
- # - type: "pubsub.googleapis.com/Topic"
- # name_descriptor:
- # - pattern: "projects/\\{project}/topics/\\{topic}"
- # parent_type: "cloudresourcemanager.googleapis.com/Project"
- # parent_name_extractor: "projects/\\{project}"
+ # resources:
+ # - type: "pubsub.googleapis.com/Topic"
+ # name_descriptor:
+ # - pattern: "projects/{project}/topics/{topic}"
+ # parent_type: "cloudresourcemanager.googleapis.com/Project"
+ # parent_name_extractor: "projects/{project}"
  #
  # Sometimes, resources have multiple patterns, typically because they can
  # live under multiple parents.
@@ -183,15 +183,24 @@ module Google
  # }
  # @!attribute [rw] plural
  # @return [::String]
- # The plural name used in the resource name, such as 'projects' for
- # the name of 'projects/\\{project}'. It is the same concept of the `plural`
- # field in k8s CRD spec
+ # The plural name used in the resource name and permission names, such as
+ # 'projects' for the resource name of 'projects/\\{project}' and the permission
+ # name of 'cloudresourcemanager.googleapis.com/projects.get'. It is the same
+ # concept of the `plural` field in k8s CRD spec
  # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/
+ #
+ # Note: The plural form is required even for singleton resources. See
+ # https://aip.dev/156
  # @!attribute [rw] singular
  # @return [::String]
  # The same concept of the `singular` field in k8s CRD spec
  # https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/
  # Such as "project" for the `resourcemanager.googleapis.com/Project` type.
+ # @!attribute [rw] style
+ # @return [::Array<::Google::Api::ResourceDescriptor::Style>]
+ # Style flag(s) for this resource.
+ # These indicate that a resource is expected to conform to a given
+ # style. See the specific style flags for additional information.
  class ResourceDescriptor
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -211,6 +220,22 @@ module Google
  # that from being necessary once there are multiple patterns.)
  FUTURE_MULTI_PATTERN = 2
  end
+
+ # A flag representing a specific style that a resource claims to conform to.
+ module Style
+ # The unspecified value. Do not use.
+ STYLE_UNSPECIFIED = 0
+
+ # This resource is intended to be "declarative-friendly".
+ #
+ # Declarative-friendly resources must be more strictly consistent, and
+ # setting this to true communicates to tools that this resource should
+ # adhere to declarative-friendly expectations.
+ #
+ # Note: This is used by the API linter (linter.aip.dev) to enable
+ # additional checks.
+ DECLARATIVE_FRIENDLY = 1
+ end
  end

  # Defines a proto annotation that describes a string field that refers to
@@ -226,6 +251,17 @@ module Google
  # type: "pubsub.googleapis.com/Topic"
  # }];
  # }
+ #
+ # Occasionally, a field may reference an arbitrary resource. In this case,
+ # APIs use the special value * in their resource reference.
+ #
+ # Example:
+ #
+ # message GetIamPolicyRequest {
+ # string resource = 2 [(google.api.resource_reference) = {
+ # type: "*"
+ # }];
+ # }
  # @!attribute [rw] child_type
  # @return [::String]
  # The resource type of a child collection that the annotated field
@@ -234,11 +270,11 @@ module Google
  #
  # Example:
  #
- # message ListLogEntriesRequest {
- # string parent = 1 [(google.api.resource_reference) = {
- # child_type: "logging.googleapis.com/LogEntry"
- # };
- # }
+ # message ListLogEntriesRequest {
+ # string parent = 1 [(google.api.resource_reference) = {
+ # child_type: "logging.googleapis.com/LogEntry"
+ # };
+ # }
  class ResourceReference
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -80,20 +80,26 @@ module Google
  # Bounds: [0s, 1d].
  # @!attribute [rw] scale_up_factor
  # @return [::Float]
- # Required. Fraction of average pending memory in the last cooldown period
+ # Required. Fraction of average YARN pending memory in the last cooldown period
  # for which to add workers. A scale-up factor of 1.0 will result in scaling
  # up so that there is no pending memory remaining after the update (more
  # aggressive scaling). A scale-up factor closer to 0 will result in a smaller
  # magnitude of scaling up (less aggressive scaling).
+ # See [How autoscaling
+ # works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+ # for more information.
  #
  # Bounds: [0.0, 1.0].
  # @!attribute [rw] scale_down_factor
  # @return [::Float]
- # Required. Fraction of average pending memory in the last cooldown period
+ # Required. Fraction of average YARN pending memory in the last cooldown period
  # for which to remove workers. A scale-down factor of 1 will result in
  # scaling down so that there is no available memory remaining after the
  # update (more aggressive scaling). A scale-down factor of 0 disables
  # removing workers, which can be beneficial for autoscaling a single job.
+ # See [How autoscaling
+ # works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works)
+ # for more information.
  #
  # Bounds: [0.0, 1.0].
  # @!attribute [rw] scale_up_min_worker_fraction
@@ -85,6 +85,17 @@ module Google
  # and manage this project-level, per-location bucket (see
  # [Dataproc staging
  # bucket](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
+ # @!attribute [rw] temp_bucket
+ # @return [::String]
+ # Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data,
+ # such as Spark and MapReduce history files.
+ # If you do not specify a temp bucket,
+ # Dataproc will determine a Cloud Storage location (US,
+ # ASIA, or EU) for your cluster's temp bucket according to the
+ # Compute Engine zone where your cluster is deployed, and then create
+ # and manage this project-level, per-location bucket. The default bucket has
+ # a TTL of 90 days, but you can use any TTL (or none) if you specify a
+ # bucket.
  # @!attribute [rw] gce_cluster_config
  # @return [::Google::Cloud::Dataproc::V1::GceClusterConfig]
  # Optional. The shared Compute Engine config settings for
@@ -132,11 +143,37 @@ module Google
  # @!attribute [rw] lifecycle_config
  # @return [::Google::Cloud::Dataproc::V1::LifecycleConfig]
  # Optional. Lifecycle setting for the cluster.
+ # @!attribute [rw] endpoint_config
+ # @return [::Google::Cloud::Dataproc::V1::EndpointConfig]
+ # Optional. Port/endpoint configuration for this cluster
  class ClusterConfig
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
  end

+ # Endpoint config for this cluster
+ # @!attribute [r] http_ports
+ # @return [::Google::Protobuf::Map{::String => ::String}]
+ # Output only. The map of port descriptions to URLs. Will only be populated
+ # if enable_http_port_access is true.
+ # @!attribute [rw] enable_http_port_access
+ # @return [::Boolean]
+ # Optional. If true, enable http access to specific ports on the cluster
+ # from external sources. Defaults to false.
+ class EndpointConfig
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # @!attribute [rw] key
+ # @return [::String]
+ # @!attribute [rw] value
+ # @return [::String]
+ class HttpPortsEntry
+ include ::Google::Protobuf::MessageExts
+ extend ::Google::Protobuf::MessageExts::ClassMethods
+ end
+ end
+
  # Autoscaling Policy config associated with the cluster.
  # @!attribute [rw] policy_uri
  # @return [::String]
@@ -214,7 +251,7 @@ module Google
  # @!attribute [rw] service_account
  # @return [::String]
  # Optional. The [Dataproc service
- # account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc)
+ # account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc)
  # (also see [VM Data Plane
  # identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity))
  # used by Dataproc cluster VM instances to access Google Cloud Platform
@@ -318,6 +355,15 @@ module Google
  # @return [::Boolean]
  # Output only. Specifies that this instance group contains preemptible
  # instances.
+ # @!attribute [rw] preemptibility
+ # @return [::Google::Cloud::Dataproc::V1::InstanceGroupConfig::Preemptibility]
+ # Optional. Specifies the preemptibility of the instance group.
+ #
+ # The default value for master and worker groups is
+ # `NON_PREEMPTIBLE`. This default cannot be changed.
+ #
+ # The default value for secondary instances is
+ # `PREEMPTIBLE`.
  # @!attribute [r] managed_group_config
  # @return [::Google::Cloud::Dataproc::V1::ManagedGroupConfig]
  # Output only. The config for Compute Engine Instance Group
@@ -335,6 +381,27 @@ module Google
  class InstanceGroupConfig
  include ::Google::Protobuf::MessageExts
  extend ::Google::Protobuf::MessageExts::ClassMethods
+
+ # Controls the use of
+ # [preemptible instances]
+ # (https://cloud.google.com/compute/docs/instances/preemptible)
+ # within the group.
+ module Preemptibility
+ # Preemptibility is unspecified, the system will choose the
+ # appropriate setting for each instance group.
+ PREEMPTIBILITY_UNSPECIFIED = 0
+
+ # Instances are non-preemptible.
+ #
+ # This option is allowed for all instance groups and is the only valid
+ # value for Master and Worker instance groups.
+ NON_PREEMPTIBLE = 1
+
+ # Instances are preemptible.
+ #
+ # This option is allowed only for secondary worker groups.
+ PREEMPTIBLE = 2
+ end
  end

  # Specifies the resources used to actively manage an instance group.
@@ -567,7 +634,7 @@ module Google
  # @return [::String]
  # Optional. The version of software inside the cluster. It must be one of the
  # supported [Dataproc
- # Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions),
+ # Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
  # such as "1.2" (including a subminor version, such as "1.2.29"), or the
  # ["preview"
  # version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
@@ -153,12 +153,12 @@ module Google
  # Spark driver and tasks.
  # @!attribute [rw] file_uris
  # @return [::Array<::String>]
- # Optional. HCFS URIs of files to be copied to the working directory of
- # Spark drivers and distributed tasks. Useful for naively parallel tasks.
+ # Optional. HCFS URIs of files to be placed in the working directory of
+ # each executor. Useful for naively parallel tasks.
  # @!attribute [rw] archive_uris
  # @return [::Array<::String>]
- # Optional. HCFS URIs of archives to be extracted in the working directory
- # of Spark drivers and tasks. Supported file types:
+ # Optional. HCFS URIs of archives to be extracted into the working directory
+ # of each executor. Supported file types:
  # .jar, .tar, .tar.gz, .tgz, and .zip.
  # @!attribute [rw] properties
  # @return [::Google::Protobuf::Map{::String => ::String}]
@@ -206,11 +206,12 @@ module Google
  # Python driver and tasks.
  # @!attribute [rw] file_uris
  # @return [::Array<::String>]
- # Optional. HCFS URIs of files to be copied to the working directory of
- # Python drivers and distributed tasks. Useful for naively parallel tasks.
+ # Optional. HCFS URIs of files to be placed in the working directory of
+ # each executor. Useful for naively parallel tasks.
  # @!attribute [rw] archive_uris
  # @return [::Array<::String>]
- # Optional. HCFS URIs of archives to be extracted in the working directory of
+ # Optional. HCFS URIs of archives to be extracted into the working directory
+ # of each executor. Supported file types:
  # .jar, .tar, .tar.gz, .tgz, and .zip.
  # @!attribute [rw] properties
  # @return [::Google::Protobuf::Map{::String => ::String}]
@@ -421,12 +422,12 @@ module Google
  # occur that causes an incorrect job submission.
  # @!attribute [rw] file_uris
  # @return [::Array<::String>]
- # Optional. HCFS URIs of files to be copied to the working directory of
- # R drivers and distributed tasks. Useful for naively parallel tasks.
+ # Optional. HCFS URIs of files to be placed in the working directory of
+ # each executor. Useful for naively parallel tasks.
  # @!attribute [rw] archive_uris
  # @return [::Array<::String>]
- # Optional. HCFS URIs of archives to be extracted in the working directory of
- # Spark drivers and tasks. Supported file types:
+ # Optional. HCFS URIs of archives to be extracted into the working directory
+ # of each executor. Supported file types:
  # .jar, .tar, .tar.gz, .tgz, and .zip.
  # @!attribute [rw] properties
  # @return [::Google::Protobuf::Map{::String => ::String}]
@@ -453,8 +454,9 @@ module Google

  # A Dataproc job for running [Presto](https://prestosql.io/) queries.
  # **IMPORTANT**: The [Dataproc Presto Optional
- # Component](/dataproc/docs/concepts/components/presto) must be enabled when
- # the cluster is created to submit a Presto job to the cluster.
+ # Component](https://cloud.google.com/dataproc/docs/concepts/components/presto)
+ # must be enabled when the cluster is created to submit a Presto job to the
+ # cluster.
  # @!attribute [rw] query_file_uri
  # @return [::String]
  # The HCFS URI of the script that contains SQL queries.
@@ -594,8 +596,8 @@ module Google
  # Encapsulates the full scoping used to reference a job.
  # @!attribute [rw] project_id
  # @return [::String]
- # Required. The ID of the Google Cloud Platform project that the job
- # belongs to.
+ # Optional. The ID of the Google Cloud Platform project that the job belongs to. If
+ # specified, must match the request project ID.
  # @!attribute [rw] job_id
  # @return [::String]
  # Optional. The job ID, which must be unique within the project.
@@ -23,7 +23,7 @@ module Google
  module V1
  # Cluster components that can be activated.
  module Component
- # Unspecified component.
+ # Unspecified component. Specifying this will cause Cluster creation to fail.
  COMPONENT_UNSPECIFIED = 0

  # The Anaconda python distribution.
@@ -75,7 +75,7 @@ module Google
  # Required. The Directed Acyclic Graph of Jobs to submit.
  # @!attribute [rw] parameters
  # @return [::Array<::Google::Cloud::Dataproc::V1::TemplateParameter>]
- # Optional. emplate parameters whose values are substituted into the
+ # Optional. Template parameters whose values are substituted into the
  # template. Values for parameters must be provided when the template is
  # instantiated.
  class WorkflowTemplate
@@ -189,22 +189,28 @@ module Google
  # or hyphen. Must consist of between 3 and 50 characters.
  # @!attribute [rw] hadoop_job
  # @return [::Google::Cloud::Dataproc::V1::HadoopJob]
+ # Optional. Job is a Hadoop job.
  # @!attribute [rw] spark_job
  # @return [::Google::Cloud::Dataproc::V1::SparkJob]
+ # Optional. Job is a Spark job.
  # @!attribute [rw] pyspark_job
  # @return [::Google::Cloud::Dataproc::V1::PySparkJob]
+ # Optional. Job is a PySpark job.
  # @!attribute [rw] hive_job
  # @return [::Google::Cloud::Dataproc::V1::HiveJob]
+ # Optional. Job is a Hive job.
  # @!attribute [rw] pig_job
  # @return [::Google::Cloud::Dataproc::V1::PigJob]
+ # Optional. Job is a Pig job.
  # @!attribute [rw] spark_r_job
  # @return [::Google::Cloud::Dataproc::V1::SparkRJob]
- # Spark R job
+ # Optional. Job is a SparkR job.
  # @!attribute [rw] spark_sql_job
  # @return [::Google::Cloud::Dataproc::V1::SparkSqlJob]
+ # Optional. Job is a SparkSql job.
  # @!attribute [rw] presto_job
  # @return [::Google::Cloud::Dataproc::V1::PrestoJob]
- # Presto job
+ # Optional. Job is a Presto job.
  # @!attribute [rw] labels
  # @return [::Google::Protobuf::Map{::String => ::String}]
  # Optional. The labels to associate with this job.
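A hedged sketch of a `WorkflowTemplate` whose ordered jobs use a couple of the job types documented above; field names outside this hunk (`placement`, `managed_cluster`, `step_id`, `prerequisite_step_ids`) are assumed from the wider message, and all names and URIs are placeholders:

```ruby
require "google/cloud/dataproc/v1"

template = ::Google::Cloud::Dataproc::V1::WorkflowTemplate.new(
  id: "nightly-etl",                                        # placeholder template ID
  placement: {
    managed_cluster: { cluster_name: "nightly-etl-cluster", config: {} }
  },
  jobs: [
    {
      step_id: "extract",
      spark_job: { main_class: "com.example.Extract" }      # placeholder driver class
    },
    {
      step_id: "load",
      prerequisite_step_ids: ["extract"],                   # runs after the Spark step
      pyspark_job: { main_python_file_uri: "gs://my-bucket/load.py" }  # placeholder URI
    }
  ]
)
```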
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: google-cloud-dataproc-v1
  version: !ruby/object:Gem::Version
- version: 0.2.1
+ version: 0.3.2
  platform: ruby
  authors:
  - Google LLC
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-05-26 00:00:00.000000000 Z
+ date: 2021-01-19 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gapic-common
@@ -16,14 +16,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.2'
+ version: '0.3'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.2'
+ version: '0.3'
  - !ruby/object:Gem::Dependency
  name: google-cloud-errors
  requirement: !ruby/object:Gem::Requirement
@@ -225,7 +225,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.0.6
+ rubygems_version: 3.2.6
  signing_key:
  specification_version: 4
  summary: API Client library for the Cloud Dataproc V1 API