google-cloud-dataproc-v1 0.16.0 → 0.18.0

Files changed (58)
  1. checksums.yaml +4 -4
  2. data/README.md +2 -2
  3. data/lib/google/cloud/dataproc/v1/autoscaling_policies_pb.rb +25 -51
  4. data/lib/google/cloud/dataproc/v1/autoscaling_policy_service/client.rb +6 -4
  5. data/lib/google/cloud/dataproc/v1/autoscaling_policy_service/rest/client.rb +713 -0
  6. data/lib/google/cloud/dataproc/v1/autoscaling_policy_service/rest/service_stub.rb +382 -0
  7. data/lib/google/cloud/dataproc/v1/autoscaling_policy_service/rest.rb +54 -0
  8. data/lib/google/cloud/dataproc/v1/autoscaling_policy_service.rb +6 -0
  9. data/lib/google/cloud/dataproc/v1/batch_controller/client.rb +6 -4
  10. data/lib/google/cloud/dataproc/v1/batch_controller/operations.rb +5 -3
  11. data/lib/google/cloud/dataproc/v1/batch_controller/rest/client.rb +642 -0
  12. data/lib/google/cloud/dataproc/v1/batch_controller/rest/operations.rb +822 -0
  13. data/lib/google/cloud/dataproc/v1/batch_controller/rest/service_stub.rb +285 -0
  14. data/lib/google/cloud/dataproc/v1/batch_controller/rest.rb +54 -0
  15. data/lib/google/cloud/dataproc/v1/batch_controller.rb +6 -0
  16. data/lib/google/cloud/dataproc/v1/batches_pb.rb +26 -88
  17. data/lib/google/cloud/dataproc/v1/bindings_override.rb +257 -0
  18. data/lib/google/cloud/dataproc/v1/cluster_controller/client.rb +6 -4
  19. data/lib/google/cloud/dataproc/v1/cluster_controller/operations.rb +5 -3
  20. data/lib/google/cloud/dataproc/v1/cluster_controller/rest/client.rb +1119 -0
  21. data/lib/google/cloud/dataproc/v1/cluster_controller/rest/operations.rb +822 -0
  22. data/lib/google/cloud/dataproc/v1/cluster_controller/rest/service_stub.rb +539 -0
  23. data/lib/google/cloud/dataproc/v1/cluster_controller/rest.rb +55 -0
  24. data/lib/google/cloud/dataproc/v1/cluster_controller.rb +6 -0
  25. data/lib/google/cloud/dataproc/v1/clusters_pb.rb +28 -289
  26. data/lib/google/cloud/dataproc/v1/job_controller/client.rb +6 -4
  27. data/lib/google/cloud/dataproc/v1/job_controller/operations.rb +5 -3
  28. data/lib/google/cloud/dataproc/v1/job_controller/rest/client.rb +931 -0
  29. data/lib/google/cloud/dataproc/v1/job_controller/rest/operations.rb +822 -0
  30. data/lib/google/cloud/dataproc/v1/job_controller/rest/service_stub.rb +476 -0
  31. data/lib/google/cloud/dataproc/v1/job_controller/rest.rb +53 -0
  32. data/lib/google/cloud/dataproc/v1/job_controller.rb +6 -0
  33. data/lib/google/cloud/dataproc/v1/jobs_pb.rb +26 -248
  34. data/lib/google/cloud/dataproc/v1/node_group_controller/client.rb +6 -4
  35. data/lib/google/cloud/dataproc/v1/node_group_controller/operations.rb +5 -3
  36. data/lib/google/cloud/dataproc/v1/node_group_controller/rest/client.rb +583 -0
  37. data/lib/google/cloud/dataproc/v1/node_group_controller/rest/operations.rb +822 -0
  38. data/lib/google/cloud/dataproc/v1/node_group_controller/rest/service_stub.rb +227 -0
  39. data/lib/google/cloud/dataproc/v1/node_group_controller/rest.rb +55 -0
  40. data/lib/google/cloud/dataproc/v1/node_group_controller.rb +6 -0
  41. data/lib/google/cloud/dataproc/v1/node_groups_pb.rb +26 -16
  42. data/lib/google/cloud/dataproc/v1/operations_pb.rb +25 -55
  43. data/lib/google/cloud/dataproc/v1/rest.rb +43 -0
  44. data/lib/google/cloud/dataproc/v1/shared_pb.rb +26 -116
  45. data/lib/google/cloud/dataproc/v1/version.rb +1 -1
  46. data/lib/google/cloud/dataproc/v1/workflow_template_service/client.rb +6 -4
  47. data/lib/google/cloud/dataproc/v1/workflow_template_service/operations.rb +5 -3
  48. data/lib/google/cloud/dataproc/v1/workflow_template_service/rest/client.rb +985 -0
  49. data/lib/google/cloud/dataproc/v1/workflow_template_service/rest/operations.rb +822 -0
  50. data/lib/google/cloud/dataproc/v1/workflow_template_service/rest/service_stub.rb +518 -0
  51. data/lib/google/cloud/dataproc/v1/workflow_template_service/rest.rb +55 -0
  52. data/lib/google/cloud/dataproc/v1/workflow_template_service.rb +6 -0
  53. data/lib/google/cloud/dataproc/v1/workflow_templates_pb.rb +28 -142
  54. data/lib/google/cloud/dataproc/v1.rb +5 -0
  55. data/proto_docs/google/api/client.rb +67 -4
  56. data/proto_docs/google/protobuf/any.rb +7 -4
  57. data/proto_docs/google/protobuf/timestamp.rb +1 -3
  58. metadata +30 -5
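
The headline change in this diff is the addition of REST (JSON over HTTP) transports alongside the existing gRPC clients: every service gains a rest/client.rb and rest/service_stub.rb, a rest.rb entry point, long-running-operation support under rest/operations.rb, and shared routing overrides in bindings_override.rb. A minimal sketch of loading the two transports side by side, assuming Application Default Credentials, placeholder project/region values, and the usual GAPIC calling convention (keyword arguments build the request proto):

    require "google/cloud/dataproc/v1/batch_controller"       # existing gRPC transport
    require "google/cloud/dataproc/v1/batch_controller/rest"  # new REST transport

    grpc_client = ::Google::Cloud::Dataproc::V1::BatchController::Client.new
    rest_client = ::Google::Cloud::Dataproc::V1::BatchController::Rest::Client.new

    # "my-project" and "us-central1" are placeholders.
    rest_client.list_batches(parent: "projects/my-project/locations/us-central1").each do |batch|
      puts batch.name
    end
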
data/lib/google/cloud/dataproc/v1/batch_controller/rest/service_stub.rb
@@ -0,0 +1,285 @@
+# frozen_string_literal: true
+
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+require "google/cloud/dataproc/v1/batches_pb"
+
+module Google
+  module Cloud
+    module Dataproc
+      module V1
+        module BatchController
+          module Rest
+            ##
+            # REST service stub for the BatchController service.
+            # Service stub contains baseline method implementations
+            # including transcoding, making the REST call, and deserialing the response.
+            #
+            class ServiceStub
+              def initialize endpoint:, credentials:
+                # These require statements are intentionally placed here to initialize
+                # the REST modules only when it's required.
+                require "gapic/rest"
+
+                @client_stub = ::Gapic::Rest::ClientStub.new endpoint: endpoint, credentials: credentials,
+                                                             numeric_enums: true,
+                                                             raise_faraday_errors: false
+              end
+
+              ##
+              # Baseline implementation for the create_batch REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::CreateBatchRequest]
+              #   A request object representing the call parameters. Required.
+              # @param options [::Gapic::CallOptions]
+              #   Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
+              #
+              # @yield [result, operation] Access the result along with the TransportOperation object
+              # @yieldparam result [::Google::Longrunning::Operation]
+              # @yieldparam operation [::Gapic::Rest::TransportOperation]
+              #
+              # @return [::Google::Longrunning::Operation]
+              #   A result object deserialized from the server's reply
+              def create_batch request_pb, options = nil
+                raise ::ArgumentError, "request must be provided" if request_pb.nil?
+
+                verb, uri, query_string_params, body = ServiceStub.transcode_create_batch_request request_pb
+                query_string_params = if query_string_params.any?
+                                        query_string_params.to_h { |p| p.split "=", 2 }
+                                      else
+                                        {}
+                                      end
+
+                response = @client_stub.make_http_request(
+                  verb,
+                  uri: uri,
+                  body: body || "",
+                  params: query_string_params,
+                  options: options
+                )
+                operation = ::Gapic::Rest::TransportOperation.new response
+                result = ::Google::Longrunning::Operation.decode_json response.body, ignore_unknown_fields: true
+
+                yield result, operation if block_given?
+                result
+              end
+
+              ##
+              # Baseline implementation for the get_batch REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::GetBatchRequest]
+              #   A request object representing the call parameters. Required.
+              # @param options [::Gapic::CallOptions]
+              #   Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
+              #
+              # @yield [result, operation] Access the result along with the TransportOperation object
+              # @yieldparam result [::Google::Cloud::Dataproc::V1::Batch]
+              # @yieldparam operation [::Gapic::Rest::TransportOperation]
+              #
+              # @return [::Google::Cloud::Dataproc::V1::Batch]
+              #   A result object deserialized from the server's reply
+              def get_batch request_pb, options = nil
+                raise ::ArgumentError, "request must be provided" if request_pb.nil?
+
+                verb, uri, query_string_params, body = ServiceStub.transcode_get_batch_request request_pb
+                query_string_params = if query_string_params.any?
+                                        query_string_params.to_h { |p| p.split "=", 2 }
+                                      else
+                                        {}
+                                      end
+
+                response = @client_stub.make_http_request(
+                  verb,
+                  uri: uri,
+                  body: body || "",
+                  params: query_string_params,
+                  options: options
+                )
+                operation = ::Gapic::Rest::TransportOperation.new response
+                result = ::Google::Cloud::Dataproc::V1::Batch.decode_json response.body, ignore_unknown_fields: true
+
+                yield result, operation if block_given?
+                result
+              end
+
+              ##
+              # Baseline implementation for the list_batches REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::ListBatchesRequest]
+              #   A request object representing the call parameters. Required.
+              # @param options [::Gapic::CallOptions]
+              #   Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
+              #
+              # @yield [result, operation] Access the result along with the TransportOperation object
+              # @yieldparam result [::Google::Cloud::Dataproc::V1::ListBatchesResponse]
+              # @yieldparam operation [::Gapic::Rest::TransportOperation]
+              #
+              # @return [::Google::Cloud::Dataproc::V1::ListBatchesResponse]
+              #   A result object deserialized from the server's reply
+              def list_batches request_pb, options = nil
+                raise ::ArgumentError, "request must be provided" if request_pb.nil?
+
+                verb, uri, query_string_params, body = ServiceStub.transcode_list_batches_request request_pb
+                query_string_params = if query_string_params.any?
+                                        query_string_params.to_h { |p| p.split "=", 2 }
+                                      else
+                                        {}
+                                      end
+
+                response = @client_stub.make_http_request(
+                  verb,
+                  uri: uri,
+                  body: body || "",
+                  params: query_string_params,
+                  options: options
+                )
+                operation = ::Gapic::Rest::TransportOperation.new response
+                result = ::Google::Cloud::Dataproc::V1::ListBatchesResponse.decode_json response.body, ignore_unknown_fields: true
+
+                yield result, operation if block_given?
+                result
+              end
+
+              ##
+              # Baseline implementation for the delete_batch REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::DeleteBatchRequest]
+              #   A request object representing the call parameters. Required.
+              # @param options [::Gapic::CallOptions]
+              #   Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
+              #
+              # @yield [result, operation] Access the result along with the TransportOperation object
+              # @yieldparam result [::Google::Protobuf::Empty]
+              # @yieldparam operation [::Gapic::Rest::TransportOperation]
+              #
+              # @return [::Google::Protobuf::Empty]
+              #   A result object deserialized from the server's reply
+              def delete_batch request_pb, options = nil
+                raise ::ArgumentError, "request must be provided" if request_pb.nil?
+
+                verb, uri, query_string_params, body = ServiceStub.transcode_delete_batch_request request_pb
+                query_string_params = if query_string_params.any?
+                                        query_string_params.to_h { |p| p.split "=", 2 }
+                                      else
+                                        {}
+                                      end
+
+                response = @client_stub.make_http_request(
+                  verb,
+                  uri: uri,
+                  body: body || "",
+                  params: query_string_params,
+                  options: options
+                )
+                operation = ::Gapic::Rest::TransportOperation.new response
+                result = ::Google::Protobuf::Empty.decode_json response.body, ignore_unknown_fields: true
+
+                yield result, operation if block_given?
+                result
+              end
+
+              ##
+              # @private
+              #
+              # GRPC transcoding helper method for the create_batch REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::CreateBatchRequest]
+              #   A request object representing the call parameters. Required.
+              # @return [Array(String, [String, nil], Hash{String => String})]
+              #   Uri, Body, Query string parameters
+              def self.transcode_create_batch_request request_pb
+                transcoder = Gapic::Rest::GrpcTranscoder.new
+                                                        .with_bindings(
+                                                          uri_method: :post,
+                                                          uri_template: "/v1/{parent}/batches",
+                                                          body: "batch",
+                                                          matches: [
+                                                            ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false]
+                                                          ]
+                                                        )
+                transcoder.transcode request_pb
+              end
+
+              ##
+              # @private
+              #
+              # GRPC transcoding helper method for the get_batch REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::GetBatchRequest]
+              #   A request object representing the call parameters. Required.
+              # @return [Array(String, [String, nil], Hash{String => String})]
+              #   Uri, Body, Query string parameters
+              def self.transcode_get_batch_request request_pb
+                transcoder = Gapic::Rest::GrpcTranscoder.new
+                                                        .with_bindings(
+                                                          uri_method: :get,
+                                                          uri_template: "/v1/{name}",
+                                                          matches: [
+                                                            ["name", %r{^projects/[^/]+/locations/[^/]+/batches/[^/]+/?$}, false]
+                                                          ]
+                                                        )
+                transcoder.transcode request_pb
+              end
+
+              ##
+              # @private
+              #
+              # GRPC transcoding helper method for the list_batches REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::ListBatchesRequest]
+              #   A request object representing the call parameters. Required.
+              # @return [Array(String, [String, nil], Hash{String => String})]
+              #   Uri, Body, Query string parameters
+              def self.transcode_list_batches_request request_pb
+                transcoder = Gapic::Rest::GrpcTranscoder.new
+                                                        .with_bindings(
+                                                          uri_method: :get,
+                                                          uri_template: "/v1/{parent}/batches",
+                                                          matches: [
+                                                            ["parent", %r{^projects/[^/]+/locations/[^/]+/?$}, false]
+                                                          ]
+                                                        )
+                transcoder.transcode request_pb
+              end
+
+              ##
+              # @private
+              #
+              # GRPC transcoding helper method for the delete_batch REST call
+              #
+              # @param request_pb [::Google::Cloud::Dataproc::V1::DeleteBatchRequest]
+              #   A request object representing the call parameters. Required.
+              # @return [Array(String, [String, nil], Hash{String => String})]
+              #   Uri, Body, Query string parameters
+              def self.transcode_delete_batch_request request_pb
+                transcoder = Gapic::Rest::GrpcTranscoder.new
+                                                        .with_bindings(
+                                                          uri_method: :delete,
+                                                          uri_template: "/v1/{name}",
+                                                          matches: [
+                                                            ["name", %r{^projects/[^/]+/locations/[^/]+/batches/[^/]+/?$}, false]
+                                                          ]
+                                                        )
+                transcoder.transcode request_pb
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
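
The transcode_* class methods above map each request message onto its HTTP binding before the stub issues the call through Gapic::Rest::ClientStub. As an illustration (not part of the diff), a GetBatchRequest whose name matches projects/*/locations/*/batches/* should transcode to a plain GET on /v1/{name}; the batch name below is a placeholder, and the helper is marked @private, so this sketch only shows the verb/uri/query/body tuple handed to make_http_request:

    require "google/cloud/dataproc/v1/batch_controller/rest"

    request = ::Google::Cloud::Dataproc::V1::GetBatchRequest.new(
      name: "projects/my-project/locations/us-central1/batches/my-batch"  # placeholder
    )

    stub_class = ::Google::Cloud::Dataproc::V1::BatchController::Rest::ServiceStub
    verb, uri, query, body = stub_class.transcode_get_batch_request request

    # Expected roughly: verb == :get, uri == "/v1/projects/.../batches/my-batch",
    # query == [] (an array of "k=v" strings when params exist), body == nil.
    # delete_batch transcodes the same URI with :delete; create_batch POSTs the
    # "batch" field as the request body under /v1/{parent}/batches.
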
data/lib/google/cloud/dataproc/v1/batch_controller/rest.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+require "gapic/rest"
+require "gapic/config"
+require "gapic/config/method"
+
+require "google/cloud/dataproc/v1/version"
+require "google/cloud/dataproc/v1/bindings_override"
+
+require "google/cloud/dataproc/v1/batch_controller/credentials"
+require "google/cloud/dataproc/v1/batch_controller/paths"
+require "google/cloud/dataproc/v1/batch_controller/rest/operations"
+require "google/cloud/dataproc/v1/batch_controller/rest/client"
+
+module Google
+  module Cloud
+    module Dataproc
+      module V1
+        ##
+        # The BatchController provides methods to manage batch workloads.
+        #
+        # To load this service and instantiate a REST client:
+        #
+        #     require "google/cloud/dataproc/v1/batch_controller/rest"
+        #     client = ::Google::Cloud::Dataproc::V1::BatchController::Rest::Client.new
+        #
+        module BatchController
+          # Client for the REST transport
+          module Rest
+          end
+        end
+      end
+    end
+  end
+end
+
+helper_path = ::File.join __dir__, "rest", "helpers.rb"
+require "google/cloud/dataproc/v1/batch_controller/rest/helpers" if ::File.file? helper_path
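
Each controller also gains a rest/operations.rb so that long-running calls can be started and polled entirely over REST. A sketch of that flow for create_batch, assuming the standard GAPIC operation interface (wait_until_done!, error?, response) and placeholder resource names; the batch fields come from the batches_pb.rb hunk further down:

    require "google/cloud/dataproc/v1/batch_controller/rest"

    client = ::Google::Cloud::Dataproc::V1::BatchController::Rest::Client.new

    # create_batch returns a ::Gapic::Operation wrapping google.longrunning.Operation.
    operation = client.create_batch(
      parent:   "projects/my-project/locations/us-central1",  # placeholder
      batch_id: "example-batch",                               # placeholder
      batch:    { spark_batch: { main_class: "org.example.Job", jar_file_uris: ["gs://my-bucket/job.jar"] } }
    )

    operation.wait_until_done!
    raise operation.error.message if operation.error?
    batch = operation.response  # ::Google::Cloud::Dataproc::V1::Batch
    puts batch.state
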
data/lib/google/cloud/dataproc/v1/batch_controller.rb
@@ -26,6 +26,7 @@ require "google/cloud/dataproc/v1/batch_controller/credentials"
 require "google/cloud/dataproc/v1/batch_controller/paths"
 require "google/cloud/dataproc/v1/batch_controller/operations"
 require "google/cloud/dataproc/v1/batch_controller/client"
+require "google/cloud/dataproc/v1/batch_controller/rest"
 
 module Google
   module Cloud
@@ -39,6 +40,11 @@ module Google
         #     require "google/cloud/dataproc/v1/batch_controller"
         #     client = ::Google::Cloud::Dataproc::V1::BatchController::Client.new
         #
+        # @example Load this service and instantiate a REST client
+        #
+        #     require "google/cloud/dataproc/v1/batch_controller/rest"
+        #     client = ::Google::Cloud::Dataproc::V1::BatchController::Rest::Client.new
+        #
         module BatchController
         end
       end
data/lib/google/cloud/dataproc/v1/batches_pb.rb
@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 # Generated by the protocol buffer compiler. DO NOT EDIT!
 # source: google/cloud/dataproc/v1/batches.proto
 
@@ -12,96 +13,33 @@ require 'google/longrunning/operations_pb'
 require 'google/protobuf/empty_pb'
 require 'google/protobuf/timestamp_pb'
 
-Google::Protobuf::DescriptorPool.generated_pool.build do
-  add_file("google/cloud/dataproc/v1/batches.proto", :syntax => :proto3) do
-    add_message "google.cloud.dataproc.v1.CreateBatchRequest" do
-      optional :parent, :string, 1
-      optional :batch, :message, 2, "google.cloud.dataproc.v1.Batch"
-      optional :batch_id, :string, 3
-      optional :request_id, :string, 4
-    end
-    add_message "google.cloud.dataproc.v1.GetBatchRequest" do
-      optional :name, :string, 1
-    end
-    add_message "google.cloud.dataproc.v1.ListBatchesRequest" do
-      optional :parent, :string, 1
-      optional :page_size, :int32, 2
-      optional :page_token, :string, 3
-      optional :filter, :string, 4
-      optional :order_by, :string, 5
-    end
-    add_message "google.cloud.dataproc.v1.ListBatchesResponse" do
-      repeated :batches, :message, 1, "google.cloud.dataproc.v1.Batch"
-      optional :next_page_token, :string, 2
-    end
-    add_message "google.cloud.dataproc.v1.DeleteBatchRequest" do
-      optional :name, :string, 1
-    end
-    add_message "google.cloud.dataproc.v1.Batch" do
-      optional :name, :string, 1
-      optional :uuid, :string, 2
-      optional :create_time, :message, 3, "google.protobuf.Timestamp"
-      optional :runtime_info, :message, 8, "google.cloud.dataproc.v1.RuntimeInfo"
-      optional :state, :enum, 9, "google.cloud.dataproc.v1.Batch.State"
-      optional :state_message, :string, 10
-      optional :state_time, :message, 11, "google.protobuf.Timestamp"
-      optional :creator, :string, 12
-      map :labels, :string, :string, 13
-      optional :runtime_config, :message, 14, "google.cloud.dataproc.v1.RuntimeConfig"
-      optional :environment_config, :message, 15, "google.cloud.dataproc.v1.EnvironmentConfig"
-      optional :operation, :string, 16
-      repeated :state_history, :message, 17, "google.cloud.dataproc.v1.Batch.StateHistory"
-      oneof :batch_config do
-        optional :pyspark_batch, :message, 4, "google.cloud.dataproc.v1.PySparkBatch"
-        optional :spark_batch, :message, 5, "google.cloud.dataproc.v1.SparkBatch"
-        optional :spark_r_batch, :message, 6, "google.cloud.dataproc.v1.SparkRBatch"
-        optional :spark_sql_batch, :message, 7, "google.cloud.dataproc.v1.SparkSqlBatch"
-      end
-    end
-    add_message "google.cloud.dataproc.v1.Batch.StateHistory" do
-      optional :state, :enum, 1, "google.cloud.dataproc.v1.Batch.State"
-      optional :state_message, :string, 2
-      optional :state_start_time, :message, 3, "google.protobuf.Timestamp"
-    end
-    add_enum "google.cloud.dataproc.v1.Batch.State" do
-      value :STATE_UNSPECIFIED, 0
-      value :PENDING, 1
-      value :RUNNING, 2
-      value :CANCELLING, 3
-      value :CANCELLED, 4
-      value :SUCCEEDED, 5
-      value :FAILED, 6
-    end
-    add_message "google.cloud.dataproc.v1.PySparkBatch" do
-      optional :main_python_file_uri, :string, 1
-      repeated :args, :string, 2
-      repeated :python_file_uris, :string, 3
-      repeated :jar_file_uris, :string, 4
-      repeated :file_uris, :string, 5
-      repeated :archive_uris, :string, 6
-    end
-    add_message "google.cloud.dataproc.v1.SparkBatch" do
-      repeated :args, :string, 3
-      repeated :jar_file_uris, :string, 4
-      repeated :file_uris, :string, 5
-      repeated :archive_uris, :string, 6
-      oneof :driver do
-        optional :main_jar_file_uri, :string, 1
-        optional :main_class, :string, 2
-      end
-    end
-    add_message "google.cloud.dataproc.v1.SparkRBatch" do
-      optional :main_r_file_uri, :string, 1
-      repeated :args, :string, 2
-      repeated :file_uris, :string, 3
-      repeated :archive_uris, :string, 4
-    end
-    add_message "google.cloud.dataproc.v1.SparkSqlBatch" do
-      optional :query_file_uri, :string, 1
-      map :query_variables, :string, :string, 2
-      repeated :jar_file_uris, :string, 3
+
+ descriptor_data = "\n&google/cloud/dataproc/v1/batches.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a%google/cloud/dataproc/v1/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb0\x01\n\x12\x43reateBatchRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\x12\x1d\x64\x61taproc.googleapis.com/Batch\x12\x33\n\x05\x62\x61tch\x18\x02 \x01(\x0b\x32\x1f.google.cloud.dataproc.v1.BatchB\x03\xe0\x41\x02\x12\x15\n\x08\x62\x61tch_id\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01\"F\n\x0fGetBatchRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x64\x61taproc.googleapis.com/Batch\"\xa8\x01\n\x12ListBatchesRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\x12\x1d\x64\x61taproc.googleapis.com/Batch\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x15\n\x08order_by\x18\x05 \x01(\tB\x03\xe0\x41\x01\"`\n\x13ListBatchesResponse\x12\x30\n\x07\x62\x61tches\x18\x01 \x03(\x0b\x32\x1f.google.cloud.dataproc.v1.Batch\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"I\n\x12\x44\x65leteBatchRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x64\x61taproc.googleapis.com/Batch\"\xc8\n\n\x05\x42\x61tch\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04uuid\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x44\n\rpyspark_batch\x18\x04 \x01(\x0b\x32&.google.cloud.dataproc.v1.PySparkBatchB\x03\xe0\x41\x01H\x00\x12@\n\x0bspark_batch\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.SparkBatchB\x03\xe0\x41\x01H\x00\x12\x43\n\rspark_r_batch\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkRBatchB\x03\xe0\x41\x01H\x00\x12G\n\x0fspark_sql_batch\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.SparkSqlBatchB\x03\xe0\x41\x01H\x00\x12@\n\x0cruntime_info\x18\x08 \x01(\x0b\x32%.google.cloud.dataproc.v1.RuntimeInfoB\x03\xe0\x41\x03\x12\x39\n\x05state\x18\t \x01(\x0e\x32%.google.cloud.dataproc.v1.Batch.StateB\x03\xe0\x41\x03\x12\x1a\n\rstate_message\x18\n \x01(\tB\x03\xe0\x41\x03\x12\x33\n\nstate_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x14\n\x07\x63reator\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12@\n\x06labels\x18\r \x03(\x0b\x32+.google.cloud.dataproc.v1.Batch.LabelsEntryB\x03\xe0\x41\x01\x12\x44\n\x0eruntime_config\x18\x0e \x01(\x0b\x32\'.google.cloud.dataproc.v1.RuntimeConfigB\x03\xe0\x41\x01\x12L\n\x12\x65nvironment_config\x18\x0f \x01(\x0b\x32+.google.cloud.dataproc.v1.EnvironmentConfigB\x03\xe0\x41\x01\x12\x16\n\toperation\x18\x10 \x01(\tB\x03\xe0\x41\x03\x12H\n\rstate_history\x18\x11 \x03(\x0b\x32,.google.cloud.dataproc.v1.Batch.StateHistoryB\x03\xe0\x41\x03\x1a\xa0\x01\n\x0cStateHistory\x12\x39\n\x05state\x18\x01 \x01(\x0e\x32%.google.cloud.dataproc.v1.Batch.StateB\x03\xe0\x41\x03\x12\x1a\n\rstate_message\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"r\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x0e\n\nCANCELLING\x10\x03\x12\r\n\tCANCELLED\x10\x04\x12\r\n\tSUCCEEDED\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06:[\xea\x41X\n\x1d\x64\x61taproc.googleapis.com/Batch\x12\x37projects/{project}/locations/{location}/batches/{batch}B\x0e\n\x0c\x62\x61tch_config\"\xb2\x01\n\x0cPySparkBatch\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x1d\n\x10python_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\"\xb5\x01\n\nSparkBatch\x12 \n\x11main_jar_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01H\x00\x12\x19\n\nmain_class\x18\x02 \x01(\tB\x03\xe0\x41\x01H\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x42\x08\n\x06\x64river\"q\n\x0bSparkRBatch\x12\x1c\n\x0fmain_r_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\"\xda\x01\n\rSparkSqlBatch\x12\x1b\n\x0equery_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12Y\n\x0fquery_variables\x18\x02 \x03(\x0b\x32;.google.cloud.dataproc.v1.SparkSqlBatch.QueryVariablesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x1a\x35\n\x13QueryVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x32\x9d\x06\n\x0f\x42\x61tchController\x12\xea\x01\n\x0b\x43reateBatch\x12,.google.cloud.dataproc.v1.CreateBatchRequest\x1a\x1d.google.longrunning.Operation\"\x8d\x01\x82\xd3\xe4\x93\x02\x34\"+/v1/{parent=projects/*/locations/*}/batches:\x05\x62\x61tch\xda\x41\x15parent,batch,batch_id\xca\x41\x38\n\x05\x42\x61tch\x12/google.cloud.dataproc.v1.BatchOperationMetadata\x12\x92\x01\n\x08GetBatch\x12).google.cloud.dataproc.v1.GetBatchRequest\x1a\x1f.google.cloud.dataproc.v1.Batch\":\x82\xd3\xe4\x93\x02-\x12+/v1/{name=projects/*/locations/*/batches/*}\xda\x41\x04name\x12\xa8\x01\n\x0bListBatches\x12,.google.cloud.dataproc.v1.ListBatchesRequest\x1a-.google.cloud.dataproc.v1.ListBatchesResponse\"<\x82\xd3\xe4\x93\x02-\x12+/v1/{parent=projects/*/locations/*}/batches\xda\x41\x06parent\x12\x8f\x01\n\x0b\x44\x65leteBatch\x12,.google.cloud.dataproc.v1.DeleteBatchRequest\x1a\x16.google.protobuf.Empty\":\x82\xd3\xe4\x93\x02-*+/v1/{name=projects/*/locations/*/batches/*}\xda\x41\x04name\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBk\n\x1c\x63om.google.cloud.dataproc.v1B\x0c\x42\x61tchesProtoP\x01Z;cloud.google.com/go/dataproc/v2/apiv1/dataprocpb;dataprocpbb\x06proto3"
+
+pool = Google::Protobuf::DescriptorPool.generated_pool
+
+begin
+  pool.add_serialized_file(descriptor_data)
+rescue TypeError => e
+  # Compatibility code: will be removed in the next major version.
+  require 'google/protobuf/descriptor_pb'
+  parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+  parsed.clear_dependency
+  serialized = parsed.class.encode(parsed)
+  file = pool.add_serialized_file(serialized)
+  warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+  imports = [
+    ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"],
+    ["google.cloud.dataproc.v1.RuntimeInfo", "google/cloud/dataproc/v1/shared.proto"],
+  ]
+  imports.each do |type_name, expected_filename|
+    import_file = pool.lookup(type_name).file_descriptor
+    if import_file.name != expected_filename
+      warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
     end
   end
+  warn "Each proto file must use a consistent fully-qualified name."
+  warn "This will become an error in the next major version."
 end
 
 module Google
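
The batches_pb.rb rewrite above (and the similar _pb.rb changes listed earlier) replaces the hand-written DescriptorPool DSL with a single serialized FileDescriptorProto loaded via add_serialized_file, plus a compatibility rescue for older protobuf runtimes that retries without dependencies and warns about import-path mismatches. The generated message classes are unaffected by how the descriptor is loaded; a small sketch, with placeholder values, of using them as before:

    require "google/cloud/dataproc/v1/batches_pb"

    request = ::Google::Cloud::Dataproc::V1::CreateBatchRequest.new(
      parent:   "projects/my-project/locations/us-central1",  # placeholder
      batch_id: "example-batch",
      batch:    { spark_r_batch: { main_r_file_uri: "gs://my-bucket/job.R" } }
    )

    # The same classes resolve from the descriptor pool, matching the removed add_message blocks.
    descriptor = Google::Protobuf::DescriptorPool.generated_pool.lookup "google.cloud.dataproc.v1.Batch"
    puts descriptor.msgclass  # => Google::Cloud::Dataproc::V1::Batch
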