alibabacloud-emr-serverless-spark20230808 1.3.0__tar.gz → 1.4.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alibabacloud-emr-serverless-spark20230808 might be problematic; see the package's registry page for details.
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/ChangeLog.md +12 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/PKG-INFO +1 -1
- alibabacloud_emr-serverless-spark20230808-1.4.1/alibabacloud_emr_serverless_spark20230808/__init__.py +1 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/alibabacloud_emr_serverless_spark20230808/client.py +392 -28
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/alibabacloud_emr_serverless_spark20230808/models.py +785 -98
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/alibabacloud_emr_serverless_spark20230808.egg-info/requires.txt +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/setup.py +2 -2
- alibabacloud_emr-serverless-spark20230808-1.3.0/alibabacloud_emr_serverless_spark20230808/__init__.py +0 -1
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/LICENSE +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/MANIFEST.in +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/README-CN.md +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/README.md +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/alibabacloud_emr_serverless_spark20230808.egg-info/SOURCES.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/alibabacloud_emr_serverless_spark20230808.egg-info/dependency_links.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/alibabacloud_emr_serverless_spark20230808.egg-info/top_level.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.1}/setup.cfg +0 -0
ChangeLog.md
@@ -1,3 +1,15 @@
+2024-05-31 Version: 1.4.0
+- Support API CreateSqlStatement.
+- Support API GetSqlStatement.
+- Support API TerminateSqlStatement.
+- Update API ListJobRuns: add param jobRunDeploymentId.
+
+
+2024-05-22 Version: 1.3.0
+- Support API AddMembers.
+- Support API GrantRoleToUsers.
+
+
 2024-05-21 Version: 1.2.0
 - Support API ListSessionClusters.
 
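The 1.4.0 entries above add an interactive SQL workflow: submit a statement against a SQL session, poll it, and stop it if needed. The sketch below shows how the three new client calls fit together; it is a minimal illustration, and the endpoint value, credentials, region, resource IDs, and the response field used to read the statement ID are assumptions, not taken from this diff.

```python
import time

from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_emr_serverless_spark20230808.client import Client
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

# Assumed endpoint and placeholder credentials/IDs -- adjust for your account.
config = open_api_models.Config(
    access_key_id='<access-key-id>',
    access_key_secret='<access-key-secret>',
)
config.endpoint = 'emr-serverless-spark.cn-hangzhou.aliyuncs.com'
client = Client(config)

workspace_id = 'w-xxxxxxxx'
create_req = spark_models.CreateSqlStatementRequest(
    region_id='cn-hangzhou',
    sql_compute_id='sc-xxxxxxxx',      # the SQL session (compute) to run on
    code_content='SELECT 1',
    default_catalog='default_catalog',
    default_database='default',
    limit=100,                         # cap on returned rows
)
create_resp = client.create_sql_statement(workspace_id, create_req)
# Assumption: the statement ID is returned under body.data; confirm the exact
# field in models.py for the installed version.
statement_id = create_resp.body.data.statement_id

get_req = spark_models.GetSqlStatementRequest(region_id='cn-hangzhou')
for _ in range(30):
    poll = client.get_sql_statement(workspace_id, statement_id, get_req)
    print(poll.body)                   # state/result fields are defined in models.py
    time.sleep(2)

# Stop the statement if it is still running.
client.terminate_sql_statement(
    workspace_id,
    statement_id,
    spark_models.TerminateSqlStatementRequest(region_id='cn-hangzhou'),
)
```

Each of these methods also ships an `*_async` twin (for example `create_sql_statement_async`) with the same arguments.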
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud_emr-serverless-spark20230808
-Version: 1.3.0
+Version: 1.4.1
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
alibabacloud_emr_serverless_spark20230808/__init__.py
@@ -0,0 +1 @@
+__version__ = '1.4.1'
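Since the SQL-statement methods only exist from 1.4.x on, a quick guard against an older installed SDK can be useful. A small sketch, assuming `__version__` keeps the plain MAJOR.MINOR.PATCH form shown above:

```python
import alibabacloud_emr_serverless_spark20230808 as spark_sdk

# Parse '1.4.1' -> (1, 4, 1) and refuse to continue on an older install.
installed = tuple(int(part) for part in spark_sdk.__version__.split('.'))
if installed < (1, 4, 1):
    raise RuntimeError(
        f'alibabacloud-emr-serverless-spark20230808 >= 1.4.1 is required for the '
        f'SQL statement APIs; found {spark_sdk.__version__}'
    )
```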
alibabacloud_emr_serverless_spark20230808/client.py
@@ -48,7 +48,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.AddMembersResponse:
         """
-        @summary
+        @summary Adds a RAM user or RAM role to a workspace as a member.
 
         @param request: AddMembersRequest
         @param headers: map
@@ -92,7 +92,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.AddMembersResponse:
         """
-        @summary
+        @summary Adds a RAM user or RAM role to a workspace as a member.
 
         @param request: AddMembersRequest
         @param headers: map
@@ -134,7 +134,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.AddMembersRequest,
     ) -> emr_serverless_spark_20230808_models.AddMembersResponse:
         """
-        @summary
+        @summary Adds a RAM user or RAM role to a workspace as a member.
 
         @param request: AddMembersRequest
         @return: AddMembersResponse
@@ -148,7 +148,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.AddMembersRequest,
     ) -> emr_serverless_spark_20230808_models.AddMembersResponse:
         """
-        @summary
+        @summary Adds a RAM user or RAM role to a workspace as a member.
 
         @param request: AddMembersRequest
         @return: AddMembersResponse
@@ -166,7 +166,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
         """
-        @summary
+        @summary Terminates a Spark job.
 
         @param request: CancelJobRunRequest
         @param headers: map
@@ -206,7 +206,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
         """
-        @summary
+        @summary Terminates a Spark job.
 
         @param request: CancelJobRunRequest
         @param headers: map
@@ -244,7 +244,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.CancelJobRunRequest,
     ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
         """
-        @summary
+        @summary Terminates a Spark job.
 
         @param request: CancelJobRunRequest
         @return: CancelJobRunResponse
@@ -260,7 +260,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.CancelJobRunRequest,
     ) -> emr_serverless_spark_20230808_models.CancelJobRunResponse:
         """
-        @summary
+        @summary Terminates a Spark job.
 
         @param request: CancelJobRunRequest
         @return: CancelJobRunResponse
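The hunks above only fill in the English @summary lines for AddMembers and CancelJobRun; the call shapes are unchanged. For reference, a hedged CancelJobRun sketch, reusing the `client` from the earlier example; the IDs are placeholders and `region_id` is assumed to exist on CancelJobRunRequest, as it does on the other request models:

```python
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

# Cancel one job run inside a workspace (IDs are placeholders).
cancel_req = spark_models.CancelJobRunRequest(region_id='cn-hangzhou')
resp = client.cancel_job_run('w-xxxxxxxx', 'jr-xxxxxxxx', cancel_req)
print(resp.body)
```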
@@ -269,6 +269,138 @@ class Client(OpenApiClient):
         headers = {}
         return await self.cancel_job_run_with_options_async(workspace_id, job_run_id, request, headers, runtime)
 
+    def create_sql_statement_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs an SQL statement by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.code_content):
+            body['codeContent'] = request.code_content
+        if not UtilClient.is_unset(request.default_catalog):
+            body['defaultCatalog'] = request.default_catalog
+        if not UtilClient.is_unset(request.default_database):
+            body['defaultDatabase'] = request.default_database
+        if not UtilClient.is_unset(request.limit):
+            body['limit'] = request.limit
+        if not UtilClient.is_unset(request.sql_compute_id):
+            body['sqlComputeId'] = request.sql_compute_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='CreateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
+            method='PUT',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def create_sql_statement_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs an SQL statement by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.code_content):
+            body['codeContent'] = request.code_content
+        if not UtilClient.is_unset(request.default_catalog):
+            body['defaultCatalog'] = request.default_catalog
+        if not UtilClient.is_unset(request.default_database):
+            body['defaultDatabase'] = request.default_database
+        if not UtilClient.is_unset(request.limit):
+            body['limit'] = request.limit
+        if not UtilClient.is_unset(request.sql_compute_id):
+            body['sqlComputeId'] = request.sql_compute_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='CreateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
+            method='PUT',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def create_sql_statement(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs an SQL statement by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @return: CreateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.create_sql_statement_with_options(workspace_id, request, headers, runtime)
+
+    async def create_sql_statement_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs an SQL statement by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @return: CreateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.create_sql_statement_with_options_async(workspace_id, request, headers, runtime)
+
     def get_job_run_with_options(
         self,
         workspace_id: str,
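Beyond the one-shot `create_sql_statement` wrapper, the `*_with_options` variant added above accepts explicit headers and RuntimeOptions. A short sketch with placeholder IDs; note how the request's snake_case fields map onto the camelCase body keys visible in the hunk (code_content → codeContent, sql_compute_id → sqlComputeId, and so on):

```python
from alibabacloud_tea_util import models as util_models
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

runtime = util_models.RuntimeOptions(read_timeout=60000)  # allow a slow submit to finish
headers = {}                                              # extra HTTP headers, if any
create_req = spark_models.CreateSqlStatementRequest(
    region_id='cn-hangzhou',
    sql_compute_id='sc-xxxxxxxx',
    code_content='SHOW DATABASES',
    limit=50,
)
resp = client.create_sql_statement_with_options('w-xxxxxxxx', create_req, headers, runtime)
print(resp.body)
```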
@@ -278,7 +410,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
         """
-        @summary
+        @summary Obtain the job details.
 
         @param request: GetJobRunRequest
         @param headers: map
@@ -318,7 +450,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
         """
-        @summary
+        @summary Obtain the job details.
 
         @param request: GetJobRunRequest
         @param headers: map
@@ -356,7 +488,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetJobRunRequest,
     ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
         """
-        @summary
+        @summary Obtain the job details.
 
         @param request: GetJobRunRequest
         @return: GetJobRunResponse
@@ -372,7 +504,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetJobRunRequest,
     ) -> emr_serverless_spark_20230808_models.GetJobRunResponse:
         """
-        @summary
+        @summary Obtain the job details.
 
         @param request: GetJobRunRequest
         @return: GetJobRunResponse
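GetJobRun itself is unchanged here apart from its @summary text. For completeness, fetching one run's details looks like this (a sketch with placeholder IDs; only `region_id` is assumed on GetJobRunRequest, since its other fields are not shown in this diff):

```python
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

get_req = spark_models.GetJobRunRequest(region_id='cn-hangzhou')
job_run = client.get_job_run('w-xxxxxxxx', 'jr-xxxxxxxx', get_req)
print(job_run.body)
```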
@@ -381,6 +513,118 @@ class Client(OpenApiClient):
         headers = {}
         return await self.get_job_run_with_options_async(workspace_id, job_run_id, request, headers, runtime)
 
+    def get_sql_statement_with_options(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Obtains the status of an SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: GetSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='GetSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.GetSqlStatementResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def get_sql_statement_with_options_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Obtains the status of an SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: GetSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='GetSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.GetSqlStatementResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def get_sql_statement(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Obtains the status of an SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @return: GetSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.get_sql_statement_with_options(workspace_id, statement_id, request, headers, runtime)
+
+    async def get_sql_statement_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Obtains the status of an SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @return: GetSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.get_sql_statement_with_options_async(workspace_id, statement_id, request, headers, runtime)
+
     def grant_role_to_users_with_options(
         self,
         request: emr_serverless_spark_20230808_models.GrantRoleToUsersRequest,
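Every new method above also ships an `*_async` twin built on `call_api_async`. A hedged polling sketch using the async variant follows; region and IDs are placeholders, `client` is the instance configured earlier, and the terminal-state field is not shown in this diff, so the loop simply prints the response body:

```python
import asyncio

from alibabacloud_emr_serverless_spark20230808 import models as spark_models


async def watch_statement(workspace_id: str, statement_id: str) -> None:
    get_req = spark_models.GetSqlStatementRequest(region_id='cn-hangzhou')
    for attempt in range(20):
        resp = await client.get_sql_statement_async(workspace_id, statement_id, get_req)
        print(f'poll {attempt}:', resp.body)   # state/result fields are defined in models.py
        await asyncio.sleep(3)


asyncio.run(watch_statement('w-xxxxxxxx', 'st-xxxxxxxx'))
```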
@@ -388,7 +632,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GrantRoleToUsersResponse:
         """
-        @summary
+        @summary Assigns a specified role to users.
 
         @param request: GrantRoleToUsersRequest
         @param headers: map
@@ -432,7 +676,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GrantRoleToUsersResponse:
         """
-        @summary
+        @summary Assigns a specified role to users.
 
         @param request: GrantRoleToUsersRequest
         @param headers: map
@@ -474,7 +718,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GrantRoleToUsersRequest,
     ) -> emr_serverless_spark_20230808_models.GrantRoleToUsersResponse:
         """
-        @summary
+        @summary Assigns a specified role to users.
 
         @param request: GrantRoleToUsersRequest
         @return: GrantRoleToUsersResponse
@@ -488,7 +732,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GrantRoleToUsersRequest,
     ) -> emr_serverless_spark_20230808_models.GrantRoleToUsersResponse:
         """
-        @summary
+        @summary Assigns a specified role to users.
 
         @param request: GrantRoleToUsersRequest
         @return: GrantRoleToUsersResponse
@@ -505,7 +749,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
         """
-        @summary
+        @summary Queries a list of Spark jobs.
 
         @param tmp_req: ListJobRunsRequest
         @param headers: map
@@ -528,6 +772,8 @@ class Client(OpenApiClient):
             query['creator'] = request.creator
         if not UtilClient.is_unset(request.end_time_shrink):
             query['endTime'] = request.end_time_shrink
+        if not UtilClient.is_unset(request.job_run_deployment_id):
+            query['jobRunDeploymentId'] = request.job_run_deployment_id
         if not UtilClient.is_unset(request.job_run_id):
             query['jobRunId'] = request.job_run_id
         if not UtilClient.is_unset(request.max_results):
@@ -574,7 +820,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
         """
-        @summary
+        @summary Queries a list of Spark jobs.
 
         @param tmp_req: ListJobRunsRequest
         @param headers: map
@@ -597,6 +843,8 @@ class Client(OpenApiClient):
             query['creator'] = request.creator
         if not UtilClient.is_unset(request.end_time_shrink):
             query['endTime'] = request.end_time_shrink
+        if not UtilClient.is_unset(request.job_run_deployment_id):
+            query['jobRunDeploymentId'] = request.job_run_deployment_id
         if not UtilClient.is_unset(request.job_run_id):
             query['jobRunId'] = request.job_run_id
         if not UtilClient.is_unset(request.max_results):
@@ -641,7 +889,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
     ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
         """
-        @summary
+        @summary Queries a list of Spark jobs.
 
         @param request: ListJobRunsRequest
         @return: ListJobRunsResponse
@@ -656,7 +904,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListJobRunsRequest,
     ) -> emr_serverless_spark_20230808_models.ListJobRunsResponse:
         """
-        @summary
+        @summary Queries a list of Spark jobs.
 
         @param request: ListJobRunsRequest
         @return: ListJobRunsResponse
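The two hunks above are the substance of the 1.4.0 ListJobRuns change: a new jobRunDeploymentId query parameter, surfaced on the request model as `job_run_deployment_id`. A hedged filter sketch follows; the workspace-scoped call shape and the `region_id` field are assumed from the surrounding code rather than shown verbatim in this diff:

```python
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

list_req = spark_models.ListJobRunsRequest(
    region_id='cn-hangzhou',
    job_run_deployment_id='d-xxxxxxxx',   # new in 1.4.0: only runs created by this deployment
    max_results=20,
)
resp = client.list_job_runs('w-xxxxxxxx', list_req)
print(resp.body)
```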
@@ -798,6 +1046,8 @@ class Client(OpenApiClient):
         """
         UtilClient.validate_model(request)
         query = {}
+        if not UtilClient.is_unset(request.kind):
+            query['kind'] = request.kind
         if not UtilClient.is_unset(request.max_results):
             query['maxResults'] = request.max_results
         if not UtilClient.is_unset(request.next_token):
@@ -845,6 +1095,8 @@ class Client(OpenApiClient):
         """
         UtilClient.validate_model(request)
         query = {}
+        if not UtilClient.is_unset(request.kind):
+            query['kind'] = request.kind
         if not UtilClient.is_unset(request.max_results):
             query['maxResults'] = request.max_results
         if not UtilClient.is_unset(request.next_token):
@@ -1024,7 +1276,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
         """
-        @summary
+        @summary Queries a list of workspaces.
 
         @param request: ListWorkspacesRequest
         @param headers: map
@@ -1070,7 +1322,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
         """
-        @summary
+        @summary Queries a list of workspaces.
 
         @param request: ListWorkspacesRequest
         @param headers: map
@@ -1114,7 +1366,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
     ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
         """
-        @summary
+        @summary Queries a list of workspaces.
 
         @param request: ListWorkspacesRequest
         @return: ListWorkspacesResponse
@@ -1128,7 +1380,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListWorkspacesRequest,
     ) -> emr_serverless_spark_20230808_models.ListWorkspacesResponse:
         """
-        @summary
+        @summary Queries a list of workspaces.
 
         @param request: ListWorkspacesRequest
         @return: ListWorkspacesResponse
@@ -1145,7 +1397,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
         """
-        @summary
+        @summary Starts a Spark job.
 
         @param request: StartJobRunRequest
         @param headers: map
@@ -1206,7 +1458,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
         """
-        @summary
+        @summary Starts a Spark job.
 
         @param request: StartJobRunRequest
         @param headers: map
@@ -1265,7 +1517,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.StartJobRunRequest,
     ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
         """
-        @summary
+        @summary Starts a Spark job.
 
         @param request: StartJobRunRequest
         @return: StartJobRunResponse
@@ -1280,7 +1532,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.StartJobRunRequest,
     ) -> emr_serverless_spark_20230808_models.StartJobRunResponse:
         """
-        @summary
+        @summary Starts a Spark job.
 
         @param request: StartJobRunRequest
         @return: StartJobRunResponse
@@ -1288,3 +1540,115 @@ class Client(OpenApiClient):
         runtime = util_models.RuntimeOptions()
         headers = {}
         return await self.start_job_run_with_options_async(workspace_id, request, headers, runtime)
+
+    def terminate_sql_statement_with_options(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: TerminateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='TerminateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}/terminate',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.TerminateSqlStatementResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def terminate_sql_statement_with_options_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: TerminateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='TerminateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}/terminate',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.TerminateSqlStatementResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def terminate_sql_statement(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @return: TerminateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.terminate_sql_statement_with_options(workspace_id, statement_id, request, headers, runtime)
+
+    async def terminate_sql_statement_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @return: TerminateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.terminate_sql_statement_with_options_async(workspace_id, statement_id, request, headers, runtime)
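And the matching stop call, closing the loop on the interactive workflow. The method signature is exactly as added above; the region value and IDs are placeholders and `client` is the instance configured in the first sketch:

```python
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

term_req = spark_models.TerminateSqlStatementRequest(region_id='cn-hangzhou')
resp = client.terminate_sql_statement('w-xxxxxxxx', 'st-xxxxxxxx', term_req)
print(resp.body)
```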