alibabacloud-emr-serverless-spark20230808 1.3.0__tar.gz → 1.4.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of alibabacloud-emr-serverless-spark20230808 might be problematic.
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/ChangeLog.md +5 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/PKG-INFO +1 -1
- alibabacloud_emr-serverless-spark20230808-1.4.0/alibabacloud_emr_serverless_spark20230808/__init__.py +1 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/alibabacloud_emr_serverless_spark20230808/client.py +360 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/alibabacloud_emr_serverless_spark20230808/models.py +486 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/setup.py +1 -1
- alibabacloud_emr-serverless-spark20230808-1.3.0/alibabacloud_emr_serverless_spark20230808/__init__.py +0 -1
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/LICENSE +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/MANIFEST.in +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/README-CN.md +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/README.md +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/alibabacloud_emr_serverless_spark20230808.egg-info/SOURCES.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/alibabacloud_emr_serverless_spark20230808.egg-info/dependency_links.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/alibabacloud_emr_serverless_spark20230808.egg-info/requires.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/alibabacloud_emr_serverless_spark20230808.egg-info/top_level.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.3.0 → alibabacloud_emr-serverless-spark20230808-1.4.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud_emr-serverless-spark20230808
-Version: 1.3.0
+Version: 1.4.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
@@ -0,0 +1 @@
+__version__ = '1.4.0'
@@ -269,6 +269,138 @@ class Client(OpenApiClient):
         headers = {}
         return await self.cancel_job_run_with_options_async(workspace_id, job_run_id, request, headers, runtime)

+    def create_sql_statement_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs SQL by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.code_content):
+            body['codeContent'] = request.code_content
+        if not UtilClient.is_unset(request.default_catalog):
+            body['defaultCatalog'] = request.default_catalog
+        if not UtilClient.is_unset(request.default_database):
+            body['defaultDatabase'] = request.default_database
+        if not UtilClient.is_unset(request.limit):
+            body['limit'] = request.limit
+        if not UtilClient.is_unset(request.sql_compute_id):
+            body['sqlComputeId'] = request.sql_compute_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='CreateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
+            method='PUT',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def create_sql_statement_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs SQL by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: CreateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.code_content):
+            body['codeContent'] = request.code_content
+        if not UtilClient.is_unset(request.default_catalog):
+            body['defaultCatalog'] = request.default_catalog
+        if not UtilClient.is_unset(request.default_database):
+            body['defaultDatabase'] = request.default_database
+        if not UtilClient.is_unset(request.limit):
+            body['limit'] = request.limit
+        if not UtilClient.is_unset(request.sql_compute_id):
+            body['sqlComputeId'] = request.sql_compute_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='CreateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement',
+            method='PUT',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.CreateSqlStatementResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def create_sql_statement(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs SQL by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @return: CreateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.create_sql_statement_with_options(workspace_id, request, headers, runtime)
+
+    async def create_sql_statement_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
+        """
+        @summary Runs SQL by using a session.
+
+        @param request: CreateSqlStatementRequest
+        @return: CreateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.create_sql_statement_with_options_async(workspace_id, request, headers, runtime)
+
     def get_job_run_with_options(
         self,
         workspace_id: str,
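For orientation, the hunk above adds a synchronous create_sql_statement wrapper (plus an async twin) that PUTs a statement to /api/interactive/v1/workspace/{workspaceId}/statement. A minimal calling sketch follows; the endpoint, credentials, workspace ID and sqlComputeId are placeholders rather than values taken from this diff, and the Config/Client setup is the usual alibabacloud_tea_openapi pattern:

from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_emr_serverless_spark20230808.client import Client
from alibabacloud_emr_serverless_spark20230808 import models as spark_models

# Hypothetical endpoint and credentials; replace with real values.
config = open_api_models.Config(
    access_key_id='<your-access-key-id>',
    access_key_secret='<your-access-key-secret>',
    endpoint='emr-serverless-spark.cn-hangzhou.aliyuncs.com',  # placeholder endpoint
)
client = Client(config)

# Submit one SQL statement to an existing SQL compute (session).
request = spark_models.CreateSqlStatementRequest(
    code_content='SELECT 1',
    default_database='default',         # placeholder
    limit=100,
    sql_compute_id='<sql-compute-id>',  # placeholder
    region_id='cn-hangzhou',
)
response = client.create_sql_statement('<workspace-id>', request)
statement_id = response.body.data.statement_id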
@@ -381,6 +513,118 @@ class Client(OpenApiClient):
         headers = {}
         return await self.get_job_run_with_options_async(workspace_id, job_run_id, request, headers, runtime)

+    def get_sql_statement_with_options(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Queries the status of a SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: GetSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='GetSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.GetSqlStatementResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def get_sql_statement_with_options_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Queries the status of a SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: GetSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='GetSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.GetSqlStatementResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def get_sql_statement(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Queries the status of a SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @return: GetSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.get_sql_statement_with_options(workspace_id, statement_id, request, headers, runtime)
+
+    async def get_sql_statement_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
+        """
+        @summary Queries the status of a SQL statement.
+
+        @param request: GetSqlStatementRequest
+        @return: GetSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.get_sql_statement_with_options_async(workspace_id, statement_id, request, headers, runtime)
+
     def grant_role_to_users_with_options(
         self,
         request: emr_serverless_spark_20230808_models.GrantRoleToUsersRequest,
@@ -528,6 +772,8 @@ class Client(OpenApiClient):
             query['creator'] = request.creator
         if not UtilClient.is_unset(request.end_time_shrink):
             query['endTime'] = request.end_time_shrink
+        if not UtilClient.is_unset(request.job_run_deployment_id):
+            query['jobRunDeploymentId'] = request.job_run_deployment_id
         if not UtilClient.is_unset(request.job_run_id):
             query['jobRunId'] = request.job_run_id
         if not UtilClient.is_unset(request.max_results):
@@ -597,6 +843,8 @@ class Client(OpenApiClient):
             query['creator'] = request.creator
         if not UtilClient.is_unset(request.end_time_shrink):
             query['endTime'] = request.end_time_shrink
+        if not UtilClient.is_unset(request.job_run_deployment_id):
+            query['jobRunDeploymentId'] = request.job_run_deployment_id
         if not UtilClient.is_unset(request.job_run_id):
             query['jobRunId'] = request.job_run_id
         if not UtilClient.is_unset(request.max_results):
@@ -1288,3 +1536,115 @@ class Client(OpenApiClient):
         runtime = util_models.RuntimeOptions()
         headers = {}
         return await self.start_job_run_with_options_async(workspace_id, request, headers, runtime)
+
+    def terminate_sql_statement_with_options(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: TerminateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='TerminateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}/terminate',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.TerminateSqlStatementResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def terminate_sql_statement_with_options_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: TerminateSqlStatementResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='TerminateSqlStatement',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_id)}/statement/{OpenApiUtilClient.get_encode_param(statement_id)}/terminate',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.TerminateSqlStatementResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def terminate_sql_statement(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @return: TerminateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.terminate_sql_statement_with_options(workspace_id, statement_id, request, headers, runtime)
+
+    async def terminate_sql_statement_async(
+        self,
+        workspace_id: str,
+        statement_id: str,
+        request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
+    ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
+        """
+        @summary Terminates a session SQL statement.
+
+        @param request: TerminateSqlStatementRequest
+        @return: TerminateSqlStatementResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.terminate_sql_statement_with_options_async(workspace_id, statement_id, request, headers, runtime)
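Taken together, the client.py additions form a small interactive-SQL lifecycle: CreateSqlStatement (PUT), GetSqlStatement (GET) and TerminateSqlStatement (POST .../terminate). A hedged polling sketch follows, reusing the client and statement_id from the earlier example; the state names checked here are illustrative guesses, since the diff does not show the possible values of GetSqlStatementResponseBodyData.state:

import time

from alibabacloud_emr_serverless_spark20230808 import models as spark_models

def wait_for_statement(client, workspace_id: str, statement_id: str, timeout_s: int = 300):
    # Poll GetSqlStatement until the statement leaves its in-flight states,
    # terminating it if the deadline is hit. The state names are assumptions.
    get_req = spark_models.GetSqlStatementRequest(region_id='cn-hangzhou')
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        data = client.get_sql_statement(workspace_id, statement_id, get_req).body.data
        if data.state not in ('pending', 'running'):
            return data  # carries sql_outputs, sql_error_code, sql_error_message
        time.sleep(5)
    client.terminate_sql_statement(
        workspace_id,
        statement_id,
        spark_models.TerminateSqlStatementRequest(region_id='cn-hangzhou'),
    )
    raise TimeoutError(f'SQL statement {statement_id} did not finish within {timeout_s}s')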
@@ -754,6 +754,7 @@ class Task(TeaModel):
         default_database: str = None,
         default_resource_queue_id: str = None,
         default_sql_compute_id: str = None,
+        deployment_id: str = None,
         extra_artifact_ids: List[str] = None,
         extra_spark_submit_params: str = None,
         files: List[str] = None,
@@ -761,6 +762,7 @@ class Task(TeaModel):
         gmt_modified: str = None,
         has_changed: bool = None,
         has_commited: bool = None,
+        is_streaming: bool = None,
         jars: List[str] = None,
         last_run_resource_queue_id: str = None,
         modifier: int = None,
@@ -775,6 +777,7 @@ class Task(TeaModel):
         spark_executor_memory: int = None,
         spark_log_level: str = None,
         spark_log_path: str = None,
+        spark_submit_clause: str = None,
         spark_version: str = None,
         tags: Dict[str, str] = None,
         type: str = None,
@@ -791,6 +794,7 @@ class Task(TeaModel):
         self.default_database = default_database
         self.default_resource_queue_id = default_resource_queue_id
         self.default_sql_compute_id = default_sql_compute_id
+        self.deployment_id = deployment_id
         self.extra_artifact_ids = extra_artifact_ids
         self.extra_spark_submit_params = extra_spark_submit_params
         self.files = files
@@ -801,6 +805,7 @@ class Task(TeaModel):
         self.has_changed = has_changed
         # This parameter is required.
         self.has_commited = has_commited
+        self.is_streaming = is_streaming
         self.jars = jars
         self.last_run_resource_queue_id = last_run_resource_queue_id
         # This parameter is required.
@@ -823,6 +828,7 @@ class Task(TeaModel):
         self.spark_log_level = spark_log_level
         # This parameter is required.
         self.spark_log_path = spark_log_path
+        self.spark_submit_clause = spark_submit_clause
         # This parameter is required.
         self.spark_version = spark_version
         self.tags = tags
@@ -861,6 +867,8 @@ class Task(TeaModel):
             result['defaultResourceQueueId'] = self.default_resource_queue_id
         if self.default_sql_compute_id is not None:
             result['defaultSqlComputeId'] = self.default_sql_compute_id
+        if self.deployment_id is not None:
+            result['deploymentId'] = self.deployment_id
         if self.extra_artifact_ids is not None:
             result['extraArtifactIds'] = self.extra_artifact_ids
         if self.extra_spark_submit_params is not None:
@@ -875,6 +883,8 @@ class Task(TeaModel):
             result['hasChanged'] = self.has_changed
         if self.has_commited is not None:
             result['hasCommited'] = self.has_commited
+        if self.is_streaming is not None:
+            result['isStreaming'] = self.is_streaming
         if self.jars is not None:
             result['jars'] = self.jars
         if self.last_run_resource_queue_id is not None:
@@ -905,6 +915,8 @@ class Task(TeaModel):
             result['sparkLogLevel'] = self.spark_log_level
         if self.spark_log_path is not None:
             result['sparkLogPath'] = self.spark_log_path
+        if self.spark_submit_clause is not None:
+            result['sparkSubmitClause'] = self.spark_submit_clause
         if self.spark_version is not None:
             result['sparkVersion'] = self.spark_version
         if self.tags is not None:
@@ -935,6 +947,8 @@ class Task(TeaModel):
             self.default_resource_queue_id = m.get('defaultResourceQueueId')
         if m.get('defaultSqlComputeId') is not None:
             self.default_sql_compute_id = m.get('defaultSqlComputeId')
+        if m.get('deploymentId') is not None:
+            self.deployment_id = m.get('deploymentId')
         if m.get('extraArtifactIds') is not None:
             self.extra_artifact_ids = m.get('extraArtifactIds')
         if m.get('extraSparkSubmitParams') is not None:
@@ -949,6 +963,8 @@ class Task(TeaModel):
             self.has_changed = m.get('hasChanged')
         if m.get('hasCommited') is not None:
             self.has_commited = m.get('hasCommited')
+        if m.get('isStreaming') is not None:
+            self.is_streaming = m.get('isStreaming')
         if m.get('jars') is not None:
             self.jars = m.get('jars')
         if m.get('lastRunResourceQueueId') is not None:
@@ -980,6 +996,8 @@ class Task(TeaModel):
             self.spark_log_level = m.get('sparkLogLevel')
         if m.get('sparkLogPath') is not None:
             self.spark_log_path = m.get('sparkLogPath')
+        if m.get('sparkSubmitClause') is not None:
+            self.spark_submit_clause = m.get('sparkSubmitClause')
         if m.get('sparkVersion') is not None:
             self.spark_version = m.get('sparkVersion')
         if m.get('tags') is not None:
@@ -1489,6 +1507,166 @@ class CancelJobRunResponse(TeaModel):
         return self


+class CreateSqlStatementRequest(TeaModel):
+    def __init__(
+        self,
+        code_content: str = None,
+        default_catalog: str = None,
+        default_database: str = None,
+        limit: int = None,
+        sql_compute_id: str = None,
+        region_id: str = None,
+    ):
+        self.code_content = code_content
+        self.default_catalog = default_catalog
+        self.default_database = default_database
+        self.limit = limit
+        self.sql_compute_id = sql_compute_id
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.code_content is not None:
+            result['codeContent'] = self.code_content
+        if self.default_catalog is not None:
+            result['defaultCatalog'] = self.default_catalog
+        if self.default_database is not None:
+            result['defaultDatabase'] = self.default_database
+        if self.limit is not None:
+            result['limit'] = self.limit
+        if self.sql_compute_id is not None:
+            result['sqlComputeId'] = self.sql_compute_id
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('codeContent') is not None:
+            self.code_content = m.get('codeContent')
+        if m.get('defaultCatalog') is not None:
+            self.default_catalog = m.get('defaultCatalog')
+        if m.get('defaultDatabase') is not None:
+            self.default_database = m.get('defaultDatabase')
+        if m.get('limit') is not None:
+            self.limit = m.get('limit')
+        if m.get('sqlComputeId') is not None:
+            self.sql_compute_id = m.get('sqlComputeId')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class CreateSqlStatementResponseBodyData(TeaModel):
+    def __init__(
+        self,
+        statement_id: str = None,
+    ):
+        self.statement_id = statement_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.statement_id is not None:
+            result['statementId'] = self.statement_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('statementId') is not None:
+            self.statement_id = m.get('statementId')
+        return self
+
+
+class CreateSqlStatementResponseBody(TeaModel):
+    def __init__(
+        self,
+        data: CreateSqlStatementResponseBodyData = None,
+        request_id: str = None,
+    ):
+        self.data = data
+        self.request_id = request_id
+
+    def validate(self):
+        if self.data:
+            self.data.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.data is not None:
+            result['data'] = self.data.to_map()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('data') is not None:
+            temp_model = CreateSqlStatementResponseBodyData()
+            self.data = temp_model.from_map(m['data'])
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        return self
+
+
+class CreateSqlStatementResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: CreateSqlStatementResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = CreateSqlStatementResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class GetJobRunRequest(TeaModel):
     def __init__(
         self,
@@ -1815,6 +1993,207 @@ class GetJobRunResponse(TeaModel):
         return self


+class GetSqlStatementRequest(TeaModel):
+    def __init__(
+        self,
+        region_id: str = None,
+    ):
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class GetSqlStatementResponseBodyDataSqlOutputs(TeaModel):
+    def __init__(
+        self,
+        rows: str = None,
+        schema: str = None,
+    ):
+        self.rows = rows
+        self.schema = schema
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.rows is not None:
+            result['rows'] = self.rows
+        if self.schema is not None:
+            result['schema'] = self.schema
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('rows') is not None:
+            self.rows = m.get('rows')
+        if m.get('schema') is not None:
+            self.schema = m.get('schema')
+        return self
+
+
+class GetSqlStatementResponseBodyData(TeaModel):
+    def __init__(
+        self,
+        execution_time: List[int] = None,
+        sql_error_code: str = None,
+        sql_error_message: str = None,
+        sql_outputs: List[GetSqlStatementResponseBodyDataSqlOutputs] = None,
+        state: str = None,
+        statement_id: str = None,
+    ):
+        self.execution_time = execution_time
+        self.sql_error_code = sql_error_code
+        self.sql_error_message = sql_error_message
+        self.sql_outputs = sql_outputs
+        self.state = state
+        self.statement_id = statement_id
+
+    def validate(self):
+        if self.sql_outputs:
+            for k in self.sql_outputs:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.execution_time is not None:
+            result['executionTime'] = self.execution_time
+        if self.sql_error_code is not None:
+            result['sqlErrorCode'] = self.sql_error_code
+        if self.sql_error_message is not None:
+            result['sqlErrorMessage'] = self.sql_error_message
+        result['sqlOutputs'] = []
+        if self.sql_outputs is not None:
+            for k in self.sql_outputs:
+                result['sqlOutputs'].append(k.to_map() if k else None)
+        if self.state is not None:
+            result['state'] = self.state
+        if self.statement_id is not None:
+            result['statementId'] = self.statement_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('executionTime') is not None:
+            self.execution_time = m.get('executionTime')
+        if m.get('sqlErrorCode') is not None:
+            self.sql_error_code = m.get('sqlErrorCode')
+        if m.get('sqlErrorMessage') is not None:
+            self.sql_error_message = m.get('sqlErrorMessage')
+        self.sql_outputs = []
+        if m.get('sqlOutputs') is not None:
+            for k in m.get('sqlOutputs'):
+                temp_model = GetSqlStatementResponseBodyDataSqlOutputs()
+                self.sql_outputs.append(temp_model.from_map(k))
+        if m.get('state') is not None:
+            self.state = m.get('state')
+        if m.get('statementId') is not None:
+            self.statement_id = m.get('statementId')
+        return self
+
+
+class GetSqlStatementResponseBody(TeaModel):
+    def __init__(
+        self,
+        data: GetSqlStatementResponseBodyData = None,
+        request_id: str = None,
+    ):
+        self.data = data
+        self.request_id = request_id
+
+    def validate(self):
+        if self.data:
+            self.data.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.data is not None:
+            result['data'] = self.data.to_map()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('data') is not None:
+            temp_model = GetSqlStatementResponseBodyData()
+            self.data = temp_model.from_map(m['data'])
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        return self
+
+
+class GetSqlStatementResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: GetSqlStatementResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = GetSqlStatementResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class GrantRoleToUsersRequest(TeaModel):
     def __init__(
         self,
@@ -2026,6 +2405,7 @@ class ListJobRunsRequest(TeaModel):
         self,
         creator: str = None,
         end_time: ListJobRunsRequestEndTime = None,
+        job_run_deployment_id: str = None,
         job_run_id: str = None,
         max_results: int = None,
         name: str = None,
@@ -2039,6 +2419,7 @@ class ListJobRunsRequest(TeaModel):
         # UID of the creating user.
         self.creator = creator
         self.end_time = end_time
+        self.job_run_deployment_id = job_run_deployment_id
         # Job run ID.
         self.job_run_id = job_run_id
         # Maximum number of records to return per request.
@@ -2075,6 +2456,8 @@ class ListJobRunsRequest(TeaModel):
             result['creator'] = self.creator
         if self.end_time is not None:
             result['endTime'] = self.end_time.to_map()
+        if self.job_run_deployment_id is not None:
+            result['jobRunDeploymentId'] = self.job_run_deployment_id
         if self.job_run_id is not None:
             result['jobRunId'] = self.job_run_id
         if self.max_results is not None:
@@ -2104,6 +2487,8 @@ class ListJobRunsRequest(TeaModel):
         if m.get('endTime') is not None:
             temp_model = ListJobRunsRequestEndTime()
             self.end_time = temp_model.from_map(m['endTime'])
+        if m.get('jobRunDeploymentId') is not None:
+            self.job_run_deployment_id = m.get('jobRunDeploymentId')
         if m.get('jobRunId') is not None:
             self.job_run_id = m.get('jobRunId')
         if m.get('maxResults') is not None:
@@ -2134,6 +2519,7 @@ class ListJobRunsShrinkRequest(TeaModel):
         self,
         creator: str = None,
         end_time_shrink: str = None,
+        job_run_deployment_id: str = None,
         job_run_id: str = None,
         max_results: int = None,
         name: str = None,
@@ -2147,6 +2533,7 @@ class ListJobRunsShrinkRequest(TeaModel):
         # UID of the creating user.
         self.creator = creator
         self.end_time_shrink = end_time_shrink
+        self.job_run_deployment_id = job_run_deployment_id
         # Job run ID.
         self.job_run_id = job_run_id
         # Maximum number of records to return per request.
@@ -2176,6 +2563,8 @@ class ListJobRunsShrinkRequest(TeaModel):
             result['creator'] = self.creator
         if self.end_time_shrink is not None:
             result['endTime'] = self.end_time_shrink
+        if self.job_run_deployment_id is not None:
+            result['jobRunDeploymentId'] = self.job_run_deployment_id
         if self.job_run_id is not None:
             result['jobRunId'] = self.job_run_id
         if self.max_results is not None:
@@ -2202,6 +2591,8 @@ class ListJobRunsShrinkRequest(TeaModel):
             self.creator = m.get('creator')
         if m.get('endTime') is not None:
             self.end_time_shrink = m.get('endTime')
+        if m.get('jobRunDeploymentId') is not None:
+            self.job_run_deployment_id = m.get('jobRunDeploymentId')
         if m.get('jobRunId') is not None:
             self.job_run_id = m.get('jobRunId')
         if m.get('maxResults') is not None:
@@ -4061,3 +4452,98 @@ class StartJobRunResponse(TeaModel):
         return self


+class TerminateSqlStatementRequest(TeaModel):
+    def __init__(
+        self,
+        region_id: str = None,
+    ):
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class TerminateSqlStatementResponseBody(TeaModel):
+    def __init__(
+        self,
+        request_id: str = None,
+    ):
+        self.request_id = request_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        return self
+
+
+class TerminateSqlStatementResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: TerminateSqlStatementResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = TerminateSqlStatementResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
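The models.py hunks above also thread a new jobRunDeploymentId filter through ListJobRunsRequest and ListJobRunsShrinkRequest, matching the query parameter added in client.py. A hedged sketch of using it, assuming the pre-existing list_job_runs wrapper keeps its usual (workspace_id, request) signature; the IDs are placeholders:

from alibabacloud_emr_serverless_spark20230808 import models as spark_models

# Filter job runs by the deployment that produced them (new in 1.4.0).
request = spark_models.ListJobRunsRequest(
    job_run_deployment_id='<deployment-id>',  # placeholder
    max_results=20,
)
response = client.list_job_runs('<workspace-id>', request)
# Response fields come from the pre-existing ListJobRuns models, which are unchanged here.
print(response.body)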
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud-emr-serverless-spark20230808
-Version: 1.3.0
+Version: 1.4.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
@@ -1 +0,0 @@
-__version__ = '1.3.0'
The remaining files (LICENSE, MANIFEST.in, README-CN.md, README.md, the egg-info SOURCES.txt, dependency_links.txt, requires.txt and top_level.txt, and setup.cfg) are unchanged between 1.3.0 and 1.4.0.