alibabacloud-emr-serverless-spark20230808 1.4.2.tar.gz → 1.5.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of alibabacloud-emr-serverless-spark20230808 might be problematic.
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/ChangeLog.md +12 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/PKG-INFO +2 -2
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/README-CN.md +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/README.md +1 -1
- alibabacloud_emr-serverless-spark20230808-1.5.0/alibabacloud_emr_serverless_spark20230808/__init__.py +1 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/alibabacloud_emr_serverless_spark20230808/client.py +260 -16
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/alibabacloud_emr_serverless_spark20230808/models.py +267 -10
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO +2 -2
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/alibabacloud_emr_serverless_spark20230808.egg-info/requires.txt +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/setup.py +2 -2
- alibabacloud_emr-serverless-spark20230808-1.4.2/alibabacloud_emr_serverless_spark20230808/__init__.py +0 -1
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/LICENSE +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/MANIFEST.in +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/alibabacloud_emr_serverless_spark20230808.egg-info/SOURCES.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/alibabacloud_emr_serverless_spark20230808.egg-info/dependency_links.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/alibabacloud_emr_serverless_spark20230808.egg-info/top_level.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.2 → alibabacloud_emr-serverless-spark20230808-1.5.0}/setup.cfg +0 -0
ChangeLog.md

@@ -1,3 +1,15 @@
+2024-08-22 Version: 1.4.3
+- Update API GetJobRun: update response param.
+
+
+2024-08-20 Version: 1.4.2
+- Update API ListJobRuns: update response param.
+- Update API ListReleaseVersions: update response param.
+- Update API ListSessionClusters: update response param.
+- Update API ListWorkspaces: update response param.
+- Update API StartJobRun: update param body.
+
+
 2024-07-09 Version: 1.4.1
 - Update API ListSessionClusters: add param kind.
 - Update API ListSessionClusters: update response param.
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud_emr-serverless-spark20230808
-Version: 1.4.2
+Version: 1.5.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
@@ -29,7 +29,7 @@ English | [简体中文](README-CN.md)
 
 ## Requirements
 
-- Python >= 3.
+- Python >= 3.7
 
 ## Installation
 
alibabacloud_emr_serverless_spark20230808/__init__.py (added)

@@ -0,0 +1 @@
+__version__ = '1.5.0'
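The release bumps the package-level version marker, so the installed SDK can be verified at runtime. A minimal sketch (nothing beyond the standard import is assumed):

    # Confirm which SDK version is installed.
    import alibabacloud_emr_serverless_spark20230808 as emr_spark

    print(emr_spark.__version__)  # expected: 1.5.0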
alibabacloud_emr_serverless_spark20230808/client.py

@@ -277,7 +277,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @param headers: map
@@ -328,7 +328,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @param headers: map
@@ -377,7 +377,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @return: CreateSqlStatementResponse
@@ -392,7 +392,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @return: CreateSqlStatementResponse
@@ -522,7 +522,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @param headers: map
@@ -562,7 +562,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @param headers: map
@@ -600,7 +600,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @return: GetSqlStatementResponse
@@ -616,7 +616,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @return: GetSqlStatementResponse
@@ -920,7 +920,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @param headers: map
@@ -937,6 +937,8 @@ class Client(OpenApiClient):
             query['releaseVersion'] = request.release_version
         if not UtilClient.is_unset(request.release_version_status):
             query['releaseVersionStatus'] = request.release_version_status
+        if not UtilClient.is_unset(request.workspace_id):
+            query['workspaceId'] = request.workspace_id
         req = open_api_models.OpenApiRequest(
             headers=headers,
             query=OpenApiUtilClient.query(query)
@@ -964,7 +966,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @param headers: map
@@ -981,6 +983,8 @@ class Client(OpenApiClient):
             query['releaseVersion'] = request.release_version
         if not UtilClient.is_unset(request.release_version_status):
             query['releaseVersionStatus'] = request.release_version_status
+        if not UtilClient.is_unset(request.workspace_id):
+            query['workspaceId'] = request.workspace_id
         req = open_api_models.OpenApiRequest(
             headers=headers,
             query=OpenApiUtilClient.query(query)
@@ -1006,7 +1010,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListReleaseVersionsRequest,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @return: ListReleaseVersionsResponse
@@ -1020,7 +1024,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListReleaseVersionsRequest,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @return: ListReleaseVersionsResponse
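The ListReleaseVersions change above adds an optional workspaceId query parameter. A minimal sketch of using it from the client, assuming standard AccessKey credentials; the endpoint, region, workspace ID, and credential placeholders below are illustrative, not values taken from this diff:

    from alibabacloud_tea_openapi import models as open_api_models
    from alibabacloud_emr_serverless_spark20230808.client import Client
    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    # Placeholder credentials and endpoint; replace with real values.
    config = open_api_models.Config(
        access_key_id='<access-key-id>',
        access_key_secret='<access-key-secret>',
        endpoint='emr-serverless-spark.cn-hangzhou.aliyuncs.com',
    )
    client = Client(config)

    request = emr_spark_models.ListReleaseVersionsRequest(
        region_id='cn-hangzhou',     # placeholder region
        workspace_id='w-xxxxxxxx',   # new in 1.5.0: scope the listing to one workspace
    )
    response = client.list_release_versions(request)
    print(response.body.release_versions)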
@@ -1549,6 +1553,246 @@ class Client(OpenApiClient):
         headers = {}
         return await self.start_job_run_with_options_async(workspace_id, request, headers, runtime)
 
+    def start_session_cluster_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StartSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StartSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StartSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/startSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StartSessionClusterResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def start_session_cluster_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StartSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StartSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StartSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/startSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StartSessionClusterResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def start_session_cluster(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StartSessionClusterRequest
+        @return: StartSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.start_session_cluster_with_options(workspace_id, request, headers, runtime)
+
+    async def start_session_cluster_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StartSessionClusterRequest
+        @return: StartSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.start_session_cluster_with_options_async(workspace_id, request, headers, runtime)
+
+    def stop_session_cluster_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StopSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StopSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StopSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/stopSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StopSessionClusterResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def stop_session_cluster_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StopSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StopSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StopSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/stopSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StopSessionClusterResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def stop_session_cluster(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StopSessionClusterRequest
+        @return: StopSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.stop_session_cluster_with_options(workspace_id, request, headers, runtime)
+
+    async def stop_session_cluster_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary 启动session集群
+
+        @param request: StopSessionClusterRequest
+        @return: StopSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.stop_session_cluster_with_options_async(workspace_id, request, headers, runtime)
+
     def terminate_sql_statement_with_options(
         self,
         workspace_id: str,
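The block above is the main client-side addition in 1.5.0: synchronous and asynchronous StartSessionCluster/StopSessionCluster calls, both scoped to a workspace. A minimal usage sketch, reusing the `client` from the previous sketch; the workspace, queue, and session compute IDs are placeholders:

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    start_request = emr_spark_models.StartSessionClusterRequest(
        region_id='cn-hangzhou',           # placeholder region
        queue_name='dev_queue',            # placeholder resource queue
        session_cluster_id='sc-xxxxxxxx',  # placeholder session compute ID
    )
    start_response = client.start_session_cluster('w-xxxxxxxx', start_request)
    print(start_response.body.session_cluster_id)

    # Stop it again with the mirrored request model.
    stop_request = emr_spark_models.StopSessionClusterRequest(
        region_id='cn-hangzhou',
        queue_name='dev_queue',
        session_cluster_id='sc-xxxxxxxx',
    )
    client.stop_session_cluster('w-xxxxxxxx', stop_request)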
@@ -1558,7 +1802,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @param headers: map
@@ -1598,7 +1842,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @param headers: map
@@ -1636,7 +1880,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @return: TerminateSqlStatementResponse
@@ -1652,7 +1896,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @return: TerminateSqlStatementResponse
alibabacloud_emr_serverless_spark20230808/models.py

@@ -781,6 +781,7 @@ class Task(TeaModel):
         spark_submit_clause: str = None,
         spark_version: str = None,
         tags: Dict[str, str] = None,
+        timeout: int = None,
         type: str = None,
     ):
         self.archives = archives
@@ -834,6 +835,7 @@ class Task(TeaModel):
         # This parameter is required.
         self.spark_version = spark_version
         self.tags = tags
+        self.timeout = timeout
         # This parameter is required.
         self.type = type
 
@@ -925,6 +927,8 @@ class Task(TeaModel):
             result['sparkVersion'] = self.spark_version
         if self.tags is not None:
             result['tags'] = self.tags
+        if self.timeout is not None:
+            result['timeout'] = self.timeout
         if self.type is not None:
             result['type'] = self.type
         return result
@@ -1008,6 +1012,8 @@ class Task(TeaModel):
             self.spark_version = m.get('sparkVersion')
         if m.get('tags') is not None:
             self.tags = m.get('tags')
+        if m.get('timeout') is not None:
+            self.timeout = m.get('timeout')
         if m.get('type') is not None:
             self.type = m.get('type')
         return self
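The four Task hunks above thread a new timeout attribute through the constructor, to_map(), and from_map(). A minimal round-trip sketch; the field values are illustrative and the timeout unit is not stated in this diff:

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    task = emr_spark_models.Task(
        type='SQL',                        # placeholder task type
        spark_version='<engine-version>',  # placeholder engine version
        tags={'owner': 'data-team'},
        timeout=3600,                      # new in 1.5.0; unit assumed, not documented here
    )
    serialized = task.to_map()
    print(serialized['timeout'])           # -> 3600

    # Round-tripping through from_map() restores the new field.
    restored = emr_spark_models.Task().from_map(serialized)
    print(restored.timeout)                # -> 3600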
@@ -1549,7 +1555,7 @@ class CreateSqlStatementRequest(TeaModel):
         self.default_database = default_database
         # The maximum number of entries to return. Valid values: 1 to 10000.
         self.limit = limit
-        # The SQL
+        # The SQL compute ID. You can create an SQL compute in the workspace created in EMR Serverless Spark.
         self.sql_compute_id = sql_compute_id
         # The region ID.
         self.region_id = region_id
@@ -1804,8 +1810,10 @@ class GetJobRunResponseBodyJobRun(TeaModel):
         self,
         code_type: str = None,
         configuration_overrides: GetJobRunResponseBodyJobRunConfigurationOverrides = None,
+        display_release_version: str = None,
         end_time: int = None,
         execution_timeout_seconds: int = None,
+        fusion: bool = None,
         job_driver: JobDriver = None,
         job_run_id: str = None,
         log: RunLog = None,
@@ -1826,12 +1834,14 @@ class GetJobRunResponseBodyJobRun(TeaModel):
         # * JAR
         # * PYTHON
         self.code_type = code_type
-        # The
+        # The job configurations of Spark.
         self.configuration_overrides = configuration_overrides
+        self.display_release_version = display_release_version
         # The end time of the job.
         self.end_time = end_time
         # The timeout period of the job.
         self.execution_timeout_seconds = execution_timeout_seconds
+        self.fusion = fusion
         # The information about Spark Driver.
         self.job_driver = job_driver
         # The job ID.
@@ -1883,10 +1893,14 @@ class GetJobRunResponseBodyJobRun(TeaModel):
             result['codeType'] = self.code_type
         if self.configuration_overrides is not None:
             result['configurationOverrides'] = self.configuration_overrides.to_map()
+        if self.display_release_version is not None:
+            result['displayReleaseVersion'] = self.display_release_version
         if self.end_time is not None:
             result['endTime'] = self.end_time
         if self.execution_timeout_seconds is not None:
             result['executionTimeoutSeconds'] = self.execution_timeout_seconds
+        if self.fusion is not None:
+            result['fusion'] = self.fusion
         if self.job_driver is not None:
             result['jobDriver'] = self.job_driver.to_map()
         if self.job_run_id is not None:
@@ -1924,10 +1938,14 @@ class GetJobRunResponseBodyJobRun(TeaModel):
         if m.get('configurationOverrides') is not None:
             temp_model = GetJobRunResponseBodyJobRunConfigurationOverrides()
             self.configuration_overrides = temp_model.from_map(m['configurationOverrides'])
+        if m.get('displayReleaseVersion') is not None:
+            self.display_release_version = m.get('displayReleaseVersion')
         if m.get('endTime') is not None:
             self.end_time = m.get('endTime')
         if m.get('executionTimeoutSeconds') is not None:
             self.execution_timeout_seconds = m.get('executionTimeoutSeconds')
+        if m.get('fusion') is not None:
+            self.fusion = m.get('fusion')
         if m.get('jobDriver') is not None:
             temp_model = JobDriver()
             self.job_driver = temp_model.from_map(m['jobDriver'])
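The GetJobRunResponseBodyJobRun changes above surface two new response fields, displayReleaseVersion and fusion. A minimal sketch that exercises only the model layer; the payload is synthetic and shaped like the keys handled in from_map above:

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    payload = {
        'jobRunId': 'jr-xxxxxxxx',                           # placeholder
        'displayReleaseVersion': '<display-version-string>', # placeholder
        'fusion': True,
    }
    job_run = emr_spark_models.GetJobRunResponseBodyJobRun().from_map(payload)
    print(job_run.display_release_version, job_run.fusion)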
@@ -2189,7 +2207,7 @@ class GetSqlStatementResponseBody(TeaModel):
         data: GetSqlStatementResponseBodyData = None,
         request_id: str = None,
     ):
-        # The returned
+        # The data returned.
         self.data = data
         # The request ID.
         self.request_id = request_id
@@ -2492,7 +2510,7 @@ class ListJobRunsRequest(TeaModel):
         states: List[str] = None,
         tags: List[ListJobRunsRequestTags] = None,
     ):
-        # The ID of the user who
+        # The ID of the user who created the job.
         self.creator = creator
         # The range of end time.
         self.end_time = end_time
@@ -2610,7 +2628,7 @@ class ListJobRunsShrinkRequest(TeaModel):
         states_shrink: str = None,
         tags_shrink: str = None,
     ):
-        # The ID of the user who
+        # The ID of the user who created the job.
         self.creator = creator
         # The range of end time.
         self.end_time_shrink = end_time_shrink
@@ -3052,6 +3070,7 @@ class ListReleaseVersionsRequest(TeaModel):
         release_type: str = None,
         release_version: str = None,
         release_version_status: str = None,
+        workspace_id: str = None,
     ):
         # The region ID.
         self.region_id = region_id
@@ -3060,17 +3079,19 @@ class ListReleaseVersionsRequest(TeaModel):
         # Valid values:
         #
         # * stable
-        # *
+        # * Beta
         self.release_type = release_type
-        # The version of Serverless Spark.
+        # The version of EMR Serverless Spark.
         self.release_version = release_version
-        # The status of the version.
+        # The status of the version.
         #
         # Valid values:
         #
         # * ONLINE
         # * OFFLINE
         self.release_version_status = release_version_status
+        # The workspace ID.
+        self.workspace_id = workspace_id
 
     def validate(self):
         pass
@@ -3089,6 +3110,8 @@ class ListReleaseVersionsRequest(TeaModel):
             result['releaseVersion'] = self.release_version
         if self.release_version_status is not None:
             result['releaseVersionStatus'] = self.release_version_status
+        if self.workspace_id is not None:
+            result['workspaceId'] = self.workspace_id
         return result
 
     def from_map(self, m: dict = None):
@@ -3101,6 +3124,8 @@ class ListReleaseVersionsRequest(TeaModel):
             self.release_version = m.get('releaseVersion')
         if m.get('releaseVersionStatus') is not None:
             self.release_version_status = m.get('releaseVersionStatus')
+        if m.get('workspaceId') is not None:
+            self.workspace_id = m.get('workspaceId')
         return self
 
 
@@ -3122,13 +3147,15 @@ class ListReleaseVersionsResponseBodyReleaseVersions(TeaModel):
         self.community_version = community_version
         # The CPU architectures.
         self.cpu_architectures = cpu_architectures
+        # The version number.
         self.display_release_version = display_release_version
+        # Indicates whether the Fusion engine is used for acceleration.
         self.fusion = fusion
         # The creation time.
         self.gmt_create = gmt_create
         # The type of the Infrastructure as a Service (IaaS) layer.
         self.iaas_type = iaas_type
-        # The version.
+        # The version number.
         self.release_version = release_version
         # The version of Scala.
         self.scala_version = scala_version
@@ -3672,7 +3699,7 @@ class ListSessionClustersResponseBody(TeaModel):
         self.next_token = next_token
         # The request ID.
         self.request_id = request_id
-        # The SQL
+        # The SQL computes.
         self.session_clusters = session_clusters
         # The total number of entries returned.
         self.total_count = total_count
@@ -4723,6 +4750,236 @@ class StartJobRunResponse(TeaModel):
         return self
 
 
+class StartSessionClusterRequest(TeaModel):
+    def __init__(
+        self,
+        queue_name: str = None,
+        session_cluster_id: str = None,
+        region_id: str = None,
+    ):
+        self.queue_name = queue_name
+        self.session_cluster_id = session_cluster_id
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.queue_name is not None:
+            result['queueName'] = self.queue_name
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('queueName') is not None:
+            self.queue_name = m.get('queueName')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class StartSessionClusterResponseBody(TeaModel):
+    def __init__(
+        self,
+        request_id: str = None,
+        session_cluster_id: str = None,
+    ):
+        # 请求ID。
+        self.request_id = request_id
+        # Workspace Id。
+        self.session_cluster_id = session_cluster_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        return self
+
+
+class StartSessionClusterResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: StartSessionClusterResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = StartSessionClusterResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
+class StopSessionClusterRequest(TeaModel):
+    def __init__(
+        self,
+        queue_name: str = None,
+        session_cluster_id: str = None,
+        region_id: str = None,
+    ):
+        self.queue_name = queue_name
+        self.session_cluster_id = session_cluster_id
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.queue_name is not None:
+            result['queueName'] = self.queue_name
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('queueName') is not None:
+            self.queue_name = m.get('queueName')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class StopSessionClusterResponseBody(TeaModel):
+    def __init__(
+        self,
+        request_id: str = None,
+        session_cluster_id: str = None,
+    ):
+        # 请求ID。
+        self.request_id = request_id
+        # Workspace Id。
+        self.session_cluster_id = session_cluster_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        return self
+
+
+class StopSessionClusterResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: StopSessionClusterResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = StopSessionClusterResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class TerminateSqlStatementRequest(TeaModel):
     def __init__(
         self,
alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud-emr-serverless-spark20230808
-Version: 1.4.2
+Version: 1.5.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
@@ -29,7 +29,7 @@ English | [简体中文](README-CN.md)
 
 ## Requirements
 
-- Python >= 3.
+- Python >= 3.7
 
 ## Installation
 
setup.py

@@ -24,7 +24,7 @@ from setuptools import setup, find_packages
 """
 setup module for alibabacloud_emr-serverless-spark20230808.
 
-Created on
+Created on 17/10/2024
 
 @author: Alibaba Cloud SDK
 """
@@ -38,7 +38,7 @@ URL = "https://github.com/aliyun/alibabacloud-python-sdk"
 VERSION = __import__(PACKAGE).__version__
 REQUIRES = [
     "alibabacloud_tea_util>=0.3.13, <1.0.0",
-    "alibabacloud_tea_openapi>=0.3.
+    "alibabacloud_tea_openapi>=0.3.12, <1.0.0",
     "alibabacloud_openapi_util>=0.2.1, <1.0.0",
     "alibabacloud_endpoint_util>=0.0.3, <1.0.0"
 ]
alibabacloud_emr-serverless-spark20230808-1.4.2/alibabacloud_emr_serverless_spark20230808/__init__.py (removed)

@@ -1 +0,0 @@
-__version__ = '1.4.2'
The remaining files (LICENSE, MANIFEST.in, egg-info/SOURCES.txt, egg-info/dependency_links.txt, egg-info/top_level.txt, setup.cfg) are unchanged between 1.4.2 and 1.5.0.