alibabacloud-emr-serverless-spark20230808 1.4.3.tar.gz → 1.6.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alibabacloud-emr-serverless-spark20230808 might be problematic.
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/ChangeLog.md +22 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/PKG-INFO +2 -2
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/README-CN.md +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/README.md +1 -1
- alibabacloud_emr-serverless-spark20230808-1.6.0/alibabacloud_emr_serverless_spark20230808/__init__.py +1 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/alibabacloud_emr_serverless_spark20230808/client.py +384 -20
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/alibabacloud_emr_serverless_spark20230808/models.py +475 -15
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO +2 -2
- alibabacloud_emr-serverless-spark20230808-1.6.0/alibabacloud_emr_serverless_spark20230808.egg-info/requires.txt +4 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/setup.py +3 -3
- alibabacloud_emr-serverless-spark20230808-1.4.3/alibabacloud_emr_serverless_spark20230808/__init__.py +0 -1
- alibabacloud_emr-serverless-spark20230808-1.4.3/alibabacloud_emr_serverless_spark20230808.egg-info/requires.txt +0 -4
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/LICENSE +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/MANIFEST.in +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/alibabacloud_emr_serverless_spark20230808.egg-info/SOURCES.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/alibabacloud_emr_serverless_spark20230808.egg-info/dependency_links.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/alibabacloud_emr_serverless_spark20230808.egg-info/top_level.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.4.3 → alibabacloud_emr-serverless-spark20230808-1.6.0}/setup.cfg +0 -0
ChangeLog.md
@@ -1,3 +1,25 @@
+2024-10-17 Version: 1.5.0
+- Support API StartSessionCluster.
+- Support API StopSessionCluster.
+- Update API ListJobRuns: update param workspaceId.
+- Update API ListJobRuns: update param creator.
+- Update API ListJobRuns: update param endTime.
+- Update API ListJobRuns: update param jobRunDeploymentId.
+- Update API ListJobRuns: update param jobRunId.
+- Update API ListJobRuns: update param maxResults.
+- Update API ListJobRuns: update param name.
+- Update API ListJobRuns: update param nextToken.
+- Update API ListJobRuns: update param resourceQueueId.
+- Update API ListJobRuns: update param startTime.
+- Update API ListJobRuns: update param states.
+- Update API ListJobRuns: update param tags.
+- Update API ListReleaseVersions: add param workspaceId.
+
+
+2024-08-22 Version: 1.4.3
+- Update API GetJobRun: update response param.
+
+
 2024-08-20 Version: 1.4.2
 - Update API ListJobRuns: update response param.
 - Update API ListReleaseVersions: update response param.
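To make the new surface listed in the changelog concrete, a few usage sketches are interleaved below next to the relevant hunks. They are illustrative only and not part of the diff: the endpoint, credentials, workspace ID and other literals are placeholders, and the client construction simply follows the usual Alibaba Cloud Tea OpenAPI pattern.

    # Minimal client setup sketch; all literals below are placeholders.
    from alibabacloud_tea_openapi import models as open_api_models
    from alibabacloud_emr_serverless_spark20230808.client import Client

    config = open_api_models.Config(
        access_key_id='<access-key-id>',          # placeholder credential
        access_key_secret='<access-key-secret>',  # placeholder credential
        # Assumed regional endpoint; consult the product documentation for the real value.
        endpoint='emr-serverless-spark.cn-hangzhou.aliyuncs.com',
    )
    client = Client(config)
    workspace_id = 'w-xxxxxxxx'  # placeholder workspace ID reused by the later sketches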
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud_emr-serverless-spark20230808
-Version: 1.4.3
+Version: 1.6.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
@@ -29,7 +29,7 @@ English | [简体中文](README-CN.md)
 
 ## Requirements
 
-- Python >= 3.
+- Python >= 3.7
 
 ## Installation
 
alibabacloud_emr_serverless_spark20230808/__init__.py (new file)
@@ -0,0 +1 @@
+__version__ = '1.6.0'
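After upgrading the package, the new module-level version string from the __init__.py above can be checked directly; a trivial sketch:

    import alibabacloud_emr_serverless_spark20230808 as emr_spark

    # __version__ is defined by the new __init__.py shown above.
    print(emr_spark.__version__)  # '1.6.0'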
alibabacloud_emr_serverless_spark20230808/client.py
@@ -277,7 +277,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @param headers: map
@@ -328,7 +328,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @param headers: map
@@ -377,7 +377,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @return: CreateSqlStatementResponse
@@ -392,7 +392,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.CreateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.CreateSqlStatementResponse:
         """
-        @summary
+        @summary Creates an SQL query task.
 
         @param request: CreateSqlStatementRequest
         @return: CreateSqlStatementResponse
@@ -522,7 +522,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @param headers: map
@@ -562,7 +562,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @param headers: map
@@ -600,7 +600,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @return: GetSqlStatementResponse
@@ -616,7 +616,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.GetSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.GetSqlStatementResponse:
         """
-        @summary
+        @summary Queries the status of an SQL query task.
 
         @param request: GetSqlStatementRequest
         @return: GetSqlStatementResponse
@@ -913,6 +913,126 @@ class Client(OpenApiClient):
         headers = {}
         return await self.list_job_runs_with_options_async(workspace_id, request, headers, runtime)
 
+    def list_log_contents_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
+        """
+        @summary Queries log contents.
+
+        @param request: ListLogContentsRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListLogContentsResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.file_name):
+            query['fileName'] = request.file_name
+        if not UtilClient.is_unset(request.length):
+            query['length'] = request.length
+        if not UtilClient.is_unset(request.offset):
+            query['offset'] = request.offset
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListLogContents',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/action/listLogContents',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListLogContentsResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def list_log_contents_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
+        """
+        @summary Queries log contents.
+
+        @param request: ListLogContentsRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListLogContentsResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.file_name):
+            query['fileName'] = request.file_name
+        if not UtilClient.is_unset(request.length):
+            query['length'] = request.length
+        if not UtilClient.is_unset(request.offset):
+            query['offset'] = request.offset
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListLogContents',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/action/listLogContents',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListLogContentsResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def list_log_contents(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
+    ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
+        """
+        @summary Queries log contents.
+
+        @param request: ListLogContentsRequest
+        @return: ListLogContentsResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.list_log_contents_with_options(workspace_id, request, headers, runtime)
+
+    async def list_log_contents_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListLogContentsRequest,
+    ) -> emr_serverless_spark_20230808_models.ListLogContentsResponse:
+        """
+        @summary Queries log contents.
+
+        @param request: ListLogContentsRequest
+        @return: ListLogContentsResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.list_log_contents_with_options_async(workspace_id, request, headers, runtime)
+
     def list_release_versions_with_options(
         self,
         request: emr_serverless_spark_20230808_models.ListReleaseVersionsRequest,
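A usage sketch for the new ListLogContents operation, reusing the client and workspace_id defined earlier. The file name and paging values are made-up placeholders, and whether offset/length count bytes or lines is not stated in this diff:

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    list_logs_request = emr_spark_models.ListLogContentsRequest(
        file_name='stderr.log',   # hypothetical log file name
        offset=0,                 # start position within the log (unit not documented here)
        length=1024,              # amount of log content to fetch per call
        region_id='cn-hangzhou',  # placeholder region
    )
    list_logs_response = client.list_log_contents(workspace_id, list_logs_request)
    log = list_logs_response.body.list_log_content
    print(log.total_length)
    for entry in log.contents or []:
        print(entry.line_content)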
alibabacloud_emr_serverless_spark20230808/client.py (continued)
@@ -920,7 +1040,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @param headers: map
@@ -937,6 +1057,8 @@ class Client(OpenApiClient):
             query['releaseVersion'] = request.release_version
         if not UtilClient.is_unset(request.release_version_status):
             query['releaseVersionStatus'] = request.release_version_status
+        if not UtilClient.is_unset(request.workspace_id):
+            query['workspaceId'] = request.workspace_id
         req = open_api_models.OpenApiRequest(
             headers=headers,
             query=OpenApiUtilClient.query(query)
@@ -964,7 +1086,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @param headers: map
@@ -981,6 +1103,8 @@ class Client(OpenApiClient):
             query['releaseVersion'] = request.release_version
         if not UtilClient.is_unset(request.release_version_status):
             query['releaseVersionStatus'] = request.release_version_status
+        if not UtilClient.is_unset(request.workspace_id):
+            query['workspaceId'] = request.workspace_id
         req = open_api_models.OpenApiRequest(
             headers=headers,
             query=OpenApiUtilClient.query(query)
@@ -1006,7 +1130,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListReleaseVersionsRequest,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @return: ListReleaseVersionsResponse
@@ -1020,7 +1144,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListReleaseVersionsRequest,
     ) -> emr_serverless_spark_20230808_models.ListReleaseVersionsResponse:
         """
-        @summary
+        @summary Queries the list of published versions of E-MapReduce (EMR) Serverless Spark.
 
         @param request: ListReleaseVersionsRequest
         @return: ListReleaseVersionsResponse
@@ -1037,7 +1161,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListSessionClustersResponse:
         """
-        @summary
+        @summary Queries a list of sessions.
 
         @param request: ListSessionClustersRequest
         @param headers: map
@@ -1086,7 +1210,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.ListSessionClustersResponse:
         """
-        @summary
+        @summary Queries a list of sessions.
 
         @param request: ListSessionClustersRequest
         @param headers: map
@@ -1133,7 +1257,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListSessionClustersRequest,
     ) -> emr_serverless_spark_20230808_models.ListSessionClustersResponse:
         """
-        @summary
+        @summary Queries a list of sessions.
 
         @param request: ListSessionClustersRequest
         @return: ListSessionClustersResponse
@@ -1148,7 +1272,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.ListSessionClustersRequest,
     ) -> emr_serverless_spark_20230808_models.ListSessionClustersResponse:
         """
-        @summary
+        @summary Queries a list of sessions.
 
         @param request: ListSessionClustersRequest
         @return: ListSessionClustersResponse
@@ -1549,6 +1673,246 @@ class Client(OpenApiClient):
         headers = {}
         return await self.start_job_run_with_options_async(workspace_id, request, headers, runtime)
 
+    def start_session_cluster_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary Starts a session.
+
+        @param request: StartSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StartSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StartSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/startSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StartSessionClusterResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def start_session_cluster_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary Starts a session.
+
+        @param request: StartSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StartSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StartSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/startSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StartSessionClusterResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def start_session_cluster(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary Starts a session.
+
+        @param request: StartSessionClusterRequest
+        @return: StartSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.start_session_cluster_with_options(workspace_id, request, headers, runtime)
+
+    async def start_session_cluster_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StartSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StartSessionClusterResponse:
+        """
+        @summary Starts a session.
+
+        @param request: StartSessionClusterRequest
+        @return: StartSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.start_session_cluster_with_options_async(workspace_id, request, headers, runtime)
+
+    def stop_session_cluster_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary Stops a session.
+
+        @param request: StopSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StopSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StopSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/stopSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StopSessionClusterResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def stop_session_cluster_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary Stops a session.
+
+        @param request: StopSessionClusterRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: StopSessionClusterResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        body = {}
+        if not UtilClient.is_unset(request.queue_name):
+            body['queueName'] = request.queue_name
+        if not UtilClient.is_unset(request.session_cluster_id):
+            body['sessionClusterId'] = request.session_cluster_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query),
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='StopSessionCluster',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/sessionClusters/action/stopSessionCluster',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.StopSessionClusterResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def stop_session_cluster(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary Stops a session.
+
+        @param request: StopSessionClusterRequest
+        @return: StopSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.stop_session_cluster_with_options(workspace_id, request, headers, runtime)
+
+    async def stop_session_cluster_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.StopSessionClusterRequest,
+    ) -> emr_serverless_spark_20230808_models.StopSessionClusterResponse:
+        """
+        @summary Stops a session.
+
+        @param request: StopSessionClusterRequest
+        @return: StopSessionClusterResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.stop_session_cluster_with_options_async(workspace_id, request, headers, runtime)
+
     def terminate_sql_statement_with_options(
         self,
         workspace_id: str,
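The two new session operations are thin POST wrappers around a queue name and session ID. A usage sketch under the same assumptions as the earlier examples (the queue and session IDs are placeholders):

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    start_request = emr_spark_models.StartSessionClusterRequest(
        queue_name='dev_queue',            # placeholder queue name
        session_cluster_id='sc-xxxxxxxx',  # placeholder session ID
        region_id='cn-hangzhou',
    )
    start_response = client.start_session_cluster(workspace_id, start_request)
    print(start_response.body.request_id, start_response.body.session_cluster_id)

    # Stopping the same session later uses the mirror-image request model.
    stop_request = emr_spark_models.StopSessionClusterRequest(
        queue_name='dev_queue',
        session_cluster_id='sc-xxxxxxxx',
        region_id='cn-hangzhou',
    )
    stop_response = client.stop_session_cluster(workspace_id, stop_request)
    print(stop_response.body.request_id)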
alibabacloud_emr_serverless_spark20230808/client.py (continued)
@@ -1558,7 +1922,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @param headers: map
@@ -1598,7 +1962,7 @@ class Client(OpenApiClient):
         runtime: util_models.RuntimeOptions,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @param headers: map
@@ -1636,7 +2000,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @return: TerminateSqlStatementResponse
@@ -1652,7 +2016,7 @@ class Client(OpenApiClient):
         request: emr_serverless_spark_20230808_models.TerminateSqlStatementRequest,
     ) -> emr_serverless_spark_20230808_models.TerminateSqlStatementResponse:
         """
-        @summary
+        @summary Terminates an SQL query task.
 
         @param request: TerminateSqlStatementRequest
         @return: TerminateSqlStatementResponse
alibabacloud_emr_serverless_spark20230808/models.py
@@ -781,6 +781,7 @@ class Task(TeaModel):
         spark_submit_clause: str = None,
         spark_version: str = None,
         tags: Dict[str, str] = None,
+        timeout: int = None,
         type: str = None,
     ):
         self.archives = archives
@@ -834,6 +835,7 @@ class Task(TeaModel):
         # This parameter is required.
         self.spark_version = spark_version
         self.tags = tags
+        self.timeout = timeout
         # This parameter is required.
         self.type = type
 
@@ -925,6 +927,8 @@ class Task(TeaModel):
             result['sparkVersion'] = self.spark_version
         if self.tags is not None:
             result['tags'] = self.tags
+        if self.timeout is not None:
+            result['timeout'] = self.timeout
         if self.type is not None:
             result['type'] = self.type
         return result
@@ -1008,6 +1012,8 @@ class Task(TeaModel):
             self.spark_version = m.get('sparkVersion')
         if m.get('tags') is not None:
             self.tags = m.get('tags')
+        if m.get('timeout') is not None:
+            self.timeout = m.get('timeout')
         if m.get('type') is not None:
             self.type = m.get('type')
         return self
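The only functional change to the Task model is the new optional timeout field, serialized as 'timeout'. A small sketch; the unit of the value is not documented in this diff, and the other field values are placeholders:

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    task = emr_spark_models.Task(
        type='SQL',             # placeholder; the model marks type as required
        spark_version='3.4.2',  # placeholder; also marked as required
        timeout=3600,           # new in this release; unit assumed, not stated in the diff
    )
    # to_map() now carries the timeout alongside the other populated fields.
    print(task.to_map())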
alibabacloud_emr_serverless_spark20230808/models.py (continued)
@@ -2504,7 +2510,7 @@ class ListJobRunsRequest(TeaModel):
         states: List[str] = None,
         tags: List[ListJobRunsRequestTags] = None,
     ):
-        # The ID of the user who
+        # The ID of the user who created the job.
         self.creator = creator
         # The range of end time.
         self.end_time = end_time
@@ -2622,7 +2628,7 @@ class ListJobRunsShrinkRequest(TeaModel):
         states_shrink: str = None,
         tags_shrink: str = None,
     ):
-        # The ID of the user who
+        # The ID of the user who created the job.
         self.creator = creator
         # The range of end time.
         self.end_time_shrink = end_time_shrink
@@ -3057,6 +3063,196 @@ class ListJobRunsResponse(TeaModel):
         return self
 
 
+class ListLogContentsRequest(TeaModel):
+    def __init__(
+        self,
+        file_name: str = None,
+        length: int = None,
+        offset: int = None,
+        region_id: str = None,
+    ):
+        self.file_name = file_name
+        self.length = length
+        self.offset = offset
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.file_name is not None:
+            result['fileName'] = self.file_name
+        if self.length is not None:
+            result['length'] = self.length
+        if self.offset is not None:
+            result['offset'] = self.offset
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('fileName') is not None:
+            self.file_name = m.get('fileName')
+        if m.get('length') is not None:
+            self.length = m.get('length')
+        if m.get('offset') is not None:
+            self.offset = m.get('offset')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class ListLogContentsResponseBodyListLogContentContents(TeaModel):
+    def __init__(
+        self,
+        line_content: str = None,
+    ):
+        self.line_content = line_content
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.line_content is not None:
+            result['LineContent'] = self.line_content
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('LineContent') is not None:
+            self.line_content = m.get('LineContent')
+        return self
+
+
+class ListLogContentsResponseBodyListLogContent(TeaModel):
+    def __init__(
+        self,
+        contents: List[ListLogContentsResponseBodyListLogContentContents] = None,
+        total_length: int = None,
+    ):
+        self.contents = contents
+        self.total_length = total_length
+
+    def validate(self):
+        if self.contents:
+            for k in self.contents:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['contents'] = []
+        if self.contents is not None:
+            for k in self.contents:
+                result['contents'].append(k.to_map() if k else None)
+        if self.total_length is not None:
+            result['totalLength'] = self.total_length
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.contents = []
+        if m.get('contents') is not None:
+            for k in m.get('contents'):
+                temp_model = ListLogContentsResponseBodyListLogContentContents()
+                self.contents.append(temp_model.from_map(k))
+        if m.get('totalLength') is not None:
+            self.total_length = m.get('totalLength')
+        return self
+
+
+class ListLogContentsResponseBody(TeaModel):
+    def __init__(
+        self,
+        list_log_content: ListLogContentsResponseBodyListLogContent = None,
+        request_id: str = None,
+    ):
+        self.list_log_content = list_log_content
+        # The request ID.
+        self.request_id = request_id
+
+    def validate(self):
+        if self.list_log_content:
+            self.list_log_content.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.list_log_content is not None:
+            result['listLogContent'] = self.list_log_content.to_map()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('listLogContent') is not None:
+            temp_model = ListLogContentsResponseBodyListLogContent()
+            self.list_log_content = temp_model.from_map(m['listLogContent'])
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        return self
+
+
+class ListLogContentsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: ListLogContentsResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = ListLogContentsResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class ListReleaseVersionsRequest(TeaModel):
     def __init__(
         self,
alibabacloud_emr_serverless_spark20230808/models.py (continued)
@@ -3064,6 +3260,7 @@ class ListReleaseVersionsRequest(TeaModel):
         release_type: str = None,
         release_version: str = None,
         release_version_status: str = None,
+        workspace_id: str = None,
     ):
         # The region ID.
         self.region_id = region_id
@@ -3072,17 +3269,19 @@ class ListReleaseVersionsRequest(TeaModel):
         # Valid values:
         #
         # * stable
-        # *
+        # * Beta
         self.release_type = release_type
-        # The version of Serverless Spark.
+        # The version of EMR Serverless Spark.
        self.release_version = release_version
-        # The status of the version.
+        # The status of the version.
         #
         # Valid values:
         #
         # * ONLINE
         # * OFFLINE
         self.release_version_status = release_version_status
+        # The workspace ID.
+        self.workspace_id = workspace_id
 
     def validate(self):
         pass
@@ -3101,6 +3300,8 @@ class ListReleaseVersionsRequest(TeaModel):
             result['releaseVersion'] = self.release_version
         if self.release_version_status is not None:
             result['releaseVersionStatus'] = self.release_version_status
+        if self.workspace_id is not None:
+            result['workspaceId'] = self.workspace_id
         return result
 
     def from_map(self, m: dict = None):
@@ -3113,6 +3314,8 @@ class ListReleaseVersionsRequest(TeaModel):
             self.release_version = m.get('releaseVersion')
         if m.get('releaseVersionStatus') is not None:
             self.release_version_status = m.get('releaseVersionStatus')
+        if m.get('workspaceId') is not None:
+            self.workspace_id = m.get('workspaceId')
         return self
 
 
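With the new optional workspaceId filter wired through as a query parameter, listing release versions for a specific workspace could look like the sketch below (the response body field names follow the generated naming convention and are assumed here):

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    versions_request = emr_spark_models.ListReleaseVersionsRequest(
        region_id='cn-hangzhou',          # placeholder region
        release_version_status='ONLINE',  # ONLINE / OFFLINE per the model docs
        workspace_id=workspace_id,        # new optional filter in this release
    )
    versions_response = client.list_release_versions(versions_request)
    for rv in versions_response.body.release_versions or []:
        print(rv.release_version, rv.display_release_version)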
alibabacloud_emr_serverless_spark20230808/models.py (continued)
@@ -3134,13 +3337,15 @@ class ListReleaseVersionsResponseBodyReleaseVersions(TeaModel):
         self.community_version = community_version
         # The CPU architectures.
         self.cpu_architectures = cpu_architectures
+        # The version number.
         self.display_release_version = display_release_version
+        # Indicates whether the Fusion engine is used for acceleration.
         self.fusion = fusion
         # The creation time.
         self.gmt_create = gmt_create
         # The type of the Infrastructure as a Service (IaaS) layer.
         self.iaas_type = iaas_type
-        # The version.
+        # The version number.
         self.release_version = release_version
         # The version of Scala.
         self.scala_version = scala_version
@@ -3320,6 +3525,13 @@ class ListSessionClustersRequest(TeaModel):
         region_id: str = None,
         session_cluster_id: str = None,
     ):
+        # The session type.
+        #
+        # Valid values:
+        #
+        # * NOTEBOOK
+        # * THRIFT
+        # * SQL
         self.kind = kind
         # The maximum number of entries to return.
         self.max_results = max_results
@@ -3450,7 +3662,7 @@ class ListSessionClustersResponseBodySessionClustersAutoStopConfiguration(TeaModel):
     ):
         # Indicates whether automatic termination is enabled.
         self.enable = enable
-        # The idle timeout period. The
+        # The idle timeout period. The session is automatically terminated when the idle timeout period is exceeded.
         self.idle_timeout_minutes = idle_timeout_minutes
 
     def validate(self):
@@ -3534,32 +3746,44 @@ class ListSessionClustersResponseBodySessionClusters(TeaModel):
         web_ui: str = None,
         workspace_id: str = None,
     ):
-        # The
+        # The session configurations, which are equivalent to the configurations of the Spark job.
         self.application_configs = application_configs
         # The automatic startup configurations.
         self.auto_start_configuration = auto_start_configuration
-        # The automatic termination
+        # The configurations of automatic termination.
         self.auto_stop_configuration = auto_stop_configuration
+        # The version of the Spark engine.
         self.display_release_version = display_release_version
         self.domain = domain
+        # The ID of the job that is associated with the session.
         self.draft_id = draft_id
+        # Indicates whether the Fusion engine is used for acceleration.
         self.fusion = fusion
+        # The session type.
+        #
+        # Valid values:
+        #
+        # * NOTEBOOK
+        # * THRIFT
+        # * SQL
         self.kind = kind
-        # The name of the
+        # The name of the session.
         self.name = name
-        # The name of the queue
+        # The name of the queue that is used to run the session.
         self.queue_name = queue_name
+        # The version of EMR Serverless Spark.
         self.release_version = release_version
-        # The
+        # The session ID.
         self.session_cluster_id = session_cluster_id
-        # The status of the
+        # The status of the session.
         self.state = state
-        # The details of the
+        # The details of the most recent status change of the session.
         self.state_change_reason = state_change_reason
         # The user ID.
         self.user_id = user_id
         # The name of the user.
         self.user_name = user_name
+        # The Spark UI of the session.
         self.web_ui = web_ui
         # The workspace ID.
         self.workspace_id = workspace_id
@@ -3684,7 +3908,7 @@ class ListSessionClustersResponseBody(TeaModel):
         self.next_token = next_token
         # The request ID.
         self.request_id = request_id
-        # The
+        # The list of sessions.
         self.session_clusters = session_clusters
         # The total number of entries returned.
         self.total_count = total_count
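The added comments spell out the valid session kinds (NOTEBOOK, THRIFT, SQL). Assuming the pre-existing list_session_clusters wrapper follows the same calling convention as the other workspace-scoped operations (workspace ID passed as a path argument), filtering by kind might look like:

    from alibabacloud_emr_serverless_spark20230808 import models as emr_spark_models

    sessions_request = emr_spark_models.ListSessionClustersRequest(
        kind='SQL',      # one of NOTEBOOK / THRIFT / SQL per the new doc comments
        max_results=20,
    )
    sessions_response = client.list_session_clusters(workspace_id, sessions_request)
    for sc in sessions_response.body.session_clusters or []:
        print(sc.session_cluster_id, sc.name, sc.state, sc.web_ui)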
alibabacloud_emr_serverless_spark20230808/models.py (continued)
@@ -4735,6 +4959,242 @@ class StartJobRunResponse(TeaModel):
         return self
 
 
+class StartSessionClusterRequest(TeaModel):
+    def __init__(
+        self,
+        queue_name: str = None,
+        session_cluster_id: str = None,
+        region_id: str = None,
+    ):
+        # The queue name.
+        self.queue_name = queue_name
+        # The session ID.
+        self.session_cluster_id = session_cluster_id
+        # The region ID.
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.queue_name is not None:
+            result['queueName'] = self.queue_name
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('queueName') is not None:
+            self.queue_name = m.get('queueName')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class StartSessionClusterResponseBody(TeaModel):
+    def __init__(
+        self,
+        request_id: str = None,
+        session_cluster_id: str = None,
+    ):
+        # The request ID.
+        self.request_id = request_id
+        # The workspace ID.
+        self.session_cluster_id = session_cluster_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        return self
+
+
+class StartSessionClusterResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: StartSessionClusterResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = StartSessionClusterResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
+class StopSessionClusterRequest(TeaModel):
+    def __init__(
+        self,
+        queue_name: str = None,
+        session_cluster_id: str = None,
+        region_id: str = None,
+    ):
+        # The queue name.
+        self.queue_name = queue_name
+        # The session ID.
+        self.session_cluster_id = session_cluster_id
+        # The region ID.
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.queue_name is not None:
+            result['queueName'] = self.queue_name
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('queueName') is not None:
+            self.queue_name = m.get('queueName')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class StopSessionClusterResponseBody(TeaModel):
+    def __init__(
+        self,
+        request_id: str = None,
+        session_cluster_id: str = None,
+    ):
+        # The request ID.
+        self.request_id = request_id
+        # The workspace ID.
+        self.session_cluster_id = session_cluster_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.session_cluster_id is not None:
+            result['sessionClusterId'] = self.session_cluster_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('sessionClusterId') is not None:
+            self.session_cluster_id = m.get('sessionClusterId')
+        return self
+
+
+class StopSessionClusterResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: StopSessionClusterResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = StopSessionClusterResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class TerminateSqlStatementRequest(TeaModel):
     def __init__(
         self,
alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud-emr-serverless-spark20230808
-Version: 1.4.3
+Version: 1.6.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
@@ -29,7 +29,7 @@ English | [简体中文](README-CN.md)
 
 ## Requirements
 
-- Python >= 3.
+- Python >= 3.7
 
 ## Installation
 
setup.py
@@ -24,7 +24,7 @@ from setuptools import setup, find_packages
 """
 setup module for alibabacloud_emr-serverless-spark20230808.
 
-Created on
+Created on 06/11/2024
 
 @author: Alibaba Cloud SDK
 """
@@ -38,8 +38,8 @@ URL = "https://github.com/aliyun/alibabacloud-python-sdk"
 VERSION = __import__(PACKAGE).__version__
 REQUIRES = [
     "alibabacloud_tea_util>=0.3.13, <1.0.0",
-    "alibabacloud_tea_openapi>=0.3.
-    "alibabacloud_openapi_util>=0.2.
+    "alibabacloud_tea_openapi>=0.3.12, <1.0.0",
+    "alibabacloud_openapi_util>=0.2.2, <1.0.0",
     "alibabacloud_endpoint_util>=0.0.3, <1.0.0"
 ]
 
alibabacloud_emr-serverless-spark20230808-1.4.3/alibabacloud_emr_serverless_spark20230808/__init__.py (removed)
@@ -1 +0,0 @@
-__version__ = '1.4.3'

The remaining files listed in the summary above (LICENSE, MANIFEST.in, the egg-info SOURCES.txt, dependency_links.txt, top_level.txt, and setup.cfg) are unchanged between the two versions.