alibabacloud-emr-serverless-spark20230808 1.10.2__tar.gz → 1.11.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of alibabacloud-emr-serverless-spark20230808 might be problematic.
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/ChangeLog.md +11 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/PKG-INFO +1 -1
- alibabacloud_emr-serverless-spark20230808-1.11.0/alibabacloud_emr_serverless_spark20230808/__init__.py +1 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/alibabacloud_emr_serverless_spark20230808/client.py +226 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/alibabacloud_emr_serverless_spark20230808/models.py +489 -6
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/alibabacloud_emr_serverless_spark20230808.egg-info/requires.txt +1 -1
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/setup.py +2 -2
- alibabacloud_emr-serverless-spark20230808-1.10.2/alibabacloud_emr_serverless_spark20230808/__init__.py +0 -1
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/LICENSE +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/MANIFEST.in +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/README-CN.md +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/README.md +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/alibabacloud_emr_serverless_spark20230808.egg-info/SOURCES.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/alibabacloud_emr_serverless_spark20230808.egg-info/dependency_links.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/alibabacloud_emr_serverless_spark20230808.egg-info/top_level.txt +0 -0
- {alibabacloud_emr-serverless-spark20230808-1.10.2 → alibabacloud_emr-serverless-spark20230808-1.11.0}/setup.cfg +0 -0
ChangeLog.md
@@ -1,3 +1,14 @@
+2025-05-30 Version: 1.10.3
+- Generated python 2023-08-08 for emr-serverless-spark.
+
+2025-05-19 Version: 1.10.2
+- Update API CreateSessionCluster: add request parameters body.publicEndpointEnabled.
+- Update API GetSessionCluster: add response parameters Body.sessionCluster.publicEndpointEnabled.
+- Update API GetTemplate: add request parameters templateBizId.
+- Update API ListSessionClusters: add response parameters Body.sessionClusters.$.publicEndpointEnabled.
+- Update API ListWorkspaces: add response parameters Body.workspaces.$.prePaidQuota.orderId.
+
+
 2025-05-16 Version: 1.10.1
 - Generated python 2023-08-08 for emr-serverless-spark.
 
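For the usage sketches that follow, here is a minimal, hypothetical client setup for the 1.11.0 release; the endpoint, credentials, and all IDs used later are placeholders rather than values taken from this diff.

# Minimal client setup sketch (endpoint and credentials are placeholder assumptions).
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_emr_serverless_spark20230808.client import Client

config = open_api_models.Config(
    access_key_id='<access-key-id>',          # placeholder credential
    access_key_secret='<access-key-secret>',  # placeholder credential
)
config.endpoint = 'emr-serverless-spark.cn-hangzhou.aliyuncs.com'  # assumed regional endpoint
client = Client(config)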
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud_emr-serverless-spark20230808
-Version: 1.10.2
+Version: 1.11.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
alibabacloud_emr_serverless_spark20230808/__init__.py (new file)
@@ -0,0 +1 @@
+__version__ = '1.11.0'
alibabacloud_emr_serverless_spark20230808/client.py
@@ -1884,6 +1884,8 @@ class Client(OpenApiClient):
             query['creator'] = request.creator
         if not UtilClient.is_unset(request.end_time_shrink):
             query['endTime'] = request.end_time_shrink
+        if not UtilClient.is_unset(request.is_workflow):
+            query['isWorkflow'] = request.is_workflow
         if not UtilClient.is_unset(request.job_run_deployment_id):
             query['jobRunDeploymentId'] = request.job_run_deployment_id
         if not UtilClient.is_unset(request.job_run_id):
@@ -1957,6 +1959,8 @@
             query['creator'] = request.creator
         if not UtilClient.is_unset(request.end_time_shrink):
             query['endTime'] = request.end_time_shrink
+        if not UtilClient.is_unset(request.is_workflow):
+            query['isWorkflow'] = request.is_workflow
         if not UtilClient.is_unset(request.job_run_deployment_id):
             query['jobRunDeploymentId'] = request.job_run_deployment_id
         if not UtilClient.is_unset(request.job_run_id):
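Both the synchronous and asynchronous ListJobRuns paths above now forward the new is_workflow request field as the isWorkflow query parameter. A minimal sketch using the client from the setup example; the model types is_workflow as a string, and the 'true'/'false' value format shown here is an assumption.

# Sketch: filter job runs with the new isWorkflow parameter (value format is an assumption).
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

request = emr_models.ListJobRunsRequest(
    is_workflow='true',  # new in 1.11.0; string-typed in the model
    max_results=20,
)
response = client.list_job_runs('<workspace-id>', request)  # workspace ID is a placeholder
print(response.body)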
@@ -2029,6 +2033,96 @@
         headers = {}
         return await self.list_job_runs_with_options_async(workspace_id, request, headers, runtime)
 
+    def list_kyuubi_services_with_options(
+        self,
+        workspace_id: str,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiServicesResponse:
+        """
+        @summary ListKyuubiServices
+
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListKyuubiServicesResponse
+        """
+        req = open_api_models.OpenApiRequest(
+            headers=headers
+        )
+        params = open_api_models.Params(
+            action='ListKyuubiServices',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/kyuubi/{OpenApiUtilClient.get_encode_param(workspace_id)}',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListKyuubiServicesResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def list_kyuubi_services_with_options_async(
+        self,
+        workspace_id: str,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiServicesResponse:
+        """
+        @summary ListKyuubiServices
+
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListKyuubiServicesResponse
+        """
+        req = open_api_models.OpenApiRequest(
+            headers=headers
+        )
+        params = open_api_models.Params(
+            action='ListKyuubiServices',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/kyuubi/{OpenApiUtilClient.get_encode_param(workspace_id)}',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListKyuubiServicesResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def list_kyuubi_services(
+        self,
+        workspace_id: str,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiServicesResponse:
+        """
+        @summary ListKyuubiServices
+
+        @return: ListKyuubiServicesResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.list_kyuubi_services_with_options(workspace_id, headers, runtime)
+
+    async def list_kyuubi_services_async(
+        self,
+        workspace_id: str,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiServicesResponse:
+        """
+        @summary ListKyuubiServices
+
+        @return: ListKyuubiServicesResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.list_kyuubi_services_with_options_async(workspace_id, headers, runtime)
+
     def list_kyuubi_spark_applications_with_options(
         self,
         workspace_id: str,
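The hunk above adds ListKyuubiServices to the client in the usual four generated forms (sync/async, with and without explicit options). A minimal call sketch with the client from the setup example; the workspace ID is a placeholder.

# Sketch: list the Kyuubi services in a workspace (workspace ID is a placeholder).
response = client.list_kyuubi_services('<workspace-id>')
for service in response.body.data.kyuubi_services or []:
    print(service.kyuubi_service_id, service.name, service.state)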
@@ -2048,6 +2142,8 @@
         UtilClient.validate_model(tmp_req)
         request = emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsShrinkRequest()
         OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.order_by):
+            request.order_by_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.order_by, 'orderBy', 'json')
         if not UtilClient.is_unset(tmp_req.start_time):
            request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
         query = {}
@@ -2057,8 +2153,16 @@
             query['applicationName'] = request.application_name
         if not UtilClient.is_unset(request.max_results):
             query['maxResults'] = request.max_results
+        if not UtilClient.is_unset(request.min_duration):
+            query['minDuration'] = request.min_duration
         if not UtilClient.is_unset(request.next_token):
             query['nextToken'] = request.next_token
+        if not UtilClient.is_unset(request.order_by_shrink):
+            query['orderBy'] = request.order_by_shrink
+        if not UtilClient.is_unset(request.resource_queue_id):
+            query['resourceQueueId'] = request.resource_queue_id
+        if not UtilClient.is_unset(request.sort):
+            query['sort'] = request.sort
         if not UtilClient.is_unset(request.start_time_shrink):
             query['startTime'] = request.start_time_shrink
         req = open_api_models.OpenApiRequest(
@@ -2100,6 +2204,8 @@
         UtilClient.validate_model(tmp_req)
         request = emr_serverless_spark_20230808_models.ListKyuubiSparkApplicationsShrinkRequest()
         OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.order_by):
+            request.order_by_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.order_by, 'orderBy', 'json')
         if not UtilClient.is_unset(tmp_req.start_time):
            request.start_time_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.start_time, 'startTime', 'json')
         query = {}
@@ -2109,8 +2215,16 @@
             query['applicationName'] = request.application_name
         if not UtilClient.is_unset(request.max_results):
             query['maxResults'] = request.max_results
+        if not UtilClient.is_unset(request.min_duration):
+            query['minDuration'] = request.min_duration
         if not UtilClient.is_unset(request.next_token):
             query['nextToken'] = request.next_token
+        if not UtilClient.is_unset(request.order_by_shrink):
+            query['orderBy'] = request.order_by_shrink
+        if not UtilClient.is_unset(request.resource_queue_id):
+            query['resourceQueueId'] = request.resource_queue_id
+        if not UtilClient.is_unset(request.sort):
+            query['sort'] = request.sort
         if not UtilClient.is_unset(request.start_time_shrink):
             query['startTime'] = request.start_time_shrink
         req = open_api_models.OpenApiRequest(
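ListKyuubiSparkApplications gains minDuration, orderBy, resourceQueueId, and sort query parameters, with orderBy serialized from a list through the shrink request. A minimal sketch; the IDs, the duration unit, and the accepted orderBy/sort values are assumptions.

# Sketch: use the new ListKyuubiSparkApplications filters (IDs and sort values are assumptions).
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

request = emr_models.ListKyuubiSparkApplicationsRequest(
    min_duration=60000,               # new in 1.11.0; integer duration filter, unit assumed
    order_by=['startTime'],           # new; serialized into the orderBy query parameter
    sort='DESC',                      # new; accepted values are an assumption
    resource_queue_id='<queue-id>',   # new; placeholder
    max_results=50,
)
response = client.list_kyuubi_spark_applications('<workspace-id>', '<kyuubi-service-id>', request)
print(response.body)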
@@ -2165,6 +2279,118 @@
         headers = {}
         return await self.list_kyuubi_spark_applications_with_options_async(workspace_id, kyuubi_service_id, request, headers, runtime)
 
+    def list_kyuubi_token_with_options(
+        self,
+        workspace_id: str,
+        kyuubi_service_id: str,
+        request: emr_serverless_spark_20230808_models.ListKyuubiTokenRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiTokenResponse:
+        """
+        @summary Lists the tokens of a compute.
+
+        @param request: ListKyuubiTokenRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListKyuubiTokenResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListKyuubiToken',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/kyuubiService/{OpenApiUtilClient.get_encode_param(kyuubi_service_id)}/token',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListKyuubiTokenResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def list_kyuubi_token_with_options_async(
+        self,
+        workspace_id: str,
+        kyuubi_service_id: str,
+        request: emr_serverless_spark_20230808_models.ListKyuubiTokenRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiTokenResponse:
+        """
+        @summary Lists the tokens of a compute.
+
+        @param request: ListKyuubiTokenRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListKyuubiTokenResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListKyuubiToken',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/kyuubiService/{OpenApiUtilClient.get_encode_param(kyuubi_service_id)}/token',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListKyuubiTokenResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def list_kyuubi_token(
+        self,
+        workspace_id: str,
+        kyuubi_service_id: str,
+        request: emr_serverless_spark_20230808_models.ListKyuubiTokenRequest,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiTokenResponse:
+        """
+        @summary Lists the tokens of a compute.
+
+        @param request: ListKyuubiTokenRequest
+        @return: ListKyuubiTokenResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.list_kyuubi_token_with_options(workspace_id, kyuubi_service_id, request, headers, runtime)
+
+    async def list_kyuubi_token_async(
+        self,
+        workspace_id: str,
+        kyuubi_service_id: str,
+        request: emr_serverless_spark_20230808_models.ListKyuubiTokenRequest,
+    ) -> emr_serverless_spark_20230808_models.ListKyuubiTokenResponse:
+        """
+        @summary Lists the tokens of a compute.
+
+        @param request: ListKyuubiTokenRequest
+        @return: ListKyuubiTokenResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.list_kyuubi_token_with_options_async(workspace_id, kyuubi_service_id, request, headers, runtime)
+
     def list_log_contents_with_options(
         self,
         workspace_id: str,
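The new ListKyuubiToken operation lists the access tokens of a Kyuubi compute under /api/v1/workspaces/{workspaceId}/kyuubiService/{kyuubiServiceId}/token. A minimal sketch with the client from the setup example; the workspace, service, and region IDs are placeholders.

# Sketch: list tokens for a Kyuubi service (all IDs are placeholders).
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

request = emr_models.ListKyuubiTokenRequest(region_id='cn-hangzhou')  # region is an assumption
response = client.list_kyuubi_token('<workspace-id>', '<kyuubi-service-id>', request)
for token in response.body.data.tokens or []:
    print(token.token_id, token.name, token.expire_time)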
alibabacloud_emr_serverless_spark20230808/models.py
@@ -2868,6 +2868,9 @@ class CreateSessionClusterRequest(TeaModel):
         # The Spark configurations.
         self.application_configs = application_configs
         # The automatic startup configuration.
+        #
+        # *\
+        # *\
         self.auto_start_configuration = auto_start_configuration
         # The automatic termination configuration.
         self.auto_stop_configuration = auto_stop_configuration
@@ -4489,6 +4492,7 @@ class GetSessionClusterResponseBodySessionCluster(TeaModel):
         application_configs: List[GetSessionClusterResponseBodySessionClusterApplicationConfigs] = None,
         auto_start_configuration: GetSessionClusterResponseBodySessionClusterAutoStartConfiguration = None,
         auto_stop_configuration: GetSessionClusterResponseBodySessionClusterAutoStopConfiguration = None,
+        connection_token: str = None,
         display_release_version: str = None,
         domain: str = None,
         domain_inner: str = None,
@@ -4517,6 +4521,7 @@ class GetSessionClusterResponseBodySessionCluster(TeaModel):
         self.auto_start_configuration = auto_start_configuration
         # Indicates whether automatic termination is enabled.
         self.auto_stop_configuration = auto_stop_configuration
+        self.connection_token = connection_token
         # The version of the Spark engine.
         self.display_release_version = display_release_version
         # The domain name to which the Spark UI of the session belongs.
@@ -4595,6 +4600,8 @@ class GetSessionClusterResponseBodySessionCluster(TeaModel):
             result['autoStartConfiguration'] = self.auto_start_configuration.to_map()
         if self.auto_stop_configuration is not None:
             result['autoStopConfiguration'] = self.auto_stop_configuration.to_map()
+        if self.connection_token is not None:
+            result['connectionToken'] = self.connection_token
         if self.display_release_version is not None:
             result['displayReleaseVersion'] = self.display_release_version
         if self.domain is not None:
@@ -4652,6 +4659,8 @@ class GetSessionClusterResponseBodySessionCluster(TeaModel):
         if m.get('autoStopConfiguration') is not None:
             temp_model = GetSessionClusterResponseBodySessionClusterAutoStopConfiguration()
             self.auto_stop_configuration = temp_model.from_map(m['autoStopConfiguration'])
+        if m.get('connectionToken') is not None:
+            self.connection_token = m.get('connectionToken')
         if m.get('displayReleaseVersion') is not None:
             self.display_release_version = m.get('displayReleaseVersion')
         if m.get('domain') is not None:
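GetSessionCluster responses now expose a connectionToken field on the session cluster model. A small sketch showing the new field round-tripping through from_map and to_map on the model alone; the payload values are made up, and in practice the model is populated from a real GetSessionCluster call.

# Sketch: the new connectionToken field on the GetSessionCluster model (payload is made up).
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

cluster = emr_models.GetSessionClusterResponseBodySessionCluster().from_map(
    {'connectionToken': 'example-token', 'displayReleaseVersion': 'example-version'}
)
print(cluster.connection_token)             # -> 'example-token'
print(cluster.to_map()['connectionToken'])  # serialized back under the wire name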
@@ -5013,11 +5022,6 @@ class GetTemplateRequest(TeaModel):
         self.region_id = region_id
         self.template_biz_id = template_biz_id
         # The template type.
-        #
-        # Valid values:
-        #
-        # * TASK
-        # * SESSION
         self.template_type = template_type
 
     def validate(self):
@@ -5058,7 +5062,7 @@ class GetTemplateResponseBody(TeaModel):
         request_id: str = None,
         success: bool = None,
     ):
-        # The data
+        # The returned data.
         self.data = data
         # * If the value of success was false, an error code was returned.
         # * If the value of success was true, a null value was returned.
@@ -5377,6 +5381,7 @@ class ListJobRunsRequest(TeaModel):
         self,
         creator: str = None,
         end_time: ListJobRunsRequestEndTime = None,
+        is_workflow: str = None,
         job_run_deployment_id: str = None,
         job_run_id: str = None,
         max_results: int = None,
@@ -5393,6 +5398,7 @@ class ListJobRunsRequest(TeaModel):
         self.creator = creator
         # The range of end time.
         self.end_time = end_time
+        self.is_workflow = is_workflow
         # The job run ID.
         self.job_run_deployment_id = job_run_deployment_id
         # The job ID.
@@ -5436,6 +5442,8 @@ class ListJobRunsRequest(TeaModel):
             result['creator'] = self.creator
         if self.end_time is not None:
             result['endTime'] = self.end_time.to_map()
+        if self.is_workflow is not None:
+            result['isWorkflow'] = self.is_workflow
         if self.job_run_deployment_id is not None:
             result['jobRunDeploymentId'] = self.job_run_deployment_id
         if self.job_run_id is not None:
@@ -5469,6 +5477,8 @@ class ListJobRunsRequest(TeaModel):
         if m.get('endTime') is not None:
             temp_model = ListJobRunsRequestEndTime()
             self.end_time = temp_model.from_map(m['endTime'])
+        if m.get('isWorkflow') is not None:
+            self.is_workflow = m.get('isWorkflow')
         if m.get('jobRunDeploymentId') is not None:
             self.job_run_deployment_id = m.get('jobRunDeploymentId')
         if m.get('jobRunId') is not None:
@@ -5503,6 +5513,7 @@ class ListJobRunsShrinkRequest(TeaModel):
         self,
         creator: str = None,
         end_time_shrink: str = None,
+        is_workflow: str = None,
         job_run_deployment_id: str = None,
         job_run_id: str = None,
         max_results: int = None,
@@ -5519,6 +5530,7 @@ class ListJobRunsShrinkRequest(TeaModel):
         self.creator = creator
         # The range of end time.
         self.end_time_shrink = end_time_shrink
+        self.is_workflow = is_workflow
         # The job run ID.
         self.job_run_deployment_id = job_run_deployment_id
         # The job ID.
@@ -5555,6 +5567,8 @@ class ListJobRunsShrinkRequest(TeaModel):
             result['creator'] = self.creator
         if self.end_time_shrink is not None:
             result['endTime'] = self.end_time_shrink
+        if self.is_workflow is not None:
+            result['isWorkflow'] = self.is_workflow
         if self.job_run_deployment_id is not None:
             result['jobRunDeploymentId'] = self.job_run_deployment_id
         if self.job_run_id is not None:
@@ -5585,6 +5599,8 @@ class ListJobRunsShrinkRequest(TeaModel):
             self.creator = m.get('creator')
         if m.get('endTime') is not None:
             self.end_time_shrink = m.get('endTime')
+        if m.get('isWorkflow') is not None:
+            self.is_workflow = m.get('isWorkflow')
         if m.get('jobRunDeploymentId') is not None:
             self.job_run_deployment_id = m.get('jobRunDeploymentId')
         if m.get('jobRunId') is not None:
@@ -5980,6 +5996,223 @@ class ListJobRunsResponse(TeaModel):
         return self
 
 
+class ListKyuubiServicesResponseBodyDataKyuubiServices(TeaModel):
+    def __init__(
+        self,
+        compute_instance: str = None,
+        create_time: str = None,
+        creator: str = None,
+        inner_endpoint: str = None,
+        kyuubi_configs: str = None,
+        kyuubi_service_id: str = None,
+        name: str = None,
+        public_endpoint: str = None,
+        queue: str = None,
+        release_version: str = None,
+        replica: int = None,
+        spark_configs: str = None,
+        start_time: str = None,
+        state: str = None,
+    ):
+        self.compute_instance = compute_instance
+        self.create_time = create_time
+        self.creator = creator
+        self.inner_endpoint = inner_endpoint
+        self.kyuubi_configs = kyuubi_configs
+        # KyuubiServer ID.
+        self.kyuubi_service_id = kyuubi_service_id
+        self.name = name
+        self.public_endpoint = public_endpoint
+        self.queue = queue
+        self.release_version = release_version
+        self.replica = replica
+        self.spark_configs = spark_configs
+        self.start_time = start_time
+        self.state = state
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.compute_instance is not None:
+            result['computeInstance'] = self.compute_instance
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.creator is not None:
+            result['creator'] = self.creator
+        if self.inner_endpoint is not None:
+            result['innerEndpoint'] = self.inner_endpoint
+        if self.kyuubi_configs is not None:
+            result['kyuubiConfigs'] = self.kyuubi_configs
+        if self.kyuubi_service_id is not None:
+            result['kyuubiServiceId'] = self.kyuubi_service_id
+        if self.name is not None:
+            result['name'] = self.name
+        if self.public_endpoint is not None:
+            result['publicEndpoint'] = self.public_endpoint
+        if self.queue is not None:
+            result['queue'] = self.queue
+        if self.release_version is not None:
+            result['releaseVersion'] = self.release_version
+        if self.replica is not None:
+            result['replica'] = self.replica
+        if self.spark_configs is not None:
+            result['sparkConfigs'] = self.spark_configs
+        if self.start_time is not None:
+            result['startTime'] = self.start_time
+        if self.state is not None:
+            result['state'] = self.state
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('computeInstance') is not None:
+            self.compute_instance = m.get('computeInstance')
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('creator') is not None:
+            self.creator = m.get('creator')
+        if m.get('innerEndpoint') is not None:
+            self.inner_endpoint = m.get('innerEndpoint')
+        if m.get('kyuubiConfigs') is not None:
+            self.kyuubi_configs = m.get('kyuubiConfigs')
+        if m.get('kyuubiServiceId') is not None:
+            self.kyuubi_service_id = m.get('kyuubiServiceId')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('publicEndpoint') is not None:
+            self.public_endpoint = m.get('publicEndpoint')
+        if m.get('queue') is not None:
+            self.queue = m.get('queue')
+        if m.get('releaseVersion') is not None:
+            self.release_version = m.get('releaseVersion')
+        if m.get('replica') is not None:
+            self.replica = m.get('replica')
+        if m.get('sparkConfigs') is not None:
+            self.spark_configs = m.get('sparkConfigs')
+        if m.get('startTime') is not None:
+            self.start_time = m.get('startTime')
+        if m.get('state') is not None:
+            self.state = m.get('state')
+        return self
+
+
+class ListKyuubiServicesResponseBodyData(TeaModel):
+    def __init__(
+        self,
+        kyuubi_services: List[ListKyuubiServicesResponseBodyDataKyuubiServices] = None,
+    ):
+        self.kyuubi_services = kyuubi_services
+
+    def validate(self):
+        if self.kyuubi_services:
+            for k in self.kyuubi_services:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['kyuubiServices'] = []
+        if self.kyuubi_services is not None:
+            for k in self.kyuubi_services:
+                result['kyuubiServices'].append(k.to_map() if k else None)
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.kyuubi_services = []
+        if m.get('kyuubiServices') is not None:
+            for k in m.get('kyuubiServices'):
+                temp_model = ListKyuubiServicesResponseBodyDataKyuubiServices()
+                self.kyuubi_services.append(temp_model.from_map(k))
+        return self
+
+
+class ListKyuubiServicesResponseBody(TeaModel):
+    def __init__(
+        self,
+        data: ListKyuubiServicesResponseBodyData = None,
+        request_id: str = None,
+    ):
+        self.data = data
+        self.request_id = request_id
+
+    def validate(self):
+        if self.data:
+            self.data.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.data is not None:
+            result['data'] = self.data.to_map()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('data') is not None:
+            temp_model = ListKyuubiServicesResponseBodyData()
+            self.data = temp_model.from_map(m['data'])
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        return self
+
+
+class ListKyuubiServicesResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: ListKyuubiServicesResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = ListKyuubiServicesResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class ListKyuubiSparkApplicationsRequestStartTime(TeaModel):
     def __init__(
         self,
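Because the new response wrappers above are plain TeaModel classes, they can be exercised offline as well. A small sketch deserializing a hand-written payload into the new ListKyuubiServices models; all field values are made up.

# Sketch: deserialize a made-up ListKyuubiServices payload into the new models.
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

payload = {
    'requestId': 'req-123',
    'data': {'kyuubiServices': [{'kyuubiServiceId': 'kyuubi-001', 'name': 'demo', 'replica': 1}]},
}
body = emr_models.ListKyuubiServicesResponseBody().from_map(payload)
print(body.data.kyuubi_services[0].kyuubi_service_id)  # -> 'kyuubi-001'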
@@ -6021,7 +6254,11 @@ class ListKyuubiSparkApplicationsRequest(TeaModel):
         application_id: str = None,
         application_name: str = None,
         max_results: int = None,
+        min_duration: int = None,
         next_token: str = None,
+        order_by: List[str] = None,
+        resource_queue_id: str = None,
+        sort: str = None,
         start_time: ListKyuubiSparkApplicationsRequestStartTime = None,
     ):
         # The ID of the application that is submitted by using a Kyuubi gateway.
@@ -6030,8 +6267,12 @@ class ListKyuubiSparkApplicationsRequest(TeaModel):
         self.application_name = application_name
         # The maximum number of entries to return.
         self.max_results = max_results
+        self.min_duration = min_duration
         # The pagination token that is used in the next request to retrieve a new page of results.
         self.next_token = next_token
+        self.order_by = order_by
+        self.resource_queue_id = resource_queue_id
+        self.sort = sort
         # The range of start time.
         self.start_time = start_time
 
@@ -6051,8 +6292,16 @@ class ListKyuubiSparkApplicationsRequest(TeaModel):
             result['applicationName'] = self.application_name
         if self.max_results is not None:
             result['maxResults'] = self.max_results
+        if self.min_duration is not None:
+            result['minDuration'] = self.min_duration
         if self.next_token is not None:
             result['nextToken'] = self.next_token
+        if self.order_by is not None:
+            result['orderBy'] = self.order_by
+        if self.resource_queue_id is not None:
+            result['resourceQueueId'] = self.resource_queue_id
+        if self.sort is not None:
+            result['sort'] = self.sort
         if self.start_time is not None:
             result['startTime'] = self.start_time.to_map()
         return result
@@ -6065,8 +6314,16 @@ class ListKyuubiSparkApplicationsRequest(TeaModel):
             self.application_name = m.get('applicationName')
         if m.get('maxResults') is not None:
             self.max_results = m.get('maxResults')
+        if m.get('minDuration') is not None:
+            self.min_duration = m.get('minDuration')
         if m.get('nextToken') is not None:
             self.next_token = m.get('nextToken')
+        if m.get('orderBy') is not None:
+            self.order_by = m.get('orderBy')
+        if m.get('resourceQueueId') is not None:
+            self.resource_queue_id = m.get('resourceQueueId')
+        if m.get('sort') is not None:
+            self.sort = m.get('sort')
         if m.get('startTime') is not None:
             temp_model = ListKyuubiSparkApplicationsRequestStartTime()
             self.start_time = temp_model.from_map(m['startTime'])
@@ -6079,7 +6336,11 @@ class ListKyuubiSparkApplicationsShrinkRequest(TeaModel):
         application_id: str = None,
         application_name: str = None,
         max_results: int = None,
+        min_duration: int = None,
         next_token: str = None,
+        order_by_shrink: str = None,
+        resource_queue_id: str = None,
+        sort: str = None,
         start_time_shrink: str = None,
     ):
         # The ID of the application that is submitted by using a Kyuubi gateway.
@@ -6088,8 +6349,12 @@ class ListKyuubiSparkApplicationsShrinkRequest(TeaModel):
         self.application_name = application_name
         # The maximum number of entries to return.
         self.max_results = max_results
+        self.min_duration = min_duration
         # The pagination token that is used in the next request to retrieve a new page of results.
         self.next_token = next_token
+        self.order_by_shrink = order_by_shrink
+        self.resource_queue_id = resource_queue_id
+        self.sort = sort
         # The range of start time.
         self.start_time_shrink = start_time_shrink
 
@@ -6108,8 +6373,16 @@ class ListKyuubiSparkApplicationsShrinkRequest(TeaModel):
             result['applicationName'] = self.application_name
         if self.max_results is not None:
             result['maxResults'] = self.max_results
+        if self.min_duration is not None:
+            result['minDuration'] = self.min_duration
         if self.next_token is not None:
             result['nextToken'] = self.next_token
+        if self.order_by_shrink is not None:
+            result['orderBy'] = self.order_by_shrink
+        if self.resource_queue_id is not None:
+            result['resourceQueueId'] = self.resource_queue_id
+        if self.sort is not None:
+            result['sort'] = self.sort
         if self.start_time_shrink is not None:
             result['startTime'] = self.start_time_shrink
         return result
@@ -6122,8 +6395,16 @@ class ListKyuubiSparkApplicationsShrinkRequest(TeaModel):
             self.application_name = m.get('applicationName')
         if m.get('maxResults') is not None:
             self.max_results = m.get('maxResults')
+        if m.get('minDuration') is not None:
+            self.min_duration = m.get('minDuration')
         if m.get('nextToken') is not None:
             self.next_token = m.get('nextToken')
+        if m.get('orderBy') is not None:
+            self.order_by_shrink = m.get('orderBy')
+        if m.get('resourceQueueId') is not None:
+            self.resource_queue_id = m.get('resourceQueueId')
+        if m.get('sort') is not None:
+            self.sort = m.get('sort')
         if m.get('startTime') is not None:
             self.start_time_shrink = m.get('startTime')
         return self
@@ -6329,6 +6610,208 @@ class ListKyuubiSparkApplicationsResponse(TeaModel):
         return self
 
 
+class ListKyuubiTokenRequest(TeaModel):
+    def __init__(
+        self,
+        region_id: str = None,
+    ):
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class ListKyuubiTokenResponseBodyDataTokens(TeaModel):
+    def __init__(
+        self,
+        create_time: int = None,
+        created_by: str = None,
+        expire_time: int = None,
+        last_used_time: int = None,
+        name: str = None,
+        token: str = None,
+        token_id: str = None,
+    ):
+        self.create_time = create_time
+        self.created_by = created_by
+        self.expire_time = expire_time
+        self.last_used_time = last_used_time
+        self.name = name
+        self.token = token
+        # Token ID.
+        self.token_id = token_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.created_by is not None:
+            result['createdBy'] = self.created_by
+        if self.expire_time is not None:
+            result['expireTime'] = self.expire_time
+        if self.last_used_time is not None:
+            result['lastUsedTime'] = self.last_used_time
+        if self.name is not None:
+            result['name'] = self.name
+        if self.token is not None:
+            result['token'] = self.token
+        if self.token_id is not None:
+            result['tokenId'] = self.token_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('createdBy') is not None:
+            self.created_by = m.get('createdBy')
+        if m.get('expireTime') is not None:
+            self.expire_time = m.get('expireTime')
+        if m.get('lastUsedTime') is not None:
+            self.last_used_time = m.get('lastUsedTime')
+        if m.get('name') is not None:
+            self.name = m.get('name')
+        if m.get('token') is not None:
+            self.token = m.get('token')
+        if m.get('tokenId') is not None:
+            self.token_id = m.get('tokenId')
+        return self
+
+
+class ListKyuubiTokenResponseBodyData(TeaModel):
+    def __init__(
+        self,
+        tokens: List[ListKyuubiTokenResponseBodyDataTokens] = None,
+    ):
+        self.tokens = tokens
+
+    def validate(self):
+        if self.tokens:
+            for k in self.tokens:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['tokens'] = []
+        if self.tokens is not None:
+            for k in self.tokens:
+                result['tokens'].append(k.to_map() if k else None)
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.tokens = []
+        if m.get('tokens') is not None:
+            for k in m.get('tokens'):
+                temp_model = ListKyuubiTokenResponseBodyDataTokens()
+                self.tokens.append(temp_model.from_map(k))
+        return self
+
+
+class ListKyuubiTokenResponseBody(TeaModel):
+    def __init__(
+        self,
+        data: ListKyuubiTokenResponseBodyData = None,
+        request_id: str = None,
+    ):
+        self.data = data
+        self.request_id = request_id
+
+    def validate(self):
+        if self.data:
+            self.data.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.data is not None:
+            result['data'] = self.data.to_map()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('data') is not None:
+            temp_model = ListKyuubiTokenResponseBodyData()
+            self.data = temp_model.from_map(m['data'])
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        return self
+
+
+class ListKyuubiTokenResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: ListKyuubiTokenResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = ListKyuubiTokenResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class ListLogContentsRequest(TeaModel):
     def __init__(
         self,
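Each token entry carries createTime, expireTime, and lastUsedTime as integers. A small helper sketch that filters out expired tokens from an already-fetched response body; interpreting these integers as epoch milliseconds is an assumption, not something this diff states.

# Sketch: keep only non-expired tokens (epoch-millisecond interpretation is an assumption).
import time

def active_tokens(body):
    # body is a ListKyuubiTokenResponseBody; tokens without an expiry are kept.
    now_ms = int(time.time() * 1000)
    tokens = (body.data.tokens if body.data else None) or []
    return [t for t in tokens if t.expire_time is None or t.expire_time > now_ms]

# usage sketch: active_tokens(client.list_kyuubi_token('<workspace-id>', '<kyuubi-service-id>', request).body)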
alibabacloud_emr_serverless_spark20230808.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud-emr-serverless-spark20230808
-Version: 1.10.2
+Version: 1.11.0
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
setup.py
@@ -24,7 +24,7 @@ from setuptools import setup, find_packages
 """
 setup module for alibabacloud_emr-serverless-spark20230808.
 
-Created on
+Created on 26/06/2025
 
 @author: Alibaba Cloud SDK
 """
@@ -40,7 +40,7 @@ REQUIRES = [
     "alibabacloud_tea_util>=0.3.13, <1.0.0",
     "alibabacloud_tea_openapi>=0.3.15, <1.0.0",
     "alibabacloud_openapi_util>=0.2.2, <1.0.0",
-    "alibabacloud_endpoint_util>=0.0.
+    "alibabacloud_endpoint_util>=0.0.4, <1.0.0"
 ]
 
 LONG_DESCRIPTION = ''
alibabacloud_emr-serverless-spark20230808-1.10.2/alibabacloud_emr_serverless_spark20230808/__init__.py (removed)
@@ -1 +0,0 @@
-__version__ = '1.10.2'
The remaining files (LICENSE, MANIFEST.in, README-CN.md, README.md, SOURCES.txt, dependency_links.txt, top_level.txt, setup.cfg) are unchanged between 1.10.2 and 1.11.0.