alibabacloud-emr-serverless-spark20230808 1.15.0__py3-none-any.whl → 1.16.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of alibabacloud-emr-serverless-spark20230808 has been flagged as potentially problematic.
- alibabacloud_emr_serverless_spark20230808/__init__.py +1 -1
- alibabacloud_emr_serverless_spark20230808/client.py +348 -0
- alibabacloud_emr_serverless_spark20230808/models.py +685 -1
- {alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info → alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info}/METADATA +1 -1
- alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/RECORD +8 -0
- alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info/RECORD +0 -8
- {alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info → alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info}/LICENSE +0 -0
- {alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info → alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info}/WHEEL +0 -0
- {alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info → alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info}/top_level.txt +0 -0
alibabacloud_emr_serverless_spark20230808/__init__.py

@@ -1 +1 @@
-__version__ = '1.15.0'
+__version__ = '1.16.1'
alibabacloud_emr_serverless_spark20230808/client.py

@@ -2981,6 +2981,246 @@ class Client(OpenApiClient):
         headers = {}
         return await self.grant_role_to_users_with_options_async(request, headers, runtime)
 
+    def list_catalogs_with_options(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListCatalogsRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListCatalogsResponse:
+        """
+        @summary Queries the list of data catalogs.
+
+        @param request: ListCatalogsRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListCatalogsResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.environment):
+            query['environment'] = request.environment
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListCatalogs',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/catalogs',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListCatalogsResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def list_catalogs_with_options_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListCatalogsRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListCatalogsResponse:
+        """
+        @summary Queries the list of data catalogs.
+
+        @param request: ListCatalogsRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListCatalogsResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.environment):
+            query['environment'] = request.environment
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListCatalogs',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/catalogs',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListCatalogsResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def list_catalogs(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListCatalogsRequest,
+    ) -> emr_serverless_spark_20230808_models.ListCatalogsResponse:
+        """
+        @summary Queries the list of data catalogs.
+
+        @param request: ListCatalogsRequest
+        @return: ListCatalogsResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.list_catalogs_with_options(workspace_id, request, headers, runtime)
+
+    async def list_catalogs_async(
+        self,
+        workspace_id: str,
+        request: emr_serverless_spark_20230808_models.ListCatalogsRequest,
+    ) -> emr_serverless_spark_20230808_models.ListCatalogsResponse:
+        """
+        @summary Queries the list of data catalogs.
+
+        @param request: ListCatalogsRequest
+        @return: ListCatalogsResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.list_catalogs_with_options_async(workspace_id, request, headers, runtime)
+
+    def list_job_executors_with_options(
+        self,
+        workspace_id: str,
+        job_run_id: str,
+        request: emr_serverless_spark_20230808_models.ListJobExecutorsRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListJobExecutorsResponse:
+        """
+        @summary Lists the executors of a job run.
+
+        @param request: ListJobExecutorsRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListJobExecutorsResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.executor_type):
+            query['executorType'] = request.executor_type
+        if not UtilClient.is_unset(request.max_results):
+            query['maxResults'] = request.max_results
+        if not UtilClient.is_unset(request.next_token):
+            query['nextToken'] = request.next_token
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        if not UtilClient.is_unset(request.status):
+            query['status'] = request.status
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListJobExecutors',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/jobRuns/{OpenApiUtilClient.get_encode_param(job_run_id)}/executors',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListJobExecutorsResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def list_job_executors_with_options_async(
+        self,
+        workspace_id: str,
+        job_run_id: str,
+        request: emr_serverless_spark_20230808_models.ListJobExecutorsRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListJobExecutorsResponse:
+        """
+        @summary Lists the executors of a job run.
+
+        @param request: ListJobExecutorsRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListJobExecutorsResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.executor_type):
+            query['executorType'] = request.executor_type
+        if not UtilClient.is_unset(request.max_results):
+            query['maxResults'] = request.max_results
+        if not UtilClient.is_unset(request.next_token):
+            query['nextToken'] = request.next_token
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        if not UtilClient.is_unset(request.status):
+            query['status'] = request.status
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListJobExecutors',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/v1/workspaces/{OpenApiUtilClient.get_encode_param(workspace_id)}/jobRuns/{OpenApiUtilClient.get_encode_param(job_run_id)}/executors',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListJobExecutorsResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def list_job_executors(
+        self,
+        workspace_id: str,
+        job_run_id: str,
+        request: emr_serverless_spark_20230808_models.ListJobExecutorsRequest,
+    ) -> emr_serverless_spark_20230808_models.ListJobExecutorsResponse:
+        """
+        @summary Lists the executors of a job run.
+
+        @param request: ListJobExecutorsRequest
+        @return: ListJobExecutorsResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.list_job_executors_with_options(workspace_id, job_run_id, request, headers, runtime)
+
+    async def list_job_executors_async(
+        self,
+        workspace_id: str,
+        job_run_id: str,
+        request: emr_serverless_spark_20230808_models.ListJobExecutorsRequest,
+    ) -> emr_serverless_spark_20230808_models.ListJobExecutorsResponse:
+        """
+        @summary Lists the executors of a job run.
+
+        @param request: ListJobExecutorsRequest
+        @return: ListJobExecutorsResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.list_job_executors_with_options_async(workspace_id, job_run_id, request, headers, runtime)
+
     def list_job_runs_with_options(
         self,
         workspace_id: str,
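The hunk above only adds generated plumbing. For orientation, here is a minimal usage sketch of the new ListCatalogs and ListJobExecutors operations; the Config fields, endpoint, and all IDs are placeholders, not values taken from this diff.

```python
# Hedged sketch: calling the new operations through the generated client.
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_emr_serverless_spark20230808.client import Client
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

config = open_api_models.Config(
    access_key_id='<your-access-key-id>',          # placeholder
    access_key_secret='<your-access-key-secret>',  # placeholder
    endpoint='emr-serverless-spark.cn-hangzhou.aliyuncs.com',  # illustrative endpoint
)
client = Client(config)

# List the data catalogs visible to a workspace.
catalogs_resp = client.list_catalogs(
    '<workspace-id>',
    emr_models.ListCatalogsRequest(region_id='cn-hangzhou'),
)
for catalog in catalogs_resp.body.catalogs or []:
    print(catalog.catalog_id, catalog.catalog_type)

# Inspect the executors of one job run.
executors_resp = client.list_job_executors(
    '<workspace-id>',
    '<job-run-id>',
    emr_models.ListJobExecutorsRequest(max_results=20),
)
for executor in executors_resp.body.exexutors or []:  # field is spelled 'exexutors' in the generated model
    print(executor.executor_id, executor.status)
```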
@@ -4355,6 +4595,114 @@ class Client(OpenApiClient):
         headers = {}
         return await self.list_sql_statement_contents_with_options_async(workspace_id, request, headers, runtime)
 
+    def list_template_with_options(
+        self,
+        workspace_biz_id: str,
+        request: emr_serverless_spark_20230808_models.ListTemplateRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListTemplateResponse:
+        """
+        @summary Queries the list of job templates.
+
+        @param request: ListTemplateRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListTemplateResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListTemplate',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_biz_id)}/template/listing',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListTemplateResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def list_template_with_options_async(
+        self,
+        workspace_biz_id: str,
+        request: emr_serverless_spark_20230808_models.ListTemplateRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> emr_serverless_spark_20230808_models.ListTemplateResponse:
+        """
+        @summary Queries the list of job templates.
+
+        @param request: ListTemplateRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: ListTemplateResponse
+        """
+        UtilClient.validate_model(request)
+        query = {}
+        if not UtilClient.is_unset(request.region_id):
+            query['regionId'] = request.region_id
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            query=OpenApiUtilClient.query(query)
+        )
+        params = open_api_models.Params(
+            action='ListTemplate',
+            version='2023-08-08',
+            protocol='HTTPS',
+            pathname=f'/api/interactive/v1/workspace/{OpenApiUtilClient.get_encode_param(workspace_biz_id)}/template/listing',
+            method='GET',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='json',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            emr_serverless_spark_20230808_models.ListTemplateResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def list_template(
+        self,
+        workspace_biz_id: str,
+        request: emr_serverless_spark_20230808_models.ListTemplateRequest,
+    ) -> emr_serverless_spark_20230808_models.ListTemplateResponse:
+        """
+        @summary Queries the list of job templates.
+
+        @param request: ListTemplateRequest
+        @return: ListTemplateResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.list_template_with_options(workspace_biz_id, request, headers, runtime)
+
+    async def list_template_async(
+        self,
+        workspace_biz_id: str,
+        request: emr_serverless_spark_20230808_models.ListTemplateRequest,
+    ) -> emr_serverless_spark_20230808_models.ListTemplateResponse:
+        """
+        @summary Queries the list of job templates.
+
+        @param request: ListTemplateRequest
+        @return: ListTemplateResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.list_template_with_options_async(workspace_biz_id, request, headers, runtime)
+
     def list_workspace_queues_with_options(
         self,
         workspace_id: str,
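A similar sketch for the new ListTemplate operation, this time through the async convenience wrapper; the client object is assumed to be configured as in the previous example, and the workspace business ID is a placeholder.

```python
# Hedged sketch: async usage of the new ListTemplate operation.
import asyncio
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

async def show_templates(client, workspace_biz_id: str):
    resp = await client.list_template_async(
        workspace_biz_id,
        emr_models.ListTemplateRequest(region_id='cn-hangzhou'),
    )
    if resp.body.success:
        for template in resp.body.data or []:
            print(template.to_map())

# asyncio.run(show_templates(client, '<workspace-biz-id>'))
```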
alibabacloud_emr_serverless_spark20230808/models.py

@@ -4523,8 +4523,10 @@ class EditWorkspaceQueueRequestResourceSpec(TeaModel):
     def __init__(
         self,
         cu: int = None,
+        max_cu: int = None,
     ):
         self.cu = cu
+        self.max_cu = max_cu
 
     def validate(self):
         pass

@@ -4537,12 +4539,16 @@ class EditWorkspaceQueueRequestResourceSpec(TeaModel):
         result = dict()
         if self.cu is not None:
             result['cu'] = self.cu
+        if self.max_cu is not None:
+            result['maxCu'] = self.max_cu
         return result
 
     def from_map(self, m: dict = None):
         m = m or dict()
         if m.get('cu') is not None:
             self.cu = m.get('cu')
+        if m.get('maxCu') is not None:
+            self.max_cu = m.get('maxCu')
         return self
 
 
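The two hunks above add a maxCu field next to cu on EditWorkspaceQueueRequestResourceSpec. A small sketch of the round trip through the generated serializers; the expected dict shape is inferred from the generated code shown above.

```python
# Hedged sketch: the new maxCu field rides along with cu through to_map()/from_map().
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

spec = emr_models.EditWorkspaceQueueRequestResourceSpec(cu=16, max_cu=64)
print(spec.to_map())  # expected to include {'cu': 16, 'maxCu': 64}

restored = emr_models.EditWorkspaceQueueRequestResourceSpec().from_map({'cu': 16, 'maxCu': 64})
print(restored.max_cu)  # 64
```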
@@ -5067,6 +5073,7 @@ class GetJobRunResponseBodyJobRun(TeaModel):
         job_run_id: str = None,
         log: RunLog = None,
         name: str = None,
+        notebook_access_url: str = None,
         release_version: str = None,
         resource_owner_id: str = None,
         resource_queue_id: str = None,

@@ -5103,6 +5110,7 @@ class GetJobRunResponseBodyJobRun(TeaModel):
         self.log = log
         # The job name.
         self.name = name
+        self.notebook_access_url = notebook_access_url
         # The version of the Spark engine on which the job runs.
         self.release_version = release_version
         # The ID of the user who created the job.

@@ -5164,6 +5172,8 @@ class GetJobRunResponseBodyJobRun(TeaModel):
             result['log'] = self.log.to_map()
         if self.name is not None:
             result['name'] = self.name
+        if self.notebook_access_url is not None:
+            result['notebookAccessUrl'] = self.notebook_access_url
         if self.release_version is not None:
             result['releaseVersion'] = self.release_version
         if self.resource_owner_id is not None:

@@ -5213,6 +5223,8 @@ class GetJobRunResponseBodyJobRun(TeaModel):
             self.log = temp_model.from_map(m['log'])
         if m.get('name') is not None:
             self.name = m.get('name')
+        if m.get('notebookAccessUrl') is not None:
+            self.notebook_access_url = m.get('notebookAccessUrl')
         if m.get('releaseVersion') is not None:
             self.release_version = m.get('releaseVersion')
         if m.get('resourceOwnerId') is not None:
@@ -7017,6 +7029,519 @@ class GrantRoleToUsersResponse(TeaModel):
         return self
 
 
+class ListCatalogsRequest(TeaModel):
+    def __init__(
+        self,
+        environment: str = None,
+        region_id: str = None,
+    ):
+        self.environment = environment
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.environment is not None:
+            result['environment'] = self.environment
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('environment') is not None:
+            self.environment = m.get('environment')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class ListCatalogsResponseBodyCatalogs(TeaModel):
+    def __init__(
+        self,
+        catalog_id: str = None,
+        catalog_provider: str = None,
+        catalog_type: str = None,
+        environments: List[str] = None,
+        extras: Dict[str, str] = None,
+        gmt_create: int = None,
+        gmt_modified: int = None,
+        resource_owner_id: str = None,
+        workspace_id: str = None,
+    ):
+        # regionId.
+        self.catalog_id = catalog_id
+        self.catalog_provider = catalog_provider
+        self.catalog_type = catalog_type
+        self.environments = environments
+        self.extras = extras
+        self.gmt_create = gmt_create
+        self.gmt_modified = gmt_modified
+        self.resource_owner_id = resource_owner_id
+        # The workspace ID.
+        self.workspace_id = workspace_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.catalog_id is not None:
+            result['catalogId'] = self.catalog_id
+        if self.catalog_provider is not None:
+            result['catalogProvider'] = self.catalog_provider
+        if self.catalog_type is not None:
+            result['catalogType'] = self.catalog_type
+        if self.environments is not None:
+            result['environments'] = self.environments
+        if self.extras is not None:
+            result['extras'] = self.extras
+        if self.gmt_create is not None:
+            result['gmtCreate'] = self.gmt_create
+        if self.gmt_modified is not None:
+            result['gmtModified'] = self.gmt_modified
+        if self.resource_owner_id is not None:
+            result['resourceOwnerId'] = self.resource_owner_id
+        if self.workspace_id is not None:
+            result['workspaceId'] = self.workspace_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('catalogId') is not None:
+            self.catalog_id = m.get('catalogId')
+        if m.get('catalogProvider') is not None:
+            self.catalog_provider = m.get('catalogProvider')
+        if m.get('catalogType') is not None:
+            self.catalog_type = m.get('catalogType')
+        if m.get('environments') is not None:
+            self.environments = m.get('environments')
+        if m.get('extras') is not None:
+            self.extras = m.get('extras')
+        if m.get('gmtCreate') is not None:
+            self.gmt_create = m.get('gmtCreate')
+        if m.get('gmtModified') is not None:
+            self.gmt_modified = m.get('gmtModified')
+        if m.get('resourceOwnerId') is not None:
+            self.resource_owner_id = m.get('resourceOwnerId')
+        if m.get('workspaceId') is not None:
+            self.workspace_id = m.get('workspaceId')
+        return self
+
+
+class ListCatalogsResponseBody(TeaModel):
+    def __init__(
+        self,
+        catalogs: List[ListCatalogsResponseBodyCatalogs] = None,
+        max_results: int = None,
+        next_token: str = None,
+        request_id: str = None,
+        total_count: int = None,
+    ):
+        self.catalogs = catalogs
+        # The maximum number of entries returned in a single call.
+        self.max_results = max_results
+        # The token for the next page.
+        self.next_token = next_token
+        # The request ID.
+        self.request_id = request_id
+        # The total number of entries.
+        self.total_count = total_count
+
+    def validate(self):
+        if self.catalogs:
+            for k in self.catalogs:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['catalogs'] = []
+        if self.catalogs is not None:
+            for k in self.catalogs:
+                result['catalogs'].append(k.to_map() if k else None)
+        if self.max_results is not None:
+            result['maxResults'] = self.max_results
+        if self.next_token is not None:
+            result['nextToken'] = self.next_token
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.total_count is not None:
+            result['totalCount'] = self.total_count
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.catalogs = []
+        if m.get('catalogs') is not None:
+            for k in m.get('catalogs'):
+                temp_model = ListCatalogsResponseBodyCatalogs()
+                self.catalogs.append(temp_model.from_map(k))
+        if m.get('maxResults') is not None:
+            self.max_results = m.get('maxResults')
+        if m.get('nextToken') is not None:
+            self.next_token = m.get('nextToken')
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('totalCount') is not None:
+            self.total_count = m.get('totalCount')
+        return self
+
+
+class ListCatalogsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: ListCatalogsResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = ListCatalogsResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
+class ListJobExecutorsRequest(TeaModel):
+    def __init__(
+        self,
+        executor_type: str = None,
+        max_results: int = None,
+        next_token: str = None,
+        region_id: str = None,
+        status: str = None,
+    ):
+        self.executor_type = executor_type
+        self.max_results = max_results
+        self.next_token = next_token
+        self.region_id = region_id
+        self.status = status
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.executor_type is not None:
+            result['executorType'] = self.executor_type
+        if self.max_results is not None:
+            result['maxResults'] = self.max_results
+        if self.next_token is not None:
+            result['nextToken'] = self.next_token
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        if self.status is not None:
+            result['status'] = self.status
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('executorType') is not None:
+            self.executor_type = m.get('executorType')
+        if m.get('maxResults') is not None:
+            self.max_results = m.get('maxResults')
+        if m.get('nextToken') is not None:
+            self.next_token = m.get('nextToken')
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        if m.get('status') is not None:
+            self.status = m.get('status')
+        return self
+
+
+class ListJobExecutorsResponseBodyExexutors(TeaModel):
+    def __init__(
+        self,
+        active_tasks: int = None,
+        add_time: int = None,
+        completed_tasks: int = None,
+        disk_used: int = None,
+        executor_id: str = None,
+        executor_type: str = None,
+        failed_tasks: int = None,
+        host_port: str = None,
+        job_run_id: str = None,
+        max_memory: int = None,
+        memory_used: int = None,
+        rdd_blocks: int = None,
+        status: str = None,
+        total_cores: int = None,
+        total_duration: int = None,
+        total_gctime: int = None,
+        total_input_bytes: int = None,
+        total_shuffle_read: int = None,
+        total_shuffle_write: int = None,
+        total_tasks: int = None,
+        workspace_id: str = None,
+    ):
+        self.active_tasks = active_tasks
+        self.add_time = add_time
+        self.completed_tasks = completed_tasks
+        self.disk_used = disk_used
+        self.executor_id = executor_id
+        self.executor_type = executor_type
+        self.failed_tasks = failed_tasks
+        self.host_port = host_port
+        self.job_run_id = job_run_id
+        self.max_memory = max_memory
+        self.memory_used = memory_used
+        self.rdd_blocks = rdd_blocks
+        self.status = status
+        self.total_cores = total_cores
+        self.total_duration = total_duration
+        self.total_gctime = total_gctime
+        self.total_input_bytes = total_input_bytes
+        self.total_shuffle_read = total_shuffle_read
+        self.total_shuffle_write = total_shuffle_write
+        self.total_tasks = total_tasks
+        self.workspace_id = workspace_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.active_tasks is not None:
+            result['activeTasks'] = self.active_tasks
+        if self.add_time is not None:
+            result['addTime'] = self.add_time
+        if self.completed_tasks is not None:
+            result['completedTasks'] = self.completed_tasks
+        if self.disk_used is not None:
+            result['diskUsed'] = self.disk_used
+        if self.executor_id is not None:
+            result['executorId'] = self.executor_id
+        if self.executor_type is not None:
+            result['executorType'] = self.executor_type
+        if self.failed_tasks is not None:
+            result['failedTasks'] = self.failed_tasks
+        if self.host_port is not None:
+            result['hostPort'] = self.host_port
+        if self.job_run_id is not None:
+            result['jobRunId'] = self.job_run_id
+        if self.max_memory is not None:
+            result['maxMemory'] = self.max_memory
+        if self.memory_used is not None:
+            result['memoryUsed'] = self.memory_used
+        if self.rdd_blocks is not None:
+            result['rddBlocks'] = self.rdd_blocks
+        if self.status is not None:
+            result['status'] = self.status
+        if self.total_cores is not None:
+            result['totalCores'] = self.total_cores
+        if self.total_duration is not None:
+            result['totalDuration'] = self.total_duration
+        if self.total_gctime is not None:
+            result['totalGCTime'] = self.total_gctime
+        if self.total_input_bytes is not None:
+            result['totalInputBytes'] = self.total_input_bytes
+        if self.total_shuffle_read is not None:
+            result['totalShuffleRead'] = self.total_shuffle_read
+        if self.total_shuffle_write is not None:
+            result['totalShuffleWrite'] = self.total_shuffle_write
+        if self.total_tasks is not None:
+            result['totalTasks'] = self.total_tasks
+        if self.workspace_id is not None:
+            result['workspaceId'] = self.workspace_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('activeTasks') is not None:
+            self.active_tasks = m.get('activeTasks')
+        if m.get('addTime') is not None:
+            self.add_time = m.get('addTime')
+        if m.get('completedTasks') is not None:
+            self.completed_tasks = m.get('completedTasks')
+        if m.get('diskUsed') is not None:
+            self.disk_used = m.get('diskUsed')
+        if m.get('executorId') is not None:
+            self.executor_id = m.get('executorId')
+        if m.get('executorType') is not None:
+            self.executor_type = m.get('executorType')
+        if m.get('failedTasks') is not None:
+            self.failed_tasks = m.get('failedTasks')
+        if m.get('hostPort') is not None:
+            self.host_port = m.get('hostPort')
+        if m.get('jobRunId') is not None:
+            self.job_run_id = m.get('jobRunId')
+        if m.get('maxMemory') is not None:
+            self.max_memory = m.get('maxMemory')
+        if m.get('memoryUsed') is not None:
+            self.memory_used = m.get('memoryUsed')
+        if m.get('rddBlocks') is not None:
+            self.rdd_blocks = m.get('rddBlocks')
+        if m.get('status') is not None:
+            self.status = m.get('status')
+        if m.get('totalCores') is not None:
+            self.total_cores = m.get('totalCores')
+        if m.get('totalDuration') is not None:
+            self.total_duration = m.get('totalDuration')
+        if m.get('totalGCTime') is not None:
+            self.total_gctime = m.get('totalGCTime')
+        if m.get('totalInputBytes') is not None:
+            self.total_input_bytes = m.get('totalInputBytes')
+        if m.get('totalShuffleRead') is not None:
+            self.total_shuffle_read = m.get('totalShuffleRead')
+        if m.get('totalShuffleWrite') is not None:
+            self.total_shuffle_write = m.get('totalShuffleWrite')
+        if m.get('totalTasks') is not None:
+            self.total_tasks = m.get('totalTasks')
+        if m.get('workspaceId') is not None:
+            self.workspace_id = m.get('workspaceId')
+        return self
+
+
+class ListJobExecutorsResponseBody(TeaModel):
+    def __init__(
+        self,
+        exexutors: List[ListJobExecutorsResponseBodyExexutors] = None,
+        max_results: int = None,
+        next_token: str = None,
+        request_id: str = None,
+        total_count: int = None,
+    ):
+        self.exexutors = exexutors
+        self.max_results = max_results
+        self.next_token = next_token
+        # Id of the request
+        self.request_id = request_id
+        self.total_count = total_count
+
+    def validate(self):
+        if self.exexutors:
+            for k in self.exexutors:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['exexutors'] = []
+        if self.exexutors is not None:
+            for k in self.exexutors:
+                result['exexutors'].append(k.to_map() if k else None)
+        if self.max_results is not None:
+            result['maxResults'] = self.max_results
+        if self.next_token is not None:
+            result['nextToken'] = self.next_token
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.total_count is not None:
+            result['totalCount'] = self.total_count
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.exexutors = []
+        if m.get('exexutors') is not None:
+            for k in m.get('exexutors'):
+                temp_model = ListJobExecutorsResponseBodyExexutors()
+                self.exexutors.append(temp_model.from_map(k))
+        if m.get('maxResults') is not None:
+            self.max_results = m.get('maxResults')
+        if m.get('nextToken') is not None:
+            self.next_token = m.get('nextToken')
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('totalCount') is not None:
+            self.total_count = m.get('totalCount')
+        return self
+
+
+class ListJobExecutorsResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: ListJobExecutorsResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = ListJobExecutorsResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class ListJobRunsRequestEndTime(TeaModel):
     def __init__(
         self,
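The new response models above follow the usual TeaModel pattern: nested lists are rebuilt element by element in from_map(). A short sketch with a fabricated payload, useful for example when replaying a captured API response in tests; all field values below are invented for illustration.

```python
# Hedged sketch: deserializing a captured ListCatalogs response body.
from alibabacloud_emr_serverless_spark20230808 import models as emr_models

payload = {
    'requestId': 'example-request-id',
    'totalCount': 1,
    'catalogs': [{'catalogId': 'c-123', 'catalogType': 'dlf', 'workspaceId': 'w-456'}],
}
body = emr_models.ListCatalogsResponseBody().from_map(payload)
print(body.catalogs[0].catalog_id)  # c-123
```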
@@ -7484,6 +8009,7 @@ class ListJobRunsResponseBodyJobRuns(TeaModel):
         mb_seconds: int = None,
         name: str = None,
         release_version: str = None,
+        resource_queue_id: str = None,
         state: str = None,
         state_change_reason: ListJobRunsResponseBodyJobRunsStateChangeReason = None,
         submit_time: int = None,

@@ -7526,6 +8052,7 @@ class ListJobRunsResponseBodyJobRuns(TeaModel):
         self.name = name
         # The version of Spark on which the jobs run.
         self.release_version = release_version
+        self.resource_queue_id = resource_queue_id
         # The job state.
         self.state = state
         # The reason of the job status change.

@@ -7589,6 +8116,8 @@ class ListJobRunsResponseBodyJobRuns(TeaModel):
             result['name'] = self.name
         if self.release_version is not None:
             result['releaseVersion'] = self.release_version
+        if self.resource_queue_id is not None:
+            result['resourceQueueId'] = self.resource_queue_id
         if self.state is not None:
             result['state'] = self.state
         if self.state_change_reason is not None:

@@ -7640,6 +8169,8 @@ class ListJobRunsResponseBodyJobRuns(TeaModel):
             self.name = m.get('name')
         if m.get('releaseVersion') is not None:
             self.release_version = m.get('releaseVersion')
+        if m.get('resourceQueueId') is not None:
+            self.resource_queue_id = m.get('resourceQueueId')
         if m.get('state') is not None:
             self.state = m.get('state')
         if m.get('stateChangeReason') is not None:
@@ -8194,9 +8725,11 @@ class ListKyuubiSparkApplicationsResponseBodyApplications(TeaModel):
         cu_hours: float = None,
         end_time: str = None,
         exit_reason: str = None,
+        kyuubi_service_id: str = None,
         latest_sql_statement_status: str = None,
         mb_seconds: int = None,
         resource_queue_id: str = None,
+        run_log: RunLog = None,
         start_time: str = None,
         state: str = None,
         vcore_seconds: int = None,

@@ -8211,11 +8744,13 @@ class ListKyuubiSparkApplicationsResponseBodyApplications(TeaModel):
         # The time when the task ended.
         self.end_time = end_time
         self.exit_reason = exit_reason
+        self.kyuubi_service_id = kyuubi_service_id
         self.latest_sql_statement_status = latest_sql_statement_status
         # The total amount of memory allocated to the job multiplied by the running duration (seconds).
         self.mb_seconds = mb_seconds
         # The name of the resource queue on which the Spark jobs run.
         self.resource_queue_id = resource_queue_id
+        self.run_log = run_log
         # The time when the task started.
         self.start_time = start_time
         # The status of the Spark application.
@@ -8230,7 +8765,8 @@ class ListKyuubiSparkApplicationsResponseBodyApplications(TeaModel):
         self.web_ui = web_ui
 
     def validate(self):
-        pass
+        if self.run_log:
+            self.run_log.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -8248,12 +8784,16 @@ class ListKyuubiSparkApplicationsResponseBodyApplications(TeaModel):
             result['endTime'] = self.end_time
         if self.exit_reason is not None:
             result['exitReason'] = self.exit_reason
+        if self.kyuubi_service_id is not None:
+            result['kyuubiServiceId'] = self.kyuubi_service_id
         if self.latest_sql_statement_status is not None:
             result['latestSqlStatementStatus'] = self.latest_sql_statement_status
         if self.mb_seconds is not None:
             result['mbSeconds'] = self.mb_seconds
         if self.resource_queue_id is not None:
             result['resourceQueueId'] = self.resource_queue_id
+        if self.run_log is not None:
+            result['runLog'] = self.run_log.to_map()
         if self.start_time is not None:
             result['startTime'] = self.start_time
         if self.state is not None:

@@ -8276,12 +8816,17 @@ class ListKyuubiSparkApplicationsResponseBodyApplications(TeaModel):
             self.end_time = m.get('endTime')
         if m.get('exitReason') is not None:
             self.exit_reason = m.get('exitReason')
+        if m.get('kyuubiServiceId') is not None:
+            self.kyuubi_service_id = m.get('kyuubiServiceId')
         if m.get('latestSqlStatementStatus') is not None:
             self.latest_sql_statement_status = m.get('latestSqlStatementStatus')
         if m.get('mbSeconds') is not None:
             self.mb_seconds = m.get('mbSeconds')
         if m.get('resourceQueueId') is not None:
             self.resource_queue_id = m.get('resourceQueueId')
+        if m.get('runLog') is not None:
+            temp_model = RunLog()
+            self.run_log = temp_model.from_map(m['runLog'])
         if m.get('startTime') is not None:
             self.start_time = m.get('startTime')
         if m.get('state') is not None:
@@ -10626,6 +11171,139 @@ class ListSqlStatementContentsResponse(TeaModel):
         return self
 
 
+class ListTemplateRequest(TeaModel):
+    def __init__(
+        self,
+        region_id: str = None,
+    ):
+        self.region_id = region_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.region_id is not None:
+            result['regionId'] = self.region_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('regionId') is not None:
+            self.region_id = m.get('regionId')
+        return self
+
+
+class ListTemplateResponseBody(TeaModel):
+    def __init__(
+        self,
+        data: List[Template] = None,
+        error_code: str = None,
+        error_message: str = None,
+        http_status_code: str = None,
+        request_id: str = None,
+        success: bool = None,
+    ):
+        self.data = data
+        self.error_code = error_code
+        self.error_message = error_message
+        self.http_status_code = http_status_code
+        self.request_id = request_id
+        self.success = success
+
+    def validate(self):
+        if self.data:
+            for k in self.data:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        result['data'] = []
+        if self.data is not None:
+            for k in self.data:
+                result['data'].append(k.to_map() if k else None)
+        if self.error_code is not None:
+            result['errorCode'] = self.error_code
+        if self.error_message is not None:
+            result['errorMessage'] = self.error_message
+        if self.http_status_code is not None:
+            result['httpStatusCode'] = self.http_status_code
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.success is not None:
+            result['success'] = self.success
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        self.data = []
+        if m.get('data') is not None:
+            for k in m.get('data'):
+                temp_model = Template()
+                self.data.append(temp_model.from_map(k))
+        if m.get('errorCode') is not None:
+            self.error_code = m.get('errorCode')
+        if m.get('errorMessage') is not None:
+            self.error_message = m.get('errorMessage')
+        if m.get('httpStatusCode') is not None:
+            self.http_status_code = m.get('httpStatusCode')
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('success') is not None:
+            self.success = m.get('success')
+        return self
+
+
+class ListTemplateResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: ListTemplateResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = ListTemplateResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class ListWorkspaceQueuesRequest(TeaModel):
     def __init__(
         self,
@@ -10732,6 +11410,7 @@ class ListWorkspaceQueuesResponseBodyQueues(TeaModel):
         max_resource: str = None,
         min_resource: str = None,
         payment_type: str = None,
+        preheat: bool = None,
         properties: str = None,
         queue_name: str = None,
         queue_scope: str = None,

@@ -10758,6 +11437,7 @@ class ListWorkspaceQueuesResponseBodyQueues(TeaModel):
         # * PayAsYouGo
         # * Pre
         self.payment_type = payment_type
+        self.preheat = preheat
         # The queue label.
         self.properties = properties
         # The name of the queue.

@@ -10806,6 +11486,8 @@ class ListWorkspaceQueuesResponseBodyQueues(TeaModel):
             result['minResource'] = self.min_resource
         if self.payment_type is not None:
             result['paymentType'] = self.payment_type
+        if self.preheat is not None:
+            result['preheat'] = self.preheat
         if self.properties is not None:
             result['properties'] = self.properties
         if self.queue_name is not None:

@@ -10843,6 +11525,8 @@ class ListWorkspaceQueuesResponseBodyQueues(TeaModel):
             self.min_resource = m.get('minResource')
         if m.get('paymentType') is not None:
             self.payment_type = m.get('paymentType')
+        if m.get('preheat') is not None:
+            self.preheat = m.get('preheat')
         if m.get('properties') is not None:
             self.properties = m.get('properties')
         if m.get('queueName') is not None:
alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud-emr-serverless-spark20230808
-Version: 1.15.0
+Version: 1.16.1
 Summary: Alibaba Cloud emr-serverless-spark (20230808) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/RECORD

@@ -0,0 +1,8 @@
+alibabacloud_emr_serverless_spark20230808/__init__.py,sha256=mzCFPei5ndQuURVV-lT_WzjvIENwk9avPksZB88fEjk,22
+alibabacloud_emr_serverless_spark20230808/client.py,sha256=WmyqEij_U6g4Qx7BuYviAjqMU91Ug66iYn9_oV5CQVM,269180
+alibabacloud_emr_serverless_spark20230808/models.py,sha256=n4AM-NAF8sF_12Rq8K7Ou21Y9tjIYM8cFGnIDyc_TYE,507118
+alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/LICENSE,sha256=0CFItL6bHvxqS44T6vlLoW2R4Zaic304OO3WxN0oXF0,600
+alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/METADATA,sha256=SBqCyvMgl58MC1rJ-9Bi2CqeYTtw4LIF704uoPTWA3Q,2432
+alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/top_level.txt,sha256=8b6upnqVMtrToMFachj1i07ccsHHbzRruHP9NynLR-A,42
+alibabacloud_emr_serverless_spark20230808-1.16.1.dist-info/RECORD,,

alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info/RECORD

@@ -1,8 +0,0 @@
-alibabacloud_emr_serverless_spark20230808/__init__.py,sha256=59yv4QYm-9lVAj76lMi_bY1HW4FQ4-QjOF9gOTocXaQ,22
-alibabacloud_emr_serverless_spark20230808/client.py,sha256=v6wHiTB7sKSoPYQY8hguqUlYLau_ZO0Y80w5TVLriMM,255898
-alibabacloud_emr_serverless_spark20230808/models.py,sha256=kO4MBj64CO81pjwSsZiRZjM-iR-uL8PBrINx7BnMZto,483529
-alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info/LICENSE,sha256=0CFItL6bHvxqS44T6vlLoW2R4Zaic304OO3WxN0oXF0,600
-alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info/METADATA,sha256=8O0-Xp4q1CvR-9fg-wZxiKq_MQJUGyAnh9Au1_5SmXA,2432
-alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
-alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info/top_level.txt,sha256=8b6upnqVMtrToMFachj1i07ccsHHbzRruHP9NynLR-A,42
-alibabacloud_emr_serverless_spark20230808-1.15.0.dist-info/RECORD,,
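The RECORD entries above pair each file with a digest and size. If you want to spot-check a digest locally, the sketch below assumes the standard wheel RECORD convention (SHA-256, urlsafe base64 without padding); the file path is illustrative.

```python
# Hedged sketch: computing a wheel-RECORD-style digest for comparison.
import base64
import hashlib

def record_digest(path: str) -> str:
    with open(path, 'rb') as f:
        digest = hashlib.sha256(f.read()).digest()
    # RECORD entries strip the '=' padding from the urlsafe base64 encoding.
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')

# print(record_digest('alibabacloud_emr_serverless_spark20230808/__init__.py'))
```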
Files without changes: LICENSE, WHEEL, top_level.txt.