alibabacloud-quanmiaolightapp20240801 2.0.0__tar.gz → 2.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/ChangeLog.md +9 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/PKG-INFO +1 -1
- alibabacloud_quanmiaolightapp20240801-2.1.0/alibabacloud_quanmiaolightapp20240801/__init__.py +1 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801/client.py +280 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801/models.py +749 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/PKG-INFO +1 -1
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/setup.py +1 -1
- alibabacloud_quanmiaolightapp20240801-2.0.0/alibabacloud_quanmiaolightapp20240801/__init__.py +0 -1
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/LICENSE +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/MANIFEST.in +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/README-CN.md +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/README.md +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/SOURCES.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/dependency_links.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/requires.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/top_level.txt +0 -0
- {alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/setup.cfg +0 -0
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/ChangeLog.md
RENAMED
@@ -1,3 +1,12 @@
+2024-12-11 Version: 2.0.1
+- Update API RunHotTopicChat: add param messages.
+
+
+2024-12-05 Version: 2.0.0
+- Delete API RunCommentGeneration.
+- Update API RunHotTopicChat: update response param.
+
+
 2024-11-06 Version: 1.4.1
 - Update API RunMarketingInformationWriting: add param customLimitation.
 - Update API RunMarketingInformationWriting: add param inputExample.
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud_quanmiaolightapp20240801
-Version: 2.0.0
+Version: 2.1.0
 Summary: Alibaba Cloud QuanMiaoLightApp (20240801) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
alibabacloud_quanmiaolightapp20240801-2.1.0/alibabacloud_quanmiaolightapp20240801/__init__.py
ADDED
@@ -0,0 +1 @@
+__version__ = '2.1.0'
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801/client.py
RENAMED
@@ -149,6 +149,138 @@ class Client(OpenApiClient):
         headers = {}
         return await self.generate_broadcast_news_with_options_async(workspace_id, request, headers, runtime)

+    def generate_output_format_with_options(
+        self,
+        workspace_id: str,
+        tmp_req: quan_miao_light_app_20240801_models.GenerateOutputFormatRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> quan_miao_light_app_20240801_models.GenerateOutputFormatResponse:
+        """
+        @summary Light app - Tag mining - Get sample output format
+
+        @param tmp_req: GenerateOutputFormatRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: GenerateOutputFormatResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = quan_miao_light_app_20240801_models.GenerateOutputFormatShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.tags):
+            request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
+        body = {}
+        if not UtilClient.is_unset(request.business_type):
+            body['businessType'] = request.business_type
+        if not UtilClient.is_unset(request.content):
+            body['content'] = request.content
+        if not UtilClient.is_unset(request.extra_info):
+            body['extraInfo'] = request.extra_info
+        if not UtilClient.is_unset(request.tags_shrink):
+            body['tags'] = request.tags_shrink
+        if not UtilClient.is_unset(request.task_description):
+            body['taskDescription'] = request.task_description
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='GenerateOutputFormat',
+            version='2024-08-01',
+            protocol='HTTPS',
+            pathname=f'/{OpenApiUtilClient.get_encode_param(workspace_id)}/quanmiao/lightapp/generateOutputFormat',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='formData',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            quan_miao_light_app_20240801_models.GenerateOutputFormatResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def generate_output_format_with_options_async(
+        self,
+        workspace_id: str,
+        tmp_req: quan_miao_light_app_20240801_models.GenerateOutputFormatRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> quan_miao_light_app_20240801_models.GenerateOutputFormatResponse:
+        """
+        @summary Light app - Tag mining - Get sample output format
+
+        @param tmp_req: GenerateOutputFormatRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: GenerateOutputFormatResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = quan_miao_light_app_20240801_models.GenerateOutputFormatShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.tags):
+            request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
+        body = {}
+        if not UtilClient.is_unset(request.business_type):
+            body['businessType'] = request.business_type
+        if not UtilClient.is_unset(request.content):
+            body['content'] = request.content
+        if not UtilClient.is_unset(request.extra_info):
+            body['extraInfo'] = request.extra_info
+        if not UtilClient.is_unset(request.tags_shrink):
+            body['tags'] = request.tags_shrink
+        if not UtilClient.is_unset(request.task_description):
+            body['taskDescription'] = request.task_description
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='GenerateOutputFormat',
+            version='2024-08-01',
+            protocol='HTTPS',
+            pathname=f'/{OpenApiUtilClient.get_encode_param(workspace_id)}/quanmiao/lightapp/generateOutputFormat',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='formData',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            quan_miao_light_app_20240801_models.GenerateOutputFormatResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def generate_output_format(
+        self,
+        workspace_id: str,
+        request: quan_miao_light_app_20240801_models.GenerateOutputFormatRequest,
+    ) -> quan_miao_light_app_20240801_models.GenerateOutputFormatResponse:
+        """
+        @summary Light app - Tag mining - Get sample output format
+
+        @param request: GenerateOutputFormatRequest
+        @return: GenerateOutputFormatResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.generate_output_format_with_options(workspace_id, request, headers, runtime)
+
+    async def generate_output_format_async(
+        self,
+        workspace_id: str,
+        request: quan_miao_light_app_20240801_models.GenerateOutputFormatRequest,
+    ) -> quan_miao_light_app_20240801_models.GenerateOutputFormatResponse:
+        """
+        @summary Light app - Tag mining - Get sample output format
+
+        @param request: GenerateOutputFormatRequest
+        @return: GenerateOutputFormatResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.generate_output_format_with_options_async(workspace_id, request, headers, runtime)
+
     def list_hot_topic_summaries_with_options(
         self,
         workspace_id: str,
@@ -295,6 +427,8 @@ class Client(OpenApiClient):
             request.generate_options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.generate_options, 'generateOptions', 'json')
         if not UtilClient.is_unset(tmp_req.hot_topics):
            request.hot_topics_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.hot_topics, 'hotTopics', 'json')
+        if not UtilClient.is_unset(tmp_req.messages):
+            request.messages_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.messages, 'messages', 'json')
         if not UtilClient.is_unset(tmp_req.step_for_broadcast_content_config):
             request.step_for_broadcast_content_config_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.step_for_broadcast_content_config, 'stepForBroadcastContentConfig', 'json')
         body = {}
@@ -308,6 +442,8 @@ class Client(OpenApiClient):
             body['hotTopics'] = request.hot_topics_shrink
         if not UtilClient.is_unset(request.image_count):
             body['imageCount'] = request.image_count
+        if not UtilClient.is_unset(request.messages_shrink):
+            body['messages'] = request.messages_shrink
         if not UtilClient.is_unset(request.model_custom_prompt_template):
             body['modelCustomPromptTemplate'] = request.model_custom_prompt_template
         if not UtilClient.is_unset(request.model_id):
@@ -362,6 +498,8 @@ class Client(OpenApiClient):
             request.generate_options_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.generate_options, 'generateOptions', 'json')
         if not UtilClient.is_unset(tmp_req.hot_topics):
             request.hot_topics_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.hot_topics, 'hotTopics', 'json')
+        if not UtilClient.is_unset(tmp_req.messages):
+            request.messages_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.messages, 'messages', 'json')
         if not UtilClient.is_unset(tmp_req.step_for_broadcast_content_config):
             request.step_for_broadcast_content_config_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.step_for_broadcast_content_config, 'stepForBroadcastContentConfig', 'json')
         body = {}
@@ -375,6 +513,8 @@ class Client(OpenApiClient):
             body['hotTopics'] = request.hot_topics_shrink
         if not UtilClient.is_unset(request.image_count):
             body['imageCount'] = request.image_count
+        if not UtilClient.is_unset(request.messages_shrink):
+            body['messages'] = request.messages_shrink
         if not UtilClient.is_unset(request.model_custom_prompt_template):
             body['modelCustomPromptTemplate'] = request.model_custom_prompt_template
         if not UtilClient.is_unset(request.model_id):
@@ -1205,6 +1345,146 @@ class Client(OpenApiClient):
         headers = {}
         return await self.run_style_writing_with_options_async(workspace_id, request, headers, runtime)

+    def run_tag_mining_analysis_with_options(
+        self,
+        workspace_id: str,
+        tmp_req: quan_miao_light_app_20240801_models.RunTagMiningAnalysisRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> quan_miao_light_app_20240801_models.RunTagMiningAnalysisResponse:
+        """
+        @summary Light app - Tag mining
+
+        @param tmp_req: RunTagMiningAnalysisRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: RunTagMiningAnalysisResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = quan_miao_light_app_20240801_models.RunTagMiningAnalysisShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.tags):
+            request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
+        body = {}
+        if not UtilClient.is_unset(request.business_type):
+            body['businessType'] = request.business_type
+        if not UtilClient.is_unset(request.content):
+            body['content'] = request.content
+        if not UtilClient.is_unset(request.extra_info):
+            body['extraInfo'] = request.extra_info
+        if not UtilClient.is_unset(request.model_id):
+            body['modelId'] = request.model_id
+        if not UtilClient.is_unset(request.output_format):
+            body['outputFormat'] = request.output_format
+        if not UtilClient.is_unset(request.tags_shrink):
+            body['tags'] = request.tags_shrink
+        if not UtilClient.is_unset(request.task_description):
+            body['taskDescription'] = request.task_description
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='RunTagMiningAnalysis',
+            version='2024-08-01',
+            protocol='HTTPS',
+            pathname=f'/{OpenApiUtilClient.get_encode_param(workspace_id)}/quanmiao/lightapp/runTagMiningAnalysis',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='formData',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            quan_miao_light_app_20240801_models.RunTagMiningAnalysisResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def run_tag_mining_analysis_with_options_async(
+        self,
+        workspace_id: str,
+        tmp_req: quan_miao_light_app_20240801_models.RunTagMiningAnalysisRequest,
+        headers: Dict[str, str],
+        runtime: util_models.RuntimeOptions,
+    ) -> quan_miao_light_app_20240801_models.RunTagMiningAnalysisResponse:
+        """
+        @summary Light app - Tag mining
+
+        @param tmp_req: RunTagMiningAnalysisRequest
+        @param headers: map
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: RunTagMiningAnalysisResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = quan_miao_light_app_20240801_models.RunTagMiningAnalysisShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.tags):
+            request.tags_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.tags, 'tags', 'json')
+        body = {}
+        if not UtilClient.is_unset(request.business_type):
+            body['businessType'] = request.business_type
+        if not UtilClient.is_unset(request.content):
+            body['content'] = request.content
+        if not UtilClient.is_unset(request.extra_info):
+            body['extraInfo'] = request.extra_info
+        if not UtilClient.is_unset(request.model_id):
+            body['modelId'] = request.model_id
+        if not UtilClient.is_unset(request.output_format):
+            body['outputFormat'] = request.output_format
+        if not UtilClient.is_unset(request.tags_shrink):
+            body['tags'] = request.tags_shrink
+        if not UtilClient.is_unset(request.task_description):
+            body['taskDescription'] = request.task_description
+        req = open_api_models.OpenApiRequest(
+            headers=headers,
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='RunTagMiningAnalysis',
+            version='2024-08-01',
+            protocol='HTTPS',
+            pathname=f'/{OpenApiUtilClient.get_encode_param(workspace_id)}/quanmiao/lightapp/runTagMiningAnalysis',
+            method='POST',
+            auth_type='AK',
+            style='ROA',
+            req_body_type='formData',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            quan_miao_light_app_20240801_models.RunTagMiningAnalysisResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def run_tag_mining_analysis(
+        self,
+        workspace_id: str,
+        request: quan_miao_light_app_20240801_models.RunTagMiningAnalysisRequest,
+    ) -> quan_miao_light_app_20240801_models.RunTagMiningAnalysisResponse:
+        """
+        @summary Light app - Tag mining
+
+        @param request: RunTagMiningAnalysisRequest
+        @return: RunTagMiningAnalysisResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return self.run_tag_mining_analysis_with_options(workspace_id, request, headers, runtime)
+
+    async def run_tag_mining_analysis_async(
+        self,
+        workspace_id: str,
+        request: quan_miao_light_app_20240801_models.RunTagMiningAnalysisRequest,
+    ) -> quan_miao_light_app_20240801_models.RunTagMiningAnalysisResponse:
+        """
+        @summary Light app - Tag mining
+
+        @param request: RunTagMiningAnalysisRequest
+        @return: RunTagMiningAnalysisResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        headers = {}
+        return await self.run_tag_mining_analysis_with_options_async(workspace_id, request, headers, runtime)
+
     def run_video_analysis_with_options(
         self,
         workspace_id: str,
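Note (commentary, not part of the released diff): the hunks above add four client entry points for the new RunTagMiningAnalysis operation. Below is a minimal usage sketch inferred from those signatures; the endpoint, credentials and workspace ID are placeholder assumptions, not values shipped with this package.

# Commentary sketch only; endpoint, credentials and workspace ID are assumptions.
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_quanmiaolightapp20240801.client import Client
from alibabacloud_quanmiaolightapp20240801 import models as quanmiao_models

config = open_api_models.Config(
    access_key_id='<your-access-key-id>',
    access_key_secret='<your-access-key-secret>',
    endpoint='quanmiaolightapp.cn-beijing.aliyuncs.com',  # assumed regional endpoint
)
client = Client(config)

request = quanmiao_models.RunTagMiningAnalysisRequest(
    content='Text to mine tags from.',          # required, per the new request model
    task_description='Label customer feedback.',
    tags=[
        quanmiao_models.RunTagMiningAnalysisRequestTags(
            tag_name='complaint',
            tag_define_prompt='The text reports a product or service problem.',
        ),
    ],
)
# workspace_id fills the {workspaceId} segment of /{workspaceId}/quanmiao/lightapp/runTagMiningAnalysis
response = client.run_tag_mining_analysis('<your-workspace-id>', request)
print(response.body.payload.output.text)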
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801/models.py
RENAMED
@@ -331,6 +331,279 @@ class GenerateBroadcastNewsResponse(TeaModel):
         return self


+class GenerateOutputFormatRequestTags(TeaModel):
+    def __init__(
+        self,
+        tag_define_prompt: str = None,
+        tag_name: str = None,
+    ):
+        self.tag_define_prompt = tag_define_prompt
+        self.tag_name = tag_name
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.tag_define_prompt is not None:
+            result['tagDefinePrompt'] = self.tag_define_prompt
+        if self.tag_name is not None:
+            result['tagName'] = self.tag_name
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('tagDefinePrompt') is not None:
+            self.tag_define_prompt = m.get('tagDefinePrompt')
+        if m.get('tagName') is not None:
+            self.tag_name = m.get('tagName')
+        return self
+
+
+class GenerateOutputFormatRequest(TeaModel):
+    def __init__(
+        self,
+        business_type: str = None,
+        content: str = None,
+        extra_info: str = None,
+        tags: List[GenerateOutputFormatRequestTags] = None,
+        task_description: str = None,
+    ):
+        self.business_type = business_type
+        self.content = content
+        self.extra_info = extra_info
+        # This parameter is required.
+        self.tags = tags
+        self.task_description = task_description
+
+    def validate(self):
+        if self.tags:
+            for k in self.tags:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.business_type is not None:
+            result['businessType'] = self.business_type
+        if self.content is not None:
+            result['content'] = self.content
+        if self.extra_info is not None:
+            result['extraInfo'] = self.extra_info
+        result['tags'] = []
+        if self.tags is not None:
+            for k in self.tags:
+                result['tags'].append(k.to_map() if k else None)
+        if self.task_description is not None:
+            result['taskDescription'] = self.task_description
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('businessType') is not None:
+            self.business_type = m.get('businessType')
+        if m.get('content') is not None:
+            self.content = m.get('content')
+        if m.get('extraInfo') is not None:
+            self.extra_info = m.get('extraInfo')
+        self.tags = []
+        if m.get('tags') is not None:
+            for k in m.get('tags'):
+                temp_model = GenerateOutputFormatRequestTags()
+                self.tags.append(temp_model.from_map(k))
+        if m.get('taskDescription') is not None:
+            self.task_description = m.get('taskDescription')
+        return self
+
+
+class GenerateOutputFormatShrinkRequest(TeaModel):
+    def __init__(
+        self,
+        business_type: str = None,
+        content: str = None,
+        extra_info: str = None,
+        tags_shrink: str = None,
+        task_description: str = None,
+    ):
+        self.business_type = business_type
+        self.content = content
+        self.extra_info = extra_info
+        # This parameter is required.
+        self.tags_shrink = tags_shrink
+        self.task_description = task_description
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.business_type is not None:
+            result['businessType'] = self.business_type
+        if self.content is not None:
+            result['content'] = self.content
+        if self.extra_info is not None:
+            result['extraInfo'] = self.extra_info
+        if self.tags_shrink is not None:
+            result['tags'] = self.tags_shrink
+        if self.task_description is not None:
+            result['taskDescription'] = self.task_description
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('businessType') is not None:
+            self.business_type = m.get('businessType')
+        if m.get('content') is not None:
+            self.content = m.get('content')
+        if m.get('extraInfo') is not None:
+            self.extra_info = m.get('extraInfo')
+        if m.get('tags') is not None:
+            self.tags_shrink = m.get('tags')
+        if m.get('taskDescription') is not None:
+            self.task_description = m.get('taskDescription')
+        return self
+
+
+class GenerateOutputFormatResponseBodyData(TeaModel):
+    def __init__(
+        self,
+        output_format: str = None,
+    ):
+        self.output_format = output_format
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.output_format is not None:
+            result['outputFormat'] = self.output_format
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('outputFormat') is not None:
+            self.output_format = m.get('outputFormat')
+        return self
+
+
+class GenerateOutputFormatResponseBody(TeaModel):
+    def __init__(
+        self,
+        code: str = None,
+        data: GenerateOutputFormatResponseBodyData = None,
+        http_status_code: int = None,
+        message: str = None,
+        request_id: str = None,
+        success: bool = None,
+    ):
+        self.code = code
+        self.data = data
+        self.http_status_code = http_status_code
+        self.message = message
+        # Id of the request
+        self.request_id = request_id
+        self.success = success
+
+    def validate(self):
+        if self.data:
+            self.data.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.code is not None:
+            result['code'] = self.code
+        if self.data is not None:
+            result['data'] = self.data.to_map()
+        if self.http_status_code is not None:
+            result['httpStatusCode'] = self.http_status_code
+        if self.message is not None:
+            result['message'] = self.message
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        if self.success is not None:
+            result['success'] = self.success
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('code') is not None:
+            self.code = m.get('code')
+        if m.get('data') is not None:
+            temp_model = GenerateOutputFormatResponseBodyData()
+            self.data = temp_model.from_map(m['data'])
+        if m.get('httpStatusCode') is not None:
+            self.http_status_code = m.get('httpStatusCode')
+        if m.get('message') is not None:
+            self.message = m.get('message')
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        if m.get('success') is not None:
+            self.success = m.get('success')
+        return self
+
+
+class GenerateOutputFormatResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: GenerateOutputFormatResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = GenerateOutputFormatResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class ListHotTopicSummariesRequest(TeaModel):
     def __init__(
         self,
@@ -741,6 +1014,45 @@ class ListHotTopicSummariesResponse(TeaModel):
         return self


+class RunHotTopicChatRequestMessages(TeaModel):
+    def __init__(
+        self,
+        content: str = None,
+        create_time: str = None,
+        role: str = None,
+    ):
+        self.content = content
+        self.create_time = create_time
+        self.role = role
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.content is not None:
+            result['content'] = self.content
+        if self.create_time is not None:
+            result['createTime'] = self.create_time
+        if self.role is not None:
+            result['role'] = self.role
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('content') is not None:
+            self.content = m.get('content')
+        if m.get('createTime') is not None:
+            self.create_time = m.get('createTime')
+        if m.get('role') is not None:
+            self.role = m.get('role')
+        return self
+
+
 class RunHotTopicChatRequestStepForBroadcastContentConfigCustomHotValueWeights(TeaModel):
     def __init__(
         self,
@@ -829,6 +1141,7 @@ class RunHotTopicChatRequest(TeaModel):
         hot_topic_version: str = None,
         hot_topics: List[str] = None,
         image_count: int = None,
+        messages: List[RunHotTopicChatRequestMessages] = None,
         model_custom_prompt_template: str = None,
         model_id: str = None,
         original_session_id: str = None,
@@ -841,6 +1154,7 @@ class RunHotTopicChatRequest(TeaModel):
         self.hot_topic_version = hot_topic_version
         self.hot_topics = hot_topics
         self.image_count = image_count
+        self.messages = messages
         self.model_custom_prompt_template = model_custom_prompt_template
         self.model_id = model_id
         self.original_session_id = original_session_id
@@ -849,6 +1163,10 @@ class RunHotTopicChatRequest(TeaModel):
         self.task_id = task_id

     def validate(self):
+        if self.messages:
+            for k in self.messages:
+                if k:
+                    k.validate()
         if self.step_for_broadcast_content_config:
             self.step_for_broadcast_content_config.validate()

@@ -868,6 +1186,10 @@ class RunHotTopicChatRequest(TeaModel):
             result['hotTopics'] = self.hot_topics
         if self.image_count is not None:
             result['imageCount'] = self.image_count
+        result['messages'] = []
+        if self.messages is not None:
+            for k in self.messages:
+                result['messages'].append(k.to_map() if k else None)
         if self.model_custom_prompt_template is not None:
             result['modelCustomPromptTemplate'] = self.model_custom_prompt_template
         if self.model_id is not None:
@@ -894,6 +1216,11 @@ class RunHotTopicChatRequest(TeaModel):
             self.hot_topics = m.get('hotTopics')
         if m.get('imageCount') is not None:
             self.image_count = m.get('imageCount')
+        self.messages = []
+        if m.get('messages') is not None:
+            for k in m.get('messages'):
+                temp_model = RunHotTopicChatRequestMessages()
+                self.messages.append(temp_model.from_map(k))
         if m.get('modelCustomPromptTemplate') is not None:
             self.model_custom_prompt_template = m.get('modelCustomPromptTemplate')
         if m.get('modelId') is not None:
@@ -918,6 +1245,7 @@ class RunHotTopicChatShrinkRequest(TeaModel):
         hot_topic_version: str = None,
         hot_topics_shrink: str = None,
         image_count: int = None,
+        messages_shrink: str = None,
         model_custom_prompt_template: str = None,
         model_id: str = None,
         original_session_id: str = None,
@@ -930,6 +1258,7 @@ class RunHotTopicChatShrinkRequest(TeaModel):
         self.hot_topic_version = hot_topic_version
         self.hot_topics_shrink = hot_topics_shrink
         self.image_count = image_count
+        self.messages_shrink = messages_shrink
         self.model_custom_prompt_template = model_custom_prompt_template
         self.model_id = model_id
         self.original_session_id = original_session_id
@@ -956,6 +1285,8 @@ class RunHotTopicChatShrinkRequest(TeaModel):
             result['hotTopics'] = self.hot_topics_shrink
         if self.image_count is not None:
             result['imageCount'] = self.image_count
+        if self.messages_shrink is not None:
+            result['messages'] = self.messages_shrink
         if self.model_custom_prompt_template is not None:
             result['modelCustomPromptTemplate'] = self.model_custom_prompt_template
         if self.model_id is not None:
@@ -982,6 +1313,8 @@ class RunHotTopicChatShrinkRequest(TeaModel):
             self.hot_topics_shrink = m.get('hotTopics')
         if m.get('imageCount') is not None:
             self.image_count = m.get('imageCount')
+        if m.get('messages') is not None:
+            self.messages_shrink = m.get('messages')
         if m.get('modelCustomPromptTemplate') is not None:
             self.model_custom_prompt_template = m.get('modelCustomPromptTemplate')
         if m.get('modelId') is not None:
@@ -3557,6 +3890,422 @@ class RunStyleWritingResponse(TeaModel):
         return self


+class RunTagMiningAnalysisRequestTags(TeaModel):
+    def __init__(
+        self,
+        tag_define_prompt: str = None,
+        tag_name: str = None,
+    ):
+        self.tag_define_prompt = tag_define_prompt
+        self.tag_name = tag_name
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.tag_define_prompt is not None:
+            result['tagDefinePrompt'] = self.tag_define_prompt
+        if self.tag_name is not None:
+            result['tagName'] = self.tag_name
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('tagDefinePrompt') is not None:
+            self.tag_define_prompt = m.get('tagDefinePrompt')
+        if m.get('tagName') is not None:
+            self.tag_name = m.get('tagName')
+        return self
+
+
+class RunTagMiningAnalysisRequest(TeaModel):
+    def __init__(
+        self,
+        business_type: str = None,
+        content: str = None,
+        extra_info: str = None,
+        model_id: str = None,
+        output_format: str = None,
+        tags: List[RunTagMiningAnalysisRequestTags] = None,
+        task_description: str = None,
+    ):
+        self.business_type = business_type
+        # This parameter is required.
+        self.content = content
+        self.extra_info = extra_info
+        self.model_id = model_id
+        self.output_format = output_format
+        self.tags = tags
+        self.task_description = task_description
+
+    def validate(self):
+        if self.tags:
+            for k in self.tags:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.business_type is not None:
+            result['businessType'] = self.business_type
+        if self.content is not None:
+            result['content'] = self.content
+        if self.extra_info is not None:
+            result['extraInfo'] = self.extra_info
+        if self.model_id is not None:
+            result['modelId'] = self.model_id
+        if self.output_format is not None:
+            result['outputFormat'] = self.output_format
+        result['tags'] = []
+        if self.tags is not None:
+            for k in self.tags:
+                result['tags'].append(k.to_map() if k else None)
+        if self.task_description is not None:
+            result['taskDescription'] = self.task_description
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('businessType') is not None:
+            self.business_type = m.get('businessType')
+        if m.get('content') is not None:
+            self.content = m.get('content')
+        if m.get('extraInfo') is not None:
+            self.extra_info = m.get('extraInfo')
+        if m.get('modelId') is not None:
+            self.model_id = m.get('modelId')
+        if m.get('outputFormat') is not None:
+            self.output_format = m.get('outputFormat')
+        self.tags = []
+        if m.get('tags') is not None:
+            for k in m.get('tags'):
+                temp_model = RunTagMiningAnalysisRequestTags()
+                self.tags.append(temp_model.from_map(k))
+        if m.get('taskDescription') is not None:
+            self.task_description = m.get('taskDescription')
+        return self
+
+
+class RunTagMiningAnalysisShrinkRequest(TeaModel):
+    def __init__(
+        self,
+        business_type: str = None,
+        content: str = None,
+        extra_info: str = None,
+        model_id: str = None,
+        output_format: str = None,
+        tags_shrink: str = None,
+        task_description: str = None,
+    ):
+        self.business_type = business_type
+        # This parameter is required.
+        self.content = content
+        self.extra_info = extra_info
+        self.model_id = model_id
+        self.output_format = output_format
+        self.tags_shrink = tags_shrink
+        self.task_description = task_description
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.business_type is not None:
+            result['businessType'] = self.business_type
+        if self.content is not None:
+            result['content'] = self.content
+        if self.extra_info is not None:
+            result['extraInfo'] = self.extra_info
+        if self.model_id is not None:
+            result['modelId'] = self.model_id
+        if self.output_format is not None:
+            result['outputFormat'] = self.output_format
+        if self.tags_shrink is not None:
+            result['tags'] = self.tags_shrink
+        if self.task_description is not None:
+            result['taskDescription'] = self.task_description
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('businessType') is not None:
+            self.business_type = m.get('businessType')
+        if m.get('content') is not None:
+            self.content = m.get('content')
+        if m.get('extraInfo') is not None:
+            self.extra_info = m.get('extraInfo')
+        if m.get('modelId') is not None:
+            self.model_id = m.get('modelId')
+        if m.get('outputFormat') is not None:
+            self.output_format = m.get('outputFormat')
+        if m.get('tags') is not None:
+            self.tags_shrink = m.get('tags')
+        if m.get('taskDescription') is not None:
+            self.task_description = m.get('taskDescription')
+        return self
+
+
+class RunTagMiningAnalysisResponseBodyHeader(TeaModel):
+    def __init__(
+        self,
+        error_code: str = None,
+        error_message: str = None,
+        event: str = None,
+        session_id: str = None,
+        task_id: str = None,
+        trace_id: str = None,
+    ):
+        self.error_code = error_code
+        self.error_message = error_message
+        self.event = event
+        self.session_id = session_id
+        self.task_id = task_id
+        self.trace_id = trace_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.error_code is not None:
+            result['errorCode'] = self.error_code
+        if self.error_message is not None:
+            result['errorMessage'] = self.error_message
+        if self.event is not None:
+            result['event'] = self.event
+        if self.session_id is not None:
+            result['sessionId'] = self.session_id
+        if self.task_id is not None:
+            result['taskId'] = self.task_id
+        if self.trace_id is not None:
+            result['traceId'] = self.trace_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('errorCode') is not None:
+            self.error_code = m.get('errorCode')
+        if m.get('errorMessage') is not None:
+            self.error_message = m.get('errorMessage')
+        if m.get('event') is not None:
+            self.event = m.get('event')
+        if m.get('sessionId') is not None:
+            self.session_id = m.get('sessionId')
+        if m.get('taskId') is not None:
+            self.task_id = m.get('taskId')
+        if m.get('traceId') is not None:
+            self.trace_id = m.get('traceId')
+        return self
+
+
+class RunTagMiningAnalysisResponseBodyPayloadOutput(TeaModel):
+    def __init__(
+        self,
+        text: str = None,
+    ):
+        self.text = text
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.text is not None:
+            result['text'] = self.text
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('text') is not None:
+            self.text = m.get('text')
+        return self
+
+
+class RunTagMiningAnalysisResponseBodyPayloadUsage(TeaModel):
+    def __init__(
+        self,
+        input_tokens: int = None,
+        output_tokens: int = None,
+        total_tokens: int = None,
+    ):
+        self.input_tokens = input_tokens
+        self.output_tokens = output_tokens
+        self.total_tokens = total_tokens
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.input_tokens is not None:
+            result['inputTokens'] = self.input_tokens
+        if self.output_tokens is not None:
+            result['outputTokens'] = self.output_tokens
+        if self.total_tokens is not None:
+            result['totalTokens'] = self.total_tokens
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('inputTokens') is not None:
+            self.input_tokens = m.get('inputTokens')
+        if m.get('outputTokens') is not None:
+            self.output_tokens = m.get('outputTokens')
+        if m.get('totalTokens') is not None:
+            self.total_tokens = m.get('totalTokens')
+        return self
+
+
+class RunTagMiningAnalysisResponseBodyPayload(TeaModel):
+    def __init__(
+        self,
+        output: RunTagMiningAnalysisResponseBodyPayloadOutput = None,
+        usage: RunTagMiningAnalysisResponseBodyPayloadUsage = None,
+    ):
+        self.output = output
+        self.usage = usage
+
+    def validate(self):
+        if self.output:
+            self.output.validate()
+        if self.usage:
+            self.usage.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.output is not None:
+            result['output'] = self.output.to_map()
+        if self.usage is not None:
+            result['usage'] = self.usage.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('output') is not None:
+            temp_model = RunTagMiningAnalysisResponseBodyPayloadOutput()
+            self.output = temp_model.from_map(m['output'])
+        if m.get('usage') is not None:
+            temp_model = RunTagMiningAnalysisResponseBodyPayloadUsage()
+            self.usage = temp_model.from_map(m['usage'])
+        return self
+
+
+class RunTagMiningAnalysisResponseBody(TeaModel):
+    def __init__(
+        self,
+        header: RunTagMiningAnalysisResponseBodyHeader = None,
+        payload: RunTagMiningAnalysisResponseBodyPayload = None,
+        request_id: str = None,
+    ):
+        self.header = header
+        self.payload = payload
+        # Id of the request
+        self.request_id = request_id
+
+    def validate(self):
+        if self.header:
+            self.header.validate()
+        if self.payload:
+            self.payload.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.header is not None:
+            result['header'] = self.header.to_map()
+        if self.payload is not None:
+            result['payload'] = self.payload.to_map()
+        if self.request_id is not None:
+            result['requestId'] = self.request_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('header') is not None:
+            temp_model = RunTagMiningAnalysisResponseBodyHeader()
+            self.header = temp_model.from_map(m['header'])
+        if m.get('payload') is not None:
+            temp_model = RunTagMiningAnalysisResponseBodyPayload()
+            self.payload = temp_model.from_map(m['payload'])
+        if m.get('requestId') is not None:
+            self.request_id = m.get('requestId')
+        return self
+
+
+class RunTagMiningAnalysisResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: RunTagMiningAnalysisResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = RunTagMiningAnalysisResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class RunVideoAnalysisRequest(TeaModel):
     def __init__(
         self,
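Note (commentary, not part of the diff): together with the client.py hunks above, these models let RunHotTopicChat carry a chat history. A small sketch of how the new messages field might be populated; the role, content and createTime values are illustrative assumptions, not documented examples.

# Commentary sketch only; field values are made-up examples.
from alibabacloud_quanmiaolightapp20240801 import models as quanmiao_models

request = quanmiao_models.RunHotTopicChatRequest(
    messages=[
        quanmiao_models.RunHotTopicChatRequestMessages(
            role='user',                        # assumed role value
            content='Give me a broadcast draft for today\'s top topics.',
            create_time='2024-12-11 10:00:00',  # assumed timestamp format
        ),
    ],
)
# run_hot_topic_chat_with_options JSON-encodes this list into messages_shrink
# and posts it as the `messages` form field (see the client.py hunks above).
print(request.to_map()['messages'])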
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibabacloud-quanmiaolightapp20240801
-Version: 2.0.0
+Version: 2.1.0
 Summary: Alibaba Cloud QuanMiaoLightApp (20240801) SDK Library for Python
 Home-page: https://github.com/aliyun/alibabacloud-python-sdk
 Author: Alibaba Cloud SDK
alibabacloud_quanmiaolightapp20240801-2.0.0/alibabacloud_quanmiaolightapp20240801/__init__.py
DELETED
@@ -1 +0,0 @@
-__version__ = '2.0.0'
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/LICENSE
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/MANIFEST.in
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/README-CN.md
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/README.md
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/SOURCES.txt
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/dependency_links.txt
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/requires.txt
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/alibabacloud_quanmiaolightapp20240801.egg-info/top_level.txt
RENAMED
File without changes
{alibabacloud_quanmiaolightapp20240801-2.0.0 → alibabacloud_quanmiaolightapp20240801-2.1.0}/setup.cfg
RENAMED
File without changes