alibabacloud-aimiaobi20230801 1.20.6__tar.gz → 1.22.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/ChangeLog.md +4 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/PKG-INFO +1 -1
- alibabacloud_aimiaobi20230801-1.22.0/alibabacloud_aimiaobi20230801/__init__.py +1 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801/client.py +140 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801/models.py +344 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801.egg-info/PKG-INFO +1 -1
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801.egg-info/requires.txt +1 -1
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/setup.py +2 -2
- alibabacloud_aimiaobi20230801-1.20.6/alibabacloud_aimiaobi20230801/__init__.py +0 -1
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/LICENSE +0 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/MANIFEST.in +0 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/README-CN.md +0 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/README.md +0 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801.egg-info/SOURCES.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801.egg-info/dependency_links.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801.egg-info/top_level.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/setup.cfg +0 -0
{alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/ChangeLog.md
@@ -1,3 +1,7 @@
+2025-04-17 Version: 1.20.6
+- Update API SubmitEnterpriseVocAnalysisTask: add request parameters ApiKey.
+
+
 2025-04-17 Version: 1.20.5
 - Update API RunDocBrainmap: add request parameters referenceContent.
 - Update API RunDocIntroduction: add request parameters referenceContent.
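
The changelog entry above mentions a new ApiKey request parameter on SubmitEnterpriseVocAnalysisTask. That API's model is not part of this diff, so the following is only a minimal sketch: it assumes the generated SubmitEnterpriseVocAnalysisTaskRequest class maps ApiKey to api_key, following the same CamelCase to snake_case convention the InvokeType and ModelId hunks below demonstrate.

# Hedged sketch, not taken from this diff: assumes the generated request model
# SubmitEnterpriseVocAnalysisTaskRequest exposes the new ApiKey parameter as `api_key`.
from alibabacloud_aimiaobi20230801 import models as ai_miao_bi_20230801_models

request = ai_miao_bi_20230801_models.SubmitEnterpriseVocAnalysisTaskRequest(
    api_key='<your-api-key>',  # added in 1.20.6 per the changelog entry above
)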

alibabacloud_aimiaobi20230801-1.22.0/alibabacloud_aimiaobi20230801/__init__.py
@@ -0,0 +1 @@
+__version__ = '1.22.0'

{alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801/client.py
@@ -387,6 +387,8 @@ class Client(OpenApiClient):
             body['DatasetType'] = request.dataset_type
         if not UtilClient.is_unset(request.document_handle_config_shrink):
             body['DocumentHandleConfig'] = request.document_handle_config_shrink
+        if not UtilClient.is_unset(request.invoke_type):
+            body['InvokeType'] = request.invoke_type
         if not UtilClient.is_unset(request.search_dataset_enable):
             body['SearchDatasetEnable'] = request.search_dataset_enable
         if not UtilClient.is_unset(request.workspace_id):
@@ -440,6 +442,8 @@ class Client(OpenApiClient):
             body['DatasetType'] = request.dataset_type
         if not UtilClient.is_unset(request.document_handle_config_shrink):
             body['DocumentHandleConfig'] = request.document_handle_config_shrink
+        if not UtilClient.is_unset(request.invoke_type):
+            body['InvokeType'] = request.invoke_type
         if not UtilClient.is_unset(request.search_dataset_enable):
             body['SearchDatasetEnable'] = request.search_dataset_enable
         if not UtilClient.is_unset(request.workspace_id):
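
The two hunks above add the new InvokeType request parameter to the synchronous and asynchronous CreateDataset call paths. A minimal usage sketch, assuming the usual generated convenience wrapper create_dataset(request); the endpoint and all field values are placeholders, not taken from this diff.

# Hedged sketch: creating a dataset with the new InvokeType parameter.
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_aimiaobi20230801.client import Client
from alibabacloud_aimiaobi20230801 import models as ai_miao_bi_20230801_models

config = open_api_models.Config(
    access_key_id='<access-key-id>',          # placeholder credentials
    access_key_secret='<access-key-secret>',
    endpoint='aimiaobi.cn-beijing.aliyuncs.com',  # assumed region endpoint
)
client = Client(config)

request = ai_miao_bi_20230801_models.CreateDatasetRequest(
    dataset_name='my-dataset',
    dataset_type='document',        # placeholder value
    invoke_type='standard',         # new in 1.22.0; allowed values are not listed in this diff
    workspace_id='<workspace-id>',  # required per the request model
)
response = client.create_dataset(request)
print(response.body)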

@@ -11637,6 +11641,8 @@ class Client(OpenApiClient):
             body['AgentContext'] = request.agent_context_shrink
         if not UtilClient.is_unset(request.chat_config_shrink):
             body['ChatConfig'] = request.chat_config_shrink
+        if not UtilClient.is_unset(request.model_id):
+            body['ModelId'] = request.model_id
         if not UtilClient.is_unset(request.original_session_id):
             body['OriginalSessionId'] = request.original_session_id
         if not UtilClient.is_unset(request.prompt):
@@ -11688,6 +11694,8 @@ class Client(OpenApiClient):
             body['AgentContext'] = request.agent_context_shrink
         if not UtilClient.is_unset(request.chat_config_shrink):
             body['ChatConfig'] = request.chat_config_shrink
+        if not UtilClient.is_unset(request.model_id):
+            body['ModelId'] = request.model_id
         if not UtilClient.is_unset(request.original_session_id):
             body['OriginalSessionId'] = request.original_session_id
         if not UtilClient.is_unset(request.prompt):
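
These hunks thread the new ModelId parameter through the RunSearchGeneration call paths, so a caller can pin the backing model. A hedged sketch reusing the client from the previous example; the prompt and model id values are placeholders, and the plain run_search_generation wrapper is assumed to follow the SDK's usual naming.

# Hedged sketch: RunSearchGeneration with the new ModelId parameter.
from alibabacloud_aimiaobi20230801 import models as ai_miao_bi_20230801_models

request = ai_miao_bi_20230801_models.RunSearchGenerationRequest(
    prompt='<your prompt>',
    model_id='<model-id>',  # new in 1.22.0: selects the model used for generation
)
response = client.run_search_generation(request)  # `client` as in the CreateDataset sketch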

@@ -13189,6 +13197,138 @@ class Client(OpenApiClient):
         runtime = util_models.RuntimeOptions()
         return await self.save_material_document_with_options_async(request, runtime)
 
+    def save_style_learning_result_with_options(
+        self,
+        tmp_req: ai_miao_bi_20230801_models.SaveStyleLearningResultRequest,
+        runtime: util_models.RuntimeOptions,
+    ) -> ai_miao_bi_20230801_models.SaveStyleLearningResultResponse:
+        """
+        @summary 保存自定义文体
+
+        @param tmp_req: SaveStyleLearningResultRequest
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: SaveStyleLearningResultResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = ai_miao_bi_20230801_models.SaveStyleLearningResultShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.custom_text_id_list):
+            request.custom_text_id_list_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.custom_text_id_list, 'CustomTextIdList', 'json')
+        if not UtilClient.is_unset(tmp_req.material_id_list):
+            request.material_id_list_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.material_id_list, 'MaterialIdList', 'json')
+        body = {}
+        if not UtilClient.is_unset(request.agent_key):
+            body['AgentKey'] = request.agent_key
+        if not UtilClient.is_unset(request.aigc_result):
+            body['AigcResult'] = request.aigc_result
+        if not UtilClient.is_unset(request.custom_text_id_list_shrink):
+            body['CustomTextIdList'] = request.custom_text_id_list_shrink
+        if not UtilClient.is_unset(request.material_id_list_shrink):
+            body['MaterialIdList'] = request.material_id_list_shrink
+        if not UtilClient.is_unset(request.rewrite_result):
+            body['RewriteResult'] = request.rewrite_result
+        if not UtilClient.is_unset(request.style_name):
+            body['StyleName'] = request.style_name
+        if not UtilClient.is_unset(request.task_id):
+            body['TaskId'] = request.task_id
+        req = open_api_models.OpenApiRequest(
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='SaveStyleLearningResult',
+            version='2023-08-01',
+            protocol='HTTPS',
+            pathname='/',
+            method='POST',
+            auth_type='AK',
+            style='RPC',
+            req_body_type='formData',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            ai_miao_bi_20230801_models.SaveStyleLearningResultResponse(),
+            self.call_api(params, req, runtime)
+        )
+
+    async def save_style_learning_result_with_options_async(
+        self,
+        tmp_req: ai_miao_bi_20230801_models.SaveStyleLearningResultRequest,
+        runtime: util_models.RuntimeOptions,
+    ) -> ai_miao_bi_20230801_models.SaveStyleLearningResultResponse:
+        """
+        @summary 保存自定义文体
+
+        @param tmp_req: SaveStyleLearningResultRequest
+        @param runtime: runtime options for this request RuntimeOptions
+        @return: SaveStyleLearningResultResponse
+        """
+        UtilClient.validate_model(tmp_req)
+        request = ai_miao_bi_20230801_models.SaveStyleLearningResultShrinkRequest()
+        OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.custom_text_id_list):
+            request.custom_text_id_list_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.custom_text_id_list, 'CustomTextIdList', 'json')
+        if not UtilClient.is_unset(tmp_req.material_id_list):
+            request.material_id_list_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.material_id_list, 'MaterialIdList', 'json')
+        body = {}
+        if not UtilClient.is_unset(request.agent_key):
+            body['AgentKey'] = request.agent_key
+        if not UtilClient.is_unset(request.aigc_result):
+            body['AigcResult'] = request.aigc_result
+        if not UtilClient.is_unset(request.custom_text_id_list_shrink):
+            body['CustomTextIdList'] = request.custom_text_id_list_shrink
+        if not UtilClient.is_unset(request.material_id_list_shrink):
+            body['MaterialIdList'] = request.material_id_list_shrink
+        if not UtilClient.is_unset(request.rewrite_result):
+            body['RewriteResult'] = request.rewrite_result
+        if not UtilClient.is_unset(request.style_name):
+            body['StyleName'] = request.style_name
+        if not UtilClient.is_unset(request.task_id):
+            body['TaskId'] = request.task_id
+        req = open_api_models.OpenApiRequest(
+            body=OpenApiUtilClient.parse_to_map(body)
+        )
+        params = open_api_models.Params(
+            action='SaveStyleLearningResult',
+            version='2023-08-01',
+            protocol='HTTPS',
+            pathname='/',
+            method='POST',
+            auth_type='AK',
+            style='RPC',
+            req_body_type='formData',
+            body_type='json'
+        )
+        return TeaCore.from_map(
+            ai_miao_bi_20230801_models.SaveStyleLearningResultResponse(),
+            await self.call_api_async(params, req, runtime)
+        )
+
+    def save_style_learning_result(
+        self,
+        request: ai_miao_bi_20230801_models.SaveStyleLearningResultRequest,
+    ) -> ai_miao_bi_20230801_models.SaveStyleLearningResultResponse:
+        """
+        @summary 保存自定义文体
+
+        @param request: SaveStyleLearningResultRequest
+        @return: SaveStyleLearningResultResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        return self.save_style_learning_result_with_options(request, runtime)
+
+    async def save_style_learning_result_async(
+        self,
+        request: ai_miao_bi_20230801_models.SaveStyleLearningResultRequest,
+    ) -> ai_miao_bi_20230801_models.SaveStyleLearningResultResponse:
+        """
+        @summary 保存自定义文体
+
+        @param request: SaveStyleLearningResultRequest
+        @return: SaveStyleLearningResultResponse
+        """
+        runtime = util_models.RuntimeOptions()
+        return await self.save_style_learning_result_with_options_async(request, runtime)
+
     def search_dataset_documents_with_options(
         self,
         request: ai_miao_bi_20230801_models.SearchDatasetDocumentsRequest,
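
The block above is the main client-side addition in 1.22.0: four generated entry points for the new SaveStyleLearningResult operation (the @summary 保存自定义文体 roughly translates to "save a custom writing style"). A hedged usage sketch; all ids and text values are placeholders, and the request/response fields come from the model hunks later in this diff.

# Hedged sketch: saving a style-learning result with the new API.
from alibabacloud_aimiaobi20230801 import models as ai_miao_bi_20230801_models

request = ai_miao_bi_20230801_models.SaveStyleLearningResultRequest(
    task_id='<style-learning-task-id>',
    style_name='my-style',
    aigc_result='<generated text to keep>',
    rewrite_result='<rewritten text to keep>',
    material_id_list=[1001, 1002],   # integer ids, serialized to JSON by the shrink step above
    custom_text_id_list=[2001],
)
response = client.save_style_learning_result(request)  # `client` as in the earlier sketch
body = response.body
print(body.request_id, body.success, body.data)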

{alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/alibabacloud_aimiaobi20230801/models.py
@@ -1178,6 +1178,7 @@ class CreateDatasetRequest(TeaModel):
         dataset_name: str = None,
         dataset_type: str = None,
         document_handle_config: CreateDatasetRequestDocumentHandleConfig = None,
+        invoke_type: str = None,
         search_dataset_enable: int = None,
         workspace_id: str = None,
     ):
@@ -1187,6 +1188,7 @@ class CreateDatasetRequest(TeaModel):
         self.dataset_name = dataset_name
         self.dataset_type = dataset_type
         self.document_handle_config = document_handle_config
+        self.invoke_type = invoke_type
         self.search_dataset_enable = search_dataset_enable
         # This parameter is required.
         self.workspace_id = workspace_id
@@ -1213,6 +1215,8 @@ class CreateDatasetRequest(TeaModel):
             result['DatasetType'] = self.dataset_type
         if self.document_handle_config is not None:
             result['DocumentHandleConfig'] = self.document_handle_config.to_map()
+        if self.invoke_type is not None:
+            result['InvokeType'] = self.invoke_type
         if self.search_dataset_enable is not None:
             result['SearchDatasetEnable'] = self.search_dataset_enable
         if self.workspace_id is not None:
@@ -1233,6 +1237,8 @@ class CreateDatasetRequest(TeaModel):
         if m.get('DocumentHandleConfig') is not None:
             temp_model = CreateDatasetRequestDocumentHandleConfig()
             self.document_handle_config = temp_model.from_map(m['DocumentHandleConfig'])
+        if m.get('InvokeType') is not None:
+            self.invoke_type = m.get('InvokeType')
         if m.get('SearchDatasetEnable') is not None:
             self.search_dataset_enable = m.get('SearchDatasetEnable')
         if m.get('WorkspaceId') is not None:
@@ -1248,6 +1254,7 @@ class CreateDatasetShrinkRequest(TeaModel):
         dataset_name: str = None,
         dataset_type: str = None,
         document_handle_config_shrink: str = None,
+        invoke_type: str = None,
         search_dataset_enable: int = None,
         workspace_id: str = None,
     ):
@@ -1257,6 +1264,7 @@ class CreateDatasetShrinkRequest(TeaModel):
         self.dataset_name = dataset_name
         self.dataset_type = dataset_type
         self.document_handle_config_shrink = document_handle_config_shrink
+        self.invoke_type = invoke_type
         self.search_dataset_enable = search_dataset_enable
         # This parameter is required.
         self.workspace_id = workspace_id
@@ -1280,6 +1288,8 @@ class CreateDatasetShrinkRequest(TeaModel):
             result['DatasetType'] = self.dataset_type
         if self.document_handle_config_shrink is not None:
             result['DocumentHandleConfig'] = self.document_handle_config_shrink
+        if self.invoke_type is not None:
+            result['InvokeType'] = self.invoke_type
         if self.search_dataset_enable is not None:
             result['SearchDatasetEnable'] = self.search_dataset_enable
         if self.workspace_id is not None:
@@ -1298,6 +1308,8 @@ class CreateDatasetShrinkRequest(TeaModel):
         self.dataset_type = m.get('DatasetType')
         if m.get('DocumentHandleConfig') is not None:
             self.document_handle_config_shrink = m.get('DocumentHandleConfig')
+        if m.get('InvokeType') is not None:
+            self.invoke_type = m.get('InvokeType')
         if m.get('SearchDatasetEnable') is not None:
             self.search_dataset_enable = m.get('SearchDatasetEnable')
         if m.get('WorkspaceId') is not None:
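
The eight hunks above wire InvokeType through CreateDatasetRequest and its shrink variant: a constructor argument, an attribute, and a to_map/from_map pair in each class. A small sketch of the round-trip those TeaModel methods provide; values are placeholders.

# Hedged sketch: the new invoke_type field survives the TeaModel wire round-trip.
from alibabacloud_aimiaobi20230801 import models as ai_miao_bi_20230801_models

req = ai_miao_bi_20230801_models.CreateDatasetRequest(
    dataset_name='my-dataset',
    invoke_type='standard',         # placeholder; allowed values are not listed in this diff
    workspace_id='<workspace-id>',
)
wire = req.to_map()   # only non-None fields are emitted: DatasetName, InvokeType, WorkspaceId
same = ai_miao_bi_20230801_models.CreateDatasetRequest().from_map(wire)
assert same.invoke_type == 'standard'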

@@ -6545,11 +6557,13 @@ class GetDataSourceOrderConfigResponseBodyDataUserConfigDataSourceList(TeaModel)
     def __init__(
         self,
         code: str = None,
+        enable: bool = None,
         name: str = None,
         number: int = None,
         type: str = None,
     ):
         self.code = code
+        self.enable = enable
         self.name = name
         self.number = number
         self.type = type
@@ -6565,6 +6579,8 @@ class GetDataSourceOrderConfigResponseBodyDataUserConfigDataSourceList(TeaModel)
         result = dict()
         if self.code is not None:
             result['Code'] = self.code
+        if self.enable is not None:
+            result['Enable'] = self.enable
         if self.name is not None:
             result['Name'] = self.name
         if self.number is not None:
@@ -6577,6 +6593,8 @@ class GetDataSourceOrderConfigResponseBodyDataUserConfigDataSourceList(TeaModel)
         m = m or dict()
         if m.get('Code') is not None:
             self.code = m.get('Code')
+        if m.get('Enable') is not None:
+            self.enable = m.get('Enable')
         if m.get('Name') is not None:
             self.name = m.get('Name')
         if m.get('Number') is not None:
@@ -6589,8 +6607,10 @@ class GetDataSourceOrderConfigResponseBodyDataUserConfigDataSourceList(TeaModel)
 class GetDataSourceOrderConfigResponseBodyData(TeaModel):
     def __init__(
         self,
+        total_doc_size: int = None,
         user_config_data_source_list: List[GetDataSourceOrderConfigResponseBodyDataUserConfigDataSourceList] = None,
     ):
+        self.total_doc_size = total_doc_size
         self.user_config_data_source_list = user_config_data_source_list
 
     def validate(self):
@@ -6605,6 +6625,8 @@ class GetDataSourceOrderConfigResponseBodyData(TeaModel):
             return _map
 
         result = dict()
+        if self.total_doc_size is not None:
+            result['TotalDocSize'] = self.total_doc_size
         result['UserConfigDataSourceList'] = []
         if self.user_config_data_source_list is not None:
             for k in self.user_config_data_source_list:
@@ -6613,6 +6635,8 @@ class GetDataSourceOrderConfigResponseBodyData(TeaModel):
 
     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('TotalDocSize') is not None:
+            self.total_doc_size = m.get('TotalDocSize')
         self.user_config_data_source_list = []
         if m.get('UserConfigDataSourceList') is not None:
             for k in m.get('UserConfigDataSourceList'):
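
These hunks add an Enable flag to each data-source entry and a TotalDocSize counter to the GetDataSourceOrderConfig response data. A hedged sketch of reading the new fields; the plain get_data_source_order_config wrapper, its request model, and the response attribute names are assumed to follow the generated naming shown above.

# Hedged sketch: reading the new Enable / TotalDocSize response fields.
from alibabacloud_aimiaobi20230801 import models as ai_miao_bi_20230801_models

request = ai_miao_bi_20230801_models.GetDataSourceOrderConfigRequest()  # required fields omitted in this sketch
response = client.get_data_source_order_config(request)                # `client` as in the earlier sketches
data = response.body.data
print('total doc size:', data.total_doc_size)                          # new in 1.22.0
for source in data.user_config_data_source_list or []:
    print(source.name, source.type, 'enabled:', source.enable)         # `enable` is new in 1.22.0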

@@ -31020,6 +31044,7 @@ class RunSearchGenerationRequest(TeaModel):
         self,
         agent_context: RunSearchGenerationRequestAgentContext = None,
         chat_config: RunSearchGenerationRequestChatConfig = None,
+        model_id: str = None,
         original_session_id: str = None,
         prompt: str = None,
         task_id: str = None,
@@ -31027,6 +31052,7 @@ class RunSearchGenerationRequest(TeaModel):
     ):
         self.agent_context = agent_context
         self.chat_config = chat_config
+        self.model_id = model_id
         self.original_session_id = original_session_id
         self.prompt = prompt
         self.task_id = task_id
@@ -31049,6 +31075,8 @@ class RunSearchGenerationRequest(TeaModel):
             result['AgentContext'] = self.agent_context.to_map()
         if self.chat_config is not None:
             result['ChatConfig'] = self.chat_config.to_map()
+        if self.model_id is not None:
+            result['ModelId'] = self.model_id
         if self.original_session_id is not None:
             result['OriginalSessionId'] = self.original_session_id
         if self.prompt is not None:
@@ -31067,6 +31095,8 @@ class RunSearchGenerationRequest(TeaModel):
         if m.get('ChatConfig') is not None:
             temp_model = RunSearchGenerationRequestChatConfig()
             self.chat_config = temp_model.from_map(m['ChatConfig'])
+        if m.get('ModelId') is not None:
+            self.model_id = m.get('ModelId')
         if m.get('OriginalSessionId') is not None:
             self.original_session_id = m.get('OriginalSessionId')
         if m.get('Prompt') is not None:
@@ -31083,6 +31113,7 @@ class RunSearchGenerationShrinkRequest(TeaModel):
         self,
         agent_context_shrink: str = None,
         chat_config_shrink: str = None,
+        model_id: str = None,
         original_session_id: str = None,
         prompt: str = None,
         task_id: str = None,
@@ -31090,6 +31121,7 @@ class RunSearchGenerationShrinkRequest(TeaModel):
     ):
         self.agent_context_shrink = agent_context_shrink
         self.chat_config_shrink = chat_config_shrink
+        self.model_id = model_id
         self.original_session_id = original_session_id
         self.prompt = prompt
         self.task_id = task_id
@@ -31109,6 +31141,8 @@ class RunSearchGenerationShrinkRequest(TeaModel):
             result['AgentContext'] = self.agent_context_shrink
         if self.chat_config_shrink is not None:
             result['ChatConfig'] = self.chat_config_shrink
+        if self.model_id is not None:
+            result['ModelId'] = self.model_id
         if self.original_session_id is not None:
             result['OriginalSessionId'] = self.original_session_id
         if self.prompt is not None:
@@ -31125,6 +31159,8 @@ class RunSearchGenerationShrinkRequest(TeaModel):
         self.agent_context_shrink = m.get('AgentContext')
         if m.get('ChatConfig') is not None:
             self.chat_config_shrink = m.get('ChatConfig')
+        if m.get('ModelId') is not None:
+            self.model_id = m.get('ModelId')
         if m.get('OriginalSessionId') is not None:
             self.original_session_id = m.get('OriginalSessionId')
         if m.get('Prompt') is not None:

@@ -32195,11 +32231,13 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         self,
         generate_finished: bool = None,
         generate_level: str = None,
+        reason_text_generate: str = None,
         search_result: List[RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentExcerptResultSearchResult] = None,
         text_generate: str = None,
     ):
         self.generate_finished = generate_finished
         self.generate_level = generate_level
+        self.reason_text_generate = reason_text_generate
         self.search_result = search_result
         self.text_generate = text_generate
 
@@ -32219,6 +32257,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
             result['GenerateFinished'] = self.generate_finished
         if self.generate_level is not None:
             result['GenerateLevel'] = self.generate_level
+        if self.reason_text_generate is not None:
+            result['ReasonTextGenerate'] = self.reason_text_generate
         result['SearchResult'] = []
         if self.search_result is not None:
             for k in self.search_result:
@@ -32233,6 +32273,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         self.generate_finished = m.get('GenerateFinished')
         if m.get('GenerateLevel') is not None:
             self.generate_level = m.get('GenerateLevel')
+        if m.get('ReasonTextGenerate') is not None:
+            self.reason_text_generate = m.get('ReasonTextGenerate')
         self.search_result = []
         if m.get('SearchResult') is not None:
             for k in m.get('SearchResult'):
@@ -33319,6 +33361,7 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         generate_level: str = None,
         generate_traceability: RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTextGenerateResultGenerateTraceability = None,
         multimodal_search_result_list: List[RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTextGenerateResultMultimodalSearchResultList] = None,
+        reason_text_generate: str = None,
         reference_list: List[RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTextGenerateResultReferenceList] = None,
         text_generate: str = None,
         text_generate_multimodal_media_list: List[RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTextGenerateResultTextGenerateMultimodalMediaList] = None,
@@ -33327,6 +33370,7 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         self.generate_level = generate_level
         self.generate_traceability = generate_traceability
         self.multimodal_search_result_list = multimodal_search_result_list
+        self.reason_text_generate = reason_text_generate
         self.reference_list = reference_list
         self.text_generate = text_generate
         self.text_generate_multimodal_media_list = text_generate_multimodal_media_list
@@ -33363,6 +33407,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         if self.multimodal_search_result_list is not None:
             for k in self.multimodal_search_result_list:
                 result['MultimodalSearchResultList'].append(k.to_map() if k else None)
+        if self.reason_text_generate is not None:
+            result['ReasonTextGenerate'] = self.reason_text_generate
         result['ReferenceList'] = []
         if self.reference_list is not None:
             for k in self.reference_list:
@@ -33389,6 +33435,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
             for k in m.get('MultimodalSearchResultList'):
                 temp_model = RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTextGenerateResultMultimodalSearchResultList()
                 self.multimodal_search_result_list.append(temp_model.from_map(k))
+        if m.get('ReasonTextGenerate') is not None:
+            self.reason_text_generate = m.get('ReasonTextGenerate')
         self.reference_list = []
         if m.get('ReferenceList') is not None:
             for k in m.get('ReferenceList'):
@@ -34174,6 +34222,7 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         generate_finished: bool = None,
         generate_traceability: RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTimelineResultGenerateTraceability = None,
         multimodal_search_result_list: List[RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTimelineResultMultimodalSearchResultList] = None,
+        reason_text_generate: str = None,
         reference_list: List[RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTimelineResultReferenceList] = None,
         text_generate: str = None,
         text_generate_multimodal_media_list: List[RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTimelineResultTextGenerateMultimodalMediaList] = None,
@@ -34181,6 +34230,7 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         self.generate_finished = generate_finished
         self.generate_traceability = generate_traceability
         self.multimodal_search_result_list = multimodal_search_result_list
+        self.reason_text_generate = reason_text_generate
         self.reference_list = reference_list
         self.text_generate = text_generate
         self.text_generate_multimodal_media_list = text_generate_multimodal_media_list
@@ -34215,6 +34265,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         if self.multimodal_search_result_list is not None:
             for k in self.multimodal_search_result_list:
                 result['MultimodalSearchResultList'].append(k.to_map() if k else None)
+        if self.reason_text_generate is not None:
+            result['ReasonTextGenerate'] = self.reason_text_generate
         result['ReferenceList'] = []
         if self.reference_list is not None:
             for k in self.reference_list:
@@ -34239,6 +34291,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
             for k in m.get('MultimodalSearchResultList'):
                 temp_model = RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContentTimelineResultMultimodalSearchResultList()
                 self.multimodal_search_result_list.append(temp_model.from_map(k))
+        if m.get('ReasonTextGenerate') is not None:
+            self.reason_text_generate = m.get('ReasonTextGenerate')
         self.reference_list = []
         if m.get('ReferenceList') is not None:
             for k in m.get('ReferenceList'):
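
Three generated-content result models (the excerpt, text-generate, and timeline variants) gain a ReasonTextGenerate field alongside the existing TextGenerate field, so the model's reasoning text is returned next to the generated text. A hedged sketch of reading it; the access path mirrors the generated class names (ResponseBody -> Payload -> Output -> AgentContext -> BizContext -> GeneratedContent) and is an assumption, since any link in that chain may be absent on a given response.

# Hedged sketch: reading the new reasoning text next to the generated text.
# `response` is a RunSearchGenerationResponse from the earlier sketch; intermediate
# attributes may be None and should be checked in real code.
biz_context = response.body.payload.output.agent_context.biz_context
text_result = biz_context.generated_content.text_generate_result  # assumed attribute name
if text_result is not None:
    print('answer:', text_result.text_generate)
    print('reasoning:', text_result.reason_text_generate)  # new in 1.22.0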

@@ -34545,30 +34599,87 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGenerate
         return self
 
 
+class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextTokenCalculate(TeaModel):
+    def __init__(
+        self,
+        first_token_time: float = None,
+        output_avg_time: float = None,
+        search_time: float = None,
+        time: float = None,
+        total_tokens: int = None,
+    ):
+        self.first_token_time = first_token_time
+        self.output_avg_time = output_avg_time
+        self.search_time = search_time
+        self.time = time
+        self.total_tokens = total_tokens
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.first_token_time is not None:
+            result['FirstTokenTime'] = self.first_token_time
+        if self.output_avg_time is not None:
+            result['OutputAvgTime'] = self.output_avg_time
+        if self.search_time is not None:
+            result['SearchTime'] = self.search_time
+        if self.time is not None:
+            result['Time'] = self.time
+        if self.total_tokens is not None:
+            result['TotalTokens'] = self.total_tokens
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('FirstTokenTime') is not None:
+            self.first_token_time = m.get('FirstTokenTime')
+        if m.get('OutputAvgTime') is not None:
+            self.output_avg_time = m.get('OutputAvgTime')
+        if m.get('SearchTime') is not None:
+            self.search_time = m.get('SearchTime')
+        if m.get('Time') is not None:
+            self.time = m.get('Time')
+        if m.get('TotalTokens') is not None:
+            self.total_tokens = m.get('TotalTokens')
+        return self
+
+
 class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContext(TeaModel):
     def __init__(
         self,
         current_step: str = None,
         generated_content: RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContent = None,
+        model_id: str = None,
         next_step: str = None,
         recommend_search_query_list: List[str] = None,
         search_keywords: List[str] = None,
         search_query_list: List[str] = None,
         supplement_data_type: str = None,
         supplement_enable: bool = None,
+        token_calculate: RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextTokenCalculate = None,
     ):
         self.current_step = current_step
         self.generated_content = generated_content
+        self.model_id = model_id
         self.next_step = next_step
         self.recommend_search_query_list = recommend_search_query_list
         self.search_keywords = search_keywords
         self.search_query_list = search_query_list
         self.supplement_data_type = supplement_data_type
         self.supplement_enable = supplement_enable
+        self.token_calculate = token_calculate
 
     def validate(self):
         if self.generated_content:
             self.generated_content.validate()
+        if self.token_calculate:
+            self.token_calculate.validate()
 
     def to_map(self):
         _map = super().to_map()
@@ -34580,6 +34691,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContext(TeaMode
             result['CurrentStep'] = self.current_step
         if self.generated_content is not None:
             result['GeneratedContent'] = self.generated_content.to_map()
+        if self.model_id is not None:
+            result['ModelId'] = self.model_id
         if self.next_step is not None:
             result['NextStep'] = self.next_step
         if self.recommend_search_query_list is not None:
@@ -34592,6 +34705,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContext(TeaMode
             result['SupplementDataType'] = self.supplement_data_type
         if self.supplement_enable is not None:
             result['SupplementEnable'] = self.supplement_enable
+        if self.token_calculate is not None:
+            result['TokenCalculate'] = self.token_calculate.to_map()
         return result
 
     def from_map(self, m: dict = None):
@@ -34601,6 +34716,8 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContext(TeaMode
         if m.get('GeneratedContent') is not None:
             temp_model = RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextGeneratedContent()
             self.generated_content = temp_model.from_map(m['GeneratedContent'])
+        if m.get('ModelId') is not None:
+            self.model_id = m.get('ModelId')
         if m.get('NextStep') is not None:
             self.next_step = m.get('NextStep')
         if m.get('RecommendSearchQueryList') is not None:
@@ -34613,6 +34730,9 @@ class RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContext(TeaMode
         self.supplement_data_type = m.get('SupplementDataType')
         if m.get('SupplementEnable') is not None:
             self.supplement_enable = m.get('SupplementEnable')
+        if m.get('TokenCalculate') is not None:
+            temp_model = RunSearchGenerationResponseBodyPayloadOutputAgentContextBizContextTokenCalculate()
+            self.token_calculate = temp_model.from_map(m['TokenCalculate'])
         return self
 
 
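
The new TokenCalculate model and the extra ModelId / TokenCalculate fields on BizContext expose per-request latency and token statistics: first-token time, average output time, search time, total time, and total tokens. A hedged sketch of reading them once a response has been parsed, reusing the biz_context object from the previous sketch.

# Hedged sketch: reading the new token/latency statistics from the BizContext.
# TokenCalculate may be absent (None) on responses that do not report statistics.
stats = biz_context.token_calculate
if stats is not None:
    print('model:', biz_context.model_id)              # ModelId echoed back, new in 1.22.0
    print('first token time:', stats.first_token_time)
    print('avg output time:', stats.output_avg_time)
    print('search time:', stats.search_time)
    print('total time:', stats.time)
    print('total tokens:', stats.total_tokens)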

@@ -39829,6 +39949,230 @@ class SaveMaterialDocumentResponse(TeaModel):
         return self
 
 
+class SaveStyleLearningResultRequest(TeaModel):
+    def __init__(
+        self,
+        agent_key: str = None,
+        aigc_result: str = None,
+        custom_text_id_list: List[int] = None,
+        material_id_list: List[int] = None,
+        rewrite_result: str = None,
+        style_name: str = None,
+        task_id: str = None,
+    ):
+        self.agent_key = agent_key
+        self.aigc_result = aigc_result
+        self.custom_text_id_list = custom_text_id_list
+        self.material_id_list = material_id_list
+        self.rewrite_result = rewrite_result
+        self.style_name = style_name
+        self.task_id = task_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.agent_key is not None:
+            result['AgentKey'] = self.agent_key
+        if self.aigc_result is not None:
+            result['AigcResult'] = self.aigc_result
+        if self.custom_text_id_list is not None:
+            result['CustomTextIdList'] = self.custom_text_id_list
+        if self.material_id_list is not None:
+            result['MaterialIdList'] = self.material_id_list
+        if self.rewrite_result is not None:
+            result['RewriteResult'] = self.rewrite_result
+        if self.style_name is not None:
+            result['StyleName'] = self.style_name
+        if self.task_id is not None:
+            result['TaskId'] = self.task_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('AgentKey') is not None:
+            self.agent_key = m.get('AgentKey')
+        if m.get('AigcResult') is not None:
+            self.aigc_result = m.get('AigcResult')
+        if m.get('CustomTextIdList') is not None:
+            self.custom_text_id_list = m.get('CustomTextIdList')
+        if m.get('MaterialIdList') is not None:
+            self.material_id_list = m.get('MaterialIdList')
+        if m.get('RewriteResult') is not None:
+            self.rewrite_result = m.get('RewriteResult')
+        if m.get('StyleName') is not None:
+            self.style_name = m.get('StyleName')
+        if m.get('TaskId') is not None:
+            self.task_id = m.get('TaskId')
+        return self
+
+
+class SaveStyleLearningResultShrinkRequest(TeaModel):
+    def __init__(
+        self,
+        agent_key: str = None,
+        aigc_result: str = None,
+        custom_text_id_list_shrink: str = None,
+        material_id_list_shrink: str = None,
+        rewrite_result: str = None,
+        style_name: str = None,
+        task_id: str = None,
+    ):
+        self.agent_key = agent_key
+        self.aigc_result = aigc_result
+        self.custom_text_id_list_shrink = custom_text_id_list_shrink
+        self.material_id_list_shrink = material_id_list_shrink
+        self.rewrite_result = rewrite_result
+        self.style_name = style_name
+        self.task_id = task_id
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.agent_key is not None:
+            result['AgentKey'] = self.agent_key
+        if self.aigc_result is not None:
+            result['AigcResult'] = self.aigc_result
+        if self.custom_text_id_list_shrink is not None:
+            result['CustomTextIdList'] = self.custom_text_id_list_shrink
+        if self.material_id_list_shrink is not None:
+            result['MaterialIdList'] = self.material_id_list_shrink
+        if self.rewrite_result is not None:
+            result['RewriteResult'] = self.rewrite_result
+        if self.style_name is not None:
+            result['StyleName'] = self.style_name
+        if self.task_id is not None:
+            result['TaskId'] = self.task_id
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('AgentKey') is not None:
+            self.agent_key = m.get('AgentKey')
+        if m.get('AigcResult') is not None:
+            self.aigc_result = m.get('AigcResult')
+        if m.get('CustomTextIdList') is not None:
+            self.custom_text_id_list_shrink = m.get('CustomTextIdList')
+        if m.get('MaterialIdList') is not None:
+            self.material_id_list_shrink = m.get('MaterialIdList')
+        if m.get('RewriteResult') is not None:
+            self.rewrite_result = m.get('RewriteResult')
+        if m.get('StyleName') is not None:
+            self.style_name = m.get('StyleName')
+        if m.get('TaskId') is not None:
+            self.task_id = m.get('TaskId')
+        return self
+
+
+class SaveStyleLearningResultResponseBody(TeaModel):
+    def __init__(
+        self,
+        code: str = None,
+        data: bool = None,
+        http_status_code: int = None,
+        message: str = None,
+        request_id: str = None,
+        success: bool = None,
+    ):
+        self.code = code
+        self.data = data
+        self.http_status_code = http_status_code
+        self.message = message
+        self.request_id = request_id
+        self.success = success
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.code is not None:
+            result['Code'] = self.code
+        if self.data is not None:
+            result['Data'] = self.data
+        if self.http_status_code is not None:
+            result['HttpStatusCode'] = self.http_status_code
+        if self.message is not None:
+            result['Message'] = self.message
+        if self.request_id is not None:
+            result['RequestId'] = self.request_id
+        if self.success is not None:
+            result['Success'] = self.success
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('Code') is not None:
+            self.code = m.get('Code')
+        if m.get('Data') is not None:
+            self.data = m.get('Data')
+        if m.get('HttpStatusCode') is not None:
+            self.http_status_code = m.get('HttpStatusCode')
+        if m.get('Message') is not None:
+            self.message = m.get('Message')
+        if m.get('RequestId') is not None:
+            self.request_id = m.get('RequestId')
+        if m.get('Success') is not None:
+            self.success = m.get('Success')
+        return self
+
+
+class SaveStyleLearningResultResponse(TeaModel):
+    def __init__(
+        self,
+        headers: Dict[str, str] = None,
+        status_code: int = None,
+        body: SaveStyleLearningResultResponseBody = None,
+    ):
+        self.headers = headers
+        self.status_code = status_code
+        self.body = body
+
+    def validate(self):
+        if self.body:
+            self.body.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.headers is not None:
+            result['headers'] = self.headers
+        if self.status_code is not None:
+            result['statusCode'] = self.status_code
+        if self.body is not None:
+            result['body'] = self.body.to_map()
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('headers') is not None:
+            self.headers = m.get('headers')
+        if m.get('statusCode') is not None:
+            self.status_code = m.get('statusCode')
+        if m.get('body') is not None:
+            temp_model = SaveStyleLearningResultResponseBody()
+            self.body = temp_model.from_map(m['body'])
+        return self
+
+
 class SearchDatasetDocumentsRequest(TeaModel):
     def __init__(
         self,

{alibabacloud_aimiaobi20230801-1.20.6 → alibabacloud_aimiaobi20230801-1.22.0}/setup.py
@@ -24,7 +24,7 @@ from setuptools import setup, find_packages
 """
 setup module for alibabacloud_aimiaobi20230801.
 
-Created on
+Created on 07/05/2025
 
 @author: Alibaba Cloud SDK
 """
@@ -38,7 +38,7 @@ URL = "https://github.com/aliyun/alibabacloud-python-sdk"
 VERSION = __import__(PACKAGE).__version__
 REQUIRES = [
     "alibabacloud_tea_util>=0.3.13, <1.0.0",
-    "alibabacloud_tea_openapi>=0.3.
+    "alibabacloud_tea_openapi>=0.3.15, <1.0.0",
     "alibabacloud_openapi_util>=0.2.2, <1.0.0",
     "alibabacloud_endpoint_util>=0.0.3, <1.0.0"
 ]

alibabacloud_aimiaobi20230801-1.20.6/alibabacloud_aimiaobi20230801/__init__.py
@@ -1 +0,0 @@
-__version__ = '1.20.6'