alibabacloud-aimiaobi20230801 1.30.1__tar.gz → 1.30.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/ChangeLog.md +17 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/PKG-INFO +1 -1
- alibabacloud_aimiaobi20230801-1.30.3/alibabacloud_aimiaobi20230801/__init__.py +1 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/alibabacloud_aimiaobi20230801/client.py +48 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/alibabacloud_aimiaobi20230801/models.py +108 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/alibabacloud_aimiaobi20230801.egg-info/PKG-INFO +1 -1
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/setup.py +1 -1
- alibabacloud_aimiaobi20230801-1.30.1/alibabacloud_aimiaobi20230801/__init__.py +0 -1
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/LICENSE +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/MANIFEST.in +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/README-CN.md +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/README.md +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/alibabacloud_aimiaobi20230801.egg-info/SOURCES.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/alibabacloud_aimiaobi20230801.egg-info/dependency_links.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/alibabacloud_aimiaobi20230801.egg-info/requires.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/alibabacloud_aimiaobi20230801.egg-info/top_level.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.30.1 → alibabacloud_aimiaobi20230801-1.30.3}/setup.cfg +0 -0
--- alibabacloud_aimiaobi20230801-1.30.1/ChangeLog.md
+++ alibabacloud_aimiaobi20230801-1.30.3/ChangeLog.md
@@ -1,3 +1,20 @@
+2025-06-25 Version: 1.30.2
+- Update API RunCommentGeneration: add request parameters ModelId.
+- Update API RunDocBrainmap: add request parameters ModelName.
+- Update API RunDocIntroduction: add request parameters ModelName.
+- Update API RunDocQa: add request parameters ModelName.
+- Update API RunDocSmartCard: add request parameters ModelName.
+- Update API RunDocTranslation: add request parameters ModelName.
+- Update API RunDocWashing: add request parameters ModelId.
+- Update API RunGenerateQuestions: add request parameters ModelName.
+- Update API RunHotword: add request parameters ModelName.
+- Update API RunMultiDocIntroduction: add request parameters ModelName.
+
+
+2025-06-23 Version: 1.30.1
+- Update API RunSearchGeneration: add request parameters ChatConfig.ExcludeGenerateOptions.
+
+
 2025-06-18 Version: 1.30.0
 - Support API AsyncCreateClipsTask.
 - Support API AsyncCreateClipsTimeLine.
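All of the 1.30.2 entries are additive: each listed API gains an optional model-selection parameter (ModelId or ModelName) on its request model. Below is a minimal sketch of how a caller might pass the new ModelId to RunCommentGeneration with the 1.30.3 SDK; the endpoint, credentials, model ID value, and the `run_comment_generation` method name are assumptions based on the SDK's usual conventions, not something this diff shows, and the other required request fields are omitted.

```python
# Hedged sketch only: endpoint, credentials, and model ID are placeholders, and the
# convenience method name run_comment_generation is assumed from the API name.
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_aimiaobi20230801.client import Client
from alibabacloud_aimiaobi20230801 import models as aimiaobi_models

config = open_api_models.Config(
    access_key_id='<your-access-key-id>',
    access_key_secret='<your-access-key-secret>',
    endpoint='aimiaobi.cn-beijing.aliyuncs.com',  # placeholder endpoint
)
client = Client(config)

request = aimiaobi_models.RunCommentGenerationRequest(
    model_id='<custom-model-id>',  # new optional parameter introduced in 1.30.2
    # ... required fields (LengthRange, NumComments, Sentiment, SessionId, ...) unchanged
)
response = client.run_comment_generation(request)
```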
--- /dev/null
+++ alibabacloud_aimiaobi20230801-1.30.3/alibabacloud_aimiaobi20230801/__init__.py
@@ -0,0 +1 @@
+__version__ = '1.30.3'
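The only change to the package root is the version constant, so a quick sanity check after upgrading is:

```python
# __version__ comes straight from the updated __init__.py shown above.
import alibabacloud_aimiaobi20230801 as aimiaobi

assert aimiaobi.__version__ == '1.30.3'
```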
--- alibabacloud_aimiaobi20230801-1.30.1/alibabacloud_aimiaobi20230801/client.py
+++ alibabacloud_aimiaobi20230801-1.30.3/alibabacloud_aimiaobi20230801/client.py
@@ -11791,6 +11791,8 @@ class Client(OpenApiClient):
             body['Length'] = request.length
         if not UtilClient.is_unset(request.length_range_shrink):
             body['LengthRange'] = request.length_range_shrink
+        if not UtilClient.is_unset(request.model_id):
+            body['ModelId'] = request.model_id
         if not UtilClient.is_unset(request.num_comments):
             body['NumComments'] = request.num_comments
         if not UtilClient.is_unset(request.sentiment_shrink):
@@ -11854,6 +11856,8 @@ class Client(OpenApiClient):
             body['Length'] = request.length
         if not UtilClient.is_unset(request.length_range_shrink):
             body['LengthRange'] = request.length_range_shrink
+        if not UtilClient.is_unset(request.model_id):
+            body['ModelId'] = request.model_id
         if not UtilClient.is_unset(request.num_comments):
             body['NumComments'] = request.num_comments
         if not UtilClient.is_unset(request.sentiment_shrink):
@@ -12283,6 +12287,8 @@ class Client(OpenApiClient):
             body['CleanCache'] = request.clean_cache
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.node_number):
             body['NodeNumber'] = request.node_number
         if not UtilClient.is_unset(request.prompt):
@@ -12332,6 +12338,8 @@ class Client(OpenApiClient):
             body['CleanCache'] = request.clean_cache
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.node_number):
             body['NodeNumber'] = request.node_number
         if not UtilClient.is_unset(request.prompt):
@@ -12411,6 +12419,8 @@ class Client(OpenApiClient):
             body['IntroductionPrompt'] = request.introduction_prompt
         if not UtilClient.is_unset(request.key_point_prompt):
             body['KeyPointPrompt'] = request.key_point_prompt
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.session_id):
             body['SessionId'] = request.session_id
         if not UtilClient.is_unset(request.summary_prompt):
@@ -12460,6 +12470,8 @@ class Client(OpenApiClient):
             body['IntroductionPrompt'] = request.introduction_prompt
         if not UtilClient.is_unset(request.key_point_prompt):
             body['KeyPointPrompt'] = request.key_point_prompt
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.session_id):
             body['SessionId'] = request.session_id
         if not UtilClient.is_unset(request.summary_prompt):
@@ -12541,6 +12553,8 @@ class Client(OpenApiClient):
             body['ConversationContexts'] = request.conversation_contexts_shrink
         if not UtilClient.is_unset(request.doc_ids_shrink):
             body['DocIds'] = request.doc_ids_shrink
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.query):
             body['Query'] = request.query
         if not UtilClient.is_unset(request.reference_content):
@@ -12598,6 +12612,8 @@ class Client(OpenApiClient):
             body['ConversationContexts'] = request.conversation_contexts_shrink
         if not UtilClient.is_unset(request.doc_ids_shrink):
             body['DocIds'] = request.doc_ids_shrink
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.query):
             body['Query'] = request.query
         if not UtilClient.is_unset(request.reference_content):
@@ -12669,6 +12685,8 @@ class Client(OpenApiClient):
         body = {}
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.prompt):
             body['Prompt'] = request.prompt
         if not UtilClient.is_unset(request.session_id):
@@ -12710,6 +12728,8 @@ class Client(OpenApiClient):
         body = {}
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.prompt):
             body['Prompt'] = request.prompt
         if not UtilClient.is_unset(request.session_id):
@@ -12899,6 +12919,8 @@ class Client(OpenApiClient):
             body['CleanCache'] = request.clean_cache
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.recommend_content):
             body['RecommendContent'] = request.recommend_content
         if not UtilClient.is_unset(request.session_id):
@@ -12944,6 +12966,8 @@ class Client(OpenApiClient):
             body['CleanCache'] = request.clean_cache
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.recommend_content):
             body['RecommendContent'] = request.recommend_content
         if not UtilClient.is_unset(request.session_id):
@@ -13011,6 +13035,8 @@ class Client(OpenApiClient):
         """
         UtilClient.validate_model(request)
         body = {}
+        if not UtilClient.is_unset(request.model_id):
+            body['ModelId'] = request.model_id
         if not UtilClient.is_unset(request.prompt):
             body['Prompt'] = request.prompt
         if not UtilClient.is_unset(request.reference_content):
@@ -13060,6 +13086,8 @@ class Client(OpenApiClient):
         """
         UtilClient.validate_model(request)
         body = {}
+        if not UtilClient.is_unset(request.model_id):
+            body['ModelId'] = request.model_id
         if not UtilClient.is_unset(request.prompt):
             body['Prompt'] = request.prompt
         if not UtilClient.is_unset(request.reference_content):
@@ -13237,6 +13265,8 @@ class Client(OpenApiClient):
         body = {}
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.reference_content):
             body['ReferenceContent'] = request.reference_content
         if not UtilClient.is_unset(request.session_id):
@@ -13278,6 +13308,8 @@ class Client(OpenApiClient):
         body = {}
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.reference_content):
             body['ReferenceContent'] = request.reference_content
         if not UtilClient.is_unset(request.session_id):
@@ -13345,6 +13377,8 @@ class Client(OpenApiClient):
         body = {}
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.prompt):
             body['Prompt'] = request.prompt
         if not UtilClient.is_unset(request.reference_content):
@@ -13388,6 +13422,8 @@ class Client(OpenApiClient):
         body = {}
         if not UtilClient.is_unset(request.doc_id):
             body['DocId'] = request.doc_id
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.prompt):
             body['Prompt'] = request.prompt
         if not UtilClient.is_unset(request.reference_content):
@@ -13575,6 +13611,8 @@ class Client(OpenApiClient):
             body['DocIds'] = request.doc_ids_shrink
         if not UtilClient.is_unset(request.key_point_prompt):
             body['KeyPointPrompt'] = request.key_point_prompt
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.session_id):
             body['SessionId'] = request.session_id
         if not UtilClient.is_unset(request.summary_prompt):
@@ -13622,6 +13660,8 @@ class Client(OpenApiClient):
             body['DocIds'] = request.doc_ids_shrink
         if not UtilClient.is_unset(request.key_point_prompt):
             body['KeyPointPrompt'] = request.key_point_prompt
+        if not UtilClient.is_unset(request.model_name):
+            body['ModelName'] = request.model_name
         if not UtilClient.is_unset(request.session_id):
             body['SessionId'] = request.session_id
         if not UtilClient.is_unset(request.summary_prompt):
@@ -16208,9 +16248,13 @@ class Client(OpenApiClient):
         UtilClient.validate_model(tmp_req)
         request = ai_miao_bi_20230801_models.SubmitCustomSourceTopicAnalysisShrinkRequest()
         OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.analysis_types):
+            request.analysis_types_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.analysis_types, 'AnalysisTypes', 'json')
         if not UtilClient.is_unset(tmp_req.news):
            request.news_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.news, 'News', 'json')
         body = {}
+        if not UtilClient.is_unset(request.analysis_types_shrink):
+            body['AnalysisTypes'] = request.analysis_types_shrink
         if not UtilClient.is_unset(request.file_type):
             body['FileType'] = request.file_type
         if not UtilClient.is_unset(request.file_url):
@@ -16255,9 +16299,13 @@ class Client(OpenApiClient):
         UtilClient.validate_model(tmp_req)
         request = ai_miao_bi_20230801_models.SubmitCustomSourceTopicAnalysisShrinkRequest()
         OpenApiUtilClient.convert(tmp_req, request)
+        if not UtilClient.is_unset(tmp_req.analysis_types):
+            request.analysis_types_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.analysis_types, 'AnalysisTypes', 'json')
         if not UtilClient.is_unset(tmp_req.news):
            request.news_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.news, 'News', 'json')
         body = {}
+        if not UtilClient.is_unset(request.analysis_types_shrink):
+            body['AnalysisTypes'] = request.analysis_types_shrink
         if not UtilClient.is_unset(request.file_type):
             body['FileType'] = request.file_type
         if not UtilClient.is_unset(request.file_url):
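The SubmitCustomSourceTopicAnalysis hunks above follow the SDK's usual "shrink" pattern: the new list-valued AnalysisTypes parameter is serialized to a string before it is placed in the request body. A rough illustration of the resulting wire value, assuming the 'json' style is plain JSON encoding and using made-up analysis type names:

```python
# Illustration only: values are hypothetical; the 'json' shrink style is assumed to be
# equivalent to JSON-encoding the list.
import json

analysis_types = ['TOPIC', 'SENTIMENT']     # hypothetical values, not from this diff
body_value = json.dumps(analysis_types)     # what the 'AnalysisTypes' body field carries
print(body_value)                           # '["TOPIC", "SENTIMENT"]'
```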
--- alibabacloud_aimiaobi20230801-1.30.1/alibabacloud_aimiaobi20230801/models.py
+++ alibabacloud_aimiaobi20230801-1.30.3/alibabacloud_aimiaobi20230801/models.py
@@ -8799,6 +8799,8 @@ class GetCustomSourceTopicAnalysisTaskResponseBodyData(TeaModel):
         max_clustered_topic_news_size: int = None,
         parsed_news_size: int = None,
         status: str = None,
+        rt: int = None,
+        usages: Dict[str, int] = None,
     ):
         self.cluster_count = cluster_count
         self.cluster_results = cluster_results
@@ -8806,6 +8808,8 @@ class GetCustomSourceTopicAnalysisTaskResponseBodyData(TeaModel):
         self.max_clustered_topic_news_size = max_clustered_topic_news_size
         self.parsed_news_size = parsed_news_size
         self.status = status
+        self.rt = rt
+        self.usages = usages

     def validate(self):
         if self.cluster_results:
@@ -8833,6 +8837,10 @@ class GetCustomSourceTopicAnalysisTaskResponseBodyData(TeaModel):
             result['ParsedNewsSize'] = self.parsed_news_size
         if self.status is not None:
             result['Status'] = self.status
+        if self.rt is not None:
+            result['rt'] = self.rt
+        if self.usages is not None:
+            result['usages'] = self.usages
         return result

     def from_map(self, m: dict = None):
@@ -8852,6 +8860,10 @@ class GetCustomSourceTopicAnalysisTaskResponseBodyData(TeaModel):
             self.parsed_news_size = m.get('ParsedNewsSize')
         if m.get('Status') is not None:
             self.status = m.get('Status')
+        if m.get('rt') is not None:
+            self.rt = m.get('rt')
+        if m.get('usages') is not None:
+            self.usages = m.get('usages')
         return self

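Both new response fields are optional and keyed in lowercase ('rt', 'usages') in the raw payload, so existing callers are unaffected. A small sketch of reading them back through from_map, using an invented payload (the field semantics are not documented in this diff):

```python
# Sketch only: the payload below is made up for illustration.
from alibabacloud_aimiaobi20230801 import models as aimiaobi_models

data = aimiaobi_models.GetCustomSourceTopicAnalysisTaskResponseBodyData().from_map({
    'Status': 'SUCCESS',                                     # hypothetical status value
    'rt': 1250,                                              # new in 1.30.3; meaning not stated in this diff
    'usages': {'input_tokens': 800, 'output_tokens': 150},   # hypothetical keys
})
print(data.rt, data.usages)
```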
@@ -30542,6 +30554,7 @@ class RunCommentGenerationRequest(TeaModel):
         extra_info: str = None,
         length: str = None,
         length_range: Dict[str, Any] = None,
+        model_id: str = None,
         num_comments: str = None,
         sentiment: Dict[str, Any] = None,
         session_id: str = None,
@@ -30555,6 +30568,7 @@ class RunCommentGenerationRequest(TeaModel):
         self.length = length
         # This parameter is required.
         self.length_range = length_range
+        self.model_id = model_id
         # This parameter is required.
         self.num_comments = num_comments
         # This parameter is required.
@@ -30585,6 +30599,8 @@ class RunCommentGenerationRequest(TeaModel):
             result['Length'] = self.length
         if self.length_range is not None:
             result['LengthRange'] = self.length_range
+        if self.model_id is not None:
+            result['ModelId'] = self.model_id
         if self.num_comments is not None:
             result['NumComments'] = self.num_comments
         if self.sentiment is not None:
@@ -30611,6 +30627,8 @@ class RunCommentGenerationRequest(TeaModel):
             self.length = m.get('Length')
         if m.get('LengthRange') is not None:
             self.length_range = m.get('LengthRange')
+        if m.get('ModelId') is not None:
+            self.model_id = m.get('ModelId')
         if m.get('NumComments') is not None:
             self.num_comments = m.get('NumComments')
         if m.get('Sentiment') is not None:
@@ -30635,6 +30653,7 @@ class RunCommentGenerationShrinkRequest(TeaModel):
         extra_info: str = None,
         length: str = None,
         length_range_shrink: str = None,
+        model_id: str = None,
         num_comments: str = None,
         sentiment_shrink: str = None,
         session_id: str = None,
@@ -30648,6 +30667,7 @@ class RunCommentGenerationShrinkRequest(TeaModel):
         self.length = length
         # This parameter is required.
         self.length_range_shrink = length_range_shrink
+        self.model_id = model_id
         # This parameter is required.
         self.num_comments = num_comments
         # This parameter is required.
@@ -30678,6 +30698,8 @@ class RunCommentGenerationShrinkRequest(TeaModel):
             result['Length'] = self.length
         if self.length_range_shrink is not None:
             result['LengthRange'] = self.length_range_shrink
+        if self.model_id is not None:
+            result['ModelId'] = self.model_id
         if self.num_comments is not None:
             result['NumComments'] = self.num_comments
         if self.sentiment_shrink is not None:
@@ -30704,6 +30726,8 @@ class RunCommentGenerationShrinkRequest(TeaModel):
             self.length = m.get('Length')
         if m.get('LengthRange') is not None:
             self.length_range_shrink = m.get('LengthRange')
+        if m.get('ModelId') is not None:
+            self.model_id = m.get('ModelId')
         if m.get('NumComments') is not None:
             self.num_comments = m.get('NumComments')
         if m.get('Sentiment') is not None:
@@ -32156,6 +32180,7 @@ class RunDocBrainmapRequest(TeaModel):
         self,
         clean_cache: bool = None,
         doc_id: str = None,
+        model_name: str = None,
         node_number: int = None,
         prompt: str = None,
         session_id: str = None,
@@ -32166,6 +32191,7 @@ class RunDocBrainmapRequest(TeaModel):
         self.clean_cache = clean_cache
         # This parameter is required.
         self.doc_id = doc_id
+        self.model_name = model_name
         self.node_number = node_number
         self.prompt = prompt
         # This parameter is required.
@@ -32188,6 +32214,8 @@ class RunDocBrainmapRequest(TeaModel):
             result['CleanCache'] = self.clean_cache
         if self.doc_id is not None:
             result['DocId'] = self.doc_id
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.node_number is not None:
             result['NodeNumber'] = self.node_number
         if self.prompt is not None:
@@ -32208,6 +32236,8 @@ class RunDocBrainmapRequest(TeaModel):
             self.clean_cache = m.get('CleanCache')
         if m.get('DocId') is not None:
             self.doc_id = m.get('DocId')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('NodeNumber') is not None:
             self.node_number = m.get('NodeNumber')
         if m.get('Prompt') is not None:
@@ -32482,6 +32512,7 @@ class RunDocIntroductionRequest(TeaModel):
         doc_id: str = None,
         introduction_prompt: str = None,
         key_point_prompt: str = None,
+        model_name: str = None,
         session_id: str = None,
         summary_prompt: str = None,
         workspace_id: str = None,
@@ -32492,6 +32523,7 @@ class RunDocIntroductionRequest(TeaModel):
         self.doc_id = doc_id
         self.introduction_prompt = introduction_prompt
         self.key_point_prompt = key_point_prompt
+        self.model_name = model_name
         # This parameter is required.
         self.session_id = session_id
         self.summary_prompt = summary_prompt
@@ -32516,6 +32548,8 @@ class RunDocIntroductionRequest(TeaModel):
             result['IntroductionPrompt'] = self.introduction_prompt
         if self.key_point_prompt is not None:
             result['KeyPointPrompt'] = self.key_point_prompt
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.session_id is not None:
             result['SessionId'] = self.session_id
         if self.summary_prompt is not None:
@@ -32536,6 +32570,8 @@ class RunDocIntroductionRequest(TeaModel):
             self.introduction_prompt = m.get('IntroductionPrompt')
         if m.get('KeyPointPrompt') is not None:
             self.key_point_prompt = m.get('KeyPointPrompt')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('SessionId') is not None:
             self.session_id = m.get('SessionId')
         if m.get('SummaryPrompt') is not None:
@@ -32974,6 +33010,7 @@ class RunDocQaRequest(TeaModel):
         category_ids: List[str] = None,
         conversation_contexts: List[RunDocQaRequestConversationContexts] = None,
         doc_ids: List[str] = None,
+        model_name: str = None,
         query: str = None,
         reference_content: str = None,
         search_source: str = None,
@@ -32983,6 +33020,7 @@ class RunDocQaRequest(TeaModel):
         self.category_ids = category_ids
         self.conversation_contexts = conversation_contexts
         self.doc_ids = doc_ids
+        self.model_name = model_name
         # This parameter is required.
         self.query = query
         self.reference_content = reference_content
@@ -33013,6 +33051,8 @@ class RunDocQaRequest(TeaModel):
                 result['ConversationContexts'].append(k.to_map() if k else None)
         if self.doc_ids is not None:
             result['DocIds'] = self.doc_ids
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.query is not None:
             result['Query'] = self.query
         if self.reference_content is not None:
@@ -33036,6 +33076,8 @@ class RunDocQaRequest(TeaModel):
                 self.conversation_contexts.append(temp_model.from_map(k))
         if m.get('DocIds') is not None:
             self.doc_ids = m.get('DocIds')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('Query') is not None:
             self.query = m.get('Query')
         if m.get('ReferenceContent') is not None:
@@ -33055,6 +33097,7 @@ class RunDocQaShrinkRequest(TeaModel):
         category_ids_shrink: str = None,
         conversation_contexts_shrink: str = None,
         doc_ids_shrink: str = None,
+        model_name: str = None,
         query: str = None,
         reference_content: str = None,
         search_source: str = None,
@@ -33064,6 +33107,7 @@ class RunDocQaShrinkRequest(TeaModel):
         self.category_ids_shrink = category_ids_shrink
         self.conversation_contexts_shrink = conversation_contexts_shrink
         self.doc_ids_shrink = doc_ids_shrink
+        self.model_name = model_name
         # This parameter is required.
         self.query = query
         self.reference_content = reference_content
@@ -33089,6 +33133,8 @@ class RunDocQaShrinkRequest(TeaModel):
             result['ConversationContexts'] = self.conversation_contexts_shrink
         if self.doc_ids_shrink is not None:
             result['DocIds'] = self.doc_ids_shrink
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.query is not None:
             result['Query'] = self.query
         if self.reference_content is not None:
@@ -33109,6 +33155,8 @@ class RunDocQaShrinkRequest(TeaModel):
             self.conversation_contexts_shrink = m.get('ConversationContexts')
         if m.get('DocIds') is not None:
             self.doc_ids_shrink = m.get('DocIds')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('Query') is not None:
             self.query = m.get('Query')
         if m.get('ReferenceContent') is not None:
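As with the other request models, ModelName on RunDocQaRequest is only emitted when it is set, so serialized bodies produced by 1.30.1-era code are unchanged. A quick sketch (field values are placeholders):

```python
# Sketch of the "only serialize when set" guard shown in the hunks above.
from alibabacloud_aimiaobi20230801 import models as aimiaobi_models

old_style = aimiaobi_models.RunDocQaRequest(query='What changed in 1.30.3?')
new_style = aimiaobi_models.RunDocQaRequest(query='What changed in 1.30.3?',
                                            model_name='<model-name>')  # placeholder

print('ModelName' in old_style.to_map())  # False: body is identical to 1.30.1 output
print('ModelName' in new_style.to_map())  # True: new optional field is included
```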
@@ -33598,12 +33646,14 @@ class RunDocSmartCardRequest(TeaModel):
     def __init__(
         self,
         doc_id: str = None,
+        model_name: str = None,
         prompt: str = None,
         session_id: str = None,
         workspace_id: str = None,
     ):
         # This parameter is required.
         self.doc_id = doc_id
+        self.model_name = model_name
         self.prompt = prompt
         # This parameter is required.
         self.session_id = session_id
@@ -33621,6 +33671,8 @@ class RunDocSmartCardRequest(TeaModel):
         result = dict()
         if self.doc_id is not None:
             result['DocId'] = self.doc_id
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.prompt is not None:
             result['Prompt'] = self.prompt
         if self.session_id is not None:
@@ -33633,6 +33685,8 @@ class RunDocSmartCardRequest(TeaModel):
         m = m or dict()
         if m.get('DocId') is not None:
             self.doc_id = m.get('DocId')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('Prompt') is not None:
             self.prompt = m.get('Prompt')
         if m.get('SessionId') is not None:
@@ -34223,6 +34277,7 @@ class RunDocTranslationRequest(TeaModel):
         self,
         clean_cache: bool = None,
         doc_id: str = None,
+        model_name: str = None,
         recommend_content: str = None,
         session_id: str = None,
         trans_type: str = None,
@@ -34230,6 +34285,7 @@ class RunDocTranslationRequest(TeaModel):
     ):
         self.clean_cache = clean_cache
         self.doc_id = doc_id
+        self.model_name = model_name
         self.recommend_content = recommend_content
         # This parameter is required.
         self.session_id = session_id
@@ -34250,6 +34306,8 @@ class RunDocTranslationRequest(TeaModel):
             result['CleanCache'] = self.clean_cache
         if self.doc_id is not None:
             result['DocId'] = self.doc_id
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.recommend_content is not None:
             result['RecommendContent'] = self.recommend_content
         if self.session_id is not None:
@@ -34266,6 +34324,8 @@ class RunDocTranslationRequest(TeaModel):
             self.clean_cache = m.get('CleanCache')
         if m.get('DocId') is not None:
             self.doc_id = m.get('DocId')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('RecommendContent') is not None:
             self.recommend_content = m.get('RecommendContent')
         if m.get('SessionId') is not None:
@@ -34532,6 +34592,7 @@ class RunDocTranslationResponse(TeaModel):
 class RunDocWashingRequest(TeaModel):
     def __init__(
         self,
+        model_id: str = None,
         prompt: str = None,
         reference_content: str = None,
         session_id: str = None,
@@ -34541,6 +34602,7 @@ class RunDocWashingRequest(TeaModel):
         writing_type_name: str = None,
         writing_type_ref_doc: str = None,
     ):
+        self.model_id = model_id
         self.prompt = prompt
         # This parameter is required.
         self.reference_content = reference_content
@@ -34561,6 +34623,8 @@ class RunDocWashingRequest(TeaModel):
             return _map

         result = dict()
+        if self.model_id is not None:
+            result['ModelId'] = self.model_id
         if self.prompt is not None:
             result['Prompt'] = self.prompt
         if self.reference_content is not None:
@@ -34581,6 +34645,8 @@ class RunDocWashingRequest(TeaModel):

     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('ModelId') is not None:
+            self.model_id = m.get('ModelId')
         if m.get('Prompt') is not None:
             self.prompt = m.get('Prompt')
         if m.get('ReferenceContent') is not None:
@@ -35150,11 +35216,13 @@ class RunGenerateQuestionsRequest(TeaModel):
     def __init__(
         self,
         doc_id: str = None,
+        model_name: str = None,
         reference_content: str = None,
         session_id: str = None,
         workspace_id: str = None,
     ):
         self.doc_id = doc_id
+        self.model_name = model_name
         self.reference_content = reference_content
         self.session_id = session_id
         # This parameter is required.
@@ -35171,6 +35239,8 @@ class RunGenerateQuestionsRequest(TeaModel):
         result = dict()
         if self.doc_id is not None:
             result['DocId'] = self.doc_id
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.reference_content is not None:
             result['ReferenceContent'] = self.reference_content
         if self.session_id is not None:
@@ -35183,6 +35253,8 @@ class RunGenerateQuestionsRequest(TeaModel):
         m = m or dict()
         if m.get('DocId') is not None:
             self.doc_id = m.get('DocId')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('ReferenceContent') is not None:
             self.reference_content = m.get('ReferenceContent')
         if m.get('SessionId') is not None:
@@ -35449,12 +35521,14 @@ class RunHotwordRequest(TeaModel):
     def __init__(
         self,
         doc_id: str = None,
+        model_name: str = None,
         prompt: str = None,
         reference_content: str = None,
         session_id: str = None,
         workspace_id: str = None,
     ):
         self.doc_id = doc_id
+        self.model_name = model_name
         self.prompt = prompt
         self.reference_content = reference_content
         self.session_id = session_id
@@ -35472,6 +35546,8 @@ class RunHotwordRequest(TeaModel):
         result = dict()
         if self.doc_id is not None:
             result['DocId'] = self.doc_id
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.prompt is not None:
             result['Prompt'] = self.prompt
         if self.reference_content is not None:
@@ -35486,6 +35562,8 @@ class RunHotwordRequest(TeaModel):
         m = m or dict()
         if m.get('DocId') is not None:
             self.doc_id = m.get('DocId')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('Prompt') is not None:
             self.prompt = m.get('Prompt')
         if m.get('ReferenceContent') is not None:
@@ -36113,6 +36191,7 @@ class RunMultiDocIntroductionRequest(TeaModel):
         self,
         doc_ids: List[str] = None,
         key_point_prompt: str = None,
+        model_name: str = None,
         session_id: str = None,
         summary_prompt: str = None,
         workspace_id: str = None,
@@ -36120,6 +36199,7 @@ class RunMultiDocIntroductionRequest(TeaModel):
         # This parameter is required.
         self.doc_ids = doc_ids
         self.key_point_prompt = key_point_prompt
+        self.model_name = model_name
         # This parameter is required.
         self.session_id = session_id
         self.summary_prompt = summary_prompt
@@ -36139,6 +36219,8 @@ class RunMultiDocIntroductionRequest(TeaModel):
             result['DocIds'] = self.doc_ids
         if self.key_point_prompt is not None:
             result['KeyPointPrompt'] = self.key_point_prompt
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.session_id is not None:
             result['SessionId'] = self.session_id
         if self.summary_prompt is not None:
@@ -36153,6 +36235,8 @@ class RunMultiDocIntroductionRequest(TeaModel):
             self.doc_ids = m.get('DocIds')
         if m.get('KeyPointPrompt') is not None:
             self.key_point_prompt = m.get('KeyPointPrompt')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('SessionId') is not None:
             self.session_id = m.get('SessionId')
         if m.get('SummaryPrompt') is not None:
@@ -36167,6 +36251,7 @@ class RunMultiDocIntroductionShrinkRequest(TeaModel):
         self,
         doc_ids_shrink: str = None,
         key_point_prompt: str = None,
+        model_name: str = None,
         session_id: str = None,
         summary_prompt: str = None,
         workspace_id: str = None,
@@ -36174,6 +36259,7 @@ class RunMultiDocIntroductionShrinkRequest(TeaModel):
         # This parameter is required.
         self.doc_ids_shrink = doc_ids_shrink
         self.key_point_prompt = key_point_prompt
+        self.model_name = model_name
         # This parameter is required.
         self.session_id = session_id
         self.summary_prompt = summary_prompt
@@ -36193,6 +36279,8 @@ class RunMultiDocIntroductionShrinkRequest(TeaModel):
             result['DocIds'] = self.doc_ids_shrink
         if self.key_point_prompt is not None:
             result['KeyPointPrompt'] = self.key_point_prompt
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
         if self.session_id is not None:
             result['SessionId'] = self.session_id
         if self.summary_prompt is not None:
@@ -36207,6 +36295,8 @@ class RunMultiDocIntroductionShrinkRequest(TeaModel):
             self.doc_ids_shrink = m.get('DocIds')
         if m.get('KeyPointPrompt') is not None:
             self.key_point_prompt = m.get('KeyPointPrompt')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
         if m.get('SessionId') is not None:
             self.session_id = m.get('SessionId')
         if m.get('SummaryPrompt') is not None:
@@ -48495,12 +48585,14 @@ class SubmitCustomSourceTopicAnalysisRequestNews(TeaModel):
         comments: List[SubmitCustomSourceTopicAnalysisRequestNewsComments] = None,
         content: str = None,
         pub_time: str = None,
+        source: str = None,
         title: str = None,
         url: str = None,
     ):
         self.comments = comments
         self.content = content
         self.pub_time = pub_time
+        self.source = source
         self.title = title
         self.url = url

@@ -48524,6 +48616,8 @@ class SubmitCustomSourceTopicAnalysisRequestNews(TeaModel):
             result['Content'] = self.content
         if self.pub_time is not None:
             result['PubTime'] = self.pub_time
+        if self.source is not None:
+            result['Source'] = self.source
         if self.title is not None:
             result['Title'] = self.title
         if self.url is not None:
@@ -48541,6 +48635,8 @@ class SubmitCustomSourceTopicAnalysisRequestNews(TeaModel):
             self.content = m.get('Content')
         if m.get('PubTime') is not None:
             self.pub_time = m.get('PubTime')
+        if m.get('Source') is not None:
+            self.source = m.get('Source')
         if m.get('Title') is not None:
             self.title = m.get('Title')
         if m.get('Url') is not None:
@@ -48551,12 +48647,14 @@ class SubmitCustomSourceTopicAnalysisRequestNews(TeaModel):
 class SubmitCustomSourceTopicAnalysisRequest(TeaModel):
     def __init__(
         self,
+        analysis_types: List[str] = None,
         file_type: str = None,
         file_url: str = None,
         max_topic_size: int = None,
         news: List[SubmitCustomSourceTopicAnalysisRequestNews] = None,
         workspace_id: str = None,
     ):
+        self.analysis_types = analysis_types
         self.file_type = file_type
         self.file_url = file_url
         self.max_topic_size = max_topic_size
@@ -48576,6 +48674,8 @@ class SubmitCustomSourceTopicAnalysisRequest(TeaModel):
             return _map

         result = dict()
+        if self.analysis_types is not None:
+            result['AnalysisTypes'] = self.analysis_types
         if self.file_type is not None:
             result['FileType'] = self.file_type
         if self.file_url is not None:
@@ -48592,6 +48692,8 @@ class SubmitCustomSourceTopicAnalysisRequest(TeaModel):

     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('AnalysisTypes') is not None:
+            self.analysis_types = m.get('AnalysisTypes')
         if m.get('FileType') is not None:
             self.file_type = m.get('FileType')
         if m.get('FileUrl') is not None:
@@ -48611,12 +48713,14 @@ class SubmitCustomSourceTopicAnalysisRequest(TeaModel):
 class SubmitCustomSourceTopicAnalysisShrinkRequest(TeaModel):
     def __init__(
         self,
+        analysis_types_shrink: str = None,
         file_type: str = None,
         file_url: str = None,
         max_topic_size: int = None,
         news_shrink: str = None,
         workspace_id: str = None,
     ):
+        self.analysis_types_shrink = analysis_types_shrink
         self.file_type = file_type
         self.file_url = file_url
         self.max_topic_size = max_topic_size
@@ -48633,6 +48737,8 @@ class SubmitCustomSourceTopicAnalysisShrinkRequest(TeaModel):
             return _map

         result = dict()
+        if self.analysis_types_shrink is not None:
+            result['AnalysisTypes'] = self.analysis_types_shrink
         if self.file_type is not None:
             result['FileType'] = self.file_type
         if self.file_url is not None:
@@ -48647,6 +48753,8 @@ class SubmitCustomSourceTopicAnalysisShrinkRequest(TeaModel):

     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('AnalysisTypes') is not None:
+            self.analysis_types_shrink = m.get('AnalysisTypes')
         if m.get('FileType') is not None:
             self.file_type = m.get('FileType')
         if m.get('FileUrl') is not None:
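Putting the SubmitCustomSourceTopicAnalysis model changes together: AnalysisTypes on the request and Source on each News item are both optional additions. A sketch with placeholder values (allowed AnalysisTypes values and the PubTime format are not specified in this diff):

```python
# Sketch only: all field values are placeholders.
from alibabacloud_aimiaobi20230801 import models as aimiaobi_models

news_item = aimiaobi_models.SubmitCustomSourceTopicAnalysisRequestNews(
    title='Sample headline',
    content='Sample article body ...',
    pub_time='2025-06-25 10:00:00',    # format assumed, not stated in this diff
    source='Example News Site',        # new optional field in 1.30.3
)
request = aimiaobi_models.SubmitCustomSourceTopicAnalysisRequest(
    analysis_types=['TOPIC'],          # new optional field; hypothetical value
    news=[news_item],
    workspace_id='<workspace-id>',
)
print(request.to_map()['News'][0]['Source'])  # 'Example News Site'
```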
--- alibabacloud_aimiaobi20230801-1.30.1/alibabacloud_aimiaobi20230801/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = '1.30.1'