alibabacloud-aimiaobi20230801 1.36.4__tar.gz → 1.36.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/ChangeLog.md +14 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/PKG-INFO +1 -1
- alibabacloud_aimiaobi20230801-1.36.5/alibabacloud_aimiaobi20230801/__init__.py +1 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/alibabacloud_aimiaobi20230801/models.py +36 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/alibabacloud_aimiaobi20230801.egg-info/PKG-INFO +1 -1
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/setup.py +1 -1
- alibabacloud_aimiaobi20230801-1.36.4/alibabacloud_aimiaobi20230801/__init__.py +0 -1
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/LICENSE +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/MANIFEST.in +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/README-CN.md +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/README.md +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/alibabacloud_aimiaobi20230801/client.py +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/alibabacloud_aimiaobi20230801.egg-info/SOURCES.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/alibabacloud_aimiaobi20230801.egg-info/dependency_links.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/alibabacloud_aimiaobi20230801.egg-info/requires.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/alibabacloud_aimiaobi20230801.egg-info/top_level.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.36.4 → alibabacloud_aimiaobi20230801-1.36.5}/setup.cfg +0 -0
ChangeLog.md
@@ -1,3 +1,17 @@
+2025-11-11 Version: 1.36.4
+- Update API AddAuditTerms: add request parameters TermsName.
+- Update API DeleteAuditNote: add request parameters NoteId.
+- Update API DownloadAuditNote: add request parameters NoteId.
+- Update API GetAvailableAuditNotes: add request parameters NoteId.
+- Update API ListAuditTerms: add request parameters TermsName.
+- Update API ListAuditTerms: add response parameters Body.Data.$.TermsName.
+- Update API SubmitAuditNote: add request parameters NoteId.
+- Update API SubmitExportTermsTask: add request parameters TermsName.
+- Update API SubmitImportTermsTask: add request parameters TermsName.
+- Update API SubmitSmartAudit: add request parameters NoteId.
+- Update API SubmitSmartAudit: add request parameters TermsName.
+
+
 2025-10-11 Version: 1.36.3
 - Update API RunWritingV2: add request parameters OutlineList.
 - Update API RunWritingV2: add request parameters SourceTraceMethod.
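The changelog additions above surface as new optional fields on the corresponding request models in alibabacloud_aimiaobi20230801.models. A minimal usage sketch, assuming the SDK's usual <Operation>Request naming; ListAuditTermsRequest and its terms_name attribute are inferred from the changelog entry and are not shown in this diff:

    # Hypothetical sketch; the class and field names are assumptions based on the changelog above.
    from alibabacloud_aimiaobi20230801 import models as miaobi_models

    request = miaobi_models.ListAuditTermsRequest(
        terms_name='example-terms',  # new TermsName request parameter
    )
    # TeaModel request objects serialize to their wire-name form via to_map().
    print(request.to_map())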
alibabacloud_aimiaobi20230801-1.36.5/alibabacloud_aimiaobi20230801/__init__.py
@@ -0,0 +1 @@
+__version__ = '1.36.5'
alibabacloud_aimiaobi20230801/models.py
@@ -42338,12 +42338,20 @@ class RunSearchGenerationRequestChatConfigSearchParam(TeaModel):
         self,
         end_time: int = None,
         multimodal_search_types: List[str] = None,
+        search_audio_min_score: float = None,
+        search_image_min_score: float = None,
         search_sources: List[RunSearchGenerationRequestChatConfigSearchParamSearchSources] = None,
+        search_text_min_score: float = None,
+        search_video_min_score: float = None,
         start_time: int = None,
     ):
         self.end_time = end_time
         self.multimodal_search_types = multimodal_search_types
+        self.search_audio_min_score = search_audio_min_score
+        self.search_image_min_score = search_image_min_score
         self.search_sources = search_sources
+        self.search_text_min_score = search_text_min_score
+        self.search_video_min_score = search_video_min_score
         self.start_time = start_time
 
     def validate(self):
@@ -42362,10 +42370,18 @@ class RunSearchGenerationRequestChatConfigSearchParam(TeaModel):
             result['EndTime'] = self.end_time
         if self.multimodal_search_types is not None:
             result['MultimodalSearchTypes'] = self.multimodal_search_types
+        if self.search_audio_min_score is not None:
+            result['SearchAudioMinScore'] = self.search_audio_min_score
+        if self.search_image_min_score is not None:
+            result['SearchImageMinScore'] = self.search_image_min_score
         result['SearchSources'] = []
         if self.search_sources is not None:
             for k in self.search_sources:
                 result['SearchSources'].append(k.to_map() if k else None)
+        if self.search_text_min_score is not None:
+            result['SearchTextMinScore'] = self.search_text_min_score
+        if self.search_video_min_score is not None:
+            result['SearchVideoMinScore'] = self.search_video_min_score
         if self.start_time is not None:
             result['StartTime'] = self.start_time
         return result
@@ -42376,11 +42392,19 @@ class RunSearchGenerationRequestChatConfigSearchParam(TeaModel):
             self.end_time = m.get('EndTime')
         if m.get('MultimodalSearchTypes') is not None:
             self.multimodal_search_types = m.get('MultimodalSearchTypes')
+        if m.get('SearchAudioMinScore') is not None:
+            self.search_audio_min_score = m.get('SearchAudioMinScore')
+        if m.get('SearchImageMinScore') is not None:
+            self.search_image_min_score = m.get('SearchImageMinScore')
         self.search_sources = []
         if m.get('SearchSources') is not None:
             for k in m.get('SearchSources'):
                 temp_model = RunSearchGenerationRequestChatConfigSearchParamSearchSources()
                 self.search_sources.append(temp_model.from_map(k))
+        if m.get('SearchTextMinScore') is not None:
+            self.search_text_min_score = m.get('SearchTextMinScore')
+        if m.get('SearchVideoMinScore') is not None:
+            self.search_video_min_score = m.get('SearchVideoMinScore')
         if m.get('StartTime') is not None:
             self.start_time = m.get('StartTime')
         return self
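The four new min-score fields above are plain optional floats that serialize under the wire names added in this hunk. A minimal sketch built directly from the constructor and to_map() changes shown here (the score values are arbitrary examples):

    from alibabacloud_aimiaobi20230801 import models as miaobi_models

    # Construct the search-param model with the fields introduced in 1.36.5.
    search_param = miaobi_models.RunSearchGenerationRequestChatConfigSearchParam(
        search_text_min_score=0.6,
        search_image_min_score=0.5,
        search_audio_min_score=0.5,
        search_video_min_score=0.5,
    )
    # to_map() emits only the fields that are set (SearchSources is always emitted as a list),
    # so this prints roughly:
    # {'SearchAudioMinScore': 0.5, 'SearchImageMinScore': 0.5, 'SearchSources': [],
    #  'SearchTextMinScore': 0.6, 'SearchVideoMinScore': 0.5}
    print(search_param.to_map())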
@@ -42393,6 +42417,8 @@ class RunSearchGenerationRequestChatConfig(TeaModel):
         exclude_generate_options: List[str] = None,
         generate_level: str = None,
         generate_technology: str = None,
+        model_custom_prompt_template: str = None,
+        model_custom_vl_prompt_template: str = None,
         search_models: List[str] = None,
         search_param: RunSearchGenerationRequestChatConfigSearchParam = None,
     ):
@@ -42400,6 +42426,8 @@ class RunSearchGenerationRequestChatConfig(TeaModel):
         self.exclude_generate_options = exclude_generate_options
         self.generate_level = generate_level
         self.generate_technology = generate_technology
+        self.model_custom_prompt_template = model_custom_prompt_template
+        self.model_custom_vl_prompt_template = model_custom_vl_prompt_template
         self.search_models = search_models
         self.search_param = search_param
 
@@ -42421,6 +42449,10 @@ class RunSearchGenerationRequestChatConfig(TeaModel):
             result['GenerateLevel'] = self.generate_level
         if self.generate_technology is not None:
             result['GenerateTechnology'] = self.generate_technology
+        if self.model_custom_prompt_template is not None:
+            result['ModelCustomPromptTemplate'] = self.model_custom_prompt_template
+        if self.model_custom_vl_prompt_template is not None:
+            result['ModelCustomVlPromptTemplate'] = self.model_custom_vl_prompt_template
         if self.search_models is not None:
             result['SearchModels'] = self.search_models
         if self.search_param is not None:
@@ -42437,6 +42469,10 @@ class RunSearchGenerationRequestChatConfig(TeaModel):
             self.generate_level = m.get('GenerateLevel')
         if m.get('GenerateTechnology') is not None:
             self.generate_technology = m.get('GenerateTechnology')
+        if m.get('ModelCustomPromptTemplate') is not None:
+            self.model_custom_prompt_template = m.get('ModelCustomPromptTemplate')
+        if m.get('ModelCustomVlPromptTemplate') is not None:
+            self.model_custom_vl_prompt_template = m.get('ModelCustomVlPromptTemplate')
         if m.get('SearchModels') is not None:
             self.search_models = m.get('SearchModels')
         if m.get('SearchParam') is not None:
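Taken together with the search-param change, the two new prompt-template fields can be set alongside the existing search_param on the chat config. A minimal round-trip sketch based on the hunks above (the template strings are placeholders):

    from alibabacloud_aimiaobi20230801 import models as miaobi_models

    chat_config = miaobi_models.RunSearchGenerationRequestChatConfig(
        model_custom_prompt_template='<your custom prompt template>',        # new ModelCustomPromptTemplate
        model_custom_vl_prompt_template='<your custom VL prompt template>',  # new ModelCustomVlPromptTemplate
        search_param=miaobi_models.RunSearchGenerationRequestChatConfigSearchParam(
            search_text_min_score=0.6,
        ),
    )
    # from_map() restores the new fields from their wire names, per the diff above.
    restored = miaobi_models.RunSearchGenerationRequestChatConfig().from_map(chat_config.to_map())
    assert restored.model_custom_prompt_template == chat_config.model_custom_prompt_template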
alibabacloud_aimiaobi20230801-1.36.4/alibabacloud_aimiaobi20230801/__init__.py
@@ -1 +0,0 @@
-__version__ = '1.36.4'