alibabacloud-aimiaobi20230801 1.26.3.tar.gz → 1.26.5.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/ChangeLog.md +9 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/PKG-INFO +1 -1
- alibabacloud_aimiaobi20230801-1.26.5/alibabacloud_aimiaobi20230801/__init__.py +1 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/alibabacloud_aimiaobi20230801/models.py +107 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/alibabacloud_aimiaobi20230801.egg-info/PKG-INFO +1 -1
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/setup.py +1 -1
- alibabacloud_aimiaobi20230801-1.26.3/alibabacloud_aimiaobi20230801/__init__.py +0 -1
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/LICENSE +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/MANIFEST.in +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/README-CN.md +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/README.md +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/alibabacloud_aimiaobi20230801/client.py +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/alibabacloud_aimiaobi20230801.egg-info/SOURCES.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/alibabacloud_aimiaobi20230801.egg-info/dependency_links.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/alibabacloud_aimiaobi20230801.egg-info/requires.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/alibabacloud_aimiaobi20230801.egg-info/top_level.txt +0 -0
- {alibabacloud_aimiaobi20230801-1.26.3 → alibabacloud_aimiaobi20230801-1.26.5}/setup.cfg +0 -0

ChangeLog.md:
@@ -1,3 +1,12 @@
+2025-06-05 Version: 1.26.4
+- Update API GetProperties: add response parameters Body.Data.MiaosouConfig.
+- Update API RunSearchGeneration: add request parameters ChatConfig.EnableThinking.
+
+
+2025-05-27 Version: 1.26.3
+- Update API RunDocSummary: add request parameters ModelName.
+
+
 2025-05-23 Version: 1.26.2
 - Update API GetGeneratedContent: add response parameters Body.Data.IgnoreContentAuditWords.
 
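
As a usage note on the changelog entries above, here is a minimal sketch of setting the RunDocSummary ModelName parameter introduced in 1.26.3. The RunDocSummaryRequest class and its model_name attribute are assumed from the changelog wording and the SDK's usual snake_case codegen naming; they are not shown in this diff.

# Hypothetical sketch: names follow the SDK's codegen conventions, not this diff.
from alibabacloud_aimiaobi20230801 import models as miaobi_models

request = miaobi_models.RunDocSummaryRequest(
    model_name='example-model',  # invented value; 'ModelName' request parameter per the changelog
)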

alibabacloud_aimiaobi20230801-1.26.5/alibabacloud_aimiaobi20230801/__init__.py (added):
@@ -0,0 +1 @@
+__version__ = '1.26.5'
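
A quick way to confirm which release is installed is the package-level version string that this new __init__.py provides:

# Minimal check of the installed SDK version.
import alibabacloud_aimiaobi20230801

print(alibabacloud_aimiaobi20230801.__version__)  # '1.26.5' for this release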

alibabacloud_aimiaobi20230801/models.py:
@@ -10125,8 +10125,10 @@ class GetHotTopicBroadcastResponseBodyDataData(TeaModel):
         locations: List[str] = None,
         news: List[GetHotTopicBroadcastResponseBodyDataDataNews] = None,
         output_token: int = None,
+        pub_time: str = None,
         summary: GetHotTopicBroadcastResponseBodyDataDataSummary = None,
         text_summary: str = None,
+        url: str = None,
     ):
         self.category = category
         self.create_time = create_time
@@ -10141,8 +10143,10 @@ class GetHotTopicBroadcastResponseBodyDataData(TeaModel):
         self.locations = locations
         self.news = news
         self.output_token = output_token
+        self.pub_time = pub_time
         self.summary = summary
         self.text_summary = text_summary
+        self.url = url
 
     def validate(self):
         if self.images:
@@ -10192,10 +10196,14 @@ class GetHotTopicBroadcastResponseBodyDataData(TeaModel):
                 result['News'].append(k.to_map() if k else None)
         if self.output_token is not None:
             result['OutputToken'] = self.output_token
+        if self.pub_time is not None:
+            result['PubTime'] = self.pub_time
         if self.summary is not None:
             result['Summary'] = self.summary.to_map()
         if self.text_summary is not None:
             result['TextSummary'] = self.text_summary
+        if self.url is not None:
+            result['Url'] = self.url
         return result
 
     def from_map(self, m: dict = None):
@@ -10232,11 +10240,15 @@ class GetHotTopicBroadcastResponseBodyDataData(TeaModel):
                 self.news.append(temp_model.from_map(k))
         if m.get('OutputToken') is not None:
             self.output_token = m.get('OutputToken')
+        if m.get('PubTime') is not None:
+            self.pub_time = m.get('PubTime')
         if m.get('Summary') is not None:
             temp_model = GetHotTopicBroadcastResponseBodyDataDataSummary()
             self.summary = temp_model.from_map(m['Summary'])
         if m.get('TextSummary') is not None:
             self.text_summary = m.get('TextSummary')
+        if m.get('Url') is not None:
+            self.url = m.get('Url')
         return self
 
 
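The four hunks above add PubTime and Url to the hot-topic item model and wire them through to_map/from_map. A minimal sketch (not part of the diff) of parsing a payload that carries the new fields; the sample values are invented:

from alibabacloud_aimiaobi20230801 import models as miaobi_models

item = miaobi_models.GetHotTopicBroadcastResponseBodyDataData().from_map({
    'OutputToken': 128,
    'PubTime': '2025-06-05 10:00:00',     # new field in this diff
    'Url': 'https://example.com/news/1',  # new field in this diff
})
print(item.output_token, item.pub_time, item.url)
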
@@ -11905,6 +11917,86 @@ class GetPropertiesResponseBodyDataIntelligentSearchConfig(TeaModel):
         return self
 
 
+class GetPropertiesResponseBodyDataMiaosouConfigModelInfos(TeaModel):
+    def __init__(
+        self,
+        model_id: str = None,
+        model_name: str = None,
+    ):
+        self.model_id = model_id
+        self.model_name = model_name
+
+    def validate(self):
+        pass
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.model_id is not None:
+            result['ModelId'] = self.model_id
+        if self.model_name is not None:
+            result['ModelName'] = self.model_name
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('ModelId') is not None:
+            self.model_id = m.get('ModelId')
+        if m.get('ModelName') is not None:
+            self.model_name = m.get('ModelName')
+        return self
+
+
+class GetPropertiesResponseBodyDataMiaosouConfig(TeaModel):
+    def __init__(
+        self,
+        max_doc_size: int = None,
+        model_infos: List[GetPropertiesResponseBodyDataMiaosouConfigModelInfos] = None,
+        use_doc_size: int = None,
+    ):
+        self.max_doc_size = max_doc_size
+        self.model_infos = model_infos
+        self.use_doc_size = use_doc_size
+
+    def validate(self):
+        if self.model_infos:
+            for k in self.model_infos:
+                if k:
+                    k.validate()
+
+    def to_map(self):
+        _map = super().to_map()
+        if _map is not None:
+            return _map
+
+        result = dict()
+        if self.max_doc_size is not None:
+            result['MaxDocSize'] = self.max_doc_size
+        result['ModelInfos'] = []
+        if self.model_infos is not None:
+            for k in self.model_infos:
+                result['ModelInfos'].append(k.to_map() if k else None)
+        if self.use_doc_size is not None:
+            result['UseDocSize'] = self.use_doc_size
+        return result
+
+    def from_map(self, m: dict = None):
+        m = m or dict()
+        if m.get('MaxDocSize') is not None:
+            self.max_doc_size = m.get('MaxDocSize')
+        self.model_infos = []
+        if m.get('ModelInfos') is not None:
+            for k in m.get('ModelInfos'):
+                temp_model = GetPropertiesResponseBodyDataMiaosouConfigModelInfos()
+                self.model_infos.append(temp_model.from_map(k))
+        if m.get('UseDocSize') is not None:
+            self.use_doc_size = m.get('UseDocSize')
+        return self
+
+
 class GetPropertiesResponseBodyDataSearchSourceList(TeaModel):
     def __init__(
         self,
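A minimal sketch (not part of the diff) of the two new models added above, round-tripped with the generated from_map helper; the values are invented for illustration:

from alibabacloud_aimiaobi20230801 import models as miaobi_models

cfg = miaobi_models.GetPropertiesResponseBodyDataMiaosouConfig().from_map({
    'MaxDocSize': 50,
    'UseDocSize': 3,
    'ModelInfos': [{'ModelId': 'model-001', 'ModelName': 'example-model'}],  # invented entries
})
cfg.validate()  # walks and validates the nested ModelInfos entries
print(cfg.max_doc_size, cfg.model_infos[0].model_name)
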
@@ -12101,6 +12193,7 @@ class GetPropertiesResponseBodyData(TeaModel):
         console_config: GetPropertiesResponseBodyDataConsoleConfig = None,
         general_config_map: Dict[str, Any] = None,
         intelligent_search_config: GetPropertiesResponseBodyDataIntelligentSearchConfig = None,
+        miaosou_config: GetPropertiesResponseBodyDataMiaosouConfig = None,
         search_source_list: List[GetPropertiesResponseBodyDataSearchSourceList] = None,
         search_sources: List[GetPropertiesResponseBodyDataSearchSources] = None,
         slr_authorized: bool = None,
@@ -12112,6 +12205,7 @@ class GetPropertiesResponseBodyData(TeaModel):
         self.console_config = console_config
         self.general_config_map = general_config_map
         self.intelligent_search_config = intelligent_search_config
+        self.miaosou_config = miaosou_config
         self.search_source_list = search_source_list
         self.search_sources = search_sources
         self.slr_authorized = slr_authorized
@@ -12124,6 +12218,8 @@ class GetPropertiesResponseBodyData(TeaModel):
             self.console_config.validate()
         if self.intelligent_search_config:
             self.intelligent_search_config.validate()
+        if self.miaosou_config:
+            self.miaosou_config.validate()
         if self.search_source_list:
             for k in self.search_source_list:
                 if k:
@@ -12157,6 +12253,8 @@ class GetPropertiesResponseBodyData(TeaModel):
             result['GeneralConfigMap'] = self.general_config_map
         if self.intelligent_search_config is not None:
             result['IntelligentSearchConfig'] = self.intelligent_search_config.to_map()
+        if self.miaosou_config is not None:
+            result['MiaosouConfig'] = self.miaosou_config.to_map()
         result['SearchSourceList'] = []
         if self.search_source_list is not None:
             for k in self.search_source_list:
@@ -12191,6 +12289,9 @@ class GetPropertiesResponseBodyData(TeaModel):
         if m.get('IntelligentSearchConfig') is not None:
             temp_model = GetPropertiesResponseBodyDataIntelligentSearchConfig()
             self.intelligent_search_config = temp_model.from_map(m['IntelligentSearchConfig'])
+        if m.get('MiaosouConfig') is not None:
+            temp_model = GetPropertiesResponseBodyDataMiaosouConfig()
+            self.miaosou_config = temp_model.from_map(m['MiaosouConfig'])
         self.search_source_list = []
         if m.get('SearchSourceList') is not None:
             for k in m.get('SearchSourceList'):
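With the wiring above, the parent Data model now dispatches the 'MiaosouConfig' key to the new nested model when a GetProperties response is parsed. A minimal sketch (not part of the diff); keys other than MiaosouConfig are omitted:

from alibabacloud_aimiaobi20230801 import models as miaobi_models

data = miaobi_models.GetPropertiesResponseBodyData().from_map({
    'MiaosouConfig': {'MaxDocSize': 50, 'UseDocSize': 3, 'ModelInfos': []},
})
print(type(data.miaosou_config).__name__)  # GetPropertiesResponseBodyDataMiaosouConfig
print(data.miaosou_config.use_doc_size)    # 3
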
@@ -33905,11 +34006,13 @@ class RunSearchGenerationRequestChatConfigSearchParam(TeaModel):
 class RunSearchGenerationRequestChatConfig(TeaModel):
     def __init__(
         self,
+        enable_thinking: bool = None,
         generate_level: str = None,
         generate_technology: str = None,
         search_models: List[str] = None,
         search_param: RunSearchGenerationRequestChatConfigSearchParam = None,
     ):
+        self.enable_thinking = enable_thinking
         self.generate_level = generate_level
         self.generate_technology = generate_technology
         self.search_models = search_models
@@ -33925,6 +34028,8 @@ class RunSearchGenerationRequestChatConfig(TeaModel):
             return _map
 
         result = dict()
+        if self.enable_thinking is not None:
+            result['EnableThinking'] = self.enable_thinking
         if self.generate_level is not None:
             result['GenerateLevel'] = self.generate_level
         if self.generate_technology is not None:
@@ -33937,6 +34042,8 @@ class RunSearchGenerationRequestChatConfig(TeaModel):
 
     def from_map(self, m: dict = None):
         m = m or dict()
+        if m.get('EnableThinking') is not None:
+            self.enable_thinking = m.get('EnableThinking')
         if m.get('GenerateLevel') is not None:
             self.generate_level = m.get('GenerateLevel')
         if m.get('GenerateTechnology') is not None:
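
A minimal sketch (not part of the diff) of the new EnableThinking switch on the search-generation chat config; the other fields are left at their defaults, and how the flag affects generation is not described in this diff:

from alibabacloud_aimiaobi20230801 import models as miaobi_models

chat_config = miaobi_models.RunSearchGenerationRequestChatConfig(
    enable_thinking=True,  # serialized as 'EnableThinking', per the to_map hunk above
)
print(chat_config.to_map())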

alibabacloud_aimiaobi20230801-1.26.3/alibabacloud_aimiaobi20230801/__init__.py (removed):
@@ -1 +0,0 @@
-__version__ = '1.26.3'

Files without changes: LICENSE, MANIFEST.in, README-CN.md, README.md, alibabacloud_aimiaobi20230801/client.py, alibabacloud_aimiaobi20230801.egg-info/SOURCES.txt, alibabacloud_aimiaobi20230801.egg-info/dependency_links.txt, alibabacloud_aimiaobi20230801.egg-info/requires.txt, alibabacloud_aimiaobi20230801.egg-info/top_level.txt, setup.cfg