alibabacloud-iqs20241111 1.5.2.tar.gz → 1.6.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (17)
  1. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/ChangeLog.md +8 -0
  2. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/PKG-INFO +1 -1
  3. alibabacloud_iqs20241111-1.6.1/alibabacloud_iqs20241111/__init__.py +1 -0
  4. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/alibabacloud_iqs20241111/client.py +116 -6
  5. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/alibabacloud_iqs20241111/models.py +341 -0
  6. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/alibabacloud_iqs20241111.egg-info/PKG-INFO +1 -1
  7. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/setup.py +1 -1
  8. alibabacloud_iqs20241111-1.5.2/alibabacloud_iqs20241111/__init__.py +0 -1
  9. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/LICENSE +0 -0
  10. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/MANIFEST.in +0 -0
  11. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/README-CN.md +0 -0
  12. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/README.md +0 -0
  13. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/alibabacloud_iqs20241111.egg-info/SOURCES.txt +0 -0
  14. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/alibabacloud_iqs20241111.egg-info/dependency_links.txt +0 -0
  15. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/alibabacloud_iqs20241111.egg-info/requires.txt +0 -0
  16. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/alibabacloud_iqs20241111.egg-info/top_level.txt +0 -0
  17. {alibabacloud_iqs20241111-1.5.2 → alibabacloud_iqs20241111-1.6.1}/setup.cfg +0 -0
@@ -1,3 +1,11 @@
+ 2025-10-29 Version: 1.6.0
+ - Support API ReadPageScrape.
+ - Update API GenericSearch: add request parameters advancedParams.
+
+
+ 2025-10-10 Version: 1.5.2
+ - Generated python 2024-11-11 for IQS.
+
  2025-10-10 Version: 1.5.1
  - Generated python 2024-11-11 for IQS.

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: alibabacloud_iqs20241111
- Version: 1.5.2
+ Version: 1.6.1
  Summary: Alibaba Cloud IQS (20241111) SDK Library for Python
  Home-page: https://github.com/aliyun/alibabacloud-python-sdk
  Author: Alibaba Cloud SDK
@@ -0,0 +1 @@
+ __version__ = '1.6.1'
@@ -279,20 +279,26 @@ class Client(OpenApiClient):

  def generic_search_with_options(
  self,
- request: iqs20241111_models.GenericSearchRequest,
+ tmp_req: iqs20241111_models.GenericSearchRequest,
  headers: Dict[str, str],
  runtime: util_models.RuntimeOptions,
  ) -> iqs20241111_models.GenericSearchResponse:
  """
  @summary 通用搜索

- @param request: GenericSearchRequest
+ @param tmp_req: GenericSearchRequest
  @param headers: map
  @param runtime: runtime options for this request RuntimeOptions
  @return: GenericSearchResponse
  """
- UtilClient.validate_model(request)
+ UtilClient.validate_model(tmp_req)
+ request = iqs20241111_models.GenericSearchShrinkRequest()
+ OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.advanced_params):
+ request.advanced_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.advanced_params, 'advancedParams', 'json')
  query = {}
+ if not UtilClient.is_unset(request.advanced_params_shrink):
+ query['advancedParams'] = request.advanced_params_shrink
  if not UtilClient.is_unset(request.enable_rerank):
  query['enableRerank'] = request.enable_rerank
  if not UtilClient.is_unset(request.industry):
@@ -335,20 +341,26 @@ class Client(OpenApiClient):

  async def generic_search_with_options_async(
  self,
- request: iqs20241111_models.GenericSearchRequest,
+ tmp_req: iqs20241111_models.GenericSearchRequest,
  headers: Dict[str, str],
  runtime: util_models.RuntimeOptions,
  ) -> iqs20241111_models.GenericSearchResponse:
  """
  @summary 通用搜索

- @param request: GenericSearchRequest
+ @param tmp_req: GenericSearchRequest
  @param headers: map
  @param runtime: runtime options for this request RuntimeOptions
  @return: GenericSearchResponse
  """
- UtilClient.validate_model(request)
+ UtilClient.validate_model(tmp_req)
+ request = iqs20241111_models.GenericSearchShrinkRequest()
+ OpenApiUtilClient.convert(tmp_req, request)
+ if not UtilClient.is_unset(tmp_req.advanced_params):
+ request.advanced_params_shrink = OpenApiUtilClient.array_to_string_with_specified_style(tmp_req.advanced_params, 'advancedParams', 'json')
  query = {}
+ if not UtilClient.is_unset(request.advanced_params_shrink):
+ query['advancedParams'] = request.advanced_params_shrink
  if not UtilClient.is_unset(request.enable_rerank):
  query['enableRerank'] = request.enable_rerank
  if not UtilClient.is_unset(request.industry):
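Both the sync and async variants above now accept the request as tmp_req, copy it into a GenericSearchShrinkRequest, and serialize the new advancedParams map to a JSON string before adding it to the query. A minimal caller-side sketch of the new parameter follows; the endpoint value and the keys placed inside advanced_params are illustrative assumptions, not part of this diff.

# Illustrative sketch (not part of the package): calling GenericSearch with the
# new advancedParams map. The endpoint and the keys inside advanced_params are
# assumptions; only the classes and methods shown in this diff are taken from it.
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_iqs20241111.client import Client
from alibabacloud_iqs20241111 import models as iqs20241111_models

config = open_api_models.Config(
    access_key_id='<access-key-id>',
    access_key_secret='<access-key-secret>',
)
config.endpoint = 'iqs.cn-zhangjiakou.aliyuncs.com'  # assumed regional endpoint
client = Client(config)

request = iqs20241111_models.GenericSearchRequest(
    query='Alibaba Cloud IQS',
    advanced_params={'site': 'aliyun.com'},  # hypothetical advanced parameter
)
# generic_search_with_options converts advanced_params into the JSON string
# sent as the 'advancedParams' query parameter, as shown in the hunks above.
response = client.generic_search_with_options(
    request, {}, util_models.RuntimeOptions()
)
# Page items in the result may now carry the new websiteAuthorityScore field.
print(response.body.to_map())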
@@ -739,6 +751,104 @@ class Client(OpenApiClient):
739
751
  headers = {}
740
752
  return await self.read_page_basic_with_options_async(request, headers, runtime)
741
753
 
754
+ def read_page_scrape_with_options(
755
+ self,
756
+ request: iqs20241111_models.ReadPageScrapeRequest,
757
+ headers: Dict[str, str],
758
+ runtime: util_models.RuntimeOptions,
759
+ ) -> iqs20241111_models.ReadPageScrapeResponse:
760
+ """
761
+ @summary 动态页面解析
762
+
763
+ @param request: ReadPageScrapeRequest
764
+ @param headers: map
765
+ @param runtime: runtime options for this request RuntimeOptions
766
+ @return: ReadPageScrapeResponse
767
+ """
768
+ UtilClient.validate_model(request)
769
+ req = open_api_models.OpenApiRequest(
770
+ headers=headers,
771
+ body=OpenApiUtilClient.parse_to_map(request.body)
772
+ )
773
+ params = open_api_models.Params(
774
+ action='ReadPageScrape',
775
+ version='2024-11-11',
776
+ protocol='HTTPS',
777
+ pathname=f'/linked-retrieval/linked-retrieval-entry/v1/iqs/readpage/scrape',
778
+ method='POST',
779
+ auth_type='AK',
780
+ style='ROA',
781
+ req_body_type='json',
782
+ body_type='json'
783
+ )
784
+ return TeaCore.from_map(
785
+ iqs20241111_models.ReadPageScrapeResponse(),
786
+ self.call_api(params, req, runtime)
787
+ )
788
+
789
+ async def read_page_scrape_with_options_async(
790
+ self,
791
+ request: iqs20241111_models.ReadPageScrapeRequest,
792
+ headers: Dict[str, str],
793
+ runtime: util_models.RuntimeOptions,
794
+ ) -> iqs20241111_models.ReadPageScrapeResponse:
795
+ """
796
+ @summary 动态页面解析
797
+
798
+ @param request: ReadPageScrapeRequest
799
+ @param headers: map
800
+ @param runtime: runtime options for this request RuntimeOptions
801
+ @return: ReadPageScrapeResponse
802
+ """
803
+ UtilClient.validate_model(request)
804
+ req = open_api_models.OpenApiRequest(
805
+ headers=headers,
806
+ body=OpenApiUtilClient.parse_to_map(request.body)
807
+ )
808
+ params = open_api_models.Params(
809
+ action='ReadPageScrape',
810
+ version='2024-11-11',
811
+ protocol='HTTPS',
812
+ pathname=f'/linked-retrieval/linked-retrieval-entry/v1/iqs/readpage/scrape',
813
+ method='POST',
814
+ auth_type='AK',
815
+ style='ROA',
816
+ req_body_type='json',
817
+ body_type='json'
818
+ )
819
+ return TeaCore.from_map(
820
+ iqs20241111_models.ReadPageScrapeResponse(),
821
+ await self.call_api_async(params, req, runtime)
822
+ )
823
+
824
+ def read_page_scrape(
825
+ self,
826
+ request: iqs20241111_models.ReadPageScrapeRequest,
827
+ ) -> iqs20241111_models.ReadPageScrapeResponse:
828
+ """
829
+ @summary 动态页面解析
830
+
831
+ @param request: ReadPageScrapeRequest
832
+ @return: ReadPageScrapeResponse
833
+ """
834
+ runtime = util_models.RuntimeOptions()
835
+ headers = {}
836
+ return self.read_page_scrape_with_options(request, headers, runtime)
837
+
838
+ async def read_page_scrape_async(
839
+ self,
840
+ request: iqs20241111_models.ReadPageScrapeRequest,
841
+ ) -> iqs20241111_models.ReadPageScrapeResponse:
842
+ """
843
+ @summary 动态页面解析
844
+
845
+ @param request: ReadPageScrapeRequest
846
+ @return: ReadPageScrapeResponse
847
+ """
848
+ runtime = util_models.RuntimeOptions()
849
+ headers = {}
850
+ return await self.read_page_scrape_with_options_async(request, headers, runtime)
851
+
742
852
  def unified_search_with_options(
743
853
  self,
744
854
  request: iqs20241111_models.UnifiedSearchRequest,
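The hunk above wires the new ReadPageScrape operation: a ROA-style POST to /linked-retrieval/linked-retrieval-entry/v1/iqs/readpage/scrape, exposed through sync and async *_with_options methods plus read_page_scrape and read_page_scrape_async wrappers that supply default headers and runtime options. A hedged usage sketch follows, reusing the client constructed in the earlier sketch; the target URL and format names are placeholders.

# Illustrative sketch: invoking the new ReadPageScrape API. The URL and format
# names are placeholders; the model classes appear in the models.py hunks below.
from alibabacloud_iqs20241111 import models as iqs20241111_models

scrape_request = iqs20241111_models.ReadPageScrapeRequest(
    body=iqs20241111_models.ReadPageScrapeBody(
        url='https://example.com/article',   # required field
        formats=['markdown', 'screenshot'],  # assumed format identifiers
    )
)
scrape_response = client.read_page_scrape(scrape_request)  # client from the sketch above
print(scrape_response.body.request_id)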
@@ -135,6 +135,7 @@ class ScorePageItem(TeaModel):
  snippet: str = None,
  summary: str = None,
  title: str = None,
+ website_authority_score: int = None,
  ):
  # This parameter is required.
  self.card_type = card_type
@@ -163,6 +164,7 @@ class ScorePageItem(TeaModel):
  self.summary = summary
  # This parameter is required.
  self.title = title
+ self.website_authority_score = website_authority_score

  def validate(self):
  if self.images:
@@ -218,6 +220,8 @@ class ScorePageItem(TeaModel):
  result['summary'] = self.summary
  if self.title is not None:
  result['title'] = self.title
+ if self.website_authority_score is not None:
+ result['websiteAuthorityScore'] = self.website_authority_score
  return result

  def from_map(self, m: dict = None):
@@ -265,6 +269,8 @@ class ScorePageItem(TeaModel):
  self.summary = m.get('summary')
  if m.get('title') is not None:
  self.title = m.get('title')
+ if m.get('websiteAuthorityScore') is not None:
+ self.website_authority_score = m.get('websiteAuthorityScore')
  return self

@@ -1101,6 +1107,7 @@ class ReadPageItem(TeaModel):
  html: str = None,
  markdown: str = None,
  raw_html: str = None,
+ screenshot: str = None,
  status_code: int = None,
  text: str = None,
  ):
@@ -1108,6 +1115,7 @@ class ReadPageItem(TeaModel):
  self.html = html
  self.markdown = markdown
  self.raw_html = raw_html
+ self.screenshot = screenshot
  self.status_code = status_code
  self.text = text

@@ -1128,6 +1136,8 @@ class ReadPageItem(TeaModel):
  result['markdown'] = self.markdown
  if self.raw_html is not None:
  result['rawHtml'] = self.raw_html
+ if self.screenshot is not None:
+ result['screenshot'] = self.screenshot
  if self.status_code is not None:
  result['statusCode'] = self.status_code
  if self.text is not None:
@@ -1144,6 +1154,8 @@ class ReadPageItem(TeaModel):
  self.markdown = m.get('markdown')
  if m.get('rawHtml') is not None:
  self.raw_html = m.get('rawHtml')
+ if m.get('screenshot') is not None:
+ self.screenshot = m.get('screenshot')
  if m.get('statusCode') is not None:
  self.status_code = m.get('statusCode')
  if m.get('text') is not None:
@@ -1151,6 +1163,117 @@ class ReadPageItem(TeaModel):
1151
1163
  return self
1152
1164
 
1153
1165
 
1166
+ class ReadPageScrapeBodyReadability(TeaModel):
1167
+ def __init__(
1168
+ self,
1169
+ exclude_all_images: bool = None,
1170
+ exclude_all_links: bool = None,
1171
+ excluded_tags: List[str] = None,
1172
+ readability_mode: str = None,
1173
+ ):
1174
+ self.exclude_all_images = exclude_all_images
1175
+ self.exclude_all_links = exclude_all_links
1176
+ self.excluded_tags = excluded_tags
1177
+ self.readability_mode = readability_mode
1178
+
1179
+ def validate(self):
1180
+ pass
1181
+
1182
+ def to_map(self):
1183
+ _map = super().to_map()
1184
+ if _map is not None:
1185
+ return _map
1186
+
1187
+ result = dict()
1188
+ if self.exclude_all_images is not None:
1189
+ result['excludeAllImages'] = self.exclude_all_images
1190
+ if self.exclude_all_links is not None:
1191
+ result['excludeAllLinks'] = self.exclude_all_links
1192
+ if self.excluded_tags is not None:
1193
+ result['excludedTags'] = self.excluded_tags
1194
+ if self.readability_mode is not None:
1195
+ result['readabilityMode'] = self.readability_mode
1196
+ return result
1197
+
1198
+ def from_map(self, m: dict = None):
1199
+ m = m or dict()
1200
+ if m.get('excludeAllImages') is not None:
1201
+ self.exclude_all_images = m.get('excludeAllImages')
1202
+ if m.get('excludeAllLinks') is not None:
1203
+ self.exclude_all_links = m.get('excludeAllLinks')
1204
+ if m.get('excludedTags') is not None:
1205
+ self.excluded_tags = m.get('excludedTags')
1206
+ if m.get('readabilityMode') is not None:
1207
+ self.readability_mode = m.get('readabilityMode')
1208
+ return self
1209
+
1210
+
1211
+ class ReadPageScrapeBody(TeaModel):
1212
+ def __init__(
1213
+ self,
1214
+ formats: List[str] = None,
1215
+ location: str = None,
1216
+ max_age: int = None,
1217
+ page_timeout: int = None,
1218
+ readability: ReadPageScrapeBodyReadability = None,
1219
+ timeout: int = None,
1220
+ url: str = None,
1221
+ ):
1222
+ self.formats = formats
1223
+ self.location = location
1224
+ self.max_age = max_age
1225
+ self.page_timeout = page_timeout
1226
+ self.readability = readability
1227
+ self.timeout = timeout
1228
+ # This parameter is required.
1229
+ self.url = url
1230
+
1231
+ def validate(self):
1232
+ if self.readability:
1233
+ self.readability.validate()
1234
+
1235
+ def to_map(self):
1236
+ _map = super().to_map()
1237
+ if _map is not None:
1238
+ return _map
1239
+
1240
+ result = dict()
1241
+ if self.formats is not None:
1242
+ result['formats'] = self.formats
1243
+ if self.location is not None:
1244
+ result['location'] = self.location
1245
+ if self.max_age is not None:
1246
+ result['maxAge'] = self.max_age
1247
+ if self.page_timeout is not None:
1248
+ result['pageTimeout'] = self.page_timeout
1249
+ if self.readability is not None:
1250
+ result['readability'] = self.readability.to_map()
1251
+ if self.timeout is not None:
1252
+ result['timeout'] = self.timeout
1253
+ if self.url is not None:
1254
+ result['url'] = self.url
1255
+ return result
1256
+
1257
+ def from_map(self, m: dict = None):
1258
+ m = m or dict()
1259
+ if m.get('formats') is not None:
1260
+ self.formats = m.get('formats')
1261
+ if m.get('location') is not None:
1262
+ self.location = m.get('location')
1263
+ if m.get('maxAge') is not None:
1264
+ self.max_age = m.get('maxAge')
1265
+ if m.get('pageTimeout') is not None:
1266
+ self.page_timeout = m.get('pageTimeout')
1267
+ if m.get('readability') is not None:
1268
+ temp_model = ReadPageScrapeBodyReadability()
1269
+ self.readability = temp_model.from_map(m['readability'])
1270
+ if m.get('timeout') is not None:
1271
+ self.timeout = m.get('timeout')
1272
+ if m.get('url') is not None:
1273
+ self.url = m.get('url')
1274
+ return self
1275
+
1276
+
1154
1277
  class RequestContents(TeaModel):
1155
1278
  def __init__(
1156
1279
  self,
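The hunk above adds the ReadPageScrapeBodyReadability and ReadPageScrapeBody models, whose to_map() and from_map() methods translate between the snake_case attributes and the camelCase keys the API expects. A hedged sketch of building a body with readability options follows; the readability_mode value and the timeout unit are assumptions.

# Illustrative sketch: constructing a ReadPageScrapeBody with readability options.
# 'article' as a readability_mode value and milliseconds for page_timeout are
# assumptions, not values documented in this diff.
from alibabacloud_iqs20241111 import models as iqs20241111_models

readability = iqs20241111_models.ReadPageScrapeBodyReadability(
    exclude_all_images=True,
    excluded_tags=['nav', 'footer'],
    readability_mode='article',  # assumed value
)
body = iqs20241111_models.ReadPageScrapeBody(
    url='https://example.com/article',  # required field
    readability=readability,
    page_timeout=10000,  # assumed to be milliseconds
)
# to_map() emits the camelCase wire keys, e.g. excludeAllImages, pageTimeout.
print(body.to_map())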
@@ -1355,6 +1478,7 @@ class UnifiedPageItem(TeaModel):
  snippet: str = None,
  summary: str = None,
  title: str = None,
+ website_authority_score: int = None,
  ):
  self.host_authority_score = host_authority_score
  self.host_logo = host_logo
@@ -1369,6 +1493,7 @@ class UnifiedPageItem(TeaModel):
  self.snippet = snippet
  self.summary = summary
  self.title = title
+ self.website_authority_score = website_authority_score

  def validate(self):
  pass
@@ -1405,6 +1530,8 @@ class UnifiedPageItem(TeaModel):
  result['summary'] = self.summary
  if self.title is not None:
  result['title'] = self.title
+ if self.website_authority_score is not None:
+ result['websiteAuthorityScore'] = self.website_authority_score
  return result

  def from_map(self, m: dict = None):
@@ -1435,6 +1562,8 @@ class UnifiedPageItem(TeaModel):
  self.summary = m.get('summary')
  if m.get('title') is not None:
  self.title = m.get('title')
+ if m.get('websiteAuthorityScore') is not None:
+ self.website_authority_score = m.get('websiteAuthorityScore')
  return self

@@ -2111,6 +2240,95 @@ class GenericAdvancedSearchResponse(TeaModel):
2111
2240
  class GenericSearchRequest(TeaModel):
2112
2241
  def __init__(
2113
2242
  self,
2243
+ advanced_params: Dict[str, Any] = None,
2244
+ enable_rerank: bool = None,
2245
+ industry: str = None,
2246
+ page: int = None,
2247
+ query: str = None,
2248
+ return_main_text: bool = None,
2249
+ return_markdown_text: bool = None,
2250
+ return_rich_main_body: bool = None,
2251
+ return_summary: bool = None,
2252
+ session_id: str = None,
2253
+ time_range: str = None,
2254
+ ):
2255
+ self.advanced_params = advanced_params
2256
+ self.enable_rerank = enable_rerank
2257
+ self.industry = industry
2258
+ self.page = page
2259
+ # This parameter is required.
2260
+ self.query = query
2261
+ self.return_main_text = return_main_text
2262
+ self.return_markdown_text = return_markdown_text
2263
+ self.return_rich_main_body = return_rich_main_body
2264
+ self.return_summary = return_summary
2265
+ self.session_id = session_id
2266
+ self.time_range = time_range
2267
+
2268
+ def validate(self):
2269
+ pass
2270
+
2271
+ def to_map(self):
2272
+ _map = super().to_map()
2273
+ if _map is not None:
2274
+ return _map
2275
+
2276
+ result = dict()
2277
+ if self.advanced_params is not None:
2278
+ result['advancedParams'] = self.advanced_params
2279
+ if self.enable_rerank is not None:
2280
+ result['enableRerank'] = self.enable_rerank
2281
+ if self.industry is not None:
2282
+ result['industry'] = self.industry
2283
+ if self.page is not None:
2284
+ result['page'] = self.page
2285
+ if self.query is not None:
2286
+ result['query'] = self.query
2287
+ if self.return_main_text is not None:
2288
+ result['returnMainText'] = self.return_main_text
2289
+ if self.return_markdown_text is not None:
2290
+ result['returnMarkdownText'] = self.return_markdown_text
2291
+ if self.return_rich_main_body is not None:
2292
+ result['returnRichMainBody'] = self.return_rich_main_body
2293
+ if self.return_summary is not None:
2294
+ result['returnSummary'] = self.return_summary
2295
+ if self.session_id is not None:
2296
+ result['sessionId'] = self.session_id
2297
+ if self.time_range is not None:
2298
+ result['timeRange'] = self.time_range
2299
+ return result
2300
+
2301
+ def from_map(self, m: dict = None):
2302
+ m = m or dict()
2303
+ if m.get('advancedParams') is not None:
2304
+ self.advanced_params = m.get('advancedParams')
2305
+ if m.get('enableRerank') is not None:
2306
+ self.enable_rerank = m.get('enableRerank')
2307
+ if m.get('industry') is not None:
2308
+ self.industry = m.get('industry')
2309
+ if m.get('page') is not None:
2310
+ self.page = m.get('page')
2311
+ if m.get('query') is not None:
2312
+ self.query = m.get('query')
2313
+ if m.get('returnMainText') is not None:
2314
+ self.return_main_text = m.get('returnMainText')
2315
+ if m.get('returnMarkdownText') is not None:
2316
+ self.return_markdown_text = m.get('returnMarkdownText')
2317
+ if m.get('returnRichMainBody') is not None:
2318
+ self.return_rich_main_body = m.get('returnRichMainBody')
2319
+ if m.get('returnSummary') is not None:
2320
+ self.return_summary = m.get('returnSummary')
2321
+ if m.get('sessionId') is not None:
2322
+ self.session_id = m.get('sessionId')
2323
+ if m.get('timeRange') is not None:
2324
+ self.time_range = m.get('timeRange')
2325
+ return self
2326
+
2327
+
2328
+ class GenericSearchShrinkRequest(TeaModel):
2329
+ def __init__(
2330
+ self,
2331
+ advanced_params_shrink: str = None,
2114
2332
  enable_rerank: bool = None,
2115
2333
  industry: str = None,
2116
2334
  page: int = None,
@@ -2122,6 +2340,7 @@ class GenericSearchRequest(TeaModel):
  session_id: str = None,
  time_range: str = None,
  ):
+ self.advanced_params_shrink = advanced_params_shrink
  self.enable_rerank = enable_rerank
  self.industry = industry
  self.page = page
@@ -2143,6 +2362,8 @@ class GenericSearchRequest(TeaModel):
  return _map

  result = dict()
+ if self.advanced_params_shrink is not None:
+ result['advancedParams'] = self.advanced_params_shrink
  if self.enable_rerank is not None:
  result['enableRerank'] = self.enable_rerank
  if self.industry is not None:
@@ -2167,6 +2388,8 @@ class GenericSearchRequest(TeaModel):

  def from_map(self, m: dict = None):
  m = m or dict()
+ if m.get('advancedParams') is not None:
+ self.advanced_params_shrink = m.get('advancedParams')
  if m.get('enableRerank') is not None:
  self.enable_rerank = m.get('enableRerank')
  if m.get('industry') is not None:
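The GenericSearchShrinkRequest hunks above mirror GenericSearchRequest but carry the already-serialized advanced_params_shrink string under the same advancedParams wire key. A small hedged illustration of that shrink representation follows; the keys inside the JSON are hypothetical.

# Illustrative sketch of the shrink representation: the dict form on
# GenericSearchRequest becomes a JSON string on GenericSearchShrinkRequest.
import json
from alibabacloud_iqs20241111 import models as iqs20241111_models

shrink = iqs20241111_models.GenericSearchShrinkRequest(
    query='Alibaba Cloud IQS',
    advanced_params_shrink=json.dumps({'site': 'aliyun.com'}),  # hypothetical keys
)
# to_map() places the JSON string under the 'advancedParams' key, matching the
# from_map() handling added above.
print(shrink.to_map())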
@@ -2510,6 +2733,124 @@ class ReadPageBasicResponse(TeaModel):
2510
2733
  return self
2511
2734
 
2512
2735
 
2736
+ class ReadPageScrapeRequest(TeaModel):
2737
+ def __init__(
2738
+ self,
2739
+ body: ReadPageScrapeBody = None,
2740
+ ):
2741
+ # post body
2742
+ self.body = body
2743
+
2744
+ def validate(self):
2745
+ if self.body:
2746
+ self.body.validate()
2747
+
2748
+ def to_map(self):
2749
+ _map = super().to_map()
2750
+ if _map is not None:
2751
+ return _map
2752
+
2753
+ result = dict()
2754
+ if self.body is not None:
2755
+ result['body'] = self.body.to_map()
2756
+ return result
2757
+
2758
+ def from_map(self, m: dict = None):
2759
+ m = m or dict()
2760
+ if m.get('body') is not None:
2761
+ temp_model = ReadPageScrapeBody()
2762
+ self.body = temp_model.from_map(m['body'])
2763
+ return self
2764
+
2765
+
2766
+ class ReadPageScrapeResponseBody(TeaModel):
2767
+ def __init__(
2768
+ self,
2769
+ data: ReadPageItem = None,
2770
+ error_code: str = None,
2771
+ error_message: str = None,
2772
+ request_id: str = None,
2773
+ ):
2774
+ self.data = data
2775
+ self.error_code = error_code
2776
+ self.error_message = error_message
2777
+ self.request_id = request_id
2778
+
2779
+ def validate(self):
2780
+ if self.data:
2781
+ self.data.validate()
2782
+
2783
+ def to_map(self):
2784
+ _map = super().to_map()
2785
+ if _map is not None:
2786
+ return _map
2787
+
2788
+ result = dict()
2789
+ if self.data is not None:
2790
+ result['data'] = self.data.to_map()
2791
+ if self.error_code is not None:
2792
+ result['errorCode'] = self.error_code
2793
+ if self.error_message is not None:
2794
+ result['errorMessage'] = self.error_message
2795
+ if self.request_id is not None:
2796
+ result['requestId'] = self.request_id
2797
+ return result
2798
+
2799
+ def from_map(self, m: dict = None):
2800
+ m = m or dict()
2801
+ if m.get('data') is not None:
2802
+ temp_model = ReadPageItem()
2803
+ self.data = temp_model.from_map(m['data'])
2804
+ if m.get('errorCode') is not None:
2805
+ self.error_code = m.get('errorCode')
2806
+ if m.get('errorMessage') is not None:
2807
+ self.error_message = m.get('errorMessage')
2808
+ if m.get('requestId') is not None:
2809
+ self.request_id = m.get('requestId')
2810
+ return self
2811
+
2812
+
2813
+ class ReadPageScrapeResponse(TeaModel):
2814
+ def __init__(
2815
+ self,
2816
+ headers: Dict[str, str] = None,
2817
+ status_code: int = None,
2818
+ body: ReadPageScrapeResponseBody = None,
2819
+ ):
2820
+ self.headers = headers
2821
+ self.status_code = status_code
2822
+ self.body = body
2823
+
2824
+ def validate(self):
2825
+ if self.body:
2826
+ self.body.validate()
2827
+
2828
+ def to_map(self):
2829
+ _map = super().to_map()
2830
+ if _map is not None:
2831
+ return _map
2832
+
2833
+ result = dict()
2834
+ if self.headers is not None:
2835
+ result['headers'] = self.headers
2836
+ if self.status_code is not None:
2837
+ result['statusCode'] = self.status_code
2838
+ if self.body is not None:
2839
+ result['body'] = self.body.to_map()
2840
+ return result
2841
+
2842
+ def from_map(self, m: dict = None):
2843
+ m = m or dict()
2844
+ if m.get('headers') is not None:
2845
+ self.headers = m.get('headers')
2846
+ if m.get('statusCode') is not None:
2847
+ self.status_code = m.get('statusCode')
2848
+ if m.get('body') is not None:
2849
+ temp_model = ReadPageScrapeResponseBody()
2850
+ self.body = temp_model.from_map(m['body'])
2851
+ return self
2852
+
2853
+
2513
2854
  class UnifiedSearchRequest(TeaModel):
2514
2855
  def __init__(
2515
2856
  self,
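The hunk above adds the ReadPageScrapeRequest and ReadPageScrapeResponse wrappers: the response body carries a ReadPageItem (which, per the earlier ReadPageItem hunks, now includes a screenshot field) together with errorCode, errorMessage, and requestId. A hedged sketch of unpacking the response from the ReadPageScrape call sketched earlier in this diff:

# Illustrative sketch: unpacking a ReadPageScrapeResponse. 'scrape_response'
# refers to the read_page_scrape call sketched earlier; the exact form of the
# screenshot payload (URL vs. encoded image) is not specified in this diff.
body = scrape_response.body
if body.error_code:
    print(f'ReadPageScrape failed: {body.error_code}: {body.error_message}')
else:
    page = body.data  # a ReadPageItem
    print(page.status_code)
    print(page.markdown)    # markdown rendering of the page, if requested
    print(page.screenshot)  # new in 1.6.x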
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: alibabacloud-iqs20241111
- Version: 1.5.2
+ Version: 1.6.1
  Summary: Alibaba Cloud IQS (20241111) SDK Library for Python
  Home-page: https://github.com/aliyun/alibabacloud-python-sdk
  Author: Alibaba Cloud SDK
@@ -24,7 +24,7 @@ from setuptools import setup, find_packages
  """
  setup module for alibabacloud_iqs20241111.

- Created on 10/10/2025
+ Created on 24/11/2025

  @author: Alibaba Cloud SDK
  """
@@ -1 +0,0 @@
- __version__ = '1.5.2'