webscout-8.3.7-py3-none-any.whl → webscout-2025.10.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of webscout has been flagged as possibly problematic.

Files changed (273)
  1. webscout/AIauto.py +250 -250
  2. webscout/AIbase.py +379 -379
  3. webscout/AIutel.py +60 -60
  4. webscout/Bard.py +1012 -1012
  5. webscout/Bing_search.py +417 -417
  6. webscout/DWEBS.py +529 -529
  7. webscout/Extra/Act.md +309 -309
  8. webscout/Extra/GitToolkit/__init__.py +10 -10
  9. webscout/Extra/GitToolkit/gitapi/README.md +110 -110
  10. webscout/Extra/GitToolkit/gitapi/__init__.py +11 -11
  11. webscout/Extra/GitToolkit/gitapi/repository.py +195 -195
  12. webscout/Extra/GitToolkit/gitapi/user.py +96 -96
  13. webscout/Extra/GitToolkit/gitapi/utils.py +61 -61
  14. webscout/Extra/YTToolkit/README.md +375 -375
  15. webscout/Extra/YTToolkit/YTdownloader.py +956 -956
  16. webscout/Extra/YTToolkit/__init__.py +2 -2
  17. webscout/Extra/YTToolkit/transcriber.py +475 -475
  18. webscout/Extra/YTToolkit/ytapi/README.md +44 -44
  19. webscout/Extra/YTToolkit/ytapi/__init__.py +6 -6
  20. webscout/Extra/YTToolkit/ytapi/channel.py +307 -307
  21. webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
  22. webscout/Extra/YTToolkit/ytapi/extras.py +118 -118
  23. webscout/Extra/YTToolkit/ytapi/https.py +88 -88
  24. webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
  25. webscout/Extra/YTToolkit/ytapi/playlist.py +58 -58
  26. webscout/Extra/YTToolkit/ytapi/pool.py +7 -7
  27. webscout/Extra/YTToolkit/ytapi/query.py +39 -39
  28. webscout/Extra/YTToolkit/ytapi/stream.py +62 -62
  29. webscout/Extra/YTToolkit/ytapi/utils.py +62 -62
  30. webscout/Extra/YTToolkit/ytapi/video.py +232 -232
  31. webscout/Extra/autocoder/__init__.py +9 -9
  32. webscout/Extra/autocoder/autocoder.py +1105 -1105
  33. webscout/Extra/autocoder/autocoder_utiles.py +332 -332
  34. webscout/Extra/gguf.md +429 -429
  35. webscout/Extra/gguf.py +1213 -1213
  36. webscout/Extra/tempmail/README.md +487 -487
  37. webscout/Extra/tempmail/__init__.py +27 -27
  38. webscout/Extra/tempmail/async_utils.py +140 -140
  39. webscout/Extra/tempmail/base.py +160 -160
  40. webscout/Extra/tempmail/cli.py +186 -186
  41. webscout/Extra/tempmail/emailnator.py +84 -84
  42. webscout/Extra/tempmail/mail_tm.py +360 -360
  43. webscout/Extra/tempmail/temp_mail_io.py +291 -291
  44. webscout/Extra/weather.md +281 -281
  45. webscout/Extra/weather.py +193 -193
  46. webscout/Litlogger/README.md +10 -10
  47. webscout/Litlogger/__init__.py +15 -15
  48. webscout/Litlogger/formats.py +13 -13
  49. webscout/Litlogger/handlers.py +121 -121
  50. webscout/Litlogger/levels.py +13 -13
  51. webscout/Litlogger/logger.py +134 -134
  52. webscout/Provider/AISEARCH/Perplexity.py +332 -332
  53. webscout/Provider/AISEARCH/README.md +279 -279
  54. webscout/Provider/AISEARCH/__init__.py +16 -1
  55. webscout/Provider/AISEARCH/felo_search.py +206 -206
  56. webscout/Provider/AISEARCH/genspark_search.py +323 -323
  57. webscout/Provider/AISEARCH/hika_search.py +185 -185
  58. webscout/Provider/AISEARCH/iask_search.py +410 -410
  59. webscout/Provider/AISEARCH/monica_search.py +219 -219
  60. webscout/Provider/AISEARCH/scira_search.py +316 -316
  61. webscout/Provider/AISEARCH/stellar_search.py +177 -177
  62. webscout/Provider/AISEARCH/webpilotai_search.py +255 -255
  63. webscout/Provider/Aitopia.py +314 -314
  64. webscout/Provider/Apriel.py +306 -0
  65. webscout/Provider/ChatGPTClone.py +236 -236
  66. webscout/Provider/ChatSandbox.py +343 -343
  67. webscout/Provider/Cloudflare.py +324 -324
  68. webscout/Provider/Cohere.py +208 -208
  69. webscout/Provider/Deepinfra.py +370 -366
  70. webscout/Provider/ExaAI.py +260 -260
  71. webscout/Provider/ExaChat.py +308 -308
  72. webscout/Provider/Flowith.py +221 -221
  73. webscout/Provider/GMI.py +293 -0
  74. webscout/Provider/Gemini.py +164 -164
  75. webscout/Provider/GeminiProxy.py +167 -167
  76. webscout/Provider/GithubChat.py +371 -372
  77. webscout/Provider/Groq.py +800 -800
  78. webscout/Provider/HeckAI.py +383 -383
  79. webscout/Provider/Jadve.py +282 -282
  80. webscout/Provider/K2Think.py +307 -307
  81. webscout/Provider/Koboldai.py +205 -205
  82. webscout/Provider/LambdaChat.py +423 -423
  83. webscout/Provider/Nemotron.py +244 -244
  84. webscout/Provider/Netwrck.py +248 -248
  85. webscout/Provider/OLLAMA.py +395 -395
  86. webscout/Provider/OPENAI/Cloudflare.py +393 -393
  87. webscout/Provider/OPENAI/FalconH1.py +451 -451
  88. webscout/Provider/OPENAI/FreeGemini.py +296 -296
  89. webscout/Provider/OPENAI/K2Think.py +431 -431
  90. webscout/Provider/OPENAI/NEMOTRON.py +240 -240
  91. webscout/Provider/OPENAI/PI.py +427 -427
  92. webscout/Provider/OPENAI/README.md +959 -959
  93. webscout/Provider/OPENAI/TogetherAI.py +345 -345
  94. webscout/Provider/OPENAI/TwoAI.py +465 -465
  95. webscout/Provider/OPENAI/__init__.py +33 -18
  96. webscout/Provider/OPENAI/base.py +248 -248
  97. webscout/Provider/OPENAI/chatglm.py +528 -0
  98. webscout/Provider/OPENAI/chatgpt.py +592 -592
  99. webscout/Provider/OPENAI/chatgptclone.py +521 -521
  100. webscout/Provider/OPENAI/chatsandbox.py +202 -202
  101. webscout/Provider/OPENAI/deepinfra.py +318 -314
  102. webscout/Provider/OPENAI/e2b.py +1665 -1665
  103. webscout/Provider/OPENAI/exaai.py +420 -420
  104. webscout/Provider/OPENAI/exachat.py +452 -452
  105. webscout/Provider/OPENAI/friendli.py +232 -232
  106. webscout/Provider/OPENAI/{refact.py → gmi.py} +324 -274
  107. webscout/Provider/OPENAI/groq.py +364 -364
  108. webscout/Provider/OPENAI/heckai.py +314 -314
  109. webscout/Provider/OPENAI/llmchatco.py +337 -337
  110. webscout/Provider/OPENAI/netwrck.py +355 -355
  111. webscout/Provider/OPENAI/oivscode.py +290 -290
  112. webscout/Provider/OPENAI/opkfc.py +518 -518
  113. webscout/Provider/OPENAI/pydantic_imports.py +1 -1
  114. webscout/Provider/OPENAI/scirachat.py +535 -535
  115. webscout/Provider/OPENAI/sonus.py +308 -308
  116. webscout/Provider/OPENAI/standardinput.py +442 -442
  117. webscout/Provider/OPENAI/textpollinations.py +340 -340
  118. webscout/Provider/OPENAI/toolbaz.py +419 -416
  119. webscout/Provider/OPENAI/typefully.py +362 -362
  120. webscout/Provider/OPENAI/utils.py +295 -295
  121. webscout/Provider/OPENAI/venice.py +436 -436
  122. webscout/Provider/OPENAI/wisecat.py +387 -387
  123. webscout/Provider/OPENAI/writecream.py +166 -166
  124. webscout/Provider/OPENAI/x0gpt.py +378 -378
  125. webscout/Provider/OPENAI/yep.py +389 -389
  126. webscout/Provider/OpenGPT.py +230 -230
  127. webscout/Provider/Openai.py +243 -243
  128. webscout/Provider/PI.py +405 -405
  129. webscout/Provider/Perplexitylabs.py +430 -430
  130. webscout/Provider/QwenLM.py +272 -272
  131. webscout/Provider/STT/__init__.py +16 -1
  132. webscout/Provider/Sambanova.py +257 -257
  133. webscout/Provider/StandardInput.py +309 -309
  134. webscout/Provider/TTI/README.md +82 -82
  135. webscout/Provider/TTI/__init__.py +33 -18
  136. webscout/Provider/TTI/aiarta.py +413 -413
  137. webscout/Provider/TTI/base.py +136 -136
  138. webscout/Provider/TTI/bing.py +243 -243
  139. webscout/Provider/TTI/gpt1image.py +149 -149
  140. webscout/Provider/TTI/imagen.py +196 -196
  141. webscout/Provider/TTI/infip.py +211 -211
  142. webscout/Provider/TTI/magicstudio.py +232 -232
  143. webscout/Provider/TTI/monochat.py +219 -219
  144. webscout/Provider/TTI/piclumen.py +214 -214
  145. webscout/Provider/TTI/pixelmuse.py +232 -232
  146. webscout/Provider/TTI/pollinations.py +232 -232
  147. webscout/Provider/TTI/together.py +288 -288
  148. webscout/Provider/TTI/utils.py +12 -12
  149. webscout/Provider/TTI/venice.py +367 -367
  150. webscout/Provider/TTS/README.md +192 -192
  151. webscout/Provider/TTS/__init__.py +33 -18
  152. webscout/Provider/TTS/parler.py +110 -110
  153. webscout/Provider/TTS/streamElements.py +333 -333
  154. webscout/Provider/TTS/utils.py +280 -280
  155. webscout/Provider/TeachAnything.py +237 -237
  156. webscout/Provider/TextPollinationsAI.py +310 -310
  157. webscout/Provider/TogetherAI.py +356 -356
  158. webscout/Provider/TwoAI.py +312 -312
  159. webscout/Provider/TypliAI.py +311 -311
  160. webscout/Provider/UNFINISHED/ChatHub.py +208 -208
  161. webscout/Provider/UNFINISHED/ChutesAI.py +313 -313
  162. webscout/Provider/UNFINISHED/GizAI.py +294 -294
  163. webscout/Provider/UNFINISHED/Marcus.py +198 -198
  164. webscout/Provider/UNFINISHED/Qodo.py +477 -477
  165. webscout/Provider/UNFINISHED/VercelAIGateway.py +338 -338
  166. webscout/Provider/UNFINISHED/XenAI.py +324 -324
  167. webscout/Provider/UNFINISHED/Youchat.py +330 -330
  168. webscout/Provider/UNFINISHED/liner.py +334 -0
  169. webscout/Provider/UNFINISHED/liner_api_request.py +262 -262
  170. webscout/Provider/UNFINISHED/puterjs.py +634 -634
  171. webscout/Provider/UNFINISHED/samurai.py +223 -223
  172. webscout/Provider/UNFINISHED/test_lmarena.py +119 -119
  173. webscout/Provider/Venice.py +250 -250
  174. webscout/Provider/VercelAI.py +256 -256
  175. webscout/Provider/WiseCat.py +231 -231
  176. webscout/Provider/WrDoChat.py +366 -366
  177. webscout/Provider/__init__.py +33 -18
  178. webscout/Provider/ai4chat.py +174 -174
  179. webscout/Provider/akashgpt.py +331 -331
  180. webscout/Provider/cerebras.py +446 -446
  181. webscout/Provider/chatglm.py +394 -301
  182. webscout/Provider/cleeai.py +211 -211
  183. webscout/Provider/elmo.py +282 -282
  184. webscout/Provider/geminiapi.py +208 -208
  185. webscout/Provider/granite.py +261 -261
  186. webscout/Provider/hermes.py +263 -263
  187. webscout/Provider/julius.py +223 -223
  188. webscout/Provider/learnfastai.py +309 -309
  189. webscout/Provider/llama3mitril.py +214 -214
  190. webscout/Provider/llmchat.py +243 -243
  191. webscout/Provider/llmchatco.py +290 -290
  192. webscout/Provider/meta.py +801 -801
  193. webscout/Provider/oivscode.py +309 -309
  194. webscout/Provider/scira_chat.py +383 -383
  195. webscout/Provider/searchchat.py +292 -292
  196. webscout/Provider/sonus.py +258 -258
  197. webscout/Provider/toolbaz.py +370 -367
  198. webscout/Provider/turboseek.py +273 -273
  199. webscout/Provider/typefully.py +207 -207
  200. webscout/Provider/yep.py +372 -372
  201. webscout/__init__.py +30 -31
  202. webscout/__main__.py +5 -5
  203. webscout/auth/api_key_manager.py +189 -189
  204. webscout/auth/config.py +175 -175
  205. webscout/auth/models.py +185 -185
  206. webscout/auth/routes.py +664 -664
  207. webscout/auth/simple_logger.py +236 -236
  208. webscout/cli.py +523 -523
  209. webscout/conversation.py +438 -438
  210. webscout/exceptions.py +361 -361
  211. webscout/litagent/Readme.md +298 -298
  212. webscout/litagent/__init__.py +28 -28
  213. webscout/litagent/agent.py +581 -581
  214. webscout/litagent/constants.py +59 -59
  215. webscout/litprinter/__init__.py +58 -58
  216. webscout/models.py +181 -181
  217. webscout/optimizers.py +419 -419
  218. webscout/prompt_manager.py +288 -288
  219. webscout/sanitize.py +1078 -1078
  220. webscout/scout/README.md +401 -401
  221. webscout/scout/__init__.py +8 -8
  222. webscout/scout/core/__init__.py +6 -6
  223. webscout/scout/core/crawler.py +297 -297
  224. webscout/scout/core/scout.py +706 -706
  225. webscout/scout/core/search_result.py +95 -95
  226. webscout/scout/core/text_analyzer.py +62 -62
  227. webscout/scout/core/text_utils.py +277 -277
  228. webscout/scout/core/web_analyzer.py +51 -51
  229. webscout/scout/element.py +599 -599
  230. webscout/scout/parsers/__init__.py +69 -69
  231. webscout/scout/parsers/html5lib_parser.py +172 -172
  232. webscout/scout/parsers/html_parser.py +236 -236
  233. webscout/scout/parsers/lxml_parser.py +178 -178
  234. webscout/scout/utils.py +37 -37
  235. webscout/swiftcli/Readme.md +323 -323
  236. webscout/swiftcli/__init__.py +95 -95
  237. webscout/swiftcli/core/__init__.py +7 -7
  238. webscout/swiftcli/core/cli.py +308 -308
  239. webscout/swiftcli/core/context.py +104 -104
  240. webscout/swiftcli/core/group.py +241 -241
  241. webscout/swiftcli/decorators/__init__.py +28 -28
  242. webscout/swiftcli/decorators/command.py +221 -221
  243. webscout/swiftcli/decorators/options.py +220 -220
  244. webscout/swiftcli/decorators/output.py +302 -302
  245. webscout/swiftcli/exceptions.py +21 -21
  246. webscout/swiftcli/plugins/__init__.py +9 -9
  247. webscout/swiftcli/plugins/base.py +135 -135
  248. webscout/swiftcli/plugins/manager.py +269 -269
  249. webscout/swiftcli/utils/__init__.py +59 -59
  250. webscout/swiftcli/utils/formatting.py +252 -252
  251. webscout/swiftcli/utils/parsing.py +267 -267
  252. webscout/update_checker.py +117 -117
  253. webscout/version.py +1 -1
  254. webscout/webscout_search.py +1183 -1183
  255. webscout/webscout_search_async.py +649 -649
  256. webscout/yep_search.py +346 -346
  257. webscout/zeroart/README.md +89 -89
  258. webscout/zeroart/__init__.py +134 -134
  259. webscout/zeroart/base.py +66 -66
  260. webscout/zeroart/effects.py +100 -100
  261. webscout/zeroart/fonts.py +1238 -1238
  262. {webscout-8.3.7.dist-info → webscout-2025.10.11.dist-info}/METADATA +937 -937
  263. webscout-2025.10.11.dist-info/RECORD +300 -0
  264. webscout/Provider/AISEARCH/DeepFind.py +0 -254
  265. webscout/Provider/OPENAI/Qwen3.py +0 -303
  266. webscout/Provider/OPENAI/qodo.py +0 -630
  267. webscout/Provider/OPENAI/xenai.py +0 -514
  268. webscout/tempid.py +0 -134
  269. webscout-8.3.7.dist-info/RECORD +0 -301
  270. {webscout-8.3.7.dist-info → webscout-2025.10.11.dist-info}/WHEEL +0 -0
  271. {webscout-8.3.7.dist-info → webscout-2025.10.11.dist-info}/entry_points.txt +0 -0
  272. {webscout-8.3.7.dist-info → webscout-2025.10.11.dist-info}/licenses/LICENSE.md +0 -0
  273. {webscout-8.3.7.dist-info → webscout-2025.10.11.dist-info}/top_level.txt +0 -0
webscout/Bing_search.py CHANGED
@@ -1,417 +1,417 @@
The diff deletes and re-adds all 417 lines of this file; as rendered here, the removed and added text are identical, so the file's content appears once:

"""
BingSearch - A Bing search library with advanced features
"""
import base64
import binascii
import json
from time import sleep
from concurrent.futures import ThreadPoolExecutor
from typing import List, Dict, Optional, Any
from urllib.parse import urlencode, urlparse, parse_qs

from curl_cffi.requests import Session

from webscout.litagent import LitAgent


class BingSearchResult:
    """Class to represent a Bing search result with metadata."""

    def __init__(self, url: str, title: str, description: str):
        self.url = url
        self.title = title
        self.description = description
        self.metadata: Dict[str, Any] = {}

    def __repr__(self) -> str:
        return f"BingSearchResult(url={self.url}, title={self.title}, description={self.description})"


class BingImageResult:
    """Class to represent a Bing image search result."""

    def __init__(self, title: str, image: str, thumbnail: str, url: str, source: str):
        self.title = title
        self.image = image
        self.thumbnail = thumbnail
        self.url = url
        self.source = source

    def __repr__(self):
        return f"BingImageResult(title={self.title}, image={self.image}, url={self.url}, source={self.source})"


class BingNewsResult:
    """Class to represent a Bing news search result."""

    def __init__(self, title: str, url: str, description: str, source: str = ""):
        self.title = title
        self.url = url
        self.description = description
        self.source = source

    def __repr__(self):
        return f"BingNewsResult(title={self.title}, url={self.url}, source={self.source})"


class BingSearch:
    """Bing search implementation with configurable parameters and advanced features."""

    # Shared thread pool used to fetch result pages concurrently.
    _executor: ThreadPoolExecutor = ThreadPoolExecutor()

    def __init__(
        self,
        timeout: int = 10,
        proxies: Optional[Dict[str, str]] = None,
        verify: bool = True,
        lang: str = "en-US",
        sleep_interval: float = 0.0,
        impersonate: str = "chrome110"
    ):
        self.timeout = timeout
        self.proxies = proxies if proxies else {}
        self.verify = verify
        self.lang = lang
        self.sleep_interval = sleep_interval
        self._base_url = "https://www.bing.com"
        self.session = Session(
            proxies=self.proxies,
            verify=self.verify,
            timeout=self.timeout,
            impersonate=impersonate
        )
        # Browser-like fingerprint headers reduce the chance of bot detection.
        self.session.headers.update(LitAgent().generate_fingerprint())

    def _selectors(self, element):
        selectors = {
            'url': 'h2 a',
            'title': 'h2',
            'text': 'p',
            'links': 'ol#b_results > li.b_algo',
            'next': 'div#b_content nav[role="navigation"] a.sb_pagN'
        }
        return selectors[element]

    def _first_page(self, query):
        url = f'{self._base_url}/search?q={query}&search=&form=QBLH'
        return {'url': url, 'data': None}

    def _next_page(self, soup):
        next_page_tag = soup.select_one(self._selectors('next'))
        url = None
        if next_page_tag and next_page_tag.get('href'):
            url = self._base_url + next_page_tag['href']
        return {'url': url, 'data': None}

    def _get_url(self, tag):
        """Resolve a result link, decoding Bing's Base64-encoded redirect URLs."""
        url = tag.get('href', '')
        resp = url
        try:
            parsed_url = urlparse(url)
            query_params = parse_qs(parsed_url.query)
            if "u" in query_params:
                # The "u" parameter carries a two-character prefix followed by a
                # Base64url-encoded target; extra '=' padding is tolerated.
                encoded_url = query_params["u"][0][2:]
                try:
                    decoded_bytes = base64.urlsafe_b64decode(encoded_url + '===')
                except binascii.Error as e:
                    print(f"Error decoding Base64 string: {e}")
                    return url
                resp = decoded_bytes.decode('utf-8')
        except Exception as e:
            print(f"Error decoding Base64 string: {e}")
        return resp

    def _make_request(self, term: str, results: int, start: int = 0) -> str:
        params = {
            "q": term,
            "count": results,
            "first": start + 1,
            "setlang": self.lang,
        }
        url = self._base_url + "/search"
        try:
            resp = self.session.get(url=url, params=params)
            resp.raise_for_status()
            return resp.text
        except Exception as e:
            if hasattr(e, 'response') and e.response is not None:
                raise Exception(f"Bing search failed with status {e.response.status_code}: {str(e)}")
            else:
                raise Exception(f"Bing search failed: {str(e)}")

    def text(
        self,
        keywords: str,
        region: str = None,
        safesearch: str = "moderate",
        max_results: int = 10,
        unique: bool = True
    ) -> List[BingSearchResult]:
        """
        Perform a text search on Bing.

        Args:
            keywords (str): The search keywords.
            region (str, optional): The region for the search. Defaults to None.
            safesearch (str): The safe search level ("on", "moderate", "off"). Defaults to "moderate".
            max_results (int): The maximum number of results to fetch. Defaults to 10.
            unique (bool): Whether to exclude duplicate URLs from the results. Defaults to True.

        Returns:
            List[BingSearchResult]: A list of Bing search results.

        Note:
            region and safesearch are accepted for API consistency but are not
            currently applied to the text-search request.
        """
        if not keywords:
            raise ValueError("Search keywords cannot be empty")
        from bs4 import BeautifulSoup
        fetched_results = []
        fetched_links = set()

        def fetch_page(url):
            try:
                resp = self.session.get(url)
                resp.raise_for_status()
                return resp.text
            except Exception as e:
                if hasattr(e, 'response') and e.response is not None:
                    raise Exception(f"Bing search failed with status {e.response.status_code}: {str(e)}")
                else:
                    raise Exception(f"Bing search failed: {str(e)}")

        # Seed pagination with the first results page.
        urls_to_fetch = [self._first_page(keywords)['url']]
        while len(fetched_results) < max_results and urls_to_fetch:
            html_pages = list(self._executor.map(fetch_page, urls_to_fetch))
            urls_to_fetch = []
            for html in html_pages:
                soup = BeautifulSoup(html, "html.parser")
                result_blocks = soup.select(self._selectors('links'))
                for result in result_blocks:
                    link_tag = result.select_one(self._selectors('url'))
                    if not link_tag:
                        continue
                    url_val = self._get_url(link_tag)
                    title_tag = result.select_one(self._selectors('title'))
                    title = title_tag.get_text(strip=True) if title_tag else ''
                    desc_tag = result.select_one(self._selectors('text'))
                    description = desc_tag.get_text(strip=True) if desc_tag else ''
                    if url_val and title:
                        if unique and url_val in fetched_links:
                            continue
                        fetched_results.append(BingSearchResult(url_val, title, description))
                        fetched_links.add(url_val)
                        if len(fetched_results) >= max_results:
                            break
                if len(fetched_results) >= max_results:
                    break
                next_page_info = self._next_page(soup)
                if next_page_info['url']:
                    urls_to_fetch.append(next_page_info['url'])
                sleep(self.sleep_interval)
        return fetched_results[:max_results]

    def suggestions(self, query: str, region: str = None) -> List[str]:
        """
        Fetches search suggestions for a given query.

        Args:
            query (str): The search query for which suggestions are needed.
            region (str, optional): The region code (e.g., "en-US") for localized suggestions.

        Returns:
            List[str]: A list of suggestion strings related to the query.
        """
        if not query:
            raise ValueError("Search query cannot be empty")
        params = {
            "query": query,
            "mkt": region if region else "en-US"
        }
        url = f"https://api.bing.com/osjson.aspx?{urlencode(params)}"
        try:
            resp = self.session.get(url)
            resp.raise_for_status()
            data = resp.json()
            # The OSJSON response has the shape [query, [suggestion, ...], ...].
            if isinstance(data, list) and len(data) > 1 and isinstance(data[1], list):
                return data[1]
            return []
        except Exception as e:
            if hasattr(e, 'response') and e.response is not None:
                raise Exception(f"Bing suggestions failed with status {e.response.status_code}: {str(e)}")
            else:
                raise Exception(f"Bing suggestions failed: {str(e)}")

    def images(
        self,
        keywords: str,
        region: str = None,
        safesearch: str = "moderate",
        max_results: int = 10
    ) -> List[BingImageResult]:
        """
        Perform an image search on Bing.

        Args:
            keywords (str): The search keywords.
            region (str, optional): The region for the search. Defaults to None.
            safesearch (str): The safe search level ("on", "moderate", "off"). Defaults to "moderate".
            max_results (int): The maximum number of results to fetch. Defaults to 10.

        Returns:
            List[BingImageResult]: A list of Bing image search results.
        """
        if not keywords:
            raise ValueError("Search keywords cannot be empty")
        from bs4 import BeautifulSoup
        safe_map = {
            "on": "Strict",
            "moderate": "Moderate",
            "off": "Off"
        }
        safe = safe_map.get(safesearch.lower(), "Moderate")
        params = {
            "q": keywords,
            "count": max_results,
            "setlang": self.lang,
            "safeSearch": safe,
        }
        if region:
            params["mkt"] = region
        url = f"{self._base_url}/images/search?{urlencode(params)}"
        try:
            resp = self.session.get(url)
            resp.raise_for_status()
            html = resp.text
        except Exception as e:
            if hasattr(e, 'response') and e.response is not None:
                raise Exception(f"Bing image search failed with status {e.response.status_code}: {str(e)}")
            else:
                raise Exception(f"Bing image search failed: {str(e)}")
        soup = BeautifulSoup(html, "html.parser")
        results = []
        # Each image result anchor carries its metadata as JSON in the "m" attribute.
        for item in soup.select("a.iusc"):
            try:
                m = item.get("m")
                meta = json.loads(m) if m else {}
                image_url = meta.get("murl", "")
                thumb_url = meta.get("turl", "")
                title = meta.get("t", "")
                page_url = meta.get("purl", "")
                source = meta.get("surl", "")
                if image_url:
                    results.append(BingImageResult(title, image_url, thumb_url, page_url, source))
                    if len(results) >= max_results:
                        break
            except Exception:
                continue
        return results[:max_results]

    def news(
        self,
        keywords: str,
        region: str = None,
        safesearch: str = "moderate",
        max_results: int = 10,
    ) -> List['BingNewsResult']:
        """Bing news search."""
        if not keywords:
            raise ValueError("Search keywords cannot be empty")
        from bs4 import BeautifulSoup
        safe_map = {
            "on": "Strict",
            "moderate": "Moderate",
            "off": "Off"
        }
        safe = safe_map.get(safesearch.lower(), "Moderate")
        params = {
            "q": keywords,
            "form": "QBNH",
            "safeSearch": safe,
        }
        if region:
            params["mkt"] = region
        url = f"{self._base_url}/news/search?{urlencode(params)}"
        try:
            resp = self.session.get(url)
            resp.raise_for_status()
        except Exception as e:
            if hasattr(e, 'response') and e.response is not None:
                raise Exception(f"Bing news search failed with status {e.response.status_code}: {str(e)}")
            else:
                raise Exception(f"Bing news search failed: {str(e)}")
        soup = BeautifulSoup(resp.text, "html.parser")
        results = []
        for item in soup.select("div.news-card, div.card, div.newsitem, div.card-content, div.t_s_main"):
            a_tag = item.find("a")
            title = a_tag.get_text(strip=True) if a_tag else ''
            url_val = a_tag['href'] if a_tag and a_tag.has_attr('href') else ''
            desc_tag = item.find("div", class_="snippet") or item.find("div", class_="news-card-snippet") or item.find("div", class_="snippetText")
            description = desc_tag.get_text(strip=True) if desc_tag else ''
            source_tag = item.find("div", class_="source")
            source = source_tag.get_text(strip=True) if source_tag else ''
            if url_val and title:
                results.append(BingNewsResult(title, url_val, description, source))
                if len(results) >= max_results:
                    break
        # Fallback: try the main news list if the card selectors matched nothing.
        if not results:
            for item in soup.select("a.title"):
                title = item.get_text(strip=True)
                url_val = item['href'] if item.has_attr('href') else ''
                if url_val and title:
                    results.append(BingNewsResult(title, url_val, '', ''))
                    if len(results) >= max_results:
                        break
        return results[:max_results]


if __name__ == "__main__":
    from rich import print
    bing = BingSearch(timeout=10, proxies=None, verify=True)

    print("TEXT SEARCH RESULTS:")
    text_results = bing.text(
        keywords="Python programming",
        region="us",
        safesearch="moderate",
        max_results=30
    )
    for result in text_results:
        print(f"Title: {result.title}")
        print(f"URL: {result.url}")
        print(f"Description: {result.description}")
        print("---")

    print("\nSEARCH SUGGESTIONS:")
    suggestions = bing.suggestions("how to")
    print(suggestions)

    print("\nIMAGE SEARCH RESULTS:")
    image_results = bing.images(
        keywords="Python programming",
        region="us",
        safesearch="moderate",
        max_results=10
    )
    for result in image_results:
        print(f"Title: {result.title}")
        print(f"Image URL: {result.image}")
        print(f"Page URL: {result.url}")
        print(f"Source: {result.source}")
        print("---")

    print("\nNEWS SEARCH RESULTS:")
    news_results = bing.news(
        keywords="Python programming",
        region="us",
        safesearch="moderate",
        max_results=10
    )
    for result in news_results:
        print(f"Title: {result.title}")
        print(f"URL: {result.url}")
        print(f"Description: {result.description}")
        print(f"Source: {result.source}")
        print("---")
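For reference, a standalone sketch of the redirect-decoding step that _get_url performs above. Bing wraps outbound result links in a redirect whose "u" query parameter appears to hold a two-character version prefix (observed as "a1") followed by an unpadded Base64url-encoded target URL; the helper name decode_bing_redirect and the example link below are illustrative, not part of webscout.

import base64
import binascii
from urllib.parse import urlparse, parse_qs

def decode_bing_redirect(href: str) -> str:
    """Illustrative helper: recover the target URL from a Bing redirect link."""
    params = parse_qs(urlparse(href).query)
    if "u" not in params:
        return href  # plain link, nothing to decode
    encoded = params["u"][0][2:]  # drop the assumed two-character prefix
    try:
        # Appending '===' over-pads the input, which the lenient decoder
        # tolerates, so missing padding is fixed regardless of length.
        return base64.urlsafe_b64decode(encoded + "===").decode("utf-8")
    except (binascii.Error, UnicodeDecodeError):
        return href  # fall back to the raw href on malformed input

# Example (hypothetical redirect link):
href = "https://www.bing.com/ck/a?u=a1aHR0cHM6Ly9leGFtcGxlLmNvbS8&ntb=1"
print(decode_bing_redirect(href))  # -> https://example.com/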