webscout 8.3.7__py3-none-any.whl → 2025.10.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic. Click here for more details.
- webscout/AIauto.py +250 -250
- webscout/AIbase.py +379 -379
- webscout/AIutel.py +60 -60
- webscout/Bard.py +1012 -1012
- webscout/Bing_search.py +417 -417
- webscout/DWEBS.py +529 -529
- webscout/Extra/Act.md +309 -309
- webscout/Extra/GitToolkit/__init__.py +10 -10
- webscout/Extra/GitToolkit/gitapi/README.md +110 -110
- webscout/Extra/GitToolkit/gitapi/__init__.py +11 -11
- webscout/Extra/GitToolkit/gitapi/repository.py +195 -195
- webscout/Extra/GitToolkit/gitapi/user.py +96 -96
- webscout/Extra/GitToolkit/gitapi/utils.py +61 -61
- webscout/Extra/YTToolkit/README.md +375 -375
- webscout/Extra/YTToolkit/YTdownloader.py +956 -956
- webscout/Extra/YTToolkit/__init__.py +2 -2
- webscout/Extra/YTToolkit/transcriber.py +475 -475
- webscout/Extra/YTToolkit/ytapi/README.md +44 -44
- webscout/Extra/YTToolkit/ytapi/__init__.py +6 -6
- webscout/Extra/YTToolkit/ytapi/channel.py +307 -307
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
- webscout/Extra/YTToolkit/ytapi/extras.py +118 -118
- webscout/Extra/YTToolkit/ytapi/https.py +88 -88
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
- webscout/Extra/YTToolkit/ytapi/playlist.py +58 -58
- webscout/Extra/YTToolkit/ytapi/pool.py +7 -7
- webscout/Extra/YTToolkit/ytapi/query.py +39 -39
- webscout/Extra/YTToolkit/ytapi/stream.py +62 -62
- webscout/Extra/YTToolkit/ytapi/utils.py +62 -62
- webscout/Extra/YTToolkit/ytapi/video.py +232 -232
- webscout/Extra/autocoder/__init__.py +9 -9
- webscout/Extra/autocoder/autocoder.py +1105 -1105
- webscout/Extra/autocoder/autocoder_utiles.py +332 -332
- webscout/Extra/gguf.md +429 -429
- webscout/Extra/gguf.py +1213 -1213
- webscout/Extra/tempmail/README.md +487 -487
- webscout/Extra/tempmail/__init__.py +27 -27
- webscout/Extra/tempmail/async_utils.py +140 -140
- webscout/Extra/tempmail/base.py +160 -160
- webscout/Extra/tempmail/cli.py +186 -186
- webscout/Extra/tempmail/emailnator.py +84 -84
- webscout/Extra/tempmail/mail_tm.py +360 -360
- webscout/Extra/tempmail/temp_mail_io.py +291 -291
- webscout/Extra/weather.md +281 -281
- webscout/Extra/weather.py +193 -193
- webscout/Litlogger/README.md +10 -10
- webscout/Litlogger/__init__.py +15 -15
- webscout/Litlogger/formats.py +13 -13
- webscout/Litlogger/handlers.py +121 -121
- webscout/Litlogger/levels.py +13 -13
- webscout/Litlogger/logger.py +134 -134
- webscout/Provider/AISEARCH/Perplexity.py +332 -332
- webscout/Provider/AISEARCH/README.md +279 -279
- webscout/Provider/AISEARCH/__init__.py +16 -1
- webscout/Provider/AISEARCH/felo_search.py +206 -206
- webscout/Provider/AISEARCH/genspark_search.py +323 -323
- webscout/Provider/AISEARCH/hika_search.py +185 -185
- webscout/Provider/AISEARCH/iask_search.py +410 -410
- webscout/Provider/AISEARCH/monica_search.py +219 -219
- webscout/Provider/AISEARCH/scira_search.py +316 -316
- webscout/Provider/AISEARCH/stellar_search.py +177 -177
- webscout/Provider/AISEARCH/webpilotai_search.py +255 -255
- webscout/Provider/Aitopia.py +314 -314
- webscout/Provider/Andi.py +1 -1
- webscout/Provider/Apriel.py +306 -0
- webscout/Provider/ChatGPTClone.py +237 -236
- webscout/Provider/ChatSandbox.py +343 -343
- webscout/Provider/Cloudflare.py +324 -324
- webscout/Provider/Cohere.py +208 -208
- webscout/Provider/Deepinfra.py +370 -366
- webscout/Provider/ExaAI.py +260 -260
- webscout/Provider/ExaChat.py +308 -308
- webscout/Provider/Flowith.py +221 -221
- webscout/Provider/GMI.py +293 -0
- webscout/Provider/Gemini.py +164 -164
- webscout/Provider/GeminiProxy.py +167 -167
- webscout/Provider/GithubChat.py +371 -372
- webscout/Provider/Groq.py +800 -800
- webscout/Provider/HeckAI.py +383 -383
- webscout/Provider/Jadve.py +282 -282
- webscout/Provider/K2Think.py +307 -307
- webscout/Provider/Koboldai.py +205 -205
- webscout/Provider/LambdaChat.py +423 -423
- webscout/Provider/Nemotron.py +244 -244
- webscout/Provider/Netwrck.py +248 -248
- webscout/Provider/OLLAMA.py +395 -395
- webscout/Provider/OPENAI/Cloudflare.py +393 -393
- webscout/Provider/OPENAI/FalconH1.py +451 -451
- webscout/Provider/OPENAI/FreeGemini.py +296 -296
- webscout/Provider/OPENAI/K2Think.py +431 -431
- webscout/Provider/OPENAI/NEMOTRON.py +240 -240
- webscout/Provider/OPENAI/PI.py +427 -427
- webscout/Provider/OPENAI/README.md +959 -959
- webscout/Provider/OPENAI/TogetherAI.py +345 -345
- webscout/Provider/OPENAI/TwoAI.py +465 -465
- webscout/Provider/OPENAI/__init__.py +33 -18
- webscout/Provider/OPENAI/base.py +248 -248
- webscout/Provider/OPENAI/chatglm.py +528 -0
- webscout/Provider/OPENAI/chatgpt.py +592 -592
- webscout/Provider/OPENAI/chatgptclone.py +521 -521
- webscout/Provider/OPENAI/chatsandbox.py +202 -202
- webscout/Provider/OPENAI/deepinfra.py +318 -314
- webscout/Provider/OPENAI/e2b.py +1665 -1665
- webscout/Provider/OPENAI/exaai.py +420 -420
- webscout/Provider/OPENAI/exachat.py +452 -452
- webscout/Provider/OPENAI/friendli.py +232 -232
- webscout/Provider/OPENAI/{refact.py → gmi.py} +324 -274
- webscout/Provider/OPENAI/groq.py +364 -364
- webscout/Provider/OPENAI/heckai.py +314 -314
- webscout/Provider/OPENAI/llmchatco.py +337 -337
- webscout/Provider/OPENAI/netwrck.py +355 -355
- webscout/Provider/OPENAI/oivscode.py +290 -290
- webscout/Provider/OPENAI/opkfc.py +518 -518
- webscout/Provider/OPENAI/pydantic_imports.py +1 -1
- webscout/Provider/OPENAI/scirachat.py +535 -535
- webscout/Provider/OPENAI/sonus.py +308 -308
- webscout/Provider/OPENAI/standardinput.py +442 -442
- webscout/Provider/OPENAI/textpollinations.py +340 -340
- webscout/Provider/OPENAI/toolbaz.py +419 -416
- webscout/Provider/OPENAI/typefully.py +362 -362
- webscout/Provider/OPENAI/utils.py +295 -295
- webscout/Provider/OPENAI/venice.py +436 -436
- webscout/Provider/OPENAI/wisecat.py +387 -387
- webscout/Provider/OPENAI/writecream.py +166 -166
- webscout/Provider/OPENAI/x0gpt.py +378 -378
- webscout/Provider/OPENAI/yep.py +389 -389
- webscout/Provider/OpenGPT.py +230 -230
- webscout/Provider/Openai.py +243 -243
- webscout/Provider/PI.py +405 -405
- webscout/Provider/Perplexitylabs.py +430 -430
- webscout/Provider/QwenLM.py +272 -272
- webscout/Provider/STT/__init__.py +16 -1
- webscout/Provider/Sambanova.py +257 -257
- webscout/Provider/StandardInput.py +309 -309
- webscout/Provider/TTI/README.md +82 -82
- webscout/Provider/TTI/__init__.py +33 -18
- webscout/Provider/TTI/aiarta.py +413 -413
- webscout/Provider/TTI/base.py +136 -136
- webscout/Provider/TTI/bing.py +243 -243
- webscout/Provider/TTI/gpt1image.py +149 -149
- webscout/Provider/TTI/imagen.py +196 -196
- webscout/Provider/TTI/infip.py +211 -211
- webscout/Provider/TTI/magicstudio.py +232 -232
- webscout/Provider/TTI/monochat.py +219 -219
- webscout/Provider/TTI/piclumen.py +214 -214
- webscout/Provider/TTI/pixelmuse.py +232 -232
- webscout/Provider/TTI/pollinations.py +232 -232
- webscout/Provider/TTI/together.py +288 -288
- webscout/Provider/TTI/utils.py +12 -12
- webscout/Provider/TTI/venice.py +367 -367
- webscout/Provider/TTS/README.md +192 -192
- webscout/Provider/TTS/__init__.py +33 -18
- webscout/Provider/TTS/parler.py +110 -110
- webscout/Provider/TTS/streamElements.py +333 -333
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TeachAnything.py +237 -237
- webscout/Provider/TextPollinationsAI.py +310 -310
- webscout/Provider/TogetherAI.py +356 -356
- webscout/Provider/TwoAI.py +312 -312
- webscout/Provider/TypliAI.py +311 -311
- webscout/Provider/UNFINISHED/ChatHub.py +208 -208
- webscout/Provider/UNFINISHED/ChutesAI.py +313 -313
- webscout/Provider/UNFINISHED/GizAI.py +294 -294
- webscout/Provider/UNFINISHED/Marcus.py +198 -198
- webscout/Provider/UNFINISHED/Qodo.py +477 -477
- webscout/Provider/UNFINISHED/VercelAIGateway.py +338 -338
- webscout/Provider/UNFINISHED/XenAI.py +324 -324
- webscout/Provider/UNFINISHED/Youchat.py +330 -330
- webscout/Provider/UNFINISHED/liner.py +334 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +262 -262
- webscout/Provider/UNFINISHED/puterjs.py +634 -634
- webscout/Provider/UNFINISHED/samurai.py +223 -223
- webscout/Provider/UNFINISHED/test_lmarena.py +119 -119
- webscout/Provider/Venice.py +250 -250
- webscout/Provider/VercelAI.py +256 -256
- webscout/Provider/WiseCat.py +231 -231
- webscout/Provider/WrDoChat.py +366 -366
- webscout/Provider/__init__.py +33 -18
- webscout/Provider/ai4chat.py +174 -174
- webscout/Provider/akashgpt.py +331 -331
- webscout/Provider/cerebras.py +446 -446
- webscout/Provider/chatglm.py +394 -301
- webscout/Provider/cleeai.py +211 -211
- webscout/Provider/elmo.py +282 -282
- webscout/Provider/geminiapi.py +208 -208
- webscout/Provider/granite.py +261 -261
- webscout/Provider/hermes.py +263 -263
- webscout/Provider/julius.py +223 -223
- webscout/Provider/learnfastai.py +309 -309
- webscout/Provider/llama3mitril.py +214 -214
- webscout/Provider/llmchat.py +243 -243
- webscout/Provider/llmchatco.py +290 -290
- webscout/Provider/meta.py +801 -801
- webscout/Provider/oivscode.py +309 -309
- webscout/Provider/scira_chat.py +383 -383
- webscout/Provider/searchchat.py +292 -292
- webscout/Provider/sonus.py +258 -258
- webscout/Provider/toolbaz.py +370 -367
- webscout/Provider/turboseek.py +273 -273
- webscout/Provider/typefully.py +207 -207
- webscout/Provider/yep.py +372 -372
- webscout/__init__.py +27 -31
- webscout/__main__.py +5 -5
- webscout/auth/api_key_manager.py +189 -189
- webscout/auth/config.py +175 -175
- webscout/auth/models.py +185 -185
- webscout/auth/routes.py +663 -664
- webscout/auth/simple_logger.py +236 -236
- webscout/cli.py +523 -523
- webscout/conversation.py +438 -438
- webscout/exceptions.py +361 -361
- webscout/litagent/Readme.md +298 -298
- webscout/litagent/__init__.py +28 -28
- webscout/litagent/agent.py +581 -581
- webscout/litagent/constants.py +59 -59
- webscout/litprinter/__init__.py +58 -58
- webscout/models.py +181 -181
- webscout/optimizers.py +419 -419
- webscout/prompt_manager.py +288 -288
- webscout/sanitize.py +1078 -1078
- webscout/scout/README.md +401 -401
- webscout/scout/__init__.py +8 -8
- webscout/scout/core/__init__.py +6 -6
- webscout/scout/core/crawler.py +297 -297
- webscout/scout/core/scout.py +706 -706
- webscout/scout/core/search_result.py +95 -95
- webscout/scout/core/text_analyzer.py +62 -62
- webscout/scout/core/text_utils.py +277 -277
- webscout/scout/core/web_analyzer.py +51 -51
- webscout/scout/element.py +599 -599
- webscout/scout/parsers/__init__.py +69 -69
- webscout/scout/parsers/html5lib_parser.py +172 -172
- webscout/scout/parsers/html_parser.py +236 -236
- webscout/scout/parsers/lxml_parser.py +178 -178
- webscout/scout/utils.py +37 -37
- webscout/search/__init__.py +51 -0
- webscout/search/base.py +195 -0
- webscout/search/duckduckgo_main.py +54 -0
- webscout/search/engines/__init__.py +48 -0
- webscout/search/engines/bing.py +84 -0
- webscout/search/engines/bing_news.py +52 -0
- webscout/search/engines/brave.py +43 -0
- webscout/search/engines/duckduckgo/__init__.py +25 -0
- webscout/search/engines/duckduckgo/answers.py +78 -0
- webscout/search/engines/duckduckgo/base.py +187 -0
- webscout/search/engines/duckduckgo/images.py +97 -0
- webscout/search/engines/duckduckgo/maps.py +168 -0
- webscout/search/engines/duckduckgo/news.py +68 -0
- webscout/search/engines/duckduckgo/suggestions.py +21 -0
- webscout/search/engines/duckduckgo/text.py +211 -0
- webscout/search/engines/duckduckgo/translate.py +47 -0
- webscout/search/engines/duckduckgo/videos.py +63 -0
- webscout/search/engines/duckduckgo/weather.py +74 -0
- webscout/search/engines/mojeek.py +37 -0
- webscout/search/engines/wikipedia.py +56 -0
- webscout/search/engines/yahoo.py +65 -0
- webscout/search/engines/yahoo_news.py +64 -0
- webscout/search/engines/yandex.py +43 -0
- webscout/search/engines/yep/__init__.py +13 -0
- webscout/search/engines/yep/base.py +32 -0
- webscout/search/engines/yep/images.py +99 -0
- webscout/search/engines/yep/suggestions.py +35 -0
- webscout/search/engines/yep/text.py +114 -0
- webscout/search/http_client.py +156 -0
- webscout/search/results.py +137 -0
- webscout/search/yep_main.py +44 -0
- webscout/swiftcli/Readme.md +323 -323
- webscout/swiftcli/__init__.py +95 -95
- webscout/swiftcli/core/__init__.py +7 -7
- webscout/swiftcli/core/cli.py +308 -308
- webscout/swiftcli/core/context.py +104 -104
- webscout/swiftcli/core/group.py +241 -241
- webscout/swiftcli/decorators/__init__.py +28 -28
- webscout/swiftcli/decorators/command.py +221 -221
- webscout/swiftcli/decorators/options.py +220 -220
- webscout/swiftcli/decorators/output.py +302 -302
- webscout/swiftcli/exceptions.py +21 -21
- webscout/swiftcli/plugins/__init__.py +9 -9
- webscout/swiftcli/plugins/base.py +135 -135
- webscout/swiftcli/plugins/manager.py +269 -269
- webscout/swiftcli/utils/__init__.py +59 -59
- webscout/swiftcli/utils/formatting.py +252 -252
- webscout/swiftcli/utils/parsing.py +267 -267
- webscout/update_checker.py +117 -117
- webscout/version.py +1 -1
- webscout/version.py.bak +2 -0
- webscout/zeroart/README.md +89 -89
- webscout/zeroart/__init__.py +134 -134
- webscout/zeroart/base.py +66 -66
- webscout/zeroart/effects.py +100 -100
- webscout/zeroart/fonts.py +1238 -1238
- {webscout-8.3.7.dist-info → webscout-2025.10.13.dist-info}/METADATA +936 -937
- webscout-2025.10.13.dist-info/RECORD +329 -0
- webscout/Provider/AISEARCH/DeepFind.py +0 -254
- webscout/Provider/OPENAI/Qwen3.py +0 -303
- webscout/Provider/OPENAI/qodo.py +0 -630
- webscout/Provider/OPENAI/xenai.py +0 -514
- webscout/tempid.py +0 -134
- webscout/webscout_search.py +0 -1183
- webscout/webscout_search_async.py +0 -649
- webscout/yep_search.py +0 -346
- webscout-8.3.7.dist-info/RECORD +0 -301
- {webscout-8.3.7.dist-info → webscout-2025.10.13.dist-info}/WHEEL +0 -0
- {webscout-8.3.7.dist-info → webscout-2025.10.13.dist-info}/entry_points.txt +0 -0
- {webscout-8.3.7.dist-info → webscout-2025.10.13.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.3.7.dist-info → webscout-2025.10.13.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"""Mojeek search engine implementation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
from ..base import BaseSearchEngine
|
|
9
|
+
from ..results import TextResult
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Mojeek(BaseSearchEngine[TextResult]):
    """Mojeek text search engine."""

    name = "mojeek"
    category = "text"
    provider = "mojeek"

    search_url = "https://www.mojeek.com/search"
    search_method = "GET"

    items_xpath = "//ul[contains(@class, 'results')]/li"
    elements_xpath: Mapping[str, str] = {
        "title": ".//h2//text()",
        "href": ".//h2/a/@href",
        "body": ".//p[@class='s']//text()",
    }

    def build_payload(
        self, query: str, region: str, safesearch: str, timelimit: str | None, page: int = 1, **kwargs: Any
    ) -> dict[str, Any]:
        """Assemble the GET parameters for a Mojeek query."""
        # Mojeek's "safe" flag is binary: "1" filters results, "0" does not,
        # so "moderate" and "off" both map to unfiltered.
        safe_flag = {"on": "1", "moderate": "0", "off": "0"}[safesearch.lower()]
        params: dict[str, Any] = {"q": query, "safe": safe_flag}
        if page > 1:
            # "s" is the 1-based index of the first result (10 results per page).
            params["s"] = f"{(page - 1) * 10 + 1}"
        return params
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
"""Wikipedia text search engine."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from typing import Any
|
|
7
|
+
from urllib.parse import quote
|
|
8
|
+
|
|
9
|
+
from ..base import BaseSearchEngine
|
|
10
|
+
from ..results import TextResult
|
|
11
|
+
from ...utils import json_loads
|
|
12
|
+
|
|
13
|
+
logger = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Wikipedia(BaseSearchEngine[TextResult]):
    """Wikipedia text search engine (opensearch API)."""

    name = "wikipedia"
    category = "text"
    provider = "wikipedia"
    priority = 2

    search_url = "https://{lang}.wikipedia.org/w/api.php?action=opensearch&search={query}"
    search_method = "GET"

    def build_payload(
        self, query: str, region: str, safesearch: str, timelimit: str | None, page: int = 1, **kwargs: Any
    ) -> dict[str, Any]:
        """Build a payload for the search request."""
        # Region is "cc-ll"; only the language half selects the wiki subdomain.
        _country, lang = region.lower().split("-")
        encoded_query = quote(query)
        # The whole query is baked into the URL, so the payload stays empty.
        self.search_url = (
            f"https://{lang}.wikipedia.org/w/api.php?action=opensearch&profile=fuzzy&limit=1&search={encoded_query}"
        )
        self.lang = lang  # used in extract_results
        empty_payload: dict[str, Any] = {}
        return empty_payload

    def extract_results(self, html_text: str) -> list[TextResult]:
        """Parse the opensearch JSON response into TextResult objects."""
        data = json_loads(html_text)
        # Opensearch replies as [query, titles, descriptions, urls].
        if not data or len(data) < 4:
            return []

        extracted: list[TextResult] = []
        for title, description, url in zip(data[1], data[2], data[3]):
            item = TextResult()
            item.title = title
            item.body = description
            item.href = url
            extracted.append(item)

        return extracted
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"""Yahoo search engine."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
from secrets import token_urlsafe
|
|
7
|
+
from typing import Any
|
|
8
|
+
from urllib.parse import unquote_plus
|
|
9
|
+
|
|
10
|
+
from ..base import BaseSearchEngine
|
|
11
|
+
from ..results import TextResult
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def extract_url(u: str) -> str:
    """Unwrap Yahoo's redirect link and return the real target url.

    Yahoo wraps results as ``.../RU=<percent-encoded url>/RK=...``; anything
    without that marker is returned unchanged.
    """
    marker = u.find("/RU=")
    if marker == -1:
        return u
    start = marker + 4
    end = u.find("/RK=", start)
    encoded = u[start:] if end == -1 else u[start:end]
    return unquote_plus(encoded)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class Yahoo(BaseSearchEngine[TextResult]):
    """Yahoo text search engine (results backed by Bing)."""

    name = "yahoo"
    category = "text"
    provider = "bing"

    search_url = "https://search.yahoo.com/search"
    search_method = "GET"

    items_xpath = "//div[contains(@class, 'relsrch')]"
    elements_xpath: Mapping[str, str] = {
        "title": ".//div[contains(@class, 'Title')]//h3//text()",
        "href": ".//div[contains(@class, 'Title')]//a/@href",
        "body": ".//div[contains(@class, 'Text')]//text()",
    }

    def build_payload(
        self, query: str, region: str, safesearch: str, timelimit: str | None, page: int = 1, **kwargs: Any
    ) -> dict[str, Any]:
        """Build a payload for the search request."""
        # Randomised _ylt/_ylu path segments mimic a browser session token.
        ylt = token_urlsafe(24 * 3 // 4)
        ylu = token_urlsafe(47 * 3 // 4)
        self.search_url = f"https://search.yahoo.com/search;_ylt={ylt};_ylu={ylu}"
        params: dict[str, Any] = {"p": query}
        if page > 1:
            # Yahoo paginates via the 1-based "b" offset in steps of 7.
            params["b"] = f"{(page - 1) * 7 + 1}"
        if timelimit:
            params["btf"] = timelimit
        return params

    def post_extract_results(self, results: list[TextResult]) -> list[TextResult]:
        """Drop Bing ad-click entries and unwrap Yahoo redirect urls."""
        kept: list[TextResult] = []
        for item in results:
            if item.href.startswith("https://www.bing.com/aclick?"):
                continue
            if "/RU=" in item.href:
                item.href = extract_url(item.href)
            kept.append(item)
        return kept
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
"""Yahoo news search engine."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
from secrets import token_urlsafe
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from ..base import BaseSearchEngine
|
|
10
|
+
from ..results import NewsResult
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def extract_image(u: str) -> str:
    """Return *u* unless it is an inline data-URI image, which becomes ''."""
    return "" if u and u.startswith("data:image") else u
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def extract_source(s: str) -> str:
    """Remove ' via Yahoo' from string."""
    if not s:
        return s
    return s.replace(" via Yahoo", "")
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class YahooNews(BaseSearchEngine[NewsResult]):
    """Yahoo news search engine (results backed by Bing)."""

    name = "yahoo"
    category = "news"
    provider = "bing"

    search_url = "https://news.search.yahoo.com/search"
    search_method = "GET"

    items_xpath = "//div[contains(@class, 'NewsArticle')]"
    elements_xpath: Mapping[str, str] = {
        "date": ".//span[contains(@class, 'fc-2nd')]//text()",
        "title": ".//h4//a//text()",
        "url": ".//h4//a/@href",
        "body": ".//p//text()",
        "image": ".//img/@src",
        "source": ".//span[contains(@class, 's-source')]//text()",
    }

    def build_payload(
        self, query: str, region: str, safesearch: str, timelimit: str | None, page: int = 1, **kwargs: Any
    ) -> dict[str, Any]:
        """Build a payload for the search request."""
        # Randomised _ylt/_ylu path segments mimic a browser session token.
        ylt = token_urlsafe(24 * 3 // 4)
        ylu = token_urlsafe(47 * 3 // 4)
        self.search_url = f"https://news.search.yahoo.com/search;_ylt={ylt};_ylu={ylu}"
        params: dict[str, Any] = {"p": query}
        if page > 1:
            # 1-based offset of the first article; 10 articles per page.
            params["b"] = f"{(page - 1) * 10 + 1}"
        if timelimit:
            params["btf"] = timelimit
        return params

    def post_extract_results(self, results: list[NewsResult]) -> list[NewsResult]:
        """Normalise image and source fields in place and return the list."""
        for item in results:
            item.image = extract_image(item.image)
            item.source = extract_source(item.source)
        return results
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"""Yandex search engine."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
from random import SystemRandom
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
from ..base import BaseSearchEngine
|
|
10
|
+
from ..results import TextResult
|
|
11
|
+
|
|
12
|
+
random = SystemRandom()
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Yandex(BaseSearchEngine[TextResult]):
|
|
16
|
+
"""Yandex search engine."""
|
|
17
|
+
|
|
18
|
+
name = "yandex"
|
|
19
|
+
category = "text"
|
|
20
|
+
provider = "yandex"
|
|
21
|
+
|
|
22
|
+
search_url = "https://yandex.com/search/"
|
|
23
|
+
search_method = "GET"
|
|
24
|
+
|
|
25
|
+
items_xpath = "//li[contains(@class, 'serp-item')]"
|
|
26
|
+
elements_xpath: Mapping[str, str] = {
|
|
27
|
+
"title": ".//h2//text()",
|
|
28
|
+
"href": ".//h2/a/@href",
|
|
29
|
+
"body": ".//div[contains(@class, 'text-container')]//text()",
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
def build_payload(
|
|
33
|
+
self, query: str, region: str, safesearch: str, timelimit: str | None, page: int = 1, **kwargs: Any
|
|
34
|
+
) -> dict[str, Any]:
|
|
35
|
+
"""Build a payload for the search request."""
|
|
36
|
+
safesearch_base = {"on": "1", "moderate": "0", "off": "0"}
|
|
37
|
+
payload = {
|
|
38
|
+
"text": query,
|
|
39
|
+
"family": safesearch_base[safesearch.lower()],
|
|
40
|
+
}
|
|
41
|
+
if page > 1:
|
|
42
|
+
payload["p"] = str(page - 1)
|
|
43
|
+
return payload
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"""Yep search engines package."""
|
|
2
|
+
|
|
3
|
+
from .base import YepBase
|
|
4
|
+
from .images import YepImages
|
|
5
|
+
from .suggestions import YepSuggestions
|
|
6
|
+
from .text import YepSearch as YepTextSearch
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
"YepBase",
|
|
10
|
+
"YepTextSearch",
|
|
11
|
+
"YepImages",
|
|
12
|
+
"YepSuggestions",
|
|
13
|
+
]
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from ....litagent import LitAgent
|
|
4
|
+
from curl_cffi.requests import Session
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class YepBase:
    """Base class for Yep search engines.

    Holds the shared API endpoint and a curl_cffi session configured with
    browser-fingerprint headers and Yep's expected Origin/Referer.
    """

    def __init__(
        self,
        timeout: int = 20,
        proxies: dict[str, str] | None = None,
        verify: bool = True,
        impersonate: str = "chrome110",
    ):
        # Single endpoint shared by all Yep search categories (web, images, ...).
        self.base_url = "https://api.yep.com/fs/2/search"
        self.timeout = timeout

        session = Session(
            proxies=proxies,
            verify=verify,
            impersonate=impersonate,
            timeout=timeout,
        )
        headers = dict(LitAgent().generate_fingerprint())
        headers["Origin"] = "https://yep.com"
        headers["Referer"] = "https://yep.com/"
        session.headers.update(headers)
        self.session = session
|
|
32
|
+
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Dict, List, Optional
|
|
4
|
+
from urllib.parse import urlencode
|
|
5
|
+
|
|
6
|
+
from .base import YepBase
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class YepImages(YepBase):
    """Yep image search engine."""

    def run(self, *args, **kwargs) -> List[Dict[str, str]]:
        """Search Yep for images.

        Accepts positional or keyword arguments:
            keywords: query string.
            region: country code sent as ``gl`` (default "all").
            safesearch: "on" / "moderate" / "off" (default "moderate").
            max_results: optional cap on the number of results.

        Returns a list of dicts with title/image/thumbnail/url/height/width/source
        (and thumbnail_hd when available). Raises Exception on request failure.
        """
        keywords = args[0] if args else kwargs.get("keywords")
        region = args[1] if len(args) > 1 else kwargs.get("region", "all")
        safesearch = args[2] if len(args) > 2 else kwargs.get("safesearch", "moderate")
        max_results = args[3] if len(args) > 3 else kwargs.get("max_results")

        safe_search_map = {
            "on": "on",
            "moderate": "moderate",
            "off": "off"
        }
        safe_setting = safe_search_map.get(safesearch.lower(), "moderate")

        params = {
            "client": "web",
            "gl": region,
            "limit": str(max_results) if max_results else "10",
            "no_correct": "false",
            "q": keywords,
            "safeSearch": safe_setting,
            "type": "images"
        }

        url = f"{self.base_url}?{urlencode(params)}"
        try:
            response = self.session.get(url)
            response.raise_for_status()
            raw_results = response.json()

            # Yep replies as ["Ok", {"results": [...]}]; anything shorter is empty.
            if not raw_results or len(raw_results) < 2:
                return []

            formatted_results = []
            results = raw_results[1].get('results', [])

            for result in results:
                if result.get("type") != "Image":
                    continue

                formatted_result = {
                    "title": self._remove_html_tags(result.get("title", "")),
                    "image": result.get("image_id", ""),
                    "thumbnail": result.get("src", ""),
                    "url": result.get("host_page", ""),
                    "height": result.get("height", 0),
                    "width": result.get("width", 0),
                    "source": result.get("visual_url", "")
                }

                # srcset is "url1 1x, url2 2x"; the second entry is the HD thumb.
                # Guard the index: some results list only one candidate.
                if "srcset" in result:
                    candidates = result["srcset"].split(",")
                    if len(candidates) > 1:
                        formatted_result["thumbnail_hd"] = candidates[1].strip().split(" ")[0]

                formatted_results.append(formatted_result)

            if max_results:
                return formatted_results[:max_results]
            return formatted_results

        except Exception as e:
            if hasattr(e, 'response') and e.response is not None:
                raise Exception(f"Yep image search failed with status {e.response.status_code}: {str(e)}")
            else:
                raise Exception(f"Yep image search failed: {str(e)}")

    def _remove_html_tags(self, text: str) -> str:
        """Strip HTML tags from *text* and decode common HTML entities."""
        result = ""
        in_tag = False

        for char in text:
            if char == '<':
                in_tag = True
            elif char == '>':
                in_tag = False
            elif not in_tag:
                result += char

        # FIX: the released map had its keys already entity-decoded
        # (e.g. ' ': ' ', '&': '&', plus an unterminated ''' key that was a
        # syntax error), making every replacement a no-op. Restore the
        # intended HTML-entity keys.
        replacements = {
            '&nbsp;': ' ',
            '&amp;': '&',
            '&lt;': '<',
            '&gt;': '>',
            '&quot;': '"',
            '&#39;': "'",
        }

        for entity, replacement in replacements.items():
            result = result.replace(entity, replacement)

        return result.strip()
|
|
99
|
+
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import List
|
|
4
|
+
from urllib.parse import urlencode
|
|
5
|
+
|
|
6
|
+
from .base import YepBase
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class YepSuggestions(YepBase):
    """Yep autocomplete-suggestion engine."""

    def run(self, *args, **kwargs) -> List[str]:
        """Return autocomplete suggestion strings for a query.

        Accepts keywords (query string) and region (default "all") either
        positionally or by keyword. Raises Exception on request failure.
        """
        keywords = args[0] if args else kwargs.get("keywords")
        region = args[1] if len(args) > 1 else kwargs.get("region", "all")

        query_string = urlencode({
            "query": keywords,
            "type": "web",
            "gl": region
        })
        url = f"https://api.yep.com/ac/?{query_string}"

        try:
            response = self.session.get(url)
            response.raise_for_status()
            data = response.json()
        except Exception as e:
            if hasattr(e, 'response') and e.response is not None:
                raise Exception(f"Yep suggestions failed with status {e.response.status_code}: {str(e)}")
            else:
                raise Exception(f"Yep suggestions failed: {str(e)}")

        # Reply shape is [query, [suggestion, ...]]; anything else yields [].
        if isinstance(data, list) and len(data) > 1 and isinstance(data[1], list):
            return data[1]
        return []
|
|
35
|
+
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Dict, List, Optional
|
|
4
|
+
from urllib.parse import urlencode
|
|
5
|
+
|
|
6
|
+
from .base import YepBase
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class YepSearch(YepBase):
    """Yep web (text) search engine."""

    def run(self, *args, **kwargs) -> List[Dict[str, str]]:
        """Search Yep for web results.

        Accepts positional or keyword arguments:
            keywords: query string.
            region: country code sent as ``gl`` (default "all").
            safesearch: "on" / "moderate" / "off" (default "moderate").
            max_results: optional cap on the number of results.

        Returns a list of result dicts (see format_results). Raises
        Exception on request failure.
        """
        keywords = args[0] if args else kwargs.get("keywords")
        region = args[1] if len(args) > 1 else kwargs.get("region", "all")
        safesearch = args[2] if len(args) > 2 else kwargs.get("safesearch", "moderate")
        max_results = args[3] if len(args) > 3 else kwargs.get("max_results")

        safe_search_map = {
            "on": "on",
            "moderate": "moderate",
            "off": "off"
        }
        safe_setting = safe_search_map.get(safesearch.lower(), "moderate")

        params = {
            "client": "web",
            "gl": region,
            "limit": str(max_results) if max_results else "10",
            "no_correct": "false",
            "q": keywords,
            "safeSearch": safe_setting,
            "type": "web"
        }

        url = f"{self.base_url}?{urlencode(params)}"
        try:
            response = self.session.get(url)
            response.raise_for_status()
            raw_results = response.json()

            formatted_results = self.format_results(raw_results)

            if max_results:
                return formatted_results[:max_results]
            return formatted_results
        except Exception as e:
            if hasattr(e, 'response') and e.response is not None:
                raise Exception(f"Yep search failed with status {e.response.status_code}: {str(e)}")
            else:
                raise Exception(f"Yep search failed: {str(e)}")

    def format_results(self, raw_results: dict) -> List[Dict]:
        """Convert Yep's raw JSON reply into a list of result dicts.

        Each dict carries title/href/body/source/position/type/first_seen and,
        when present, a "sitelinks" list of {title, href} dicts.
        """
        formatted_results = []

        # Yep replies as ["Ok", {"results": [...]}]; anything shorter is empty.
        if not raw_results or len(raw_results) < 2:
            return formatted_results

        results = raw_results[1].get('results', [])

        for result in results:
            formatted_result = {
                "title": self._remove_html_tags(result.get("title", "")),
                "href": result.get("url", ""),
                "body": self._remove_html_tags(result.get("snippet", "")),
                "source": result.get("visual_url", ""),
                "position": len(formatted_results) + 1,
                "type": result.get("type", "organic"),
                "first_seen": result.get("first_seen", None)
            }

            if "sitelinks" in result:
                sitelinks = []
                if "full" in result["sitelinks"]:
                    sitelinks.extend(result["sitelinks"]["full"])
                if "short" in result["sitelinks"]:
                    sitelinks.extend(result["sitelinks"]["short"])

                if sitelinks:
                    formatted_result["sitelinks"] = [
                        {
                            "title": self._remove_html_tags(link.get("title", "")),
                            "href": link.get("url", "")
                        }
                        for link in sitelinks
                    ]

            formatted_results.append(formatted_result)

        return formatted_results

    def _remove_html_tags(self, text: str) -> str:
        """Strip HTML tags from *text* and decode common HTML entities."""
        # NOTE(review): duplicated in images.py — consider hoisting onto YepBase.
        result = ""
        in_tag = False

        for char in text:
            if char == '<':
                in_tag = True
            elif char == '>':
                in_tag = False
            elif not in_tag:
                result += char

        # FIX: the released map had its keys already entity-decoded
        # (e.g. ' ': ' ', '&': '&', plus an unterminated ''' key that was a
        # syntax error), making every replacement a no-op. Restore the
        # intended HTML-entity keys.
        replacements = {
            '&nbsp;': ' ',
            '&amp;': '&',
            '&lt;': '<',
            '&gt;': '>',
            '&quot;': '"',
            '&#39;': "'",
        }

        for entity, replacement in replacements.items():
            result = result.replace(entity, replacement)

        return result.strip()
|
|
114
|
+
|