webscout 8.2.9__py3-none-any.whl → 2026.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- webscout/AIauto.py +524 -251
- webscout/AIbase.py +247 -319
- webscout/AIutel.py +68 -703
- webscout/Bard.py +1072 -1026
- webscout/Extra/GitToolkit/__init__.py +10 -10
- webscout/Extra/GitToolkit/gitapi/__init__.py +20 -12
- webscout/Extra/GitToolkit/gitapi/gist.py +142 -0
- webscout/Extra/GitToolkit/gitapi/organization.py +91 -0
- webscout/Extra/GitToolkit/gitapi/repository.py +308 -195
- webscout/Extra/GitToolkit/gitapi/search.py +162 -0
- webscout/Extra/GitToolkit/gitapi/trending.py +236 -0
- webscout/Extra/GitToolkit/gitapi/user.py +128 -96
- webscout/Extra/GitToolkit/gitapi/utils.py +82 -62
- webscout/Extra/YTToolkit/README.md +443 -375
- webscout/Extra/YTToolkit/YTdownloader.py +953 -957
- webscout/Extra/YTToolkit/__init__.py +3 -3
- webscout/Extra/YTToolkit/transcriber.py +595 -476
- webscout/Extra/YTToolkit/ytapi/README.md +230 -44
- webscout/Extra/YTToolkit/ytapi/__init__.py +22 -6
- webscout/Extra/YTToolkit/ytapi/captions.py +190 -0
- webscout/Extra/YTToolkit/ytapi/channel.py +302 -307
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
- webscout/Extra/YTToolkit/ytapi/extras.py +178 -118
- webscout/Extra/YTToolkit/ytapi/hashtag.py +120 -0
- webscout/Extra/YTToolkit/ytapi/https.py +89 -88
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
- webscout/Extra/YTToolkit/ytapi/playlist.py +59 -59
- webscout/Extra/YTToolkit/ytapi/pool.py +8 -8
- webscout/Extra/YTToolkit/ytapi/query.py +143 -40
- webscout/Extra/YTToolkit/ytapi/shorts.py +122 -0
- webscout/Extra/YTToolkit/ytapi/stream.py +68 -63
- webscout/Extra/YTToolkit/ytapi/suggestions.py +97 -0
- webscout/Extra/YTToolkit/ytapi/utils.py +66 -62
- webscout/Extra/YTToolkit/ytapi/video.py +403 -232
- webscout/Extra/__init__.py +2 -3
- webscout/Extra/gguf.py +1298 -684
- webscout/Extra/tempmail/README.md +487 -487
- webscout/Extra/tempmail/__init__.py +28 -28
- webscout/Extra/tempmail/async_utils.py +143 -141
- webscout/Extra/tempmail/base.py +172 -161
- webscout/Extra/tempmail/cli.py +191 -187
- webscout/Extra/tempmail/emailnator.py +88 -84
- webscout/Extra/tempmail/mail_tm.py +378 -361
- webscout/Extra/tempmail/temp_mail_io.py +304 -292
- webscout/Extra/weather.py +196 -194
- webscout/Extra/weather_ascii.py +17 -15
- webscout/Provider/AISEARCH/PERPLEXED_search.py +175 -0
- webscout/Provider/AISEARCH/Perplexity.py +292 -333
- webscout/Provider/AISEARCH/README.md +106 -279
- webscout/Provider/AISEARCH/__init__.py +16 -9
- webscout/Provider/AISEARCH/brave_search.py +298 -0
- webscout/Provider/AISEARCH/iask_search.py +357 -410
- webscout/Provider/AISEARCH/monica_search.py +200 -220
- webscout/Provider/AISEARCH/webpilotai_search.py +242 -255
- webscout/Provider/Algion.py +413 -0
- webscout/Provider/Andi.py +74 -69
- webscout/Provider/Apriel.py +313 -0
- webscout/Provider/Ayle.py +323 -0
- webscout/Provider/ChatSandbox.py +329 -342
- webscout/Provider/ClaudeOnline.py +365 -0
- webscout/Provider/Cohere.py +232 -208
- webscout/Provider/DeepAI.py +367 -0
- webscout/Provider/Deepinfra.py +467 -340
- webscout/Provider/EssentialAI.py +217 -0
- webscout/Provider/ExaAI.py +274 -261
- webscout/Provider/Gemini.py +175 -169
- webscout/Provider/GithubChat.py +385 -369
- webscout/Provider/Gradient.py +286 -0
- webscout/Provider/Groq.py +556 -801
- webscout/Provider/HadadXYZ.py +323 -0
- webscout/Provider/HeckAI.py +392 -375
- webscout/Provider/HuggingFace.py +387 -0
- webscout/Provider/IBM.py +340 -0
- webscout/Provider/Jadve.py +317 -291
- webscout/Provider/K2Think.py +306 -0
- webscout/Provider/Koboldai.py +221 -384
- webscout/Provider/Netwrck.py +273 -270
- webscout/Provider/Nvidia.py +310 -0
- webscout/Provider/OPENAI/DeepAI.py +489 -0
- webscout/Provider/OPENAI/K2Think.py +423 -0
- webscout/Provider/OPENAI/PI.py +463 -0
- webscout/Provider/OPENAI/README.md +890 -952
- webscout/Provider/OPENAI/TogetherAI.py +405 -0
- webscout/Provider/OPENAI/TwoAI.py +255 -357
- webscout/Provider/OPENAI/__init__.py +148 -40
- webscout/Provider/OPENAI/ai4chat.py +348 -293
- webscout/Provider/OPENAI/akashgpt.py +436 -0
- webscout/Provider/OPENAI/algion.py +303 -0
- webscout/Provider/OPENAI/{exachat.py → ayle.py} +365 -444
- webscout/Provider/OPENAI/base.py +253 -249
- webscout/Provider/OPENAI/cerebras.py +296 -0
- webscout/Provider/OPENAI/chatgpt.py +870 -556
- webscout/Provider/OPENAI/chatsandbox.py +233 -173
- webscout/Provider/OPENAI/deepinfra.py +403 -322
- webscout/Provider/OPENAI/e2b.py +2370 -1414
- webscout/Provider/OPENAI/elmo.py +278 -0
- webscout/Provider/OPENAI/exaai.py +452 -417
- webscout/Provider/OPENAI/freeassist.py +446 -0
- webscout/Provider/OPENAI/gradient.py +448 -0
- webscout/Provider/OPENAI/groq.py +380 -364
- webscout/Provider/OPENAI/hadadxyz.py +292 -0
- webscout/Provider/OPENAI/heckai.py +333 -308
- webscout/Provider/OPENAI/huggingface.py +321 -0
- webscout/Provider/OPENAI/ibm.py +425 -0
- webscout/Provider/OPENAI/llmchat.py +253 -0
- webscout/Provider/OPENAI/llmchatco.py +378 -335
- webscout/Provider/OPENAI/meta.py +541 -0
- webscout/Provider/OPENAI/netwrck.py +374 -357
- webscout/Provider/OPENAI/nvidia.py +317 -0
- webscout/Provider/OPENAI/oivscode.py +348 -287
- webscout/Provider/OPENAI/openrouter.py +328 -0
- webscout/Provider/OPENAI/pydantic_imports.py +1 -172
- webscout/Provider/OPENAI/sambanova.py +397 -0
- webscout/Provider/OPENAI/sonus.py +305 -304
- webscout/Provider/OPENAI/textpollinations.py +370 -339
- webscout/Provider/OPENAI/toolbaz.py +375 -413
- webscout/Provider/OPENAI/typefully.py +419 -355
- webscout/Provider/OPENAI/typliai.py +279 -0
- webscout/Provider/OPENAI/utils.py +314 -318
- webscout/Provider/OPENAI/wisecat.py +359 -387
- webscout/Provider/OPENAI/writecream.py +185 -163
- webscout/Provider/OPENAI/x0gpt.py +462 -365
- webscout/Provider/OPENAI/zenmux.py +380 -0
- webscout/Provider/OpenRouter.py +386 -0
- webscout/Provider/Openai.py +337 -496
- webscout/Provider/PI.py +443 -429
- webscout/Provider/QwenLM.py +346 -254
- webscout/Provider/STT/__init__.py +28 -0
- webscout/Provider/STT/base.py +303 -0
- webscout/Provider/STT/elevenlabs.py +264 -0
- webscout/Provider/Sambanova.py +317 -0
- webscout/Provider/TTI/README.md +69 -82
- webscout/Provider/TTI/__init__.py +37 -7
- webscout/Provider/TTI/base.py +147 -64
- webscout/Provider/TTI/claudeonline.py +393 -0
- webscout/Provider/TTI/magicstudio.py +292 -201
- webscout/Provider/TTI/miragic.py +180 -0
- webscout/Provider/TTI/pollinations.py +331 -221
- webscout/Provider/TTI/together.py +334 -0
- webscout/Provider/TTI/utils.py +14 -11
- webscout/Provider/TTS/README.md +186 -192
- webscout/Provider/TTS/__init__.py +43 -10
- webscout/Provider/TTS/base.py +523 -159
- webscout/Provider/TTS/deepgram.py +286 -156
- webscout/Provider/TTS/elevenlabs.py +189 -111
- webscout/Provider/TTS/freetts.py +218 -0
- webscout/Provider/TTS/murfai.py +288 -113
- webscout/Provider/TTS/openai_fm.py +364 -129
- webscout/Provider/TTS/parler.py +203 -111
- webscout/Provider/TTS/qwen.py +334 -0
- webscout/Provider/TTS/sherpa.py +286 -0
- webscout/Provider/TTS/speechma.py +693 -580
- webscout/Provider/TTS/streamElements.py +275 -333
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TextPollinationsAI.py +331 -308
- webscout/Provider/TogetherAI.py +450 -0
- webscout/Provider/TwoAI.py +309 -475
- webscout/Provider/TypliAI.py +311 -305
- webscout/Provider/UNFINISHED/ChatHub.py +219 -209
- webscout/Provider/{OPENAI/glider.py → UNFINISHED/ChutesAI.py} +331 -326
- webscout/Provider/{GizAI.py → UNFINISHED/GizAI.py} +300 -295
- webscout/Provider/{Marcus.py → UNFINISHED/Marcus.py} +218 -198
- webscout/Provider/UNFINISHED/Qodo.py +481 -0
- webscout/Provider/{MCPCore.py → UNFINISHED/XenAI.py} +330 -315
- webscout/Provider/UNFINISHED/Youchat.py +347 -330
- webscout/Provider/UNFINISHED/aihumanizer.py +41 -0
- webscout/Provider/UNFINISHED/grammerchecker.py +37 -0
- webscout/Provider/UNFINISHED/liner.py +342 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +246 -263
- webscout/Provider/{samurai.py → UNFINISHED/samurai.py} +231 -224
- webscout/Provider/WiseCat.py +256 -233
- webscout/Provider/WrDoChat.py +390 -370
- webscout/Provider/__init__.py +115 -174
- webscout/Provider/ai4chat.py +181 -174
- webscout/Provider/akashgpt.py +330 -335
- webscout/Provider/cerebras.py +397 -290
- webscout/Provider/cleeai.py +236 -213
- webscout/Provider/elmo.py +291 -283
- webscout/Provider/geminiapi.py +343 -208
- webscout/Provider/julius.py +245 -223
- webscout/Provider/learnfastai.py +333 -325
- webscout/Provider/llama3mitril.py +230 -215
- webscout/Provider/llmchat.py +308 -258
- webscout/Provider/llmchatco.py +321 -306
- webscout/Provider/meta.py +996 -801
- webscout/Provider/oivscode.py +332 -309
- webscout/Provider/searchchat.py +316 -292
- webscout/Provider/sonus.py +264 -258
- webscout/Provider/toolbaz.py +359 -353
- webscout/Provider/turboseek.py +332 -266
- webscout/Provider/typefully.py +262 -202
- webscout/Provider/x0gpt.py +332 -299
- webscout/__init__.py +31 -39
- webscout/__main__.py +5 -5
- webscout/cli.py +585 -524
- webscout/client.py +1497 -70
- webscout/conversation.py +140 -436
- webscout/exceptions.py +383 -362
- webscout/litagent/__init__.py +29 -29
- webscout/litagent/agent.py +492 -455
- webscout/litagent/constants.py +60 -60
- webscout/models.py +505 -181
- webscout/optimizers.py +74 -420
- webscout/prompt_manager.py +376 -288
- webscout/sanitize.py +1514 -0
- webscout/scout/README.md +452 -404
- webscout/scout/__init__.py +8 -8
- webscout/scout/core/__init__.py +7 -7
- webscout/scout/core/crawler.py +330 -210
- webscout/scout/core/scout.py +800 -607
- webscout/scout/core/search_result.py +51 -96
- webscout/scout/core/text_analyzer.py +64 -63
- webscout/scout/core/text_utils.py +412 -277
- webscout/scout/core/web_analyzer.py +54 -52
- webscout/scout/element.py +872 -478
- webscout/scout/parsers/__init__.py +70 -69
- webscout/scout/parsers/html5lib_parser.py +182 -172
- webscout/scout/parsers/html_parser.py +238 -236
- webscout/scout/parsers/lxml_parser.py +203 -178
- webscout/scout/utils.py +38 -37
- webscout/search/__init__.py +47 -0
- webscout/search/base.py +201 -0
- webscout/search/bing_main.py +45 -0
- webscout/search/brave_main.py +92 -0
- webscout/search/duckduckgo_main.py +57 -0
- webscout/search/engines/__init__.py +127 -0
- webscout/search/engines/bing/__init__.py +15 -0
- webscout/search/engines/bing/base.py +35 -0
- webscout/search/engines/bing/images.py +114 -0
- webscout/search/engines/bing/news.py +96 -0
- webscout/search/engines/bing/suggestions.py +36 -0
- webscout/search/engines/bing/text.py +109 -0
- webscout/search/engines/brave/__init__.py +19 -0
- webscout/search/engines/brave/base.py +47 -0
- webscout/search/engines/brave/images.py +213 -0
- webscout/search/engines/brave/news.py +353 -0
- webscout/search/engines/brave/suggestions.py +318 -0
- webscout/search/engines/brave/text.py +167 -0
- webscout/search/engines/brave/videos.py +364 -0
- webscout/search/engines/duckduckgo/__init__.py +25 -0
- webscout/search/engines/duckduckgo/answers.py +80 -0
- webscout/search/engines/duckduckgo/base.py +189 -0
- webscout/search/engines/duckduckgo/images.py +100 -0
- webscout/search/engines/duckduckgo/maps.py +183 -0
- webscout/search/engines/duckduckgo/news.py +70 -0
- webscout/search/engines/duckduckgo/suggestions.py +22 -0
- webscout/search/engines/duckduckgo/text.py +221 -0
- webscout/search/engines/duckduckgo/translate.py +48 -0
- webscout/search/engines/duckduckgo/videos.py +80 -0
- webscout/search/engines/duckduckgo/weather.py +84 -0
- webscout/search/engines/mojeek.py +61 -0
- webscout/search/engines/wikipedia.py +77 -0
- webscout/search/engines/yahoo/__init__.py +41 -0
- webscout/search/engines/yahoo/answers.py +19 -0
- webscout/search/engines/yahoo/base.py +34 -0
- webscout/search/engines/yahoo/images.py +323 -0
- webscout/search/engines/yahoo/maps.py +19 -0
- webscout/search/engines/yahoo/news.py +258 -0
- webscout/search/engines/yahoo/suggestions.py +140 -0
- webscout/search/engines/yahoo/text.py +273 -0
- webscout/search/engines/yahoo/translate.py +19 -0
- webscout/search/engines/yahoo/videos.py +302 -0
- webscout/search/engines/yahoo/weather.py +220 -0
- webscout/search/engines/yandex.py +67 -0
- webscout/search/engines/yep/__init__.py +13 -0
- webscout/search/engines/yep/base.py +34 -0
- webscout/search/engines/yep/images.py +101 -0
- webscout/search/engines/yep/suggestions.py +38 -0
- webscout/search/engines/yep/text.py +99 -0
- webscout/search/http_client.py +172 -0
- webscout/search/results.py +141 -0
- webscout/search/yahoo_main.py +57 -0
- webscout/search/yep_main.py +48 -0
- webscout/server/__init__.py +48 -0
- webscout/server/config.py +78 -0
- webscout/server/exceptions.py +69 -0
- webscout/server/providers.py +286 -0
- webscout/server/request_models.py +131 -0
- webscout/server/request_processing.py +404 -0
- webscout/server/routes.py +642 -0
- webscout/server/server.py +351 -0
- webscout/server/ui_templates.py +1171 -0
- webscout/swiftcli/__init__.py +79 -95
- webscout/swiftcli/core/__init__.py +7 -7
- webscout/swiftcli/core/cli.py +574 -297
- webscout/swiftcli/core/context.py +98 -104
- webscout/swiftcli/core/group.py +268 -241
- webscout/swiftcli/decorators/__init__.py +28 -28
- webscout/swiftcli/decorators/command.py +243 -221
- webscout/swiftcli/decorators/options.py +247 -220
- webscout/swiftcli/decorators/output.py +392 -252
- webscout/swiftcli/exceptions.py +21 -21
- webscout/swiftcli/plugins/__init__.py +9 -9
- webscout/swiftcli/plugins/base.py +134 -135
- webscout/swiftcli/plugins/manager.py +269 -269
- webscout/swiftcli/utils/__init__.py +58 -59
- webscout/swiftcli/utils/formatting.py +251 -252
- webscout/swiftcli/utils/parsing.py +368 -267
- webscout/update_checker.py +280 -136
- webscout/utils.py +28 -14
- webscout/version.py +2 -1
- webscout/version.py.bak +3 -0
- webscout/zeroart/__init__.py +218 -135
- webscout/zeroart/base.py +70 -66
- webscout/zeroart/effects.py +155 -101
- webscout/zeroart/fonts.py +1799 -1239
- webscout-2026.1.19.dist-info/METADATA +638 -0
- webscout-2026.1.19.dist-info/RECORD +312 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/WHEEL +1 -1
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/entry_points.txt +1 -1
- webscout/DWEBS.py +0 -520
- webscout/Extra/Act.md +0 -309
- webscout/Extra/GitToolkit/gitapi/README.md +0 -110
- webscout/Extra/autocoder/__init__.py +0 -9
- webscout/Extra/autocoder/autocoder.py +0 -1105
- webscout/Extra/autocoder/autocoder_utiles.py +0 -332
- webscout/Extra/gguf.md +0 -430
- webscout/Extra/weather.md +0 -281
- webscout/Litlogger/README.md +0 -10
- webscout/Litlogger/__init__.py +0 -15
- webscout/Litlogger/formats.py +0 -4
- webscout/Litlogger/handlers.py +0 -103
- webscout/Litlogger/levels.py +0 -13
- webscout/Litlogger/logger.py +0 -92
- webscout/Provider/AI21.py +0 -177
- webscout/Provider/AISEARCH/DeepFind.py +0 -254
- webscout/Provider/AISEARCH/felo_search.py +0 -202
- webscout/Provider/AISEARCH/genspark_search.py +0 -324
- webscout/Provider/AISEARCH/hika_search.py +0 -186
- webscout/Provider/AISEARCH/scira_search.py +0 -298
- webscout/Provider/Aitopia.py +0 -316
- webscout/Provider/AllenAI.py +0 -440
- webscout/Provider/Blackboxai.py +0 -791
- webscout/Provider/ChatGPTClone.py +0 -237
- webscout/Provider/ChatGPTGratis.py +0 -194
- webscout/Provider/Cloudflare.py +0 -324
- webscout/Provider/ExaChat.py +0 -358
- webscout/Provider/Flowith.py +0 -217
- webscout/Provider/FreeGemini.py +0 -250
- webscout/Provider/Glider.py +0 -225
- webscout/Provider/HF_space/__init__.py +0 -0
- webscout/Provider/HF_space/qwen_qwen2.py +0 -206
- webscout/Provider/HuggingFaceChat.py +0 -469
- webscout/Provider/Hunyuan.py +0 -283
- webscout/Provider/LambdaChat.py +0 -411
- webscout/Provider/Llama3.py +0 -259
- webscout/Provider/Nemotron.py +0 -218
- webscout/Provider/OLLAMA.py +0 -396
- webscout/Provider/OPENAI/BLACKBOXAI.py +0 -766
- webscout/Provider/OPENAI/Cloudflare.py +0 -378
- webscout/Provider/OPENAI/FreeGemini.py +0 -283
- webscout/Provider/OPENAI/NEMOTRON.py +0 -232
- webscout/Provider/OPENAI/Qwen3.py +0 -283
- webscout/Provider/OPENAI/api.py +0 -969
- webscout/Provider/OPENAI/c4ai.py +0 -373
- webscout/Provider/OPENAI/chatgptclone.py +0 -494
- webscout/Provider/OPENAI/copilot.py +0 -242
- webscout/Provider/OPENAI/flowith.py +0 -162
- webscout/Provider/OPENAI/freeaichat.py +0 -359
- webscout/Provider/OPENAI/mcpcore.py +0 -389
- webscout/Provider/OPENAI/multichat.py +0 -376
- webscout/Provider/OPENAI/opkfc.py +0 -496
- webscout/Provider/OPENAI/scirachat.py +0 -477
- webscout/Provider/OPENAI/standardinput.py +0 -433
- webscout/Provider/OPENAI/typegpt.py +0 -364
- webscout/Provider/OPENAI/uncovrAI.py +0 -463
- webscout/Provider/OPENAI/venice.py +0 -431
- webscout/Provider/OPENAI/yep.py +0 -382
- webscout/Provider/OpenGPT.py +0 -209
- webscout/Provider/Perplexitylabs.py +0 -415
- webscout/Provider/Reka.py +0 -214
- webscout/Provider/StandardInput.py +0 -290
- webscout/Provider/TTI/aiarta.py +0 -365
- webscout/Provider/TTI/artbit.py +0 -0
- webscout/Provider/TTI/fastflux.py +0 -200
- webscout/Provider/TTI/piclumen.py +0 -203
- webscout/Provider/TTI/pixelmuse.py +0 -225
- webscout/Provider/TTS/gesserit.py +0 -128
- webscout/Provider/TTS/sthir.py +0 -94
- webscout/Provider/TeachAnything.py +0 -229
- webscout/Provider/UNFINISHED/puterjs.py +0 -635
- webscout/Provider/UNFINISHED/test_lmarena.py +0 -119
- webscout/Provider/Venice.py +0 -258
- webscout/Provider/VercelAI.py +0 -253
- webscout/Provider/Writecream.py +0 -246
- webscout/Provider/WritingMate.py +0 -269
- webscout/Provider/asksteve.py +0 -220
- webscout/Provider/chatglm.py +0 -215
- webscout/Provider/copilot.py +0 -425
- webscout/Provider/freeaichat.py +0 -285
- webscout/Provider/granite.py +0 -235
- webscout/Provider/hermes.py +0 -266
- webscout/Provider/koala.py +0 -170
- webscout/Provider/lmarena.py +0 -198
- webscout/Provider/multichat.py +0 -364
- webscout/Provider/scira_chat.py +0 -299
- webscout/Provider/scnet.py +0 -243
- webscout/Provider/talkai.py +0 -194
- webscout/Provider/typegpt.py +0 -289
- webscout/Provider/uncovr.py +0 -368
- webscout/Provider/yep.py +0 -389
- webscout/litagent/Readme.md +0 -276
- webscout/litprinter/__init__.py +0 -59
- webscout/swiftcli/Readme.md +0 -323
- webscout/tempid.py +0 -128
- webscout/webscout_search.py +0 -1184
- webscout/webscout_search_async.py +0 -654
- webscout/yep_search.py +0 -347
- webscout/zeroart/README.md +0 -89
- webscout-8.2.9.dist-info/METADATA +0 -1033
- webscout-8.2.9.dist-info/RECORD +0 -289
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
"""Bing news search."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from time import sleep
|
|
6
|
+
from typing import List, Optional
|
|
7
|
+
from urllib.parse import urlencode
|
|
8
|
+
|
|
9
|
+
from webscout.scout import Scout
|
|
10
|
+
from webscout.search.results import NewsResult
|
|
11
|
+
|
|
12
|
+
from .base import BingBase
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class BingNewsSearch(BingBase):
    """Bing news search via the infinite-scroll AJAX endpoint."""

    name = "bing"
    category = "news"

    def run(self, *args, **kwargs) -> List[NewsResult]:
        """Search Bing News and return parsed results.

        Args (positional or keyword):
            keywords: Search query (mandatory).
            region: Two-letter market/country code, e.g. "us" (default "us").
            safesearch: One of "on" | "moderate" | "off" (default "moderate").
            max_results: Maximum number of results to return (default 10).

        Returns:
            A list of NewsResult objects, at most ``max_results`` long.

        Raises:
            ValueError: If ``keywords`` is missing or empty.
            Exception: If a page request fails (original error chained).
        """
        keywords = args[0] if args else kwargs.get("keywords")
        region = args[1] if len(args) > 1 else kwargs.get("region", "us")
        safesearch = args[2] if len(args) > 2 else kwargs.get("safesearch", "moderate")
        max_results = args[3] if len(args) > 3 else kwargs.get("max_results", 10)

        if max_results is None:
            max_results = 10

        if not keywords:
            raise ValueError("Keywords are mandatory")

        safe_map = {
            "on": "Strict",
            "moderate": "Moderate",
            "off": "Off"
        }
        # BUG FIX: the mapped value was previously computed and discarded, so
        # the safesearch argument had no effect on the request. Send it along.
        # NOTE(review): "safeSearch" is the documented Bing API parameter name;
        # confirm the infinitescrollajax endpoint honors it.
        safe_value = safe_map.get(safesearch.lower(), "Moderate")

        # Bing news infinite-scroll endpoint.
        url = f"{self.base_url}/news/infinitescrollajax"
        params = {
            'q': keywords,
            'InfiniteScroll': '1',
            'first': '1',
            'SFX': '0',
            'cc': region.lower(),
            'safeSearch': safe_value,
            'setlang': self.lang.split('-')[0]
        }

        results = []
        first = 1   # 1-based index of the first result requested on each page
        sfx = 0     # Bing's scroll-fetch counter, incremented per page

        while len(results) < max_results:
            params['first'] = str(first)
            params['SFX'] = str(sfx)
            full_url = f"{url}?{urlencode(params)}"

            try:
                response = self.session.get(full_url, timeout=self.timeout)
                response.raise_for_status()
                html = response.text
            except Exception as e:
                # Chain the original error so callers can see the root cause.
                raise Exception(f"Failed to fetch news: {str(e)}") from e

            if not html:
                break

            soup = Scout(html)
            news_items = soup.select('div.newsitem')

            # BUG FIX: stop when a non-empty page parses to zero items;
            # otherwise the loop would spin forever without making progress.
            if not news_items:
                break

            for item in news_items:
                if len(results) >= max_results:
                    break

                title = item.select_one('a.title')
                snippet = item.select_one('div.snippet')
                source = item.select_one('div.source')
                date = item.select_one('span.date')

                if title:
                    news_result = NewsResult(
                        title=title.get_text(strip=True),
                        url=title.get('href', ''),
                        body=snippet.get_text(strip=True) if snippet else '',
                        source=source.get_text(strip=True) if source else '',
                        date=date.get_text(strip=True) if date else '',
                        image=""
                    )
                    results.append(news_result)

            first += 10
            sfx += 1

            if self.sleep_interval:
                sleep(self.sleep_interval)

        return results[:max_results]
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"""Bing suggestions search."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import List, Optional
|
|
6
|
+
from urllib.parse import urlencode
|
|
7
|
+
|
|
8
|
+
from .base import BingBase
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class BingSuggestionsSearch(BingBase):
    """Bing query-suggestion (autocomplete) search."""

    name = "bing"
    category = "suggestions"

    def run(self, *args, **kwargs) -> List[str]:
        """Fetch autocomplete suggestions for a partial query.

        Args (positional or keyword):
            query: Partial search query (mandatory).
            region: Market code, e.g. "en-US" (default "en-US").

        Returns:
            A list of suggestion strings; empty if none are offered.

        Raises:
            ValueError: If ``query`` is missing or empty.
            Exception: If the request fails (original error chained).
        """
        query = args[0] if args else kwargs.get("query")
        region = args[1] if len(args) > 1 else kwargs.get("region", "en-US")

        if not query:
            raise ValueError("Query is mandatory")

        params = {
            "query": query,
            "mkt": region
        }
        url = f"https://api.bing.com/osjson.aspx?{urlencode(params)}"

        try:
            response = self.session.get(url, timeout=self.timeout)
            response.raise_for_status()
            data = response.json()
            # OSJSON responses look like [query, [suggestion, ...], ...].
            # BUG FIX: guard against non-list payloads, which previously hit
            # len() with a TypeError that was disguised as a fetch failure.
            if isinstance(data, list) and len(data) > 1 and isinstance(data[1], list):
                return data[1]
            return []
        except Exception as e:
            # Chain the original error so callers can see the root cause.
            raise Exception(f"Failed to fetch suggestions: {str(e)}") from e
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
"""Bing text search."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from time import sleep
|
|
6
|
+
from typing import List, Optional
|
|
7
|
+
|
|
8
|
+
from webscout.scout import Scout
|
|
9
|
+
from webscout.search.results import TextResult
|
|
10
|
+
|
|
11
|
+
from .base import BingBase
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class BingTextSearch(BingBase):
    """Bing web (text) search scraped from the HTML results pages."""

    name = "bing"
    category = "text"

    def run(self, *args, **kwargs) -> List[TextResult]:
        """Search Bing and return parsed organic results.

        Args (positional or keyword):
            keywords: Search query (mandatory).
            region: Market/country code (default "us").
            safesearch: One of "on" | "moderate" | "off" (default "moderate").
            max_results: Maximum number of results to return (default 10).
            unique: Drop results whose URL was already seen (default True).

        Returns:
            A list of TextResult objects, at most ``max_results`` long.

        Raises:
            ValueError: If ``keywords`` is missing or empty.
            Exception: If a page request fails (original error chained).
        """
        from urllib.parse import quote_plus

        keywords = args[0] if args else kwargs.get("keywords")
        # NOTE(review): region and safesearch are accepted for interface
        # parity with the other engines but were never sent to Bing (the
        # original code computed and discarded both) — TODO: wire them
        # into the request URL once the correct parameters are confirmed.
        _region = args[1] if len(args) > 1 else kwargs.get("region", "us")
        _safesearch = args[2] if len(args) > 2 else kwargs.get("safesearch", "moderate")
        max_results = args[3] if len(args) > 3 else kwargs.get("max_results", 10)
        unique = kwargs.get("unique", True)

        if max_results is None:
            max_results = 10

        if not keywords:
            raise ValueError("Keywords are mandatory")

        fetched_results = []
        fetched_links = set()

        def fetch_page(url):
            # Fetch one results page, surfacing HTTP errors as exceptions.
            try:
                response = self.session.get(url, timeout=self.timeout)
                response.raise_for_status()
                return response.text
            except Exception as e:
                raise Exception(f"Failed to fetch page: {str(e)}") from e

        # BUG FIX: the query was previously interpolated raw into the URL,
        # which breaks for queries containing spaces, '&' or '#'. Encode it.
        url = f'{self.base_url}/search?q={quote_plus(keywords)}&search=&form=QBLH'
        urls_to_fetch = [url]

        while len(fetched_results) < max_results and urls_to_fetch:
            current_url = urls_to_fetch.pop(0)
            html = fetch_page(current_url)
            soup = Scout(html)

            links = soup.select('ol#b_results > li.b_algo')
            for link in links:
                if len(fetched_results) >= max_results:
                    break
                title_tag = link.select_one('h2')
                url_tag = link.select_one('h2 a')
                text_tag = link.select_one('p')

                if title_tag and url_tag and text_tag:
                    title = title_tag.get_text(strip=True)
                    href = url_tag.get('href', '')
                    body = text_tag.get_text(strip=True)

                    # Unwrap Bing's redirect links to the real target URL.
                    if href.startswith('/ck/a?'):
                        href = self._decode_bing_url(href)

                    if unique and href in fetched_links:
                        continue
                    fetched_links.add(href)

                    fetched_results.append(TextResult(
                        title=title,
                        href=href,
                        body=body
                    ))

            # Queue the next results page, if Bing offers one.
            next_page_tag = soup.select_one('div#b_content nav[role="navigation"] a.sb_pagN')
            if next_page_tag and next_page_tag.get('href'):
                urls_to_fetch.append(self.base_url + next_page_tag['href'])

            if self.sleep_interval:
                sleep(self.sleep_interval)

        return fetched_results[:max_results]

    @staticmethod
    def _decode_bing_url(href: str) -> str:
        """Unwrap a Bing '/ck/a?' redirect link to its base64-encoded target.

        Returns ``href`` unchanged if decoding fails for any reason.
        """
        import base64
        from urllib.parse import parse_qs, urlparse

        try:
            query_params = parse_qs(urlparse(href).query)
            if 'u' in query_params:
                encoded_url = query_params['u'][0]
                # Bing prefixes the base64 payload with a literal "a1".
                if encoded_url.startswith('a1'):
                    encoded_url = encoded_url[2:]
                # Restore base64 padding stripped by Bing.
                padding = len(encoded_url) % 4
                if padding:
                    encoded_url += '=' * (4 - padding)
                return base64.urlsafe_b64decode(encoded_url).decode()
        except Exception:
            # Best-effort unwrap: fall back to the wrapped link on failure.
            pass
        return href
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
"""Brave search engine package."""
|
|
2
|
+
|
|
3
|
+
from .base import BraveBase
|
|
4
|
+
from .images import BraveImages
|
|
5
|
+
from .news import BraveNews
|
|
6
|
+
from .suggestions import BraveSuggestions, SuggestionResult, SuggestionsResponse
|
|
7
|
+
from .text import BraveTextSearch
|
|
8
|
+
from .videos import BraveVideos
|
|
9
|
+
|
|
10
|
+
__all__ = [
|
|
11
|
+
"BraveBase",
|
|
12
|
+
"BraveImages",
|
|
13
|
+
"BraveNews",
|
|
14
|
+
"BraveSuggestions",
|
|
15
|
+
"BraveTextSearch",
|
|
16
|
+
"BraveVideos",
|
|
17
|
+
"SuggestionResult",
|
|
18
|
+
"SuggestionsResponse",
|
|
19
|
+
]
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"""Base class for Brave search implementations."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any, Optional
|
|
6
|
+
|
|
7
|
+
from curl_cffi.requests import Session
|
|
8
|
+
|
|
9
|
+
from ....litagent import LitAgent
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class BraveBase:
    """Base class for Brave search engines."""

    def __init__(
        self,
        timeout: int = 10,
        proxies: dict[str, str] | None = None,
        verify: bool = True,
        lang: str = "en-US",
        sleep_interval: float = 0.0,
        impersonate: str = "chrome110",
    ):
        """Initialize Brave base client.

        Args:
            timeout: Timeout value for requests.
            proxies: Dictionary of proxy settings.
            verify: SSL verification flag.
            lang: Language setting.
            sleep_interval: Sleep interval between requests.
            impersonate: Browser to impersonate.
        """
        from typing import cast

        # Plain configuration attributes shared by all Brave engines.
        self.timeout = timeout
        self.proxies = proxies
        self.verify = verify
        self.lang = lang
        self.sleep_interval = sleep_interval
        self.base_url = "https://search.brave.com"

        # curl_cffi session impersonating a real browser; the casts paper
        # over curl_cffi's narrower parameter type hints.
        session = Session(
            proxies=cast(Any, proxies),
            verify=verify,
            timeout=timeout,
            impersonate=cast(Any, impersonate),
        )
        session.headers.update(LitAgent().generate_fingerprint())
        self.session = session
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
"""Brave images search implementation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import base64
|
|
6
|
+
from typing import Any, Mapping
|
|
7
|
+
|
|
8
|
+
from webscout.scout import Scout
|
|
9
|
+
|
|
10
|
+
from ....search.results import ImagesResult
|
|
11
|
+
from .base import BraveBase
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class BraveImages(BraveBase):
    """Brave images search engine.

    Scrapes the Brave image-search results page (``/images``) and returns
    :class:`ImagesResult` items.
    """

    name = "brave_images"
    provider = "brave"
    category = "images"
    search_method = "GET"
    items_xpath = "//button[contains(@class, 'image-result')]"
    elements_xpath: Mapping[str, str] = {
        "title": ".//img/@alt",
        "image": ".//img/@src",
        "source": "string(.//div[contains(@class, 'metadata')])",
    }
    result_type = ImagesResult

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Initialize the engine and point it at Brave's image-search URL."""
        super().__init__(*args, **kwargs)
        self.search_url = f"{self.base_url}/images"

    def build_payload(
        self,
        query: str,
        region: str,
        safesearch: str,
        timelimit: str | None,
        page: int = 1,
        **_: Any,
    ) -> dict[str, str]:
        """Build query parameters for Brave image search.

        Args:
            query: Search keywords.
            region: Region code (e.g. ``"us-en"``); omitted from the payload
                when falsy.
            safesearch: ``"on"``, ``"moderate"``, or ``"off"``
                (case-insensitive). Unknown values fall back to moderate.
            timelimit: Optional Brave ``tf`` time-filter value.
            page: 1-based results page number.

        Returns:
            Mapping of query-string parameter names to string values.
        """
        safesearch_map = {"on": "strict", "moderate": "moderate", "off": "off"}
        payload: dict[str, str] = {
            "q": query,
            "source": "web",
            "safesearch": safesearch_map.get(safesearch.lower(), "moderate"),
            "page": str(page),
        }

        # Brave uses "offset" in multiples of 40 (page 1 => 0)
        if page > 1:
            payload["offset"] = str((page - 1) * 40)

        if timelimit:
            payload["tf"] = timelimit

        if region:
            payload["region"] = region

        return payload

    def run(self, *args: Any, **kwargs: Any) -> list[ImagesResult]:
        """Run image search on Brave.

        Accepts ``keywords``, ``region``, ``safesearch`` and ``max_results``
        positionally or by keyword; an optional ``timelimit`` keyword is
        forwarded to :meth:`build_payload`.

        Returns:
            Up to ``max_results`` deduplicated :class:`ImagesResult` items.

        Raises:
            ValueError: If no keywords are given.
            Exception: If a results page cannot be fetched.
        """
        keywords = args[0] if args else kwargs.get("keywords")
        region = args[1] if len(args) > 1 else kwargs.get("region", "us-en")
        safesearch = args[2] if len(args) > 2 else kwargs.get("safesearch", "moderate")
        max_results = args[3] if len(args) > 3 else kwargs.get("max_results", 10)
        timelimit = kwargs.get("timelimit")

        if max_results is None:
            max_results = 10

        if not keywords:
            raise ValueError("Keywords are mandatory")

        fetched_results: list[ImagesResult] = []
        fetched_urls: set[str] = set()

        def fetch_page(url: str) -> str:
            """Fetch page content."""
            try:
                response = self.session.get(url, timeout=self.timeout)
                response.raise_for_status()
                return response.text
            except Exception as exc:  # pragma: no cover - network handling
                raise Exception(f"Failed to fetch page: {str(exc)}") from exc

        page = 1
        while len(fetched_results) < max_results:
            # BUG FIX: previously the params dict was rebuilt by hand with only
            # q/page/safesearch (dropping region, offset and tf) and joined
            # without URL-encoding, which broke queries containing spaces or
            # reserved characters. Use build_payload + urlencode instead.
            params = self.build_payload(keywords, region, safesearch, timelimit, page=page)
            full_url = f"{self.search_url}?{urlencode(params)}"
            html = fetch_page(full_url)
            soup = Scout(html)

            img_containers = soup.select("button.image-result")
            if not img_containers:
                break

            for container in img_containers:
                if len(fetched_results) >= max_results:
                    break

                img_elem = container.select_one("img")
                if not img_elem:
                    continue

                title = img_elem.get("alt", "")
                image_url = img_elem.get("src", "")

                source_elem = container.select_one('div[class*="metadata"]')
                source = source_elem.get_text(strip=True) if source_elem else ""

                width, height = self._parse_dimensions(container.get("style", ""))

                # Brave serves thumbnails through its image proxy; recover the
                # original URL from the base64 tail when possible.
                original_url = image_url
                if image_url and "imgs.search.brave.com" in image_url:
                    try:
                        original_url = self._extract_original_url(image_url)
                    except Exception:
                        original_url = image_url

                if image_url and image_url not in fetched_urls:
                    fetched_urls.add(image_url)
                    fetched_results.append(
                        ImagesResult(
                            title=title,
                            image=image_url,
                            thumbnail=image_url,
                            url=original_url,
                            height=height,
                            width=width,
                            source=source,
                        )
                    )

            page += 1

            if self.sleep_interval:
                sleep(self.sleep_interval)

        return fetched_results[:max_results]

    @staticmethod
    def _parse_dimensions(style: str) -> tuple[int, int]:
        """Extract ``(width, height)`` from a CSS custom-property style string.

        Brave result buttons carry ``--width:`` / ``--height:`` custom
        properties in their inline style; missing or malformed values yield 0.
        """
        width = 0
        height = 0
        if style:
            if "--width:" in style:
                try:
                    width = int(style.split("--width:")[1].split(";")[0].strip())
                except (ValueError, IndexError):
                    pass
            if "--height:" in style:
                try:
                    height = int(style.split("--height:")[1].split(";")[0].strip())
                except (ValueError, IndexError):
                    pass
        return width, height

    def extract_results(self, html_text: str) -> list[ImagesResult]:
        """Parse Brave image search HTML into results.

        A lightweight single-page parser (no pagination, no proxy-URL
        decoding, dimensions reported as 0).
        """
        soup = Scout(html_text)
        results: list[ImagesResult] = []
        for container in soup.select("button.image-result"):
            img_elem = container.select_one("img")
            if not img_elem:
                continue

            title = img_elem.get("alt", "")
            image_url = img_elem.get("src", "")
            source_elem = container.select_one('div[class*="metadata"]')
            source = source_elem.get_text(strip=True) if source_elem else ""

            results.append(
                ImagesResult(
                    title=title,
                    image=image_url,
                    thumbnail=image_url,
                    url=image_url,
                    height=0,
                    width=0,
                    source=source,
                )
            )

        return results

    def _extract_original_url(self, proxy_url: str) -> str:
        """Extract the original image URL from the Brave proxy URL.

        The last path segment of an ``imgs.search.brave.com`` URL is a
        URL-safe base64 encoding of the source URL. On any decode failure
        the proxy URL itself is returned unchanged.
        """
        try:
            parts = proxy_url.split("/")
            base64_part = parts[-1] if parts else ""
            if base64_part and len(base64_part) > 10:
                # Convert URL-safe alphabet back to standard base64 and
                # restore stripped padding before decoding.
                normalized = base64_part.replace("-", "+").replace("_", "/").replace(" ", "")
                padding = 4 - (len(normalized) % 4)
                if padding and padding != 4:
                    normalized += "=" * padding

                decoded = base64.b64decode(normalized).decode("utf-8", errors="ignore")
                if decoded.startswith("http"):
                    return decoded
        except Exception:
            pass

        return proxy_url
|