webscout-8.2.9-py3-none-any.whl → webscout-2026.1.19-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- webscout/AIauto.py +524 -251
- webscout/AIbase.py +247 -319
- webscout/AIutel.py +68 -703
- webscout/Bard.py +1072 -1026
- webscout/Extra/GitToolkit/__init__.py +10 -10
- webscout/Extra/GitToolkit/gitapi/__init__.py +20 -12
- webscout/Extra/GitToolkit/gitapi/gist.py +142 -0
- webscout/Extra/GitToolkit/gitapi/organization.py +91 -0
- webscout/Extra/GitToolkit/gitapi/repository.py +308 -195
- webscout/Extra/GitToolkit/gitapi/search.py +162 -0
- webscout/Extra/GitToolkit/gitapi/trending.py +236 -0
- webscout/Extra/GitToolkit/gitapi/user.py +128 -96
- webscout/Extra/GitToolkit/gitapi/utils.py +82 -62
- webscout/Extra/YTToolkit/README.md +443 -375
- webscout/Extra/YTToolkit/YTdownloader.py +953 -957
- webscout/Extra/YTToolkit/__init__.py +3 -3
- webscout/Extra/YTToolkit/transcriber.py +595 -476
- webscout/Extra/YTToolkit/ytapi/README.md +230 -44
- webscout/Extra/YTToolkit/ytapi/__init__.py +22 -6
- webscout/Extra/YTToolkit/ytapi/captions.py +190 -0
- webscout/Extra/YTToolkit/ytapi/channel.py +302 -307
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
- webscout/Extra/YTToolkit/ytapi/extras.py +178 -118
- webscout/Extra/YTToolkit/ytapi/hashtag.py +120 -0
- webscout/Extra/YTToolkit/ytapi/https.py +89 -88
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
- webscout/Extra/YTToolkit/ytapi/playlist.py +59 -59
- webscout/Extra/YTToolkit/ytapi/pool.py +8 -8
- webscout/Extra/YTToolkit/ytapi/query.py +143 -40
- webscout/Extra/YTToolkit/ytapi/shorts.py +122 -0
- webscout/Extra/YTToolkit/ytapi/stream.py +68 -63
- webscout/Extra/YTToolkit/ytapi/suggestions.py +97 -0
- webscout/Extra/YTToolkit/ytapi/utils.py +66 -62
- webscout/Extra/YTToolkit/ytapi/video.py +403 -232
- webscout/Extra/__init__.py +2 -3
- webscout/Extra/gguf.py +1298 -684
- webscout/Extra/tempmail/README.md +487 -487
- webscout/Extra/tempmail/__init__.py +28 -28
- webscout/Extra/tempmail/async_utils.py +143 -141
- webscout/Extra/tempmail/base.py +172 -161
- webscout/Extra/tempmail/cli.py +191 -187
- webscout/Extra/tempmail/emailnator.py +88 -84
- webscout/Extra/tempmail/mail_tm.py +378 -361
- webscout/Extra/tempmail/temp_mail_io.py +304 -292
- webscout/Extra/weather.py +196 -194
- webscout/Extra/weather_ascii.py +17 -15
- webscout/Provider/AISEARCH/PERPLEXED_search.py +175 -0
- webscout/Provider/AISEARCH/Perplexity.py +292 -333
- webscout/Provider/AISEARCH/README.md +106 -279
- webscout/Provider/AISEARCH/__init__.py +16 -9
- webscout/Provider/AISEARCH/brave_search.py +298 -0
- webscout/Provider/AISEARCH/iask_search.py +357 -410
- webscout/Provider/AISEARCH/monica_search.py +200 -220
- webscout/Provider/AISEARCH/webpilotai_search.py +242 -255
- webscout/Provider/Algion.py +413 -0
- webscout/Provider/Andi.py +74 -69
- webscout/Provider/Apriel.py +313 -0
- webscout/Provider/Ayle.py +323 -0
- webscout/Provider/ChatSandbox.py +329 -342
- webscout/Provider/ClaudeOnline.py +365 -0
- webscout/Provider/Cohere.py +232 -208
- webscout/Provider/DeepAI.py +367 -0
- webscout/Provider/Deepinfra.py +467 -340
- webscout/Provider/EssentialAI.py +217 -0
- webscout/Provider/ExaAI.py +274 -261
- webscout/Provider/Gemini.py +175 -169
- webscout/Provider/GithubChat.py +385 -369
- webscout/Provider/Gradient.py +286 -0
- webscout/Provider/Groq.py +556 -801
- webscout/Provider/HadadXYZ.py +323 -0
- webscout/Provider/HeckAI.py +392 -375
- webscout/Provider/HuggingFace.py +387 -0
- webscout/Provider/IBM.py +340 -0
- webscout/Provider/Jadve.py +317 -291
- webscout/Provider/K2Think.py +306 -0
- webscout/Provider/Koboldai.py +221 -384
- webscout/Provider/Netwrck.py +273 -270
- webscout/Provider/Nvidia.py +310 -0
- webscout/Provider/OPENAI/DeepAI.py +489 -0
- webscout/Provider/OPENAI/K2Think.py +423 -0
- webscout/Provider/OPENAI/PI.py +463 -0
- webscout/Provider/OPENAI/README.md +890 -952
- webscout/Provider/OPENAI/TogetherAI.py +405 -0
- webscout/Provider/OPENAI/TwoAI.py +255 -357
- webscout/Provider/OPENAI/__init__.py +148 -40
- webscout/Provider/OPENAI/ai4chat.py +348 -293
- webscout/Provider/OPENAI/akashgpt.py +436 -0
- webscout/Provider/OPENAI/algion.py +303 -0
- webscout/Provider/OPENAI/{exachat.py → ayle.py} +365 -444
- webscout/Provider/OPENAI/base.py +253 -249
- webscout/Provider/OPENAI/cerebras.py +296 -0
- webscout/Provider/OPENAI/chatgpt.py +870 -556
- webscout/Provider/OPENAI/chatsandbox.py +233 -173
- webscout/Provider/OPENAI/deepinfra.py +403 -322
- webscout/Provider/OPENAI/e2b.py +2370 -1414
- webscout/Provider/OPENAI/elmo.py +278 -0
- webscout/Provider/OPENAI/exaai.py +452 -417
- webscout/Provider/OPENAI/freeassist.py +446 -0
- webscout/Provider/OPENAI/gradient.py +448 -0
- webscout/Provider/OPENAI/groq.py +380 -364
- webscout/Provider/OPENAI/hadadxyz.py +292 -0
- webscout/Provider/OPENAI/heckai.py +333 -308
- webscout/Provider/OPENAI/huggingface.py +321 -0
- webscout/Provider/OPENAI/ibm.py +425 -0
- webscout/Provider/OPENAI/llmchat.py +253 -0
- webscout/Provider/OPENAI/llmchatco.py +378 -335
- webscout/Provider/OPENAI/meta.py +541 -0
- webscout/Provider/OPENAI/netwrck.py +374 -357
- webscout/Provider/OPENAI/nvidia.py +317 -0
- webscout/Provider/OPENAI/oivscode.py +348 -287
- webscout/Provider/OPENAI/openrouter.py +328 -0
- webscout/Provider/OPENAI/pydantic_imports.py +1 -172
- webscout/Provider/OPENAI/sambanova.py +397 -0
- webscout/Provider/OPENAI/sonus.py +305 -304
- webscout/Provider/OPENAI/textpollinations.py +370 -339
- webscout/Provider/OPENAI/toolbaz.py +375 -413
- webscout/Provider/OPENAI/typefully.py +419 -355
- webscout/Provider/OPENAI/typliai.py +279 -0
- webscout/Provider/OPENAI/utils.py +314 -318
- webscout/Provider/OPENAI/wisecat.py +359 -387
- webscout/Provider/OPENAI/writecream.py +185 -163
- webscout/Provider/OPENAI/x0gpt.py +462 -365
- webscout/Provider/OPENAI/zenmux.py +380 -0
- webscout/Provider/OpenRouter.py +386 -0
- webscout/Provider/Openai.py +337 -496
- webscout/Provider/PI.py +443 -429
- webscout/Provider/QwenLM.py +346 -254
- webscout/Provider/STT/__init__.py +28 -0
- webscout/Provider/STT/base.py +303 -0
- webscout/Provider/STT/elevenlabs.py +264 -0
- webscout/Provider/Sambanova.py +317 -0
- webscout/Provider/TTI/README.md +69 -82
- webscout/Provider/TTI/__init__.py +37 -7
- webscout/Provider/TTI/base.py +147 -64
- webscout/Provider/TTI/claudeonline.py +393 -0
- webscout/Provider/TTI/magicstudio.py +292 -201
- webscout/Provider/TTI/miragic.py +180 -0
- webscout/Provider/TTI/pollinations.py +331 -221
- webscout/Provider/TTI/together.py +334 -0
- webscout/Provider/TTI/utils.py +14 -11
- webscout/Provider/TTS/README.md +186 -192
- webscout/Provider/TTS/__init__.py +43 -10
- webscout/Provider/TTS/base.py +523 -159
- webscout/Provider/TTS/deepgram.py +286 -156
- webscout/Provider/TTS/elevenlabs.py +189 -111
- webscout/Provider/TTS/freetts.py +218 -0
- webscout/Provider/TTS/murfai.py +288 -113
- webscout/Provider/TTS/openai_fm.py +364 -129
- webscout/Provider/TTS/parler.py +203 -111
- webscout/Provider/TTS/qwen.py +334 -0
- webscout/Provider/TTS/sherpa.py +286 -0
- webscout/Provider/TTS/speechma.py +693 -580
- webscout/Provider/TTS/streamElements.py +275 -333
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TextPollinationsAI.py +331 -308
- webscout/Provider/TogetherAI.py +450 -0
- webscout/Provider/TwoAI.py +309 -475
- webscout/Provider/TypliAI.py +311 -305
- webscout/Provider/UNFINISHED/ChatHub.py +219 -209
- webscout/Provider/{OPENAI/glider.py → UNFINISHED/ChutesAI.py} +331 -326
- webscout/Provider/{GizAI.py → UNFINISHED/GizAI.py} +300 -295
- webscout/Provider/{Marcus.py → UNFINISHED/Marcus.py} +218 -198
- webscout/Provider/UNFINISHED/Qodo.py +481 -0
- webscout/Provider/{MCPCore.py → UNFINISHED/XenAI.py} +330 -315
- webscout/Provider/UNFINISHED/Youchat.py +347 -330
- webscout/Provider/UNFINISHED/aihumanizer.py +41 -0
- webscout/Provider/UNFINISHED/grammerchecker.py +37 -0
- webscout/Provider/UNFINISHED/liner.py +342 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +246 -263
- webscout/Provider/{samurai.py → UNFINISHED/samurai.py} +231 -224
- webscout/Provider/WiseCat.py +256 -233
- webscout/Provider/WrDoChat.py +390 -370
- webscout/Provider/__init__.py +115 -174
- webscout/Provider/ai4chat.py +181 -174
- webscout/Provider/akashgpt.py +330 -335
- webscout/Provider/cerebras.py +397 -290
- webscout/Provider/cleeai.py +236 -213
- webscout/Provider/elmo.py +291 -283
- webscout/Provider/geminiapi.py +343 -208
- webscout/Provider/julius.py +245 -223
- webscout/Provider/learnfastai.py +333 -325
- webscout/Provider/llama3mitril.py +230 -215
- webscout/Provider/llmchat.py +308 -258
- webscout/Provider/llmchatco.py +321 -306
- webscout/Provider/meta.py +996 -801
- webscout/Provider/oivscode.py +332 -309
- webscout/Provider/searchchat.py +316 -292
- webscout/Provider/sonus.py +264 -258
- webscout/Provider/toolbaz.py +359 -353
- webscout/Provider/turboseek.py +332 -266
- webscout/Provider/typefully.py +262 -202
- webscout/Provider/x0gpt.py +332 -299
- webscout/__init__.py +31 -39
- webscout/__main__.py +5 -5
- webscout/cli.py +585 -524
- webscout/client.py +1497 -70
- webscout/conversation.py +140 -436
- webscout/exceptions.py +383 -362
- webscout/litagent/__init__.py +29 -29
- webscout/litagent/agent.py +492 -455
- webscout/litagent/constants.py +60 -60
- webscout/models.py +505 -181
- webscout/optimizers.py +74 -420
- webscout/prompt_manager.py +376 -288
- webscout/sanitize.py +1514 -0
- webscout/scout/README.md +452 -404
- webscout/scout/__init__.py +8 -8
- webscout/scout/core/__init__.py +7 -7
- webscout/scout/core/crawler.py +330 -210
- webscout/scout/core/scout.py +800 -607
- webscout/scout/core/search_result.py +51 -96
- webscout/scout/core/text_analyzer.py +64 -63
- webscout/scout/core/text_utils.py +412 -277
- webscout/scout/core/web_analyzer.py +54 -52
- webscout/scout/element.py +872 -478
- webscout/scout/parsers/__init__.py +70 -69
- webscout/scout/parsers/html5lib_parser.py +182 -172
- webscout/scout/parsers/html_parser.py +238 -236
- webscout/scout/parsers/lxml_parser.py +203 -178
- webscout/scout/utils.py +38 -37
- webscout/search/__init__.py +47 -0
- webscout/search/base.py +201 -0
- webscout/search/bing_main.py +45 -0
- webscout/search/brave_main.py +92 -0
- webscout/search/duckduckgo_main.py +57 -0
- webscout/search/engines/__init__.py +127 -0
- webscout/search/engines/bing/__init__.py +15 -0
- webscout/search/engines/bing/base.py +35 -0
- webscout/search/engines/bing/images.py +114 -0
- webscout/search/engines/bing/news.py +96 -0
- webscout/search/engines/bing/suggestions.py +36 -0
- webscout/search/engines/bing/text.py +109 -0
- webscout/search/engines/brave/__init__.py +19 -0
- webscout/search/engines/brave/base.py +47 -0
- webscout/search/engines/brave/images.py +213 -0
- webscout/search/engines/brave/news.py +353 -0
- webscout/search/engines/brave/suggestions.py +318 -0
- webscout/search/engines/brave/text.py +167 -0
- webscout/search/engines/brave/videos.py +364 -0
- webscout/search/engines/duckduckgo/__init__.py +25 -0
- webscout/search/engines/duckduckgo/answers.py +80 -0
- webscout/search/engines/duckduckgo/base.py +189 -0
- webscout/search/engines/duckduckgo/images.py +100 -0
- webscout/search/engines/duckduckgo/maps.py +183 -0
- webscout/search/engines/duckduckgo/news.py +70 -0
- webscout/search/engines/duckduckgo/suggestions.py +22 -0
- webscout/search/engines/duckduckgo/text.py +221 -0
- webscout/search/engines/duckduckgo/translate.py +48 -0
- webscout/search/engines/duckduckgo/videos.py +80 -0
- webscout/search/engines/duckduckgo/weather.py +84 -0
- webscout/search/engines/mojeek.py +61 -0
- webscout/search/engines/wikipedia.py +77 -0
- webscout/search/engines/yahoo/__init__.py +41 -0
- webscout/search/engines/yahoo/answers.py +19 -0
- webscout/search/engines/yahoo/base.py +34 -0
- webscout/search/engines/yahoo/images.py +323 -0
- webscout/search/engines/yahoo/maps.py +19 -0
- webscout/search/engines/yahoo/news.py +258 -0
- webscout/search/engines/yahoo/suggestions.py +140 -0
- webscout/search/engines/yahoo/text.py +273 -0
- webscout/search/engines/yahoo/translate.py +19 -0
- webscout/search/engines/yahoo/videos.py +302 -0
- webscout/search/engines/yahoo/weather.py +220 -0
- webscout/search/engines/yandex.py +67 -0
- webscout/search/engines/yep/__init__.py +13 -0
- webscout/search/engines/yep/base.py +34 -0
- webscout/search/engines/yep/images.py +101 -0
- webscout/search/engines/yep/suggestions.py +38 -0
- webscout/search/engines/yep/text.py +99 -0
- webscout/search/http_client.py +172 -0
- webscout/search/results.py +141 -0
- webscout/search/yahoo_main.py +57 -0
- webscout/search/yep_main.py +48 -0
- webscout/server/__init__.py +48 -0
- webscout/server/config.py +78 -0
- webscout/server/exceptions.py +69 -0
- webscout/server/providers.py +286 -0
- webscout/server/request_models.py +131 -0
- webscout/server/request_processing.py +404 -0
- webscout/server/routes.py +642 -0
- webscout/server/server.py +351 -0
- webscout/server/ui_templates.py +1171 -0
- webscout/swiftcli/__init__.py +79 -95
- webscout/swiftcli/core/__init__.py +7 -7
- webscout/swiftcli/core/cli.py +574 -297
- webscout/swiftcli/core/context.py +98 -104
- webscout/swiftcli/core/group.py +268 -241
- webscout/swiftcli/decorators/__init__.py +28 -28
- webscout/swiftcli/decorators/command.py +243 -221
- webscout/swiftcli/decorators/options.py +247 -220
- webscout/swiftcli/decorators/output.py +392 -252
- webscout/swiftcli/exceptions.py +21 -21
- webscout/swiftcli/plugins/__init__.py +9 -9
- webscout/swiftcli/plugins/base.py +134 -135
- webscout/swiftcli/plugins/manager.py +269 -269
- webscout/swiftcli/utils/__init__.py +58 -59
- webscout/swiftcli/utils/formatting.py +251 -252
- webscout/swiftcli/utils/parsing.py +368 -267
- webscout/update_checker.py +280 -136
- webscout/utils.py +28 -14
- webscout/version.py +2 -1
- webscout/version.py.bak +3 -0
- webscout/zeroart/__init__.py +218 -135
- webscout/zeroart/base.py +70 -66
- webscout/zeroart/effects.py +155 -101
- webscout/zeroart/fonts.py +1799 -1239
- webscout-2026.1.19.dist-info/METADATA +638 -0
- webscout-2026.1.19.dist-info/RECORD +312 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/WHEEL +1 -1
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/entry_points.txt +1 -1
- webscout/DWEBS.py +0 -520
- webscout/Extra/Act.md +0 -309
- webscout/Extra/GitToolkit/gitapi/README.md +0 -110
- webscout/Extra/autocoder/__init__.py +0 -9
- webscout/Extra/autocoder/autocoder.py +0 -1105
- webscout/Extra/autocoder/autocoder_utiles.py +0 -332
- webscout/Extra/gguf.md +0 -430
- webscout/Extra/weather.md +0 -281
- webscout/Litlogger/README.md +0 -10
- webscout/Litlogger/__init__.py +0 -15
- webscout/Litlogger/formats.py +0 -4
- webscout/Litlogger/handlers.py +0 -103
- webscout/Litlogger/levels.py +0 -13
- webscout/Litlogger/logger.py +0 -92
- webscout/Provider/AI21.py +0 -177
- webscout/Provider/AISEARCH/DeepFind.py +0 -254
- webscout/Provider/AISEARCH/felo_search.py +0 -202
- webscout/Provider/AISEARCH/genspark_search.py +0 -324
- webscout/Provider/AISEARCH/hika_search.py +0 -186
- webscout/Provider/AISEARCH/scira_search.py +0 -298
- webscout/Provider/Aitopia.py +0 -316
- webscout/Provider/AllenAI.py +0 -440
- webscout/Provider/Blackboxai.py +0 -791
- webscout/Provider/ChatGPTClone.py +0 -237
- webscout/Provider/ChatGPTGratis.py +0 -194
- webscout/Provider/Cloudflare.py +0 -324
- webscout/Provider/ExaChat.py +0 -358
- webscout/Provider/Flowith.py +0 -217
- webscout/Provider/FreeGemini.py +0 -250
- webscout/Provider/Glider.py +0 -225
- webscout/Provider/HF_space/__init__.py +0 -0
- webscout/Provider/HF_space/qwen_qwen2.py +0 -206
- webscout/Provider/HuggingFaceChat.py +0 -469
- webscout/Provider/Hunyuan.py +0 -283
- webscout/Provider/LambdaChat.py +0 -411
- webscout/Provider/Llama3.py +0 -259
- webscout/Provider/Nemotron.py +0 -218
- webscout/Provider/OLLAMA.py +0 -396
- webscout/Provider/OPENAI/BLACKBOXAI.py +0 -766
- webscout/Provider/OPENAI/Cloudflare.py +0 -378
- webscout/Provider/OPENAI/FreeGemini.py +0 -283
- webscout/Provider/OPENAI/NEMOTRON.py +0 -232
- webscout/Provider/OPENAI/Qwen3.py +0 -283
- webscout/Provider/OPENAI/api.py +0 -969
- webscout/Provider/OPENAI/c4ai.py +0 -373
- webscout/Provider/OPENAI/chatgptclone.py +0 -494
- webscout/Provider/OPENAI/copilot.py +0 -242
- webscout/Provider/OPENAI/flowith.py +0 -162
- webscout/Provider/OPENAI/freeaichat.py +0 -359
- webscout/Provider/OPENAI/mcpcore.py +0 -389
- webscout/Provider/OPENAI/multichat.py +0 -376
- webscout/Provider/OPENAI/opkfc.py +0 -496
- webscout/Provider/OPENAI/scirachat.py +0 -477
- webscout/Provider/OPENAI/standardinput.py +0 -433
- webscout/Provider/OPENAI/typegpt.py +0 -364
- webscout/Provider/OPENAI/uncovrAI.py +0 -463
- webscout/Provider/OPENAI/venice.py +0 -431
- webscout/Provider/OPENAI/yep.py +0 -382
- webscout/Provider/OpenGPT.py +0 -209
- webscout/Provider/Perplexitylabs.py +0 -415
- webscout/Provider/Reka.py +0 -214
- webscout/Provider/StandardInput.py +0 -290
- webscout/Provider/TTI/aiarta.py +0 -365
- webscout/Provider/TTI/artbit.py +0 -0
- webscout/Provider/TTI/fastflux.py +0 -200
- webscout/Provider/TTI/piclumen.py +0 -203
- webscout/Provider/TTI/pixelmuse.py +0 -225
- webscout/Provider/TTS/gesserit.py +0 -128
- webscout/Provider/TTS/sthir.py +0 -94
- webscout/Provider/TeachAnything.py +0 -229
- webscout/Provider/UNFINISHED/puterjs.py +0 -635
- webscout/Provider/UNFINISHED/test_lmarena.py +0 -119
- webscout/Provider/Venice.py +0 -258
- webscout/Provider/VercelAI.py +0 -253
- webscout/Provider/Writecream.py +0 -246
- webscout/Provider/WritingMate.py +0 -269
- webscout/Provider/asksteve.py +0 -220
- webscout/Provider/chatglm.py +0 -215
- webscout/Provider/copilot.py +0 -425
- webscout/Provider/freeaichat.py +0 -285
- webscout/Provider/granite.py +0 -235
- webscout/Provider/hermes.py +0 -266
- webscout/Provider/koala.py +0 -170
- webscout/Provider/lmarena.py +0 -198
- webscout/Provider/multichat.py +0 -364
- webscout/Provider/scira_chat.py +0 -299
- webscout/Provider/scnet.py +0 -243
- webscout/Provider/talkai.py +0 -194
- webscout/Provider/typegpt.py +0 -289
- webscout/Provider/uncovr.py +0 -368
- webscout/Provider/yep.py +0 -389
- webscout/litagent/Readme.md +0 -276
- webscout/litprinter/__init__.py +0 -59
- webscout/swiftcli/Readme.md +0 -323
- webscout/tempid.py +0 -128
- webscout/webscout_search.py +0 -1184
- webscout/webscout_search_async.py +0 -654
- webscout/yep_search.py +0 -347
- webscout/zeroart/README.md +0 -89
- webscout-8.2.9.dist-info/METADATA +0 -1033
- webscout-8.2.9.dist-info/RECORD +0 -289
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/top_level.txt +0 -0
webscout/Provider/Algion.py
ADDED

@@ -0,0 +1,413 @@
+import json
+from typing import Any, Dict, Generator, Optional, Union, cast
+
+from curl_cffi import CurlError
+from curl_cffi.requests import Session
+
+from webscout import exceptions
+from webscout.AIbase import Provider, Response
+from webscout.AIutel import AwesomePrompts, Conversation, Optimizers, sanitize_stream
+from webscout.litagent import LitAgent
+
+
+class Algion(Provider):
+    """
+    A class to interact with the Algion API (OpenAI-compatible free API).
+
+    Attributes:
+        AVAILABLE_MODELS: List of available models
+        url: API endpoint URL
+        api: API key for authentication
+
+    Examples:
+        >>> from webscout.Provider.Algion import Algion
+        >>> ai = Algion()
+        >>> response = ai.chat("Hello, how are you?")
+        >>> print(response)
+    """
+    @classmethod
+    def get_models(cls, api_key: Optional[str] = None):
+        """Fetch available models from Algion API.
+
+        Args:
+            api_key (str, optional): Algion API key. If not provided, uses default free key.
+
+        Returns:
+            list: List of available model IDs
+        """
+        api_key = api_key or "123123"
+
+        try:
+            # Use a temporary curl_cffi session for this class method
+            temp_session = Session()
+            headers = {
+                "Content-Type": "application/json",
+                "Authorization": f"Bearer {api_key}",
+            }
+
+            response = temp_session.get(
+                "https://api.algion.dev/v1/models",
+                headers=headers,
+                impersonate="chrome110"
+            )
+
+            if response.status_code != 200:
+                raise Exception(f"Failed to fetch models: HTTP {response.status_code}")
+
+            data = response.json()
+            if "data" in data and isinstance(data["data"], list):
+                return [model["id"] for model in data["data"]]
+            raise Exception("Invalid response format from API")
+
+        except (CurlError, Exception) as e:
+            raise Exception(f"Failed to fetch models: {str(e)}")
+
+    required_auth = False
+    AVAILABLE_MODELS = ["gpt-4o", "gpt-4o-mini", "claude-3-5-sonnet", "o1-mini"]
+
+
+    @staticmethod
+    def _algion_extractor(chunk: Union[str, Dict[str, Any]]) -> Optional[str]:
+        """Extracts content from Algion stream JSON objects (OpenAI format)."""
+        if isinstance(chunk, dict):
+            return chunk.get("choices", [{}])[0].get("delta", {}).get("content")
+        return None
+
+    def __init__(
+        self,
+        api_key: Optional[str] = "123123", # Default free API key
+        is_conversation: bool = True,
+        max_tokens: int = 2049,
+        timeout: int = 30,
+        intro: Optional[str] = None,
+        filepath: Optional[str] = None,
+        update_file: bool = True,
+        proxies: dict = {},
+        history_offset: int = 10250,
+        act: Optional[str] = None,
+        model: str = "gpt-4o",
+        system_prompt: str = "You are a helpful assistant.",
+        browser: str = "chrome"
+    ):
+        """Initializes the Algion API client.
+
+        Args:
+            api_key: API key for authentication (default: "123123" - free key)
+            is_conversation: Whether to use conversation mode
+            max_tokens: Maximum tokens to generate
+            timeout: Request timeout in seconds
+            intro: Introduction message for conversation
+            filepath: Path to save conversation history
+            update_file: Whether to update conversation file
+            proxies: Proxy configuration
+            history_offset: Conversation history offset
+            act: Act/role for the assistant
+            model: Model to use (default: "gpt-4o")
+            system_prompt: System prompt for the assistant
+            browser: Browser fingerprint to use
+        """
+        if model not in self.AVAILABLE_MODELS:
+            raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")
+
+        self.url = "https://api.algion.dev/v1/chat/completions"
+
+        # Initialize LitAgent
+        self.agent = LitAgent()
+        self.fingerprint = self.agent.generate_fingerprint(browser)
+        self.api = api_key
+
+        # Set up headers
+        self.headers = {
+            "Accept": self.fingerprint["accept"],
+            "Accept-Language": self.fingerprint["accept_language"],
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.api}",
+            "User-Agent": self.fingerprint.get("user_agent", ""),
+            "Sec-CH-UA": self.fingerprint.get("sec_ch_ua", ""),
+            "Sec-CH-UA-Mobile": "?0",
+            "Sec-CH-UA-Platform": f'"{self.fingerprint.get("platform", "")}"',
+            "Origin": "https://algion.dev",
+            "Referer": "https://algion.dev/",
+            "Sec-Fetch-Dest": "empty",
+            "Sec-Fetch-Mode": "cors",
+            "Sec-Fetch-Site": "same-site",
+        }
+
+        # Initialize curl_cffi Session
+        self.session = Session()
+        self.session.headers.update(self.headers)
+        if proxies:
+            self.session.proxies.update(proxies)
+
+        self.system_prompt = system_prompt
+        self.is_conversation = is_conversation
+        self.max_tokens_to_sample = max_tokens
+        self.timeout = timeout
+        self.last_response = {}
+        self.model = model
+
+        self.__available_optimizers = (
+            method
+            for method in dir(Optimizers)
+            if callable(getattr(Optimizers, method)) and not method.startswith("__")
+        )
+        self.conversation = Conversation(
+            is_conversation, self.max_tokens_to_sample, filepath, update_file
+        )
+        self.conversation.history_offset = history_offset
+
+        if act:
+            act_val = cast(Union[str, int], act)
+            self.conversation.intro = AwesomePrompts().get_act(
+                act_val, default=self.conversation.intro, case_insensitive=True
+            ) or self.conversation.intro
+        elif intro:
+            self.conversation.intro = intro
+
+    def refresh_identity(self, browser: Optional[str] = None):
+        """
+        Refreshes the browser identity fingerprint.
+
+        Args:
+            browser: Specific browser to use for the new fingerprint
+
+        Returns:
+            dict: New fingerprint
+        """
+        browser = browser or self.fingerprint.get("browser_type", "chrome")
+        self.fingerprint = self.agent.generate_fingerprint(browser)
+
+        # Update headers with new fingerprint
+        self.headers.update({
+            "Accept": self.fingerprint["accept"],
+            "Accept-Language": self.fingerprint["accept_language"],
+            "User-Agent": self.fingerprint.get("user_agent", ""),
+            "Sec-CH-UA": self.fingerprint.get("sec_ch_ua", ""),
+            "Sec-CH-UA-Platform": f'"{self.fingerprint.get("platform", "")}"',
+        })
+
+        # Update session headers
+        self.session.headers.update(self.headers)
+
+        return self.fingerprint
+
+    def ask(
+        self,
+        prompt: str,
+        stream: bool = False,
+        raw: bool = False,
+        optimizer: Optional[str] = None,
+        conversationally: bool = False,
+        **kwargs: Any,
+    ) -> Response:
+        """
+        Sends a prompt to the Algion API and returns the response.
+
+        Args:
+            prompt: The prompt to send
+            stream: Whether to stream the response
+            raw: Whether to return raw response
+            optimizer: Optimizer to use for the prompt
+            conversationally: Whether to use conversation mode
+
+        Returns:
+            Dict or Generator: Response from the API
+
+        Examples:
+            >>> ai = Algion()
+            >>> response = ai.ask("What is AI?")
+            >>> print(response['text'])
+        """
+        conversation_prompt = self.conversation.gen_complete_prompt(prompt)
+        if optimizer:
+            if optimizer in self.__available_optimizers:
+                conversation_prompt = getattr(Optimizers, optimizer)(
+                    conversation_prompt if conversationally else prompt
+                )
+            else:
+                raise Exception(f"Optimizer is not one of {self.__available_optimizers}")
+
+        # Payload construction (OpenAI format)
+        payload = {
+            "model": self.model,
+            "messages": [
+                {"role": "system", "content": self.system_prompt},
+                {"role": "user", "content": conversation_prompt},
+            ],
+            "stream": stream
+        }
+
+        def for_stream():
+            streaming_text = ""
+            try:
+                response = self.session.post(
+                    self.url,
+                    data=json.dumps(payload),
+                    stream=True,
+                    timeout=self.timeout,
+                    impersonate="chrome110"
+                )
+                response.raise_for_status()
+
+                # Use sanitize_stream for OpenAI-format streaming
+                processed_stream = sanitize_stream(
+                    data=response.iter_content(chunk_size=None),
+                    intro_value="data:",
+                    to_json=True,
+                    skip_markers=["[DONE]"],
+                    content_extractor=self._algion_extractor,
+                    yield_raw_on_error=False,
+                    raw=raw
+                )
+
+                for content_chunk in processed_stream:
+                    # Always yield as string, even in raw mode
+                    if isinstance(content_chunk, bytes):
+                        content_chunk = content_chunk.decode('utf-8', errors='ignore')
+
+                    if raw:
+                        yield content_chunk
+                    else:
+                        if content_chunk and isinstance(content_chunk, str):
+                            streaming_text += content_chunk
+                            yield dict(text=content_chunk)
+
+            except CurlError as e:
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed (CurlError): {str(e)}"
+                ) from e
+            except Exception as e:
+                err_text = ""
+                if hasattr(e, 'response'):
+                    response_obj = getattr(e, 'response')
+                    if hasattr(response_obj, 'text'):
+                        err_text = getattr(response_obj, 'text')
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed ({type(e).__name__}): {e} - {err_text}"
+                ) from e
+            finally:
+                if not raw and streaming_text:
+                    self.last_response = {"text": streaming_text}
+                    self.conversation.update_chat_history(prompt, streaming_text)
+
+        def for_non_stream():
+            try:
+                response = self.session.post(
+                    self.url,
+                    data=json.dumps(payload),
+                    timeout=self.timeout,
+                    impersonate="chrome110"
+                )
+                response.raise_for_status()
+
+                response_text = response.text
+
+                # Parse non-streaming JSON response
+                processed_stream = sanitize_stream(
+                    data=response_text,
+                    to_json=True,
+                    intro_value=None,
+                    content_extractor=lambda chunk: chunk.get("choices", [{}])[0].get("message", {}).get("content") if isinstance(chunk, dict) else None,
+                    yield_raw_on_error=False
+                )
+                content = next(processed_stream, None)
+                content = content if isinstance(content, str) else ""
+
+                self.last_response = {"text": content}
+                self.conversation.update_chat_history(prompt, content)
+                return self.last_response if not raw else content
+
+            except CurlError as e:
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed (CurlError): {e}"
+                ) from e
+            except Exception as e:
+                err_text = ""
+                if hasattr(e, 'response'):
+                    response_obj = getattr(e, 'response')
+                    if hasattr(response_obj, 'text'):
+                        err_text = getattr(response_obj, 'text')
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed ({type(e).__name__}): {e} - {err_text}"
+                ) from e
+
+        return for_stream() if stream else for_non_stream()
+
+    def chat(
+        self,
+        prompt: str,
+        stream: bool = False,
+        optimizer: Optional[str] = None,
+        conversationally: bool = False,
+        **kwargs: Any,
+    ) -> Union[str, Generator[str, None, None]]:
+        raw = kwargs.get("raw", False)
+        if stream:
+            def for_stream_chat():
+                gen = self.ask(
+                    prompt, stream=True, raw=raw,
+                    optimizer=optimizer, conversationally=conversationally
+                )
+                if hasattr(gen, "__iter__"):
+                    for response in gen:
+                        if raw:
+                            yield cast(str, response)
+                        else:
+                            yield self.get_message(response)
+            return for_stream_chat()
+        else:
+            result = self.ask(
+                prompt, stream=False, raw=raw,
+                optimizer=optimizer, conversationally=conversationally
+            )
+            if raw:
+                return cast(str, result)
+            else:
+                return self.get_message(result)
+
+    def get_message(self, response: Response) -> str:
+        """
+        Extracts the message from the API response.
+
+        Args:
+            response: The API response dictionary
+
+        Returns:
+            str: The message content
+        """
+        if not isinstance(response, dict):
+            return str(response)
+        return cast(Dict[str, Any], response).get("text", "")
+
+try:
+    fetched_models = Algion.get_models()
+    if fetched_models:
+        Algion.AVAILABLE_MODELS = list(set(Algion.AVAILABLE_MODELS + fetched_models))
+except Exception:
+    pass
+
+if __name__ == "__main__":
+    from rich import print
+
+    print("-" * 80)
+    print(f"{'Model':<50} {'Status':<10} {'Response'}")
+    print("-" * 80)
+
+    for model in Algion.AVAILABLE_MODELS:
+        try:
+            test_ai = Algion(model=model, timeout=60)
+            response = test_ai.chat("Say 'Hello' in one word", stream=True)
+            response_text = ""
+            for chunk in response:
+                response_text += chunk
+
+            if response_text and len(response_text.strip()) > 0:
+                status = "✓"
+                clean_text = response_text.strip().encode('utf-8', errors='ignore').decode('utf-8')
+                display_text = clean_text[:50] + "..." if len(clean_text) > 50 else clean_text
+            else:
+                status = "✗"
+                display_text = "Empty or invalid response"
+            print(f"\r{model:<50} {status:<10} {display_text}")
+        except Exception as e:
+            print(f"\r{model:<50} {'✗':<10} {str(e)}")
webscout/Provider/Andi.py
CHANGED
@@ -1,27 +1,29 @@
+import json
+from typing import Any, Dict, Generator, Optional, Union, cast
 from uuid import uuid4
+
 import requests
-
-from webscout.AIutel import Optimizers
-from webscout.AIutel import Conversation
-from webscout.AIutel import AwesomePrompts, sanitize_stream
-from webscout.AIbase import Provider, AsyncProvider
+
 from webscout import exceptions
-from
-from webscout import
+from webscout.AIbase import Provider, Response
+from webscout.AIutel import AwesomePrompts, Conversation, Optimizers, sanitize_stream
 from webscout.litagent import LitAgent
+from webscout.search import DuckDuckGoSearch
+
 
 class AndiSearch(Provider):
+    required_auth = False
     def __init__(
         self,
         is_conversation: bool = True,
         max_tokens: int = 600,
         timeout: int = 30,
-        intro: str = None,
-        filepath: str = None,
+        intro: Optional[str] = None,
+        filepath: Optional[str] = None,
         update_file: bool = True,
         proxies: dict = {},
         history_offset: int = 10250,
-        act: str = None,
+        act: Optional[str] = None,
     ):
         """Instantiates AndiSearch
 
@@ -71,27 +73,28 @@ class AndiSearch(Provider):
             if callable(getattr(Optimizers, method)) and not method.startswith("__")
         )
         self.session.headers.update(self.headers)
-        Conversation.intro = (
-            AwesomePrompts().get_act(
-                act, raise_not_found=True, default=None, case_insensitive=True
-            )
-            if act
-            else intro or Conversation.intro
-        )
         self.conversation = Conversation(
             is_conversation, self.max_tokens_to_sample, filepath, update_file
         )
         self.conversation.history_offset = history_offset
-
+
+        if act:
+            self.conversation.intro = AwesomePrompts().get_act(cast(Union[str, int], act), default=self.conversation.intro, case_insensitive=True
+            ) or self.conversation.intro
+        elif intro:
+            self.conversation.intro = intro
+        if proxies:
+            self.session.proxies.update(proxies)
 
     def ask(
         self,
         prompt: str,
         stream: bool = False,
         raw: bool = False,
-        optimizer: str = None,
+        optimizer: Optional[str] = None,
         conversationally: bool = False,
-
+        **kwargs: Any,
+    ) -> Response:
 
         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
         if optimizer:
@@ -104,12 +107,12 @@ class AndiSearch(Provider):
                 f"Optimizer is not one of {self.__available_optimizers}"
             )
 
-        # Initialize the
-
+        # Initialize the DuckDuckGo search instance
+        ddg_search = DuckDuckGoSearch()
 
         # Fetch search results
         search_query = prompt
-        search_results =
+        search_results = ddg_search.text(search_query, max_results=7)
 
         # Format the search results into the required serp payload structure
         serp_payload = {
@@ -126,12 +129,12 @@ class AndiSearch(Provider):
             "engine": "andi-b",
             "results": [
                 {
-                    "title": result
-                    "link": result
-                    "desc": result
-                    "image": "",
+                    "title": result.title,
+                    "link": result.href,
+                    "desc": result.body,
+                    "image": "",
                     "type": "website",
-                    "source": result
+                    "source": result.href.split("//")[1].split("/")[0] if "//" in result.href else result.href.split("/")[0] # Extract the domain name
                 }
                 for result in search_results
             ]
@@ -150,27 +153,30 @@ class AndiSearch(Provider):
             )
 
             streaming_text = ""
-
-
-                chunk_size=self.stream_chunk_size,
-
-
-
-
-                streaming_text += value + ("\n" if stream else "")
-                resp = dict(text=streaming_text)
-                self.last_response.update(resp)
-                yield value if raw else resp
-            except json.decoder.JSONDecodeError:
-                pass
-            self.conversation.update_chat_history(
-                prompt, self.get_message(self.last_response)
+            # Use sanitize_stream for processing
+            processed_stream = sanitize_stream(
+                data=response.iter_lines(decode_unicode=True, chunk_size=self.stream_chunk_size, delimiter="\n"),
+                intro_value=None, # No prefix to strip
+                to_json=False, # Response is plain text
+                yield_raw_on_error=True,
+                raw=raw
             )
 
+            for content_chunk in processed_stream:
+                if content_chunk:
+                    if raw:
+                        yield content_chunk
+                    else:
+                        streaming_text += content_chunk + "\n"
+                        yield dict(text=content_chunk)
+
+            self.last_response = {"text": streaming_text.strip()}
+            self.conversation.update_chat_history(prompt, streaming_text.strip())
+
         def for_non_stream():
             for _ in for_stream():
                 pass
-            return self.last_response
+            return self.last_response if not raw else json.dumps(self.last_response)
 
         return for_stream() if stream else for_non_stream()
 
@@ -178,51 +184,50 @@ class AndiSearch(Provider):
         self,
         prompt: str,
         stream: bool = False,
-        optimizer: str = None,
+        optimizer: Optional[str] = None,
         conversationally: bool = False,
-
-
-
-            prompt (str): Prompt to be send.
-            stream (bool, optional): Flag for streaming response. Defaults to False.
-            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
-            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-        Returns:
-            str: Response generated
-        """
-
+        **kwargs: Any,
+    ) -> Union[str, Generator[str, None, None]]:
+        raw = kwargs.get("raw", False)
         def for_stream():
             for response in self.ask(
-                prompt, True, optimizer=optimizer, conversationally=conversationally
+                prompt, True, raw=raw, optimizer=optimizer, conversationally=conversationally
             ):
-
+                if raw:
+                    yield response
+                else:
+                    yield self.get_message(cast(Dict[str, Any], response))
 
         def for_non_stream():
-
-
-
-
-
-
-            )
+            result = self.ask(
+                prompt,
+                False,
+                raw=raw,
+                optimizer=optimizer,
+                conversationally=conversationally,
             )
+            if raw:
+                return cast(str, result)
+            return self.get_message(cast(Dict[str, Any], result))
 
         return for_stream() if stream else for_non_stream()
 
-    def get_message(self, response:
+    def get_message(self, response: Response) -> str:
         """Retrieves message only from response
 
         Args:
-            response (
+            response (Response): Response generated by `self.ask`
 
         Returns:
            str: Message extracted
         """
-
+        if not isinstance(response, dict):
+            return str(response)
         return response["text"]
+
 if __name__ == '__main__':
     from rich import print
     ai = AndiSearch()
     response = ai.chat("tell me about india")
     for chunk in response:
-        print(chunk, end="", flush=True)
+        print(chunk, end="", flush=True)