webscout 8.2.9__py3-none-any.whl → 2026.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- webscout/AIauto.py +524 -251
- webscout/AIbase.py +247 -319
- webscout/AIutel.py +68 -703
- webscout/Bard.py +1072 -1026
- webscout/Extra/GitToolkit/__init__.py +10 -10
- webscout/Extra/GitToolkit/gitapi/__init__.py +20 -12
- webscout/Extra/GitToolkit/gitapi/gist.py +142 -0
- webscout/Extra/GitToolkit/gitapi/organization.py +91 -0
- webscout/Extra/GitToolkit/gitapi/repository.py +308 -195
- webscout/Extra/GitToolkit/gitapi/search.py +162 -0
- webscout/Extra/GitToolkit/gitapi/trending.py +236 -0
- webscout/Extra/GitToolkit/gitapi/user.py +128 -96
- webscout/Extra/GitToolkit/gitapi/utils.py +82 -62
- webscout/Extra/YTToolkit/README.md +443 -375
- webscout/Extra/YTToolkit/YTdownloader.py +953 -957
- webscout/Extra/YTToolkit/__init__.py +3 -3
- webscout/Extra/YTToolkit/transcriber.py +595 -476
- webscout/Extra/YTToolkit/ytapi/README.md +230 -44
- webscout/Extra/YTToolkit/ytapi/__init__.py +22 -6
- webscout/Extra/YTToolkit/ytapi/captions.py +190 -0
- webscout/Extra/YTToolkit/ytapi/channel.py +302 -307
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
- webscout/Extra/YTToolkit/ytapi/extras.py +178 -118
- webscout/Extra/YTToolkit/ytapi/hashtag.py +120 -0
- webscout/Extra/YTToolkit/ytapi/https.py +89 -88
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
- webscout/Extra/YTToolkit/ytapi/playlist.py +59 -59
- webscout/Extra/YTToolkit/ytapi/pool.py +8 -8
- webscout/Extra/YTToolkit/ytapi/query.py +143 -40
- webscout/Extra/YTToolkit/ytapi/shorts.py +122 -0
- webscout/Extra/YTToolkit/ytapi/stream.py +68 -63
- webscout/Extra/YTToolkit/ytapi/suggestions.py +97 -0
- webscout/Extra/YTToolkit/ytapi/utils.py +66 -62
- webscout/Extra/YTToolkit/ytapi/video.py +403 -232
- webscout/Extra/__init__.py +2 -3
- webscout/Extra/gguf.py +1298 -684
- webscout/Extra/tempmail/README.md +487 -487
- webscout/Extra/tempmail/__init__.py +28 -28
- webscout/Extra/tempmail/async_utils.py +143 -141
- webscout/Extra/tempmail/base.py +172 -161
- webscout/Extra/tempmail/cli.py +191 -187
- webscout/Extra/tempmail/emailnator.py +88 -84
- webscout/Extra/tempmail/mail_tm.py +378 -361
- webscout/Extra/tempmail/temp_mail_io.py +304 -292
- webscout/Extra/weather.py +196 -194
- webscout/Extra/weather_ascii.py +17 -15
- webscout/Provider/AISEARCH/PERPLEXED_search.py +175 -0
- webscout/Provider/AISEARCH/Perplexity.py +292 -333
- webscout/Provider/AISEARCH/README.md +106 -279
- webscout/Provider/AISEARCH/__init__.py +16 -9
- webscout/Provider/AISEARCH/brave_search.py +298 -0
- webscout/Provider/AISEARCH/iask_search.py +357 -410
- webscout/Provider/AISEARCH/monica_search.py +200 -220
- webscout/Provider/AISEARCH/webpilotai_search.py +242 -255
- webscout/Provider/Algion.py +413 -0
- webscout/Provider/Andi.py +74 -69
- webscout/Provider/Apriel.py +313 -0
- webscout/Provider/Ayle.py +323 -0
- webscout/Provider/ChatSandbox.py +329 -342
- webscout/Provider/ClaudeOnline.py +365 -0
- webscout/Provider/Cohere.py +232 -208
- webscout/Provider/DeepAI.py +367 -0
- webscout/Provider/Deepinfra.py +467 -340
- webscout/Provider/EssentialAI.py +217 -0
- webscout/Provider/ExaAI.py +274 -261
- webscout/Provider/Gemini.py +175 -169
- webscout/Provider/GithubChat.py +385 -369
- webscout/Provider/Gradient.py +286 -0
- webscout/Provider/Groq.py +556 -801
- webscout/Provider/HadadXYZ.py +323 -0
- webscout/Provider/HeckAI.py +392 -375
- webscout/Provider/HuggingFace.py +387 -0
- webscout/Provider/IBM.py +340 -0
- webscout/Provider/Jadve.py +317 -291
- webscout/Provider/K2Think.py +306 -0
- webscout/Provider/Koboldai.py +221 -384
- webscout/Provider/Netwrck.py +273 -270
- webscout/Provider/Nvidia.py +310 -0
- webscout/Provider/OPENAI/DeepAI.py +489 -0
- webscout/Provider/OPENAI/K2Think.py +423 -0
- webscout/Provider/OPENAI/PI.py +463 -0
- webscout/Provider/OPENAI/README.md +890 -952
- webscout/Provider/OPENAI/TogetherAI.py +405 -0
- webscout/Provider/OPENAI/TwoAI.py +255 -357
- webscout/Provider/OPENAI/__init__.py +148 -40
- webscout/Provider/OPENAI/ai4chat.py +348 -293
- webscout/Provider/OPENAI/akashgpt.py +436 -0
- webscout/Provider/OPENAI/algion.py +303 -0
- webscout/Provider/OPENAI/{exachat.py → ayle.py} +365 -444
- webscout/Provider/OPENAI/base.py +253 -249
- webscout/Provider/OPENAI/cerebras.py +296 -0
- webscout/Provider/OPENAI/chatgpt.py +870 -556
- webscout/Provider/OPENAI/chatsandbox.py +233 -173
- webscout/Provider/OPENAI/deepinfra.py +403 -322
- webscout/Provider/OPENAI/e2b.py +2370 -1414
- webscout/Provider/OPENAI/elmo.py +278 -0
- webscout/Provider/OPENAI/exaai.py +452 -417
- webscout/Provider/OPENAI/freeassist.py +446 -0
- webscout/Provider/OPENAI/gradient.py +448 -0
- webscout/Provider/OPENAI/groq.py +380 -364
- webscout/Provider/OPENAI/hadadxyz.py +292 -0
- webscout/Provider/OPENAI/heckai.py +333 -308
- webscout/Provider/OPENAI/huggingface.py +321 -0
- webscout/Provider/OPENAI/ibm.py +425 -0
- webscout/Provider/OPENAI/llmchat.py +253 -0
- webscout/Provider/OPENAI/llmchatco.py +378 -335
- webscout/Provider/OPENAI/meta.py +541 -0
- webscout/Provider/OPENAI/netwrck.py +374 -357
- webscout/Provider/OPENAI/nvidia.py +317 -0
- webscout/Provider/OPENAI/oivscode.py +348 -287
- webscout/Provider/OPENAI/openrouter.py +328 -0
- webscout/Provider/OPENAI/pydantic_imports.py +1 -172
- webscout/Provider/OPENAI/sambanova.py +397 -0
- webscout/Provider/OPENAI/sonus.py +305 -304
- webscout/Provider/OPENAI/textpollinations.py +370 -339
- webscout/Provider/OPENAI/toolbaz.py +375 -413
- webscout/Provider/OPENAI/typefully.py +419 -355
- webscout/Provider/OPENAI/typliai.py +279 -0
- webscout/Provider/OPENAI/utils.py +314 -318
- webscout/Provider/OPENAI/wisecat.py +359 -387
- webscout/Provider/OPENAI/writecream.py +185 -163
- webscout/Provider/OPENAI/x0gpt.py +462 -365
- webscout/Provider/OPENAI/zenmux.py +380 -0
- webscout/Provider/OpenRouter.py +386 -0
- webscout/Provider/Openai.py +337 -496
- webscout/Provider/PI.py +443 -429
- webscout/Provider/QwenLM.py +346 -254
- webscout/Provider/STT/__init__.py +28 -0
- webscout/Provider/STT/base.py +303 -0
- webscout/Provider/STT/elevenlabs.py +264 -0
- webscout/Provider/Sambanova.py +317 -0
- webscout/Provider/TTI/README.md +69 -82
- webscout/Provider/TTI/__init__.py +37 -7
- webscout/Provider/TTI/base.py +147 -64
- webscout/Provider/TTI/claudeonline.py +393 -0
- webscout/Provider/TTI/magicstudio.py +292 -201
- webscout/Provider/TTI/miragic.py +180 -0
- webscout/Provider/TTI/pollinations.py +331 -221
- webscout/Provider/TTI/together.py +334 -0
- webscout/Provider/TTI/utils.py +14 -11
- webscout/Provider/TTS/README.md +186 -192
- webscout/Provider/TTS/__init__.py +43 -10
- webscout/Provider/TTS/base.py +523 -159
- webscout/Provider/TTS/deepgram.py +286 -156
- webscout/Provider/TTS/elevenlabs.py +189 -111
- webscout/Provider/TTS/freetts.py +218 -0
- webscout/Provider/TTS/murfai.py +288 -113
- webscout/Provider/TTS/openai_fm.py +364 -129
- webscout/Provider/TTS/parler.py +203 -111
- webscout/Provider/TTS/qwen.py +334 -0
- webscout/Provider/TTS/sherpa.py +286 -0
- webscout/Provider/TTS/speechma.py +693 -580
- webscout/Provider/TTS/streamElements.py +275 -333
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TextPollinationsAI.py +331 -308
- webscout/Provider/TogetherAI.py +450 -0
- webscout/Provider/TwoAI.py +309 -475
- webscout/Provider/TypliAI.py +311 -305
- webscout/Provider/UNFINISHED/ChatHub.py +219 -209
- webscout/Provider/{OPENAI/glider.py → UNFINISHED/ChutesAI.py} +331 -326
- webscout/Provider/{GizAI.py → UNFINISHED/GizAI.py} +300 -295
- webscout/Provider/{Marcus.py → UNFINISHED/Marcus.py} +218 -198
- webscout/Provider/UNFINISHED/Qodo.py +481 -0
- webscout/Provider/{MCPCore.py → UNFINISHED/XenAI.py} +330 -315
- webscout/Provider/UNFINISHED/Youchat.py +347 -330
- webscout/Provider/UNFINISHED/aihumanizer.py +41 -0
- webscout/Provider/UNFINISHED/grammerchecker.py +37 -0
- webscout/Provider/UNFINISHED/liner.py +342 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +246 -263
- webscout/Provider/{samurai.py → UNFINISHED/samurai.py} +231 -224
- webscout/Provider/WiseCat.py +256 -233
- webscout/Provider/WrDoChat.py +390 -370
- webscout/Provider/__init__.py +115 -174
- webscout/Provider/ai4chat.py +181 -174
- webscout/Provider/akashgpt.py +330 -335
- webscout/Provider/cerebras.py +397 -290
- webscout/Provider/cleeai.py +236 -213
- webscout/Provider/elmo.py +291 -283
- webscout/Provider/geminiapi.py +343 -208
- webscout/Provider/julius.py +245 -223
- webscout/Provider/learnfastai.py +333 -325
- webscout/Provider/llama3mitril.py +230 -215
- webscout/Provider/llmchat.py +308 -258
- webscout/Provider/llmchatco.py +321 -306
- webscout/Provider/meta.py +996 -801
- webscout/Provider/oivscode.py +332 -309
- webscout/Provider/searchchat.py +316 -292
- webscout/Provider/sonus.py +264 -258
- webscout/Provider/toolbaz.py +359 -353
- webscout/Provider/turboseek.py +332 -266
- webscout/Provider/typefully.py +262 -202
- webscout/Provider/x0gpt.py +332 -299
- webscout/__init__.py +31 -39
- webscout/__main__.py +5 -5
- webscout/cli.py +585 -524
- webscout/client.py +1497 -70
- webscout/conversation.py +140 -436
- webscout/exceptions.py +383 -362
- webscout/litagent/__init__.py +29 -29
- webscout/litagent/agent.py +492 -455
- webscout/litagent/constants.py +60 -60
- webscout/models.py +505 -181
- webscout/optimizers.py +74 -420
- webscout/prompt_manager.py +376 -288
- webscout/sanitize.py +1514 -0
- webscout/scout/README.md +452 -404
- webscout/scout/__init__.py +8 -8
- webscout/scout/core/__init__.py +7 -7
- webscout/scout/core/crawler.py +330 -210
- webscout/scout/core/scout.py +800 -607
- webscout/scout/core/search_result.py +51 -96
- webscout/scout/core/text_analyzer.py +64 -63
- webscout/scout/core/text_utils.py +412 -277
- webscout/scout/core/web_analyzer.py +54 -52
- webscout/scout/element.py +872 -478
- webscout/scout/parsers/__init__.py +70 -69
- webscout/scout/parsers/html5lib_parser.py +182 -172
- webscout/scout/parsers/html_parser.py +238 -236
- webscout/scout/parsers/lxml_parser.py +203 -178
- webscout/scout/utils.py +38 -37
- webscout/search/__init__.py +47 -0
- webscout/search/base.py +201 -0
- webscout/search/bing_main.py +45 -0
- webscout/search/brave_main.py +92 -0
- webscout/search/duckduckgo_main.py +57 -0
- webscout/search/engines/__init__.py +127 -0
- webscout/search/engines/bing/__init__.py +15 -0
- webscout/search/engines/bing/base.py +35 -0
- webscout/search/engines/bing/images.py +114 -0
- webscout/search/engines/bing/news.py +96 -0
- webscout/search/engines/bing/suggestions.py +36 -0
- webscout/search/engines/bing/text.py +109 -0
- webscout/search/engines/brave/__init__.py +19 -0
- webscout/search/engines/brave/base.py +47 -0
- webscout/search/engines/brave/images.py +213 -0
- webscout/search/engines/brave/news.py +353 -0
- webscout/search/engines/brave/suggestions.py +318 -0
- webscout/search/engines/brave/text.py +167 -0
- webscout/search/engines/brave/videos.py +364 -0
- webscout/search/engines/duckduckgo/__init__.py +25 -0
- webscout/search/engines/duckduckgo/answers.py +80 -0
- webscout/search/engines/duckduckgo/base.py +189 -0
- webscout/search/engines/duckduckgo/images.py +100 -0
- webscout/search/engines/duckduckgo/maps.py +183 -0
- webscout/search/engines/duckduckgo/news.py +70 -0
- webscout/search/engines/duckduckgo/suggestions.py +22 -0
- webscout/search/engines/duckduckgo/text.py +221 -0
- webscout/search/engines/duckduckgo/translate.py +48 -0
- webscout/search/engines/duckduckgo/videos.py +80 -0
- webscout/search/engines/duckduckgo/weather.py +84 -0
- webscout/search/engines/mojeek.py +61 -0
- webscout/search/engines/wikipedia.py +77 -0
- webscout/search/engines/yahoo/__init__.py +41 -0
- webscout/search/engines/yahoo/answers.py +19 -0
- webscout/search/engines/yahoo/base.py +34 -0
- webscout/search/engines/yahoo/images.py +323 -0
- webscout/search/engines/yahoo/maps.py +19 -0
- webscout/search/engines/yahoo/news.py +258 -0
- webscout/search/engines/yahoo/suggestions.py +140 -0
- webscout/search/engines/yahoo/text.py +273 -0
- webscout/search/engines/yahoo/translate.py +19 -0
- webscout/search/engines/yahoo/videos.py +302 -0
- webscout/search/engines/yahoo/weather.py +220 -0
- webscout/search/engines/yandex.py +67 -0
- webscout/search/engines/yep/__init__.py +13 -0
- webscout/search/engines/yep/base.py +34 -0
- webscout/search/engines/yep/images.py +101 -0
- webscout/search/engines/yep/suggestions.py +38 -0
- webscout/search/engines/yep/text.py +99 -0
- webscout/search/http_client.py +172 -0
- webscout/search/results.py +141 -0
- webscout/search/yahoo_main.py +57 -0
- webscout/search/yep_main.py +48 -0
- webscout/server/__init__.py +48 -0
- webscout/server/config.py +78 -0
- webscout/server/exceptions.py +69 -0
- webscout/server/providers.py +286 -0
- webscout/server/request_models.py +131 -0
- webscout/server/request_processing.py +404 -0
- webscout/server/routes.py +642 -0
- webscout/server/server.py +351 -0
- webscout/server/ui_templates.py +1171 -0
- webscout/swiftcli/__init__.py +79 -95
- webscout/swiftcli/core/__init__.py +7 -7
- webscout/swiftcli/core/cli.py +574 -297
- webscout/swiftcli/core/context.py +98 -104
- webscout/swiftcli/core/group.py +268 -241
- webscout/swiftcli/decorators/__init__.py +28 -28
- webscout/swiftcli/decorators/command.py +243 -221
- webscout/swiftcli/decorators/options.py +247 -220
- webscout/swiftcli/decorators/output.py +392 -252
- webscout/swiftcli/exceptions.py +21 -21
- webscout/swiftcli/plugins/__init__.py +9 -9
- webscout/swiftcli/plugins/base.py +134 -135
- webscout/swiftcli/plugins/manager.py +269 -269
- webscout/swiftcli/utils/__init__.py +58 -59
- webscout/swiftcli/utils/formatting.py +251 -252
- webscout/swiftcli/utils/parsing.py +368 -267
- webscout/update_checker.py +280 -136
- webscout/utils.py +28 -14
- webscout/version.py +2 -1
- webscout/version.py.bak +3 -0
- webscout/zeroart/__init__.py +218 -135
- webscout/zeroart/base.py +70 -66
- webscout/zeroart/effects.py +155 -101
- webscout/zeroart/fonts.py +1799 -1239
- webscout-2026.1.19.dist-info/METADATA +638 -0
- webscout-2026.1.19.dist-info/RECORD +312 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/WHEEL +1 -1
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/entry_points.txt +1 -1
- webscout/DWEBS.py +0 -520
- webscout/Extra/Act.md +0 -309
- webscout/Extra/GitToolkit/gitapi/README.md +0 -110
- webscout/Extra/autocoder/__init__.py +0 -9
- webscout/Extra/autocoder/autocoder.py +0 -1105
- webscout/Extra/autocoder/autocoder_utiles.py +0 -332
- webscout/Extra/gguf.md +0 -430
- webscout/Extra/weather.md +0 -281
- webscout/Litlogger/README.md +0 -10
- webscout/Litlogger/__init__.py +0 -15
- webscout/Litlogger/formats.py +0 -4
- webscout/Litlogger/handlers.py +0 -103
- webscout/Litlogger/levels.py +0 -13
- webscout/Litlogger/logger.py +0 -92
- webscout/Provider/AI21.py +0 -177
- webscout/Provider/AISEARCH/DeepFind.py +0 -254
- webscout/Provider/AISEARCH/felo_search.py +0 -202
- webscout/Provider/AISEARCH/genspark_search.py +0 -324
- webscout/Provider/AISEARCH/hika_search.py +0 -186
- webscout/Provider/AISEARCH/scira_search.py +0 -298
- webscout/Provider/Aitopia.py +0 -316
- webscout/Provider/AllenAI.py +0 -440
- webscout/Provider/Blackboxai.py +0 -791
- webscout/Provider/ChatGPTClone.py +0 -237
- webscout/Provider/ChatGPTGratis.py +0 -194
- webscout/Provider/Cloudflare.py +0 -324
- webscout/Provider/ExaChat.py +0 -358
- webscout/Provider/Flowith.py +0 -217
- webscout/Provider/FreeGemini.py +0 -250
- webscout/Provider/Glider.py +0 -225
- webscout/Provider/HF_space/__init__.py +0 -0
- webscout/Provider/HF_space/qwen_qwen2.py +0 -206
- webscout/Provider/HuggingFaceChat.py +0 -469
- webscout/Provider/Hunyuan.py +0 -283
- webscout/Provider/LambdaChat.py +0 -411
- webscout/Provider/Llama3.py +0 -259
- webscout/Provider/Nemotron.py +0 -218
- webscout/Provider/OLLAMA.py +0 -396
- webscout/Provider/OPENAI/BLACKBOXAI.py +0 -766
- webscout/Provider/OPENAI/Cloudflare.py +0 -378
- webscout/Provider/OPENAI/FreeGemini.py +0 -283
- webscout/Provider/OPENAI/NEMOTRON.py +0 -232
- webscout/Provider/OPENAI/Qwen3.py +0 -283
- webscout/Provider/OPENAI/api.py +0 -969
- webscout/Provider/OPENAI/c4ai.py +0 -373
- webscout/Provider/OPENAI/chatgptclone.py +0 -494
- webscout/Provider/OPENAI/copilot.py +0 -242
- webscout/Provider/OPENAI/flowith.py +0 -162
- webscout/Provider/OPENAI/freeaichat.py +0 -359
- webscout/Provider/OPENAI/mcpcore.py +0 -389
- webscout/Provider/OPENAI/multichat.py +0 -376
- webscout/Provider/OPENAI/opkfc.py +0 -496
- webscout/Provider/OPENAI/scirachat.py +0 -477
- webscout/Provider/OPENAI/standardinput.py +0 -433
- webscout/Provider/OPENAI/typegpt.py +0 -364
- webscout/Provider/OPENAI/uncovrAI.py +0 -463
- webscout/Provider/OPENAI/venice.py +0 -431
- webscout/Provider/OPENAI/yep.py +0 -382
- webscout/Provider/OpenGPT.py +0 -209
- webscout/Provider/Perplexitylabs.py +0 -415
- webscout/Provider/Reka.py +0 -214
- webscout/Provider/StandardInput.py +0 -290
- webscout/Provider/TTI/aiarta.py +0 -365
- webscout/Provider/TTI/artbit.py +0 -0
- webscout/Provider/TTI/fastflux.py +0 -200
- webscout/Provider/TTI/piclumen.py +0 -203
- webscout/Provider/TTI/pixelmuse.py +0 -225
- webscout/Provider/TTS/gesserit.py +0 -128
- webscout/Provider/TTS/sthir.py +0 -94
- webscout/Provider/TeachAnything.py +0 -229
- webscout/Provider/UNFINISHED/puterjs.py +0 -635
- webscout/Provider/UNFINISHED/test_lmarena.py +0 -119
- webscout/Provider/Venice.py +0 -258
- webscout/Provider/VercelAI.py +0 -253
- webscout/Provider/Writecream.py +0 -246
- webscout/Provider/WritingMate.py +0 -269
- webscout/Provider/asksteve.py +0 -220
- webscout/Provider/chatglm.py +0 -215
- webscout/Provider/copilot.py +0 -425
- webscout/Provider/freeaichat.py +0 -285
- webscout/Provider/granite.py +0 -235
- webscout/Provider/hermes.py +0 -266
- webscout/Provider/koala.py +0 -170
- webscout/Provider/lmarena.py +0 -198
- webscout/Provider/multichat.py +0 -364
- webscout/Provider/scira_chat.py +0 -299
- webscout/Provider/scnet.py +0 -243
- webscout/Provider/talkai.py +0 -194
- webscout/Provider/typegpt.py +0 -289
- webscout/Provider/uncovr.py +0 -368
- webscout/Provider/yep.py +0 -389
- webscout/litagent/Readme.md +0 -276
- webscout/litprinter/__init__.py +0 -59
- webscout/swiftcli/Readme.md +0 -323
- webscout/tempid.py +0 -128
- webscout/webscout_search.py +0 -1184
- webscout/webscout_search_async.py +0 -654
- webscout/yep_search.py +0 -347
- webscout/zeroart/README.md +0 -89
- webscout-8.2.9.dist-info/METADATA +0 -1033
- webscout-8.2.9.dist-info/RECORD +0 -289
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.2.9.dist-info → webscout-2026.1.19.dist-info}/top_level.txt +0 -0
webscout/Provider/Openai.py
CHANGED
@@ -1,496 +1,337 @@
[removed lines 1-338 were not preserved in this rendering beyond fragments such as "import json" and "from webscout import exceptions"; the recoverable tail of the old httpx-based async implementation follows]
-        Args:
-            prompt (str): Prompt to be send.
-            stream (bool, optional): Flag for streaming response. Defaults to False.
-            raw (bool, optional): Stream back raw response as received. Defaults to False.
-            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
-            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-            tools (Optional[List[Dict[str, Any]]], optional): List of tools to be used. Defaults to None.
-        Returns:
-            dict|AsyncGenerator : ai content.
-        ```json
-        {
-            "id": "chatcmpl-TaREJpBZsRVQFRFic1wIA7Q7XfnaD",
-            "object": "chat.completion",
-            "created": 1704623244,
-            "model": "gpt-3.5-turbo",
-            "usage": {
-                "prompt_tokens": 0,
-                "completion_tokens": 0,
-                "total_tokens": 0
-            },
-            "choices": [
-                {
-                    "message": {
-                        "role": "assistant",
-                        "content": "Hello! How can I assist you today?"
-                    },
-                    "finish_reason": "stop",
-                    "index": 0
-                }
-            ]
-        }
-        ```
-        """
-        conversation_prompt = self.conversation.gen_complete_prompt(prompt)
-        if optimizer:
-            if optimizer in self.__available_optimizers:
-                conversation_prompt = getattr(Optimizers, optimizer)(
-                    conversation_prompt if conversationally else prompt
-                )
-            else:
-                raise Exception(
-                    f"Optimizer is not one of {self.__available_optimizers}"
-                )
-        payload = {
-            "frequency_penalty": self.frequency_penalty,
-            "messages": [{"content": conversation_prompt, "role": "user"}],
-            "model": self.model,
-            "presence_penalty": self.presence_penalty,
-            "stream": stream,
-            "temperature": self.temperature,
-            "top_p": self.top_p,
-        }
-
-        async def for_stream():
-            async with self.session.stream(
-                "POST", self.chat_endpoint, json=payload, timeout=self.timeout
-            ) as response:
-                if not response.is_success:
-                    raise Exception(
-                        f"Failed to generate response - ({response.status_code}, {response.reason_phrase})"
-                    )
-
-                message_load = ""
-                async for value in response.aiter_lines():
-                    try:
-
-                        resp = sanitize_stream(value)
-                        incomplete_message = await self.get_message(resp)
-                        if incomplete_message:
-                            message_load += incomplete_message
-                            resp["choices"][0]["delta"]["content"] = message_load
-                            self.last_response.update(resp)
-                            yield value if raw else resp
-                        elif raw:
-                            yield value
-                    except json.decoder.JSONDecodeError:
-                        pass
-            self.conversation.update_chat_history(
-                prompt, await self.get_message(self.last_response)
-            )
-
-        async def for_non_stream():
-            response = httpx.post(
-                self.chat_endpoint,
-                json=payload,
-                timeout=self.timeout,
-                headers=self.headers,
-            )
-            if (
-                not response.is_success
-                or not response.headers.get("Content-Type", "") == "application/json"
-            ):
-                raise Exception(
-                    f"Failed to generate response - ({response.status_code}, {response.reason_phrase})"
-                )
-            resp = response.json()
-            self.last_response.update(resp)
-            self.conversation.update_chat_history(
-                prompt, await self.get_message(self.last_response)
-            )
-            return resp
-
-        return for_stream() if stream else await for_non_stream()
-
-    async def chat(
-        self,
-        prompt: str,
-        stream: bool = False,
-        optimizer: str = None,
-        conversationally: bool = False,
-        tools: Optional[List[Dict[str, Any]]] = None,
-    ) -> Union[str, AsyncGenerator]:
-        """Generate response `str` asynchronously.
-        Args:
-            prompt (str): Prompt to be send.
-            stream (bool, optional): Flag for streaming response. Defaults to False.
-            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
-            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-            tools (Optional[List[Dict[str, Any]]], optional): List of tools to be used. Defaults to None.
-        Returns:
-            str|AsyncGenerator: Response generated
-        """
-
-        async def for_stream():
-            async_ask = await self.ask(
-                prompt, True, optimizer=optimizer, conversationally=conversationally
-            )
-            async for response in async_ask:
-                yield await self.get_message(response)
-
-        async def for_non_stream():
-            return await self.get_message(
-                await self.ask(
-                    prompt,
-                    False,
-                    optimizer=optimizer,
-                    conversationally=conversationally,
-                )
-            )
-
-        return for_stream() if stream else await for_non_stream()
-
-    async def get_message(self, response: dict) -> str:
-        """Retrieves message only from response asynchronously.
-
-        Args:
-            response (dict): Response generated by `self.ask`
-
-        Returns:
-            str: Message extracted
-        """
-        assert isinstance(response, dict), "Response should be of dict data-type only"
-        try:
-            if response["choices"][0].get("delta"):
-                return response["choices"][0]["delta"]["content"]
-            return response["choices"][0]["message"]["content"]
-        except KeyError:
-            return ""
+import json
+from typing import Any, Dict, Generator, Optional, Union, cast
+
+from curl_cffi import CurlError
+from curl_cffi.requests import Session
+
+from webscout import exceptions
+from webscout.AIbase import Provider, Response
+from webscout.AIutel import AwesomePrompts, Conversation, Optimizers, sanitize_stream
+from webscout.litagent import LitAgent
+
+
+class OpenAI(Provider):
+    """
+    A class to interact with the OpenAI API with LitAgent user-agent.
+    """
+
+    required_auth = True
+
+    @classmethod
+    def get_models(cls, api_key: Optional[str] = None):
+        """Fetch available models from OpenAI API.
+
+        Args:
+            api_key (str, optional): OpenAI API key
+
+        Returns:
+            list: List of available model IDs
+        """
+        if not api_key:
+            return []
+        try:
+            # Use a temporary curl_cffi session for this class method
+            temp_session = Session()
+            headers = {
+                "Authorization": f"Bearer {api_key}",
+            }
+
+            response = temp_session.get(
+                "https://api.openai.com/v1/models", headers=headers, impersonate="chrome110"
+            )
+
+            if response.status_code != 200:
+                raise Exception(
+                    f"API request failed with status {response.status_code}: {response.text}"
+                )
+
+            data = response.json()
+            if "data" in data and isinstance(data["data"], list):
+                return [model["id"] for model in data["data"] if "id" in model]
+            raise Exception("Invalid response format from API")
+
+        except (CurlError, Exception) as e:
+            raise Exception(f"Failed to fetch models: {str(e)}")
+
+    @staticmethod
+    def _openai_extractor(chunk: Union[str, Dict[str, Any]]) -> Optional[str]:
+        """Extracts content from OpenAI stream JSON objects."""
+        if isinstance(chunk, dict):
+            return chunk.get("choices", [{}])[0].get("delta", {}).get("content")
+        return None
+
+    def __init__(
+        self,
+        api_key: str,
+        is_conversation: bool = True,
+        max_tokens: int = 600,
+        temperature: float = 1,
+        presence_penalty: int = 0,
+        frequency_penalty: int = 0,
+        top_p: float = 1,
+        model: str = "gpt-3.5-turbo",
+        timeout: int = 30,
+        intro: Optional[str] = None,
+        filepath: Optional[str] = None,
+        update_file: bool = True,
+        proxies: dict = {},
+        history_offset: int = 10250,
+        act: Optional[str] = None,
+        base_url: str = "https://api.openai.com/v1/chat/completions",
+        system_prompt: str = "You are a helpful assistant.",
+        browser: str = "chrome",
+    ):
+        """Initializes the OpenAI API client."""
+        self.url = base_url
+
+        # Initialize LitAgent
+        self.agent = LitAgent()
+        self.fingerprint = self.agent.generate_fingerprint(browser)
+        self.api_key = api_key
+        # Use the fingerprint for headers
+        self.headers = {
+            "Accept": self.fingerprint["accept"],
+            "Accept-Language": self.fingerprint["accept_language"],
+            "User-Agent": self.fingerprint.get("user_agent", ""),
+        }
+        if self.api_key:
+            self.headers["Authorization"] = f"Bearer {self.api_key}"
+
+        # Initialize curl_cffi Session
+        self.session = Session()
+        # Update curl_cffi session headers and proxies
+        self.session.headers.update(self.headers)
+        if proxies:
+            self.session.proxies.update(cast(Any, proxies))  # Assign proxies directly
+        self.system_prompt = system_prompt
+        self.is_conversation = is_conversation
+        self.max_tokens_to_sample = max_tokens
+        self.timeout = timeout
+        self.last_response = {}
+        self.model = model
+        self.temperature = temperature
+        self.presence_penalty = presence_penalty
+        self.frequency_penalty = frequency_penalty
+        self.top_p = top_p
+
+        # Fetch available models
+        try:
+            self.available_models = self.get_models(self.api_key)
+        except Exception:
+            self.available_models = []
+
+        if self.available_models and self.model not in self.available_models:
+            raise ValueError(f"Invalid model: {self.model}. Choose from: {self.available_models}")
+
+        self.__available_optimizers = (
+            method
+            for method in dir(Optimizers)
+            if callable(getattr(Optimizers, method)) and not method.startswith("__")
+        )
+        self.conversation = Conversation(
+            is_conversation, self.max_tokens_to_sample, filepath, update_file
+        )
+        self.conversation.history_offset = history_offset
+
+        if act:
+            self.conversation.intro = (
+                AwesomePrompts().get_act(
+                    cast(Union[str, int], act),
+                    default=self.conversation.intro,
+                    case_insensitive=True,
+                )
+                or self.conversation.intro
+            )
+        elif intro:
+            self.conversation.intro = intro
+
+    def refresh_identity(self, browser: Optional[str] = None):
+        """
+        Refreshes the browser identity fingerprint.
+
+        Args:
+            browser: Specific browser to use for the new fingerprint
+        """
+        browser = browser or self.fingerprint.get("browser_type", "chrome")
+        self.fingerprint = self.agent.generate_fingerprint(browser)
+
+        # Update headers with new fingerprint (only relevant ones)
+        self.headers.update(
+            {
+                "Accept": self.fingerprint["accept"],
+                "Accept-Language": self.fingerprint["accept_language"],
+            }
+        )
+
+        # Update session headers
+        self.session.headers.update(self.headers)
+
+        return self.fingerprint
+
+    def ask(
+        self,
+        prompt: str,
+        stream: bool = False,
+        raw: bool = False,
+        optimizer: Optional[str] = None,
+        conversationally: bool = False,
+        **kwargs: Any,
+    ) -> Response:
+        conversation_prompt = self.conversation.gen_complete_prompt(prompt)
+        if optimizer:
+            if optimizer in self.__available_optimizers:
+                conversation_prompt = getattr(Optimizers, optimizer)(
+                    conversation_prompt if conversationally else prompt
+                )
+            else:
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Optimizer is not one of {self.__available_optimizers}"
+                )
+
+        # Payload construction
+        payload = {
+            "model": self.model,
+            "messages": [
+                {"role": "system", "content": self.system_prompt},
+                {"role": "user", "content": conversation_prompt},
+            ],
+            "stream": stream,
+            "temperature": self.temperature,
+            "top_p": self.top_p,
+            "presence_penalty": self.presence_penalty,
+            "frequency_penalty": self.frequency_penalty,
+        }
+
+        def for_stream():
+            streaming_text = ""
+            try:
+                # Use curl_cffi session post with impersonate
+                response = self.session.post(
+                    self.url,
+                    data=json.dumps(payload),
+                    stream=True,
+                    timeout=self.timeout,
+                    impersonate="chrome110",
+                )
+                response.raise_for_status()
+
+                # Use sanitize_stream
+                processed_stream = sanitize_stream(
+                    data=response.iter_content(chunk_size=None),
+                    intro_value="data:",
+                    to_json=True,
+                    skip_markers=["[DONE]"],
+                    content_extractor=self._openai_extractor,
+                    yield_raw_on_error=False,
+                    raw=raw,
+                )
+
+                for content_chunk in processed_stream:
+                    if raw:
+                        yield content_chunk
+                    else:
+                        if content_chunk and isinstance(content_chunk, str):
+                            streaming_text += content_chunk
+                            resp = dict(text=content_chunk)
+                            yield resp if not raw else content_chunk
+
+            except CurlError as e:
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed (CurlError): {str(e)}"
+                ) from e
+            except Exception as e:
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed ({type(e).__name__}): {str(e)}"
+                ) from e
+            finally:
+                if streaming_text:
+                    self.last_response = {"text": streaming_text}
+                    self.conversation.update_chat_history(prompt, streaming_text)
+
+        def for_non_stream():
+            try:
+                # Use curl_cffi session post with impersonate for non-streaming
+                response = self.session.post(
+                    self.url,
+                    data=json.dumps(payload),
+                    timeout=self.timeout,
+                    impersonate="chrome110",
+                )
+                response.raise_for_status()
+
+                response_text = response.text
+
+                # Use sanitize_stream to parse the non-streaming JSON response
+                processed_stream = sanitize_stream(
+                    data=response_text,
+                    to_json=True,
+                    intro_value=None,
+                    content_extractor=lambda chunk: chunk.get("choices", [{}])[0]
+                    .get("message", {})
+                    .get("content")
+                    if isinstance(chunk, dict)
+                    else None,
+                    yield_raw_on_error=False,
+                    raw=raw,
+                )
+                # Extract the single result
+                content = next(processed_stream, None)
+                if raw:
+                    return content
+                content = content if isinstance(content, str) else ""
+
+                self.last_response = {"text": content}
+                self.conversation.update_chat_history(prompt, content)
+                return self.last_response if not raw else content
+
+            except CurlError as e:
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed (CurlError): {e}"
+                ) from e
+            except Exception as e:
+                err_text = ""
+                if hasattr(e, "response"):
+                    response_obj = getattr(e, "response")
+                    if hasattr(response_obj, "text"):
+                        err_text = getattr(response_obj, "text")
+                raise exceptions.FailedToGenerateResponseError(
+                    f"Request failed ({type(e).__name__}): {e} - {err_text}"
+                ) from e
+
+        return for_stream() if stream else for_non_stream()
+
+    def chat(
+        self,
+        prompt: str,
+        stream: bool = False,
+        optimizer: Optional[str] = None,
+        conversationally: bool = False,
+        **kwargs: Any,
+    ) -> Union[str, Generator[str, None, None]]:
+        def for_stream_chat():
+            gen = self.ask(
+                prompt,
+                stream=True,
+                raw=False,
+                optimizer=optimizer,
+                conversationally=conversationally,
+            )
+            for response_dict in gen:
+                yield self.get_message(response_dict)
+
+        def for_non_stream_chat():
+            response_data = self.ask(
+                prompt,
+                stream=False,
+                raw=False,
+                optimizer=optimizer,
+                conversationally=conversationally,
+            )
+            return self.get_message(response_data)
+
+        return for_stream_chat() if stream else for_non_stream_chat()
+
+    def get_message(self, response: Response) -> str:
+        if not isinstance(response, dict):
+            return str(response)
+        return cast(Dict[str, Any], response).get("text", "")