webscout 8.3.6__py3-none-any.whl → 2025.10.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of webscout might be problematic.
- webscout/AIauto.py +250 -250
- webscout/AIbase.py +379 -379
- webscout/AIutel.py +60 -58
- webscout/Bard.py +1012 -1012
- webscout/Bing_search.py +417 -417
- webscout/DWEBS.py +529 -529
- webscout/Extra/Act.md +309 -309
- webscout/Extra/GitToolkit/__init__.py +10 -10
- webscout/Extra/GitToolkit/gitapi/README.md +110 -110
- webscout/Extra/GitToolkit/gitapi/__init__.py +11 -11
- webscout/Extra/GitToolkit/gitapi/repository.py +195 -195
- webscout/Extra/GitToolkit/gitapi/user.py +96 -96
- webscout/Extra/GitToolkit/gitapi/utils.py +61 -61
- webscout/Extra/YTToolkit/README.md +375 -375
- webscout/Extra/YTToolkit/YTdownloader.py +956 -956
- webscout/Extra/YTToolkit/__init__.py +2 -2
- webscout/Extra/YTToolkit/transcriber.py +475 -475
- webscout/Extra/YTToolkit/ytapi/README.md +44 -44
- webscout/Extra/YTToolkit/ytapi/__init__.py +6 -6
- webscout/Extra/YTToolkit/ytapi/channel.py +307 -307
- webscout/Extra/YTToolkit/ytapi/errors.py +13 -13
- webscout/Extra/YTToolkit/ytapi/extras.py +118 -118
- webscout/Extra/YTToolkit/ytapi/https.py +88 -88
- webscout/Extra/YTToolkit/ytapi/patterns.py +61 -61
- webscout/Extra/YTToolkit/ytapi/playlist.py +58 -58
- webscout/Extra/YTToolkit/ytapi/pool.py +7 -7
- webscout/Extra/YTToolkit/ytapi/query.py +39 -39
- webscout/Extra/YTToolkit/ytapi/stream.py +62 -62
- webscout/Extra/YTToolkit/ytapi/utils.py +62 -62
- webscout/Extra/YTToolkit/ytapi/video.py +232 -232
- webscout/Extra/autocoder/__init__.py +9 -9
- webscout/Extra/autocoder/autocoder.py +1105 -1105
- webscout/Extra/autocoder/autocoder_utiles.py +332 -332
- webscout/Extra/gguf.md +429 -429
- webscout/Extra/gguf.py +1213 -1213
- webscout/Extra/tempmail/README.md +487 -487
- webscout/Extra/tempmail/__init__.py +27 -27
- webscout/Extra/tempmail/async_utils.py +140 -140
- webscout/Extra/tempmail/base.py +160 -160
- webscout/Extra/tempmail/cli.py +186 -186
- webscout/Extra/tempmail/emailnator.py +84 -84
- webscout/Extra/tempmail/mail_tm.py +360 -360
- webscout/Extra/tempmail/temp_mail_io.py +291 -291
- webscout/Extra/weather.md +281 -281
- webscout/Extra/weather.py +193 -193
- webscout/Litlogger/README.md +10 -10
- webscout/Litlogger/__init__.py +15 -15
- webscout/Litlogger/formats.py +13 -13
- webscout/Litlogger/handlers.py +121 -121
- webscout/Litlogger/levels.py +13 -13
- webscout/Litlogger/logger.py +134 -134
- webscout/Provider/AISEARCH/Perplexity.py +332 -332
- webscout/Provider/AISEARCH/README.md +279 -279
- webscout/Provider/AISEARCH/__init__.py +33 -11
- webscout/Provider/AISEARCH/felo_search.py +206 -206
- webscout/Provider/AISEARCH/genspark_search.py +323 -323
- webscout/Provider/AISEARCH/hika_search.py +185 -185
- webscout/Provider/AISEARCH/iask_search.py +410 -410
- webscout/Provider/AISEARCH/monica_search.py +219 -219
- webscout/Provider/AISEARCH/scira_search.py +316 -314
- webscout/Provider/AISEARCH/stellar_search.py +177 -177
- webscout/Provider/AISEARCH/webpilotai_search.py +255 -255
- webscout/Provider/Aitopia.py +314 -315
- webscout/Provider/Andi.py +3 -3
- webscout/Provider/Apriel.py +306 -0
- webscout/Provider/ChatGPTClone.py +236 -236
- webscout/Provider/ChatSandbox.py +343 -342
- webscout/Provider/Cloudflare.py +324 -324
- webscout/Provider/Cohere.py +208 -207
- webscout/Provider/Deepinfra.py +370 -369
- webscout/Provider/ExaAI.py +260 -260
- webscout/Provider/ExaChat.py +308 -387
- webscout/Provider/Flowith.py +221 -221
- webscout/Provider/GMI.py +293 -0
- webscout/Provider/Gemini.py +164 -162
- webscout/Provider/GeminiProxy.py +167 -166
- webscout/Provider/GithubChat.py +371 -370
- webscout/Provider/Groq.py +800 -800
- webscout/Provider/HeckAI.py +383 -379
- webscout/Provider/Jadve.py +282 -297
- webscout/Provider/K2Think.py +308 -0
- webscout/Provider/Koboldai.py +206 -384
- webscout/Provider/LambdaChat.py +423 -425
- webscout/Provider/Nemotron.py +244 -245
- webscout/Provider/Netwrck.py +248 -247
- webscout/Provider/OLLAMA.py +395 -394
- webscout/Provider/OPENAI/Cloudflare.py +394 -395
- webscout/Provider/OPENAI/FalconH1.py +452 -457
- webscout/Provider/OPENAI/FreeGemini.py +297 -299
- webscout/Provider/OPENAI/{monochat.py → K2Think.py} +432 -329
- webscout/Provider/OPENAI/NEMOTRON.py +241 -244
- webscout/Provider/OPENAI/PI.py +428 -427
- webscout/Provider/OPENAI/README.md +959 -959
- webscout/Provider/OPENAI/TogetherAI.py +345 -345
- webscout/Provider/OPENAI/TwoAI.py +466 -467
- webscout/Provider/OPENAI/__init__.py +33 -59
- webscout/Provider/OPENAI/ai4chat.py +313 -303
- webscout/Provider/OPENAI/base.py +249 -269
- webscout/Provider/OPENAI/chatglm.py +528 -0
- webscout/Provider/OPENAI/chatgpt.py +593 -588
- webscout/Provider/OPENAI/chatgptclone.py +521 -524
- webscout/Provider/OPENAI/chatsandbox.py +202 -177
- webscout/Provider/OPENAI/deepinfra.py +319 -315
- webscout/Provider/OPENAI/e2b.py +1665 -1665
- webscout/Provider/OPENAI/exaai.py +420 -420
- webscout/Provider/OPENAI/exachat.py +452 -452
- webscout/Provider/OPENAI/friendli.py +232 -232
- webscout/Provider/OPENAI/{refact.py → gmi.py} +324 -274
- webscout/Provider/OPENAI/groq.py +364 -364
- webscout/Provider/OPENAI/heckai.py +314 -311
- webscout/Provider/OPENAI/llmchatco.py +337 -337
- webscout/Provider/OPENAI/netwrck.py +355 -354
- webscout/Provider/OPENAI/oivscode.py +290 -290
- webscout/Provider/OPENAI/opkfc.py +518 -518
- webscout/Provider/OPENAI/pydantic_imports.py +1 -1
- webscout/Provider/OPENAI/scirachat.py +535 -529
- webscout/Provider/OPENAI/sonus.py +308 -308
- webscout/Provider/OPENAI/standardinput.py +442 -442
- webscout/Provider/OPENAI/textpollinations.py +340 -348
- webscout/Provider/OPENAI/toolbaz.py +419 -413
- webscout/Provider/OPENAI/typefully.py +362 -362
- webscout/Provider/OPENAI/utils.py +295 -295
- webscout/Provider/OPENAI/venice.py +436 -436
- webscout/Provider/OPENAI/wisecat.py +387 -387
- webscout/Provider/OPENAI/writecream.py +166 -166
- webscout/Provider/OPENAI/x0gpt.py +378 -378
- webscout/Provider/OPENAI/yep.py +389 -389
- webscout/Provider/OpenGPT.py +230 -230
- webscout/Provider/Openai.py +244 -496
- webscout/Provider/PI.py +405 -404
- webscout/Provider/Perplexitylabs.py +430 -431
- webscout/Provider/QwenLM.py +272 -254
- webscout/Provider/STT/__init__.py +32 -2
- webscout/Provider/{Llama3.py → Sambanova.py} +257 -258
- webscout/Provider/StandardInput.py +309 -309
- webscout/Provider/TTI/README.md +82 -82
- webscout/Provider/TTI/__init__.py +33 -12
- webscout/Provider/TTI/aiarta.py +413 -413
- webscout/Provider/TTI/base.py +136 -136
- webscout/Provider/TTI/bing.py +243 -243
- webscout/Provider/TTI/gpt1image.py +149 -149
- webscout/Provider/TTI/imagen.py +196 -196
- webscout/Provider/TTI/infip.py +211 -211
- webscout/Provider/TTI/magicstudio.py +232 -232
- webscout/Provider/TTI/monochat.py +219 -219
- webscout/Provider/TTI/piclumen.py +214 -214
- webscout/Provider/TTI/pixelmuse.py +232 -232
- webscout/Provider/TTI/pollinations.py +232 -232
- webscout/Provider/TTI/together.py +288 -288
- webscout/Provider/TTI/utils.py +12 -12
- webscout/Provider/TTI/venice.py +367 -367
- webscout/Provider/TTS/README.md +192 -192
- webscout/Provider/TTS/__init__.py +33 -10
- webscout/Provider/TTS/parler.py +110 -110
- webscout/Provider/TTS/streamElements.py +333 -333
- webscout/Provider/TTS/utils.py +280 -280
- webscout/Provider/TeachAnything.py +237 -236
- webscout/Provider/TextPollinationsAI.py +311 -318
- webscout/Provider/TogetherAI.py +356 -357
- webscout/Provider/TwoAI.py +313 -569
- webscout/Provider/TypliAI.py +312 -311
- webscout/Provider/UNFINISHED/ChatHub.py +208 -208
- webscout/Provider/UNFINISHED/ChutesAI.py +313 -313
- webscout/Provider/{GizAI.py → UNFINISHED/GizAI.py} +294 -294
- webscout/Provider/{Marcus.py → UNFINISHED/Marcus.py} +198 -198
- webscout/Provider/{Qodo.py → UNFINISHED/Qodo.py} +477 -477
- webscout/Provider/UNFINISHED/VercelAIGateway.py +338 -338
- webscout/Provider/{XenAI.py → UNFINISHED/XenAI.py} +324 -324
- webscout/Provider/UNFINISHED/Youchat.py +330 -330
- webscout/Provider/UNFINISHED/liner.py +334 -0
- webscout/Provider/UNFINISHED/liner_api_request.py +262 -262
- webscout/Provider/UNFINISHED/puterjs.py +634 -634
- webscout/Provider/UNFINISHED/samurai.py +223 -223
- webscout/Provider/UNFINISHED/test_lmarena.py +119 -119
- webscout/Provider/Venice.py +251 -250
- webscout/Provider/VercelAI.py +256 -255
- webscout/Provider/WiseCat.py +232 -231
- webscout/Provider/WrDoChat.py +367 -366
- webscout/Provider/__init__.py +33 -86
- webscout/Provider/ai4chat.py +174 -174
- webscout/Provider/akashgpt.py +331 -334
- webscout/Provider/cerebras.py +446 -340
- webscout/Provider/chatglm.py +394 -214
- webscout/Provider/cleeai.py +211 -212
- webscout/Provider/deepseek_assistant.py +1 -1
- webscout/Provider/elmo.py +282 -282
- webscout/Provider/geminiapi.py +208 -208
- webscout/Provider/granite.py +261 -261
- webscout/Provider/hermes.py +263 -265
- webscout/Provider/julius.py +223 -222
- webscout/Provider/learnfastai.py +309 -309
- webscout/Provider/llama3mitril.py +214 -214
- webscout/Provider/llmchat.py +243 -243
- webscout/Provider/llmchatco.py +290 -290
- webscout/Provider/meta.py +801 -801
- webscout/Provider/oivscode.py +309 -309
- webscout/Provider/scira_chat.py +384 -457
- webscout/Provider/searchchat.py +292 -291
- webscout/Provider/sonus.py +258 -258
- webscout/Provider/toolbaz.py +370 -364
- webscout/Provider/turboseek.py +274 -265
- webscout/Provider/typefully.py +208 -207
- webscout/Provider/x0gpt.py +1 -0
- webscout/Provider/yep.py +372 -371
- webscout/__init__.py +30 -31
- webscout/__main__.py +5 -5
- webscout/auth/api_key_manager.py +189 -189
- webscout/auth/config.py +175 -175
- webscout/auth/models.py +185 -185
- webscout/auth/routes.py +664 -664
- webscout/auth/simple_logger.py +236 -236
- webscout/cli.py +523 -523
- webscout/conversation.py +438 -438
- webscout/exceptions.py +361 -361
- webscout/litagent/Readme.md +298 -298
- webscout/litagent/__init__.py +28 -28
- webscout/litagent/agent.py +581 -581
- webscout/litagent/constants.py +59 -59
- webscout/litprinter/__init__.py +58 -58
- webscout/models.py +181 -181
- webscout/optimizers.py +419 -419
- webscout/prompt_manager.py +288 -288
- webscout/sanitize.py +1078 -1078
- webscout/scout/README.md +401 -401
- webscout/scout/__init__.py +8 -8
- webscout/scout/core/__init__.py +6 -6
- webscout/scout/core/crawler.py +297 -297
- webscout/scout/core/scout.py +706 -706
- webscout/scout/core/search_result.py +95 -95
- webscout/scout/core/text_analyzer.py +62 -62
- webscout/scout/core/text_utils.py +277 -277
- webscout/scout/core/web_analyzer.py +51 -51
- webscout/scout/element.py +599 -599
- webscout/scout/parsers/__init__.py +69 -69
- webscout/scout/parsers/html5lib_parser.py +172 -172
- webscout/scout/parsers/html_parser.py +236 -236
- webscout/scout/parsers/lxml_parser.py +178 -178
- webscout/scout/utils.py +37 -37
- webscout/swiftcli/Readme.md +323 -323
- webscout/swiftcli/__init__.py +95 -95
- webscout/swiftcli/core/__init__.py +7 -7
- webscout/swiftcli/core/cli.py +308 -308
- webscout/swiftcli/core/context.py +104 -104
- webscout/swiftcli/core/group.py +241 -241
- webscout/swiftcli/decorators/__init__.py +28 -28
- webscout/swiftcli/decorators/command.py +221 -221
- webscout/swiftcli/decorators/options.py +220 -220
- webscout/swiftcli/decorators/output.py +302 -302
- webscout/swiftcli/exceptions.py +21 -21
- webscout/swiftcli/plugins/__init__.py +9 -9
- webscout/swiftcli/plugins/base.py +135 -135
- webscout/swiftcli/plugins/manager.py +269 -269
- webscout/swiftcli/utils/__init__.py +59 -59
- webscout/swiftcli/utils/formatting.py +252 -252
- webscout/swiftcli/utils/parsing.py +267 -267
- webscout/update_checker.py +117 -117
- webscout/version.py +1 -1
- webscout/webscout_search.py +1183 -1183
- webscout/webscout_search_async.py +649 -649
- webscout/yep_search.py +346 -346
- webscout/zeroart/README.md +89 -89
- webscout/zeroart/__init__.py +134 -134
- webscout/zeroart/base.py +66 -66
- webscout/zeroart/effects.py +100 -100
- webscout/zeroart/fonts.py +1238 -1238
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/METADATA +937 -936
- webscout-2025.10.11.dist-info/RECORD +300 -0
- webscout/Provider/AISEARCH/DeepFind.py +0 -254
- webscout/Provider/AllenAI.py +0 -440
- webscout/Provider/Blackboxai.py +0 -793
- webscout/Provider/FreeGemini.py +0 -250
- webscout/Provider/GptOss.py +0 -207
- webscout/Provider/Hunyuan.py +0 -283
- webscout/Provider/Kimi.py +0 -445
- webscout/Provider/MCPCore.py +0 -322
- webscout/Provider/MiniMax.py +0 -207
- webscout/Provider/OPENAI/BLACKBOXAI.py +0 -1045
- webscout/Provider/OPENAI/MiniMax.py +0 -298
- webscout/Provider/OPENAI/Qwen3.py +0 -304
- webscout/Provider/OPENAI/autoproxy.py +0 -1067
- webscout/Provider/OPENAI/copilot.py +0 -321
- webscout/Provider/OPENAI/gptoss.py +0 -288
- webscout/Provider/OPENAI/kimi.py +0 -469
- webscout/Provider/OPENAI/mcpcore.py +0 -431
- webscout/Provider/OPENAI/multichat.py +0 -378
- webscout/Provider/OPENAI/qodo.py +0 -630
- webscout/Provider/OPENAI/xenai.py +0 -514
- webscout/Provider/Reka.py +0 -214
- webscout/Provider/UNFINISHED/fetch_together_models.py +0 -90
- webscout/Provider/asksteve.py +0 -220
- webscout/Provider/copilot.py +0 -441
- webscout/Provider/freeaichat.py +0 -294
- webscout/Provider/koala.py +0 -182
- webscout/Provider/lmarena.py +0 -198
- webscout/Provider/monochat.py +0 -275
- webscout/Provider/multichat.py +0 -375
- webscout/Provider/scnet.py +0 -244
- webscout/Provider/talkai.py +0 -194
- webscout/tempid.py +0 -128
- webscout-8.3.6.dist-info/RECORD +0 -327
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/WHEEL +0 -0
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/entry_points.txt +0 -0
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/licenses/LICENSE.md +0 -0
- {webscout-8.3.6.dist-info → webscout-2025.10.11.dist-info}/top_level.txt +0 -0
webscout/Provider/__init__.py
CHANGED
@@ -1,86 +1,33 @@
[Old lines 1-33 were truncated in the diff source; only the fragments "#" (line 1) and "from" (line 5) are visible. The removed explicit imports from old lines 34-86 follow.]
-from .talkai import *
-from .llama3mitril import *
-from .Marcus import *
-from .multichat import *
-from .Jadve import *
-from .chatglm import *
-from .hermes import *
-from .TextPollinationsAI import *
-from .QwenLM import *
-from .granite import *
-from .WiseCat import *
-from .freeaichat import FreeAIChat
-from .akashgpt import *
-from .Perplexitylabs import *
-from .AllenAI import *
-from .HeckAI import *
-from .TwoAI import *
-from .Venice import *
-from .GithubChat import *
-from .copilot import *
-from .sonus import *
-from .LambdaChat import *
-from .ChatGPTClone import *
-from .VercelAI import *
-from .ExaChat import *
-from .asksteve import *
-from .Aitopia import *
-from .searchchat import *
-from .ExaAI import ExaAI
-from .OpenGPT import OpenGPT
-from .scira_chat import *
-from .StandardInput import *
-from .toolbaz import Toolbaz
-from .scnet import SCNet
-from .MCPCore import MCPCore
-from .TypliAI import TypliAI
-from .ChatSandbox import ChatSandbox
-from .GizAI import GizAI
-from .WrDoChat import WrDoChat
-from .Nemotron import NEMOTRON
-from .FreeGemini import FreeGemini
-from .Flowith import Flowith
-from .lmarena import lmarena
-from .oivscode import oivscode
-from .XenAI import XenAI
-from .deepseek_assistant import DeepSeekAssistant
-from .GeminiProxy import GeminiProxy
-from .TogetherAI import TogetherAI
-from .MiniMax import MiniMax
-from .Qodo import *
-from .monochat import MonoChat
-from .Kimi import Kimi
-from .GptOss import GptOss
+# This file marks the directory as a Python package.
+
+import os
+import importlib
+from pathlib import Path
+
+# Get current directory
+current_dir = Path(__file__).parent
+
+# List to store all exported names
+__all__ = []
+
+# Auto-import all .py files (except __init__.py)
+for file_path in current_dir.glob("*.py"):
+    if file_path.name != "__init__.py":
+        module_name = file_path.stem
+        try:
+            module = importlib.import_module(f".{module_name}", package=__name__)
+
+            # Import the main class (assumes class name matches filename)
+            class_name = module_name
+            if hasattr(module, class_name):
+                globals()[class_name] = getattr(module, class_name)
+                __all__.append(class_name)
+            else:
+                # If no matching class, import all public attributes
+                for attr_name in dir(module):
+                    if not attr_name.startswith('_'):
+                        globals()[attr_name] = getattr(module, attr_name)
+                        if attr_name not in __all__:
+                            __all__.append(attr_name)
+        except ImportError:
+            pass  # Skip files that can't be imported
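The practical effect of this rewrite is that webscout/Provider no longer maintains a hand-written import list: whichever provider modules import cleanly are registered at package import time. The sketch below is a hypothetical consumer-side snippet, not part of the diff; it assumes webscout is installed and that a given provider module (e.g. ai4chat.py, which defines AI4Chat) imports without error, in which case the loader's fallback branch still exposes the class by name.

    # Hypothetical sketch: inspecting the dynamically built export surface.
    import webscout.Provider as providers

    print(len(providers.__all__))        # names the loader managed to register
    print("AI4Chat" in dir(providers))   # provider classes remain reachable by name

Note that because the loader matches class names against the module filename, a module like ai4chat.py (class AI4Chat) falls through to the else branch and re-exports all of its public attributes, not just the provider class.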
webscout/Provider/ai4chat.py
CHANGED
@@ -1,174 +1,174 @@
(The old and new versions of this file are identical except for line 14, where the new version adds the class attribute required_auth = False in place of a blank line; the file is shown once below in unified-diff form.)
 from curl_cffi.requests import Session, RequestsError
 import urllib.parse
 from typing import Union, Any, Dict
 
 from webscout.AIutel import Optimizers
 from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts
 from webscout.AIbase import Provider
 
 class AI4Chat(Provider):
     """
     A class to interact with the AI4Chat Riddle API.
     """
-
+    required_auth = False
     def __init__(
         self,
         is_conversation: bool = True,
         max_tokens: int = 600,
         timeout: int = 30,
         intro: str = None,
         filepath: str = None,
         update_file: bool = True,
         proxies: dict = {},
         history_offset: int = 10250,
         act: str = None,
         system_prompt: str = "You are a helpful and informative AI assistant.",
         country: str = "Asia",
         user_id: str = "usersmjb2oaz7y"
     ) -> None:
         self.session = Session(timeout=timeout, proxies=proxies)
         self.is_conversation = is_conversation
         self.max_tokens_to_sample = max_tokens
         self.api_endpoint = "https://yw85opafq6.execute-api.us-east-1.amazonaws.com/default/boss_mode_15aug"
         self.timeout = timeout
         self.last_response = {}
         self.country = country
         self.user_id = user_id
         self.headers = {
             "Accept": "*/*",
             "Accept-Language": "id-ID,id;q=0.9",
             "Origin": "https://www.ai4chat.co",
             "Priority": "u=1, i",
             "Referer": "https://www.ai4chat.co/",
             "Sec-CH-UA": '"Chromium";v="131", "Not_A Brand";v="24", "Microsoft Edge Simulate";v="131", "Lemur";v="131"',
             "Sec-CH-UA-Mobile": "?1",
             "Sec-CH-UA-Platform": '"Android"',
             "Sec-Fetch-Dest": "empty",
             "Sec-Fetch-Mode": "cors",
             "Sec-Fetch-Site": "cross-site",
             "User-Agent": "Mozilla/5.0 (Linux; Android 10; K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Mobile Safari/537.36"
         }
         self.__available_optimizers = tuple(
             method for method in dir(Optimizers)
             if callable(getattr(Optimizers, method)) and not method.startswith("__")
         )
         self.session.headers.update(self.headers)
         Conversation.intro = (
             AwesomePrompts().get_act(
                 act, raise_not_found=True, default=None, case_insensitive=True
             )
             if act
             else intro or Conversation.intro
         )
         self.conversation = Conversation(
             is_conversation, self.max_tokens_to_sample, filepath, update_file
         )
         self.conversation.history_offset = history_offset
         self.system_prompt = system_prompt
 
     def ask(
         self,
         prompt: str,
         stream: bool = False,
         raw: bool = False,
         optimizer: str = None,
         conversationally: bool = False,
         country: str = None,
         user_id: str = None,
     ):
         """
         Sends a prompt to the AI4Chat API and returns the response.
         If stream=True, yields small chunks of the response (simulated streaming).
         """
         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
         if optimizer:
             if optimizer in self.__available_optimizers:
                 conversation_prompt = getattr(Optimizers, optimizer)(
                     conversation_prompt if conversationally else prompt
                 )
             else:
                 raise Exception(
                     f"Optimizer is not one of {self.__available_optimizers}"
                 )
         country_param = country or self.country
         user_id_param = user_id or self.user_id
         encoded_text = urllib.parse.quote(conversation_prompt)
         encoded_country = urllib.parse.quote(country_param)
         encoded_user_id = urllib.parse.quote(user_id_param)
         url = f"{self.api_endpoint}?text={encoded_text}&country={encoded_country}&user_id={encoded_user_id}"
         try:
             response = self.session.get(url, headers=self.headers, timeout=self.timeout)
         except RequestsError as e:
             raise Exception(f"Failed to generate response: {e}")
         if not response.ok:
             raise Exception(f"Failed to generate response: {response.status_code} - {response.reason}")
         response_text = response.text
         if response_text.startswith('"'):
             response_text = response_text[1:]
         if response_text.endswith('"'):
             response_text = response_text[:-1]
         response_text = response_text.replace('\\n', '\n').replace('\\n\\n', '\n\n')
         self.last_response.update(dict(text=response_text))
         self.conversation.update_chat_history(prompt, response_text)
         if stream:
             # Simulate streaming by yielding fixed-size character chunks (e.g., 48 chars)
             buffer = response_text
             chunk_size = 48
             while buffer:
                 chunk = buffer[:chunk_size]
                 buffer = buffer[chunk_size:]
                 if chunk.strip():
                     yield {"text": chunk}
         else:
             return self.last_response
 
     def chat(
         self,
         prompt: str,
         stream: bool = False,
         optimizer: str = None,
         conversationally: bool = False,
         country: str = None,
         user_id: str = None,
     ):
         """
         Generates a response from the AI4Chat API.
         If stream=True, yields each chunk as a string.
         """
         if stream:
             for chunk in self.ask(
                 prompt,
                 stream=True,
                 optimizer=optimizer,
                 conversationally=conversationally,
                 country=country,
                 user_id=user_id,
             ):
                 yield self.get_message(chunk)
         else:
             return self.get_message(
                 self.ask(
                     prompt,
                     optimizer=optimizer,
                     conversationally=conversationally,
                     country=country,
                     user_id=user_id,
                 )
             )
 
     def get_message(self, response: Union[dict, str]) -> str:
         """
         Retrieves message only from response
         """
         if isinstance(response, str):
             return response.replace('\\n', '\n').replace('\\n\\n', '\n\n')
         assert isinstance(response, dict), "Response should be either dict or str"
         return response["text"].replace('\\n', '\n').replace('\\n\\n', '\n\n')
 
 if __name__ == "__main__":
     from rich import print
     ai = AI4Chat()
     response = ai.chat("Tell me about humans in points", stream=True)
     for c in response:
         print(c, end="")
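For context on the one functional change above: required_auth = False marks this provider as usable without an API key. The snippet below is a minimal usage sketch based on the file's own __main__ block, not an addition to the package; it assumes curl_cffi is installed and that the AI4Chat endpoint is reachable.

    # Minimal sketch: streaming is simulated client-side by slicing the full
    # response into 48-character chunks, so iteration completes quickly.
    from webscout.Provider.ai4chat import AI4Chat

    bot = AI4Chat(timeout=30)
    for chunk in bot.chat("Tell me about humans in points", stream=True):
        print(chunk, end="")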