webscout 7.6-py3-none-any.whl → 7.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic.

Files changed (124)
  1. webscout/AIutel.py +2 -1
  2. webscout/Bard.py +14 -11
  3. webscout/DWEBS.py +431 -415
  4. webscout/Extra/autocoder/autocoder_utiles.py +183 -47
  5. webscout/Extra/autocoder/rawdog.py +848 -649
  6. webscout/Extra/gguf.py +682 -652
  7. webscout/Provider/AI21.py +1 -1
  8. webscout/Provider/AISEARCH/DeepFind.py +2 -2
  9. webscout/Provider/AISEARCH/ISou.py +2 -23
  10. webscout/Provider/AISEARCH/felo_search.py +6 -6
  11. webscout/Provider/AISEARCH/genspark_search.py +1 -1
  12. webscout/Provider/Aitopia.py +292 -0
  13. webscout/Provider/AllenAI.py +5 -22
  14. webscout/Provider/Andi.py +3 -3
  15. webscout/Provider/C4ai.py +1 -1
  16. webscout/Provider/ChatGPTClone.py +226 -0
  17. webscout/Provider/ChatGPTES.py +3 -5
  18. webscout/Provider/ChatGPTGratis.py +4 -4
  19. webscout/Provider/Chatify.py +2 -2
  20. webscout/Provider/Cloudflare.py +3 -2
  21. webscout/Provider/DARKAI.py +3 -2
  22. webscout/Provider/DeepSeek.py +2 -2
  23. webscout/Provider/Deepinfra.py +1 -1
  24. webscout/Provider/EDITEE.py +1 -1
  25. webscout/Provider/ElectronHub.py +178 -96
  26. webscout/Provider/ExaChat.py +310 -0
  27. webscout/Provider/Free2GPT.py +2 -2
  28. webscout/Provider/Gemini.py +5 -19
  29. webscout/Provider/GithubChat.py +1 -1
  30. webscout/Provider/Glider.py +12 -8
  31. webscout/Provider/Groq.py +3 -3
  32. webscout/Provider/HF_space/qwen_qwen2.py +1 -1
  33. webscout/Provider/HeckAI.py +1 -1
  34. webscout/Provider/HuggingFaceChat.py +1 -1
  35. webscout/Provider/Hunyuan.py +272 -0
  36. webscout/Provider/Jadve.py +3 -3
  37. webscout/Provider/Koboldai.py +3 -3
  38. webscout/Provider/LambdaChat.py +391 -0
  39. webscout/Provider/Llama.py +3 -5
  40. webscout/Provider/Llama3.py +4 -12
  41. webscout/Provider/Marcus.py +3 -3
  42. webscout/Provider/OLLAMA.py +260 -36
  43. webscout/Provider/Openai.py +7 -3
  44. webscout/Provider/PI.py +1 -1
  45. webscout/Provider/Perplexitylabs.py +1 -1
  46. webscout/Provider/Phind.py +1 -1
  47. webscout/Provider/PizzaGPT.py +1 -1
  48. webscout/Provider/QwenLM.py +4 -7
  49. webscout/Provider/TTI/FreeAIPlayground/async_freeaiplayground.py +21 -46
  50. webscout/Provider/TTI/FreeAIPlayground/sync_freeaiplayground.py +37 -49
  51. webscout/Provider/TTI/ImgSys/__init__.py +23 -0
  52. webscout/Provider/TTI/ImgSys/async_imgsys.py +202 -0
  53. webscout/Provider/TTI/ImgSys/sync_imgsys.py +195 -0
  54. webscout/Provider/TTI/__init__.py +3 -1
  55. webscout/Provider/TTI/artbit/async_artbit.py +4 -33
  56. webscout/Provider/TTI/artbit/sync_artbit.py +4 -32
  57. webscout/Provider/TTI/fastflux/async_fastflux.py +6 -2
  58. webscout/Provider/TTI/fastflux/sync_fastflux.py +7 -2
  59. webscout/Provider/TTI/huggingface/async_huggingface.py +1 -1
  60. webscout/Provider/TTI/huggingface/sync_huggingface.py +1 -1
  61. webscout/Provider/TTI/pixelmuse/__init__.py +4 -0
  62. webscout/Provider/TTI/pixelmuse/async_pixelmuse.py +249 -0
  63. webscout/Provider/TTI/pixelmuse/sync_pixelmuse.py +182 -0
  64. webscout/Provider/TTI/talkai/sync_talkai.py +1 -1
  65. webscout/Provider/TTS/utils.py +1 -1
  66. webscout/Provider/TeachAnything.py +1 -1
  67. webscout/Provider/TextPollinationsAI.py +4 -4
  68. webscout/Provider/TwoAI.py +1 -2
  69. webscout/Provider/Venice.py +4 -2
  70. webscout/Provider/VercelAI.py +234 -0
  71. webscout/Provider/WebSim.py +228 -0
  72. webscout/Provider/WiseCat.py +10 -12
  73. webscout/Provider/Youchat.py +1 -1
  74. webscout/Provider/__init__.py +22 -1
  75. webscout/Provider/ai4chat.py +1 -1
  76. webscout/Provider/aimathgpt.py +2 -6
  77. webscout/Provider/akashgpt.py +1 -1
  78. webscout/Provider/askmyai.py +4 -4
  79. webscout/Provider/asksteve.py +203 -0
  80. webscout/Provider/bagoodex.py +2 -2
  81. webscout/Provider/cerebras.py +1 -1
  82. webscout/Provider/chatglm.py +4 -4
  83. webscout/Provider/cleeai.py +1 -0
  84. webscout/Provider/copilot.py +427 -415
  85. webscout/Provider/elmo.py +1 -1
  86. webscout/Provider/flowith.py +14 -3
  87. webscout/Provider/freeaichat.py +57 -31
  88. webscout/Provider/gaurish.py +3 -5
  89. webscout/Provider/geminiprorealtime.py +1 -1
  90. webscout/Provider/granite.py +4 -4
  91. webscout/Provider/hermes.py +5 -5
  92. webscout/Provider/julius.py +1 -1
  93. webscout/Provider/koala.py +1 -1
  94. webscout/Provider/labyrinth.py +239 -0
  95. webscout/Provider/learnfastai.py +28 -15
  96. webscout/Provider/lepton.py +1 -1
  97. webscout/Provider/llama3mitril.py +4 -4
  98. webscout/Provider/llamatutor.py +1 -1
  99. webscout/Provider/llmchat.py +3 -3
  100. webscout/Provider/meta.py +1 -1
  101. webscout/Provider/multichat.py +10 -10
  102. webscout/Provider/promptrefine.py +1 -1
  103. webscout/Provider/searchchat.py +293 -0
  104. webscout/Provider/sonus.py +208 -0
  105. webscout/Provider/talkai.py +2 -2
  106. webscout/Provider/turboseek.py +1 -1
  107. webscout/Provider/tutorai.py +1 -1
  108. webscout/Provider/typegpt.py +6 -43
  109. webscout/Provider/uncovr.py +299 -0
  110. webscout/Provider/x0gpt.py +1 -1
  111. webscout/__init__.py +36 -36
  112. webscout/cli.py +293 -283
  113. webscout/litagent/agent.py +14 -9
  114. webscout/tempid.py +11 -11
  115. webscout/utils.py +2 -2
  116. webscout/version.py +1 -1
  117. webscout/webscout_search.py +1282 -1223
  118. webscout/webscout_search_async.py +813 -692
  119. {webscout-7.6.dist-info → webscout-7.8.dist-info}/METADATA +76 -44
  120. {webscout-7.6.dist-info → webscout-7.8.dist-info}/RECORD +124 -106
  121. {webscout-7.6.dist-info → webscout-7.8.dist-info}/LICENSE.md +0 -0
  122. {webscout-7.6.dist-info → webscout-7.8.dist-info}/WHEEL +0 -0
  123. {webscout-7.6.dist-info → webscout-7.8.dist-info}/entry_points.txt +0 -0
  124. {webscout-7.6.dist-info → webscout-7.8.dist-info}/top_level.txt +0 -0
webscout/Provider/AI21.py CHANGED
@@ -1,6 +1,6 @@
  import requests
  import json
- from typing import Dict, Any
+ from typing import Union, Dict, Any

  from webscout.AIutel import Optimizers
  from webscout.AIutel import Conversation
webscout/Provider/AISEARCH/DeepFind.py CHANGED
@@ -1,7 +1,7 @@
  from uuid import uuid4
  import requests
  import re
- from typing import Any, Dict, Generator, Optional
+ from typing import Any, Dict, Generator, Optional, Union

  from webscout.AIbase import AISearch
  from webscout import exceptions
@@ -114,7 +114,7 @@ class DeepFind(AISearch):
  prompt: str,
  stream: bool = False,
  raw: bool = False,
- ) -> Dict[str, Any] | Generator[str, None, None]:
+ ) -> Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
  """Search using the DeepFind API and get AI-generated responses.

  This method sends a search query to DeepFind and returns the AI-generated response.
webscout/Provider/AISEARCH/ISou.py CHANGED
@@ -1,7 +1,7 @@
  import requests
  import json
  import re
- from typing import Dict, Optional, Generator, Any
+ from typing import Dict, Optional, Generator, Any, Union
  from webscout import LitAgent
  from webscout import exceptions
  from webscout.AIbase import AISearch
@@ -65,7 +65,6 @@ class Isou(AISearch):
  timeout: int = 120,
  proxies: Optional[dict] = None,
  model: str = "siliconflow:deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
- logging: bool = False
  ):
  """Initialize the Isou API client.

@@ -73,7 +72,6 @@ class Isou(AISearch):
  timeout (int, optional): Request timeout in seconds. Defaults to 120.
  proxies (dict, optional): Proxy configuration for requests. Defaults to None.
  model (str, optional): Model to use for search. Defaults to DeepSeek-R1.
- logging (bool, optional): Enable logging. Defaults to False.
  """
  self.available_models = [
  "siliconflow:deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
@@ -115,30 +113,12 @@ class Isou(AISearch):
  self.session.headers.update(self.headers)
  self.proxies = proxies

- # Initialize logger if enabled
- if logging:
- from webscout.Litlogger import Logger, LogFormat, ConsoleHandler
- from webscout.Litlogger.core.level import LogLevel
-
- console_handler = ConsoleHandler(
- level=LogLevel.DEBUG,
- )
-
- self.logger = Logger(
- name="Isou",
- level=LogLevel.DEBUG,
- handlers=[console_handler]
- )
- self.logger.info("Isou initialized successfully ✨")
- else:
- self.logger = None
-
  def search(
  self,
  prompt: str,
  stream: bool = False,
  raw: bool = False,
- ) -> Dict[str, Any] | Generator[Dict[str, Any], None, None]:
+ ) -> Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
  """Search using the Isou API and get AI-generated responses.

  Args:
@@ -268,7 +248,6 @@ if __name__ == "__main__":
  # Initialize with specific model and logging
  ai = Isou(
  model="siliconflow:deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
- logging=False
  )

  response = ai.search(input(">>> "), stream=True, raw=False)
webscout/Provider/AISEARCH/felo_search.py CHANGED
@@ -1,7 +1,7 @@
  import requests
  from uuid import uuid4
  import json
- from typing import Any, Dict, Generator, Optional
+ from typing import Any, Dict, Generator, Optional, Union

  from webscout.AIbase import AISearch
  from webscout import exceptions
@@ -9,7 +9,7 @@ from webscout import LitAgent


  class Response:
- """A wrapper class for Felo API responses.
+ """A wrapper class for API responses.

  This class automatically converts response objects to their text representation
  when printed or converted to string.
@@ -115,7 +115,7 @@ class Felo(AISearch):
  prompt: str,
  stream: bool = False,
  raw: bool = False,
- ) -> Dict[str, Any] | Generator[str, None, None]:
+ ) -> Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
  """Search using the Felo API and get AI-generated responses.

  This method sends a search query to Felo and returns the AI-generated response.
@@ -130,9 +130,9 @@ class Felo(AISearch):
  Defaults to False.

  Returns:
- Union[Dict[str, Any], Generator[str, None, None]]:
- - If stream=False: Returns complete response
- - If stream=True: Yields response chunks as they arrive
+ Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
+ - If stream=False: Returns complete response as Response object
+ - If stream=True: Yields response chunks as either Dict or Response objects

  Raises:
  APIConnectionError: If the API request fails
webscout/Provider/AISEARCH/genspark_search.py CHANGED
@@ -117,7 +117,7 @@ class Genspark(AISearch):
  prompt: str,
  stream: bool = False,
  raw: bool = False,
- ) -> Union[Dict[str, Any], Generator[Union[Dict[str, Any], str], None, None]]:
+ ) -> Union[Response, Generator[Union[Dict[str, str], Response], None, None]]:
  """Search using the Genspark API and get AI-generated responses.

  Args:
webscout/Provider/Aitopia.py CHANGED
@@ -0,0 +1,292 @@
+ import requests
+ import json
+ import uuid
+ import time
+ import hashlib
+ from typing import Any, Dict, Optional, Generator, Union
+
+ from webscout.AIutel import Optimizers
+ from webscout.AIutel import Conversation
+ from webscout.AIutel import AwesomePrompts, sanitize_stream
+ from webscout.AIbase import Provider, AsyncProvider
+ from webscout import exceptions
+ from webscout.litagent import LitAgent
+
+ class Aitopia(Provider):
+ """
+ A class to interact with the Aitopia API with LitAgent user-agent.
+ """
+
+ AVAILABLE_MODELS = [
+ "Claude 3 Haiku",
+ "GPT-4o Mini",
+ "Gemini 1.5 Flash",
+ "Llama 3.1 70B"
+ ]
+
+ def __init__(
+ self,
+ is_conversation: bool = True,
+ max_tokens: int = 2049,
+ timeout: int = 30,
+ intro: str = None,
+ filepath: str = None,
+ update_file: bool = True,
+ proxies: dict = {},
+ history_offset: int = 10250,
+ act: str = None,
+ model: str = "Claude 3 Haiku",
+ browser: str = "chrome"
+ ):
+ """Initializes the Aitopia API client."""
+ if model not in self.AVAILABLE_MODELS:
+ raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")
+
+ self.url = "https://extensions.aitopia.ai/ai/send"
+
+ # Initialize LitAgent for user agent generation
+ self.agent = LitAgent()
+ # Use fingerprinting to create a consistent browser identity
+ self.fingerprint = self.agent.generate_fingerprint(browser)
+
+ # Use the fingerprint for headers
+ self.headers = {
+ "accept": "text/plain",
+ "accept-language": self.fingerprint["accept_language"],
+ "content-type": "text/plain;charset=UTF-8",
+ "dnt": "1",
+ "origin": "https://chat.aitopia.ai",
+ "priority": "u=1, i",
+ "referer": "https://chat.aitopia.ai/",
+ "sec-ch-ua": self.fingerprint["sec_ch_ua"] or '"Chromium";v="134", "Not:A-Brand";v="24", "Microsoft Edge";v="134"',
+ "sec-ch-ua-mobile": "?0",
+ "sec-ch-ua-platform": f'"{self.fingerprint["platform"]}"',
+ "sec-fetch-dest": "empty",
+ "sec-fetch-mode": "cors",
+ "sec-fetch-site": "same-site",
+ "user-agent": self.fingerprint["user_agent"]
+ }
+
+ self.session = requests.Session()
+ self.session.headers.update(self.headers)
+ self.session.proxies.update(proxies)
+
+ self.is_conversation = is_conversation
+ self.max_tokens_to_sample = max_tokens
+ self.timeout = timeout
+ self.last_response = {}
+ self.model = model
+
+ self.__available_optimizers = (
+ method
+ for method in dir(Optimizers)
+ if callable(getattr(Optimizers, method)) and not method.startswith("__")
+ )
+ Conversation.intro = (
+ AwesomePrompts().get_act(
+ act, raise_not_found=True, default=None, case_insensitive=True
+ )
+ if act
+ else intro or Conversation.intro
+ )
+
+ self.conversation = Conversation(
+ is_conversation, self.max_tokens_to_sample, filepath, update_file
+ )
+ self.conversation.history_offset = history_offset
+
+ def refresh_identity(self, browser: str = None):
+ """
+ Refreshes the browser identity fingerprint.
+
+ Args:
+ browser: Specific browser to use for the new fingerprint
+ """
+ browser = browser or self.fingerprint.get("browser_type", "chrome")
+ self.fingerprint = self.agent.generate_fingerprint(browser)
+
+ # Update headers with new fingerprint
+ self.headers.update({
+ "accept-language": self.fingerprint["accept_language"],
+ "sec-ch-ua": self.fingerprint["sec_ch_ua"] or self.headers["sec-ch-ua"],
+ "sec-ch-ua-platform": f'"{self.fingerprint["platform"]}"',
+ "user-agent": self.fingerprint["user_agent"],
+ })
+
+ # Update session headers
+ for header, value in self.headers.items():
+ self.session.headers[header] = value
+
+ return self.fingerprint
+
+ def generate_uuid_search(self):
+ """Generate a UUID and convert to base64-like string."""
+ uuid_str = str(uuid.uuid4())
+ return uuid_str.replace('-', '')
+
+ def generate_hopekey(self):
+ """Generate a random string and hash it."""
+ random_str = str(uuid.uuid4()) + str(time.time())
+ return hashlib.md5(random_str.encode()).hexdigest()
+
+ def ask(
+ self,
+ prompt: str,
+ stream: bool = False,
+ raw: bool = False,
+ optimizer: str = None,
+ conversationally: bool = False,
+ ) -> Union[Dict[str, Any], Generator]:
+ conversation_prompt = self.conversation.gen_complete_prompt(prompt)
+ if optimizer:
+ if optimizer in self.__available_optimizers:
+ conversation_prompt = getattr(Optimizers, optimizer)(
+ conversation_prompt if conversationally else prompt
+ )
+ else:
+ raise Exception(f"Optimizer is not one of {self.__available_optimizers}")
+
+ # Generate hopekey and update headers
+ hopekey = self.generate_hopekey()
+ self.headers["hopekey"] = hopekey
+
+ # Default history if none provided
+ history = [
+ {
+ "item": "Hello, how can I help you today?",
+ "role": "assistant",
+ # "model": "GPT-4o Mini"
+ }
+ ]
+
+ # Generate current timestamp for chat_id
+ current_time = int(time.time() * 1000)
+
+ # Request payload
+ payload = {
+ "history": history,
+ "text": conversation_prompt,
+ "model": self.model,
+ "stream": stream,
+ "uuid_search": self.generate_uuid_search(),
+ "mode": "ai_chat",
+ "prompt_mode": False,
+ "extra_key": "__all",
+ "extra_data": {"prompt_mode": False},
+ "chat_id": current_time,
+ "language_detail": {
+ "lang_code": "en",
+ "name": "English",
+ "title": "English"
+ },
+ "is_continue": False,
+ "lang_code": "en"
+ }
+
+ def for_stream():
+ try:
+ with requests.post(self.url, headers=self.headers, json=payload, stream=True, timeout=self.timeout) as response:
+ if response.status_code != 200:
+ raise exceptions.FailedToGenerateResponseError(
+ f"Request failed with status code {response.status_code}"
+ )
+
+ streaming_text = ""
+ for line in response.iter_lines():
+ if line:
+ line = line.decode('utf-8')
+ if line.startswith('data: '):
+ data = line[6:]
+ if data == '[DONE]':
+ break
+ try:
+ json_data = json.loads(data)
+
+ # Handle Claude 3 Haiku response format
+ if "delta" in json_data and "text" in json_data["delta"]:
+ content = json_data["delta"]["text"]
+ if content:
+ streaming_text += content
+ resp = dict(text=content)
+ yield resp if raw else resp
+ # Handle GPT-4o Mini response format
+ elif "choices" in json_data and "0" in json_data["choices"]:
+ content = json_data["choices"]["0"]["delta"].get("content", "")
+ if content:
+ streaming_text += content
+ resp = dict(text=content)
+ yield resp if raw else resp
+ except json.JSONDecodeError:
+ continue
+
+ self.last_response = {"text": streaming_text}
+ self.conversation.update_chat_history(prompt, streaming_text)
+
+ except requests.RequestException as e:
+ raise exceptions.FailedToGenerateResponseError(f"Request failed: {str(e)}")
+
+ def for_non_stream():
+ try:
+ response = requests.post(self.url, headers=self.headers, json=payload, timeout=self.timeout)
+ if response.status_code != 200:
+ raise exceptions.FailedToGenerateResponseError(
+ f"Request failed with status code {response.status_code}"
+ )
+
+ response_data = response.json()
+ if 'choices' in response_data and len(response_data['choices']) > 0:
+ content = response_data['choices'][0].get('message', {}).get('content', '')
+ self.last_response = {"text": content}
+ self.conversation.update_chat_history(prompt, content)
+ return {"text": content}
+ else:
+ raise exceptions.FailedToGenerateResponseError("No response content found")
+ except Exception as e:
+ raise exceptions.FailedToGenerateResponseError(f"Request failed: {e}")
+
+ return for_stream() if stream else for_non_stream()
+
+ def chat(
+ self,
+ prompt: str,
+ stream: bool = False,
+ optimizer: str = None,
+ conversationally: bool = False,
+ ) -> Union[str, Generator[str, None, None]]:
+ def for_stream():
+ for response in self.ask(prompt, True, optimizer=optimizer, conversationally=conversationally):
+ yield self.get_message(response)
+ def for_non_stream():
+ return self.get_message(
+ self.ask(prompt, False, optimizer=optimizer, conversationally=conversationally)
+ )
+ return for_stream() if stream else for_non_stream()
+
+ def get_message(self, response: dict) -> str:
+ assert isinstance(response, dict), "Response should be of dict data-type only"
+ return response["text"]
+
+ if __name__ == "__main__":
+ print("-" * 80)
+ print(f"{'Model':<50} {'Status':<10} {'Response'}")
+ print("-" * 80)
+
+ for model in Aitopia.AVAILABLE_MODELS:
+ try:
+ test_ai = Aitopia(model=model, timeout=60)
+ response = test_ai.chat("Say 'Hello' in one word", stream=True)
+ response_text = ""
+ for chunk in response:
+ response_text += chunk
+
+ if response_text and len(response_text.strip()) > 0:
+ status = "✓"
+ # Clean and truncate response
+ clean_text = response_text.strip().encode('utf-8', errors='ignore').decode('utf-8')
+ display_text = clean_text[:50] + "..." if len(clean_text) > 50 else clean_text
+ else:
+ status = "✗"
+ display_text = "Empty or invalid response"
+ print(f"\r{model:<50} {status:<10} {display_text}")
+ except Exception as e:
+ print(f"\r{model:<50} {'✗':<10} {str(e)}")
webscout/Provider/AllenAI.py CHANGED
@@ -9,7 +9,7 @@ from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts, sanitize_stream
  from webscout.AIbase import Provider, AsyncProvider
  from webscout import exceptions
- from webscout import LitAgent
+ from webscout.litagent import LitAgent

  class AllenAI(Provider):
  """
@@ -37,7 +37,7 @@ class AllenAI(Provider):
  history_offset: int = 10250,
  act: str = None,
  model: str = "tulu3-405b",
- system_prompt: str = "You are a helpful AI assistant.",
+
  ):
  """Initializes the AllenAI API client."""
  if model not in self.AVAILABLE_MODELS:
@@ -68,7 +68,6 @@ class AllenAI(Provider):
  self.session.headers.update(self.headers)
  self.session.proxies.update(proxies)
  self.model = model
- self.system_prompt = system_prompt
  self.is_conversation = is_conversation
  self.max_tokens_to_sample = max_tokens
  self.timeout = timeout
@@ -95,19 +94,6 @@ class AllenAI(Provider):
  )
  self.conversation.history_offset = history_offset

- def format_prompt(self, messages):
- """Format messages into a prompt string"""
- formatted = []
- for msg in messages:
- role = msg.get("role", "")
- content = msg.get("content", "")
- if role == "system":
- formatted.append(f"System: {content}")
- elif role == "user":
- formatted.append(f"User: {content}")
- elif role == "assistant":
- formatted.append(f"Assistant: {content}")
- return "\n".join(formatted)

  def ask(
  self,
@@ -139,11 +125,8 @@ class AllenAI(Provider):
  "x-anonymous-user-id": self.x_anonymous_user_id
  })

- # Format messages for AllenAI
- messages = [
- {"role": "system", "content": self.system_prompt},
- {"role": "user", "content": conversation_prompt}
- ]
+
+ messages = conversation_prompt

  # Build multipart form data
  form_data = [
@@ -154,7 +137,7 @@ class AllenAI(Provider):
  f'Content-Disposition: form-data; name="host"\r\n\r\n{host}\r\n',

  f'--{boundary}\r\n'
- f'Content-Disposition: form-data; name="content"\r\n\r\n{self.format_prompt(messages)}\r\n',
+ f'Content-Disposition: form-data; name="content"\r\n\r\n{messages}\r\n',

  f'--{boundary}\r\n'
  f'Content-Disposition: form-data; name="private"\r\n\r\n{str(private).lower()}\r\n'
webscout/Provider/Andi.py CHANGED
@@ -6,9 +6,9 @@ from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts, sanitize_stream
  from webscout.AIbase import Provider, AsyncProvider
  from webscout import exceptions
- from typing import Any, AsyncGenerator, Dict
+ from typing import Union, Any, AsyncGenerator, Dict
  from webscout import WEBS
- from rich import print
+ from webscout.litagent import LitAgent

  class AndiSearch(Provider):
  def __init__(
@@ -60,7 +60,7 @@ class AndiSearch(Provider):
  "sec-fetch-dest": "empty",
  "sec-fetch-mode": "cors",
  "sec-fetch-site": "same-site",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36 Edg/127.0.0.0",
+ "user-agent": LitAgent().random(),
  "x-amz-date": "20240730T031106Z",
  "x-amz-security-token": str(uuid4()),
  }
webscout/Provider/C4ai.py CHANGED
@@ -9,7 +9,7 @@ from typing import Any, Dict, List, Optional, Union, Generator
  from webscout.AIutel import Conversation
  from webscout.AIbase import Provider
  from webscout import exceptions
- from webscout import LitAgent
+ from webscout.litagent import LitAgent

  class C4ai(Provider):
  """