webscout 8.2.3__py3-none-any.whl → 8.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic. See the registry's advisory page for more details.

Files changed (87)
  1. inferno/lol.py +589 -0
  2. webscout/AIutel.py +226 -14
  3. webscout/Bard.py +579 -206
  4. webscout/DWEBS.py +78 -35
  5. webscout/Extra/tempmail/base.py +1 -1
  6. webscout/Provider/AISEARCH/hika_search.py +4 -0
  7. webscout/Provider/AllenAI.py +163 -126
  8. webscout/Provider/ChatGPTClone.py +96 -84
  9. webscout/Provider/Deepinfra.py +95 -67
  10. webscout/Provider/ElectronHub.py +55 -0
  11. webscout/Provider/GPTWeb.py +96 -46
  12. webscout/Provider/Groq.py +194 -91
  13. webscout/Provider/HeckAI.py +89 -47
  14. webscout/Provider/HuggingFaceChat.py +113 -106
  15. webscout/Provider/Hunyuan.py +94 -83
  16. webscout/Provider/Jadve.py +107 -75
  17. webscout/Provider/LambdaChat.py +106 -64
  18. webscout/Provider/Llama3.py +94 -39
  19. webscout/Provider/MCPCore.py +318 -0
  20. webscout/Provider/Marcus.py +85 -36
  21. webscout/Provider/Netwrck.py +76 -43
  22. webscout/Provider/OPENAI/__init__.py +4 -1
  23. webscout/Provider/OPENAI/ai4chat.py +286 -0
  24. webscout/Provider/OPENAI/chatgptclone.py +35 -14
  25. webscout/Provider/OPENAI/deepinfra.py +37 -0
  26. webscout/Provider/OPENAI/groq.py +354 -0
  27. webscout/Provider/OPENAI/heckai.py +6 -2
  28. webscout/Provider/OPENAI/mcpcore.py +376 -0
  29. webscout/Provider/OPENAI/multichat.py +368 -0
  30. webscout/Provider/OPENAI/netwrck.py +3 -1
  31. webscout/Provider/OpenGPT.py +48 -38
  32. webscout/Provider/PI.py +168 -92
  33. webscout/Provider/PizzaGPT.py +66 -36
  34. webscout/Provider/TeachAnything.py +85 -51
  35. webscout/Provider/TextPollinationsAI.py +109 -51
  36. webscout/Provider/TwoAI.py +109 -60
  37. webscout/Provider/Venice.py +93 -56
  38. webscout/Provider/VercelAI.py +2 -2
  39. webscout/Provider/WiseCat.py +65 -28
  40. webscout/Provider/Writecream.py +37 -11
  41. webscout/Provider/WritingMate.py +135 -63
  42. webscout/Provider/__init__.py +3 -21
  43. webscout/Provider/ai4chat.py +6 -7
  44. webscout/Provider/copilot.py +0 -3
  45. webscout/Provider/elmo.py +101 -58
  46. webscout/Provider/granite.py +91 -46
  47. webscout/Provider/hermes.py +87 -47
  48. webscout/Provider/koala.py +1 -1
  49. webscout/Provider/learnfastai.py +104 -50
  50. webscout/Provider/llama3mitril.py +86 -51
  51. webscout/Provider/llmchat.py +88 -46
  52. webscout/Provider/llmchatco.py +74 -49
  53. webscout/Provider/meta.py +41 -37
  54. webscout/Provider/multichat.py +54 -25
  55. webscout/Provider/scnet.py +93 -43
  56. webscout/Provider/searchchat.py +82 -75
  57. webscout/Provider/sonus.py +103 -51
  58. webscout/Provider/toolbaz.py +132 -77
  59. webscout/Provider/turboseek.py +92 -41
  60. webscout/Provider/tutorai.py +82 -64
  61. webscout/Provider/typefully.py +75 -33
  62. webscout/Provider/typegpt.py +96 -35
  63. webscout/Provider/uncovr.py +112 -62
  64. webscout/Provider/x0gpt.py +69 -26
  65. webscout/Provider/yep.py +79 -66
  66. webscout/conversation.py +35 -21
  67. webscout/exceptions.py +20 -0
  68. webscout/prompt_manager.py +56 -42
  69. webscout/version.py +1 -1
  70. webscout/webscout_search.py +65 -47
  71. webscout/webscout_search_async.py +81 -126
  72. webscout/yep_search.py +93 -43
  73. {webscout-8.2.3.dist-info → webscout-8.2.4.dist-info}/METADATA +22 -10
  74. {webscout-8.2.3.dist-info → webscout-8.2.4.dist-info}/RECORD +78 -81
  75. {webscout-8.2.3.dist-info → webscout-8.2.4.dist-info}/WHEEL +1 -1
  76. webscout/Provider/C4ai.py +0 -432
  77. webscout/Provider/ChatGPTES.py +0 -237
  78. webscout/Provider/DeepSeek.py +0 -196
  79. webscout/Provider/Llama.py +0 -200
  80. webscout/Provider/Phind.py +0 -535
  81. webscout/Provider/WebSim.py +0 -228
  82. webscout/Provider/labyrinth.py +0 -340
  83. webscout/Provider/lepton.py +0 -194
  84. webscout/Provider/llamatutor.py +0 -192
  85. {webscout-8.2.3.dist-info → webscout-8.2.4.dist-info}/entry_points.txt +0 -0
  86. {webscout-8.2.3.dist-info → webscout-8.2.4.dist-info/licenses}/LICENSE.md +0 -0
  87. {webscout-8.2.3.dist-info → webscout-8.2.4.dist-info}/top_level.txt +0 -0
@@ -1,4 +1,5 @@
1
- import requests
1
+ from curl_cffi.requests import Session
2
+ from curl_cffi import CurlError
2
3
  import json
3
4
  from datetime import datetime
4
5
  from typing import Any, Dict, Optional, Generator, Union
@@ -26,7 +27,6 @@ class SearchChatAI(Provider):
26
27
  proxies: dict = {},
27
28
  history_offset: int = 10250,
28
29
  act: str = None,
29
- system_prompt: str = "You are a helpful assistant."
30
30
  ):
31
31
  """Initializes the SearchChatAI API client."""
32
32
  self.url = "https://search-chat.ai/api/chat-test-stop.php"
@@ -34,7 +34,6 @@ class SearchChatAI(Provider):
34
34
  self.is_conversation = is_conversation
35
35
  self.max_tokens_to_sample = max_tokens
36
36
  self.last_response = {}
37
- self.system_prompt = system_prompt
38
37
 
39
38
  # Initialize LitAgent for user agent generation
40
39
  self.agent = LitAgent()
@@ -58,9 +57,11 @@ class SearchChatAI(Provider):
58
57
  "User-Agent": self.fingerprint["user_agent"],
59
58
  }
60
59
 
61
- self.session = requests.Session()
60
+ # Initialize curl_cffi Session
61
+ self.session = Session()
62
+ # Update curl_cffi session headers and proxies
62
63
  self.session.headers.update(self.headers)
63
- self.session.proxies.update(proxies)
64
+ self.session.proxies = proxies # Assign proxies directly
64
65
 
65
66
  self.__available_optimizers = (
66
67
  method
@@ -99,7 +100,7 @@ class SearchChatAI(Provider):
99
100
  "User-Agent": self.fingerprint["user_agent"],
100
101
  })
101
102
 
102
- # Update session headers
103
+ # Update session headers (already done in the original code, should work with curl_cffi session)
103
104
  for header, value in self.headers.items():
104
105
  self.session.headers[header] = value
105
106
 
@@ -152,62 +153,40 @@ class SearchChatAI(Provider):
152
153
 
153
154
  def for_stream():
154
155
  try:
155
- with self.session.post(
156
+ # Use curl_cffi session post with impersonate
157
+ response = self.session.post(
156
158
  self.url,
159
+ # headers are set on the session
157
160
  json=payload,
158
161
  stream=True,
159
- timeout=self.timeout
160
- ) as response:
161
- if response.status_code != 200:
162
+ timeout=self.timeout,
163
+ impersonate=self.fingerprint.get("browser_type", "chrome110") # Use fingerprint browser type
164
+ )
165
+ if response.status_code != 200:
166
+ # Add identity refresh logic on 403/429
167
+ if response.status_code in [403, 429]:
168
+ self.refresh_identity()
169
+ response = self.session.post(
170
+ self.url,
171
+ json=payload,
172
+ stream=True,
173
+ timeout=self.timeout,
174
+ impersonate=self.fingerprint.get("browser_type", "chrome110") # Use updated fingerprint
175
+ )
176
+ if not response.ok:
177
+ raise exceptions.FailedToGenerateResponseError(
178
+ f"Request failed after identity refresh - ({response.status_code}, {response.reason}) - {response.text}"
179
+ )
180
+ else:
162
181
  raise exceptions.FailedToGenerateResponseError(
163
- f"Request failed with status code {response.status_code}"
182
+ f"Request failed with status code {response.status_code} - {response.text}"
164
183
  )
165
184
 
166
- streaming_text = ""
167
- for line in response.iter_lines():
168
- if line:
169
- line = line.decode('utf-8')
170
- if line.startswith('data: '):
171
- data_str = line[6:] # Remove 'data: ' prefix
172
-
173
- if data_str == '[DONE]':
174
- break
175
-
176
- try:
177
- data = json.loads(data_str)
178
- if "choices" in data and len(data["choices"]) > 0:
179
- delta = data["choices"][0].get("delta", {})
180
- if "content" in delta:
181
- content = delta["content"]
182
- streaming_text += content
183
- resp = dict(text=content)
184
- yield resp if raw else content
185
- except json.JSONDecodeError:
186
- continue
187
-
188
- self.last_response = {"text": streaming_text}
189
- self.conversation.update_chat_history(prompt, streaming_text)
190
-
191
- except requests.RequestException as e:
192
- raise exceptions.FailedToGenerateResponseError(f"Request failed: {str(e)}")
193
-
194
- def for_non_stream():
195
- try:
196
- response = self.session.post(
197
- self.url,
198
- json=payload,
199
- stream=True, # Keep streaming enabled
200
- timeout=self.timeout
201
- )
202
- if response.status_code != 200:
203
- raise exceptions.FailedToGenerateResponseError(
204
- f"Request failed with status code {response.status_code}"
205
- )
206
-
207
- full_text = ""
208
- for line in response.iter_lines():
209
- if line:
210
- line = line.decode('utf-8')
185
+ streaming_text = ""
186
+ # Iterate over bytes and decode manually
187
+ for line_bytes in response.iter_lines():
188
+ if line_bytes:
189
+ line = line_bytes.decode('utf-8')
211
190
  if line.startswith('data: '):
212
191
  data_str = line[6:] # Remove 'data: ' prefix
213
192
 
@@ -218,21 +197,40 @@ class SearchChatAI(Provider):
218
197
  data = json.loads(data_str)
219
198
  if "choices" in data and len(data["choices"]) > 0:
220
199
  delta = data["choices"][0].get("delta", {})
221
- if "content" in delta:
200
+ if "content" in delta and delta["content"] is not None:
222
201
  content = delta["content"]
223
- full_text += content
224
- except json.JSONDecodeError:
202
+ streaming_text += content
203
+ resp = dict(text=content)
204
+ # Yield dict or raw string
205
+ yield resp if not raw else content
206
+ except (json.JSONDecodeError, UnicodeDecodeError):
225
207
  continue
226
208
 
227
- if full_text:
228
- self.last_response = {"text": full_text}
229
- self.conversation.update_chat_history(prompt, full_text)
230
- return {"text": full_text}
231
- else:
232
- raise exceptions.FailedToGenerateResponseError("No response content found")
209
+ # Update history and last response after stream finishes
210
+ self.last_response = {"text": streaming_text}
211
+ self.conversation.update_chat_history(prompt, streaming_text)
233
212
 
234
- except Exception as e:
235
- raise exceptions.FailedToGenerateResponseError(f"Request failed: {str(e)}")
213
+ except CurlError as e: # Catch CurlError
214
+ raise exceptions.FailedToGenerateResponseError(f"Request failed (CurlError): {str(e)}") from e
215
+ except Exception as e: # Catch other potential exceptions
216
+ raise exceptions.FailedToGenerateResponseError(f"An unexpected error occurred ({type(e).__name__}): {e}") from e
217
+
218
+ def for_non_stream():
219
+ # Aggregate the stream using the updated for_stream logic
220
+ full_text = ""
221
+ # Iterate through the generator provided by for_stream
222
+ # Ensure raw=False so for_stream yields dicts
223
+ for chunk_data in for_stream():
224
+ if isinstance(chunk_data, dict) and "text" in chunk_data:
225
+ full_text += chunk_data["text"]
226
+ # If raw=True was somehow passed, handle string chunks
227
+ elif isinstance(chunk_data, str):
228
+ full_text += chunk_data
229
+
230
+ # last_response and history are updated within for_stream
231
+ # Return the final aggregated response dict or raw string
232
+ return full_text if raw else self.last_response
233
+
236
234
 
237
235
  return for_stream() if stream else for_non_stream()
238
236
 
@@ -255,16 +253,24 @@ class SearchChatAI(Provider):
255
253
  Returns:
256
254
  Either a string response or a generator for streaming
257
255
  """
258
- def for_stream():
259
- for response in self.ask(prompt, True, optimizer=optimizer, conversationally=conversationally):
260
- yield self.get_message(response)
256
+ def for_stream_chat():
257
+ # ask() yields dicts or strings when streaming
258
+ gen = self.ask(
259
+ prompt, stream=True, raw=False, # Ensure ask yields dicts
260
+ optimizer=optimizer, conversationally=conversationally
261
+ )
262
+ for response_dict in gen:
263
+ yield self.get_message(response_dict) # get_message expects dict
261
264
 
262
- def for_non_stream():
263
- return self.get_message(
264
- self.ask(prompt, False, optimizer=optimizer, conversationally=conversationally)
265
+ def for_non_stream_chat():
266
+ # ask() returns dict or str when not streaming
267
+ response_data = self.ask(
268
+ prompt, stream=False, raw=False, # Ensure ask returns dict
269
+ optimizer=optimizer, conversationally=conversationally
265
270
  )
271
+ return self.get_message(response_data) # get_message expects dict
266
272
 
267
- return for_stream() if stream else for_non_stream()
273
+ return for_stream_chat() if stream else for_non_stream_chat()
268
274
 
269
275
  def get_message(self, response: dict) -> str:
270
276
  """Extract the message from the response."""
@@ -272,6 +278,7 @@ class SearchChatAI(Provider):
272
278
  return response["text"]
273
279
 
274
280
  if __name__ == "__main__":
281
+ # Ensure curl_cffi is installed
275
282
  print("-" * 80)
276
283
  print(f"{'Status':<10} {'Response'}")
277
284
  print("-" * 80)
@@ -290,4 +297,4 @@ if __name__ == "__main__":
290
297
  display_text = "Empty or invalid response"
291
298
  print(f"{status:<10} {display_text}")
292
299
  except Exception as e:
293
- print(f"{'✗':<10} {str(e)}")
300
+ print(f"{'✗':<10} {str(e)}")
@@ -1,4 +1,5 @@
1
- import requests
1
+ from curl_cffi.requests import Session
2
+ from curl_cffi import CurlError
2
3
  import json
3
4
  from typing import Any, Dict, Optional, Generator, Union
4
5
  from webscout.AIutel import Optimizers
@@ -21,7 +22,7 @@ class SonusAI(Provider):
21
22
  def __init__(
22
23
  self,
23
24
  is_conversation: bool = True,
24
- max_tokens: int = 2049,
25
+ max_tokens: int = 2049, # Note: max_tokens is not directly used by this API
25
26
  timeout: int = 30,
26
27
  intro: str = None,
27
28
  filepath: str = None,
@@ -44,11 +45,14 @@ class SonusAI(Provider):
44
45
  'Origin': 'https://chat.sonus.ai',
45
46
  'Referer': 'https://chat.sonus.ai/',
46
47
  'User-Agent': LitAgent().random()
48
+ # Add sec-ch-ua headers if needed for impersonation consistency
47
49
  }
48
50
 
49
- self.session = requests.Session()
51
+ # Initialize curl_cffi Session
52
+ self.session = Session()
53
+ # Update curl_cffi session headers and proxies
50
54
  self.session.headers.update(self.headers)
51
- self.session.proxies.update(proxies)
55
+ self.session.proxies = proxies # Assign proxies directly
52
56
 
53
57
  self.is_conversation = is_conversation
54
58
  self.max_tokens_to_sample = max_tokens
@@ -92,61 +96,95 @@ class SonusAI(Provider):
92
96
  else:
93
97
  raise Exception(f"Optimizer is not one of {self.__available_optimizers}")
94
98
 
95
- # Prepare the multipart form data
96
- files = {
97
- 'message': (None, conversation_prompt),
98
- 'history': (None),
99
- 'reasoning': (None, str(reasoning).lower()),
100
- 'model': (None, self.model)
99
+ # Prepare the multipart form data (curl_cffi handles tuples for files/data)
100
+ # No need for explicit (None, ...) for simple fields when using `data=`
101
+ form_data = {
102
+ 'message': conversation_prompt,
103
+ 'history': "", # Explicitly empty string if needed, or omit if None is acceptable
104
+ 'reasoning': str(reasoning).lower(),
105
+ 'model': self.model
101
106
  }
107
+ # Note: curl_cffi's `files` parameter is for actual file uploads.
108
+ # For simple key-value pairs like this, `data` is usually sufficient for multipart/form-data.
109
+ # If the server strictly requires `files`, keep the original structure but it might not work as expected with curl_cffi without actual file objects.
102
110
 
103
111
  def for_stream():
104
112
  try:
105
- with requests.post(self.url, files=files, headers=self.headers, stream=True, timeout=self.timeout) as response:
106
- if response.status_code != 200:
107
- raise exceptions.FailedToGenerateResponseError(
108
- f"Request failed with status code {response.status_code}"
109
- )
113
+ # Use curl_cffi session post with impersonate
114
+ # Use `data` instead of `files` for simple key-value multipart
115
+ response = self.session.post(
116
+ self.url,
117
+ # headers are set on the session
118
+ data=form_data, # Use data for multipart form fields
119
+ stream=True,
120
+ timeout=self.timeout,
121
+ impersonate="chrome110" # Use a common impersonation profile
122
+ )
123
+ if response.status_code != 200:
124
+ raise exceptions.FailedToGenerateResponseError(
125
+ f"Request failed with status code {response.status_code} - {response.text}"
126
+ )
110
127
 
111
- streaming_text = ""
112
- for line in response.iter_lines():
113
- if line:
114
- try:
115
- # Decode the line and remove 'data: ' prefix if present
116
- line = line.decode('utf-8')
117
- if line.startswith('data: '):
118
- line = line[6:]
119
-
120
- data = json.loads(line)
121
- if "content" in data:
122
- content = data["content"]
123
- streaming_text += content
124
- resp = dict(text=content)
125
- yield resp if raw else resp
126
- except (json.JSONDecodeError, UnicodeDecodeError):
128
+ streaming_text = ""
129
+ # Iterate over bytes and decode manually
130
+ for line_bytes in response.iter_lines():
131
+ if line_bytes:
132
+ try:
133
+ # Decode the line and remove 'data: ' prefix if present
134
+ line = line_bytes.decode('utf-8')
135
+ if line.startswith('data: '):
136
+ line = line[6:]
137
+
138
+ # Handle potential empty lines after prefix removal
139
+ if not line.strip():
127
140
  continue
141
+
142
+ data = json.loads(line)
143
+ if "content" in data:
144
+ content = data["content"]
145
+ streaming_text += content
146
+ resp = dict(text=content)
147
+ # Yield dict or raw string
148
+ yield resp if raw else resp
149
+ except (json.JSONDecodeError, UnicodeDecodeError):
150
+ continue
151
+
152
+ # Update history and last response after stream finishes
153
+ self.last_response = {"text": streaming_text}
154
+ self.conversation.update_chat_history(prompt, streaming_text)
128
155
 
129
- self.last_response = {"text": streaming_text}
130
- self.conversation.update_chat_history(prompt, streaming_text)
131
-
132
- except requests.RequestException as e:
133
- raise exceptions.FailedToGenerateResponseError(f"Request failed: {str(e)}")
156
+ except CurlError as e: # Catch CurlError
157
+ raise exceptions.FailedToGenerateResponseError(f"Request failed (CurlError): {str(e)}") from e
158
+ except Exception as e: # Catch other potential exceptions
159
+ raise exceptions.FailedToGenerateResponseError(f"An unexpected error occurred ({type(e).__name__}): {e}") from e
134
160
 
135
161
  def for_non_stream():
136
162
  try:
137
- response = requests.post(self.url, files=files, headers=self.headers, timeout=self.timeout)
163
+ # Use curl_cffi session post with impersonate
164
+ response = self.session.post(
165
+ self.url,
166
+ # headers are set on the session
167
+ data=form_data, # Use data for multipart form fields
168
+ timeout=self.timeout,
169
+ impersonate="chrome110" # Use a common impersonation profile
170
+ )
138
171
  if response.status_code != 200:
139
172
  raise exceptions.FailedToGenerateResponseError(
140
- f"Request failed with status code {response.status_code}"
173
+ f"Request failed with status code {response.status_code} - {response.text}"
141
174
  )
142
175
 
143
176
  full_response = ""
144
- for line in response.iter_lines():
177
+ # Process the full response text which might contain multiple JSON objects
178
+ # Split by lines and process each potential JSON object
179
+ for line in response.text.splitlines():
145
180
  if line:
146
181
  try:
147
- line = line.decode('utf-8')
148
182
  if line.startswith('data: '):
149
183
  line = line[6:]
184
+
185
+ if not line.strip():
186
+ continue
187
+
150
188
  data = json.loads(line)
151
189
  if "content" in data:
152
190
  full_response += data["content"]
@@ -155,9 +193,13 @@ class SonusAI(Provider):
155
193
 
156
194
  self.last_response = {"text": full_response}
157
195
  self.conversation.update_chat_history(prompt, full_response)
158
- return {"text": full_response}
159
- except Exception as e:
160
- raise exceptions.FailedToGenerateResponseError(f"Request failed: {e}")
196
+ # Return dict or raw string
197
+ return full_response if raw else {"text": full_response}
198
+
199
+ except CurlError as e: # Catch CurlError
200
+ raise exceptions.FailedToGenerateResponseError(f"Request failed (CurlError): {str(e)}") from e
201
+ except Exception as e: # Catch other potential exceptions
202
+ raise exceptions.FailedToGenerateResponseError(f"An unexpected error occurred ({type(e).__name__}): {e}") from e
161
203
 
162
204
  return for_stream() if stream else for_non_stream()
163
205
 
@@ -169,20 +211,30 @@ class SonusAI(Provider):
169
211
  conversationally: bool = False,
170
212
  reasoning: bool = False,
171
213
  ) -> Union[str, Generator[str, None, None]]:
172
- def for_stream():
173
- for response in self.ask(prompt, True, optimizer=optimizer, conversationally=conversationally, reasoning=reasoning):
174
- yield self.get_message(response)
175
- def for_non_stream():
176
- return self.get_message(
177
- self.ask(prompt, False, optimizer=optimizer, conversationally=conversationally, reasoning=reasoning)
214
+ def for_stream_chat():
215
+ # ask() yields dicts when raw=False
216
+ for response_dict in self.ask(
217
+ prompt, stream=True, raw=False, # Ensure ask yields dicts
218
+ optimizer=optimizer, conversationally=conversationally, reasoning=reasoning
219
+ ):
220
+ yield self.get_message(response_dict)
221
+
222
+ def for_non_stream_chat():
223
+ # ask() returns dict or str when raw=False/True
224
+ response_data = self.ask(
225
+ prompt, stream=False, raw=False, # Ensure ask returns dict
226
+ optimizer=optimizer, conversationally=conversationally, reasoning=reasoning
178
227
  )
179
- return for_stream() if stream else for_non_stream()
228
+ return self.get_message(response_data) # get_message expects dict
229
+
230
+ return for_stream_chat() if stream else for_non_stream_chat()
180
231
 
181
232
  def get_message(self, response: dict) -> str:
182
233
  assert isinstance(response, dict), "Response should be of dict data-type only"
183
234
  return response["text"]
184
235
 
185
236
  if __name__ == "__main__":
237
+ # Ensure curl_cffi is installed
186
238
  print("-" * 80)
187
239
  print(f"{'Model':<50} {'Status':<10} {'Response'}")
188
240
  print("-" * 80)
@@ -205,4 +257,4 @@ if __name__ == "__main__":
205
257
  display_text = "Empty or invalid response"
206
258
  print(f"\r{model:<50} {status:<10} {display_text}")
207
259
  except Exception as e:
208
- print(f"\r{model:<50} {'✗':<10} {str(e)}")
260
+ print(f"\r{model:<50} {'✗':<10} {str(e)}")