webscout 2025.10.19.2__py3-none-any.whl → 2025.10.22.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic.

@@ -18,7 +18,7 @@ class DeepAI(Provider):
      A provider for DeepAI's chat functionality, supporting both streaming and non-streaming responses.
      Structured similarly to other providers like DeepInfra and X0GPT.
      """
-     required_auth = False
+     required_auth = True
      AVAILABLE_MODELS = [
          "standard",
          "genius",
@@ -19,7 +19,7 @@ class Flowith(Provider):
      AVAILABLE_MODELS = [
          "gpt-5-nano", "gpt-5-mini", "glm-4.5", "gpt-oss-120b", "gpt-oss-20b", "kimi-k2",
          "gpt-4.1", "gpt-4.1-mini", "deepseek-chat", "deepseek-reasoner",
-         "gemini-2.5-flash", "grok-3-mini"
+         "gemini-2.5-flash", "grok-3-mini", "claude-haiku-4.5"
      ]

      def __init__(
webscout/Provider/GMI.py CHANGED
@@ -29,7 +29,10 @@ class GMI(Provider):
          "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8",
          "zai-org/GLM-4.5-Air-FP8",
          "zai-org/GLM-4.5-FP8",
-         "zai-org/GLM-4.6"
+         "zai-org/GLM-4.6",
+         "openai/gpt-oss-20b",
+         "openai/gpt-oss-120b"
+
      ]

      def __init__(
@@ -5,9 +5,10 @@ from webscout.AIbase import Provider
  from typing import AsyncGenerator, Dict, List, Optional, Union

  try:
-     from ollama import AsyncClient, Client, ResponseError
+     from ollama import AsyncClient, Client, ResponseError  # type: ignore
  except ImportError as e:
-     pass
+     print("Please install the 'ollama' package to use the OLLAMA provider: pip install ollama")
+     raise e

  class OLLAMA(Provider):
      required_auth = True
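
The OLLAMA change above swaps a silent pass for an explicit, actionable failure when the optional dependency is missing. A minimal standalone sketch of the same guarded-import pattern (nothing beyond the public ollama package is assumed here):

    # Fail loudly with an install hint instead of leaving names like Client undefined.
    try:
        from ollama import Client  # optional dependency
    except ImportError as exc:
        raise ImportError(
            "Please install the 'ollama' package to use the OLLAMA provider: pip install ollama"
        ) from exc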
@@ -1,45 +1,36 @@
- """
- DeepAI Chat Provider for webscout
-
- This provider implements the DeepAI chat API discovered through reverse engineering.
- The API uses a POST endpoint with multipart/form-data containing chat history and parameters.
-
- API Details:
- - Endpoint: https://api.deepai.org/hacking_is_a_serious_crime
- - Method: POST
- - Authentication: api-key header (trial key provided)
- - Content-Type: multipart/form-data
- - Response: Plain text AI response
-
- Features:
- - Streaming and non-streaming support
- - Conversation history management
- - Error handling and retries
- - Configurable model and chat style
- """

+ from curl_cffi.requests import Session, RequestsError
  from typing import List, Dict, Optional, Union, Generator, Any

- # Import requests for HTTP requests
- import requests
+ # Import base classes and utility structures
+ from webscout.Provider.OPENAI.base import OpenAICompatibleProvider, BaseChat, BaseCompletions
+ from webscout.Provider.OPENAI.utils import (
+     ChatCompletionChunk, ChatCompletion, Choice, ChoiceDelta,
+     ChatCompletionMessage, CompletionUsage, format_prompt, count_tokens
+ )

  # Standard library imports
  import json
  import time
  import uuid

- # Import base classes and utility structures
- from .base import OpenAICompatibleProvider, BaseChat, BaseCompletions
- from .utils import (
-     ChatCompletionChunk, ChatCompletion, Choice, ChoiceDelta,
-     ChatCompletionMessage, CompletionUsage
- )
-
  # Attempt to import LitAgent, fallback if not available
  try:
      from webscout.litagent import LitAgent
  except ImportError:
-     LitAgent = None
+     # Define a dummy LitAgent if webscout is not installed or accessible
+     class LitAgent:
+         def generate_fingerprint(self, browser: str = "chrome") -> Dict[str, Any]:
+             # Return minimal default headers if LitAgent is unavailable
+             print("Warning: LitAgent not found. Using default minimal headers.")
+             return {
+                 "accept": "*/*",
+                 "accept_language": "en-US,en;q=0.9",
+                 "platform": "Windows",
+                 "sec_ch_ua": '"Not/A)Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
+                 "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36",
+                 "browser_type": browser,
+             }

  # --- DeepAI Client ---

@@ -91,20 +82,27 @@ class Completions(BaseCompletions):
          created_time = int(time.time())

          if stream:
-             return self._create_stream(request_id, created_time, model, payload)
+             return self._create_stream(request_id, created_time, model, payload, timeout=timeout, proxies=proxies)
          else:
-             return self._create_non_stream(request_id, created_time, model, payload)
+             return self._create_non_stream(request_id, created_time, model, payload, timeout=timeout, proxies=proxies)

      def _create_stream(
-         self, request_id: str, created_time: int, model: str, payload: Dict[str, Any]
+         self, request_id: str, created_time: int, model: str, payload: Dict[str, Any],
+         timeout: Optional[int] = None, proxies: Optional[Dict[str, str]] = None
      ) -> Generator[ChatCompletionChunk, None, None]:
          # DeepAI doesn't actually support streaming, but we'll implement it for compatibility
          # For now, just yield the non-stream response as a single chunk
+         original_proxies = self._client.session.proxies
+         if proxies is not None:
+             self._client.session.proxies = proxies
+         else:
+             self._client.session.proxies = {}
          try:
+             timeout_val = timeout if timeout is not None else self._client.timeout
              response = self._client.session.post(
                  "https://api.deepai.org/hacking_is_a_serious_crime",
                  data=payload,
-                 timeout=self._client.timeout
+                 timeout=timeout_val
              )

              if response.status_code != 200:
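
The new timeout and proxies parameters are applied per request by saving the session's proxy mapping, overriding it, and restoring it afterwards. A hedged, self-contained sketch of that save/override/restore idea (the temporary_proxies helper below is hypothetical, not part of webscout):

    from contextlib import contextmanager

    @contextmanager
    def temporary_proxies(session, proxies=None):
        # Save the current proxy mapping, swap in the per-request one,
        # and always restore the original even if the request raises.
        original = session.proxies
        session.proxies = proxies if proxies is not None else {}
        try:
            yield session
        finally:
            session.proxies = original

Wrapped around a session.post(...) call, this gives the same guarantee as the finally blocks added later in this diff.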
@@ -113,6 +111,11 @@ class Completions(BaseCompletions):
              # Get response text
              content = response.text.strip()

+             # Estimate token usage
+             prompt_tokens = count_tokens(payload.get("chatHistory", ""))
+             completion_tokens = count_tokens(content)
+             total_tokens = prompt_tokens + completion_tokens
+
              # Create the delta object
              delta = ChoiceDelta(
                  content=content,
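
Token usage is now estimated with webscout's count_tokens helper (imported near the top of this file) rather than raw character lengths. A small sketch of the revised estimate, with the old approximation noted in comments (estimate_usage is an illustrative helper, not part of the package):

    from webscout.Provider.OPENAI.utils import count_tokens

    def estimate_usage(chat_history: str, content: str) -> dict:
        prompt_tokens = count_tokens(chat_history)   # previously: len(json.dumps(history))
        completion_tokens = count_tokens(content)    # previously: len(content)
        return {
            "prompt_tokens": prompt_tokens,
            "completion_tokens": completion_tokens,
            "total_tokens": prompt_tokens + completion_tokens,
        }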
@@ -137,28 +140,40 @@ class Completions(BaseCompletions):
                  system_fingerprint=None
              )

-             # Set usage directly on the chunk object (estimated)
+             # Set usage directly on the chunk object
              chunk.usage = {
-                 "prompt_tokens": len(json.dumps(payload.get("chatHistory", []))),
-                 "completion_tokens": len(content),
-                 "total_tokens": len(json.dumps(payload.get("chatHistory", []))) + len(content),
+                 "prompt_tokens": prompt_tokens,
+                 "completion_tokens": completion_tokens,
+                 "total_tokens": total_tokens,
                  "estimated_cost": None
              }

              yield chunk

-         except Exception as e:
+         except RequestsError as e:
              print(f"Error during DeepAI stream request: {e}")
              raise IOError(f"DeepAI request failed: {e}") from e
+         except Exception as e:
+             print(f"Unexpected error during DeepAI stream request: {e}")
+             raise IOError(f"DeepAI request failed: {e}") from e
+         finally:
+             self._client.session.proxies = original_proxies

      def _create_non_stream(
-         self, request_id: str, created_time: int, model: str, payload: Dict[str, Any]
+         self, request_id: str, created_time: int, model: str, payload: Dict[str, Any],
+         timeout: Optional[int] = None, proxies: Optional[Dict[str, str]] = None
      ) -> ChatCompletion:
+         original_proxies = self._client.session.proxies
+         if proxies is not None:
+             self._client.session.proxies = proxies
+         else:
+             self._client.session.proxies = {}
          try:
+             timeout_val = timeout if timeout is not None else self._client.timeout
              response = self._client.session.post(
                  "https://api.deepai.org/hacking_is_a_serious_crime",
                  data=payload,
-                 timeout=self._client.timeout
+                 timeout=timeout_val
              )

              if response.status_code != 200:
@@ -167,6 +182,11 @@ class Completions(BaseCompletions):
              # Get response text
              content = response.text.strip()

+             # Estimate token usage
+             prompt_tokens = count_tokens(payload.get("chatHistory", ""))
+             completion_tokens = count_tokens(content)
+             total_tokens = prompt_tokens + completion_tokens
+
              # Create the message object
              message = ChatCompletionMessage(
                  role="assistant",
@@ -180,11 +200,11 @@ class Completions(BaseCompletions):
                  finish_reason="stop"
              )

-             # Create the usage object (estimated)
+             # Create the usage object
              usage = CompletionUsage(
-                 prompt_tokens=len(json.dumps(payload.get("chatHistory", []))),
-                 completion_tokens=len(content),
-                 total_tokens=len(json.dumps(payload.get("chatHistory", []))) + len(content)
+                 prompt_tokens=prompt_tokens,
+                 completion_tokens=completion_tokens,
+                 total_tokens=total_tokens
              )

              # Create the completion object
@@ -197,15 +217,31 @@ class Completions(BaseCompletions):
              )
              return completion

-         except Exception as e:
+         except RequestsError as e:
              print(f"Error during DeepAI non-stream request: {e}")
              raise IOError(f"DeepAI request failed: {e}") from e
+         except Exception as e:
+             print(f"Unexpected error during DeepAI non-stream request: {e}")
+             raise IOError(f"DeepAI request failed: {e}") from e
+         finally:
+             self._client.session.proxies = original_proxies

  class Chat(BaseChat):
      def __init__(self, client: 'DeepAI'):
          self.completions = Completions(client)

  class DeepAI(OpenAICompatibleProvider):
+     """
+     OpenAI-compatible client for DeepAI API.
+
+     Usage:
+         client = DeepAI()
+         response = client.chat.completions.create(
+             model="standard",
+             messages=[{"role": "user", "content": "Hello!"}]
+         )
+     """
+
      AVAILABLE_MODELS = [
          "standard",
          "genius",
@@ -233,7 +269,6 @@ class DeepAI(OpenAICompatibleProvider):
          "gemini-2.5-pro",
          "grok-code-fast-1",
          "gpt-4.1",
-
      ]

      def __init__(
@@ -244,31 +279,84 @@ class DeepAI(OpenAICompatibleProvider):
          model: str = "standard",
          chat_style: str = "chat",
          enabled_tools: Optional[List[str]] = None,
+         proxies: Optional[Dict[str, str]] = None,
          **kwargs
      ):
+         """
+         Initialize the DeepAI client.
+
+         Args:
+             api_key: DeepAI API key
+             timeout: Request timeout in seconds
+             browser: Browser type for fingerprinting
+             model: Default model to use
+             chat_style: Chat style parameter
+             enabled_tools: List of enabled tools
+             proxies: Optional proxy configuration
+         """
+         super().__init__(proxies=proxies)
          self.timeout = timeout
          self.api_key = api_key
          self.model = model
          self.chat_style = chat_style
          self.enabled_tools = enabled_tools or ["image_generator"]

-         # Initialize requests Session
-         self.session = requests.Session()
-
-         # Set up headers with API key
+         # Use LitAgent for fingerprint if available, else fallback
+         agent = LitAgent()
+         self.fingerprint = agent.generate_fingerprint(browser)
+
+         # Use the fingerprint for headers
          self.headers = {
              "Content-Type": "application/x-www-form-urlencoded",
              "api-key": self.api_key,
-             "Accept": "text/plain, */*",
-             "Accept-Encoding": "gzip, deflate, br",
-             "Accept-Language": "en-US,en;q=0.9",
-             "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
+             "Accept": self.fingerprint["accept"],
+             "Accept-Encoding": "gzip, deflate, br, zstd",
+             "Accept-Language": self.fingerprint["accept_language"],
+             "User-Agent": self.fingerprint["user_agent"],
+             "DNT": "1",
+             "Sec-CH-UA": self.fingerprint["sec_ch_ua"] or '"Not/A)Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
+             "Sec-CH-UA-Mobile": "?0",
+             "Sec-CH-UA-Platform": f'"{self.fingerprint["platform"]}"',
          }
-
-         # Update session headers
-         self.session.headers.update(self.headers) # Initialize chat interface
+
+         # Create session cookies with unique identifiers
+         self.cookies = {"__Host-session": uuid.uuid4().hex, '__cf_bm': uuid.uuid4().hex}
+
+         # Set consistent headers for the scraper session
+         self.session = Session()
+         for header, value in self.headers.items():
+             self.session.headers.update({header: value})
+
+         # Set cookies
+         self.session.cookies.update(self.cookies)
+
+         # Initialize the chat interface
          self.chat = Chat(self)

+     def refresh_identity(self, browser: str = None, impersonate: str = "chrome120"):
+         """Refreshes the browser identity fingerprint and curl_cffi session."""
+         browser = browser or self.fingerprint.get("browser_type", "chrome")
+         self.fingerprint = LitAgent().generate_fingerprint(browser)
+         self.session = Session(impersonate=impersonate)
+         # Update headers with new fingerprint
+         self.headers.update({
+             "Accept": self.fingerprint["accept"],
+             "Accept-Language": self.fingerprint["accept_language"],
+             "Sec-CH-UA": self.fingerprint["sec_ch_ua"] or self.headers["Sec-CH-UA"],
+             "Sec-CH-UA-Platform": f'"{self.fingerprint["platform"]}"',
+             "User-Agent": self.fingerprint["user_agent"],
+         })
+
+         # Update session headers
+         for header, value in self.headers.items():
+             self.session.headers.update({header: value})
+
+         # Generate new cookies
+         self.cookies = {"__Host-session": uuid.uuid4().hex, '__cf_bm': uuid.uuid4().hex}
+         self.session.cookies.update(self.cookies)
+
+         return self.fingerprint
+
      @classmethod
      def get_models(cls, api_key: str = None):
          """Fetch available models from DeepAI API.
@@ -1,3 +1,4 @@
+ #!/usr/bin/env python3
  from typing import List, Dict, Optional, Union, Generator, Any
  import time
  import json
@@ -152,7 +153,7 @@ class Flowith(OpenAICompatibleProvider):
      AVAILABLE_MODELS = [
          "gpt-5-nano", "gpt-5-mini", "glm-4.5", "gpt-oss-120b", "gpt-oss-20b", "kimi-k2",
          "gpt-4.1", "gpt-4.1-mini", "deepseek-chat", "deepseek-reasoner",
-         "gemini-2.5-flash", "grok-3-mini"
+         "gemini-2.5-flash", "grok-3-mini", "claude-haiku-4.5"
      ]

      chat: Chat
@@ -171,7 +172,7 @@ if __name__ == "__main__":
      client = Flowith()
      messages = [{"role": "user", "content": "Hello, how are you?"}]
      response = client.chat.completions.create(
-         model="gpt-5-nano",
+         model="gpt-oss-120b",
          messages=messages,
          stream=True
      )
@@ -261,7 +261,10 @@ class GMI(OpenAICompatibleProvider):
          "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8",
          "zai-org/GLM-4.5-Air-FP8",
          "zai-org/GLM-4.5-FP8",
-         "zai-org/GLM-4.6"
+         "zai-org/GLM-4.6",
+         "openai/gpt-oss-20b",
+         "openai/gpt-oss-120b"
+
      ]

      def __init__(self, browser: str = "chrome", api_key: str = None, **kwargs):
webscout/version.py CHANGED
@@ -1,2 +1,2 @@
- __version__ = "2025.10.19.2"
+ __version__ = "2025.10.22.1"
  __prog__ = "webscout"
webscout/version.py.bak CHANGED
@@ -1,2 +1,2 @@
- __version__ = "2025.10.19.1"
+ __version__ = "2025.10.22"
  __prog__ = "webscout"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: webscout
- Version: 2025.10.19.2
+ Version: 2025.10.22.1
  Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
  Author-email: OEvortex <helpingai5@gmail.com>
  License: HelpingAI
@@ -14,8 +14,8 @@ webscout/prompt_manager.py,sha256=ysKFgPhkV3uqrOCilqcS9rG8xhzdU_d2wx0grC9WCCc,98
  webscout/sanitize.py,sha256=pw2Dzn-Jw9mOD4mpALYAvAf-medA-9AqdzsOmdXQbl0,46577
  webscout/update_checker.py,sha256=bz0TzRxip9DOIVMFyNz9HsGj4RKB0xZgo57AUVSJINo,3708
  webscout/utils.py,sha256=o2hU3qaVPk25sog3e4cyVZO3l8xwaZpYRziZPotEzNo,3075
- webscout/version.py,sha256=N_VqqQ31d_XNbPVnrX8RC1vAP0b8CdNu1xPPZHxN33A,53
- webscout/version.py.bak,sha256=784ARdA7UvryqoKR4H7eKxEhubkQarVxJeBzZNM5p7E,53
+ webscout/version.py,sha256=tpDfve30bR6-BnhG1CAgIUu7vn3hmQXpXe5Hbzdr_hg,53
+ webscout/version.py.bak,sha256=nfux5jrqPs5E5KYUPEBPASSB2Z1rinLgUlMZThId2uo,51
  webscout/Extra/Act.md,sha256=_C2VW_Dc-dc7eejpGYKAOZhImHKPiQ7NSwE3bkzr6fg,18952
  webscout/Extra/__init__.py,sha256=KvJRsRBRO-fZp2jSCl6KQnPppi93hriA6O_U1O1s31c,177
  webscout/Extra/gguf.md,sha256=McXGz5sTfzOO9X4mH8yIqu5K3CgjzyXKi4_HQtezdZ4,12435
@@ -70,12 +70,12 @@ webscout/Provider/ChatSandbox.py,sha256=Hl8vOQzij7VyYVoL3DvJO6HGUs6tXZY3xrbCLKrF
  webscout/Provider/ClaudeOnline.py,sha256=3J5LEjvxzpYgIcycCq1aG_kFjks7ECkJS6l0HQ5bEyQ,12748
  webscout/Provider/Cloudflare.py,sha256=nrHCZ9SYNNRIxxzR_QRU1fy-jh31WnErxIimF0aDZms,14155
  webscout/Provider/Cohere.py,sha256=wPULeG_2JZdhN8oTBjs_QNqs6atjkYkjCa01mRmg8Fw,8082
- webscout/Provider/DeepAI.py,sha256=z7TBsidQkxfrGvlBGARcdUDclQnLBq5wC1euzHLiEI8,12661
+ webscout/Provider/DeepAI.py,sha256=eKPauGUMdWG14v4593HwfLKmAFhWE90WZ6_X5LCby9Q,12660
  webscout/Provider/Deepinfra.py,sha256=Z3FNMaaVd4KiitDG8LBgGWycNuT6Y1Z06sCFURd0Ynw,15882
  webscout/Provider/ExaAI.py,sha256=HQ0BH1lInjsrpPSfIZkZf52q_gbHmnFnMJtRiZoxTXw,9548
  webscout/Provider/ExaChat.py,sha256=6ryax7zFeUrFTBa3inMrOGPxY-tfbavDQIgOTZr0-cY,11700
- webscout/Provider/Flowith.py,sha256=5Foxpx1Jj4HhMkWrPNo8hZDMhtC-YfCCKgTCXsPhUoQ,8497
- webscout/Provider/GMI.py,sha256=NkFVwrGFPu_dqy4YWPAjgPHmNVX-jltYuBH623ooZvs,11366
+ webscout/Provider/Flowith.py,sha256=GUYdf_AFR2qWx_OHtSPzAK5vTcaBfgv0GeWVMoAQnC8,8517
+ webscout/Provider/GMI.py,sha256=fB2yIDel5MRQd_mjhh8mrbwqXKIkoLZDrIkRbxgNymc,11428
  webscout/Provider/Gemini.py,sha256=Idpl9B_2yF2hK8agb6B4Qnvg6jmaQT008aOx8M2w2O4,6288
  webscout/Provider/GeminiProxy.py,sha256=JzOnUMNEcriTXbVZvp9SauYWx4ekgCj2DyRyD-jUj9M,6515
  webscout/Provider/GithubChat.py,sha256=FeRQfy1C9gxPlDmfH0VfBgd6CSCmN1XI6YES1Mp9mQM,14374
@@ -88,7 +88,7 @@ webscout/Provider/Koboldai.py,sha256=jv0zVxMp_Y56qZGZY8K_2DY9ysB0GzneEujTNd8W-Hw
  webscout/Provider/LambdaChat.py,sha256=SrvKTlEiqTX-e6ixCQ68e7DJVyDmd9MBnWMlnfcvQOk,18983
  webscout/Provider/Nemotron.py,sha256=Sj2D3Vng6icocejV45wWKvXYh8NG_pYMkfH-F1UL4CA,8838
  webscout/Provider/Netwrck.py,sha256=Wni4zV1J2MLt_G-sKwEdgsSwQTlGCZ1nKrD8akdG9LY,10295
- webscout/Provider/OLLAMA.py,sha256=1FEXxOwPX67Hl022aHyQipk_MLptCiNkyZSSwxOqIOA,14692
+ webscout/Provider/OLLAMA.py,sha256=PSnRnxiW18L3Gy827D2JPsOBNTJDfOTG-8whR-jHj9E,14806
  webscout/Provider/OpenGPT.py,sha256=R2H0iewJmaaW-KeHVOCPaL1lMyagy1KvrTALxhOBgQU,9389
  webscout/Provider/Openai.py,sha256=yxPXvACdA7cOyBEUN_fCbDujCzhpzXHVXlhteeg6JRo,9381
  webscout/Provider/PI.py,sha256=CFD_z6UFm0FKMvALSSefCdQ_fM-fRqpLRuXVmMJ2s3w,16230
@@ -111,7 +111,6 @@ webscout/Provider/akashgpt.py,sha256=PjRgZL0hxfhZPydn4a6tOVCa18SCseV6QJjXu7LZauY
  webscout/Provider/cerebras.py,sha256=C0rbHL65sVFUHe7zx0UbIlWhA06qUKvip5txgRsp_bU,17030
  webscout/Provider/chatglm.py,sha256=hAWtwlAUefQLc9zh3ji3-IJwH7z2fV-4tLN5_Wi0VAM,15887
  webscout/Provider/cleeai.py,sha256=WpSOoJZ69ttEosbJNH3J4UAkoOTOCy1hXyTjZsAzMTw,7782
- webscout/Provider/deepseek_assistant.py,sha256=7jxTWEUwvGwvj8NsSjk8PSvNKUgxQXPp8GwD7JcufC0,14582
  webscout/Provider/elmo.py,sha256=tjqB8zxmpKb_Ps0zJ_nd63KQ8FbwzUEEKWR0_Mhc20Y,12618
  webscout/Provider/geminiapi.py,sha256=xvxQzTX36MTb2ukiKjhfzomGR3OXOmtg40eMrYLB5rA,8321
  webscout/Provider/granite.py,sha256=u5-kyemo3lmPMc_R-OWCfusZMy-olmKo1hhzJ9ZYWLQ,11015
@@ -144,8 +143,7 @@ webscout/Provider/AISEARCH/scira_search.py,sha256=YeSgjsZPgcoTZDmtTmcLlROQhLK9Wo
  webscout/Provider/AISEARCH/stellar_search.py,sha256=BFEGmcOHZUtFx-Z4tqUIrgZ-qgdzI76FcilvI80pPh4,8634
  webscout/Provider/AISEARCH/webpilotai_search.py,sha256=C7j-xe2If6FwS-YyXkn8U5-Uw09eG7ZrESiCFJo9eYo,11256
  webscout/Provider/OPENAI/Cloudflare.py,sha256=RGf1aH08UzkxRq9hF3nmKbkOrDzGXU_KFkdtsE8SVpY,14454
- webscout/Provider/OPENAI/DeepAI.py,sha256=L24hPoM4uDv_7UniF6DtJqasA7HzLc2X2JPqTEmC12Q,9465
- webscout/Provider/OPENAI/FalconH1.py,sha256=SlMZF-2TzquEsKFTuPGR039OnJ3Z4ro49nuLyNFT0Sk,21880
+ webscout/Provider/OPENAI/DeepAI.py,sha256=IeGpsbsW8URM3Lulfp3VGetZOqVEq9wK-AjfZA7d9Lw,13743
  webscout/Provider/OPENAI/FreeGemini.py,sha256=C8ZdV0FxzP4D2g5scW1Fp7zG4BmV-Cjztdp0KeuQqIw,10919
  webscout/Provider/OPENAI/GeminiProxy.py,sha256=9_6VHFylM3-ct0m5XDvxfZ1tmd70RnyZl5HT-qv1g4E,11266
  webscout/Provider/OPENAI/K2Think.py,sha256=bNdq-oy2ie8PH7r6RDX7ZosYKFGjqzLSBvC2d_HAWAg,14822
@@ -165,10 +163,10 @@ webscout/Provider/OPENAI/deepinfra.py,sha256=RzlBVBTsrLeRpTV8PrZxlqEN0XTRgnL2Jtm
  webscout/Provider/OPENAI/e2b.py,sha256=1Eg70mzeh31kyCfctvVLQVODLBz3LPPtUPcQBbksYZ4,72311
  webscout/Provider/OPENAI/exaai.py,sha256=NKsmz8mka3jncDe7S-jeJpRbw26ds2fqAvChd9ltNpM,14646
  webscout/Provider/OPENAI/exachat.py,sha256=xxT-COXVbCgjUYyi4Zu469eUSSwABYYLdQ7HljLm6a8,15409
- webscout/Provider/OPENAI/flowith.py,sha256=ZNgVFeEBf0Pj4Ey11-RrEP8qlLKNzJul6FA-Tg-Y8hA,6638
+ webscout/Provider/OPENAI/flowith.py,sha256=LMof65tofDbYfg4oqzvjolC1DEpSj_4BOUEAhzsHWnY,6683
  webscout/Provider/OPENAI/friendli.py,sha256=NlTNz-3nBFPKA1xXwZx8aJPsuQh-_QB3AzM14x5Z3Qw,10214
  webscout/Provider/OPENAI/generate_api_key.py,sha256=yh8rUBbNLdbe-uetelw2sVfPaNNx7CYIHoDfcyEjRy4,1490
- webscout/Provider/OPENAI/gmi.py,sha256=cUuSsi0kAvTxp1RbhwfKIX2UAIqyv2uKssRs57cB4Bo,13009
+ webscout/Provider/OPENAI/gmi.py,sha256=oNAiEiJkxBAR7Vmw0kqmGlasV17f9hRkNqY6f6EV3VE,13071
  webscout/Provider/OPENAI/groq.py,sha256=Kw5mm___iKDte1XXumEd0aCWQSDr9WioX_lpL07KGx4,14200
  webscout/Provider/OPENAI/heckai.py,sha256=XCh_D8KccmLtDATcp9WJ0RuE0tXhklq9dBrmVctcVto,11457
  webscout/Provider/OPENAI/llmchatco.py,sha256=izvK7XENNZCm6QugZ4f6DfALuMCjO4tLlg2izpyO3fM,15034
@@ -332,9 +330,9 @@ webscout/zeroart/__init__.py,sha256=Cy9AUtXnOaFBQjNvCpN19IXJo7Lg15VTaNcTBxOTFek,
  webscout/zeroart/base.py,sha256=I-xhDEfArBb6q7hiF5oPoyXeu2hzL6orp7uWgS_YtG8,2299
  webscout/zeroart/effects.py,sha256=XUNZY1-wMPd6GNL3glFXtWaF9wDis_z55qTyCdnzHDo,5063
  webscout/zeroart/fonts.py,sha256=S7qDhUmDXl1makMreZl_eVW_7-sqVQiGn-kQKl0Hg_A,51006
- webscout-2025.10.19.2.dist-info/licenses/LICENSE.md,sha256=hyfFlVn7pWcrvuvs-piB8k4J8DlXdOsYje9RyPxc6Ik,7543
- webscout-2025.10.19.2.dist-info/METADATA,sha256=J5rqcgdqYu5uwHQrJD-fB-e76eGTAeNcKPMp0fGWnig,21640
- webscout-2025.10.19.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- webscout-2025.10.19.2.dist-info/entry_points.txt,sha256=4xAgKHWwNhAvJyShLCFs_IU8Reb8zR3wqf8egrsDr8g,118
- webscout-2025.10.19.2.dist-info/top_level.txt,sha256=nYIw7OKBQDr_Z33IzZUKidRD3zQEo8jOJYkMVMeN334,9
- webscout-2025.10.19.2.dist-info/RECORD,,
+ webscout-2025.10.22.1.dist-info/licenses/LICENSE.md,sha256=hyfFlVn7pWcrvuvs-piB8k4J8DlXdOsYje9RyPxc6Ik,7543
+ webscout-2025.10.22.1.dist-info/METADATA,sha256=hOXQdp-SN7a864HFBckSK_EUBn-ZfauOnxTnK0AP4Gk,21640
+ webscout-2025.10.22.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ webscout-2025.10.22.1.dist-info/entry_points.txt,sha256=4xAgKHWwNhAvJyShLCFs_IU8Reb8zR3wqf8egrsDr8g,118
+ webscout-2025.10.22.1.dist-info/top_level.txt,sha256=nYIw7OKBQDr_Z33IzZUKidRD3zQEo8jOJYkMVMeN334,9
+ webscout-2025.10.22.1.dist-info/RECORD,,