webscout 7.4-py3-none-any.whl → 7.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic.

Files changed (42)
  1. webscout/Provider/C4ai.py +414 -0
  2. webscout/Provider/Cloudflare.py +18 -21
  3. webscout/Provider/DeepSeek.py +3 -32
  4. webscout/Provider/Deepinfra.py +30 -21
  5. webscout/Provider/GithubChat.py +362 -0
  6. webscout/Provider/HeckAI.py +20 -3
  7. webscout/Provider/HuggingFaceChat.py +462 -0
  8. webscout/Provider/Marcus.py +7 -50
  9. webscout/Provider/Netwrck.py +6 -53
  10. webscout/Provider/Phind.py +29 -3
  11. webscout/Provider/TTI/aiarta/__init__.py +2 -0
  12. webscout/Provider/TTI/aiarta/async_aiarta.py +482 -0
  13. webscout/Provider/TTI/aiarta/sync_aiarta.py +409 -0
  14. webscout/Provider/Venice.py +200 -200
  15. webscout/Provider/Youchat.py +1 -1
  16. webscout/Provider/__init__.py +13 -2
  17. webscout/Provider/akashgpt.py +8 -5
  18. webscout/Provider/copilot.py +416 -0
  19. webscout/Provider/flowith.py +181 -0
  20. webscout/Provider/granite.py +17 -53
  21. webscout/Provider/llamatutor.py +6 -46
  22. webscout/Provider/llmchat.py +7 -46
  23. webscout/Provider/multichat.py +29 -91
  24. webscout/exceptions.py +19 -9
  25. webscout/update_checker.py +55 -93
  26. webscout/version.py +1 -1
  27. webscout-7.5.dist-info/LICENSE.md +146 -0
  28. {webscout-7.4.dist-info → webscout-7.5.dist-info}/METADATA +5 -126
  29. {webscout-7.4.dist-info → webscout-7.5.dist-info}/RECORD +32 -33
  30. webscout/Local/__init__.py +0 -10
  31. webscout/Local/_version.py +0 -3
  32. webscout/Local/formats.py +0 -747
  33. webscout/Local/model.py +0 -1368
  34. webscout/Local/samplers.py +0 -125
  35. webscout/Local/thread.py +0 -539
  36. webscout/Local/ui.py +0 -401
  37. webscout/Local/utils.py +0 -388
  38. webscout/Provider/dgaf.py +0 -214
  39. webscout-7.4.dist-info/LICENSE.md +0 -211
  40. {webscout-7.4.dist-info → webscout-7.5.dist-info}/WHEEL +0 -0
  41. {webscout-7.4.dist-info → webscout-7.5.dist-info}/entry_points.txt +0 -0
  42. {webscout-7.4.dist-info → webscout-7.5.dist-info}/top_level.txt +0 -0
webscout/Provider/Netwrck.py

@@ -8,7 +8,6 @@ from datetime import date
 from webscout.AIutel import Optimizers, Conversation, AwesomePrompts
 from webscout.AIbase import Provider
 from webscout import exceptions
-from webscout.Litlogger import Logger, LogFormat
 from webscout.litagent import LitAgent

 class Netwrck(Provider):
@@ -20,7 +19,7 @@ class Netwrck(Provider):
     AVAILABLE_MODELS = {
         "lumimaid": "neversleep/llama-3-lumimaid-8b:extended",
         "grok": "x-ai/grok-2",
-        "claude": "anthropic/claude-3.5-sonnet:beta",
+        "claude": "anthropic/claude-3-7-sonnet-20250219",
         "euryale": "sao10k/l3-euryale-70b",
         "gpt4mini": "openai/gpt-4o-mini",
         "mythomax": "gryphe/mythomax-l2-13b",
@@ -44,22 +43,11 @@ class Netwrck(Provider):
         act: Optional[str] = None,
         system_prompt: str = "You are a helpful assistant.",
         temperature: float = 0.7,
-        top_p: float = 0.8,
-        logging: bool = False
+        top_p: float = 0.8
     ):
         """Initializes the Netwrck API client."""
-        # Initialize logger first for initialization logging
-        self.logger = Logger(
-            name="Netwrck",
-            format=LogFormat.MODERN_EMOJI,
-
-        ) if logging else None
-
         if model not in self.AVAILABLE_MODELS:
-            error_msg = f"Invalid model: {model}. Choose from: {list(self.AVAILABLE_MODELS.keys())}"
-            if self.logger:
-                self.logger.error(error_msg)
-            raise ValueError(error_msg)
+            raise ValueError(f"Invalid model: {model}. Choose from: {list(self.AVAILABLE_MODELS.keys())}")

         self.model = model
         self.model_name = self.AVAILABLE_MODELS[model]
@@ -99,9 +87,6 @@ class Netwrck(Provider):
             if callable(getattr(Optimizers, method)) and not method.startswith("__")
         )

-        if self.logger:
-            self.logger.info(f"Initialized Netwrck with model: {self.model_name}")
-
     def ask(
         self,
         prompt: str,
@@ -112,18 +97,13 @@ class Netwrck(Provider):
     ) -> Union[Dict[str, Any], Generator]:
         """Sends a prompt to the Netwrck API and returns the response."""
         if optimizer and optimizer not in self.__available_optimizers:
-            error_msg = f"Optimizer is not one of {self.__available_optimizers}"
-            if self.logger:
-                self.logger.error(f"Invalid optimizer requested: {optimizer}")
-            raise exceptions.FailedToGenerateResponseError(error_msg)
+            raise exceptions.FailedToGenerateResponseError(f"Optimizer is not one of {self.__available_optimizers}")

         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
         if optimizer:
             conversation_prompt = getattr(Optimizers, optimizer)(
                 conversation_prompt if conversationally else prompt
             )
-            if self.logger:
-                self.logger.debug(f"Applied optimizer: {optimizer}")

         payload = {
             "query": prompt,
@@ -133,9 +113,6 @@ class Netwrck(Provider):
             "greeting": self.greeting
         }

-        if self.logger:
-            self.logger.debug(f"Sending request to Netwrck API [stream={stream}]")
-
         def for_stream():
             try:
                 response = self.session.post(
@@ -158,12 +135,8 @@ class Netwrck(Provider):
                 self.conversation.update_chat_history(payload["query"], streaming_text)

             except requests.exceptions.RequestException as e:
-                if self.logger:
-                    self.logger.error(f"Network error: {str(e)}")
                 raise exceptions.ProviderConnectionError(f"Network error: {str(e)}") from e
             except Exception as e:
-                if self.logger:
-                    self.logger.error(f"Unexpected error: {str(e)}")
                 raise exceptions.ProviderConnectionError(f"Unexpected error: {str(e)}") from e

         def for_non_stream():
@@ -177,9 +150,6 @@ class Netwrck(Provider):
                 )
                 response.raise_for_status()

-                if self.logger:
-                    self.logger.debug(f"Response status: {response.status_code}")
-
                 text = response.text.strip('"')
                 self.last_response = {"text": text}
                 self.conversation.update_chat_history(prompt, text)
@@ -187,12 +157,8 @@ class Netwrck(Provider):
                 return self.last_response

             except requests.exceptions.RequestException as e:
-                if self.logger:
-                    self.logger.error(f"Network error: {str(e)}")
                 raise exceptions.FailedToGenerateResponseError(f"Network error: {str(e)}") from e
             except Exception as e:
-                if self.logger:
-                    self.logger.error(f"Unexpected error: {str(e)}")
                 raise exceptions.FailedToGenerateResponseError(f"Unexpected error: {str(e)}") from e

         return for_stream() if stream else for_non_stream()
@@ -205,9 +171,6 @@ class Netwrck(Provider):
         conversationally: bool = False,
     ) -> str:
         """Generates a response from the Netwrck API."""
-        if self.logger:
-            self.logger.debug(f"Processing chat request [stream={stream}]")
-
         def for_stream():
             for response in self.ask(
                 prompt,
@@ -237,15 +200,5 @@ class Netwrck(Provider):
 if __name__ == "__main__":
     from rich import print

-    # Example with logging enabled
-    netwrck = Netwrck(model="claude", logging=False)
-
-    print("Non-Streaming Response:")
-    response = netwrck.chat("Tell me about Russia")
-    print(response)
-
-    print("\nStreaming Response:")
-    response = netwrck.chat("Tell me about India", stream=True)
-    for chunk in response:
-        print(chunk, end="", flush=True)
-    print()
+    netwrck = Netwrck(model="claude")
+    print(netwrck.chat("Hello! How are you?"))
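With the logger removed, the constructor no longer accepts a `logging` keyword, and invalid model aliases fail fast with `ValueError`. A usage sketch based only on the signatures visible in this diff (streaming behaviour taken from the example that was removed):

    from webscout.Provider.Netwrck import Netwrck

    bot = Netwrck(model="claude")               # passing logging=... would now raise TypeError per the signature above

    # Non-streaming: chat() returns the full reply as a string.
    print(bot.chat("Hello! How are you?"))

    # Streaming: chat(..., stream=True) yields text chunks as they arrive.
    for chunk in bot.chat("Tell me about India", stream=True):
        print(chunk, end="", flush=True)
    print()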
webscout/Provider/Phind.py

@@ -13,7 +13,17 @@ from typing import Any, AsyncGenerator, Dict

 #------------------------------------------------------phind-------------------------------------------------------------
 class PhindSearch:
-    # default_model = "Phind Model"
+    # Available models for Phind
+    AVAILABLE_MODELS = [
+        "Phind Model",
+        "Claude 3.7 Sonnet",
+        "Claude Opus",
+        "GPT-4o",
+        "o3-mini",
+        "Phind-405B",
+        "Phind-70B"
+    ]
+
     def __init__(
         self,
         is_conversation: bool = True,
@@ -43,6 +53,9 @@ class PhindSearch:
             model (str, optional): Model name. Defaults to "Phind Model".
             quiet (bool, optional): Ignore web search-results and yield final response only. Defaults to False.
         """
+        if model not in self.AVAILABLE_MODELS:
+            raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")
+
         self.session = requests.Session()
         self.max_tokens_to_sample = max_tokens
         self.is_conversation = is_conversation
@@ -256,6 +269,16 @@ class PhindSearch:
         )

 class Phindv2(Provider):
+    # Available models for Phindv2
+    AVAILABLE_MODELS = [
+        "Claude 3.7 Sonnet",
+        "Claude Opus",
+        "GPT-4o",
+        "o3-mini",
+        "Phind-405B",
+        "Phind-70B"
+    ]
+
     def __init__(
         self,
         is_conversation: bool = True,
@@ -267,7 +290,7 @@ class Phindv2(Provider):
         proxies: dict = {},
         history_offset: int = 10250,
         act: str = None,
-        model: str = "Phind Instant",
+        model: str = "Claude 3.7 Sonnet",
         quiet: bool = False,
         system_prompt: str = "Be Helpful and Friendly",
     ):
@@ -287,6 +310,9 @@ class Phindv2(Provider):
             quiet (bool, optional): Ignore web search-results and yield final response only. Defaults to False.
             system_prompt (str, optional): System prompt for Phindv2. Defaults to "Be Helpful and Friendly".
         """
+        if model not in self.AVAILABLE_MODELS:
+            raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")
+
         self.session = requests.Session()
         self.max_tokens_to_sample = max_tokens
         self.is_conversation = is_conversation
@@ -505,5 +531,5 @@ if __name__ == "__main__":
     from rich import print

     ai = Phindv2()
-    print(ai.chat("Tell me a joke"))
+    print(ai.chat("Who are u"))
     # Returns the chat response from the Phindv2 API.
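Both `PhindSearch` and `Phindv2` now validate the requested model against their `AVAILABLE_MODELS` lists, and `Phindv2` defaults to `Claude 3.7 Sonnet`. A hedged sketch of the resulting behaviour (import path assumed from the file layout):

    from webscout.Provider.Phind import PhindSearch, Phindv2

    ai = Phindv2()                              # default model is now "Claude 3.7 Sonnet"
    print(ai.chat("Who are u"))

    try:
        PhindSearch(model="gpt-3.5")            # hypothetical name not in AVAILABLE_MODELS
    except ValueError as err:
        print(err)                              # Invalid model: gpt-3.5. Choose from: [...]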
webscout/Provider/TTI/aiarta/__init__.py

@@ -0,0 +1,2 @@
+from .async_aiarta import *
+from .sync_aiarta import *
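The new package `__init__.py` simply re-exports everything from both implementation modules, so any public name defined in `sync_aiarta.py` or `async_aiarta.py` becomes importable from the package itself. Illustrative only; the concrete provider class names live in those modules and are not shown in this diff:

    # Both forms resolve to the same objects once webscout 7.5 is installed:
    import webscout.Provider.TTI.aiarta as aiarta    # access exported names via the package
    from webscout.Provider.TTI.aiarta import *       # or pull them into the local namespace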