webscout-7.3-py3-none-any.whl → webscout-7.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of webscout might be problematic.

Files changed (62)
  1. webscout/Provider/AISEARCH/__init__.py +4 -3
  2. webscout/Provider/AISEARCH/genspark_search.py +208 -0
  3. webscout/Provider/AllenAI.py +282 -0
  4. webscout/Provider/C4ai.py +414 -0
  5. webscout/Provider/Cloudflare.py +18 -21
  6. webscout/Provider/DeepSeek.py +3 -32
  7. webscout/Provider/Deepinfra.py +52 -44
  8. webscout/Provider/ElectronHub.py +634 -0
  9. webscout/Provider/GithubChat.py +362 -0
  10. webscout/Provider/Glider.py +7 -41
  11. webscout/Provider/HeckAI.py +217 -0
  12. webscout/Provider/HuggingFaceChat.py +462 -0
  13. webscout/Provider/Jadve.py +49 -63
  14. webscout/Provider/Marcus.py +7 -50
  15. webscout/Provider/Netwrck.py +6 -53
  16. webscout/Provider/PI.py +106 -93
  17. webscout/Provider/Perplexitylabs.py +395 -0
  18. webscout/Provider/Phind.py +29 -3
  19. webscout/Provider/QwenLM.py +7 -61
  20. webscout/Provider/TTI/__init__.py +1 -0
  21. webscout/Provider/TTI/aiarta/__init__.py +2 -0
  22. webscout/Provider/TTI/aiarta/async_aiarta.py +482 -0
  23. webscout/Provider/TTI/aiarta/sync_aiarta.py +409 -0
  24. webscout/Provider/TTI/piclumen/__init__.py +23 -0
  25. webscout/Provider/TTI/piclumen/async_piclumen.py +268 -0
  26. webscout/Provider/TTI/piclumen/sync_piclumen.py +233 -0
  27. webscout/Provider/TextPollinationsAI.py +3 -2
  28. webscout/Provider/TwoAI.py +200 -0
  29. webscout/Provider/Venice.py +200 -0
  30. webscout/Provider/WiseCat.py +1 -18
  31. webscout/Provider/Youchat.py +1 -1
  32. webscout/Provider/__init__.py +25 -2
  33. webscout/Provider/akashgpt.py +315 -0
  34. webscout/Provider/chatglm.py +5 -5
  35. webscout/Provider/copilot.py +416 -0
  36. webscout/Provider/flowith.py +181 -0
  37. webscout/Provider/freeaichat.py +251 -221
  38. webscout/Provider/granite.py +17 -53
  39. webscout/Provider/koala.py +9 -1
  40. webscout/Provider/llamatutor.py +6 -46
  41. webscout/Provider/llmchat.py +7 -46
  42. webscout/Provider/multichat.py +29 -91
  43. webscout/Provider/yep.py +4 -24
  44. webscout/exceptions.py +19 -9
  45. webscout/update_checker.py +55 -93
  46. webscout/version.py +1 -1
  47. webscout-7.5.dist-info/LICENSE.md +146 -0
  48. {webscout-7.3.dist-info → webscout-7.5.dist-info}/METADATA +46 -172
  49. {webscout-7.3.dist-info → webscout-7.5.dist-info}/RECORD +52 -42
  50. webscout/Local/__init__.py +0 -10
  51. webscout/Local/_version.py +0 -3
  52. webscout/Local/formats.py +0 -747
  53. webscout/Local/model.py +0 -1368
  54. webscout/Local/samplers.py +0 -125
  55. webscout/Local/thread.py +0 -539
  56. webscout/Local/ui.py +0 -401
  57. webscout/Local/utils.py +0 -388
  58. webscout/Provider/dgaf.py +0 -214
  59. webscout-7.3.dist-info/LICENSE.md +0 -211
  60. {webscout-7.3.dist-info → webscout-7.5.dist-info}/WHEEL +0 -0
  61. {webscout-7.3.dist-info → webscout-7.5.dist-info}/entry_points.txt +0 -0
  62. {webscout-7.3.dist-info → webscout-7.5.dist-info}/top_level.txt +0 -0
@@ -9,14 +9,57 @@ from webscout.AIutel import AwesomePrompts, sanitize_stream
 from webscout.AIbase import Provider, AsyncProvider
 from webscout import exceptions
 from webscout import LitAgent
-from webscout.Litlogger import Logger, LogFormat, ConsoleHandler
-from webscout.Litlogger.core.level import LogLevel
 
 class DeepInfra(Provider):
     """
-    A class to interact with the DeepInfra API with logging and LitAgent user-agent.
+    A class to interact with the DeepInfra API with LitAgent user-agent.
     """
 
+    AVAILABLE_MODELS = [
+        "anthropic/claude-3-7-sonnet-latest",
+        "deepseek-ai/DeepSeek-R1",
+        "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
+        "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
+        "deepseek-ai/DeepSeek-R1-Turbo",
+        "deepseek-ai/DeepSeek-V3",
+        "google/gemma-2-27b-it",
+        "google/gemma-2-9b-it",
+        "google/gemma-3-27b-it",
+        "google/gemini-1.5-flash",
+        "google/gemini-1.5-flash-8b",
+        "google/gemini-2.0-flash-001",
+        "Gryphe/MythoMax-L2-13b",
+        "meta-llama/Llama-3.2-1B-Instruct",
+        "meta-llama/Llama-3.2-3B-Instruct",
+        "meta-llama/Llama-3.2-90B-Vision-Instruct",
+        "meta-llama/Llama-3.2-11B-Vision-Instruct",
+        "meta-llama/Meta-Llama-3-70B-Instruct",
+        "meta-llama/Meta-Llama-3-8B-Instruct",
+        "meta-llama/Meta-Llama-3.1-70B-Instruct",
+        "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
+        "meta-llama/Meta-Llama-3.1-8B-Instruct",
+        "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
+        "meta-llama/Meta-Llama-3.1-405B-Instruct",
+        "microsoft/phi-4",
+        "microsoft/Phi-4-multimodal-instruct",
+        "microsoft/WizardLM-2-8x22B",
+        "mistralai/Mixtral-8x7B-Instruct-v0.1",
+        "mistralai/Mistral-7B-Instruct-v0.3",
+        "mistralai/Mistral-Nemo-Instruct-2407",
+        "mistralai/Mistral-Small-24B-Instruct-2501",
+        "nvidia/Llama-3.1-Nemotron-70B-Instruct",
+        "NousResearch/Hermes-3-Llama-3.1-405B",
+        "NovaSky-AI/Sky-T1-32B-Preview",
+        "Qwen/QwQ-32B",
+        "Qwen/Qwen2.5-7B-Instruct",
+        "Qwen/Qwen2.5-72B-Instruct",
+        "Qwen/Qwen2.5-Coder-32B-Instruct",
+        "Sao10K/L3.1-70B-Euryale-v2.2",
+        "Sao10K/L3.3-70B-Euryale-v2.3",
+        "meta-llama/Llama-3.3-70B-Instruct",
+        "meta-llama/Llama-3.3-70B-Instruct-Turbo",
+    ]
+
     def __init__(
         self,
         is_conversation: bool = True,
@@ -28,10 +71,12 @@ class DeepInfra(Provider):
         proxies: dict = {},
         history_offset: int = 10250,
         act: str = None,
-        model: str = "Qwen/Qwen2.5-72B-Instruct",
-        logging: bool = False
+        model: str = "meta-llama/Llama-3.3-70B-Instruct-Turbo"  # Updated default model
     ):
-        """Initializes the DeepInfra API client with logging support."""
+        """Initializes the DeepInfra API client."""
+        if model not in self.AVAILABLE_MODELS:
+            raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")
+
         self.url = "https://api.deepinfra.com/v1/openai/chat/completions"
         # Use LitAgent for user-agent instead of hardcoded string.
         self.headers = {
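
The new AVAILABLE_MODELS guard means an unsupported model name now fails fast at construction time instead of surfacing later as an HTTP error. A minimal sketch of that behavior, assuming webscout 7.5 is installed (the import path follows the file layout above; "not-a-real/model" is a deliberately invalid placeholder):

from webscout.Provider.Deepinfra import DeepInfra

try:
    # Not in AVAILABLE_MODELS, so __init__ raises before any request is made.
    DeepInfra(model="not-a-real/model")
except ValueError as e:
    print(e)  # Invalid model: not-a-real/model. Choose from: [...]
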
@@ -80,21 +125,6 @@ class DeepInfra(Provider):
         )
         self.conversation.history_offset = history_offset
 
-        # Initialize logger with proper configuration
-        if logging:
-            console_handler = ConsoleHandler(
-                level=LogLevel.DEBUG,
-            )
-
-            self.logger = Logger(
-                name="DeepInfra",
-                level=LogLevel.DEBUG,
-                handlers=[console_handler]
-            )
-            self.logger.info("DeepInfra initialized successfully ✨")
-        else:
-            self.logger = None
-
     def ask(
         self,
         prompt: str,
@@ -103,20 +133,13 @@ class DeepInfra(Provider):
         optimizer: str = None,
         conversationally: bool = False,
     ) -> Union[Dict[str, Any], Generator]:
-        if self.logger:
-            self.logger.debug(f"Processing request - Stream: {stream}, Optimizer: {optimizer}")
-
         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
         if optimizer:
             if optimizer in self.__available_optimizers:
                 conversation_prompt = getattr(Optimizers, optimizer)(
                     conversation_prompt if conversationally else prompt
                 )
-                if self.logger:
-                    self.logger.info(f"Applied optimizer: {optimizer} 🔧")
             else:
-                if self.logger:
-                    self.logger.error(f"Invalid optimizer requested: {optimizer}")
                 raise Exception(f"Optimizer is not one of {self.__available_optimizers}")
 
         # Payload construction
@@ -129,17 +152,10 @@ class DeepInfra(Provider):
             "stream": stream
         }
 
-        if self.logger:
-            self.logger.debug(f"Sending request to model: {self.model} 🚀")
-
         def for_stream():
-            if self.logger:
-                self.logger.info("Starting stream processing ⚡")
             try:
                 with requests.post(self.url, headers=self.headers, data=json.dumps(payload), stream=True, timeout=self.timeout) as response:
                     if response.status_code != 200:
-                        if self.logger:
-                            self.logger.error(f"Request failed with status {response.status_code} ❌")
                         raise exceptions.FailedToGenerateResponseError(
                             f"Request failed with status code {response.status_code}"
                         )
@@ -151,8 +167,6 @@ class DeepInfra(Provider):
                         if line.startswith("data: "):
                             json_str = line[6:]
                             if json_str == "[DONE]":
-                                if self.logger:
-                                    self.logger.info("Stream completed successfully ✅")
                                 break
                             try:
                                 json_data = json.loads(json_str)
@@ -164,20 +178,14 @@ class DeepInfra(Provider):
                                     resp = dict(text=content)
                                     yield resp if raw else resp
                             except json.JSONDecodeError:
-                                if self.logger:
-                                    self.logger.error("Failed to decode JSON response 🔥")
                                 continue
 
                     self.conversation.update_chat_history(prompt, streaming_text)
 
             except requests.RequestException as e:
-                if self.logger:
-                    self.logger.error(f"Request failed: {str(e)} 🔥")
                 raise exceptions.FailedToGenerateResponseError(f"Request failed: {str(e)}")
 
         def for_non_stream():
-            if self.logger:
-                self.logger.debug("Processing non-stream request")
             for _ in for_stream():
                 pass
             return self.last_response
@@ -206,7 +214,7 @@ class DeepInfra(Provider):
 
 if __name__ == "__main__":
     from rich import print
-    ai = DeepInfra(timeout=5000, logging=True)
+    ai = DeepInfra(timeout=5000)
     response = ai.chat("write a poem about AI", stream=True)
     for chunk in response:
         print(chunk, end="", flush=True)
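
Putting the changes together: usage is unchanged apart from the dropped logging flag and the new default model. A minimal sketch based on the __main__ block above, assuming the provider is re-exported from webscout.Provider as the updated __init__.py suggests:

from webscout.Provider import DeepInfra

# Defaults to "meta-llama/Llama-3.3-70B-Instruct-Turbo"; pass model=... to
# select any other entry from DeepInfra.AVAILABLE_MODELS instead.
ai = DeepInfra(timeout=5000)
for chunk in ai.chat("write a poem about AI", stream=True):
    print(chunk, end="", flush=True)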