webscout 6.2b0-py3-none-any.whl → 6.4-py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between those versions.

Potentially problematic release.


This version of webscout might be problematic.

Files changed (97)
  1. webscout/AIauto.py +191 -176
  2. webscout/AIbase.py +112 -239
  3. webscout/AIutel.py +488 -1130
  4. webscout/Agents/functioncall.py +248 -198
  5. webscout/Bing_search.py +250 -153
  6. webscout/DWEBS.py +454 -178
  7. webscout/Extra/__init__.py +2 -1
  8. webscout/Extra/autocoder/__init__.py +9 -0
  9. webscout/Extra/autocoder/autocoder_utiles.py +121 -0
  10. webscout/Extra/autocoder/rawdog.py +681 -0
  11. webscout/Extra/autollama.py +246 -195
  12. webscout/Extra/gguf.py +441 -226
  13. webscout/Extra/weather.py +172 -67
  14. webscout/LLM.py +442 -100
  15. webscout/Litlogger/__init__.py +681 -0
  16. webscout/Local/formats.py +4 -2
  17. webscout/Provider/Amigo.py +19 -10
  18. webscout/Provider/Andi.py +0 -33
  19. webscout/Provider/Blackboxai.py +4 -204
  20. webscout/Provider/DARKAI.py +1 -1
  21. webscout/Provider/EDITEE.py +1 -1
  22. webscout/Provider/Llama3.py +1 -1
  23. webscout/Provider/Marcus.py +137 -0
  24. webscout/Provider/NinjaChat.py +1 -1
  25. webscout/Provider/PI.py +221 -207
  26. webscout/Provider/Perplexity.py +598 -598
  27. webscout/Provider/RoboCoders.py +206 -0
  28. webscout/Provider/TTI/AiForce/__init__.py +22 -0
  29. webscout/Provider/TTI/AiForce/async_aiforce.py +257 -0
  30. webscout/Provider/TTI/AiForce/sync_aiforce.py +242 -0
  31. webscout/Provider/TTI/Nexra/__init__.py +22 -0
  32. webscout/Provider/TTI/Nexra/async_nexra.py +286 -0
  33. webscout/Provider/TTI/Nexra/sync_nexra.py +258 -0
  34. webscout/Provider/TTI/PollinationsAI/__init__.py +23 -0
  35. webscout/Provider/TTI/PollinationsAI/async_pollinations.py +330 -0
  36. webscout/Provider/TTI/PollinationsAI/sync_pollinations.py +285 -0
  37. webscout/Provider/TTI/__init__.py +3 -4
  38. webscout/Provider/TTI/artbit/__init__.py +22 -0
  39. webscout/Provider/TTI/artbit/async_artbit.py +184 -0
  40. webscout/Provider/TTI/artbit/sync_artbit.py +176 -0
  41. webscout/Provider/TTI/blackbox/__init__.py +4 -0
  42. webscout/Provider/TTI/blackbox/async_blackbox.py +212 -0
  43. webscout/Provider/TTI/{blackboximage.py → blackbox/sync_blackbox.py} +199 -153
  44. webscout/Provider/TTI/deepinfra/__init__.py +4 -0
  45. webscout/Provider/TTI/deepinfra/async_deepinfra.py +227 -0
  46. webscout/Provider/TTI/deepinfra/sync_deepinfra.py +199 -0
  47. webscout/Provider/TTI/huggingface/__init__.py +22 -0
  48. webscout/Provider/TTI/huggingface/async_huggingface.py +199 -0
  49. webscout/Provider/TTI/huggingface/sync_huggingface.py +195 -0
  50. webscout/Provider/TTI/imgninza/__init__.py +4 -0
  51. webscout/Provider/TTI/imgninza/async_ninza.py +214 -0
  52. webscout/Provider/TTI/{imgninza.py → imgninza/sync_ninza.py} +209 -136
  53. webscout/Provider/TTI/talkai/__init__.py +4 -0
  54. webscout/Provider/TTI/talkai/async_talkai.py +229 -0
  55. webscout/Provider/TTI/talkai/sync_talkai.py +207 -0
  56. webscout/Provider/__init__.py +146 -132
  57. webscout/Provider/askmyai.py +158 -0
  58. webscout/Provider/cerebras.py +227 -206
  59. webscout/Provider/geminiapi.py +208 -198
  60. webscout/Provider/llama3mitril.py +180 -0
  61. webscout/Provider/llmchat.py +203 -0
  62. webscout/Provider/mhystical.py +176 -0
  63. webscout/Provider/perplexitylabs.py +265 -0
  64. webscout/Provider/talkai.py +196 -0
  65. webscout/Provider/twitterclone.py +251 -244
  66. webscout/Provider/typegpt.py +359 -0
  67. webscout/__init__.py +28 -23
  68. webscout/__main__.py +5 -5
  69. webscout/cli.py +327 -347
  70. webscout/conversation.py +227 -0
  71. webscout/exceptions.py +161 -29
  72. webscout/litagent/__init__.py +172 -0
  73. webscout/litprinter/__init__.py +831 -0
  74. webscout/optimizers.py +270 -0
  75. webscout/prompt_manager.py +279 -0
  76. webscout/swiftcli/__init__.py +810 -0
  77. webscout/transcriber.py +479 -551
  78. webscout/update_checker.py +125 -0
  79. webscout/version.py +1 -1
  80. webscout-6.4.dist-info/LICENSE.md +211 -0
  81. {webscout-6.2b0.dist-info → webscout-6.4.dist-info}/METADATA +34 -55
  82. webscout-6.4.dist-info/RECORD +154 -0
  83. webscout/Provider/TTI/AIuncensored.py +0 -103
  84. webscout/Provider/TTI/Nexra.py +0 -120
  85. webscout/Provider/TTI/PollinationsAI.py +0 -138
  86. webscout/Provider/TTI/WebSimAI.py +0 -142
  87. webscout/Provider/TTI/aiforce.py +0 -160
  88. webscout/Provider/TTI/artbit.py +0 -141
  89. webscout/Provider/TTI/deepinfra.py +0 -148
  90. webscout/Provider/TTI/huggingface.py +0 -155
  91. webscout/models.py +0 -23
  92. webscout-6.2b0.dist-info/LICENSE.md +0 -50
  93. webscout-6.2b0.dist-info/RECORD +0 -118
  94. /webscout/{g4f.py → gpt4free.py} +0 -0
  95. {webscout-6.2b0.dist-info → webscout-6.4.dist-info}/WHEEL +0 -0
  96. {webscout-6.2b0.dist-info → webscout-6.4.dist-info}/entry_points.txt +0 -0
  97. {webscout-6.2b0.dist-info → webscout-6.4.dist-info}/top_level.txt +0 -0
webscout/Provider/cerebras.py
@@ -1,206 +1,227 @@
- import re
- import requests
- import json
- import os
- from typing import Any, Dict, Optional, Generator, List, Union
-
- from webscout.AIutel import Optimizers
- from webscout.AIutel import Conversation
- from webscout.AIutel import AwesomePrompts, sanitize_stream
- from webscout.AIbase import Provider, AsyncProvider
- from webscout import exceptions
- from fake_useragent import UserAgent
- from cerebras.cloud.sdk import Cerebras
-
-
- class Cerebras(Provider):
-     """
-     A class to interact with the Cerebras API using a cookie for authentication.
-     """
-
-     def __init__(
-         self,
-         is_conversation: bool = True,
-         max_tokens: int = 2049,
-         timeout: int = 30,
-         intro: str = None,
-         filepath: str = None,
-         update_file: bool = True,
-         proxies: dict = {},
-         history_offset: int = 10250,
-         act: str = None,
-         cookie_path: str = "cookie.json",  # Path to cookie file
-         model: str = "llama3.1-8b",  # Default model
-         system_prompt: str = "You are a helpful assistant.",
-     ):
-         """
-         Initializes the Cerebras client with the provided cookie.
-
-         Args:
-             cookie_path (str): Path to the cookie JSON file.
-             model (str, optional): Model name to use. Defaults to 'llama3.1-8b'.
-             system_prompt (str, optional): The system prompt to send with every request. Defaults to "You are a helpful assistant.".
-
-         Raises:
-             FileNotFoundError: If the cookie file is not found.
-             json.JSONDecodeError: If the cookie file has an invalid JSON format.
-             requests.exceptions.RequestException: If there's an error retrieving the API key.
-         """
-         self.api_key = self.get_demo_api_key(cookie_path)
-         self.client = Cerebras(api_key=self.api_key)
-         self.model = model
-         self.system_prompt = system_prompt
-
-         self.is_conversation = is_conversation
-         self.max_tokens_to_sample = max_tokens
-         self.timeout = timeout
-         self.last_response = {}
-
-         self.__available_optimizers = (
-             method
-             for method in dir(Optimizers)
-             if callable(getattr(Optimizers, method)) and not method.startswith("__")
-         )
-
-         Conversation.intro = (
-             AwesomePrompts().get_act(
-                 act, raise_not_found=True, default=None, case_insensitive=True
-             )
-             if act
-             else intro or Conversation.intro
-         )
-         self.conversation = Conversation(
-             is_conversation, self.max_tokens_to_sample, filepath, update_file
-         )
-         self.conversation.history_offset = history_offset
-
-
-     @staticmethod
-     def extract_query(text: str) -> str:
-         """
-         Extracts the first code block from the given text.
-         """
-         pattern = r"```(.*?)```"
-         matches = re.findall(pattern, text, re.DOTALL)
-         return matches[0].strip() if matches else text.strip()
-
-     @staticmethod
-     def refiner(text: str) -> str:
-         """Refines the input text by removing surrounding quotes."""
-         return text.strip('"')
-
-     def get_demo_api_key(self, cookie_path: str) -> str:
-         """Retrieves the demo API key using the provided cookie."""
-         try:
-             with open(cookie_path, "r") as file:
-                 cookies = {item["name"]: item["value"] for item in json.load(file)}
-         except FileNotFoundError:
-             raise FileNotFoundError(f"Cookie file not found at path: {cookie_path}")
-         except json.JSONDecodeError:
-             raise json.JSONDecodeError("Invalid JSON format in the cookie file.")
-
-         headers = {
-             "Accept": "*/*",
-             "Accept-Language": "en-US,en;q=0.9",
-             "Content-Type": "application/json",
-             "Origin": "https://inference.cerebras.ai",
-             "Referer": "https://inference.cerebras.ai/",
-             "user-agent": UserAgent().random,
-         }
-
-         json_data = {
-             "operationName": "GetMyDemoApiKey",
-             "variables": {},
-             "query": "query GetMyDemoApiKey {\n GetMyDemoApiKey\n}",
-         }
-
-         try:
-             response = requests.post(
-                 "https://inference.cerebras.ai/api/graphql",
-                 cookies=cookies,
-                 headers=headers,
-                 json=json_data,
-                 timeout=self.timeout,
-             )
-             response.raise_for_status()
-             api_key = response.json()["data"]["GetMyDemoApiKey"]
-             return api_key
-         except requests.exceptions.RequestException as e:
-             raise exceptions.APIConnectionError(f"Failed to retrieve API key: {e}")
-         except KeyError:
-             raise exceptions.InvalidResponseError("API key not found in response.")
-
-
-     def ask(
-         self,
-         prompt: str,
-         stream: bool = False,
-         raw: bool = False,
-         optimizer: str = None,
-         conversationally: bool = False,
-     ) -> Union[Dict, Generator]:
-
-         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
-         if optimizer:
-             if optimizer in self.__available_optimizers:
-                 conversation_prompt = getattr(Optimizers, optimizer)(
-                     conversation_prompt if conversationally else prompt
-                 )
-             else:
-                 raise Exception(f"Optimizer is not one of {self.__available_optimizers}")
-
-         messages = [
-             {"content": self.system_prompt, "role": "system"},
-             {"content": conversation_prompt, "role": "user"},
-         ]
-
-         def for_stream():
-             try:
-                 response = self.client.chat.completions.create(
-                     model=self.model, messages=messages, stream=True
-                 )
-                 for choice in response.choices:
-                     if choice.delta.content:
-                         yield dict(text=choice.delta.content)
-                 self.last_response.update({"text": response.choices[0].message.content})
-
-             except Exception as e:
-                 raise exceptions.FailedToGenerateResponseError(f"Error during stream: {e}")
-
-         def for_non_stream():
-             try:
-                 response = self.client.chat.completions.create(
-                     model=self.model, messages=messages
-                 )
-                 self.last_response.update({"text": response.choices[0].message.content})
-                 return self.last_response
-             except Exception as e:
-                 raise exceptions.FailedToGenerateResponseError(f"Error during non-stream: {e}")
-
-         return for_stream() if stream else for_non_stream()
-
-     def chat(
-         self,
-         prompt: str,
-         stream: bool = False,
-         optimizer: str = None,
-         conversationally: bool = False,
-     ) -> Union[str, Generator]:
-         return self.get_message(
-             self.ask(
-                 prompt, stream, optimizer=optimizer, conversationally=conversationally
-             )
-         )
-
-     def get_message(self, response: dict) -> str:
-         """Retrieves message only from response"""
-         assert isinstance(response, dict), "Response should be of dict data-type only"
-         return response["text"]
-
-
- if __name__ == "__main__":
-     from rich import print
-     cerebras = Cerebras(cookie_path='cookie.json', model='llama3.1-8b', system_prompt="You are a helpful AI assistant.")
-     response = cerebras.chat("What is the meaning of life?", sys_prompt='', stream=True)
-     for chunk in response:
-         print(chunk, end="", flush=True)
+ import re
+ import requests
+ import json
+ import os
+ from typing import Any, Dict, Optional, Generator, List, Union
+ from webscout.AIutel import Optimizers, Conversation, AwesomePrompts
+ from webscout.AIbase import Provider
+ from webscout import exceptions
+ from fake_useragent import UserAgent
+
+ class Cerebras(Provider):
+     """
+     A class to interact with the Cerebras API using a cookie for authentication.
+     """
+     def __init__(
+         self,
+         is_conversation: bool = True,
+         max_tokens: int = 2049,
+         timeout: int = 30,
+         intro: str = None,
+         filepath: str = None,
+         update_file: bool = True,
+         proxies: dict = {},
+         history_offset: int = 10250,
+         act: str = None,
+         cookie_path: str = "cookie.json",
+         model: str = "llama3.1-8b",
+         system_prompt: str = "You are a helpful assistant.",
+     ):
+         # Initialize basic settings first
+         self.timeout = timeout
+         self.model = model
+         self.system_prompt = system_prompt
+         self.is_conversation = is_conversation
+         self.max_tokens_to_sample = max_tokens
+         self.last_response = {}
+
+         # Get API key first
+         try:
+             self.api_key = self.get_demo_api_key(cookie_path)
+         except Exception as e:
+             raise exceptions.APIConnectionError(f"Failed to initialize Cerebras client: {e}")
+
+         # Initialize optimizers
+         self.__available_optimizers = (
+             method
+             for method in dir(Optimizers)
+             if callable(getattr(Optimizers, method)) and not method.startswith("__")
+         )
+
+         # Initialize conversation settings
+         Conversation.intro = (
+             AwesomePrompts().get_act(
+                 act, raise_not_found=True, default=None, case_insensitive=True
+             )
+             if act
+             else None
+         )
+         self.conversation = Conversation(
+             is_conversation, self.max_tokens_to_sample, filepath, update_file
+         )
+         self.conversation.history_offset = history_offset
+
+     @staticmethod
+     def extract_query(text: str) -> str:
+         """Extracts the first code block from the given text."""
+         pattern = r"```(.*?)```"
+         matches = re.findall(pattern, text, re.DOTALL)
+         return matches[0].strip() if matches else text.strip()
+
+     @staticmethod
+     def refiner(text: str) -> str:
+         """Refines the input text by removing surrounding quotes."""
+         return text.strip('"')
+
+     def get_demo_api_key(self, cookie_path: str) -> str:
+         """Retrieves the demo API key using the provided cookie."""
+         try:
+             with open(cookie_path, "r") as file:
+                 cookies = {item["name"]: item["value"] for item in json.load(file)}
+         except FileNotFoundError:
+             raise FileNotFoundError(f"Cookie file not found at path: {cookie_path}")
+         except json.JSONDecodeError:
+             raise json.JSONDecodeError("Invalid JSON format in the cookie file.", "", 0)
+
+         headers = {
+             "Accept": "*/*",
+             "Accept-Language": "en-US,en;q=0.9",
+             "Content-Type": "application/json",
+             "Origin": "https://inference.cerebras.ai",
+             "Referer": "https://inference.cerebras.ai/",
+             "user-agent": UserAgent().random,
+         }
+
+         json_data = {
+             "operationName": "GetMyDemoApiKey",
+             "variables": {},
+             "query": "query GetMyDemoApiKey {\n GetMyDemoApiKey\n}",
+         }
+
+         try:
+             response = requests.post(
+                 "https://inference.cerebras.ai/api/graphql",
+                 cookies=cookies,
+                 headers=headers,
+                 json=json_data,
+                 timeout=self.timeout,
+             )
+             response.raise_for_status()
+             api_key = response.json()["data"]["GetMyDemoApiKey"]
+             return api_key
+         except requests.exceptions.RequestException as e:
+             raise exceptions.APIConnectionError(f"Failed to retrieve API key: {e}")
+         except KeyError:
+             raise exceptions.InvalidResponseError("API key not found in response.")
+
+     def _make_request(self, messages: List[Dict], stream: bool = False) -> Union[Dict, Generator]:
+         """Make a request to the Cerebras API."""
+         headers = {
+             "Authorization": f"Bearer {self.api_key}",
+             "Content-Type": "application/json",
+             "User-Agent": UserAgent().random
+         }
+
+         payload = {
+             "model": self.model,
+             "messages": messages,
+             "stream": stream
+         }
+
+         try:
+             response = requests.post(
+                 "https://api.cerebras.ai/v1/chat/completions",
+                 headers=headers,
+                 json=payload,
+                 stream=stream,
+                 timeout=self.timeout
+             )
+             response.raise_for_status()
+
+             if stream:
+                 def generate_stream():
+                     for line in response.iter_lines():
+                         if line:
+                             line = line.decode('utf-8')
+                             if line.startswith('data:'):
+                                 try:
+                                     data = json.loads(line[6:])
+                                     if data.get('choices') and data['choices'][0].get('delta', {}).get('content'):
+                                         content = data['choices'][0]['delta']['content']
+                                         yield content
+                                 except json.JSONDecodeError:
+                                     continue
+
+                 return generate_stream()
+             else:
+                 response_json = response.json()
+                 return response_json['choices'][0]['message']['content']
+
+         except requests.exceptions.RequestException as e:
+             raise exceptions.APIConnectionError(f"Request failed: {e}")
+
+     def ask(
+         self,
+         prompt: str,
+         stream: bool = False,
+         optimizer: str = None,
+         conversationally: bool = False,
+     ) -> Union[Dict, Generator]:
+         """Send a prompt to the model and get a response."""
+         conversation_prompt = self.conversation.gen_complete_prompt(prompt)
+         if optimizer:
+             if optimizer in self.__available_optimizers:
+                 conversation_prompt = getattr(Optimizers, optimizer)(
+                     conversation_prompt if conversationally else prompt
+                 )
+             else:
+                 raise Exception(f"Optimizer is not one of {self.__available_optimizers}")
+
+         messages = [
+             {"role": "system", "content": self.system_prompt},
+             {"role": "user", "content": conversation_prompt}
+         ]
+
+         try:
+             response = self._make_request(messages, stream)
+             if stream:
+                 return response
+
+             self.last_response = response
+             return response
+
+         except Exception as e:
+             raise exceptions.FailedToGenerateResponseError(f"Error during request: {e}")
+
+     def chat(
+         self,
+         prompt: str,
+         stream: bool = False,
+         optimizer: str = None,
+         conversationally: bool = False,
+     ) -> Union[str, Generator]:
+         """Chat with the model."""
+         response = self.ask(prompt, stream, optimizer, conversationally)
+         if stream:
+             return response
+         return response
+
+     def get_message(self, response: str) -> str:
+         """Retrieves message from response."""
+         return response
+
+
+ if __name__ == "__main__":
+     from rich import print
+
+     # Example usage
+     cerebras = Cerebras(
+         cookie_path=r'C:\Users\koula\OneDrive\Desktop\Webscout\cookie.json',
+         model='llama3.1-8b',
+         system_prompt="You are a helpful AI assistant."
+     )
+
+     # Test with streaming
+     response = cerebras.chat("Hello!", stream=True)
+     for chunk in response:
+         print(chunk, end="", flush=True)
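
For context, here is a minimal usage sketch of the rewritten provider. It is assembled only from the new module shown in the diff above, not from webscout's documentation: the import path is assumed from the file location webscout/Provider/cerebras.py, the cookie path, model name, and prompts are placeholders, and a valid cookie.json export for inference.cerebras.ai is assumed so get_demo_api_key() can fetch a demo key.

# Hypothetical usage sketch based on the new cerebras.py above (not official docs).
# Assumes cookie.json holds exported browser cookies for inference.cerebras.ai.
from webscout.Provider.cerebras import Cerebras

ai = Cerebras(
    cookie_path="cookie.json",          # placeholder path to the exported cookie file
    model="llama3.1-8b",
    system_prompt="You are a helpful AI assistant.",
)

# Non-streaming: chat() returns the full reply as a single string.
print(ai.chat("Summarize what webscout does in one sentence."))

# Streaming: chat(stream=True) returns a generator of text chunks.
for chunk in ai.chat("Write a short haiku.", stream=True):
    print(chunk, end="", flush=True)

The sketch mirrors the behavior visible in the new code: chat() delegates to ask(), which returns a plain string for non-streaming calls and a generator of decoded SSE content chunks when stream=True.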