webscout 6.7-py3-none-any.whl → 6.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of webscout might be problematic.

Files changed (43)
  1. webscout/Extra/YTToolkit/YTdownloader.py +7 -2
  2. webscout/Extra/YTToolkit/ytapi/channel.py +1 -1
  3. webscout/Extra/YTToolkit/ytapi/query.py +3 -0
  4. webscout/Extra/YTToolkit/ytapi/stream.py +3 -0
  5. webscout/Extra/YTToolkit/ytapi/video.py +3 -1
  6. webscout/Provider/Cloudflare.py +2 -1
  7. webscout/Provider/DARKAI.py +2 -2
  8. webscout/Provider/Free2GPT.py +5 -5
  9. webscout/Provider/Marcus.py +3 -3
  10. webscout/Provider/PI.py +113 -47
  11. webscout/Provider/Phind.py +6 -0
  12. webscout/Provider/PizzaGPT.py +62 -53
  13. webscout/Provider/RUBIKSAI.py +93 -38
  14. webscout/Provider/__init__.py +0 -8
  15. webscout/Provider/cerebras.py +3 -3
  16. webscout/Provider/cleeai.py +2 -2
  17. webscout/Provider/elmo.py +2 -2
  18. webscout/Provider/gaurish.py +2 -2
  19. webscout/Provider/geminiprorealtime.py +2 -2
  20. webscout/Provider/lepton.py +2 -2
  21. webscout/Provider/llama3mitril.py +3 -3
  22. webscout/Provider/llamatutor.py +2 -2
  23. webscout/Provider/llmchat.py +3 -2
  24. webscout/Provider/meta.py +2 -2
  25. webscout/Provider/tutorai.py +1 -1
  26. webscout/__init__.py +0 -1
  27. webscout/swiftcli/__init__.py +1 -0
  28. webscout/version.py +1 -1
  29. webscout/webscout_search.py +1140 -1104
  30. webscout/webscout_search_async.py +635 -361
  31. {webscout-6.7.dist-info → webscout-6.9.dist-info}/METADATA +4 -32
  32. {webscout-6.7.dist-info → webscout-6.9.dist-info}/RECORD +36 -43
  33. webscout/Extra/markdownlite/__init__.py +0 -862
  34. webscout/Provider/Deepseek.py +0 -227
  35. webscout/Provider/Farfalle.py +0 -227
  36. webscout/Provider/NinjaChat.py +0 -200
  37. webscout/Provider/mhystical.py +0 -176
  38. webscout/zerodir/__init__.py +0 -225
  39. webstoken/t.py +0 -75
  40. {webscout-6.7.dist-info → webscout-6.9.dist-info}/LICENSE.md +0 -0
  41. {webscout-6.7.dist-info → webscout-6.9.dist-info}/WHEEL +0 -0
  42. {webscout-6.7.dist-info → webscout-6.9.dist-info}/entry_points.txt +0 -0
  43. {webscout-6.7.dist-info → webscout-6.9.dist-info}/top_level.txt +0 -0
webscout/Provider/RUBIKSAI.py CHANGED
@@ -1,4 +1,4 @@
- import requests
+ import cloudscraper
  import json
  from typing import Any, Dict, Optional
 
@@ -30,6 +30,7 @@ class RUBIKSAI(Provider):
  history_offset: int = 10250,
  act: str = None,
  model: str = "gpt-4o-mini",
+ temperature: float = 0.6,
  ) -> None:
  """
  Initializes the RUBIKSAI API with given parameters.
@@ -48,25 +49,54 @@ class RUBIKSAI(Provider):
  act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
  model (str, optional): AI model to use. Defaults to "gpt-4o-mini".
  Available models: "gpt-4o-mini", "gemini-1.5-pro"
+ temperature (float, optional): Sampling temperature. Defaults to 0.6.
  """
  if model not in self.AVAILABLE_MODELS:
  raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")
 
- self.session = requests.Session()
+ self.temperature = temperature
+ self.session = cloudscraper.create_scraper()
+ self.api_endpoint = "https://rubiks.ai/search/api/"
+
+ # Updated headers with all necessary fields
+ self.headers = {
+ "authority": "rubiks.ai",
+ "accept": "*/*",
+ "accept-encoding": "gzip, deflate, br, zstd",
+ "accept-language": "en-US,en;q=0.9,en-IN;q=0.8",
+ "content-type": "application/json",
+ "dnt": "1",
+ "origin": "https://rubiks.ai",
+ "referer": f"https://rubiks.ai/search/?q=&model={model}",
+ "sec-ch-ua": '"Microsoft Edge";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
+ "sec-ch-ua-mobile": "?0",
+ "sec-ch-ua-platform": '"Windows"',
+ "sec-fetch-dest": "empty",
+ "sec-fetch-mode": "cors",
+ "sec-fetch-site": "same-origin",
+ "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36 Edg/131.0.0.0"
+ }
+
+ # Get initial cookies
+ init_response = self.session.get("https://rubiks.ai/search/")
+ if not init_response.ok:
+ raise exceptions.FailedToGenerateResponseError("Failed to initialize session")
+
+ # Extract cf_clearance and other cookies
+ self.cookies = {
+ 'cf_clearance': init_response.cookies.get('cf_clearance', ''),
+ }
+
+ # Update session with cookies and headers
+ self.session.headers.update(self.headers)
+ self.session.cookies.update(self.cookies)
+
  self.is_conversation = is_conversation
  self.max_tokens_to_sample = max_tokens
- self.api_endpoint = "https://rubiks.ai/search/api.php"
  self.stream_chunk_size = 64
  self.timeout = timeout
  self.last_response = {}
  self.model = model
- self.headers = {
- "accept": "text/event-stream",
- "accept-encoding": "gzip, deflate, br, zstd",
- "accept-language": "en-US,en;q=0.9,en-IN;q=0.8",
- "cache-control": "no-cache",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36 Edg/127.0.0.0"
- }
 
  self.__available_optimizers = (
  method
@@ -119,41 +149,66 @@ class RUBIKSAI(Provider):
  f"Optimizer is not one of {self.__available_optimizers}"
  )
 
- params = {
- "q": conversation_prompt,
+ payload = {
  "model": self.model,
+ "stream": True,
+ "messages": [{"role": "user", "content": conversation_prompt}],
+ "temperature": self.temperature,
+ "search": ""
  }
 
  def for_stream():
- response = self.session.get(
- self.api_endpoint, params=params, headers=self.headers, stream=True, timeout=self.timeout
- )
+ try:
+ response = self.session.post(
+ self.api_endpoint,
+ json=payload,
+ stream=True,
+ timeout=self.timeout
+ )
+
+ if response.status_code == 403:
+ # Try to refresh the session
+ init_response = self.session.get("https://rubiks.ai/search/")
+ self.cookies['cf_clearance'] = init_response.cookies.get('cf_clearance', '')
+ self.session.cookies.update(self.cookies)
+
+ # Retry the request
+ response = self.session.post(
+ self.api_endpoint,
+ json=payload,
+ stream=True,
+ timeout=self.timeout
+ )
+
+ if not response.ok:
+ raise exceptions.FailedToGenerateResponseError(
+ f"Failed to generate response - ({response.status_code}, {response.reason})"
+ )
 
- if not response.ok:
- raise exceptions.FailedToGenerateResponseError(
- f"Failed to generate response - ({response.status_code}, {response.reason})"
+ # ...rest of the streaming code...
+ streaming_response = ""
+ for line in response.iter_lines(decode_unicode=True):
+ if line:
+ if line.startswith("data: "):
+ json_data = line[6:]
+ if json_data == "[DONE]":
+ break
+ try:
+ data = json.loads(json_data)
+ if "choices" in data and len(data["choices"]) > 0:
+ content = data["choices"][0]["delta"].get("content", "")
+ streaming_response += content
+ yield content if raw else dict(text=content)
+ except json.decoder.JSONDecodeError:
+ continue
+
+ self.last_response.update(dict(text=streaming_response))
+ self.conversation.update_chat_history(
+ prompt, self.get_message(self.last_response)
  )
 
- streaming_response = ""
- for line in response.iter_lines(decode_unicode=True):
- if line:
- if line.startswith("data: "):
- json_data = line[6:]
- if json_data == "[DONE]":
- break
- try:
- data = json.loads(json_data)
- if "choices" in data and len(data["choices"]) > 0:
- content = data["choices"][0]["delta"].get("content", "")
- streaming_response += content
- yield content if raw else dict(text=content)
- except json.decoder.JSONDecodeError:
- continue
-
- self.last_response.update(dict(text=streaming_response))
- self.conversation.update_chat_history(
- prompt, self.get_message(self.last_response)
- )
+ except Exception as e:
+ raise exceptions.FailedToGenerateResponseError(f"Request failed: {str(e)}")
 
  def for_non_stream():
  for _ in for_stream():
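Taken together, the RUBIKSAI hunks swap the old requests GET against search/api.php for a cloudscraper session that first visits https://rubiks.ai/search/ to pick up a cf_clearance cookie, then POSTs an OpenAI-style JSON payload to https://rubiks.ai/search/api/ and reads the reply as "data:" server-sent-event lines. Below is a minimal standalone sketch of that flow, based only on the endpoint, payload, and parsing shown in the diff; the prompt, model, and timeout values are illustrative, and this is not part of the webscout public API.

```python
# Minimal sketch of the 6.9 RUBIKSAI request flow, assuming the rubiks.ai
# endpoint and payload format stay as shown in the diff above.
import json
import cloudscraper

scraper = cloudscraper.create_scraper()          # requests-compatible session that handles Cloudflare challenges
init = scraper.get("https://rubiks.ai/search/")  # primes the session with cf_clearance and related cookies
init.raise_for_status()

payload = {
    "model": "gpt-4o-mini",                      # or "gemini-1.5-pro"
    "stream": True,
    "messages": [{"role": "user", "content": "Hello"}],
    "temperature": 0.6,
    "search": "",
}

with scraper.post("https://rubiks.ai/search/api/", json=payload, stream=True, timeout=30) as resp:
    resp.raise_for_status()
    for line in resp.iter_lines(decode_unicode=True):
        # The API streams server-sent events: "data: {...}" chunks, terminated by "data: [DONE]".
        if not line or not line.startswith("data: "):
            continue
        chunk = line[6:]
        if chunk == "[DONE]":
            break
        try:
            delta = json.loads(chunk)["choices"][0]["delta"].get("content", "")
        except (json.JSONDecodeError, KeyError, IndexError):
            continue
        print(delta, end="", flush=True)
```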
webscout/Provider/__init__.py CHANGED
@@ -14,9 +14,7 @@ from .Phind import PhindSearch
  from .Phind import Phindv2
  from .ai4chat import *
  from .Gemini import GEMINI
- from .Deepseek import DeepSeek
  from .Deepinfra import DeepInfra
- from .Farfalle import *
  from .cleeai import *
  from .OLLAMA import OLLAMA
  from .Andi import AndiSearch
@@ -54,18 +52,15 @@ from .bagoodex import *
  from .aimathgpt import *
  from .gaurish import *
  from .geminiprorealtime import *
- from .NinjaChat import *
  from .llmchat import *
  from .talkai import *
  from .askmyai import *
  from .llama3mitril import *
  from .Marcus import *
  from .typegpt import *
- from .mhystical import *
  from .multichat import *
  from .Jadve import *
  __all__ = [
- 'Farfalle',
  'LLAMA',
  'Cohere',
  'REKA',
@@ -78,7 +73,6 @@ __all__ = [
  'BLACKBOXAI',
  'PhindSearch',
  'GEMINI',
- 'DeepSeek',
  'DeepInfra',
  'AI4Chat',
  'Phindv2',
@@ -121,13 +115,11 @@ __all__ = [
  'AIMathGPT',
  'GaurishCerebras',
  'GeminiPro',
- 'NinjaChat',
  'LLMChat',
  'Talkai',
  'Llama3Mitril',
  'Marcus',
  'TypeGPT',
- 'Mhystical',
  'Netwrck',
  'MultiChatAI',
  'JadveOpenAI',
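Alongside these export changes, the DeepSeek, Farfalle, NinjaChat, and Mhystical providers are deleted outright (files 34-37 in the list above), so 6.7-era code that imports them raises ImportError under 6.9. A small compatibility sketch, assuming you would rather degrade gracefully than pin webscout to 6.7:

```python
# Guard imports of providers that webscout 6.9 removed from webscout/Provider/__init__.py.
try:
    from webscout.Provider import DeepSeek  # removed in 6.9
except ImportError:
    DeepSeek = None

try:
    from webscout.Provider import NinjaChat  # removed in 6.9
except ImportError:
    NinjaChat = None

if DeepSeek is None:
    print("DeepSeek provider is gone in this webscout release; fall back to another provider.")
```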
webscout/Provider/cerebras.py CHANGED
@@ -6,7 +6,7 @@ from typing import Any, Dict, Optional, Generator, List, Union
  from webscout.AIutel import Optimizers, Conversation, AwesomePrompts
  from webscout.AIbase import Provider
  from webscout import exceptions
- from fake_useragent import UserAgent
+ from webscout import LitAgent as UserAgent
 
  class Cerebras(Provider):
  """
@@ -89,7 +89,7 @@ class Cerebras(Provider):
  "Content-Type": "application/json",
  "Origin": "https://inference.cerebras.ai",
  "Referer": "https://inference.cerebras.ai/",
- "user-agent": UserAgent().random,
+ "user-agent": UserAgent().random(),
  }
 
  json_data = {
@@ -119,7 +119,7 @@ class Cerebras(Provider):
  headers = {
  "Authorization": f"Bearer {self.api_key}",
  "Content-Type": "application/json",
- "User-Agent": UserAgent().random
+ "User-Agent": UserAgent().random(),
  }
 
  payload = {
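The same substitution repeats across the provider hunks that follow (cleeai, elmo, gaurish, geminiprorealtime, lepton, llama3mitril, llamatutor, llmchat, meta): hard-coded browser strings and the external fake_useragent dependency give way to webscout's bundled LitAgent, whose random() is a method call rather than fake_useragent's .random property. A short sketch of the updated header construction; the extra header fields here are illustrative:

```python
from webscout import LitAgent  # bundled user-agent generator, re-exported from webscout/__init__.py

# Note the parentheses: LitAgent().random() is a method call, unlike the
# UserAgent().random property from fake_useragent that the 6.7 code used.
headers = {
    "Content-Type": "application/json",
    "User-Agent": LitAgent().random(),
}
print(headers["User-Agent"])
```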
webscout/Provider/cleeai.py CHANGED
@@ -6,7 +6,7 @@ from webscout.AIutel import Optimizers
  from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts
  from webscout.AIbase import Provider
-
+ import webscout
  class Cleeai(Provider):
  """
  A class to interact with the Cleeai.com API.
@@ -59,7 +59,7 @@ class Cleeai(Provider):
  "sec-fetch-dest": "empty",
  "sec-fetch-mode": "cors",
  "sec-fetch-site": "same-site",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 Edg/128.0.0.0",
+ "user-agent": webscout.LitAgent().random()
  }
 
  self.__available_optimizers = (
webscout/Provider/elmo.py CHANGED
@@ -3,7 +3,7 @@ from webscout.AIutel import Optimizers
  from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts
  from webscout.AIbase import Provider
-
+ from webscout import LitAgent
 
  class Elmo(Provider):
  """
@@ -62,7 +62,7 @@ class Elmo(Provider):
  "sec-fetch-dest": "empty",
  "sec-fetch-mode": "cors",
  "sec-fetch-site": "cross-site",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 Edg/128.0.0.0",
+ "user-agent": LitAgent().random(),
  }
 
  self.__available_optimizers = (
webscout/Provider/gaurish.py CHANGED
@@ -10,7 +10,7 @@ from webscout.AIutel import AwesomePrompts, sanitize_stream
  from webscout.AIbase import Provider, AsyncProvider
  from webscout import exceptions
 
-
+ from webscout import LitAgent
  class GaurishCerebras(Provider):
  """
  A class to interact with the Gaurish Cerebras API.
@@ -65,7 +65,7 @@ class GaurishCerebras(Provider):
  "sec-fetch-dest": "empty",
  "sec-fetch-mode": "cors",
  "sec-fetch-site": "same-site",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36 Edg/130.0.0.0",
+ "user-agent": LitAgent().random(),
  "x-stainless-arch": "unknown",
  "x-stainless-lang": "js",
  "x-stainless-os": "Unknown",
webscout/Provider/geminiprorealtime.py CHANGED
@@ -9,7 +9,7 @@ from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts, sanitize_stream
  from webscout.AIbase import Provider, AsyncProvider
  from webscout import exceptions
-
+ from webscout import LitAgent
 
  class GeminiPro(Provider):
  """
@@ -47,7 +47,7 @@ class GeminiPro(Provider):
  'sec-fetch-dest': 'empty',
  'sec-fetch-mode': 'cors',
  'sec-fetch-site': 'same-origin',
- 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36 Edg/129.0.0.0',
+ 'user-agent': LitAgent().random(),
  'x-requested-with': 'XMLHttpRequest'
  }
  self.session = requests.Session()
webscout/Provider/lepton.py CHANGED
@@ -6,7 +6,7 @@ from webscout.AIutel import Optimizers
  from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts
  from webscout.AIbase import Provider
-
+ from webscout import LitAgent as Lit
  class Lepton(Provider):
  """
  A class to interact with the Lepton.run API.
@@ -58,7 +58,7 @@ class Lepton(Provider):
  "sec-fetch-dest": "empty",
  "sec-fetch-mode": "cors",
  "sec-fetch-site": "same-origin",
- "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 Edg/128.0.0.0",
+ "user-agent": Lit().random(),
  }
 
  self.__available_optimizers = (
webscout/Provider/llama3mitril.py CHANGED
@@ -7,7 +7,7 @@ from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts
  from webscout.AIbase import Provider
  from webscout import exceptions
-
+ from webscout import LitAgent as Lit
 
  class Llama3Mitril(Provider):
  """
@@ -40,7 +40,7 @@ class Llama3Mitril(Provider):
  self.headers = {
  "Content-Type": "application/json",
  "DNT": "1",
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36 Edg/130.0.0.0"
+ "User-Agent": Lit().random(),
  }
  self.__available_optimizers = (
  method
@@ -177,4 +177,4 @@ if __name__ == "__main__":
  )
 
  for response in ai.chat("Hello", stream=True):
- print(response)
+ print(response, end="", flush=True)
webscout/Provider/llamatutor.py CHANGED
@@ -6,7 +6,7 @@ from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts
  from webscout.AIbase import Provider
  from webscout import exceptions
-
+ from webscout import LitAgent as Lit
  class LlamaTutor(Provider):
  """
  A class to interact with the LlamaTutor API (Together.ai).
@@ -63,7 +63,7 @@ class LlamaTutor(Provider):
  "Sec-Fetch-Dest": "empty",
  "Sec-Fetch-Mode": "cors",
  "Sec-Fetch-Site": "same-origin",
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 Edg/128.0.0.0"
+ "User-Agent": Lit().random(),
  }
 
  self.__available_optimizers = (
webscout/Provider/llmchat.py CHANGED
@@ -7,7 +7,7 @@ from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts
  from webscout.AIbase import Provider
  from webscout import exceptions
-
+ from webscout import LitAgent as Lit
  class LLMChat(Provider):
  """
  A class to interact with the LLMChat API.
@@ -18,6 +18,7 @@ class LLMChat(Provider):
  "@cf/meta/llama-3.1-8b-instruct",
  "@cf/meta/llama-3.2-3b-instruct",
  "@cf/meta/llama-3.2-1b-instruct"
+ "@cf/meta/llama-3.3-70b-instruct-fp8-fast"
  ]
 
  def __init__(
@@ -51,7 +52,7 @@ class LLMChat(Provider):
  self.headers = {
  "Content-Type": "application/json",
  "Accept": "*/*",
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0",
+ "User-Agent": Lit().random(),
  "Origin": "https://llmchat.in",
  "Referer": "https://llmchat.in/"
  }
webscout/Provider/meta.py CHANGED
@@ -14,7 +14,7 @@ from webscout.AIutel import Conversation
  from webscout.AIutel import AwesomePrompts, sanitize_stream
  from webscout.AIbase import Provider
  from webscout import exceptions
-
+ from webscout import LitAgent as Lit
  MAX_RETRIES = 3
 
  def generate_offline_threading_id() -> str:
@@ -103,7 +103,7 @@ def get_fb_session(email, password, proxies=None):
  "sec-fetch-site": "none",
  "sec-fetch-user": "?1",
  "upgrade-insecure-requests": "1",
- "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
+ "user-agent": Lit().random(),
  }
  # Send the GET request
  response = requests.get(login_url, headers=headers, proxies=proxies)
webscout/Provider/tutorai.py CHANGED
@@ -48,7 +48,7 @@ class TutorAI(Provider):
  self.session = requests.Session()
  self.is_conversation = is_conversation
  self.max_tokens_to_sample = max_tokens
- self.api_endpoint = "https://tutorai.me/api/generate-homeworkify-response"
+ self.api_endpoint = "https://ai-tutor.ai/api/generate-homeworkify-response"
  self.stream_chunk_size = 1024
  self.timeout = timeout
  self.last_response = {}
webscout/__init__.py CHANGED
@@ -17,7 +17,6 @@ from .swiftcli import *
  from .litagent import LitAgent
  from .scout import *
  from .zeroart import *
- from .zerodir import *
  agent = LitAgent()
 
  __repo__ = "https://github.com/OE-LUCIFER/Webscout"
webscout/swiftcli/__init__.py CHANGED
@@ -27,6 +27,7 @@ Advanced Usage:
  For more examples, check out the documentation!
  """
 
+ import importlib
  import os
  import sys
  import json
webscout/version.py CHANGED
@@ -1,2 +1,2 @@
- __version__ = "6.7"
+ __version__ = "6.9"
  __prog__ = "webscout"