webscout 7.8-py3-none-any.whl → 7.9-py3-none-any.whl
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Potentially problematic release: this version of webscout has been flagged as potentially problematic.
- webscout/Bard.py +5 -25
- webscout/DWEBS.py +476 -476
- webscout/Extra/__init__.py +2 -0
- webscout/Extra/autocoder/__init__.py +1 -1
- webscout/Extra/autocoder/{rawdog.py → autocoder.py} +849 -849
- webscout/Extra/tempmail/__init__.py +26 -0
- webscout/Extra/tempmail/async_utils.py +141 -0
- webscout/Extra/tempmail/base.py +156 -0
- webscout/Extra/tempmail/cli.py +187 -0
- webscout/Extra/tempmail/mail_tm.py +361 -0
- webscout/Extra/tempmail/temp_mail_io.py +292 -0
- webscout/Provider/Deepinfra.py +288 -286
- webscout/Provider/ElectronHub.py +709 -716
- webscout/Provider/ExaChat.py +20 -5
- webscout/Provider/Gemini.py +167 -165
- webscout/Provider/Groq.py +38 -24
- webscout/Provider/LambdaChat.py +2 -1
- webscout/Provider/TextPollinationsAI.py +232 -230
- webscout/Provider/__init__.py +0 -4
- webscout/Provider/copilot.py +427 -427
- webscout/Provider/freeaichat.py +8 -1
- webscout/Provider/uncovr.py +312 -299
- webscout/Provider/yep.py +64 -12
- webscout/__init__.py +38 -36
- webscout/cli.py +293 -293
- webscout/conversation.py +350 -17
- webscout/litprinter/__init__.py +59 -667
- webscout/optimizers.py +419 -419
- webscout/update_checker.py +14 -12
- webscout/version.py +1 -1
- webscout/webscout_search.py +1282 -1282
- webscout/webscout_search_async.py +813 -813
- {webscout-7.8.dist-info → webscout-7.9.dist-info}/METADATA +44 -39
- {webscout-7.8.dist-info → webscout-7.9.dist-info}/RECORD +38 -35
- webscout/Provider/DARKAI.py +0 -225
- webscout/Provider/EDITEE.py +0 -192
- webscout/litprinter/colors.py +0 -54
- {webscout-7.8.dist-info → webscout-7.9.dist-info}/LICENSE.md +0 -0
- {webscout-7.8.dist-info → webscout-7.9.dist-info}/WHEEL +0 -0
- {webscout-7.8.dist-info → webscout-7.9.dist-info}/entry_points.txt +0 -0
- {webscout-7.8.dist-info → webscout-7.9.dist-info}/top_level.txt +0 -0
webscout/Provider/EDITEE.py
DELETED
@@ -1,192 +0,0 @@
-import cloudscraper
-from webscout.AIutel import Optimizers
-from webscout.AIutel import Conversation
-from webscout.AIutel import AwesomePrompts, sanitize_stream
-from webscout.AIbase import Provider, AsyncProvider
-from webscout import exceptions
-from typing import Union, Any, AsyncGenerator, Dict
-
-
-class Editee(Provider):
-    """
-    A class to interact with the Editee.com API.
-    """
-    AVAILABLE_MODELS = [
-        "gemini",  # it is gemini 1.5pro
-        "claude",  # it is claude 3.5
-        "gpt4",  # it is gpt4o
-        "mistrallarge",  # it is mistral large2
-    ]
-
-    def __init__(
-        self,
-        is_conversation: bool = True,
-        max_tokens: int = 600,
-        timeout: int = 30,
-        intro: str = None,
-        filepath: str = None,
-        update_file: bool = True,
-        proxies: dict = {},
-        history_offset: int = 10250,
-        act: str = None,
-        model: str = "mistrallarge",
-    ) -> None:
-        """
-        Initializes the Editee API with given parameters.
-
-        Args:
-            is_conversation (bool, optional): Flag for chatting conversationally. Defaults to True.
-            max_tokens (int, optional): Maximum number of tokens to be generated upon completion. Defaults to 600.
-            timeout (int, optional): Http request timeout. Defaults to 30.
-            intro (str, optional): Conversation introductory prompt. Defaults to None.
-            filepath (str, optional): Path to file containing conversation history. Defaults to None.
-            update_file (bool, optional): Add new prompts and responses to the file. Defaults to True.
-            proxies (dict, optional): Http request proxies. Defaults to {}.
-            history_offset (int, optional): Limit conversation history to this number of last texts. Defaults to 10250.
-            act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
-            model (str, optional): AI model to use for text generation. Defaults to "gemini".
-        """
-        if model not in self.AVAILABLE_MODELS:
-            raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")
-
-        self.session = cloudscraper.create_scraper()
-        self.is_conversation = is_conversation
-        self.max_tokens_to_sample = max_tokens
-        self.api_endpoint = "https://editee.com/submit/chatgptfree"
-        self.stream_chunk_size = 64
-        self.timeout = timeout
-        self.last_response = {}
-        self.model = model
-        self._sessionValue = self._get_session()
-        self.headers = {
-            "authority": "editee.com",
-            "path": "/submit/chatgptfree",
-            "scheme": "https",
-            "accept": "application/json, text/plain, */*",
-            "accept-encoding": "gzip, deflate, br",
-            "accept-language": "ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7",
-            "content-type": "application/json",
-            "cookie": f"editeecom_session={self._sessionValue}",
-            "origin": "https://editee.com",
-            "referer": "https://editee.com/chat-gpt",
-            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
-            "x-requested-with": "XMLHttpRequest"
-        }
-        self.__available_optimizers = (
-            method
-            for method in dir(Optimizers)
-            if callable(getattr(Optimizers, method)) and not method.startswith("__")
-        )
-        self.session.headers.update(self.headers)
-        Conversation.intro = (
-            AwesomePrompts().get_act(
-                act, raise_not_found=True, default=None, case_insensitive=True
-            )
-            if act
-            else intro or Conversation.intro
-        )
-        self.conversation = Conversation(
-            is_conversation, self.max_tokens_to_sample, filepath, update_file
-        )
-        self.conversation.history_offset = history_offset
-        self.session.proxies = proxies
-
-    def _get_session(self):
-        """Gets the editeecom_session value."""
-        res = self.session.get("https://editee.com/chat-gpt")
-        if res.cookies.get_dict():
-            first_cookie_name, session_value = next(iter(res.cookies.get_dict().items()))
-            return session_value
-
-    def ask(
-        self,
-        prompt: str,
-        stream: bool = False,
-        raw: bool = False,
-        optimizer: str = None,
-        conversationally: bool = False,
-    ) -> dict:
-        """Chat with AI
-
-        Args:
-            prompt (str): Prompt to be send.
-            stream (bool, optional): Whether to stream the response. Defaults to False.
-            raw (bool, optional): Whether to return the raw response. Defaults to False.
-            optimizer (str, optional): The name of the optimizer to use. Defaults to None.
-            conversationally (bool, optional): Whether to chat conversationally. Defaults to False.
-
-        Returns:
-            The response from the API.
-        """
-        conversation_prompt = self.conversation.gen_complete_prompt(prompt)
-        if optimizer:
-            if optimizer in self.__available_optimizers:
-                conversation_prompt = getattr(Optimizers, optimizer)(
-                    conversation_prompt if conversationally else prompt
-                )
-            else:
-                raise Exception(
-                    f"Optimizer is not one of {self.__available_optimizers}"
-                )
-
-        payload = {
-            "context": " ",
-            "selected_model": self.model,
-            "template_id": "",
-            "user_input": conversation_prompt
-        }
-
-        response = self.session.post(self.api_endpoint, headers=self.headers, json=payload, timeout=self.timeout)
-        if not response.ok:
-            raise exceptions.FailedToGenerateResponseError(
-                f"Failed to generate response - ({response.status_code}, {response.reason}) - {response.text}"
-            )
-
-        resp = response.json()
-        self.last_response.update(dict(text=resp['text']))
-        self.conversation.update_chat_history(
-            prompt, self.get_message(self.last_response)
-        )
-        return self.last_response
-
-    def chat(
-        self,
-        prompt: str,
-        stream: bool = False,
-        optimizer: str = None,
-        conversationally: bool = False,
-    ) -> str:
-        """Generate response `str`
-        Args:
-            prompt (str): Prompt to be send.
-            stream (bool, optional): Flag for streaming response. Defaults to False.
-            optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
-            conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
-        Returns:
-            str: Response generated
-        """
-        return self.get_message(
-            self.ask(
-                prompt,
-                optimizer=optimizer,
-                conversationally=conversationally,
-            )
-        )
-
-    def get_message(self, response: dict) -> str:
-        """Retrieves message only from response
-
-        Args:
-            response (dict): Response generated by `self.ask`
-
-        Returns:
-            str: Message extracted
-        """
-        assert isinstance(response, dict), "Response should be of dict data-type only"
-        return response["text"]
-if __name__ == '__main__':
-    from rich import print
-    ai = Editee()
-    response = ai.chat("tell me about india")
-    for chunk in response:
-        print(chunk, end="", flush=True)
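For context, the removed Editee provider boiled down to a cookie handshake followed by a single JSON POST. Below is a minimal sketch of that flow, reconstructed only from the deleted file above; the editee.com endpoint, headers, and payload fields come from that code and may no longer work, and nothing here is part of webscout 7.9's API.

# Minimal sketch of the request flow the deleted Editee provider implemented.
# Endpoint, cookie name, and payload fields are taken from the removed file;
# illustrative only, and the upstream service may have changed.
import cloudscraper

scraper = cloudscraper.create_scraper()

# The provider first loaded the chat page to obtain the session cookie value.
page = scraper.get("https://editee.com/chat-gpt", timeout=30)
session_value = next(iter(page.cookies.get_dict().values()), "")

payload = {
    "context": " ",
    "selected_model": "mistrallarge",  # one of the AVAILABLE_MODELS above
    "template_id": "",
    "user_input": "tell me about india",
}
headers = {
    "accept": "application/json, text/plain, */*",
    "content-type": "application/json",
    "cookie": f"editeecom_session={session_value}",
    "origin": "https://editee.com",
    "referer": "https://editee.com/chat-gpt",
    "x-requested-with": "XMLHttpRequest",
}

resp = scraper.post(
    "https://editee.com/submit/chatgptfree",
    headers=headers,
    json=payload,
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["text"])  # the provider returned the "text" field as the reply

The deletion of this file (and of DARKAI.py) likely corresponds to the removed import lines in webscout/Provider/__init__.py (+0 -4) listed above, though the diff does not show that file's contents here.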
webscout/litprinter/colors.py
DELETED
@@ -1,54 +0,0 @@
-class Colors:
-    """ANSI color codes for terminal output."""
-    # Base colors
-    BLACK = '\033[30m'
-    RED = '\033[31m'
-    GREEN = '\033[32m'
-    YELLOW = '\033[33m'
-    BLUE = '\033[34m'
-    MAGENTA = '\033[35m'
-    CYAN = '\033[36m'
-    WHITE = '\033[37m'
-    GRAY = '\033[90m'
-
-    # Bright colors
-    BRIGHT_BLACK = '\033[90m'
-    BRIGHT_RED = '\033[91m'
-    BRIGHT_GREEN = '\033[92m'
-    BRIGHT_YELLOW = '\033[93m'
-    BRIGHT_BLUE = '\033[94m'
-    BRIGHT_MAGENTA = '\033[95m'
-    BRIGHT_CYAN = '\033[96m'
-    BRIGHT_WHITE = '\033[97m'
-
-    # Background colors
-    BG_BLACK = '\033[40m'
-    BG_RED = '\033[41m'
-    BG_GREEN = '\033[42m'
-    BG_YELLOW = '\033[43m'
-    BG_BLUE = '\033[44m'
-    BG_MAGENTA = '\033[45m'
-    BG_CYAN = '\033[46m'
-    BG_WHITE = '\033[47m'
-
-    # Styles
-    BOLD = '\033[1m'
-    DIM = '\033[2m'
-    ITALIC = '\033[3m'
-    UNDERLINE = '\033[4m'
-    BLINK = '\033[5m'
-    REVERSE = '\033[7m'
-    STRIKE = '\033[9m'
-    HIDDEN = '\033[8m'
-
-    # Special
-    RESET = '\033[0m'
-    CLEAR_SCREEN = '\033[2J'
-    CLEAR_LINE = '\033[2K'
-
-    # Cursor movement
-    UP = '\033[1A'
-    DOWN = '\033[1B'
-    RIGHT = '\033[1C'
-    LEFT = '\033[1D'
-
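For reference, a minimal sketch of how ANSI escape sequences like those defined in the removed Colors class are typically used; this is plain Python with the escape codes copied from the deleted file, not part of webscout's current API.

# Coloring terminal output with raw ANSI escape sequences (as in the removed Colors class).
RED = '\033[31m'    # red foreground
BOLD = '\033[1m'    # bold style
RESET = '\033[0m'   # reset all attributes

print(f"{BOLD}{RED}error:{RESET} something went wrong")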
Files without changes: {webscout-7.8.dist-info → webscout-7.9.dist-info}/LICENSE.md, WHEEL, entry_points.txt, top_level.txt.