webscout 7.2__py3-none-any.whl → 7.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic. Click here for more details.
- webscout/Bard.py +2 -2
- webscout/Litlogger/core/level.py +3 -0
- webscout/Litlogger/core/logger.py +101 -58
- webscout/Litlogger/handlers/console.py +14 -31
- webscout/Litlogger/handlers/network.py +16 -17
- webscout/Litlogger/styles/colors.py +81 -63
- webscout/Litlogger/styles/formats.py +163 -80
- webscout/Provider/AISEARCH/ISou.py +277 -0
- webscout/Provider/AISEARCH/__init__.py +2 -1
- webscout/Provider/Deepinfra.py +40 -24
- webscout/Provider/TTI/FreeAIPlayground/__init__.py +9 -0
- webscout/Provider/TTI/FreeAIPlayground/async_freeaiplayground.py +206 -0
- webscout/Provider/TTI/FreeAIPlayground/sync_freeaiplayground.py +192 -0
- webscout/Provider/TTI/__init__.py +2 -1
- webscout/Provider/TextPollinationsAI.py +26 -5
- webscout/Provider/__init__.py +2 -0
- webscout/Provider/freeaichat.py +221 -0
- webscout/Provider/yep.py +1 -1
- webscout/__init__.py +1 -0
- webscout/version.py +1 -1
- webscout/webscout_search.py +82 -2
- webscout/webscout_search_async.py +58 -1
- webscout/yep_search.py +297 -0
- {webscout-7.2.dist-info → webscout-7.3.dist-info}/METADATA +59 -20
- {webscout-7.2.dist-info → webscout-7.3.dist-info}/RECORD +29 -23
- {webscout-7.2.dist-info → webscout-7.3.dist-info}/WHEEL +1 -1
- {webscout-7.2.dist-info → webscout-7.3.dist-info}/LICENSE.md +0 -0
- {webscout-7.2.dist-info → webscout-7.3.dist-info}/entry_points.txt +0 -0
- {webscout-7.2.dist-info → webscout-7.3.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
import json
|
|
3
|
+
import time
|
|
4
|
+
from typing import Any, Dict, Optional, Generator, Union
|
|
5
|
+
|
|
6
|
+
from webscout.AIutel import Optimizers
|
|
7
|
+
from webscout.AIutel import Conversation
|
|
8
|
+
from webscout.AIutel import AwesomePrompts, sanitize_stream
|
|
9
|
+
from webscout.AIbase import Provider, AsyncProvider
|
|
10
|
+
from webscout import exceptions
|
|
11
|
+
from webscout import LitAgent
|
|
12
|
+
from webscout.Litlogger import Logger, LogFormat
|
|
13
|
+
|
|
14
|
+
class FreeAIChat(Provider):
    """Provider for the FreeAIChat playground chat-completions API.

    Sends OpenAI-style chat payloads to freeaichatplayground.com, streams the
    SSE response back, and integrates with webscout's Conversation history,
    Optimizers and optional Litlogger logging. A LitAgent-generated random
    User-Agent is attached to every session.
    """

    # Model identifiers accepted by the endpoint (passed through verbatim).
    AVAILABLE_MODELS = [
        "mistral-nemo",
        "mistral-large",
        "llama3.1-70b-fast",
        "gemini-2.0-flash",
        "gemini-1.5-pro",
        "gemini-1.5-flash",
        "gemini-2.0-pro-exp-02-05",
        "deepseek-r1",
        "deepseek-v3",
        "Deepseek r1 14B",
        "Deepseek r1 32B",
        "o3-mini-high",
        "o3-mini-medium",
        "o3-mini-low",
        "o3-mini",
        "GPT-4o-mini",
        "o1",
        "o1-mini",
        "GPT-4o"
    ]

    def __init__(
        self,
        is_conversation: bool = True,
        max_tokens: int = 2049,
        timeout: int = 30,
        intro: str = None,
        filepath: str = None,
        update_file: bool = True,
        proxies: dict = None,
        history_offset: int = 10250,
        act: str = None,
        model: str = "GPT-4o",
        system_prompt: str = "You are a helpful AI assistant.",
        logging: bool = False
    ):
        """Initializes the FreeAIChat API client with logging support.

        Args:
            is_conversation: Keep multi-turn conversation history.
            max_tokens: Token budget handed to the Conversation helper.
            timeout: Per-request timeout in seconds.
            intro: Optional intro text for the conversation.
            filepath: Optional path used to persist conversation history.
            update_file: Whether to write history updates back to ``filepath``.
            proxies: Optional proxy mapping for the requests session.
                (Fixed: was a mutable default argument ``{}``.)
            history_offset: Maximum characters of history to keep.
            act: Optional AwesomePrompts persona key used as the intro.
            model: One of AVAILABLE_MODELS.
            system_prompt: System message sent with every request.
            logging: Enable Litlogger output when True.

        Raises:
            ValueError: If ``model`` is not in AVAILABLE_MODELS.
        """
        if model not in self.AVAILABLE_MODELS:
            raise ValueError(f"Invalid model: {model}. Choose from: {self.AVAILABLE_MODELS}")

        self.url = "https://freeaichatplayground.com/api/v1/chat/completions"
        self.headers = {
            'User-Agent': LitAgent().random(),
            'Accept': '*/*',
            'Content-Type': 'application/json',
            'Origin': 'https://freeaichatplayground.com',
            'Referer': 'https://freeaichatplayground.com/',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-origin'
        }
        self.session = requests.Session()
        self.session.headers.update(self.headers)
        self.session.proxies.update(proxies or {})

        self.is_conversation = is_conversation
        self.max_tokens_to_sample = max_tokens
        self.timeout = timeout
        self.last_response = {}
        self.model = model
        self.system_prompt = system_prompt

        # BUG FIX: this was a generator expression, which an `in` test
        # exhausts after the first use; a tuple supports repeated checks.
        self.__available_optimizers = tuple(
            method
            for method in dir(Optimizers)
            if callable(getattr(Optimizers, method)) and not method.startswith("__")
        )
        Conversation.intro = (
            AwesomePrompts().get_act(
                act, raise_not_found=True, default=None, case_insensitive=True
            )
            if act
            else intro or Conversation.intro
        )

        self.conversation = Conversation(
            is_conversation, self.max_tokens_to_sample, filepath, update_file
        )
        self.conversation.history_offset = history_offset

        self.logger = Logger(
            name="FreeAIChat",
            format=LogFormat.MODERN_EMOJI,
        ) if logging else None

        if self.logger:
            self.logger.info(f"FreeAIChat initialized successfully with model: {model}")

    def ask(
        self,
        prompt: str,
        stream: bool = False,
        raw: bool = False,
        optimizer: str = None,
        conversationally: bool = False,
    ) -> Union[Dict[str, Any], Generator]:
        """Send ``prompt`` to the API and return the response.

        Args:
            prompt: User prompt to send.
            stream: Yield chunks as they arrive instead of returning one dict.
            raw: When streaming, yield plain text chunks instead of
                ``{"text": ...}`` dicts.
            optimizer: Name of an Optimizers method applied to the prompt.
            conversationally: Apply the optimizer to the full conversation
                prompt rather than the bare prompt.

        Returns:
            A generator of chunks when ``stream`` is True, otherwise a dict
            like ``{"text": "<full response>"}``.

        Raises:
            Exception: If ``optimizer`` is not a known optimizer name.
            exceptions.FailedToGenerateResponseError: On HTTP/network failure.
        """
        conversation_prompt = self.conversation.gen_complete_prompt(prompt)
        if optimizer:
            if optimizer in self.__available_optimizers:
                conversation_prompt = getattr(Optimizers, optimizer)(
                    conversation_prompt if conversationally else prompt
                )
                if self.logger:
                    self.logger.debug(f"Applied optimizer: {optimizer}")
            else:
                if self.logger:
                    self.logger.error(f"Invalid optimizer requested: {optimizer}")
                raise Exception(f"Optimizer is not one of {self.__available_optimizers}")

        messages = [
            {
                "role": "system",
                "content": self.system_prompt
            },
            {
                "role": "user",
                "content": conversation_prompt
            }
        ]

        payload = {
            "model": self.model,
            "messages": messages
        }

        def for_stream():
            if self.logger:
                self.logger.debug("Sending streaming request to FreeAIChat API...")
            try:
                # BUG FIX: use the configured session (a bare requests.post
                # silently ignored the session's proxies).
                with self.session.post(
                    self.url, json=payload, stream=True, timeout=self.timeout
                ) as response:
                    if response.status_code != 200:
                        if self.logger:
                            self.logger.error(f"Request failed with status code {response.status_code}")
                        raise exceptions.FailedToGenerateResponseError(
                            f"Request failed with status code {response.status_code}"
                        )

                    streaming_text = ""
                    for line in response.iter_lines(decode_unicode=True):
                        if not line:
                            continue
                        line = line.strip()
                        if not line.startswith("data: "):
                            continue
                        json_str = line[6:]  # Remove "data: " prefix
                        if json_str == "[DONE]":
                            break
                        try:
                            json_data = json.loads(json_str)
                        except json.JSONDecodeError:
                            if self.logger:
                                self.logger.error("JSON decode error in streaming data")
                            continue
                        if 'choices' in json_data:
                            choice = json_data['choices'][0]
                            if 'delta' in choice and 'content' in choice['delta']:
                                content = choice['delta']['content']
                                streaming_text += content
                                resp = dict(text=content)
                                # BUG FIX: both branches yielded `resp`, making
                                # `raw` a no-op; raw mode now yields plain text.
                                yield content if raw else resp

                    # BUG FIX: last_response was never populated, so the
                    # non-streaming path always returned an empty dict.
                    self.last_response = dict(text=streaming_text)
                    self.conversation.update_chat_history(prompt, streaming_text)
                    if self.logger:
                        self.logger.info("Streaming response completed successfully")

            except requests.RequestException as e:
                if self.logger:
                    self.logger.error(f"Request failed: {e}")
                raise exceptions.FailedToGenerateResponseError(f"Request failed: {e}")

        def for_non_stream():
            # Drain the stream to accumulate the full reply, then return it.
            for _ in for_stream():
                pass
            return self.last_response

        return for_stream() if stream else for_non_stream()

    def chat(
        self,
        prompt: str,
        stream: bool = False,
        optimizer: str = None,
        conversationally: bool = False,
    ) -> str:
        """Convenience wrapper around :meth:`ask` that returns plain text.

        Args:
            prompt: User prompt to send.
            stream: Yield text chunks instead of returning one string.
            optimizer: Name of an Optimizers method applied to the prompt.
            conversationally: Apply the optimizer to the conversation prompt.

        Returns:
            The full response string, or a generator of text chunks when
            ``stream`` is True.
        """
        def for_stream():
            for response in self.ask(prompt, True, optimizer=optimizer, conversationally=conversationally):
                yield self.get_message(response)

        def for_non_stream():
            return self.get_message(
                self.ask(prompt, False, optimizer=optimizer, conversationally=conversationally)
            )

        return for_stream() if stream else for_non_stream()

    def get_message(self, response: dict) -> str:
        """Extract the text payload from an :meth:`ask` response dict."""
        assert isinstance(response, dict), "Response should be of dict data-type only"
        return response["text"]
|
215
|
+
|
|
216
|
+
if __name__ == "__main__":
    # Quick manual smoke test: stream a reply to stdout.
    from rich import print

    bot = FreeAIChat(model="GPT-4o", logging=True)
    for chunk in bot.chat("Write a hello world program in Python", stream=True):
        print(chunk, end="", flush=True)
|
webscout/Provider/yep.py
CHANGED
|
@@ -24,7 +24,7 @@ class YEPCHAT(Provider):
|
|
|
24
24
|
AVAILABLE_MODELS (list): List of available models for the provider.
|
|
25
25
|
"""
|
|
26
26
|
|
|
27
|
-
AVAILABLE_MODELS = ["DeepSeek-R1-Distill-Qwen-32B"]
|
|
27
|
+
AVAILABLE_MODELS = ["DeepSeek-R1-Distill-Qwen-32B", "Mixtral-8x7B-Instruct-v0.1"]
|
|
28
28
|
|
|
29
29
|
def __init__(
|
|
30
30
|
self,
|
webscout/__init__.py
CHANGED
webscout/version.py
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
__version__ = "7.2"
|
|
1
|
+
__version__ = "7.3"
|
|
2
2
|
__prog__ = "webscout"
|
webscout/webscout_search.py
CHANGED
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
3
|
# import logging
|
|
4
|
+
import json
|
|
5
|
+
from urllib.parse import quote
|
|
4
6
|
import warnings
|
|
5
7
|
from concurrent.futures import ThreadPoolExecutor
|
|
6
8
|
from datetime import datetime, timezone
|
|
@@ -11,7 +13,7 @@ from random import choice, shuffle
|
|
|
11
13
|
from threading import Event
|
|
12
14
|
from time import sleep, time
|
|
13
15
|
from types import TracebackType
|
|
14
|
-
from typing import cast
|
|
16
|
+
from typing import Any, cast
|
|
15
17
|
|
|
16
18
|
import primp # type: ignore
|
|
17
19
|
|
|
@@ -1140,4 +1142,82 @@ class WEBS:
|
|
|
1140
1142
|
except Exception as e:
|
|
1141
1143
|
raise e
|
|
1142
1144
|
|
|
1143
|
-
return results
|
|
1145
|
+
return results
|
|
1146
|
+
|
|
1147
|
+
def weather(
    self,
    location: str,
    language: str = "en",
) -> dict[str, Any]:
    """Get weather information for a location from DuckDuckGo's forecast spice.

    Args:
        location: Location to get weather for.
        language: Language code (e.g. 'en', 'es'). Defaults to "en".

    Returns:
        Dictionary with "location", "current", "daily_forecast" and
        "hourly_forecast" keys built from the forecast payload.

    Raises:
        WebscoutE: Base exception for webscout errors.
        RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
        TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
    """
    assert location, "location is mandatory"
    # The endpoint wants a bare language code, not a full locale tag.
    lang = language.split('-')[0]
    url = f"https://duckduckgo.com/js/spice/forecast/{quote(location)}/{lang}"

    resp = self._get_url("GET", url)
    resp_text = resp.decode('utf-8')

    if "ddg_spice_forecast(" not in resp_text:
        raise WebscoutE(f"No weather data found for {location}")

    # The payload is JSONP: strip the ddg_spice_forecast( ... ) wrapper.
    json_text = resp_text[resp_text.find('(') + 1:resp_text.rfind(')')]
    try:
        result = json.loads(json_text)
    # BUG FIX: was a blanket `except Exception`; narrow to the decode error
    # and chain the cause so the original parse failure is preserved.
    except json.JSONDecodeError as e:
        raise WebscoutE(f"Error parsing weather JSON: {e}") from e

    if not result or 'currentWeather' not in result or 'forecastDaily' not in result:
        raise WebscoutE(f"Invalid weather data format for {location}")

    formatted_data = {
        "location": result["currentWeather"]["metadata"].get("ddg-location", "Unknown"),
        "current": {
            "condition": result["currentWeather"].get("conditionCode"),
            "temperature_c": result["currentWeather"].get("temperature"),
            "feels_like_c": result["currentWeather"].get("temperatureApparent"),
            "humidity": result["currentWeather"].get("humidity"),
            "wind_speed_ms": result["currentWeather"].get("windSpeed"),
            "wind_direction": result["currentWeather"].get("windDirection"),
            "visibility_m": result["currentWeather"].get("visibility"),
        },
        "daily_forecast": [],
        "hourly_forecast": []
    }

    # Timestamps arrive as ISO-8601 with a trailing "Z"; normalize for
    # datetime.fromisoformat (which rejects "Z" before Python 3.11).
    for day in result["forecastDaily"]["days"]:
        formatted_data["daily_forecast"].append({
            "date": datetime.fromisoformat(day["forecastStart"].replace("Z", "+00:00")).strftime("%Y-%m-%d"),
            "condition": day["daytimeForecast"].get("conditionCode"),
            "max_temp_c": day["temperatureMax"],
            "min_temp_c": day["temperatureMin"],
            "sunrise": datetime.fromisoformat(day["sunrise"].replace("Z", "+00:00")).strftime("%H:%M"),
            "sunset": datetime.fromisoformat(day["sunset"].replace("Z", "+00:00")).strftime("%H:%M"),
        })

    if 'forecastHourly' in result and 'hours' in result['forecastHourly']:
        for hour in result['forecastHourly']['hours']:
            formatted_data["hourly_forecast"].append({
                "time": datetime.fromisoformat(hour["forecastStart"].replace("Z", "+00:00")).strftime("%H:%M"),
                "condition": hour.get("conditionCode"),
                "temperature_c": hour.get("temperature"),
                "feels_like_c": hour.get("temperatureApparent"),
                "humidity": hour.get("humidity"),
                "wind_speed_ms": hour.get("windSpeed"),
                "wind_direction": hour.get("windDirection"),
                "visibility_m": hour.get("visibility"),
            })

    return formatted_data
|
|
@@ -633,4 +633,61 @@ class AsyncWEBS:
|
|
|
633
633
|
from_,
|
|
634
634
|
to,
|
|
635
635
|
)
|
|
636
|
-
return result
|
|
636
|
+
return result
|
|
637
|
+
|
|
638
|
+
async def aweather(
    self,
    location: str,
    language: str = "en",
) -> dict[str, Any]:
    """Async version of weather information retrieval from DuckDuckGo.

    Delegates to the synchronous ``weather`` method on the executor so the
    event loop is not blocked while the HTTP request runs.

    Args:
        location: Location to get weather for.
        language: Language code (e.g. 'en', 'es'). Defaults to "en".

    Returns:
        Dictionary containing weather data: a "location" string, a
        "current" mapping (condition, temperature_c, feels_like_c,
        humidity, wind_speed_ms, wind_direction, visibility_m), a
        "daily_forecast" list (date, condition, max_temp_c, min_temp_c,
        sunrise, sunset) and an "hourly_forecast" list (time, condition,
        temperature_c, feels_like_c, humidity, wind_speed_ms,
        wind_direction, visibility_m).

    Raises:
        WebscoutE: Base exception for webscout errors.
        RatelimitE: Inherits from WebscoutE, raised for exceeding API request rate limits.
        TimeoutE: Inherits from WebscoutE, raised for API request timeouts.
    """
    return await self._loop.run_in_executor(
        self._executor,
        super().weather,
        location,
        language,
    )
|