webscout 1.4.0__tar.gz → 1.4.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic.
- {webscout-1.4.0 → webscout-1.4.1}/PKG-INFO +225 -25
- {webscout-1.4.0 → webscout-1.4.1}/README.md +224 -24
- {webscout-1.4.0 → webscout-1.4.1}/setup.py +1 -1
- {webscout-1.4.0 → webscout-1.4.1}/webscout/__init__.py +1 -1
- {webscout-1.4.0 → webscout-1.4.1}/webscout/async_providers.py +32 -32
- webscout-1.4.1/webscout/tempid.py +157 -0
- webscout-1.4.1/webscout/version.py +2 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout.egg-info/PKG-INFO +225 -25
- {webscout-1.4.0 → webscout-1.4.1}/webscout.egg-info/SOURCES.txt +1 -0
- webscout-1.4.0/webscout/version.py +0 -2
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/__init__.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/documents/__init__.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/documents/query_results_extractor.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/documents/webpage_content_extractor.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/networks/__init__.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/networks/filepath_converter.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/networks/google_searcher.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/networks/network_configs.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/networks/webpage_fetcher.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/utilsdw/__init__.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/utilsdw/enver.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/DeepWEBS/utilsdw/logger.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/LICENSE.md +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/setup.cfg +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/AI.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/AIbase.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/AIutel.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/DWEBS.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/LLM.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/__main__.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/cli.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/exceptions.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/g4f.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/models.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/transcriber.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/utils.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/voice.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/webai.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/webscout_search.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout/webscout_search_async.py +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout.egg-info/dependency_links.txt +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout.egg-info/entry_points.txt +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout.egg-info/requires.txt +0 -0
- {webscout-1.4.0 → webscout-1.4.1}/webscout.egg-info/top_level.txt +0 -0
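Aside from the version bump, the headline addition in 1.4.1 is the new `webscout/tempid.py` module (157 lines), which backs the temporary email and temporary phone number features documented in the README hunks below. As a quick orientation, here is a minimal sketch of the synchronous phone-number API, using only the class, methods, and message attributes that appear in those README additions; the waiting and error handling from the full example is trimmed.

```python
# Minimal sketch of the new webscout.tempid phone-number API (names taken from
# the README additions in this diff; waiting and error handling are trimmed).
from webscout import tempid

phone = tempid.TemporaryPhoneNumber()
number = phone.get_number(country="Finland")  # per the README comment, country can also be left random
print(f"Temporary number: {number}")

# After an SMS has had time to arrive, poll for it:
for message in phone.get_messages(number):
    print(message.frm, message.content)
```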
--- webscout-1.4.0/PKG-INFO
+++ webscout-1.4.1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 1.4.0
+Version: 1.4.1
 Summary: Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support
 Author: OEvortex
 Author-email: helpingai5@gmail.com
@@ -59,7 +59,7 @@ Requires-Dist: pytest>=7.4.2; extra == "dev"
 <a href="#"><img alt="Python version" src="https://img.shields.io/pypi/pyversions/webscout"/></a>
 <a href="https://pepy.tech/project/webscout"><img alt="Downloads" src="https://static.pepy.tech/badge/webscout"></a>

-Search for anything using the Google, DuckDuckGo
+Search for anything using the Google, DuckDuckGo, phind.com. Also containes AI models, can transcribe yt videos, temporary email and phone number generation, have TTS support and webai(terminal gpt and open interpeter)


 ## Table of Contents
@@ -69,6 +69,9 @@ Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.co
 - [CLI version](#cli-version)
 - [CLI to use LLM](#cli-to-use-llm)
 - [Regions](#regions)
+- [Tempmail and Temp number](#tempmail-and-temp-number)
+- [Temp number](#temp-number)
+- [Tempmail](#tempmail)
 - [Transcriber](#transcriber)
 - [DeepWEBS: Advanced Web Searches](#deepwebs-advanced-web-searches)
 - [Activating DeepWEBS](#activating-deepwebs)
@@ -101,8 +104,9 @@ Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.co
 - [9. `KOBOLDIA` -](#9-koboldia--)
 - [10. `Reka` - chat with reka](#10-reka---chat-with-reka)
 - [11. `Cohere` - chat with cohere](#11-cohere---chat-with-cohere)
-- [`LLM`
+- [`LLM`](#llm)
 - [`LLM` with internet](#llm-with-internet)
+- [LLM with deepwebs](#llm-with-deepwebs)
 - [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)

 ## Install
@@ -212,7 +216,91 @@ ___

 [Go To TOP](#TOP)

+## Tempmail and Temp number

+### Temp number
+```python
+from rich.console import Console
+from webscout import tempid
+
+def main():
+    console = Console()
+    phone = tempid.TemporaryPhoneNumber()
+
+    try:
+        # Get a temporary phone number for a specific country (or random)
+        number = phone.get_number(country="Finland")
+        console.print(f"Your temporary phone number: [bold cyan]{number}[/bold cyan]")
+
+        # Pause execution briefly (replace with your actual logic)
+        # import time module
+        import time
+        time.sleep(30) # Adjust the waiting time as needed
+
+        # Retrieve and print messages
+        messages = phone.get_messages(number)
+        if messages:
+            # Access individual messages using indexing:
+            console.print(f"[bold green]{messages[0].frm}:[/] {messages[0].content}")
+            # (Add more lines if you expect multiple messages)
+        else:
+            console.print("No messages received.")
+
+    except Exception as e:
+        console.print(f"[bold red]An error occurred: {e}")
+
+if __name__ == "__main__":
+    main()
+
+```
+### Tempmail
+```python
+import asyncio
+from rich.console import Console
+from rich.table import Table
+from rich.text import Text
+from webscout import tempid
+
+async def main() -> None:
+    console = Console()
+    client = tempid.Client()
+
+    try:
+        domains = await client.get_domains()
+        if not domains:
+            console.print("[bold red]No domains available. Please try again later.")
+            return
+
+        email = await client.create_email(domain=domains[0].name)
+        console.print(f"Your temporary email: [bold cyan]{email.email}[/bold cyan]")
+        console.print(f"Token for accessing the email: [bold cyan]{email.token}[/bold cyan]")
+
+        while True:
+            messages = await client.get_messages(email.email)
+            if messages is not None:
+                break
+
+        if messages:
+            table = Table(show_header=True, header_style="bold magenta")
+            table.add_column("From", style="bold cyan")
+            table.add_column("Subject", style="bold yellow")
+            table.add_column("Body", style="bold green")
+            for message in messages:
+                body_preview = Text(message.body_text if message.body_text else "No body")
+                table.add_row(message.email_from or "Unknown", message.subject or "No Subject", body_preview)
+            console.print(table)
+        else:
+            console.print("No messages found.")
+
+    except Exception as e:
+        console.print(f"[bold red]An error occurred: {e}")
+
+    finally:
+        await client.close()
+
+if __name__ == '__main__':
+    asyncio.run(main())
+```
 ## Transcriber
 The transcriber function in webscout is a handy tool that transcribes YouTube videos. Here's an example code demonstrating its usage:
 ```python
@@ -484,19 +572,47 @@ with WEBS() as WEBS:

 ```python
 from webscout import WEBS
+import datetime
+
+def fetch_news(keywords, timelimit):
+    news_list = []
+    with WEBS() as webs_instance:
+        WEBS_news_gen = webs_instance.news(
+            keywords,
+            region="wt-wt",
+            safesearch="off",
+            timelimit=timelimit,
+            max_results=20
+        )
+        for r in WEBS_news_gen:
+            # Convert the date to a human-readable format using datetime
+            r['date'] = datetime.datetime.fromisoformat(r['date']).strftime('%B %d, %Y')
+            news_list.append(r)
+    return news_list
+
+def _format_headlines(news_list, max_headlines: int = 100):
+    headlines = []
+    for idx, news_item in enumerate(news_list):
+        if idx >= max_headlines:
+            break
+        new_headline = f"{idx + 1}. {news_item['title'].strip()} "
+        new_headline += f"(URL: {news_item['url'].strip()}) "
+        new_headline += f"{news_item['body'].strip()}"
+        new_headline += "\n"
+        headlines.append(new_headline)
+
+    headlines = "\n".join(headlines)
+    return headlines
+
+# Example usage
+keywords = 'latest AI news'
+timelimit = 'd'
+news_list = fetch_news(keywords, timelimit)
+
+# Format and print the headlines
+formatted_headlines = _format_headlines(news_list)
+print(formatted_headlines)

-# News search for the keyword 'holiday' using DuckDuckGo.com and yep.com
-with WEBS() as WEBS:
-    keywords = 'holiday'
-    WEBS_news_gen = WEBS.news(
-        keywords,
-        region="wt-wt",
-        safesearch="off",
-        timelimit="m",
-        max_results=20
-    )
-    for r in WEBS_news_gen:
-        print(r)
 ```

 ### 6. `maps()` - map search by DuckDuckGo.com and
@@ -730,7 +846,7 @@ response_str = a.chat(prompt)
 print(response_str)
 ```

-### `LLM`
+### `LLM`
 ```python
 from webscout.LLM import LLM

@@ -761,23 +877,19 @@ while True:
 from __future__ import annotations
 from typing import List, Optional

-from webscout import LLM
+from webscout.LLM import LLM
 from webscout import WEBS
 import warnings

 system_message: str = (
-    "As AI,
-    "
-    "
-    "seamless interactions with users. Embrace a responsive demeanor, harnessing available online resources to address "
-    "queries, share pertinent content, and facilitate meaningful exchanges. By doing so, you create value through "
-    "connection and engagement, ultimately enhancing overall user satisfaction and experience. Additionally, "
-    "continue upholding the principles of respect, impartiality, and intellectual integrity throughout all interactions."
+    "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
+    "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
+    "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
 )

 # Ignore the specific UserWarning
 warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffi.aio", lineno=205)
-LLM = LLM(model="
+LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)


 def chat(
@@ -833,6 +945,94 @@ if __name__ == "__main__":
         else:
             print("No response")
 ```
+### LLM with deepwebs
+```python
+from __future__ import annotations
+from typing import List, Optional
+from webscout.LLM import LLM
+from webscout import DeepWEBS
+import warnings
+
+system_message: str = (
+    "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
+    "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
+    "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
+)
+
+# Ignore the specific UserWarning
+warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffi.aio", lineno=205)
+
+LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)
+
+def perform_web_search(query):
+    # Initialize the DeepWEBS class
+    D = DeepWEBS()
+
+    # Set up the search parameters
+    search_params = D.DeepSearch(
+        queries=[query], # Query to search
+        result_num=10, # Number of search results
+        safe=True, # Enable SafeSearch
+        types=["web"], # Search type: web
+        extract_webpage=True, # True for extracting webpages
+        overwrite_query_html=True,
+        overwrite_webpage_html=True,
+    )
+
+    # Execute the search and retrieve results
+    results = D.queries_to_search_results(search_params)
+    return results
+
+def chat(user_input: str, result_num: int = 10) -> Optional[str]:
+    """
+    Chat function to perform a web search based on the user input and generate a response using the LLM model.
+
+    Parameters
+    ----------
+    user_input : str
+        The user input to be used for the web search
+    max_results : int, optional
+        The maximum number of search results to include in the response, by default 10
+
+    Returns
+    -------
+    Optional[str]
+        The response generated by the LLM model, or None if there is no response
+    """
+    # Perform a web search based on the user input
+    search_results = perform_web_search(user_input)
+
+    # Extract URLs from search results
+    url_results = []
+    for result in search_results[0]['query_results']:
+        url_results.append(f"{result['title']} ({result['site']}): {result['url']}")
+
+    # Format search results
+    formatted_results = "\n".join(url_results)
+
+    # Define the messages to be sent, including the user input, search results, and system message
+    messages = [
+        {"role": "user", "content": f"User question is:\n{user_input}\nwebsearch results are:\n{formatted_results}"},
+    ]
+
+    # Use the chat method to get the response
+    response = LLM.chat(messages)
+    return response
+
+if __name__ == "__main__":
+    while True:
+        # Get the user input
+        user_input = input("User: ")
+
+        # Perform a web search based on the user input
+        response = chat(user_input)
+
+        # Print the response
+        if response:
+            print("AI:", response)
+        else:
+            print("No response")
+```
 ## `Webai` - terminal gpt and a open interpeter

 ```python
--- webscout-1.4.0/README.md
+++ webscout-1.4.1/README.md
@@ -4,7 +4,7 @@
 <a href="#"><img alt="Python version" src="https://img.shields.io/pypi/pyversions/webscout"/></a>
 <a href="https://pepy.tech/project/webscout"><img alt="Downloads" src="https://static.pepy.tech/badge/webscout"></a>

-Search for anything using the Google, DuckDuckGo
+Search for anything using the Google, DuckDuckGo, phind.com. Also containes AI models, can transcribe yt videos, temporary email and phone number generation, have TTS support and webai(terminal gpt and open interpeter)


 ## Table of Contents
@@ -14,6 +14,9 @@ Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.co
 - [CLI version](#cli-version)
 - [CLI to use LLM](#cli-to-use-llm)
 - [Regions](#regions)
+- [Tempmail and Temp number](#tempmail-and-temp-number)
+- [Temp number](#temp-number)
+- [Tempmail](#tempmail)
 - [Transcriber](#transcriber)
 - [DeepWEBS: Advanced Web Searches](#deepwebs-advanced-web-searches)
 - [Activating DeepWEBS](#activating-deepwebs)
@@ -46,8 +49,9 @@ Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.co
 - [9. `KOBOLDIA` -](#9-koboldia--)
 - [10. `Reka` - chat with reka](#10-reka---chat-with-reka)
 - [11. `Cohere` - chat with cohere](#11-cohere---chat-with-cohere)
-- [`LLM`
+- [`LLM`](#llm)
 - [`LLM` with internet](#llm-with-internet)
+- [LLM with deepwebs](#llm-with-deepwebs)
 - [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)

 ## Install
@@ -157,7 +161,91 @@ ___

 [Go To TOP](#TOP)

+## Tempmail and Temp number

+### Temp number
+```python
+from rich.console import Console
+from webscout import tempid
+
+def main():
+    console = Console()
+    phone = tempid.TemporaryPhoneNumber()
+
+    try:
+        # Get a temporary phone number for a specific country (or random)
+        number = phone.get_number(country="Finland")
+        console.print(f"Your temporary phone number: [bold cyan]{number}[/bold cyan]")
+
+        # Pause execution briefly (replace with your actual logic)
+        # import time module
+        import time
+        time.sleep(30) # Adjust the waiting time as needed
+
+        # Retrieve and print messages
+        messages = phone.get_messages(number)
+        if messages:
+            # Access individual messages using indexing:
+            console.print(f"[bold green]{messages[0].frm}:[/] {messages[0].content}")
+            # (Add more lines if you expect multiple messages)
+        else:
+            console.print("No messages received.")
+
+    except Exception as e:
+        console.print(f"[bold red]An error occurred: {e}")
+
+if __name__ == "__main__":
+    main()
+
+```
+### Tempmail
+```python
+import asyncio
+from rich.console import Console
+from rich.table import Table
+from rich.text import Text
+from webscout import tempid
+
+async def main() -> None:
+    console = Console()
+    client = tempid.Client()
+
+    try:
+        domains = await client.get_domains()
+        if not domains:
+            console.print("[bold red]No domains available. Please try again later.")
+            return
+
+        email = await client.create_email(domain=domains[0].name)
+        console.print(f"Your temporary email: [bold cyan]{email.email}[/bold cyan]")
+        console.print(f"Token for accessing the email: [bold cyan]{email.token}[/bold cyan]")
+
+        while True:
+            messages = await client.get_messages(email.email)
+            if messages is not None:
+                break
+
+        if messages:
+            table = Table(show_header=True, header_style="bold magenta")
+            table.add_column("From", style="bold cyan")
+            table.add_column("Subject", style="bold yellow")
+            table.add_column("Body", style="bold green")
+            for message in messages:
+                body_preview = Text(message.body_text if message.body_text else "No body")
+                table.add_row(message.email_from or "Unknown", message.subject or "No Subject", body_preview)
+            console.print(table)
+        else:
+            console.print("No messages found.")
+
+    except Exception as e:
+        console.print(f"[bold red]An error occurred: {e}")
+
+    finally:
+        await client.close()
+
+if __name__ == '__main__':
+    asyncio.run(main())
+```
 ## Transcriber
 The transcriber function in webscout is a handy tool that transcribes YouTube videos. Here's an example code demonstrating its usage:
 ```python
@@ -429,19 +517,47 @@ with WEBS() as WEBS:

 ```python
 from webscout import WEBS
+import datetime
+
+def fetch_news(keywords, timelimit):
+    news_list = []
+    with WEBS() as webs_instance:
+        WEBS_news_gen = webs_instance.news(
+            keywords,
+            region="wt-wt",
+            safesearch="off",
+            timelimit=timelimit,
+            max_results=20
+        )
+        for r in WEBS_news_gen:
+            # Convert the date to a human-readable format using datetime
+            r['date'] = datetime.datetime.fromisoformat(r['date']).strftime('%B %d, %Y')
+            news_list.append(r)
+    return news_list
+
+def _format_headlines(news_list, max_headlines: int = 100):
+    headlines = []
+    for idx, news_item in enumerate(news_list):
+        if idx >= max_headlines:
+            break
+        new_headline = f"{idx + 1}. {news_item['title'].strip()} "
+        new_headline += f"(URL: {news_item['url'].strip()}) "
+        new_headline += f"{news_item['body'].strip()}"
+        new_headline += "\n"
+        headlines.append(new_headline)
+
+    headlines = "\n".join(headlines)
+    return headlines
+
+# Example usage
+keywords = 'latest AI news'
+timelimit = 'd'
+news_list = fetch_news(keywords, timelimit)
+
+# Format and print the headlines
+formatted_headlines = _format_headlines(news_list)
+print(formatted_headlines)

-# News search for the keyword 'holiday' using DuckDuckGo.com and yep.com
-with WEBS() as WEBS:
-    keywords = 'holiday'
-    WEBS_news_gen = WEBS.news(
-        keywords,
-        region="wt-wt",
-        safesearch="off",
-        timelimit="m",
-        max_results=20
-    )
-    for r in WEBS_news_gen:
-        print(r)
 ```

 ### 6. `maps()` - map search by DuckDuckGo.com and
@@ -675,7 +791,7 @@ response_str = a.chat(prompt)
 print(response_str)
 ```

-### `LLM`
+### `LLM`
 ```python
 from webscout.LLM import LLM

@@ -706,23 +822,19 @@ while True:
 from __future__ import annotations
 from typing import List, Optional

-from webscout import LLM
+from webscout.LLM import LLM
 from webscout import WEBS
 import warnings

 system_message: str = (
-    "As AI,
-    "
-    "
-    "seamless interactions with users. Embrace a responsive demeanor, harnessing available online resources to address "
-    "queries, share pertinent content, and facilitate meaningful exchanges. By doing so, you create value through "
-    "connection and engagement, ultimately enhancing overall user satisfaction and experience. Additionally, "
-    "continue upholding the principles of respect, impartiality, and intellectual integrity throughout all interactions."
+    "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
+    "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
+    "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
 )

 # Ignore the specific UserWarning
 warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffi.aio", lineno=205)
-LLM = LLM(model="
+LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)


 def chat(
@@ -778,6 +890,94 @@ if __name__ == "__main__":
         else:
             print("No response")
 ```
+### LLM with deepwebs
+```python
+from __future__ import annotations
+from typing import List, Optional
+from webscout.LLM import LLM
+from webscout import DeepWEBS
+import warnings
+
+system_message: str = (
+    "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
+    "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
+    "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
+)
+
+# Ignore the specific UserWarning
+warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffi.aio", lineno=205)
+
+LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)
+
+def perform_web_search(query):
+    # Initialize the DeepWEBS class
+    D = DeepWEBS()
+
+    # Set up the search parameters
+    search_params = D.DeepSearch(
+        queries=[query], # Query to search
+        result_num=10, # Number of search results
+        safe=True, # Enable SafeSearch
+        types=["web"], # Search type: web
+        extract_webpage=True, # True for extracting webpages
+        overwrite_query_html=True,
+        overwrite_webpage_html=True,
+    )
+
+    # Execute the search and retrieve results
+    results = D.queries_to_search_results(search_params)
+    return results
+
+def chat(user_input: str, result_num: int = 10) -> Optional[str]:
+    """
+    Chat function to perform a web search based on the user input and generate a response using the LLM model.
+
+    Parameters
+    ----------
+    user_input : str
+        The user input to be used for the web search
+    max_results : int, optional
+        The maximum number of search results to include in the response, by default 10
+
+    Returns
+    -------
+    Optional[str]
+        The response generated by the LLM model, or None if there is no response
+    """
+    # Perform a web search based on the user input
+    search_results = perform_web_search(user_input)
+
+    # Extract URLs from search results
+    url_results = []
+    for result in search_results[0]['query_results']:
+        url_results.append(f"{result['title']} ({result['site']}): {result['url']}")
+
+    # Format search results
+    formatted_results = "\n".join(url_results)
+
+    # Define the messages to be sent, including the user input, search results, and system message
+    messages = [
+        {"role": "user", "content": f"User question is:\n{user_input}\nwebsearch results are:\n{formatted_results}"},
+    ]
+
+    # Use the chat method to get the response
+    response = LLM.chat(messages)
+    return response
+
+if __name__ == "__main__":
+    while True:
+        # Get the user input
+        user_input = input("User: ")
+
+        # Perform a web search based on the user input
+        response = chat(user_input)
+
+        # Print the response
+        if response:
+            print("AI:", response)
+        else:
+            print("No response")
+```
 ## `Webai` - terminal gpt and a open interpeter

 ```python
--- webscout-1.4.0/setup.py
+++ webscout-1.4.1/setup.py
@@ -5,7 +5,7 @@ with open("README.md", encoding="utf-8") as f:

 setup(
     name="webscout",
-    version="1.4.0",
+    version="1.4.1",
     description="Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos, have TTS support and now has webai(terminal gpt and open interpeter) support",
     long_description=README,
     long_description_content_type="text/markdown",