webscout 1.4.0__py3-none-any.whl → 1.4.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic.
- webscout/AI.py +294 -185
- webscout/AIutel.py +2 -0
- webscout/__init__.py +3 -1
- webscout/async_providers.py +32 -32
- webscout/g4f.py +1 -1
- webscout/tempid.py +157 -0
- webscout/version.py +1 -1
- webscout/webai.py +31 -15
- {webscout-1.4.0.dist-info → webscout-1.4.2.dist-info}/METADATA +275 -58
- {webscout-1.4.0.dist-info → webscout-1.4.2.dist-info}/RECORD +14 -13
- {webscout-1.4.0.dist-info → webscout-1.4.2.dist-info}/LICENSE.md +0 -0
- {webscout-1.4.0.dist-info → webscout-1.4.2.dist-info}/WHEEL +0 -0
- {webscout-1.4.0.dist-info → webscout-1.4.2.dist-info}/entry_points.txt +0 -0
- {webscout-1.4.0.dist-info → webscout-1.4.2.dist-info}/top_level.txt +0 -0
webscout/g4f.py
CHANGED
@@ -4,7 +4,7 @@ from webscout.AIutel import Conversation
 from webscout.AIutel import AwesomePrompts
 from webscout.AIbase import Provider, AsyncProvider
 from webscout.AIutel import available_providers
-
+from typing import Any, AsyncGenerator
 
 g4f.debug.version_check = False
 
webscout/tempid.py
ADDED
@@ -0,0 +1,157 @@
+import aiohttp
+from dataclasses import dataclass
+from bs4 import BeautifulSoup
+import tls_client
+import random
+
+
+@dataclass
+class DomainModel:
+    name: str
+    type: str
+    forward_available: str
+    forward_max_seconds: str
+
+
+@dataclass
+class CreateEmailResponseModel:
+    email: str
+    token: str
+
+
+@dataclass
+class MessageResponseModel:
+    attachments: list | None
+    body_html: str | None
+    body_text: str | None
+    cc: str | None
+    created_at: str
+    email_from: str | None
+    id: str
+    subject: str | None
+    email_to: str | None
+
+
+class Client:
+    def __init__(self):
+        self._session = aiohttp.ClientSession(
+            base_url="https://api.internal.temp-mail.io",
+            headers={
+                'Host': 'api.internal.temp-mail.io',
+                'User-Agent': 'okhttp/4.5.0',
+                'Connection': 'close'
+            }
+        )
+
+    async def close(self) -> None:
+        if not self._session.closed:
+            await self._session.close()
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self) -> None:
+        await self.close()
+        return None
+
+    async def get_domains(self) -> list[DomainModel]:
+        async with self._session.get("/api/v3/domains") as response:
+            response_json = await response.json()
+            return [DomainModel(domain['name'], domain['type'], domain['forward_available'], domain['forward_max_seconds']) for domain in response_json['domains']]
+
+    async def create_email(self, alias: str | None = None, domain: str | None = None) -> CreateEmailResponseModel:
+        async with self._session.post("/api/v3/email/new", data={'name': alias, 'domain': domain}) as response:
+            response_json = await response.json()
+            return CreateEmailResponseModel(response_json['email'], response_json['token'])
+
+    async def delete_email(self, email: str, token: str) -> bool:
+        async with self._session.delete(f"/api/v3/email/{email}", data={'token': token}) as response:
+            if response.status == 200:
+                return True
+            else:
+                return False
+
+    async def get_messages(self, email: str) -> list[MessageResponseModel] | None:
+        async with self._session.get(f"/api/v3/email/{email}/messages") as response:
+            response_json = await response.json()
+            if len(response_json) == 0:
+                return None
+            return [MessageResponseModel(message['attachments'], message['body_html'], message['body_text'], message['cc'], message['created_at'], message['from'], message['id'], message['subject'], message['to']) for message in response_json]
+
+
+class TemporaryPhoneNumber:
+    def __init__(self):
+        self.maxpages = {"UK": 59, "US": 3, "France": 73, "Netherlands": 60, "Finland": 47}
+        self.minpages = {"UK": 20, "US": 1, "France": 20, "Netherlands": 20, "Finland": 20}
+        self.plist = {"UK": "+44", "US": "+1", "France": "+33", "Netherlands": "+31", "Finland": "+358"}
+        self.countries = {"44": "UK", "1": "US", "33": "France", "31": "Netherlands", "358": "Finland"}
+
+    def get_number(self, country="UK"):
+        if country == "Random":
+            country = random.choice(list(self.countries.values()))
+        if country not in self.countries.values():
+            raise ValueError("Unsupported Country")
+
+        session = tls_client.Session(client_identifier="chrome112", random_tls_extension_order=True)
+        maxpage = self.maxpages[country]
+        minpage = self.minpages[country]
+        page = random.randint(minpage, maxpage)
+
+        if page == 1:
+            res = session.get(f"https://temporary-phone-number.com/{country}-Phone-Number")
+        else:
+            res = session.get(f"https://temporary-phone-number.com/{country}-Phone-Number/page{page}")
+
+        soup = BeautifulSoup(res.content, "lxml")
+        numbers = []
+        p = self.plist[country]
+        for a in soup.find_all("a"):
+            a = a.get("title", "none")
+            if f"{country} Phone Number {p}" in a:
+                a = a.replace(f"{country} Phone Number ", "").replace(" ", "")
+                numbers.append(a)
+        return random.choice(numbers)
+
+    def get_messages(self, number: str):
+        number = number.replace("+", "")
+        try:
+            i = int(number)
+        except:
+            raise ValueError("Wrong Number")
+
+        country = None
+        for key, value in self.countries.items():
+            if number.startswith(key):
+                country = value
+
+        if country == None:
+            raise ValueError("Unsupported Country")
+
+        session = tls_client.Session(client_identifier="chrome112", random_tls_extension_order=True)
+        res = session.get(f"https://temporary-phone-number.com/{country}-Phone-Number/{number}")
+
+        if res.status_code == 404:
+            raise ValueError("Number doesn't exist")
+
+        soup = BeautifulSoup(res.content, "lxml")
+        messages = []
+        message = {"content": None, "frm": "", "time": ""}
+
+        for div in soup.find_all("div"):
+            divclass = div.get("class", "None")[0]
+            if divclass == "direct-chat-info":
+                message["frm"] = div.text.split("\n")[1].replace("From ", "")
+                message["time"] = div.text.split("\n")[2]
+            if divclass == "direct-chat-text":
+                message["content"] = div.text
+                messages.append(sms_message(content=message["content"], frm=message["frm"], time=message["time"]))
+                message = {"content": None, "frm": "", "time": ""}
+
+        return messages
+
+
+class sms_message:
+    def __init__(self, content, frm, time):
+        self.content = content
+        self.frm = frm
+        self.time = time
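The added module is self-contained; the README examples added to METADATA below import it as `from webscout import tempid`. A minimal sketch of the async email client, not part of the release and assuming the temp-mail.io endpoints respond as the code above expects:

```python
import asyncio

from webscout import tempid


async def demo() -> None:
    client = tempid.Client()
    try:
        # List the domains the API offers and create a mailbox on the first one.
        domains = await client.get_domains()
        mailbox = await client.create_email(domain=domains[0].name)
        print("Address:", mailbox.email)

        # Clean up; delete_email returns True when the API answers with HTTP 200.
        print("Deleted:", await client.delete_email(mailbox.email, mailbox.token))
    finally:
        await client.close()


asyncio.run(demo())
```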
webscout/version.py
CHANGED
@@ -1,2 +1,2 @@
-__version__ = "1.4.0"
+__version__ = "1.4.2"
 
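The bumped version can be read back at runtime, since `__version__` comes straight from this file:

```python
from webscout.version import __version__

print(__version__)  # "1.4.2" for this release
```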
webscout/webai.py
CHANGED
@@ -514,20 +514,6 @@ class Main(cmd.Cmd):
                 history_offset=history_offset,
                 act=awesome_prompt,
             )
-        elif provider == "sean":
-            from webscout.AI import Sean
-
-            self.bot = Sean(
-                is_conversation=disable_conversation,
-                max_tokens=max_tokens,
-                timeout=timeout,
-                intro=intro,
-                filepath=filepath,
-                update_file=update_file,
-                proxies=proxies,
-                history_offset=history_offset,
-                act=awesome_prompt,
-            )
         elif provider == "cohere":
             assert auth, (
                 "Cohere's API-key is required. Use the flag `--key` or `-k`"
@@ -583,6 +569,22 @@ class Main(cmd.Cmd):
                 history_offset=history_offset,
                 act=awesome_prompt,
             )
+        elif provider == "xjai":
+            from webscout.AI import Xjai
+
+            self.bot = Xjai(
+                is_conversation=disable_conversation,
+                max_tokens=max_tokens,
+                temperature=temperature,
+                top_p=top_p,
+                timeout=timeout,
+                intro=intro,
+                filepath=filepath,
+                update_file=update_file,
+                proxies=proxies,
+                history_offset=history_offset,
+                act=awesome_prompt,
+            )
 
         elif provider == "gemini":
             from webscout.AI import GEMINI
@@ -629,7 +631,21 @@ class Main(cmd.Cmd):
                 history_offset=history_offset,
                 act=awesome_prompt,
             )
+        elif provider == "you":
+
+            from webscout.AI import YouChat
 
+            self.bot = YouChat(
+                is_conversation=disable_conversation,
+                max_tokens=max_tokens,
+                timeout=timeout,
+                intro=intro,
+                filepath=filepath,
+                update_file=update_file,
+                proxies=proxies,
+                history_offset=history_offset,
+                act=awesome_prompt,
+            )
 
         elif provider in webscout.gpt4free_providers:
             from webscout.g4f import GPT4FREE
@@ -2351,7 +2367,7 @@ class Utils:
         source = "git+" + webscout.__repo__ + ".git"
         source = "webscout" if source is None else source
         assert (
-            "
+            "webscout" in source or source == "."
         ), f"Cannot update webscout from the source '{source}'"
         click.secho(
             f"[*] Updating from '{'pip' if source=='webscout' else source}'",
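The new branches wire `Xjai` and `YouChat` into the interactive `webai` session with the keyword arguments shown above. For reference, a minimal sketch that drives the two providers directly, using the constructor keywords from these hunks and the `chat`/`ask` calls documented in the README below; the argument values here are illustrative:

```python
from webscout.AI import Xjai, YouChat

# Same keywords the CLI passes above, with illustrative values.
xjai = Xjai(is_conversation=True, max_tokens=800, temperature=0.8, top_p=0.9, timeout=30)
print(xjai.chat("Say hello"))

you = YouChat(is_conversation=True, max_tokens=800, timeout=30)
print(you.get_message(you.ask("Say hello")))
```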
{webscout-1.4.0.dist-info → webscout-1.4.2.dist-info}/METADATA
CHANGED

@@ -1,7 +1,7 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 1.4.0
-Summary: Search for anything using the Google, DuckDuckGo
+Version: 1.4.2
+Summary: Search for anything using the Google, DuckDuckGo, phind.com. Also containes AI models, can transcribe yt videos, temporary email and phone number generation, have TTS support and webai(terminal gpt and open interpeter)
 Author: OEvortex
 Author-email: helpingai5@gmail.com
 License: HelpingAI Simplified Universal License
@@ -49,6 +49,7 @@ Requires-Dist: orjson
 Requires-Dist: PyYAML
 Requires-Dist: appdirs
 Requires-Dist: GoogleBard1 >=2.1.4
+Requires-Dist: tls-client
 Provides-Extra: dev
 Requires-Dist: ruff >=0.1.6 ; extra == 'dev'
 Requires-Dist: pytest >=7.4.2 ; extra == 'dev'
@@ -59,7 +60,7 @@ Requires-Dist: pytest >=7.4.2 ; extra == 'dev'
 <a href="#"><img alt="Python version" src="https://img.shields.io/pypi/pyversions/webscout"/></a>
 <a href="https://pepy.tech/project/webscout"><img alt="Downloads" src="https://static.pepy.tech/badge/webscout"></a>
 
-Search for anything using the Google, DuckDuckGo
+Search for anything using the Google, DuckDuckGo, phind.com. Also containes AI models, can transcribe yt videos, temporary email and phone number generation, have TTS support and webai(terminal gpt and open interpeter)
 
 
 ## Table of Contents
@@ -69,6 +70,9 @@ Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.co
 - [CLI version](#cli-version)
 - [CLI to use LLM](#cli-to-use-llm)
 - [Regions](#regions)
+- [Tempmail and Temp number](#tempmail-and-temp-number)
+- [Temp number](#temp-number)
+- [Tempmail](#tempmail)
 - [Transcriber](#transcriber)
 - [DeepWEBS: Advanced Web Searches](#deepwebs-advanced-web-searches)
 - [Activating DeepWEBS](#activating-deepwebs)
@@ -91,9 +95,8 @@ Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.co
 - [usage of webscout.AI](#usage-of-webscoutai)
 - [1. `PhindSearch` - Search using Phind.com](#1-phindsearch---search-using-phindcom)
 - [2. `YepChat` - Chat with mistral 8x7b powered by yepchat](#2-yepchat---chat-with-mistral-8x7b-powered-by-yepchat)
-- [3. `You.com` - search with you.com
+- [3. `You.com` - search/chat with you.com](#3-youcom---searchchat-with-youcom)
 - [4. `Gemini` - search with google gemini](#4-gemini---search-with-google-gemini)
-- [usage of image generator from Webscout.AI](#usage-of-image-generator-from-webscoutai)
 - [5. `Prodia` - make image using prodia](#5-prodia---make-image-using-prodia)
 - [6. `BlackBox` - Search/chat With BlackBox](#6-blackbox---searchchat-with-blackbox)
 - [7. `PERPLEXITY` - Search With PERPLEXITY](#7-perplexity---search-with-perplexity)
@@ -101,8 +104,10 @@ Search for anything using the Google, DuckDuckGo.com, yep.com, phind.com, you.co
 - [9. `KOBOLDIA` -](#9-koboldia--)
 - [10. `Reka` - chat with reka](#10-reka---chat-with-reka)
 - [11. `Cohere` - chat with cohere](#11-cohere---chat-with-cohere)
-- [`
+- [12. `Xjai` - chat with free gpt 3.5](#12-xjai---chat-with-free-gpt-35)
+- [`LLM`](#llm)
 - [`LLM` with internet](#llm-with-internet)
+- [LLM with deepwebs](#llm-with-deepwebs)
 - [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)
 
 ## Install
@@ -212,7 +217,91 @@ ___
 
 [Go To TOP](#TOP)
 
+## Tempmail and Temp number
 
+### Temp number
+```python
+from rich.console import Console
+from webscout import tempid
+
+def main():
+    console = Console()
+    phone = tempid.TemporaryPhoneNumber()
+
+    try:
+        # Get a temporary phone number for a specific country (or random)
+        number = phone.get_number(country="Finland")
+        console.print(f"Your temporary phone number: [bold cyan]{number}[/bold cyan]")
+
+        # Pause execution briefly (replace with your actual logic)
+        # import time module
+        import time
+        time.sleep(30) # Adjust the waiting time as needed
+
+        # Retrieve and print messages
+        messages = phone.get_messages(number)
+        if messages:
+            # Access individual messages using indexing:
+            console.print(f"[bold green]{messages[0].frm}:[/] {messages[0].content}")
+            # (Add more lines if you expect multiple messages)
+        else:
+            console.print("No messages received.")
+
+    except Exception as e:
+        console.print(f"[bold red]An error occurred: {e}")
+
+if __name__ == "__main__":
+    main()
+
+```
+### Tempmail
+```python
+import asyncio
+from rich.console import Console
+from rich.table import Table
+from rich.text import Text
+from webscout import tempid
+
+async def main() -> None:
+    console = Console()
+    client = tempid.Client()
+
+    try:
+        domains = await client.get_domains()
+        if not domains:
+            console.print("[bold red]No domains available. Please try again later.")
+            return
+
+        email = await client.create_email(domain=domains[0].name)
+        console.print(f"Your temporary email: [bold cyan]{email.email}[/bold cyan]")
+        console.print(f"Token for accessing the email: [bold cyan]{email.token}[/bold cyan]")
+
+        while True:
+            messages = await client.get_messages(email.email)
+            if messages is not None:
+                break
+
+        if messages:
+            table = Table(show_header=True, header_style="bold magenta")
+            table.add_column("From", style="bold cyan")
+            table.add_column("Subject", style="bold yellow")
+            table.add_column("Body", style="bold green")
+            for message in messages:
+                body_preview = Text(message.body_text if message.body_text else "No body")
+                table.add_row(message.email_from or "Unknown", message.subject or "No Subject", body_preview)
+            console.print(table)
+        else:
+            console.print("No messages found.")
+
+    except Exception as e:
+        console.print(f"[bold red]An error occurred: {e}")
+
+    finally:
+        await client.close()
+
+if __name__ == '__main__':
+    asyncio.run(main())
+```
 ## Transcriber
 The transcriber function in webscout is a handy tool that transcribes YouTube videos. Here's an example code demonstrating its usage:
 ```python
@@ -484,19 +573,47 @@ with WEBS() as WEBS:
 
 ```python
 from webscout import WEBS
+import datetime
+
+def fetch_news(keywords, timelimit):
+    news_list = []
+    with WEBS() as webs_instance:
+        WEBS_news_gen = webs_instance.news(
+            keywords,
+            region="wt-wt",
+            safesearch="off",
+            timelimit=timelimit,
+            max_results=20
+        )
+        for r in WEBS_news_gen:
+            # Convert the date to a human-readable format using datetime
+            r['date'] = datetime.datetime.fromisoformat(r['date']).strftime('%B %d, %Y')
+            news_list.append(r)
+    return news_list
+
+def _format_headlines(news_list, max_headlines: int = 100):
+    headlines = []
+    for idx, news_item in enumerate(news_list):
+        if idx >= max_headlines:
+            break
+        new_headline = f"{idx + 1}. {news_item['title'].strip()} "
+        new_headline += f"(URL: {news_item['url'].strip()}) "
+        new_headline += f"{news_item['body'].strip()}"
+        new_headline += "\n"
+        headlines.append(new_headline)
+
+    headlines = "\n".join(headlines)
+    return headlines
+
+# Example usage
+keywords = 'latest AI news'
+timelimit = 'd'
+news_list = fetch_news(keywords, timelimit)
+
+# Format and print the headlines
+formatted_headlines = _format_headlines(news_list)
+print(formatted_headlines)
 
-# News search for the keyword 'holiday' using DuckDuckGo.com and yep.com
-with WEBS() as WEBS:
-    keywords = 'holiday'
-    WEBS_news_gen = WEBS.news(
-        keywords,
-        region="wt-wt",
-        safesearch="off",
-        timelimit="m",
-        max_results=20
-    )
-    for r in WEBS_news_gen:
-        print(r)
 ```
 
 ### 6. `maps()` - map search by DuckDuckGo.com and
@@ -568,27 +685,31 @@ print(r)
 
 ```
 
-### 3. `You.com` - search with you.com
+### 3. `You.com` - search/chat with you.com
 ```python
-from webscout.AI import youChat
 
-
-
+from webscout.AI import YouChat
+from rich import print
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ai = YouChat(
+    is_conversation=True,
+    max_tokens=800,
+    timeout=30,
+    intro=None,
+    filepath=None,
+    update_file=True,
+    proxies={},
+    history_offset=10250,
+    act=None,
+)
+
+prompt = "what is meaning of life"
+
+response = ai.ask(prompt)
+
+# Extract and print the message from the response
+message = ai.get_message(response)
+print(message)
 ```
 
 ### 4. `Gemini` - search with google gemini
@@ -612,19 +733,7 @@ gemini = GEMINI(cookie_file=COOKIE_FILE, proxy=PROXIES)
 # Ask a question and print the response
 response = gemini.chat("What is the meaning of life?")
 print(response)
-
-# Ask another question, this time streaming the response
-for chunk in gemini.chat("Tell me a story", stream=True):
-    print(chunk, end="")
-
-# Reset the conversation to start a new interaction
-gemini.reset()
-
-# Ask a question with the code optimizer
-response = gemini.chat("Write Python code to print 'Hello, world!'", optimizer="code")
-print(response)
 ```
-## usage of image generator from Webscout.AI
 ### 5. `Prodia` - make image using prodia
 ```python
 from webscout.AI import Prodia
@@ -730,7 +839,31 @@ response_str = a.chat(prompt)
 print(response_str)
 ```
 
-### `
+### 12. `Xjai` - chat with free gpt 3.5
+Gratitude to [Devs do Code](http://www.youtube.com/@DevsDoCode) for their assistance.
+```python
+from webscout.AI import Xjai
+from rich import print
+
+ai = Xjai(
+    is_conversation=True,
+    max_tokens=800,
+    timeout=30,
+    intro=None,
+    filepath=None,
+    update_file=True,
+    proxies={},
+    history_offset=10250,
+    act=None,
+)
+
+prompt = "Tell me about india"
+
+response = ai.chat(prompt)
+print(response)
+```
+
+### `LLM`
 ```python
 from webscout.LLM import LLM
 
@@ -761,23 +894,19 @@ while True:
 from __future__ import annotations
 from typing import List, Optional
 
-from webscout import LLM
+from webscout.LLM import LLM
 from webscout import WEBS
 import warnings
 
 system_message: str = (
-    "As AI,
-    "
-    "
-    "seamless interactions with users. Embrace a responsive demeanor, harnessing available online resources to address "
-    "queries, share pertinent content, and facilitate meaningful exchanges. By doing so, you create value through "
-    "connection and engagement, ultimately enhancing overall user satisfaction and experience. Additionally, "
-    "continue upholding the principles of respect, impartiality, and intellectual integrity throughout all interactions."
+    "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
+    "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
+    "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
 )
 
 # Ignore the specific UserWarning
 warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffi.aio", lineno=205)
-LLM = LLM(model="
+LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)
 
 
 def chat(
@@ -833,6 +962,94 @@ if __name__ == "__main__":
         else:
             print("No response")
 ```
+### LLM with deepwebs
+```python
+from __future__ import annotations
+from typing import List, Optional
+from webscout.LLM import LLM
+from webscout import DeepWEBS
+import warnings
+
+system_message: str = (
+    "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
+    "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
+    "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
+)
+
+# Ignore the specific UserWarning
+warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffi.aio", lineno=205)
+
+LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)
+
+def perform_web_search(query):
+    # Initialize the DeepWEBS class
+    D = DeepWEBS()
+
+    # Set up the search parameters
+    search_params = D.DeepSearch(
+        queries=[query], # Query to search
+        result_num=10, # Number of search results
+        safe=True, # Enable SafeSearch
+        types=["web"], # Search type: web
+        extract_webpage=True, # True for extracting webpages
+        overwrite_query_html=True,
+        overwrite_webpage_html=True,
+    )
+
+    # Execute the search and retrieve results
+    results = D.queries_to_search_results(search_params)
+    return results
+
+def chat(user_input: str, result_num: int = 10) -> Optional[str]:
+    """
+    Chat function to perform a web search based on the user input and generate a response using the LLM model.
+
+    Parameters
+    ----------
+    user_input : str
+        The user input to be used for the web search
+    max_results : int, optional
+        The maximum number of search results to include in the response, by default 10
+
+    Returns
+    -------
+    Optional[str]
+        The response generated by the LLM model, or None if there is no response
+    """
+    # Perform a web search based on the user input
+    search_results = perform_web_search(user_input)
+
+    # Extract URLs from search results
+    url_results = []
+    for result in search_results[0]['query_results']:
+        url_results.append(f"{result['title']} ({result['site']}): {result['url']}")
+
+    # Format search results
+    formatted_results = "\n".join(url_results)
+
+    # Define the messages to be sent, including the user input, search results, and system message
+    messages = [
+        {"role": "user", "content": f"User question is:\n{user_input}\nwebsearch results are:\n{formatted_results}"},
+    ]
+
+    # Use the chat method to get the response
+    response = LLM.chat(messages)
+    return response
+
+if __name__ == "__main__":
+    while True:
+        # Get the user input
+        user_input = input("User: ")
+
+        # Perform a web search based on the user input
+        response = chat(user_input)
+
+        # Print the response
+        if response:
+            print("AI:", response)
+        else:
+            print("No response")
+```
 ## `Webai` - terminal gpt and a open interpeter
 
 ```python