webscout 4.7.tar.gz → 4.9.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of webscout might be problematic.
- {webscout-4.7/webscout.egg-info → webscout-4.9}/PKG-INFO +249 -323
- {webscout-4.7 → webscout-4.9}/README.md +245 -322
- {webscout-4.7 → webscout-4.9}/setup.py +5 -2
- webscout-4.9/webscout/Agents/functioncall.py +186 -0
- webscout-4.9/webscout/Bard.py +365 -0
- webscout-4.9/webscout/Bing_search.py +124 -0
- webscout-4.9/webscout/DWEBS.py +157 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/_version.py +1 -1
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Andi.py +7 -1
- {webscout-4.7 → webscout-4.9}/webscout/Provider/BasedGPT.py +11 -5
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Berlin4h.py +11 -5
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Blackboxai.py +10 -4
- webscout-4.9/webscout/Provider/Cloudflare.py +286 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Cohere.py +11 -5
- {webscout-4.7 → webscout-4.9}/webscout/Provider/DARKAI.py +25 -7
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Deepinfra.py +2 -1
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Deepseek.py +25 -9
- webscout-4.9/webscout/Provider/DiscordRocks.py +389 -0
- webscout-4.9/webscout/Provider/Farfalle.py +227 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Gemini.py +1 -1
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Groq.py +244 -110
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Llama.py +13 -5
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Llama3.py +15 -2
- {webscout-4.7 → webscout-4.9}/webscout/Provider/OLLAMA.py +8 -7
- webscout-4.7/webscout/Provider/ChatGPTUK.py → webscout-4.9/webscout/Provider/PI.py +63 -69
- webscout-4.9/webscout/Provider/Perplexity.py +600 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Phind.py +6 -5
- {webscout-4.7 → webscout-4.9}/webscout/Provider/PizzaGPT.py +7 -1
- webscout-4.9/webscout/Provider/Youchat.py +247 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/__init__.py +26 -31
- webscout-4.9/webscout/Provider/ai4chat.py +193 -0
- webscout-4.9/webscout/Provider/felo_search.py +238 -0
- webscout-4.7/webscout/Provider/VTLchat.py → webscout-4.9/webscout/Provider/julius.py +93 -82
- {webscout-4.7 → webscout-4.9}/webscout/Provider/koala.py +11 -5
- webscout-4.9/webscout/Provider/liaobots.py +268 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/meta.py +2 -1
- webscout-4.9/webscout/Provider/turboseek.py +237 -0
- webscout-4.7/webscout/Provider/FreeGemini.py → webscout-4.9/webscout/Provider/xdash.py +51 -18
- webscout-4.9/webscout/Provider/yep.py +258 -0
- {webscout-4.7 → webscout-4.9}/webscout/__init__.py +1 -59
- {webscout-4.7 → webscout-4.9}/webscout/version.py +1 -1
- {webscout-4.7 → webscout-4.9}/webscout/webai.py +2 -64
- {webscout-4.7 → webscout-4.9}/webscout/webscout_search.py +1 -1
- {webscout-4.7 → webscout-4.9/webscout.egg-info}/PKG-INFO +249 -323
- {webscout-4.7 → webscout-4.9}/webscout.egg-info/SOURCES.txt +14 -11
- {webscout-4.7 → webscout-4.9}/webscout.egg-info/requires.txt +3 -0
- webscout-4.7/webscout/Agents/functioncall.py +0 -126
- webscout-4.7/webscout/DWEBS.py +0 -793
- webscout-4.7/webscout/GoogleS.py +0 -342
- webscout-4.7/webscout/Provider/Geminiflash.py +0 -152
- webscout-4.7/webscout/Provider/Geminipro.py +0 -152
- webscout-4.7/webscout/Provider/Leo.py +0 -469
- webscout-4.7/webscout/Provider/OpenGPT.py +0 -867
- webscout-4.7/webscout/Provider/Perplexity.py +0 -230
- webscout-4.7/webscout/Provider/Xjai.py +0 -230
- webscout-4.7/webscout/Provider/Yepchat.py +0 -478
- webscout-4.7/webscout/Provider/Youchat.py +0 -225
- {webscout-4.7 → webscout-4.9}/LICENSE.md +0 -0
- {webscout-4.7 → webscout-4.9}/setup.cfg +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/AIauto.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/AIbase.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/AIutel.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Agents/Onlinesearcher.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Agents/__init__.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Extra/__init__.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Extra/autollama.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Extra/gguf.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Extra/weather.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Extra/weather_ascii.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/LLM.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/__init__.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/formats.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/model.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/rawdog.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/samplers.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/thread.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Local/utils.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Koboldai.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Openai.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Poe.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/RUBIKSAI.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/Reka.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/Provider/ThinkAnyAI.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/YTdownloader.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/__main__.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/async_providers.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/cli.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/exceptions.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/g4f.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/models.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/tempid.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/transcriber.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/utils.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/voice.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/webscout_search_async.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout/websx_search.py +0 -0
- {webscout-4.7 → webscout-4.9}/webscout.egg-info/dependency_links.txt +0 -0
- {webscout-4.7 → webscout-4.9}/webscout.egg-info/entry_points.txt +0 -0
- {webscout-4.7 → webscout-4.9}/webscout.egg-info/top_level.txt +0 -0
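The per-file `+`/`-` counts above summarize what changed between the two source distributions. For readers who want to verify a change themselves, here is a minimal sketch (not part of the package) that downloads both sdists from PyPI and diffs one file with the standard library; it assumes `pip` is on PATH, that the sdists unpack to the conventional `webscout-<version>/` top-level directory, and it picks `README.md` purely as an example file.

```python
import difflib
import subprocess
import tarfile

# Fetch both sdists from PyPI into the current directory without installing anything.
# --no-binary :all: forces the .tar.gz source archive rather than a wheel,
# which is the artifact this page compares.
for version in ("4.7", "4.9"):
    subprocess.run(
        ["pip", "download", f"webscout=={version}",
         "--no-deps", "--no-binary", ":all:", "-d", "."],
        check=True,
    )

def read_member(archive: str, member: str) -> list[str]:
    """Return the lines of one file inside a source tarball (assumes the member exists)."""
    with tarfile.open(archive) as tar:
        return tar.extractfile(member).read().decode("utf-8", "replace").splitlines(keepends=True)

# Compare the README shipped in each release (example file; any path from the list above works).
old = read_member("webscout-4.7.tar.gz", "webscout-4.7/README.md")
new = read_member("webscout-4.9.tar.gz", "webscout-4.9/README.md")
print("".join(difflib.unified_diff(old, new,
                                   "webscout-4.7/README.md",
                                   "webscout-4.9/README.md")))
```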
````diff
--- webscout-4.7/webscout.egg-info/PKG-INFO
+++ webscout-4.9/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 4.7
+Version: 4.9
 Summary: Search for anything using Google, DuckDuckGo, brave, qwant, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
 Author: OEvortex
 Author-email: helpingai5@gmail.com
````
````diff
@@ -63,6 +63,9 @@ Requires-Dist: pyfiglet
 Requires-Dist: yaspin
 Requires-Dist: pillow
 Requires-Dist: requests_html
+Requires-Dist: bson
+Requires-Dist: cloudscraper
+Requires-Dist: emoji
 Provides-Extra: dev
 Requires-Dist: ruff>=0.1.6; extra == "dev"
 Requires-Dist: pytest>=7.4.2; extra == "dev"
````
````diff
@@ -103,62 +106,6 @@ Requires-Dist: huggingface_hub[cli]; extra == "local"
 Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
 
 
-## Table of Contents
-- [WEBSCOUT](#webscout)
-- [Table of Contents](#table-of-contents)
-- [Install](#install)
-- [CLI version](#cli-version)
-- [Regions](#regions)
-- [Tempmail and Temp number](#tempmail-and-temp-number)
-- [Temp number](#temp-number)
-- [Tempmail](#tempmail)
-- [Transcriber](#transcriber)
-- [DWEBS: Advanced Web Searches](#dwebs-advanced-web-searches)
-- [Activating DWEBS](#activating-dwebs)
-- [Point to remember before using `DWEBS`](#point-to-remember-before-using-dwebs)
-- [Usage Example](#usage-example)
-- [Text-to-Speech:](#text-to-speech)
-- [Available TTS Voices:](#available-tts-voices)
-- [Exceptions](#exceptions)
-- [usage of WEBS](#usage-of-webs)
-- [1. `text()` - text search by DuckDuckGo.com](#1-text---text-search-by-duckduckgocom)
-- [2. `answers()` - instant answers by DuckDuckGo.com](#2-answers---instant-answers-by-duckduckgocom)
-- [3. `images()` - image search by DuckDuckGo.com](#3-images---image-search-by-duckduckgocom)
-- [4. `videos()` - video search by DuckDuckGo.com](#4-videos---video-search-by-duckduckgocom)
-- [5. `news()` - news search by DuckDuckGo.com](#5-news---news-search-by-duckduckgocom)
-- [6. `maps()` - map search by DuckDuckGo.com](#6-maps---map-search-by-duckduckgocom)
-- [7. `translate()` - translation by DuckDuckGo.com](#7-translate---translation-by-duckduckgocom)
-- [8. `suggestions()` - suggestions by DuckDuckGo.com](#8-suggestions---suggestions-by-duckduckgocom)
-- [usage of WEBSX -- Another Websearch thing](#usage-of-websx----another-websearch-thing)
-- [ALL acts](#all-acts)
-- [Webscout Supported Acts:](#webscout-supported-acts)
-- [usage of webscout AI](#usage-of-webscout-ai)
-- [0. `Duckchat` - chat with LLM](#0-duckchat---chat-with-llm)
-- [1. `PhindSearch` - Search using Phind.com](#1-phindsearch---search-using-phindcom)
-- [2. `YepChat` - Chat with mistral 8x7b powered by yepchat](#2-yepchat---chat-with-mistral-8x7b-powered-by-yepchat)
-- [3. `You.com` - search/chat with you.com](#3-youcom---searchchat-with-youcom)
-- [4. `Gemini` - search with google gemini](#4-gemini---search-with-google-gemini)
-- [5. `Berlin4h` - chat with Berlin4h](#5-berlin4h---chat-with-berlin4h)
-- [6. `BlackBox` - Search/chat With BlackBox](#6-blackbox---searchchat-with-blackbox)
-- [7. `PERPLEXITY` - Search With PERPLEXITY](#7-perplexity---search-with-perplexity)
-- [8. `OpenGPT` - chat With OPENGPT](#8-opengpt---chat-with-opengpt)
-- [9. `KOBOLDAI` -](#9-koboldai--)
-- [10. `Reka` - chat with reka](#10-reka---chat-with-reka)
-- [11. `Cohere` - chat with cohere](#11-cohere---chat-with-cohere)
-- [12. `Xjai` - chat with free gpt 3.5](#12-xjai---chat-with-free-gpt-35)
-- [13. `ThinkAny` - AI search engine](#13-thinkany---ai-search-engine)
-- [14. `chatgptuk` - Chat with gemini-pro](#14-chatgptuk---chat-with-gemini-pro)
-- [15. `poe`- chat with poe](#15-poe--chat-with-poe)
-- [16. `BasedGPT` - chat with GPT](#16-basedgpt---chat-with-gpt)
-- [17. `DeepSeek` -chat with deepseek](#17-deepseek--chat-with-deepseek)
-- [18. Deepinfra](#18-deepinfra)
-- [19. Deepinfra - VLM](#19-deepinfra---vlm)
-- [`LLM`](#llm)
-- [`Local-LLM` webscout can now run GGUF models](#local-llm-webscout-can-now-run-gguf-models)
-- [`Local-rawdog`](#local-rawdog)
-- [`LLM` with internet](#llm-with-internet)
-- [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)
-
 ## Install
 ```python
 pip install -U webscout
````
````diff
@@ -263,7 +210,7 @@ python -m webscout --help
 
 
 [Go To TOP](#TOP)
-## YTdownloader
+## YTdownloader
 
 ```python
 from os import rename, getcwd
@@ -287,7 +234,7 @@ if __name__ == "__main__":
     download_video("https://www.youtube.com/watch?v=c0tMvzB0OKw")
 ```
 
-## Weather
+## Weather
 1. weather
 ```python
 from webscout import weather as w
@@ -386,6 +333,7 @@ async def main() -> None:
 if __name__ == '__main__':
     asyncio.run(main())
 ```
+
 ## Transcriber
 The transcriber function in webscout is a handy tool that transcribes YouTube videos. Here's an example code demonstrating its usage:
 ```python
````
````diff
@@ -442,77 +390,25 @@ if __name__ == "__main__":
     main()
 ```
 
-## DWEBS: Advanced Web Searches
-
-`DWEBS` is a standalone feature designed to perform advanced web searches with enhanced capabilities. It is particularly powerful in extracting relevant information directly from webpages and Search engine, focusing exclusively on text (web) searches. Unlike the `WEBS` , which provides a broader range of search functionalities, `DWEBS` is specifically tailored for in-depth web searches.
-
-### Activating DWEBS
-
-To utilize the `DWEBS` feature, you must first create an instance of the `DWEBS` . This is designed to be used independently of the `WEBS` , offering a focused approach to web searches.
-
-### Point to remember before using `DWEBS`
-As `DWEBS` is designed to extract relevant information directly from webpages and Search engine, It extracts html from webpages and saves them to folder named files
-
-### Usage Example
-
-Here's a basic example of how to use the `DWEBS` :
+## GoogleS -- formerly DWEBS
 ```python
-from webscout import
-
-
-
-
-
-query_html_path = google_searcher.search(
-    query='HelpingAI-9B',
-    result_num=10,
-    safe=False,
-    overwrite=False,
-)
-
-# 2. Search Result Extraction
-query_results_extractor = DWEBS.QueryResultsExtractor()
-query_search_results = query_results_extractor.extract(query_html_path)
-
-if extract_webpage:
-    print('---------------Batch Webpage Fetcher--------------------')
-    # 3. Batch Webpage Fetching
-    batch_webpage_fetcher = DWEBS.BatchWebpageFetcher()
-    urls = [query_extracts['url'] for query_extracts in query_search_results['query_results']]
-    url_and_html_path_list = batch_webpage_fetcher.fetch(
-        urls,
-        overwrite=False,
-        output_parent=query_search_results["query"],
-    )
-
-    print('---------------Batch Webpage Extractor--------------------')
-    # 4. Batch Webpage Content Extraction
-    batch_webpage_content_extractor = DWEBS.BatchWebpageContentExtractor()
-    webpageurls = [url_and_html['html_path'] for url_and_html in url_and_html_path_list]
-    html_path_and_extracted_content_list = batch_webpage_content_extractor.extract(webpageurls)
-
-    # 5. Printing Extracted Content
-    for html_path_and_extracted_content in html_path_and_extracted_content_list:
-        print(html_path_and_extracted_content['extracted_content'])
-else:
-    # Print only search results if extract_webpage is False
-    for result in query_search_results['query_results']:
-        DWEBS.logger.mesg(
-            f"{result['title']}\n"
-            f"  - {result['site']}\n"
-            f"  - {result['url']}\n"
-            f"  - {result['abstract']}\n"
-            f"\n"
-        )
-
-    DWEBS.logger.success(f"- {len(query_search_results['query_results'])} query results")
-    DWEBS.logger.success(f"- {len(query_search_results['related_questions'])} related questions")
-
-# Example usage:
-finalextractor(extract_webpage=True) # Extract webpage content
-finalextractor(extract_webpage=False) # Skip webpage extraction and print search results only
-
+from webscout import GoogleS
+from rich import print
+searcher = GoogleS()
+results = searcher.search("HelpingAI-9B", max_results=20, extract_webpage_text=False, max_extract_characters=100)
+for result in results:
+    print(result)
 ```
+### BingS
+```python
+from webscout import BingS
+from rich import print
+searcher = BingS()
+results = searcher.search("Python development tools", max_results=30)
+for result in results:
+    print(result)
+```
+
 ## Text-to-Speech:
 ```python
 from webscout import play_audio
````
````diff
@@ -530,7 +426,7 @@ You can choose from a wide range of voices, including:
 - Standard and WaveNet voices for various languages (e.g., en-US, es-ES, ja-JP, etc.)
 
 
-The WEBS and AsyncWEBS classes are used to retrieve search results from DuckDuckGo.com
+The WEBS and AsyncWEBS classes are used to retrieve search results from DuckDuckGo.com
 To use the AsyncWEBS class, you can perform asynchronous operations using Python's asyncio library.
 To initialize an instance of the WEBS or AsyncWEBS classes, you can provide the following optional arguments:
 
````
````diff
@@ -1049,23 +945,9 @@ response = ph.ask(prompt)
 message = ph.get_message(response)
 print(message)
 ```
-### 2. `YepChat` - Chat with mistral 8x7b powered by yepchat
-```python
-from webscout import YEPCHAT
-
-# Instantiate the YEPCHAT class with default parameters
-YEPCHAT = YEPCHAT()
 
-# Define a prompt to send to the AI
-prompt = "What is the capital of France?"
 
-
-r = YEPCHAT.chat(prompt)
-print(r)
-
-```
-
-### 3. `You.com` - search/chat with you.com - Not working
+### . `You.com` - search/chat with you.com - Not working
 ```python
 
 from webscout import YouChat
````
````diff
@@ -1092,32 +974,28 @@ message = ai.get_message(response)
 print(message)
 ```
 
-### 4. `Gemini` - search with google gemini
+### . `Gemini` - search with google gemini
 
 ```python
 import webscout
 from webscout import GEMINI
-
-
-COOKIE_FILE = "path/to/bard.google.com.cookies.json"
+from rich import print
+COOKIE_FILE = "cookies.json"
 
 # Optional: Provide proxy details if needed
-PROXIES = {
-    "http": "http://proxy_server:port",
-    "https": "https://proxy_server:port",
-}
+PROXIES = {}
 
 # Initialize GEMINI with cookie file and optional proxies
 gemini = GEMINI(cookie_file=COOKIE_FILE, proxy=PROXIES)
 
 # Ask a question and print the response
-response = gemini.chat("
+response = gemini.chat("websearch about HelpingAI and who is its developer")
 print(response)
 ```
-### 5. `Berlin4h` - chat with Berlin4h
+### . `Berlin4h` - chat with Berlin4h
 ```python
 from webscout import Berlin4h
-
+
 ai = Berlin4h(
     is_conversation=True,
     max_tokens=800,
@@ -1135,7 +1013,7 @@ prompt = "Explain the concept of recursion in simple terms."
 response = ai.chat(prompt)
 print(response)
 ```
-### 6. `BlackBox` - Search/chat With BlackBox
+### . `BlackBox` - Search/chat With BlackBox
 ```python
 from webscout import BLACKBOXAI
 from rich import print
````
````diff
@@ -1166,18 +1044,20 @@ while True:
     r = ai.chat(prompt)
     print(r)
 ```
-### 7. `PERPLEXITY` - Search With PERPLEXITY
+### . `PERPLEXITY` - Search With PERPLEXITY
 ```python
-from webscout import PERPLEXITY
-
-perplexity = PERPLEXITY()
+from webscout import Perplexity
+from rich import print
 
-
-
-response = perplexity.chat(
-
+perplexity = Perplexity()
+# Stream the response
+response = perplexity.chat(input(">>> "))
+for chunk in response:
+    print(chunk, end="", flush=True)
+
+perplexity.close()
 ```
-###
+### `meta ai` - chat With meta ai
 ```python
 from webscout import Meta
 from rich import print
@@ -1209,7 +1089,7 @@ for media in response["media"]:
     print(media["url"]) # Access image URLs
 
 ```
-### 9. `KOBOLDAI` -
+### `KOBOLDAI` -
 ```python
 from webscout import KOBOLDAI
 
@@ -1228,7 +1108,7 @@ print(message)
 
 ```
 
-### 10. `Reka` - chat with reka
+### `Reka` - chat with reka
 ```python
 from webscout import REKA
 
@@ -1239,7 +1119,7 @@ response_str = a.chat(prompt)
 print(response_str)
 ```
 
-### 11. `Cohere` - chat with cohere
+### `Cohere` - chat with cohere
 ```python
 from webscout import Cohere
 
````
````diff
@@ -1250,30 +1130,8 @@ response_str = a.chat(prompt)
 print(response_str)
 ```
 
-### 12. `Xjai` - chat with free gpt 3.5
-Gratitude to [Devs do Code](http://www.youtube.com/@DevsDoCode) for their assistance.
-```python
-from webscout import Xjai
-from rich import print
-
-ai = Xjai(
-    is_conversation=True,
-    max_tokens=800,
-    timeout=30,
-    intro=None,
-    filepath=None,
-    update_file=True,
-    proxies={},
-    history_offset=10250,
-    act=None,
-)
-
-prompt = "Tell me about india"
 
-
-print(response)
-```
-### 13. `ThinkAny` - AI search engine
+### `ThinkAny` - AI search engine
 ```python
 from webscout import ThinkAnyAI
 
@@ -1298,32 +1156,11 @@ response = ai.ask(prompt)
 message = ai.get_message(response)
 print(message)
 ```
-### 14. `chatgptuk` - Chat with gemini-pro
-```python
-from webscout import ChatGPTUK
-# Create an instance of the PERPLEXITY class
-ai = ChatGPTUK(
-    is_conversation=True,
-    max_tokens=800,
-    timeout=30,
-    intro=None,
-    filepath=None,
-    update_file=True,
-    proxies={},
-    history_offset=10250,
-    act=None,
-)
-
-# Example usage:
-prompt = "Explain the concept of recursion in simple terms."
-response = ai.chat(prompt)
-print(response)
 
-
-### 15. `poe`- chat with poe
+### `poe`- chat with poe
 Usage code similar to other proviers
 
-### 16. `BasedGPT` - chat with GPT
+### `BasedGPT` - chat with GPT
 ```
 from webscout import BasedGPT
````
````diff
@@ -1344,14 +1181,14 @@ response = basedgpt.chat(prompt)
 # Print the AI's response
 print(response)
 ```
-### 17. `DeepSeek` -chat with deepseek
+### `DeepSeek` -chat with deepseek
 ```python
 from webscout import DeepSeek
 from rich import print
 
 ai = DeepSeek(
     is_conversation=True,
-    api_key='
+    api_key='cookie,
     max_tokens=800,
     timeout=30,
     intro=None,
@@ -1370,7 +1207,7 @@ prompt = "Tell me about india"
 r = ai.chat(prompt)
 print(r)
 ```
-### 18. Deepinfra
+### `Deepinfra`
 ```python
 from webscout import DeepInfra
 
@@ -1396,7 +1233,7 @@ message = ai.get_message(response)
 print(message)
 ```
 
-### 19. Deepinfra - VLM
+### `Deepinfra` - VLM
 ```python
 from webscout.Provider import VLM
 
````
````diff
@@ -1415,45 +1252,96 @@ prompt = {
 response = vlm_instance.chat(prompt)
 print(response)
 
-```
-### 20. `VTLchat` - Free gpt3.5
-```python
-from webscout import VTLchat
-
-provider = VTLchat()
-response = provider.chat("Hello, how are you?")
-print(response)
 ```
 
-### 21. GeminiFlash and geminipro
-**Usage similar to other providers**
-
-### 22. `Ollama` - chat will AI models locally
-```python
-from webscout import OLLAMA
-ollama_provider = OLLAMA(model="qwen2:0.5b")
-response = ollama_provider.chat("What is the meaning of life?")
-print(response)
-```
 
-###
+### GROQ
 ```python
 from webscout import GROQ
 ai = GROQ(api_key="")
 response = ai.chat("What is the meaning of life?")
 print(response)
+#----------------------TOOL CALL------------------
+from webscout import GROQ # Adjust import based on your project structure
+from webscout import WEBS
+import json
 
-
+# Initialize the GROQ client
+client = GROQ(api_key="")
+MODEL = 'llama3-groq-70b-8192-tool-use-preview'
+
+# Function to evaluate a mathematical expression
+def calculate(expression):
+    """Evaluate a mathematical expression"""
+    try:
+        result = eval(expression)
+        return json.dumps({"result": result})
+    except Exception as e:
+        return json.dumps({"error": str(e)})
+
+# Function to perform a text search using DuckDuckGo.com
+def search(query):
+    """Perform a text search using DuckDuckGo.com"""
+    try:
+        results = WEBS().text(query, max_results=5)
+        return json.dumps({"results": results})
+    except Exception as e:
+        return json.dumps({"error": str(e)})
+
+# Add the functions to the provider
+client.add_function("calculate", calculate)
+client.add_function("search", search)
+
+# Define the tools
+tools = [
+    {
+        "type": "function",
+        "function": {
+            "name": "calculate",
+            "description": "Evaluate a mathematical expression",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "expression": {
+                        "type": "string",
+                        "description": "The mathematical expression to evaluate",
+                    }
+                },
+                "required": ["expression"],
+            },
+        }
+    },
+    {
+        "type": "function",
+        "function": {
+            "name": "search",
+            "description": "Perform a text search using DuckDuckGo.com and Yep.com",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "query": {
+                        "type": "string",
+                        "description": "The search query to execute",
+                    }
+                },
+                "required": ["query"],
+            },
+        }
+    }
+]
+
+
+user_prompt_calculate = "What is 25 * 4 + 10?"
+response_calculate = client.chat(user_prompt_calculate, tools=tools)
+print(response_calculate)
+
+user_prompt_search = "Find information on HelpingAI and who is its developer"
+response_search = client.chat(user_prompt_search, tools=tools)
+print(response_search)
 
-### 23. Freegemini - chat with gemini for free
-```python
-from webscout import FreeGemini
-ai = FreeGemini()
-response = ai.chat("What is the meaning of life?")
-print(response)
 ```
 
-###
+### LLama 70b - chat with meta's llama 3 70b
 ```python
 
 from webscout import LLAMA
````
````diff
@@ -1463,15 +1351,127 @@ llama = LLAMA()
 r = llama.chat("What is the meaning of life?")
 print(r)
 ```
-###
+### AndiSearch
 ```python
 from webscout import AndiSearch
 a = AndiSearch()
 print(a.chat("HelpingAI-9B"))
 ```
+### Function calling-beta
+```python
+import json
+import logging
+from webscout import LLAMA3, WEBS
+from webscout.Agents.functioncall import FunctionCallingAgent
+
+# Define tools that the agent can use
+tools = [
+    {
+        "type": "function",
+        "function": {
+            "name": "UserDetail",
+            "parameters": {
+                "type": "object",
+                "title": "UserDetail",
+                "properties": {
+                    "name": {
+                        "title": "Name",
+                        "type": "string"
+                    },
+                    "age": {
+                        "title": "Age",
+                        "type": "integer"
+                    }
+                },
+                "required": ["name", "age"]
+            }
+        }
+    },
+    {
+        "type": "function",
+        "function": {
+            "name": "web_search",
+            "description": "Search query on google",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "query": {
+                        "type": "string",
+                        "description": "web search query"
+                    }
+                },
+                "required": ["query"]
+            }
+        }
+    },
+    { # New general AI tool
+        "type": "function",
+        "function": {
+            "name": "general_ai",
+            "description": "Use general AI knowledge to answer the question",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "question": {
+                        "type": "string",
+                        "description": "The question to answer"
+                    }
+                },
+                "required": ["question"]
+            }
+        }
+    }
+]
+
+# Initialize the FunctionCallingAgent with the specified tools
+agent = FunctionCallingAgent(tools=tools)
+llama = LLAMA3()
+from rich import print
+# Input message from the user
+user = input(">>> ")
+message = user
+function_call_data = agent.function_call_handler(message)
+print(f"Function Call Data: {function_call_data}")
+
+# Check for errors in the function call data
+if "error" not in function_call_data:
+    function_name = function_call_data.get("tool_name") # Use 'tool_name' instead of 'name'
+    if function_name == "web_search":
+        arguments = function_call_data.get("tool_input", {}) # Get tool input arguments
+        query = arguments.get("query")
+        if query:
+            with WEBS() as webs:
+                search_results = webs.text(query, max_results=5)
+            prompt = (
+                f"Based on the following search results:\n\n{search_results}\n\n"
+                f"Question: {user}\n\n"
+                "Please provide a comprehensive answer to the question based on the search results above. "
+                "Include relevant webpage URLs in your answer when appropriate. "
+                "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
+            )
+            response = llama.chat(prompt)
+            for c in response:
+                print(c, end="", flush=True)
 
-
-
+        else:
+            print("Please provide a search query.")
+    elif function_name == "general_ai": # Handle general AI tool
+        arguments = function_call_data.get("tool_input", {})
+        question = arguments.get("question")
+        if question:
+            response = llama.chat(question) # Use LLM directly
+            for c in response:
+                print(c, end="", flush=True)
+        else:
+            print("Please provide a question.")
+    else:
+        result = agent.execute_function(function_call_data)
+        print(f"Function Execution Result: {result}")
+else:
+    print(f"Error: {function_call_data['error']}")
+```
+### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek,
+code similar to other provider
 ### `LLM`
 ```python
 from webscout.LLM import LLM
````
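The README lines added in the hunk above say these providers use "code similar to other provider" without showing it. As a rough illustration only, assuming the classes listed there follow the same default-constructor-plus-`chat()` pattern used by the providers documented earlier in this README (YEPCHAT is picked arbitrarily from that list), the shared pattern would look like:

```python
from webscout import YEPCHAT  # any provider named in the list above is claimed to work the same way

ai = YEPCHAT()  # default construction, as in the other provider examples
response = ai.chat("What is the capital of France?")  # same chat() call used throughout the README
print(response)
```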
````diff
@@ -1514,7 +1514,7 @@ from webscout.Local import formats
 # 1. Download the model
 repo_id = "microsoft/Phi-3-mini-4k-instruct-gguf" # Replace with the desired Hugging Face repo
 filename = "Phi-3-mini-4k-instruct-q4.gguf" # Replace with the correct filename
-model_path = download_model(repo_id, filename)
+model_path = download_model(repo_id, filename, token="")
 
 # 2. Load the model
 model = Model(model_path, n_gpu_layers=4)
````
````diff
@@ -1674,82 +1674,8 @@ autollama(
 * The `model_path` in `autollama` is the Hugging Face model ID, and `gguf_file` is the GGUF file ID.
 
 
-### `LLM` with internet
-```python
-from __future__ import annotations
-from typing import List, Optional
-
-from webscout.LLM import LLM
-from webscout import WEBS
-import warnings
-
-system_message: str = (
-    "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
-    "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
-    "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
-)
-
-# Ignore the specific UserWarning
-warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffio", lineno=205)
-LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)
-
-
-def chat(
-    user_input: str, webs: WEBS, max_results: int = 10
-) -> Optional[str]:
-    """
-    Chat function to perform a web search based on the user input and generate a response using the LLM model.
-
-    Parameters
-    ----------
-    user_input : str
-        The user input to be used for the web search
-    webs : WEBS
-        The web search instance to be used to perform the search
-    max_results : int, optional
-        The maximum number of search results to include in the response, by default 10
-
-    Returns
-    -------
-    Optional[str]
-        The response generated by the LLM model, or None if there is no response
-    """
-    # Perform a web search based on the user input
-    search_results: List[str] = []
-    for r in webs.text(
-        user_input, region="wt-wt", safesearch="off", timelimit="y", max_results=max_results
-    ):
-        search_results.append(str(r)) # Convert each result to a string
-
-    # Define the messages to be sent, including the user input, search results, and system message
-    messages = [
-        {"role": "user", "content": user_input + "\n" + "websearch results are:" + "\n".join(search_results)},
-    ]
-
-    # Use the chat method to get the response
-    response = LLM.chat(messages)
-
-    return response
-
-
-if __name__ == "__main__":
-    while True:
-        # Get the user input
-        user_input = input("User: ")
-
-        # Perform a web search based on the user input
-        with WEBS() as webs:
-            response = chat(user_input, webs)
-
-        # Print the response
-        if response:
-            print("AI:", response)
-        else:
-            print("No response")
-```
 
 ## `Webai` - terminal gpt and a open interpeter
-Code is in rawdog.py file
 ```
 ```shell
 python -m webscout.webai webai --provider "phind" --rawdog
````