webscout 4.6__py3-none-any.whl → 4.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic. Click here for more details.

Files changed (44) hide show
  1. webscout/Agents/functioncall.py +97 -37
  2. webscout/Bard.py +365 -0
  3. webscout/Local/_version.py +1 -1
  4. webscout/Provider/Andi.py +7 -1
  5. webscout/Provider/BasedGPT.py +11 -5
  6. webscout/Provider/Berlin4h.py +11 -5
  7. webscout/Provider/Blackboxai.py +10 -4
  8. webscout/Provider/Cohere.py +11 -5
  9. webscout/Provider/DARKAI.py +25 -7
  10. webscout/Provider/Deepinfra.py +2 -1
  11. webscout/Provider/Deepseek.py +25 -9
  12. webscout/Provider/DiscordRocks.py +389 -0
  13. webscout/Provider/{ChatGPTUK.py → Farfalle.py} +80 -67
  14. webscout/Provider/Gemini.py +1 -1
  15. webscout/Provider/Groq.py +244 -110
  16. webscout/Provider/Llama.py +13 -5
  17. webscout/Provider/Llama3.py +15 -2
  18. webscout/Provider/OLLAMA.py +8 -7
  19. webscout/Provider/Perplexity.py +422 -52
  20. webscout/Provider/Phind.py +6 -5
  21. webscout/Provider/PizzaGPT.py +7 -1
  22. webscout/Provider/__init__.py +15 -31
  23. webscout/Provider/ai4chat.py +193 -0
  24. webscout/Provider/koala.py +11 -5
  25. webscout/Provider/{VTLchat.py → liaobots.py} +120 -104
  26. webscout/Provider/meta.py +779 -0
  27. webscout/exceptions.py +6 -0
  28. webscout/version.py +1 -1
  29. webscout/webai.py +2 -64
  30. webscout/webscout_search.py +1 -1
  31. {webscout-4.6.dist-info → webscout-4.8.dist-info}/METADATA +254 -297
  32. {webscout-4.6.dist-info → webscout-4.8.dist-info}/RECORD +36 -40
  33. webscout/Provider/FreeGemini.py +0 -169
  34. webscout/Provider/Geminiflash.py +0 -152
  35. webscout/Provider/Geminipro.py +0 -152
  36. webscout/Provider/Leo.py +0 -469
  37. webscout/Provider/OpenGPT.py +0 -867
  38. webscout/Provider/Xjai.py +0 -230
  39. webscout/Provider/Yepchat.py +0 -478
  40. webscout/Provider/Youchat.py +0 -225
  41. {webscout-4.6.dist-info → webscout-4.8.dist-info}/LICENSE.md +0 -0
  42. {webscout-4.6.dist-info → webscout-4.8.dist-info}/WHEEL +0 -0
  43. {webscout-4.6.dist-info → webscout-4.8.dist-info}/entry_points.txt +0 -0
  44. {webscout-4.6.dist-info → webscout-4.8.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: webscout
3
- Version: 4.6
3
+ Version: 4.8
4
4
  Summary: Search for anything using Google, DuckDuckGo, brave, qwant, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
5
5
  Author: OEvortex
6
6
  Author-email: helpingai5@gmail.com
@@ -62,6 +62,8 @@ Requires-Dist: ollama
62
62
  Requires-Dist: pyfiglet
63
63
  Requires-Dist: yaspin
64
64
  Requires-Dist: pillow
65
+ Requires-Dist: requests-html
66
+ Requires-Dist: bson
65
67
  Provides-Extra: dev
66
68
  Requires-Dist: ruff >=0.1.6 ; extra == 'dev'
67
69
  Requires-Dist: pytest >=7.4.2 ; extra == 'dev'
@@ -102,62 +104,6 @@ Requires-Dist: huggingface-hub[cli] ; extra == 'local'
102
104
  Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
103
105
 
104
106
 
105
- ## Table of Contents
106
- - [WEBSCOUT](#webscout)
107
- - [Table of Contents](#table-of-contents)
108
- - [Install](#install)
109
- - [CLI version](#cli-version)
110
- - [Regions](#regions)
111
- - [Tempmail and Temp number](#tempmail-and-temp-number)
112
- - [Temp number](#temp-number)
113
- - [Tempmail](#tempmail)
114
- - [Transcriber](#transcriber)
115
- - [DWEBS: Advanced Web Searches](#dwebs-advanced-web-searches)
116
- - [Activating DWEBS](#activating-dwebs)
117
- - [Point to remember before using `DWEBS`](#point-to-remember-before-using-dwebs)
118
- - [Usage Example](#usage-example)
119
- - [Text-to-Speech:](#text-to-speech)
120
- - [Available TTS Voices:](#available-tts-voices)
121
- - [Exceptions](#exceptions)
122
- - [usage of WEBS](#usage-of-webs)
123
- - [1. `text()` - text search by DuckDuckGo.com](#1-text---text-search-by-duckduckgocom)
124
- - [2. `answers()` - instant answers by DuckDuckGo.com](#2-answers---instant-answers-by-duckduckgocom)
125
- - [3. `images()` - image search by DuckDuckGo.com](#3-images---image-search-by-duckduckgocom)
126
- - [4. `videos()` - video search by DuckDuckGo.com](#4-videos---video-search-by-duckduckgocom)
127
- - [5. `news()` - news search by DuckDuckGo.com](#5-news---news-search-by-duckduckgocom)
128
- - [6. `maps()` - map search by DuckDuckGo.com](#6-maps---map-search-by-duckduckgocom)
129
- - [7. `translate()` - translation by DuckDuckGo.com](#7-translate---translation-by-duckduckgocom)
130
- - [8. `suggestions()` - suggestions by DuckDuckGo.com](#8-suggestions---suggestions-by-duckduckgocom)
131
- - [usage of WEBSX -- Another Websearch thing](#usage-of-websx----another-websearch-thing)
132
- - [ALL acts](#all-acts)
133
- - [Webscout Supported Acts:](#webscout-supported-acts)
134
- - [usage of webscout AI](#usage-of-webscout-ai)
135
- - [0. `Duckchat` - chat with LLM](#0-duckchat---chat-with-llm)
136
- - [1. `PhindSearch` - Search using Phind.com](#1-phindsearch---search-using-phindcom)
137
- - [2. `YepChat` - Chat with mistral 8x7b powered by yepchat](#2-yepchat---chat-with-mistral-8x7b-powered-by-yepchat)
138
- - [3. `You.com` - search/chat with you.com](#3-youcom---searchchat-with-youcom)
139
- - [4. `Gemini` - search with google gemini](#4-gemini---search-with-google-gemini)
140
- - [5. `Berlin4h` - chat with Berlin4h](#5-berlin4h---chat-with-berlin4h)
141
- - [6. `BlackBox` - Search/chat With BlackBox](#6-blackbox---searchchat-with-blackbox)
142
- - [7. `PERPLEXITY` - Search With PERPLEXITY](#7-perplexity---search-with-perplexity)
143
- - [8. `OpenGPT` - chat With OPENGPT](#8-opengpt---chat-with-opengpt)
144
- - [9. `KOBOLDAI` -](#9-koboldai--)
145
- - [10. `Reka` - chat with reka](#10-reka---chat-with-reka)
146
- - [11. `Cohere` - chat with cohere](#11-cohere---chat-with-cohere)
147
- - [12. `Xjai` - chat with free gpt 3.5](#12-xjai---chat-with-free-gpt-35)
148
- - [13. `ThinkAny` - AI search engine](#13-thinkany---ai-search-engine)
149
- - [14. `chatgptuk` - Chat with gemini-pro](#14-chatgptuk---chat-with-gemini-pro)
150
- - [15. `poe`- chat with poe](#15-poe--chat-with-poe)
151
- - [16. `BasedGPT` - chat with GPT](#16-basedgpt---chat-with-gpt)
152
- - [17. `DeepSeek` -chat with deepseek](#17-deepseek--chat-with-deepseek)
153
- - [18. Deepinfra](#18-deepinfra)
154
- - [19. Deepinfra - VLM](#19-deepinfra---vlm)
155
- - [`LLM`](#llm)
156
- - [`Local-LLM` webscout can now run GGUF models](#local-llm-webscout-can-now-run-gguf-models)
157
- - [`Local-rawdog`](#local-rawdog)
158
- - [`LLM` with internet](#llm-with-internet)
159
- - [`Webai` - terminal gpt and a open interpeter](#webai---terminal-gpt-and-a-open-interpeter)
160
-
161
107
  ## Install
162
108
  ```python
163
109
  pip install -U webscout
@@ -529,7 +475,7 @@ You can choose from a wide range of voices, including:
529
475
  - Standard and WaveNet voices for various languages (e.g., en-US, es-ES, ja-JP, etc.)
530
476
 
531
477
 
532
- The WEBS and AsyncWEBS classes are used to retrieve search results from DuckDuckGo.com and yep.com periodically.
478
+ The WEBS and AsyncWEBS classes are used to retrieve search results from DuckDuckGo.com
533
479
  To use the AsyncWEBS class, you can perform asynchronous operations using Python's asyncio library.
534
480
  To initialize an instance of the WEBS or AsyncWEBS classes, you can provide the following optional arguments:
535
481
 
@@ -1048,23 +994,9 @@ response = ph.ask(prompt)
1048
994
  message = ph.get_message(response)
1049
995
  print(message)
1050
996
  ```
1051
- ### 2. `YepChat` - Chat with mistral 8x7b powered by yepchat
1052
- ```python
1053
- from webscout import YEPCHAT
1054
-
1055
- # Instantiate the YEPCHAT class with default parameters
1056
- YEPCHAT = YEPCHAT()
1057
-
1058
- # Define a prompt to send to the AI
1059
- prompt = "What is the capital of France?"
1060
997
 
1061
- # Use the 'cha' method to get a response from the AI
1062
- r = YEPCHAT.chat(prompt)
1063
- print(r)
1064
998
 
1065
- ```
1066
-
1067
- ### 3. `You.com` - search/chat with you.com - Not working
999
+ ### . `You.com` - search/chat with you.com - Not working
1068
1000
  ```python
1069
1001
 
1070
1002
  from webscout import YouChat
@@ -1091,32 +1023,28 @@ message = ai.get_message(response)
1091
1023
  print(message)
1092
1024
  ```
1093
1025
 
1094
- ### 4. `Gemini` - search with google gemini
1026
+ ### . `Gemini` - search with google gemini
1095
1027
 
1096
1028
  ```python
1097
1029
  import webscout
1098
1030
  from webscout import GEMINI
1099
-
1100
- # Replace with the path to your bard.google.com.cookies.json file
1101
- COOKIE_FILE = "path/to/bard.google.com.cookies.json"
1031
+ from rich import print
1032
+ COOKIE_FILE = "cookies.json"
1102
1033
 
1103
1034
  # Optional: Provide proxy details if needed
1104
- PROXIES = {
1105
- "http": "http://proxy_server:port",
1106
- "https": "https://proxy_server:port",
1107
- }
1035
+ PROXIES = {}
1108
1036
 
1109
1037
  # Initialize GEMINI with cookie file and optional proxies
1110
1038
  gemini = GEMINI(cookie_file=COOKIE_FILE, proxy=PROXIES)
1111
1039
 
1112
1040
  # Ask a question and print the response
1113
- response = gemini.chat("What is the meaning of life?")
1041
+ response = gemini.chat("websearch about HelpingAI and who is its developer")
1114
1042
  print(response)
1115
1043
  ```
1116
- ### 5. `Berlin4h` - chat with Berlin4h
1044
+ ### . `Berlin4h` - chat with Berlin4h
1117
1045
  ```python
1118
1046
  from webscout import Berlin4h
1119
- # Create an instance of the PERPLEXITY class
1047
+
1120
1048
  ai = Berlin4h(
1121
1049
  is_conversation=True,
1122
1050
  max_tokens=800,
@@ -1134,7 +1062,7 @@ prompt = "Explain the concept of recursion in simple terms."
1134
1062
  response = ai.chat(prompt)
1135
1063
  print(response)
1136
1064
  ```
1137
- ### 6. `BlackBox` - Search/chat With BlackBox
1065
+ ### . `BlackBox` - Search/chat With BlackBox
1138
1066
  ```python
1139
1067
  from webscout import BLACKBOXAI
1140
1068
  from rich import print
@@ -1165,69 +1093,52 @@ while True:
1165
1093
  r = ai.chat(prompt)
1166
1094
  print(r)
1167
1095
  ```
1168
- ### 7. `PERPLEXITY` - Search With PERPLEXITY
1096
+ ### . `PERPLEXITY` - Search With PERPLEXITY
1169
1097
  ```python
1170
- from webscout import PERPLEXITY
1171
- # Create an instance of the PERPLEXITY class
1172
- perplexity = PERPLEXITY()
1098
+ from webscout import Perplexity
1099
+ from rich import print
1173
1100
 
1174
- # Example usage:
1175
- prompt = "Explain the concept of recursion in simple terms."
1176
- response = perplexity.chat(prompt)
1177
- print(response)
1178
- ```
1179
- ### 8. `OpenGPT` - chat With OPENGPT
1180
- ```python
1181
- from webscout import OPENGPT
1101
+ perplexity = Perplexity()
1102
+ # Stream the response
1103
+ response = perplexity.chat(input(">>> "))
1104
+ for chunk in response:
1105
+ print(chunk, end="", flush=True)
1182
1106
 
1183
- opengpt = OPENGPT(is_conversation=True, max_tokens=8000, timeout=30, assistant_id="bca37014-6f97-4f2b-8928-81ea8d478d88")
1184
- while True:
1185
- # Prompt the user for input
1186
- prompt = input("Enter your prompt: ")
1187
- # Send the prompt to the OPENGPT model and print the response
1188
- response_str = opengpt.chat(prompt)
1189
- print(response_str)
1107
+ perplexity.close()
1190
1108
  ```
1109
+ ### `meta ai` - chat With meta ai
1191
1110
  ```python
1192
- from webscout import OPENGPTv2
1193
-
1194
- # Initialize the bot with all specified settings
1195
- bot = OPENGPTv2(
1196
- generate_new_agents=True, # Set to True to generate new IDs, False to load from file
1197
- assistant_name="My Custom Assistant",
1198
- retrieval_description="Helpful information from my files.",
1199
- agent_system_message="",
1200
- enable_action_server=False, # Assuming you want to disable Action Server by Robocorp
1201
- enable_ddg_search=False, # Enable DuckDuckGo search tool
1202
- enable_arxiv=False, # Assuming you want to disable Arxiv
1203
- enable_press_releases=False, # Assuming you want to disable Press Releases (Kay.ai)
1204
- enable_pubmed=False, # Assuming you want to disable PubMed
1205
- enable_sec_filings=False, # Assuming you want to disable SEC Filings (Kay.ai)
1206
- enable_retrieval=False, # Assuming you want to disable Retrieval
1207
- enable_search_tavily=False, # Assuming you want to disable Search (Tavily)
1208
- enable_search_short_answer_tavily=False, # Assuming you want to disable Search (short answer, Tavily)
1209
- enable_you_com_search=True, # Assuming you want to disable You.com Search
1210
- enable_wikipedia=False, # Enable Wikipedia tool
1211
- is_public=True,
1212
- is_conversation=True,
1213
- max_tokens=800,
1214
- timeout=40,
1215
- filepath="opengpt_conversation_history.txt",
1216
- update_file=True,
1217
- history_offset=10250,
1218
- act=None,
1219
- )
1111
+ from webscout import Meta
1112
+ from rich import print
1113
+ # **For unauthenticated usage**
1114
+ meta_ai = Meta()
1220
1115
 
1221
- # Example interaction loop
1222
- while True:
1223
- prompt = input("You: ")
1224
- if prompt.strip().lower() == 'exit':
1225
- break
1226
- response = bot.chat(prompt)
1227
- print(response)
1116
+ # Simple text prompt
1117
+ response = meta_ai.chat("What is the capital of France?")
1118
+ print(response)
1119
+
1120
+ # Streaming response
1121
+ for chunk in meta_ai.chat("Tell me a story about a cat."):
1122
+ print(chunk, end="", flush=True)
1123
+
1124
+ # **For authenticated usage (including image generation)**
1125
+ fb_email = "abcd@abc.com"
1126
+ fb_password = "qwertfdsa"
1127
+ meta_ai = Meta(fb_email=fb_email, fb_password=fb_password)
1128
+
1129
+ # Text prompt with web search
1130
+ response = meta_ai.ask("what is currently happening in bangladesh in aug 2024")
1131
+ print(response["message"]) # Access the text message
1132
+ print("Sources:", response["sources"]) # Access sources (if any)
1133
+
1134
+ # Image generation
1135
+ response = meta_ai.ask("Create an image of a cat wearing a hat.")
1136
+ print(response["message"]) # Print the text message from the response
1137
+ for media in response["media"]:
1138
+ print(media["url"]) # Access image URLs
1228
1139
 
1229
1140
  ```
1230
- ### 9. `KOBOLDAI` -
1141
+ ### `KOBOLDAI` -
1231
1142
  ```python
1232
1143
  from webscout import KOBOLDAI
1233
1144
 
@@ -1246,7 +1157,7 @@ print(message)
1246
1157
 
1247
1158
  ```
1248
1159
 
1249
- ### 10. `Reka` - chat with reka
1160
+ ### `Reka` - chat with reka
1250
1161
  ```python
1251
1162
  from webscout import REKA
1252
1163
 
@@ -1257,7 +1168,7 @@ response_str = a.chat(prompt)
1257
1168
  print(response_str)
1258
1169
  ```
1259
1170
 
1260
- ### 11. `Cohere` - chat with cohere
1171
+ ### `Cohere` - chat with cohere
1261
1172
  ```python
1262
1173
  from webscout import Cohere
1263
1174
 
@@ -1268,30 +1179,8 @@ response_str = a.chat(prompt)
1268
1179
  print(response_str)
1269
1180
  ```
1270
1181
 
1271
- ### 12. `Xjai` - chat with free gpt 3.5
1272
- Gratitude to [Devs do Code](http://www.youtube.com/@DevsDoCode) for their assistance.
1273
- ```python
1274
- from webscout import Xjai
1275
- from rich import print
1276
1182
 
1277
- ai = Xjai(
1278
- is_conversation=True,
1279
- max_tokens=800,
1280
- timeout=30,
1281
- intro=None,
1282
- filepath=None,
1283
- update_file=True,
1284
- proxies={},
1285
- history_offset=10250,
1286
- act=None,
1287
- )
1288
-
1289
- prompt = "Tell me about india"
1290
-
1291
- response = ai.chat(prompt)
1292
- print(response)
1293
- ```
1294
- ### 13. `ThinkAny` - AI search engine
1183
+ ### `ThinkAny` - AI search engine
1295
1184
  ```python
1296
1185
  from webscout import ThinkAnyAI
1297
1186
 
@@ -1316,32 +1205,11 @@ response = ai.ask(prompt)
1316
1205
  message = ai.get_message(response)
1317
1206
  print(message)
1318
1207
  ```
1319
- ### 14. `chatgptuk` - Chat with gemini-pro
1320
- ```python
1321
- from webscout import ChatGPTUK
1322
- # Create an instance of the PERPLEXITY class
1323
- ai = ChatGPTUK(
1324
- is_conversation=True,
1325
- max_tokens=800,
1326
- timeout=30,
1327
- intro=None,
1328
- filepath=None,
1329
- update_file=True,
1330
- proxies={},
1331
- history_offset=10250,
1332
- act=None,
1333
- )
1334
1208
 
1335
- # Example usage:
1336
- prompt = "Explain the concept of recursion in simple terms."
1337
- response = ai.chat(prompt)
1338
- print(response)
1339
-
1340
- ```
1341
- ### 15. `poe`- chat with poe
1209
+ ### `poe`- chat with poe
1342
1210
  Usage code similar to other providers
1343
1211
 
1344
- ### 16. `BasedGPT` - chat with GPT
1212
+ ### `BasedGPT` - chat with GPT
1345
1213
  ```
1346
1214
  from webscout import BasedGPT
1347
1215
 
@@ -1362,14 +1230,14 @@ response = basedgpt.chat(prompt)
1362
1230
  # Print the AI's response
1363
1231
  print(response)
1364
1232
  ```
1365
- ### 17. `DeepSeek` -chat with deepseek
1233
+ ### `DeepSeek` -chat with deepseek
1366
1234
  ```python
1367
1235
  from webscout import DeepSeek
1368
1236
  from rich import print
1369
1237
 
1370
1238
  ai = DeepSeek(
1371
1239
  is_conversation=True,
1372
- api_key='23bfff080d38429c9fbbf3c76f88454c',
1240
+ api_key='cookie',
1373
1241
  max_tokens=800,
1374
1242
  timeout=30,
1375
1243
  intro=None,
@@ -1388,7 +1256,7 @@ prompt = "Tell me about india"
1388
1256
  r = ai.chat(prompt)
1389
1257
  print(r)
1390
1258
  ```
1391
- ### 18. `Deepinfra`
1259
+ ### `Deepinfra`
1392
1260
  ```python
1393
1261
  from webscout import DeepInfra
1394
1262
 
@@ -1414,7 +1282,7 @@ message = ai.get_message(response)
1414
1282
  print(message)
1415
1283
  ```
1416
1284
 
1417
- ### 19. `Deepinfra` - VLM
1285
+ ### `Deepinfra` - VLM
1418
1286
  ```python
1419
1287
  from webscout.Provider import VLM
1420
1288
 
@@ -1434,44 +1302,95 @@ response = vlm_instance.chat(prompt)
1434
1302
  print(response)
1435
1303
 
1436
1304
  ```
1437
- ### 20. `VTLchat` - Free gpt3.5
1438
- ```python
1439
- from webscout import VTLchat
1440
1305
 
1441
- provider = VTLchat()
1442
- response = provider.chat("Hello, how are you?")
1443
- print(response)
1444
- ```
1445
1306
 
1446
- ### 21. GeminiFlash and geminipro
1447
- **Usage similar to other providers**
1448
-
1449
- ### 22. `Ollama` - chat will AI models locally
1450
- ```python
1451
- from webscout import OLLAMA
1452
- ollama_provider = OLLAMA(model="qwen2:0.5b")
1453
- response = ollama_provider.chat("What is the meaning of life?")
1454
- print(response)
1455
- ```
1456
-
1457
- ### 22. GROQ
1307
+ ### GROQ
1458
1308
  ```python
1459
1309
  from webscout import GROQ
1460
1310
  ai = GROQ(api_key="")
1461
1311
  response = ai.chat("What is the meaning of life?")
1462
1312
  print(response)
1313
+ #----------------------TOOL CALL------------------
1314
+ from webscout import GROQ # Adjust import based on your project structure
1315
+ from webscout import WEBS
1316
+ import json
1463
1317
 
1464
- ```
1318
+ # Initialize the GROQ client
1319
+ client = GROQ(api_key="")
1320
+ MODEL = 'llama3-groq-70b-8192-tool-use-preview'
1321
+
1322
+ # Function to evaluate a mathematical expression
1323
+ def calculate(expression):
1324
+ """Evaluate a mathematical expression"""
1325
+ try:
1326
+ result = eval(expression)
1327
+ return json.dumps({"result": result})
1328
+ except Exception as e:
1329
+ return json.dumps({"error": str(e)})
1330
+
1331
+ # Function to perform a text search using DuckDuckGo.com
1332
+ def search(query):
1333
+ """Perform a text search using DuckDuckGo.com"""
1334
+ try:
1335
+ results = WEBS().text(query, max_results=5)
1336
+ return json.dumps({"results": results})
1337
+ except Exception as e:
1338
+ return json.dumps({"error": str(e)})
1339
+
1340
+ # Add the functions to the provider
1341
+ client.add_function("calculate", calculate)
1342
+ client.add_function("search", search)
1343
+
1344
+ # Define the tools
1345
+ tools = [
1346
+ {
1347
+ "type": "function",
1348
+ "function": {
1349
+ "name": "calculate",
1350
+ "description": "Evaluate a mathematical expression",
1351
+ "parameters": {
1352
+ "type": "object",
1353
+ "properties": {
1354
+ "expression": {
1355
+ "type": "string",
1356
+ "description": "The mathematical expression to evaluate",
1357
+ }
1358
+ },
1359
+ "required": ["expression"],
1360
+ },
1361
+ }
1362
+ },
1363
+ {
1364
+ "type": "function",
1365
+ "function": {
1366
+ "name": "search",
1367
+ "description": "Perform a text search using DuckDuckGo.com and Yep.com",
1368
+ "parameters": {
1369
+ "type": "object",
1370
+ "properties": {
1371
+ "query": {
1372
+ "type": "string",
1373
+ "description": "The search query to execute",
1374
+ }
1375
+ },
1376
+ "required": ["query"],
1377
+ },
1378
+ }
1379
+ }
1380
+ ]
1381
+
1382
+
1383
+ user_prompt_calculate = "What is 25 * 4 + 10?"
1384
+ response_calculate = client.chat(user_prompt_calculate, tools=tools)
1385
+ print(response_calculate)
1386
+
1387
+ user_prompt_search = "Find information on HelpingAI and who is its developer"
1388
+ response_search = client.chat(user_prompt_search, tools=tools)
1389
+ print(response_search)
1465
1390
 
1466
- ### 23. Freegemini - chat with gemini for free
1467
- ```python
1468
- from webscout import FreeGemini
1469
- ai = FreeGemini()
1470
- response = ai.chat("What is the meaning of life?")
1471
- print(response)
1472
1391
  ```
1473
1392
 
1474
- ### 24. LLama 70b - chat with meta's llama 3 70b
1393
+ ### LLama 70b - chat with meta's llama 3 70b
1475
1394
  ```python
1476
1395
 
1477
1396
  from webscout import LLAMA
@@ -1481,15 +1400,127 @@ llama = LLAMA()
1481
1400
  r = llama.chat("What is the meaning of life?")
1482
1401
  print(r)
1483
1402
  ```
1484
- ### 25. AndiSearch
1403
+ ### AndiSearch
1485
1404
  ```python
1486
1405
  from webscout import AndiSearch
1487
1406
  a = AndiSearch()
1488
1407
  print(a.chat("HelpingAI-9B"))
1489
1408
  ```
1409
+ ### Function calling-beta
1410
+ ```python
1411
+ import json
1412
+ import logging
1413
+ from webscout import LLAMA3, WEBS
1414
+ from webscout.Agents.functioncall import FunctionCallingAgent
1415
+
1416
+ # Define tools that the agent can use
1417
+ tools = [
1418
+ {
1419
+ "type": "function",
1420
+ "function": {
1421
+ "name": "UserDetail",
1422
+ "parameters": {
1423
+ "type": "object",
1424
+ "title": "UserDetail",
1425
+ "properties": {
1426
+ "name": {
1427
+ "title": "Name",
1428
+ "type": "string"
1429
+ },
1430
+ "age": {
1431
+ "title": "Age",
1432
+ "type": "integer"
1433
+ }
1434
+ },
1435
+ "required": ["name", "age"]
1436
+ }
1437
+ }
1438
+ },
1439
+ {
1440
+ "type": "function",
1441
+ "function": {
1442
+ "name": "web_search",
1443
+ "description": "Search query on google",
1444
+ "parameters": {
1445
+ "type": "object",
1446
+ "properties": {
1447
+ "query": {
1448
+ "type": "string",
1449
+ "description": "web search query"
1450
+ }
1451
+ },
1452
+ "required": ["query"]
1453
+ }
1454
+ }
1455
+ },
1456
+ { # New general AI tool
1457
+ "type": "function",
1458
+ "function": {
1459
+ "name": "general_ai",
1460
+ "description": "Use general AI knowledge to answer the question",
1461
+ "parameters": {
1462
+ "type": "object",
1463
+ "properties": {
1464
+ "question": {
1465
+ "type": "string",
1466
+ "description": "The question to answer"
1467
+ }
1468
+ },
1469
+ "required": ["question"]
1470
+ }
1471
+ }
1472
+ }
1473
+ ]
1474
+
1475
+ # Initialize the FunctionCallingAgent with the specified tools
1476
+ agent = FunctionCallingAgent(tools=tools)
1477
+ llama = LLAMA3()
1478
+ from rich import print
1479
+ # Input message from the user
1480
+ user = input(">>> ")
1481
+ message = user
1482
+ function_call_data = agent.function_call_handler(message)
1483
+ print(f"Function Call Data: {function_call_data}")
1484
+
1485
+ # Check for errors in the function call data
1486
+ if "error" not in function_call_data:
1487
+ function_name = function_call_data.get("tool_name") # Use 'tool_name' instead of 'name'
1488
+ if function_name == "web_search":
1489
+ arguments = function_call_data.get("tool_input", {}) # Get tool input arguments
1490
+ query = arguments.get("query")
1491
+ if query:
1492
+ with WEBS() as webs:
1493
+ search_results = webs.text(query, max_results=5)
1494
+ prompt = (
1495
+ f"Based on the following search results:\n\n{search_results}\n\n"
1496
+ f"Question: {user}\n\n"
1497
+ "Please provide a comprehensive answer to the question based on the search results above. "
1498
+ "Include relevant webpage URLs in your answer when appropriate. "
1499
+ "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
1500
+ )
1501
+ response = llama.chat(prompt)
1502
+ for c in response:
1503
+ print(c, end="", flush=True)
1490
1504
 
1491
- ### 25. LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai
1492
- code similar to other providers
1505
+ else:
1506
+ print("Please provide a search query.")
1507
+ elif function_name == "general_ai": # Handle general AI tool
1508
+ arguments = function_call_data.get("tool_input", {})
1509
+ question = arguments.get("question")
1510
+ if question:
1511
+ response = llama.chat(question) # Use LLM directly
1512
+ for c in response:
1513
+ print(c, end="", flush=True)
1514
+ else:
1515
+ print("Please provide a question.")
1516
+ else:
1517
+ result = agent.execute_function(function_call_data)
1518
+ print(f"Function Execution Result: {result}")
1519
+ else:
1520
+ print(f"Error: {function_call_data['error']}")
1521
+ ```
1522
+ ### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle
1523
+ code similar to other providers
1493
1524
  ### `LLM`
1494
1525
  ```python
1495
1526
  from webscout.LLM import LLM
@@ -1532,7 +1563,7 @@ from webscout.Local import formats
1532
1563
  # 1. Download the model
1533
1564
  repo_id = "microsoft/Phi-3-mini-4k-instruct-gguf" # Replace with the desired Hugging Face repo
1534
1565
  filename = "Phi-3-mini-4k-instruct-q4.gguf" # Replace with the correct filename
1535
- model_path = download_model(repo_id, filename)
1566
+ model_path = download_model(repo_id, filename, token="")
1536
1567
 
1537
1568
  # 2. Load the model
1538
1569
  model = Model(model_path, n_gpu_layers=4)
@@ -1692,82 +1723,8 @@ autollama(
1692
1723
  * The `model_path` in `autollama` is the Hugging Face model ID, and `gguf_file` is the GGUF file ID.
1693
1724
 
1694
1725
 
1695
- ### `LLM` with internet
1696
- ```python
1697
- from __future__ import annotations
1698
- from typing import List, Optional
1699
-
1700
- from webscout.LLM import LLM
1701
- from webscout import WEBS
1702
- import warnings
1703
-
1704
- system_message: str = (
1705
- "As an AI assistant, I have been designed with advanced capabilities, including real-time access to online resources. This enables me to enrich our conversations and provide you with informed and accurate responses, drawing from a vast array of information. With each interaction, my goal is to create a seamless and meaningful connection, offering insights and sharing relevant content."
1706
- "My directives emphasize the importance of respect, impartiality, and intellectual integrity. I am here to provide unbiased responses, ensuring an ethical and respectful exchange. I will respect your privacy and refrain from sharing any personal information that may be obtained during our conversations or through web searches, only utilizing web search functionality when necessary to provide the most accurate and up-to-date information."
1707
- "Together, let's explore a diverse range of topics, creating an enjoyable and informative experience, all while maintaining the highest standards of privacy and respect"
1708
- )
1709
-
1710
- # Ignore the specific UserWarning
1711
- warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffio", lineno=205)
1712
- LLM = LLM(model="mistralai/Mixtral-8x22B-Instruct-v0.1", system_message=system_message)
1713
-
1714
-
1715
- def chat(
1716
- user_input: str, webs: WEBS, max_results: int = 10
1717
- ) -> Optional[str]:
1718
- """
1719
- Chat function to perform a web search based on the user input and generate a response using the LLM model.
1720
-
1721
- Parameters
1722
- ----------
1723
- user_input : str
1724
- The user input to be used for the web search
1725
- webs : WEBS
1726
- The web search instance to be used to perform the search
1727
- max_results : int, optional
1728
- The maximum number of search results to include in the response, by default 10
1729
-
1730
- Returns
1731
- -------
1732
- Optional[str]
1733
- The response generated by the LLM model, or None if there is no response
1734
- """
1735
- # Perform a web search based on the user input
1736
- search_results: List[str] = []
1737
- for r in webs.text(
1738
- user_input, region="wt-wt", safesearch="off", timelimit="y", max_results=max_results
1739
- ):
1740
- search_results.append(str(r)) # Convert each result to a string
1741
-
1742
- # Define the messages to be sent, including the user input, search results, and system message
1743
- messages = [
1744
- {"role": "user", "content": user_input + "\n" + "websearch results are:" + "\n".join(search_results)},
1745
- ]
1746
-
1747
- # Use the chat method to get the response
1748
- response = LLM.chat(messages)
1749
-
1750
- return response
1751
-
1752
-
1753
- if __name__ == "__main__":
1754
- while True:
1755
- # Get the user input
1756
- user_input = input("User: ")
1757
-
1758
- # Perform a web search based on the user input
1759
- with WEBS() as webs:
1760
- response = chat(user_input, webs)
1761
-
1762
- # Print the response
1763
- if response:
1764
- print("AI:", response)
1765
- else:
1766
- print("No response")
1767
- ```
1768
1726
 
1769
1727
  ## `Webai` - terminal gpt and an open interpreter
1770
- Code is in rawdog.py file
1771
1728
  ```
1772
1729
  ```shell
1773
1730
  python -m webscout.webai webai --provider "phind" --rawdog