webscout 6.0.tar.gz → 6.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of webscout has been flagged as possibly problematic.
Files changed (136)
  1. {webscout-6.0/webscout.egg-info → webscout-6.1}/PKG-INFO +123 -120
  2. {webscout-6.0 → webscout-6.1}/README.md +114 -119
  3. {webscout-6.0 → webscout-6.1}/setup.py +8 -0
  4. {webscout-6.0 → webscout-6.1}/webscout/Agents/Onlinesearcher.py +22 -10
  5. {webscout-6.0 → webscout-6.1}/webscout/Agents/functioncall.py +2 -2
  6. {webscout-6.0 → webscout-6.1}/webscout/Bard.py +21 -21
  7. webscout-6.1/webscout/Local/__init__.py +10 -0
  8. webscout-6.1/webscout/Local/formats.py +745 -0
  9. webscout-6.1/webscout/Local/model.py +1368 -0
  10. webscout-6.1/webscout/Local/samplers.py +125 -0
  11. webscout-6.1/webscout/Local/thread.py +539 -0
  12. webscout-6.1/webscout/Local/ui.py +401 -0
  13. webscout-6.1/webscout/Local/utils.py +363 -0
  14. {webscout-6.0 → webscout-6.1}/webscout/Provider/Amigo.py +1 -1
  15. webscout-6.1/webscout/Provider/NinjaChat.py +200 -0
  16. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/Nexra.py +3 -3
  17. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/__init__.py +2 -1
  18. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/aiforce.py +2 -2
  19. webscout-6.1/webscout/Provider/TTI/imgninza.py +136 -0
  20. {webscout-6.0 → webscout-6.1}/webscout/Provider/Youchat.py +1 -1
  21. {webscout-6.0 → webscout-6.1}/webscout/Provider/__init__.py +8 -1
  22. webscout-6.1/webscout/Provider/aimathgpt.py +193 -0
  23. {webscout-6.0 → webscout-6.1}/webscout/Provider/felo_search.py +1 -1
  24. webscout-6.1/webscout/Provider/gaurish.py +168 -0
  25. webscout-6.1/webscout/Provider/geminiprorealtime.py +160 -0
  26. {webscout-6.0 → webscout-6.1}/webscout/Provider/julius.py +4 -0
  27. {webscout-6.0 → webscout-6.1}/webscout/exceptions.py +5 -1
  28. {webscout-6.0 → webscout-6.1}/webscout/utils.py +3 -0
  29. {webscout-6.0 → webscout-6.1}/webscout/version.py +1 -1
  30. {webscout-6.0 → webscout-6.1}/webscout/webscout_search.py +154 -123
  31. {webscout-6.0 → webscout-6.1/webscout.egg-info}/PKG-INFO +123 -120
  32. {webscout-6.0 → webscout-6.1}/webscout.egg-info/SOURCES.txt +6 -1
  33. {webscout-6.0 → webscout-6.1}/webscout.egg-info/requires.txt +8 -0
  34. webscout-6.0/webscout/Local/__init__.py +0 -11
  35. webscout-6.0/webscout/Local/formats.py +0 -535
  36. webscout-6.0/webscout/Local/model.py +0 -771
  37. webscout-6.0/webscout/Local/rawdog.py +0 -946
  38. webscout-6.0/webscout/Local/samplers.py +0 -161
  39. webscout-6.0/webscout/Local/thread.py +0 -698
  40. webscout-6.0/webscout/Local/utils.py +0 -186
  41. {webscout-6.0 → webscout-6.1}/LICENSE.md +0 -0
  42. {webscout-6.0 → webscout-6.1}/setup.cfg +0 -0
  43. {webscout-6.0 → webscout-6.1}/webscout/AIauto.py +0 -0
  44. {webscout-6.0 → webscout-6.1}/webscout/AIbase.py +0 -0
  45. {webscout-6.0 → webscout-6.1}/webscout/AIutel.py +0 -0
  46. {webscout-6.0 → webscout-6.1}/webscout/Agents/__init__.py +0 -0
  47. {webscout-6.0 → webscout-6.1}/webscout/Bing_search.py +0 -0
  48. {webscout-6.0 → webscout-6.1}/webscout/DWEBS.py +0 -0
  49. {webscout-6.0 → webscout-6.1}/webscout/Extra/__init__.py +0 -0
  50. {webscout-6.0 → webscout-6.1}/webscout/Extra/autollama.py +0 -0
  51. {webscout-6.0 → webscout-6.1}/webscout/Extra/gguf.py +0 -0
  52. {webscout-6.0 → webscout-6.1}/webscout/Extra/weather.py +0 -0
  53. {webscout-6.0 → webscout-6.1}/webscout/Extra/weather_ascii.py +0 -0
  54. {webscout-6.0 → webscout-6.1}/webscout/LLM.py +0 -0
  55. {webscout-6.0 → webscout-6.1}/webscout/Local/_version.py +0 -0
  56. {webscout-6.0 → webscout-6.1}/webscout/Provider/AI21.py +0 -0
  57. {webscout-6.0 → webscout-6.1}/webscout/Provider/Andi.py +0 -0
  58. {webscout-6.0 → webscout-6.1}/webscout/Provider/BasedGPT.py +0 -0
  59. {webscout-6.0 → webscout-6.1}/webscout/Provider/Bing.py +0 -0
  60. {webscout-6.0 → webscout-6.1}/webscout/Provider/Blackboxai.py +0 -0
  61. {webscout-6.0 → webscout-6.1}/webscout/Provider/ChatGPTES.py +0 -0
  62. {webscout-6.0 → webscout-6.1}/webscout/Provider/ChatHub.py +0 -0
  63. {webscout-6.0 → webscout-6.1}/webscout/Provider/Chatify.py +0 -0
  64. {webscout-6.0 → webscout-6.1}/webscout/Provider/Cloudflare.py +0 -0
  65. {webscout-6.0 → webscout-6.1}/webscout/Provider/Cohere.py +0 -0
  66. {webscout-6.0 → webscout-6.1}/webscout/Provider/DARKAI.py +0 -0
  67. {webscout-6.0 → webscout-6.1}/webscout/Provider/Deepinfra.py +0 -0
  68. {webscout-6.0 → webscout-6.1}/webscout/Provider/Deepseek.py +0 -0
  69. {webscout-6.0 → webscout-6.1}/webscout/Provider/DiscordRocks.py +0 -0
  70. {webscout-6.0 → webscout-6.1}/webscout/Provider/EDITEE.py +0 -0
  71. {webscout-6.0 → webscout-6.1}/webscout/Provider/Farfalle.py +0 -0
  72. {webscout-6.0 → webscout-6.1}/webscout/Provider/Free2GPT.py +0 -0
  73. {webscout-6.0 → webscout-6.1}/webscout/Provider/GPTWeb.py +0 -0
  74. {webscout-6.0 → webscout-6.1}/webscout/Provider/Gemini.py +0 -0
  75. {webscout-6.0 → webscout-6.1}/webscout/Provider/Groq.py +0 -0
  76. {webscout-6.0 → webscout-6.1}/webscout/Provider/Koboldai.py +0 -0
  77. {webscout-6.0 → webscout-6.1}/webscout/Provider/Llama.py +0 -0
  78. {webscout-6.0 → webscout-6.1}/webscout/Provider/Llama3.py +0 -0
  79. {webscout-6.0 → webscout-6.1}/webscout/Provider/OLLAMA.py +0 -0
  80. {webscout-6.0 → webscout-6.1}/webscout/Provider/Openai.py +0 -0
  81. {webscout-6.0 → webscout-6.1}/webscout/Provider/PI.py +0 -0
  82. {webscout-6.0 → webscout-6.1}/webscout/Provider/Perplexity.py +0 -0
  83. {webscout-6.0 → webscout-6.1}/webscout/Provider/Phind.py +0 -0
  84. {webscout-6.0 → webscout-6.1}/webscout/Provider/PizzaGPT.py +0 -0
  85. {webscout-6.0 → webscout-6.1}/webscout/Provider/RUBIKSAI.py +0 -0
  86. {webscout-6.0 → webscout-6.1}/webscout/Provider/Reka.py +0 -0
  87. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/PollinationsAI.py +0 -0
  88. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/WebSimAI.py +0 -0
  89. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/amigo.py +0 -0
  90. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/artbit.py +0 -0
  91. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/blackboximage.py +0 -0
  92. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/deepinfra.py +0 -0
  93. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTI/huggingface.py +0 -0
  94. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTS/__init__.py +0 -0
  95. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTS/parler.py +0 -0
  96. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTS/streamElements.py +0 -0
  97. {webscout-6.0 → webscout-6.1}/webscout/Provider/TTS/voicepod.py +0 -0
  98. {webscout-6.0 → webscout-6.1}/webscout/Provider/TeachAnything.py +0 -0
  99. {webscout-6.0 → webscout-6.1}/webscout/Provider/ai4chat.py +0 -0
  100. {webscout-6.0 → webscout-6.1}/webscout/Provider/aigames.py +0 -0
  101. {webscout-6.0 → webscout-6.1}/webscout/Provider/bagoodex.py +0 -0
  102. {webscout-6.0 → webscout-6.1}/webscout/Provider/bixin.py +0 -0
  103. {webscout-6.0 → webscout-6.1}/webscout/Provider/cerebras.py +0 -0
  104. {webscout-6.0 → webscout-6.1}/webscout/Provider/cleeai.py +0 -0
  105. {webscout-6.0 → webscout-6.1}/webscout/Provider/elmo.py +0 -0
  106. {webscout-6.0 → webscout-6.1}/webscout/Provider/geminiapi.py +0 -0
  107. {webscout-6.0 → webscout-6.1}/webscout/Provider/genspark.py +0 -0
  108. {webscout-6.0 → webscout-6.1}/webscout/Provider/koala.py +0 -0
  109. {webscout-6.0 → webscout-6.1}/webscout/Provider/learnfastai.py +0 -0
  110. {webscout-6.0 → webscout-6.1}/webscout/Provider/lepton.py +0 -0
  111. {webscout-6.0 → webscout-6.1}/webscout/Provider/llamatutor.py +0 -0
  112. {webscout-6.0 → webscout-6.1}/webscout/Provider/meta.py +0 -0
  113. {webscout-6.0 → webscout-6.1}/webscout/Provider/prefind.py +0 -0
  114. {webscout-6.0 → webscout-6.1}/webscout/Provider/promptrefine.py +0 -0
  115. {webscout-6.0 → webscout-6.1}/webscout/Provider/turboseek.py +0 -0
  116. {webscout-6.0 → webscout-6.1}/webscout/Provider/tutorai.py +0 -0
  117. {webscout-6.0 → webscout-6.1}/webscout/Provider/twitterclone.py +0 -0
  118. {webscout-6.0 → webscout-6.1}/webscout/Provider/upstage.py +0 -0
  119. {webscout-6.0 → webscout-6.1}/webscout/Provider/x0gpt.py +0 -0
  120. {webscout-6.0 → webscout-6.1}/webscout/Provider/xdash.py +0 -0
  121. {webscout-6.0 → webscout-6.1}/webscout/Provider/yep.py +0 -0
  122. {webscout-6.0 → webscout-6.1}/webscout/YTdownloader.py +0 -0
  123. {webscout-6.0 → webscout-6.1}/webscout/__init__.py +0 -0
  124. {webscout-6.0 → webscout-6.1}/webscout/__main__.py +0 -0
  125. {webscout-6.0 → webscout-6.1}/webscout/cli.py +0 -0
  126. {webscout-6.0 → webscout-6.1}/webscout/g4f.py +0 -0
  127. {webscout-6.0 → webscout-6.1}/webscout/models.py +0 -0
  128. {webscout-6.0 → webscout-6.1}/webscout/requestsHTMLfix.py +0 -0
  129. {webscout-6.0 → webscout-6.1}/webscout/tempid.py +0 -0
  130. {webscout-6.0 → webscout-6.1}/webscout/transcriber.py +0 -0
  131. {webscout-6.0 → webscout-6.1}/webscout/webai.py +0 -0
  132. {webscout-6.0 → webscout-6.1}/webscout/webscout_search_async.py +0 -0
  133. {webscout-6.0 → webscout-6.1}/webscout/websx_search.py +0 -0
  134. {webscout-6.0 → webscout-6.1}/webscout.egg-info/dependency_links.txt +0 -0
  135. {webscout-6.0 → webscout-6.1}/webscout.egg-info/entry_points.txt +0 -0
  136. {webscout-6.0 → webscout-6.1}/webscout.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 6.0
+Version: 6.1
 Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
 Author: OEvortex
 Author-email: helpingai5@gmail.com
@@ -53,6 +53,13 @@ Requires-Dist: emoji
 Requires-Dist: openai
 Requires-Dist: prompt-toolkit
 Requires-Dist: fake_useragent
+Requires-Dist: primp
+Requires-Dist: pyreqwest_impersonate
+Requires-Dist: lxml_html_clean
+Requires-Dist: gradio_client
+Requires-Dist: psutil
+Requires-Dist: colorlog
+Requires-Dist: yaspin
 Provides-Extra: dev
 Requires-Dist: ruff>=0.1.6; extra == "dev"
 Requires-Dist: pytest>=7.4.2; extra == "dev"
@@ -61,6 +68,7 @@ Requires-Dist: llama-cpp-python; extra == "local"
 Requires-Dist: colorama; extra == "local"
 Requires-Dist: numpy; extra == "local"
 Requires-Dist: huggingface_hub[cli]; extra == "local"
+Requires-Dist: unicorn; extra == "local"
 
 <div align="center">
 <!-- Replace `#` with your actual links -->
@@ -868,7 +876,7 @@ print(result)
 ___
 </details>
 
-### 🖼️ Text to Images - DeepInfraImager, PollinationsAI, BlackboxAIImager, AiForceimagger, NexraImager, HFimager, ArtbitImager
+### 🖼️ Text to Images - DeepInfraImager, PollinationsAI, BlackboxAIImager, AiForceimager, NexraImager, HFimager, ArtbitImager, NinjaImager, WebSimAI, AmigoImager
 
 **Every TTI provider has the same usage code, you just need to change the import.**
 
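The hunk above extends the shared text-to-image heading with the providers added in this release (NinjaImager, WebSimAI, AmigoImager). A minimal usage sketch follows, assuming the new imagers expose the same `generate`/`save` interface and top-level `webscout` export as the existing imagers described in the README; none of these names are verified against 6.1.

```python
# Hedged sketch: assumes NinjaImager is importable from webscout and follows
# the common TTI pattern (generate(prompt, amount) -> response, save(response))
# used by the other imagers in this README. Unverified for webscout 6.1.
from webscout import NinjaImager

imager = NinjaImager()
images = imager.generate("A red panda reading a book", 1)  # request one image
imager.save(images)  # write the generated image(s) to disk
```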
@@ -897,7 +905,7 @@ voicepods.play_audio(audio_file)
 
 ```python
 from webscout import WEBS as w
-R = w().chat("Who are you", model='gpt-4o-mini') # GPT-3.5 Turbo, mixtral-8x7b, llama-3-70b, claude-3-haiku, gpt-4o-mini
+R = w().chat("Who are you", model='gpt-4o-mini') # mixtral-8x7b, llama-3.1-70b, claude-3-haiku, gpt-4o-mini
 print(R)
 ```
 
@@ -1316,117 +1324,120 @@ print(a.chat("HelpingAI-9B"))
 ```python
 import json
 import logging
-from webscout import LLAMA3, WEBS
+from webscout import Julius, WEBS
 from webscout.Agents.functioncall import FunctionCallingAgent
+from rich import print
 
-# Define tools that the agent can use
-tools = [
-    {
-        "type": "function",
-        "function": {
-            "name": "UserDetail",
-            "parameters": {
-                "type": "object",
-                "title": "UserDetail",
-                "properties": {
-                    "name": {
-                        "title": "Name",
-                        "type": "string"
+class FunctionExecutor:
+    def __init__(self, llama):
+        self.llama = llama
+
+    def execute_web_search(self, arguments):
+        query = arguments.get("query")
+        if not query:
+            return "Please provide a search query."
+        with WEBS() as webs:
+            search_results = webs.text(query, max_results=5)
+        prompt = (
+            f"Based on the following search results:\n\n{search_results}\n\n"
+            f"Question: {query}\n\n"
+            "Please provide a comprehensive answer to the question based on the search results above. "
+            "Include relevant webpage URLs in your answer when appropriate. "
+            "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
+        )
+        return self.llama.chat(prompt)
+
+    def execute_general_ai(self, arguments):
+        question = arguments.get("question")
+        if not question:
+            return "Please provide a question."
+        return self.llama.chat(question)
+
+    def execute_UserDetail(self, arguments):
+        name = arguments.get("name")
+        age = arguments.get("age")
+        return f"User details - Name: {name}, Age: {age}"
+
+def main():
+    tools = [
+        {
+            "type": "function",
+            "function": {
+                "name": "UserDetail",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "name": {"title": "Name", "type": "string"},
+                        "age": {"title": "Age", "type": "integer"}
                     },
-                    "age": {
-                        "title": "Age",
-                        "type": "integer"
-                    }
-                },
-                "required": ["name", "age"]
+                    "required": ["name", "age"]
+                }
             }
-        }
-    },
-    {
-        "type": "function",
-        "function": {
-            "name": "web_search",
-            "description": "Search query on google",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "query": {
-                        "type": "string",
-                        "description": "web search query"
-                    }
-                },
-                "required": ["query"]
+        },
+        {
+            "type": "function",
+            "function": {
+                "name": "web_search",
+                "description": "Search the web for information using Google Search.",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "query": {
+                            "type": "string",
+                            "description": "The search query to be executed."
+                        }
+                    },
+                    "required": ["query"]
+                }
             }
-        }
-    },
-    { # New general AI tool
-        "type": "function",
-        "function": {
-            "name": "general_ai",
-            "description": "Use general AI knowledge to answer the question",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "question": {
-                        "type": "string",
-                        "description": "The question to answer"
-                    }
-                },
-                "required": ["question"]
+        },
+        {
+            "type": "function",
+            "function": {
+                "name": "general_ai",
+                "description": "Use general AI knowledge to answer the question",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "question": {"type": "string", "description": "The question to answer"}
+                    },
+                    "required": ["question"]
+                }
             }
         }
-    }
-]
+    ]
 
-# Initialize the FunctionCallingAgent with the specified tools
-agent = FunctionCallingAgent(tools=tools)
-llama = LLAMA3()
-from rich import print
-# Input message from the user
-user = input(">>> ")
-message = user
-function_call_data = agent.function_call_handler(message)
-print(f"Function Call Data: {function_call_data}")
-
-# Check for errors in the function call data
-if "error" not in function_call_data:
-    function_name = function_call_data.get("tool_name") # Use 'tool_name' instead of 'name'
-    if function_name == "web_search":
-        arguments = function_call_data.get("tool_input", {}) # Get tool input arguments
-        query = arguments.get("query")
-        if query:
-            with WEBS() as webs:
-                search_results = webs.text(query, max_results=5)
-            prompt = (
-                f"Based on the following search results:\n\n{search_results}\n\n"
-                f"Question: {user}\n\n"
-                "Please provide a comprehensive answer to the question based on the search results above. "
-                "Include relevant webpage URLs in your answer when appropriate. "
-                "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
-            )
-            response = llama.chat(prompt)
-            for c in response:
-                print(c, end="", flush=True)
+    agent = FunctionCallingAgent(tools=tools)
+    llama = Julius()
+    function_executor = FunctionExecutor(llama)
 
+    user_input = input(">>> ")
+    function_call_data = agent.function_call_handler(user_input)
+    print(f"Function Call Data: {function_call_data}")
+
+    try:
+        if "error" not in function_call_data:
+            function_name = function_call_data.get("tool_name")
+            arguments = function_call_data.get("tool_input", {})
+
+            execute_function = getattr(function_executor, f"execute_{function_name}", None)
+            if execute_function:
+                result = execute_function(arguments)
+                print("Function Execution Result:")
+                for c in result:
+                    print(c, end="", flush=True)
+            else:
+                print(f"Unknown function: {function_name}")
         else:
-            print("Please provide a search query.")
-    elif function_name == "general_ai": # Handle general AI tool
-        arguments = function_call_data.get("tool_input", {})
-        question = arguments.get("question")
-        if question:
-            response = llama.chat(question) # Use LLM directly
-            for c in response:
-                print(c, end="", flush=True)
-        else:
-            print("Please provide a question.")
-    else:
-        result = agent.execute_function(function_call_data)
-        print(f"Function Execution Result: {result}")
-else:
-    print(f"Error: {function_call_data['error']}")
+            print(f"Error: {function_call_data['error']}")
+    except Exception as e:
+        print(f"An error occurred: {str(e)}")
+
+if __name__ == "__main__":
+    main()
 ```
 
-### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek, Editee, AI21, Chatify, Cerebras, X0GPT, Lepton, GEMINIAPI, Cleeai, Elmo, Genspark, Upstage, Free2GPT, Bing, DiscordRocks, GPTWeb, AIGameIO, LlamaTutor, PromptRefine, AIUncensored, TutorAI, Bixin, ChatGPTES, Bagoodex, ChatHub, AmigoChat
+### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek, Editee, AI21, Chatify, Cerebras, X0GPT, Lepton, GEMINIAPI, Cleeai, Elmo, Genspark, Upstage, Free2GPT, Bing, DiscordRocks, GPTWeb, AIGameIO, LlamaTutor, PromptRefine, AIUncensored, TutorAI, Bixin, ChatGPTES, Bagoodex, ChatHub, AmigoChat, AIMathGPT, GaurishCerebras, NinjaChat, GeminiPro
 
 Code is similar to other providers.
 
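The hunk above also appends AIMathGPT, GaurishCerebras, NinjaChat, and GeminiPro to the provider list, noting that usage mirrors the other providers. A hedged sketch of that common pattern is below, assuming NinjaChat is exported from `webscout` and exposes the usual `chat()` method; this is unverified against the 6.1 release.

```python
# Hedged sketch: assumes the new NinjaChat provider follows the shared
# provider interface (instantiate, then .chat(prompt) returns the reply text),
# as "Code is similar to other providers." suggests. Unverified for 6.1.
from webscout import NinjaChat

ai = NinjaChat()
reply = ai.chat("Summarize what webscout does in one sentence.")
print(reply)
```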
@@ -1465,24 +1476,16 @@ Webscout can now run GGUF models locally. You can download and run your favorite
 **Example:**
 
 ```python
-from webscout.Local.utils import download_model
-from webscout.Local.model import Model
-from webscout.Local.thread import Thread
-from webscout.Local import formats
-
-# 1. Download the model
-repo_id = "microsoft/Phi-3-mini-4k-instruct-gguf" # Replace with the desired Hugging Face repo
-filename = "Phi-3-mini-4k-instruct-q4.gguf" # Replace with the correct filename
-model_path = download_model(repo_id, filename, token="")
-
-# 2. Load the model
-model = Model(model_path, n_gpu_layers=4)
-
-# 3. Create a Thread for conversation
-thread = Thread(model, formats.phi3)
-
-# 4. Start interacting with the model
-thread.interact()
+from webscout.Local import *
+model_path = download_model("Qwen/Qwen2.5-0.5B-Instruct-GGUF", "qwen2.5-0.5b-instruct-q2_k.gguf", token=None)
+model = Model(model_path, n_gpu_layers=0, context_length=2048)
+thread = Thread(model, format=chatml)
+# print(thread.send("hi")) #send a single msg to ai
+
+# thread.interact() # interact with the model in terminal
+# start webui
+# webui = WebUI(thread)
+# webui.start(host="0.0.0.0", port=8080, ssl=True) #Use ssl=True and make cert and key for https
 ```
 
 ## 🐶 Local-rawdog
@@ -804,7 +804,7 @@ print(result)
 ___
 </details>
 
-### 🖼️ Text to Images - DeepInfraImager, PollinationsAI, BlackboxAIImager, AiForceimagger, NexraImager, HFimager, ArtbitImager
+### 🖼️ Text to Images - DeepInfraImager, PollinationsAI, BlackboxAIImager, AiForceimager, NexraImager, HFimager, ArtbitImager, NinjaImager, WebSimAI, AmigoImager
 
 **Every TTI provider has the same usage code, you just need to change the import.**
 
@@ -833,7 +833,7 @@ voicepods.play_audio(audio_file)
 
 ```python
 from webscout import WEBS as w
-R = w().chat("Who are you", model='gpt-4o-mini') # GPT-3.5 Turbo, mixtral-8x7b, llama-3-70b, claude-3-haiku, gpt-4o-mini
+R = w().chat("Who are you", model='gpt-4o-mini') # mixtral-8x7b, llama-3.1-70b, claude-3-haiku, gpt-4o-mini
 print(R)
 ```
 
@@ -1252,117 +1252,120 @@ print(a.chat("HelpingAI-9B"))
 ```python
 import json
 import logging
-from webscout import LLAMA3, WEBS
+from webscout import Julius, WEBS
 from webscout.Agents.functioncall import FunctionCallingAgent
+from rich import print
 
-# Define tools that the agent can use
-tools = [
-    {
-        "type": "function",
-        "function": {
-            "name": "UserDetail",
-            "parameters": {
-                "type": "object",
-                "title": "UserDetail",
-                "properties": {
-                    "name": {
-                        "title": "Name",
-                        "type": "string"
+class FunctionExecutor:
+    def __init__(self, llama):
+        self.llama = llama
+
+    def execute_web_search(self, arguments):
+        query = arguments.get("query")
+        if not query:
+            return "Please provide a search query."
+        with WEBS() as webs:
+            search_results = webs.text(query, max_results=5)
+        prompt = (
+            f"Based on the following search results:\n\n{search_results}\n\n"
+            f"Question: {query}\n\n"
+            "Please provide a comprehensive answer to the question based on the search results above. "
+            "Include relevant webpage URLs in your answer when appropriate. "
+            "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
+        )
+        return self.llama.chat(prompt)
+
+    def execute_general_ai(self, arguments):
+        question = arguments.get("question")
+        if not question:
+            return "Please provide a question."
+        return self.llama.chat(question)
+
+    def execute_UserDetail(self, arguments):
+        name = arguments.get("name")
+        age = arguments.get("age")
+        return f"User details - Name: {name}, Age: {age}"
+
+def main():
+    tools = [
+        {
+            "type": "function",
+            "function": {
+                "name": "UserDetail",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "name": {"title": "Name", "type": "string"},
+                        "age": {"title": "Age", "type": "integer"}
                     },
-                    "age": {
-                        "title": "Age",
-                        "type": "integer"
-                    }
-                },
-                "required": ["name", "age"]
+                    "required": ["name", "age"]
+                }
             }
-        }
-    },
-    {
-        "type": "function",
-        "function": {
-            "name": "web_search",
-            "description": "Search query on google",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "query": {
-                        "type": "string",
-                        "description": "web search query"
-                    }
-                },
-                "required": ["query"]
+        },
+        {
+            "type": "function",
+            "function": {
+                "name": "web_search",
+                "description": "Search the web for information using Google Search.",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "query": {
+                            "type": "string",
+                            "description": "The search query to be executed."
+                        }
+                    },
+                    "required": ["query"]
+                }
             }
-        }
-    },
-    { # New general AI tool
-        "type": "function",
-        "function": {
-            "name": "general_ai",
-            "description": "Use general AI knowledge to answer the question",
-            "parameters": {
-                "type": "object",
-                "properties": {
-                    "question": {
-                        "type": "string",
-                        "description": "The question to answer"
-                    }
-                },
-                "required": ["question"]
+        },
+        {
+            "type": "function",
+            "function": {
+                "name": "general_ai",
+                "description": "Use general AI knowledge to answer the question",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "question": {"type": "string", "description": "The question to answer"}
+                    },
+                    "required": ["question"]
+                }
             }
         }
-    }
-]
+    ]
 
-# Initialize the FunctionCallingAgent with the specified tools
-agent = FunctionCallingAgent(tools=tools)
-llama = LLAMA3()
-from rich import print
-# Input message from the user
-user = input(">>> ")
-message = user
-function_call_data = agent.function_call_handler(message)
-print(f"Function Call Data: {function_call_data}")
-
-# Check for errors in the function call data
-if "error" not in function_call_data:
-    function_name = function_call_data.get("tool_name") # Use 'tool_name' instead of 'name'
-    if function_name == "web_search":
-        arguments = function_call_data.get("tool_input", {}) # Get tool input arguments
-        query = arguments.get("query")
-        if query:
-            with WEBS() as webs:
-                search_results = webs.text(query, max_results=5)
-            prompt = (
-                f"Based on the following search results:\n\n{search_results}\n\n"
-                f"Question: {user}\n\n"
-                "Please provide a comprehensive answer to the question based on the search results above. "
-                "Include relevant webpage URLs in your answer when appropriate. "
-                "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
-            )
-            response = llama.chat(prompt)
-            for c in response:
-                print(c, end="", flush=True)
+    agent = FunctionCallingAgent(tools=tools)
+    llama = Julius()
+    function_executor = FunctionExecutor(llama)
 
+    user_input = input(">>> ")
+    function_call_data = agent.function_call_handler(user_input)
+    print(f"Function Call Data: {function_call_data}")
+
+    try:
+        if "error" not in function_call_data:
+            function_name = function_call_data.get("tool_name")
+            arguments = function_call_data.get("tool_input", {})
+
+            execute_function = getattr(function_executor, f"execute_{function_name}", None)
+            if execute_function:
+                result = execute_function(arguments)
+                print("Function Execution Result:")
+                for c in result:
+                    print(c, end="", flush=True)
+            else:
+                print(f"Unknown function: {function_name}")
        else:
-            print("Please provide a search query.")
-    elif function_name == "general_ai": # Handle general AI tool
-        arguments = function_call_data.get("tool_input", {})
-        question = arguments.get("question")
-        if question:
-            response = llama.chat(question) # Use LLM directly
-            for c in response:
-                print(c, end="", flush=True)
-        else:
-            print("Please provide a question.")
-    else:
-        result = agent.execute_function(function_call_data)
-        print(f"Function Execution Result: {result}")
-else:
-    print(f"Error: {function_call_data['error']}")
+            print(f"Error: {function_call_data['error']}")
+    except Exception as e:
+        print(f"An error occurred: {str(e)}")
+
+if __name__ == "__main__":
+    main()
 ```
 
-### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek, Editee, AI21, Chatify, Cerebras, X0GPT, Lepton, GEMINIAPI, Cleeai, Elmo, Genspark, Upstage, Free2GPT, Bing, DiscordRocks, GPTWeb, AIGameIO, LlamaTutor, PromptRefine, AIUncensored, TutorAI, Bixin, ChatGPTES, Bagoodex, ChatHub, AmigoChat
+### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek, Editee, AI21, Chatify, Cerebras, X0GPT, Lepton, GEMINIAPI, Cleeai, Elmo, Genspark, Upstage, Free2GPT, Bing, DiscordRocks, GPTWeb, AIGameIO, LlamaTutor, PromptRefine, AIUncensored, TutorAI, Bixin, ChatGPTES, Bagoodex, ChatHub, AmigoChat, AIMathGPT, GaurishCerebras, NinjaChat, GeminiPro
 
 Code is similar to other providers.
 
@@ -1401,24 +1404,16 @@ Webscout can now run GGUF models locally. You can download and run your favorite
 **Example:**
 
 ```python
-from webscout.Local.utils import download_model
-from webscout.Local.model import Model
-from webscout.Local.thread import Thread
-from webscout.Local import formats
-
-# 1. Download the model
-repo_id = "microsoft/Phi-3-mini-4k-instruct-gguf" # Replace with the desired Hugging Face repo
-filename = "Phi-3-mini-4k-instruct-q4.gguf" # Replace with the correct filename
-model_path = download_model(repo_id, filename, token="")
-
-# 2. Load the model
-model = Model(model_path, n_gpu_layers=4)
-
-# 3. Create a Thread for conversation
-thread = Thread(model, formats.phi3)
-
-# 4. Start interacting with the model
-thread.interact()
+from webscout.Local import *
+model_path = download_model("Qwen/Qwen2.5-0.5B-Instruct-GGUF", "qwen2.5-0.5b-instruct-q2_k.gguf", token=None)
+model = Model(model_path, n_gpu_layers=0, context_length=2048)
+thread = Thread(model, format=chatml)
+# print(thread.send("hi")) #send a single msg to ai
+
+# thread.interact() # interact with the model in terminal
+# start webui
+# webui = WebUI(thread)
+# webui.start(host="0.0.0.0", port=8080, ssl=True) #Use ssl=True and make cert and key for https
 ```
 
 ## 🐶 Local-rawdog
@@ -57,6 +57,13 @@ setup(
         "openai",
         "prompt-toolkit",
         "fake_useragent",
+        "primp",
+        "pyreqwest_impersonate",
+        "lxml_html_clean",
+        "gradio_client",
+        "psutil",
+        "colorlog",
+        "yaspin"
     ],
     entry_points={
         "console_scripts": [
@@ -74,6 +81,7 @@ setup(
             'colorama',
             'numpy',
             'huggingface_hub[cli]',
+            'unicorn',
         ],
     },
     license="HelpingAI",