webscout 5.0.tar.gz → 5.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic.

Files changed (105)
  1. {webscout-5.0/webscout.egg-info → webscout-5.2}/PKG-INFO +38 -18
  2. {webscout-5.0 → webscout-5.2}/README.md +34 -16
  3. {webscout-5.0 → webscout-5.2}/setup.py +5 -3
  4. webscout-5.2/webscout/AIauto.py +362 -0
  5. webscout-5.2/webscout/AIbase.py +240 -0
  6. {webscout-5.0 → webscout-5.2}/webscout/AIutel.py +31 -0
  7. webscout-5.2/webscout/Agents/Onlinesearcher.py +162 -0
  8. webscout-5.2/webscout/Agents/__init__.py +3 -0
  9. webscout-5.2/webscout/Agents/ai.py +186 -0
  10. {webscout-5.0 → webscout-5.2}/webscout/Agents/functioncall.py +57 -27
  11. webscout-5.2/webscout/Bing_search.py +154 -0
  12. webscout-5.2/webscout/Extra/autollama.py +179 -0
  13. {webscout-5.0 → webscout-5.2}/webscout/Local/_version.py +1 -1
  14. webscout-5.2/webscout/Provider/AI21.py +177 -0
  15. {webscout-5.0 → webscout-5.2}/webscout/Provider/Cloudflare.py +0 -4
  16. webscout-5.2/webscout/Provider/EDITEE.py +215 -0
  17. {webscout-5.0 → webscout-5.2}/webscout/Provider/Llama3.py +1 -1
  18. webscout-5.2/webscout/Provider/NetFly.py +256 -0
  19. {webscout-5.0 → webscout-5.2}/webscout/Provider/OLLAMA.py +11 -10
  20. webscout-5.2/webscout/Provider/TTI/PollinationsAI.py +138 -0
  21. webscout-5.2/webscout/Provider/TTI/__init__.py +2 -0
  22. webscout-5.2/webscout/Provider/TTI/deepinfra.py +148 -0
  23. webscout-5.2/webscout/Provider/TTS/__init__.py +2 -0
  24. webscout-5.2/webscout/Provider/TTS/streamElements.py +296 -0
  25. webscout-5.2/webscout/Provider/TTS/voicepod.py +114 -0
  26. webscout-5.2/webscout/Provider/TeachAnything.py +177 -0
  27. {webscout-5.0 → webscout-5.2}/webscout/Provider/__init__.py +8 -0
  28. {webscout-5.0 → webscout-5.2}/webscout/__init__.py +2 -0
  29. {webscout-5.0 → webscout-5.2}/webscout/version.py +1 -1
  30. {webscout-5.0 → webscout-5.2/webscout.egg-info}/PKG-INFO +38 -18
  31. {webscout-5.0 → webscout-5.2}/webscout.egg-info/SOURCES.txt +12 -2
  32. {webscout-5.0 → webscout-5.2}/webscout.egg-info/requires.txt +2 -0
  33. webscout-5.0/webscout/AIauto.py +0 -556
  34. webscout-5.0/webscout/AIbase.py +0 -138
  35. webscout-5.0/webscout/Agents/Onlinesearcher.py +0 -175
  36. webscout-5.0/webscout/Agents/__init__.py +0 -2
  37. webscout-5.0/webscout/Bing_search.py +0 -124
  38. webscout-5.0/webscout/Extra/autollama.py +0 -209
  39. webscout-5.0/webscout/async_providers.py +0 -21
  40. {webscout-5.0 → webscout-5.2}/LICENSE.md +0 -0
  41. {webscout-5.0 → webscout-5.2}/setup.cfg +0 -0
  42. {webscout-5.0 → webscout-5.2}/webscout/Bard.py +0 -0
  43. {webscout-5.0 → webscout-5.2}/webscout/DWEBS.py +0 -0
  44. {webscout-5.0 → webscout-5.2}/webscout/Extra/__init__.py +0 -0
  45. {webscout-5.0 → webscout-5.2}/webscout/Extra/gguf.py +0 -0
  46. {webscout-5.0 → webscout-5.2}/webscout/Extra/weather.py +0 -0
  47. {webscout-5.0 → webscout-5.2}/webscout/Extra/weather_ascii.py +0 -0
  48. {webscout-5.0 → webscout-5.2}/webscout/LLM.py +0 -0
  49. {webscout-5.0 → webscout-5.2}/webscout/Local/__init__.py +0 -0
  50. {webscout-5.0 → webscout-5.2}/webscout/Local/formats.py +0 -0
  51. {webscout-5.0 → webscout-5.2}/webscout/Local/model.py +0 -0
  52. {webscout-5.0 → webscout-5.2}/webscout/Local/rawdog.py +0 -0
  53. {webscout-5.0 → webscout-5.2}/webscout/Local/samplers.py +0 -0
  54. {webscout-5.0 → webscout-5.2}/webscout/Local/thread.py +0 -0
  55. {webscout-5.0 → webscout-5.2}/webscout/Local/utils.py +0 -0
  56. {webscout-5.0 → webscout-5.2}/webscout/Provider/Andi.py +0 -0
  57. {webscout-5.0 → webscout-5.2}/webscout/Provider/BasedGPT.py +0 -0
  58. {webscout-5.0 → webscout-5.2}/webscout/Provider/Berlin4h.py +0 -0
  59. {webscout-5.0 → webscout-5.2}/webscout/Provider/Blackboxai.py +0 -0
  60. {webscout-5.0 → webscout-5.2}/webscout/Provider/Cohere.py +0 -0
  61. {webscout-5.0 → webscout-5.2}/webscout/Provider/DARKAI.py +0 -0
  62. {webscout-5.0 → webscout-5.2}/webscout/Provider/Deepinfra.py +0 -0
  63. {webscout-5.0 → webscout-5.2}/webscout/Provider/Deepseek.py +0 -0
  64. {webscout-5.0 → webscout-5.2}/webscout/Provider/DiscordRocks.py +0 -0
  65. {webscout-5.0 → webscout-5.2}/webscout/Provider/Farfalle.py +0 -0
  66. {webscout-5.0 → webscout-5.2}/webscout/Provider/Gemini.py +0 -0
  67. {webscout-5.0 → webscout-5.2}/webscout/Provider/Groq.py +0 -0
  68. {webscout-5.0 → webscout-5.2}/webscout/Provider/Koboldai.py +0 -0
  69. {webscout-5.0 → webscout-5.2}/webscout/Provider/Llama.py +0 -0
  70. {webscout-5.0 → webscout-5.2}/webscout/Provider/Openai.py +0 -0
  71. {webscout-5.0 → webscout-5.2}/webscout/Provider/PI.py +0 -0
  72. {webscout-5.0 → webscout-5.2}/webscout/Provider/Perplexity.py +0 -0
  73. {webscout-5.0 → webscout-5.2}/webscout/Provider/Phind.py +0 -0
  74. {webscout-5.0 → webscout-5.2}/webscout/Provider/PizzaGPT.py +0 -0
  75. {webscout-5.0 → webscout-5.2}/webscout/Provider/Poe.py +0 -0
  76. {webscout-5.0 → webscout-5.2}/webscout/Provider/RUBIKSAI.py +0 -0
  77. {webscout-5.0 → webscout-5.2}/webscout/Provider/Reka.py +0 -0
  78. {webscout-5.0 → webscout-5.2}/webscout/Provider/ThinkAnyAI.py +0 -0
  79. {webscout-5.0 → webscout-5.2}/webscout/Provider/Youchat.py +0 -0
  80. {webscout-5.0 → webscout-5.2}/webscout/Provider/ai4chat.py +0 -0
  81. {webscout-5.0 → webscout-5.2}/webscout/Provider/felo_search.py +0 -0
  82. {webscout-5.0 → webscout-5.2}/webscout/Provider/julius.py +0 -0
  83. {webscout-5.0 → webscout-5.2}/webscout/Provider/koala.py +0 -0
  84. {webscout-5.0 → webscout-5.2}/webscout/Provider/liaobots.py +0 -0
  85. {webscout-5.0 → webscout-5.2}/webscout/Provider/meta.py +0 -0
  86. {webscout-5.0 → webscout-5.2}/webscout/Provider/turboseek.py +0 -0
  87. {webscout-5.0 → webscout-5.2}/webscout/Provider/xdash.py +0 -0
  88. {webscout-5.0 → webscout-5.2}/webscout/Provider/yep.py +0 -0
  89. {webscout-5.0 → webscout-5.2}/webscout/YTdownloader.py +0 -0
  90. {webscout-5.0 → webscout-5.2}/webscout/__main__.py +0 -0
  91. {webscout-5.0 → webscout-5.2}/webscout/cli.py +0 -0
  92. {webscout-5.0 → webscout-5.2}/webscout/exceptions.py +0 -0
  93. {webscout-5.0 → webscout-5.2}/webscout/g4f.py +0 -0
  94. {webscout-5.0 → webscout-5.2}/webscout/models.py +0 -0
  95. {webscout-5.0 → webscout-5.2}/webscout/tempid.py +0 -0
  96. {webscout-5.0 → webscout-5.2}/webscout/transcriber.py +0 -0
  97. {webscout-5.0 → webscout-5.2}/webscout/utils.py +0 -0
  98. {webscout-5.0 → webscout-5.2}/webscout/voice.py +0 -0
  99. {webscout-5.0 → webscout-5.2}/webscout/webai.py +0 -0
  100. {webscout-5.0 → webscout-5.2}/webscout/webscout_search.py +0 -0
  101. {webscout-5.0 → webscout-5.2}/webscout/webscout_search_async.py +0 -0
  102. {webscout-5.0 → webscout-5.2}/webscout/websx_search.py +0 -0
  103. {webscout-5.0 → webscout-5.2}/webscout.egg-info/dependency_links.txt +0 -0
  104. {webscout-5.0 → webscout-5.2}/webscout.egg-info/entry_points.txt +0 -0
  105. {webscout-5.0 → webscout-5.2}/webscout.egg-info/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  Metadata-Version: 2.1
  Name: webscout
- Version: 5.0
- Summary: Search for anything using Google, DuckDuckGo, brave, qwant, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
+ Version: 5.2
+ Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
  Author: OEvortex
  Author-email: helpingai5@gmail.com
  License: HelpingAI
@@ -66,6 +66,8 @@ Requires-Dist: requests_html
  Requires-Dist: bson
  Requires-Dist: cloudscraper
  Requires-Dist: emoji
+ Requires-Dist: colorlog
+ Requires-Dist: openai
  Provides-Extra: dev
  Requires-Dist: ruff>=0.1.6; extra == "dev"
  Requires-Dist: pytest>=7.4.2; extra == "dev"
@@ -404,7 +406,7 @@ for result in results:
  from webscout import BingS
  from rich import print
  searcher = BingS()
- results = searcher.search("Python development tools", max_results=30)
+ results = searcher.search("HelpingAI-9B", max_results=20, extract_webpage_text=True, max_extract_characters=1000)
  for result in results:
      print(result)
  ```
@@ -903,14 +905,34 @@ print(result)
  ___
  </details>

- ## usage of webscout AI
- ### 0. `Duckchat` - chat with LLM
+ ### Text to images - DeepInfraImager, PollinationsAI
+ ```python
+ from webscout import DeepInfraImager
+ bot = DeepInfraImager()
+ resp = bot.generate("AI-generated image - webscout", 1)
+ print(bot.save(resp))
+ ```
+
+ ### Text to Speach - Voicepods, StreamElements
+ ```python
+ from webscout import Voicepods
+ voicepods = Voicepods()
+ text = "Hello, this is a test of the Voicepods text-to-speech"
+
+ print("Generating audio...")
+ audio_file = voicepods.tts(text)
+
+ print("Playing audio...")
+ voicepods.play_audio(audio_file)
+ ```
+
+ ### `Duckchat` - chat with LLM
  ```python
  from webscout import WEBS as w
  R = w().chat("Who are you", model='gpt-4o-mini') # GPT-3.5 Turbo, mixtral-8x7b, llama-3-70b, claude-3-haiku, gpt-4o-mini
  print(R)
  ```
- ### 1. `PhindSearch` - Search using Phind.com
+ ### `PhindSearch` - Search using Phind.com

  ```python
  from webscout import PhindSearch
@@ -947,7 +969,7 @@ print(message)
  ```


- ### . `You.com` - search/chat with you.com - Not working
+ ### `You.com` - search/chat with you.com - Not working
  ```python

  from webscout import YouChat
@@ -974,7 +996,7 @@ message = ai.get_message(response)
  print(message)
  ```

- ### . `Gemini` - search with google gemini
+ ### `Gemini` - search with google gemini

  ```python
  import webscout
@@ -992,7 +1014,7 @@ gemini = GEMINI(cookie_file=COOKIE_FILE, proxy=PROXIES)
  response = gemini.chat("websearch about HelpingAI and who is its developer")
  print(response)
  ```
- ### . `Berlin4h` - chat with Berlin4h
+ ### `Berlin4h` - chat with Berlin4h
  ```python
  from webscout import Berlin4h

@@ -1013,7 +1035,7 @@ prompt = "Explain the concept of recursion in simple terms."
  response = ai.chat(prompt)
  print(response)
  ```
- ### . `BlackBox` - Search/chat With BlackBox
+ ### `BlackBox` - Search/chat With BlackBox
  ```python
  from webscout import BLACKBOXAI
  from rich import print
@@ -1044,7 +1066,7 @@ while True:
      r = ai.chat(prompt)
      print(r)
  ```
- ### . `PERPLEXITY` - Search With PERPLEXITY
+ ### `PERPLEXITY` - Search With PERPLEXITY
  ```python
  from webscout import Perplexity
  from rich import print
@@ -1470,7 +1492,7 @@ if "error" not in function_call_data:
  else:
      print(f"Error: {function_call_data['error']}")
  ```
- ### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek,
+ ### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek, NetFly, Editee
  code similar to other provider
  ### `LLM`
  ```python
@@ -1645,15 +1667,13 @@

  Webscout's `autollama` utility download model from huggingface and then automatically makes it ollama ready

- **Example:**
-
  ```python
  from webscout import autollama

- autollama(
-     model_path="OEvortex/HelpingAI-Lite-1.5T", # Hugging Face model ID
-     gguf_file="HelpingAI-Lite-1.5T.q4_k_m.gguf" # GGUF file ID
- )
+ model_path = "Vortex4ai/Jarvis-0.5B"
+ gguf_file = "test2-q4_k_m.gguf"
+
+ autollama.main(model_path, gguf_file)
  ```

  **Command Line Usage:**
@@ -327,7 +327,7 @@ for result in results:
  from webscout import BingS
  from rich import print
  searcher = BingS()
- results = searcher.search("Python development tools", max_results=30)
+ results = searcher.search("HelpingAI-9B", max_results=20, extract_webpage_text=True, max_extract_characters=1000)
  for result in results:
      print(result)
  ```
@@ -826,14 +826,34 @@ print(result)
  ___
  </details>

- ## usage of webscout AI
- ### 0. `Duckchat` - chat with LLM
+ ### Text to images - DeepInfraImager, PollinationsAI
+ ```python
+ from webscout import DeepInfraImager
+ bot = DeepInfraImager()
+ resp = bot.generate("AI-generated image - webscout", 1)
+ print(bot.save(resp))
+ ```
+
+ ### Text to Speach - Voicepods, StreamElements
+ ```python
+ from webscout import Voicepods
+ voicepods = Voicepods()
+ text = "Hello, this is a test of the Voicepods text-to-speech"
+
+ print("Generating audio...")
+ audio_file = voicepods.tts(text)
+
+ print("Playing audio...")
+ voicepods.play_audio(audio_file)
+ ```
+
+ ### `Duckchat` - chat with LLM
  ```python
  from webscout import WEBS as w
  R = w().chat("Who are you", model='gpt-4o-mini') # GPT-3.5 Turbo, mixtral-8x7b, llama-3-70b, claude-3-haiku, gpt-4o-mini
  print(R)
  ```
- ### 1. `PhindSearch` - Search using Phind.com
+ ### `PhindSearch` - Search using Phind.com

  ```python
  from webscout import PhindSearch
@@ -870,7 +890,7 @@ print(message)
  ```


- ### . `You.com` - search/chat with you.com - Not working
+ ### `You.com` - search/chat with you.com - Not working
  ```python

  from webscout import YouChat
@@ -897,7 +917,7 @@ message = ai.get_message(response)
  print(message)
  ```

- ### . `Gemini` - search with google gemini
+ ### `Gemini` - search with google gemini

  ```python
  import webscout
@@ -915,7 +935,7 @@ gemini = GEMINI(cookie_file=COOKIE_FILE, proxy=PROXIES)
  response = gemini.chat("websearch about HelpingAI and who is its developer")
  print(response)
  ```
- ### . `Berlin4h` - chat with Berlin4h
+ ### `Berlin4h` - chat with Berlin4h
  ```python
  from webscout import Berlin4h

@@ -936,7 +956,7 @@ prompt = "Explain the concept of recursion in simple terms."
  response = ai.chat(prompt)
  print(response)
  ```
- ### . `BlackBox` - Search/chat With BlackBox
+ ### `BlackBox` - Search/chat With BlackBox
  ```python
  from webscout import BLACKBOXAI
  from rich import print
@@ -967,7 +987,7 @@ while True:
      r = ai.chat(prompt)
      print(r)
  ```
- ### . `PERPLEXITY` - Search With PERPLEXITY
+ ### `PERPLEXITY` - Search With PERPLEXITY
  ```python
  from webscout import Perplexity
  from rich import print
@@ -1393,7 +1413,7 @@ if "error" not in function_call_data:
  else:
      print(f"Error: {function_call_data['error']}")
  ```
- ### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek,
+ ### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, XDASH, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek, NetFly, Editee
  code similar to other provider
  ### `LLM`
  ```python
@@ -1568,15 +1588,13 @@

  Webscout's `autollama` utility download model from huggingface and then automatically makes it ollama ready

- **Example:**
-
  ```python
  from webscout import autollama

- autollama(
-     model_path="OEvortex/HelpingAI-Lite-1.5T", # Hugging Face model ID
-     gguf_file="HelpingAI-Lite-1.5T.q4_k_m.gguf" # GGUF file ID
- )
+ model_path = "Vortex4ai/Jarvis-0.5B"
+ gguf_file = "test2-q4_k_m.gguf"
+
+ autollama.main(model_path, gguf_file)
  ```

  **Command Line Usage:**
@@ -5,8 +5,8 @@ with open("README.md", encoding="utf-8") as f:

  setup(
      name="webscout",
-     version="5.0",
-     description="Search for anything using Google, DuckDuckGo, brave, qwant, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more",
+     version="5.2",
+     description="Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more",
      long_description=README,
      long_description_content_type="text/markdown",
      author="OEvortex",
@@ -69,7 +69,9 @@ setup(
          "requests_html",
          "bson",
          "cloudscraper",
-         "emoji"
+         "emoji",
+         "colorlog",
+         "openai",
      ],
      entry_points={
          "console_scripts": [
@@ -0,0 +1,362 @@
+ from webscout.AIbase import Provider, AsyncProvider
+ from webscout.Provider.ThinkAnyAI import ThinkAnyAI
+ from webscout.Provider.Llama import LLAMA
+
+ from webscout.Provider.Koboldai import KOBOLDAI
+ from webscout.Provider.Koboldai import AsyncKOBOLDAI
+
+ from webscout.Provider.Perplexity import Perplexity
+ from webscout.Provider.Blackboxai import BLACKBOXAI
+ from webscout.Provider.Blackboxai import AsyncBLACKBOXAI
+ from webscout.Provider.Phind import PhindSearch
+ from webscout.Provider.Phind import AsyncPhindSearch
+ from webscout.Provider.Phind import Phindv2
+ from webscout.Provider.Phind import AsyncPhindv2
+ from webscout.Provider.yep import YEPCHAT
+ from webscout.Provider.Berlin4h import Berlin4h
+ from webscout.Provider.Poe import POE
+ from webscout.Provider.BasedGPT import BasedGPT
+ from webscout.Provider.Deepseek import DeepSeek
+ from webscout.Provider.Deepinfra import DeepInfra, VLM, AsyncDeepInfra
+ from webscout.Provider.OLLAMA import OLLAMA
+ from webscout.Provider.Andi import AndiSearch
+ from webscout.Provider.Llama3 import LLAMA3
+ from webscout.Provider.DARKAI import DARKAI
+ from webscout.Provider.koala import KOALA
+ from webscout.Provider.RUBIKSAI import RUBIKSAI
+ from webscout.Provider.meta import Meta
+ from webscout.Provider.liaobots import LiaoBots
+ from webscout.Provider.DiscordRocks import DiscordRocks
+ from webscout.Provider.felo_search import Felo
+ from webscout.Provider.xdash import XDASH
+ from webscout.Provider.julius import Julius
+ from webscout.Provider.Youchat import YouChat
+ from webscout.Provider.Cloudflare import Cloudflare
+ from webscout.Provider.turboseek import TurboSeek
+ from webscout.Provider.NetFly import NetFly
+ from webscout.Provider.EDITEE import Editee
+ # from webscout.Provider.Chatify import Chatify # TODO: UNFINISHED
+ from webscout.Provider.PI import PiAI
+ from webscout.g4f import GPT4FREE, AsyncGPT4FREE
+ from webscout.g4f import TestProviders
+ from webscout.exceptions import AllProvidersFailure
+ from typing import AsyncGenerator
+
+ from typing import Union
+ from typing import Any
+ import logging
+
+
+ provider_map: dict[
+     str,
+     Union[
+         ThinkAnyAI,
+         LLAMA,
+         KOBOLDAI,
+         Perplexity,
+         BLACKBOXAI,
+         PhindSearch,
+         Phindv2,
+         YEPCHAT,
+         Berlin4h,
+         POE,
+         BasedGPT,
+         DeepSeek,
+         DeepInfra,
+         VLM,
+         GPT4FREE,
+         OLLAMA,
+         AndiSearch,
+         LLAMA3,
+         DARKAI,
+         KOALA,
+         RUBIKSAI,
+         Meta,
+         LiaoBots,
+         DiscordRocks,
+         Felo,
+         XDASH,
+         Julius,
+         YouChat,
+         Cloudflare,
+         TurboSeek,
+         NetFly,
+         Editee,
+         # Chatify,
+         PiAI,
+     ],
+ ] = {
+     "ThinkAnyAI": ThinkAnyAI,
+     "LLAMA2": LLAMA,
+     "KOBOLDAI": KOBOLDAI,
+     "PERPLEXITY": Perplexity,
+     "BLACKBOXAI": BLACKBOXAI,
+     "PhindSearch": PhindSearch,
+     "Phindv2": Phindv2,
+     "YEPCHAT": YEPCHAT,
+     "Berlin4h": Berlin4h,
+     "POE": POE,
+     "BasedGPT": BasedGPT,
+     "DeepSeek": DeepSeek,
+     "DeepInfra": DeepInfra,
+     "VLM": VLM,
+     "gpt4free": GPT4FREE,
+     "ollama": OLLAMA,
+     "andi": AndiSearch,
+     "llama3": LLAMA3,
+     "darkai": DARKAI,
+     "koala": KOALA,
+     "rubiksai": RUBIKSAI,
+     "meta": Meta,
+     "liaobots": LiaoBots,
+     "discordrocks": DiscordRocks,
+     "felo": Felo,
+     "xdash": XDASH,
+     "julius": Julius,
+     "you": YouChat,
+     "cloudflare": Cloudflare,
+     "turboseek": TurboSeek,
+     "netfly": NetFly,
+     "editee": Editee,
+     # "chatify": Chatify,
+     "pi": PiAI,
+ }
+
+
+ class AUTO(Provider):
+     def __init__(
+         self,
+         is_conversation: bool = True,
+         max_tokens: int = 600,
+         timeout: int = 30,
+         intro: str = None,
+         filepath: str = None,
+         update_file: bool = True,
+         proxies: dict = {},
+         history_offset: int = 10250,
+         act: str = None,
+         exclude: list[str] = [],
+     ):
+         """Instantiates AUTO
+
+         Args:
+             is_conversation (bool, optional): Flag for chatting conversationally. Defaults to True
+             max_tokens (int, optional): Maximum number of tokens to be generated upon completion. Defaults to 600.
+             timeout (int, optional): Http request timeout. Defaults to 30.
+             intro (str, optional): Conversation introductory prompt. Defaults to None.
+             filepath (str, optional): Path to file containing conversation history. Defaults to None.
+             update_file (bool, optional): Add new prompts and responses to the file. Defaults to True.
+             proxies (dict, optional): Http request proxies. Defaults to {}.
+             history_offset (int, optional): Limit conversation history to this number of last texts. Defaults to 10250.
+             act (str|int, optional): Awesome prompt key or index. (Used as intro). Defaults to None.
+             exclude(list[str], optional): List of providers to be excluded. Defaults to [].
+         """
+         self.provider: Union[
+             ThinkAnyAI,
+             LLAMA,
+             KOBOLDAI,
+             Perplexity,
+             BLACKBOXAI,
+             PhindSearch,
+             Phindv2,
+             YEPCHAT,
+             Berlin4h,
+             POE,
+             BasedGPT,
+             DeepSeek,
+             DeepInfra,
+             VLM,
+             GPT4FREE,
+             OLLAMA,
+             AndiSearch,
+             LLAMA3,
+             DARKAI,
+             KOALA,
+             RUBIKSAI,
+             Meta,
+             LiaoBots,
+             DiscordRocks,
+             Felo,
+             XDASH,
+             Julius,
+             YouChat,
+             Cloudflare,
+             TurboSeek,
+             NetFly,
+             Editee,
+             # Chatify,
+             PiAI,
+         ] = None
+         self.provider_name: str = None
+         self.is_conversation = is_conversation
+         self.max_tokens = max_tokens
+         self.timeout = timeout
+         self.intro = intro
+         self.filepath = filepath
+         self.update_file = update_file
+         self.proxies = proxies
+         self.history_offset = history_offset
+         self.act = act
+         self.exclude = exclude
+
+     @property
+     def last_response(self) -> dict[str, Any]:
+         return self.provider.last_response
+
+     @property
+     def conversation(self) -> object:
+         return self.provider.conversation
+
+     def ask(
+         self,
+         prompt: str,
+         stream: bool = False,
+         raw: bool = False,
+         optimizer: str = None,
+         conversationally: bool = False,
+         run_new_test: bool = False,
+     ) -> dict:
+         """Chat with AI
+
+         Args:
+             prompt (str): Prompt to be send.
+             stream (bool, optional): Flag for streaming response. Defaults to False.
+             raw (bool, optional): Stream back raw response as received. Defaults to False.
+             optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
+             conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
+             run_new_test (bool, optional): Perform new test on g4f-based providers. Defaults to False.
+         Returns:
+             dict : {}
+         """
+         ask_kwargs: dict[str, Union[str, bool]] = {
+             "prompt": prompt,
+             "stream": stream,
+             "raw": raw,
+             "optimizer": optimizer,
+             "conversationally": conversationally,
+         }
+
+         # webscout-based providers
+         for provider_name, provider_obj in provider_map.items():
+             # continue
+             if provider_name in self.exclude:
+                 continue
+             try:
+                 self.provider_name = f"webscout-{provider_name}"
+                 self.provider = provider_obj(
+                     is_conversation=self.is_conversation,
+                     max_tokens=self.max_tokens,
+                     timeout=self.timeout,
+                     intro=self.intro,
+                     filepath=self.filepath,
+                     update_file=self.update_file,
+                     proxies=self.proxies,
+                     history_offset=self.history_offset,
+                     act=self.act,
+                 )
+
+                 def for_stream():
+                     for chunk in self.provider.ask(**ask_kwargs):
+                         yield chunk
+
+                 def for_non_stream():
+                     return self.provider.ask(**ask_kwargs)
+
+                 return for_stream() if stream else for_non_stream()
+
+             except Exception as e:
+                 logging.debug(
+                     f"Failed to generate response using provider {provider_name} - {e}"
+                 )
+
+         # g4f-based providers
+
+         for provider_info in TestProviders(timeout=self.timeout).get_results(
+             run=run_new_test
+         ):
+             if provider_info["name"] in self.exclude:
+                 continue
+             try:
+                 self.provider_name = f"g4f-{provider_info['name']}"
+                 self.provider = GPT4FREE(
+                     provider=provider_info["name"],
+                     is_conversation=self.is_conversation,
+                     max_tokens=self.max_tokens,
+                     intro=self.intro,
+                     filepath=self.filepath,
+                     update_file=self.update_file,
+                     proxies=self.proxies,
+                     history_offset=self.history_offset,
+                     act=self.act,
+                 )
+
+                 def for_stream():
+                     for chunk in self.provider.ask(**ask_kwargs):
+                         yield chunk
+
+                 def for_non_stream():
+                     return self.provider.ask(**ask_kwargs)
+
+                 return for_stream() if stream else for_non_stream()
+
+             except Exception as e:
+                 logging.debug(
+                     f"Failed to generate response using GPT4FREE-base provider {provider_name} - {e}"
+                 )
+
+         raise AllProvidersFailure(
+             "None of the providers generated response successfully."
+         )
+
+     def chat(
+         self,
+         prompt: str,
+         stream: bool = False,
+         optimizer: str = None,
+         conversationally: bool = False,
+         run_new_test: bool = False,
+     ) -> str:
+         """Generate response `str`
+         Args:
+             prompt (str): Prompt to be send.
+             stream (bool, optional): Flag for streaming response. Defaults to False.
+             optimizer (str, optional): Prompt optimizer name - `[code, shell_command]`. Defaults to None.
+             conversationally (bool, optional): Chat conversationally when using optimizer. Defaults to False.
+             run_new_test (bool, optional): Perform new test on g4f-based providers. Defaults to False.
+         Returns:
+             str: Response generated
+         """
+
+         def for_stream():
+             for response in self.ask(
+                 prompt,
+                 True,
+                 optimizer=optimizer,
+                 conversationally=conversationally,
+                 run_new_test=run_new_test,
+             ):
+                 yield self.get_message(response)
+
+         def for_non_stream():
+             ask_response = self.ask(
+                 prompt,
+                 False,
+                 optimizer=optimizer,
+                 conversationally=conversationally,
+                 run_new_test=run_new_test,
+             )
+             return self.get_message(ask_response)
+
+         return for_stream() if stream else for_non_stream()
+
+     def get_message(self, response: dict) -> str:
+         """Retrieves message only from response
+
+         Args:
+             response (dict): Response generated by `self.ask`
+
+         Returns:
+             str: Message extracted
+         """
+         assert self.provider is not None, "Chat with AI first"
+         return self.provider.get_message(response)
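For orientation, the `AUTO` class added above walks `provider_map` in order, falls back to the g4f-based providers, and raises `AllProvidersFailure` only if every provider errors out. A minimal usage sketch (not part of the released files; the prompt text and the excluded provider name are illustrative):

```python
# Hedged sketch of the AUTO fallback provider introduced in webscout-5.2/webscout/AIauto.py.
# Assumes webscout 5.2 is installed; the prompt and the exclude list are illustrative.
from webscout.AIauto import AUTO

bot = AUTO(timeout=30, exclude=["POE"])  # skip providers by their provider_map key
reply = bot.chat("Who are you?")         # tries each provider in turn until one responds
print(bot.provider_name)                 # name of the provider that actually answered
print(reply)
```

Because `AUTO` subclasses `Provider` and exposes the same `ask`/`chat`/`get_message` interface, it can stand in wherever a single webscout provider is used.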