webscout-2.7.tar.gz → webscout-2.9.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of webscout might be problematic. Click here for more details.

Files changed (73)
  1. {webscout-2.7 → webscout-2.9}/PKG-INFO +4 -1
  2. {webscout-2.7 → webscout-2.9}/setup.py +4 -1
  3. webscout-2.9/webscout/LLM.py +100 -0
  4. {webscout-2.7 → webscout-2.9}/webscout/Local/_version.py +1 -1
  5. {webscout-2.7 → webscout-2.9}/webscout/Local/utils.py +3 -2
  6. webscout-2.9/webscout/version.py +2 -0
  7. {webscout-2.7 → webscout-2.9}/webscout.egg-info/PKG-INFO +4 -1
  8. {webscout-2.7 → webscout-2.9}/webscout.egg-info/requires.txt +3 -0
  9. webscout-2.7/webscout/LLM.py +0 -45
  10. webscout-2.7/webscout/version.py +0 -2
  11. {webscout-2.7 → webscout-2.9}/DeepWEBS/__init__.py +0 -0
  12. {webscout-2.7 → webscout-2.9}/DeepWEBS/documents/__init__.py +0 -0
  13. {webscout-2.7 → webscout-2.9}/DeepWEBS/documents/query_results_extractor.py +0 -0
  14. {webscout-2.7 → webscout-2.9}/DeepWEBS/documents/webpage_content_extractor.py +0 -0
  15. {webscout-2.7 → webscout-2.9}/DeepWEBS/networks/__init__.py +0 -0
  16. {webscout-2.7 → webscout-2.9}/DeepWEBS/networks/filepath_converter.py +0 -0
  17. {webscout-2.7 → webscout-2.9}/DeepWEBS/networks/google_searcher.py +0 -0
  18. {webscout-2.7 → webscout-2.9}/DeepWEBS/networks/network_configs.py +0 -0
  19. {webscout-2.7 → webscout-2.9}/DeepWEBS/networks/webpage_fetcher.py +0 -0
  20. {webscout-2.7 → webscout-2.9}/DeepWEBS/utilsdw/__init__.py +0 -0
  21. {webscout-2.7 → webscout-2.9}/DeepWEBS/utilsdw/enver.py +0 -0
  22. {webscout-2.7 → webscout-2.9}/DeepWEBS/utilsdw/logger.py +0 -0
  23. {webscout-2.7 → webscout-2.9}/LICENSE.md +0 -0
  24. {webscout-2.7 → webscout-2.9}/README.md +0 -0
  25. {webscout-2.7 → webscout-2.9}/setup.cfg +0 -0
  26. {webscout-2.7 → webscout-2.9}/webscout/AIauto.py +0 -0
  27. {webscout-2.7 → webscout-2.9}/webscout/AIbase.py +0 -0
  28. {webscout-2.7 → webscout-2.9}/webscout/AIutel.py +0 -0
  29. {webscout-2.7 → webscout-2.9}/webscout/DWEBS.py +0 -0
  30. {webscout-2.7 → webscout-2.9}/webscout/Local/__init__.py +0 -0
  31. {webscout-2.7 → webscout-2.9}/webscout/Local/formats.py +0 -0
  32. {webscout-2.7 → webscout-2.9}/webscout/Local/model.py +0 -0
  33. {webscout-2.7 → webscout-2.9}/webscout/Local/samplers.py +0 -0
  34. {webscout-2.7 → webscout-2.9}/webscout/Local/thread.py +0 -0
  35. {webscout-2.7 → webscout-2.9}/webscout/Provider/BasedGPT.py +0 -0
  36. {webscout-2.7 → webscout-2.9}/webscout/Provider/Berlin4h.py +0 -0
  37. {webscout-2.7 → webscout-2.9}/webscout/Provider/Blackboxai.py +0 -0
  38. {webscout-2.7 → webscout-2.9}/webscout/Provider/ChatGPTUK.py +0 -0
  39. {webscout-2.7 → webscout-2.9}/webscout/Provider/Cohere.py +0 -0
  40. {webscout-2.7 → webscout-2.9}/webscout/Provider/Gemini.py +0 -0
  41. {webscout-2.7 → webscout-2.9}/webscout/Provider/Groq.py +0 -0
  42. {webscout-2.7 → webscout-2.9}/webscout/Provider/Koboldai.py +0 -0
  43. {webscout-2.7 → webscout-2.9}/webscout/Provider/Leo.py +0 -0
  44. {webscout-2.7 → webscout-2.9}/webscout/Provider/Llama2.py +0 -0
  45. {webscout-2.7 → webscout-2.9}/webscout/Provider/OpenGPT.py +0 -0
  46. {webscout-2.7 → webscout-2.9}/webscout/Provider/Openai.py +0 -0
  47. {webscout-2.7 → webscout-2.9}/webscout/Provider/Perplexity.py +0 -0
  48. {webscout-2.7 → webscout-2.9}/webscout/Provider/Phind.py +0 -0
  49. {webscout-2.7 → webscout-2.9}/webscout/Provider/Poe.py +0 -0
  50. {webscout-2.7 → webscout-2.9}/webscout/Provider/Reka.py +0 -0
  51. {webscout-2.7 → webscout-2.9}/webscout/Provider/ThinkAnyAI.py +0 -0
  52. {webscout-2.7 → webscout-2.9}/webscout/Provider/Xjai.py +0 -0
  53. {webscout-2.7 → webscout-2.9}/webscout/Provider/Yepchat.py +0 -0
  54. {webscout-2.7 → webscout-2.9}/webscout/Provider/Youchat.py +0 -0
  55. {webscout-2.7 → webscout-2.9}/webscout/Provider/__init__.py +0 -0
  56. {webscout-2.7 → webscout-2.9}/webscout/__init__.py +0 -0
  57. {webscout-2.7 → webscout-2.9}/webscout/__main__.py +0 -0
  58. {webscout-2.7 → webscout-2.9}/webscout/async_providers.py +0 -0
  59. {webscout-2.7 → webscout-2.9}/webscout/cli.py +0 -0
  60. {webscout-2.7 → webscout-2.9}/webscout/exceptions.py +0 -0
  61. {webscout-2.7 → webscout-2.9}/webscout/g4f.py +0 -0
  62. {webscout-2.7 → webscout-2.9}/webscout/models.py +0 -0
  63. {webscout-2.7 → webscout-2.9}/webscout/tempid.py +0 -0
  64. {webscout-2.7 → webscout-2.9}/webscout/transcriber.py +0 -0
  65. {webscout-2.7 → webscout-2.9}/webscout/utils.py +0 -0
  66. {webscout-2.7 → webscout-2.9}/webscout/voice.py +0 -0
  67. {webscout-2.7 → webscout-2.9}/webscout/webai.py +0 -0
  68. {webscout-2.7 → webscout-2.9}/webscout/webscout_search.py +0 -0
  69. {webscout-2.7 → webscout-2.9}/webscout/webscout_search_async.py +0 -0
  70. {webscout-2.7 → webscout-2.9}/webscout.egg-info/SOURCES.txt +0 -0
  71. {webscout-2.7 → webscout-2.9}/webscout.egg-info/dependency_links.txt +0 -0
  72. {webscout-2.7 → webscout-2.9}/webscout.egg-info/entry_points.txt +0 -0
  73. {webscout-2.7 → webscout-2.9}/webscout.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: webscout
3
- Version: 2.7
3
+ Version: 2.9
4
4
  Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs
5
5
  Author: OEvortex
6
6
  Author-email: helpingai5@gmail.com
@@ -52,6 +52,8 @@ Requires-Dist: tls_client
52
52
  Requires-Dist: clipman
53
53
  Requires-Dist: Helpingai-T2
54
54
  Requires-Dist: playsound
55
+ Requires-Dist: poe_api_wrapper
56
+ Requires-Dist: pyreqwest_impersonate
55
57
  Provides-Extra: dev
56
58
  Requires-Dist: ruff>=0.1.6; extra == "dev"
57
59
  Requires-Dist: pytest>=7.4.2; extra == "dev"
@@ -59,6 +61,7 @@ Provides-Extra: local
59
61
  Requires-Dist: llama-cpp-python; extra == "local"
60
62
  Requires-Dist: colorama; extra == "local"
61
63
  Requires-Dist: numpy; extra == "local"
64
+ Requires-Dist: huggingface_hub; extra == "local"
62
65
 
63
66
  <div align="center">
64
67
  <!-- Replace `#` with your actual links -->
@@ -5,7 +5,7 @@ with open("README.md", encoding="utf-8") as f:
5
5
 
6
6
  setup(
7
7
  name="webscout",
8
- version="2.7",
8
+ version="2.9",
9
9
  description="Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs",
10
10
  long_description=README,
11
11
  long_description_content_type="text/markdown",
@@ -56,6 +56,8 @@ setup(
56
56
  "clipman",
57
57
  "Helpingai-T2",
58
58
  "playsound",
59
+ "poe_api_wrapper",
60
+ "pyreqwest_impersonate"
59
61
  ],
60
62
  entry_points={
61
63
  "console_scripts": [
@@ -72,6 +74,7 @@ setup(
72
74
  'llama-cpp-python',
73
75
  'colorama',
74
76
  'numpy',
77
+ 'huggingface_hub',
75
78
  ],
76
79
  },
77
80
  license="HelpingAI Simplified Universal License",
@@ -0,0 +1,100 @@
1
+ import requests
2
+ import base64
3
+ from typing import List, Dict, Union
4
+ import json
5
+ import requests
6
+ import base64
7
+ from typing import List, Dict, Union
8
+
9
+ class LLM:
10
+ def __init__(self, model: str, system_message: str = "You are a Helpful AI."):
11
+ self.model = model
12
+ self.conversation_history = [{"role": "system", "content": system_message}]
13
+
14
+ def chat(self, messages: List[Dict[str, str]]) -> Union[str, None]:
15
+ url = "https://api.deepinfra.com/v1/openai/chat/completions"
16
+ headers = {
17
+ 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
18
+ 'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
19
+ 'Cache-Control': 'no-cache',
20
+ 'Connection': 'keep-alive',
21
+ 'Content-Type': 'application/json',
22
+ 'Origin': 'https://deepinfra.com',
23
+ 'Pragma': 'no-cache',
24
+ 'Referer': 'https://deepinfra.com/',
25
+ 'Sec-Fetch-Dest': 'empty',
26
+ 'Sec-Fetch-Mode': 'cors',
27
+ 'Sec-Fetch-Site': 'same-site',
28
+ 'X-Deepinfra-Source': 'web-embed',
29
+ 'accept': 'text/event-stream',
30
+ 'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
31
+ 'sec-ch-ua-mobile': '?0',
32
+ 'sec-ch-ua-platform': '"macOS"'
33
+ }
34
+ data = json.dumps(
35
+ {
36
+ 'model': self.model,
37
+ 'messages': messages,
38
+ 'temperature': 0.7,
39
+ 'max_tokens': 8028,
40
+ 'stop': [],
41
+ 'stream': False #dont change it
42
+ }, separators=(',', ':')
43
+ )
44
+ try:
45
+ result = requests.post(url=url, data=data, headers=headers)
46
+ return result.json()['choices'][0]['message']['content']
47
+ except:
48
+ return None
49
+ # def main():
50
+ # llm = LLM(model="meta-llama/Meta-Llama-3-70B-Instruct")
51
+ # messages = [
52
+ # {"role": "user", "content": "Hello, how are you?"}
53
+ # ]
54
+ # response = llm.chat(messages)
55
+ # print(response)
56
+
57
+ # if __name__ == "__main__":
58
+ # main()
59
+
60
+
61
class VLM:
    """Minimal vision-language client for the DeepInfra chat-completions endpoint.

    Like ``LLM`` but the message content may carry nested image parts
    (e.g. base64 data URLs) alongside text.
    """

    def __init__(self, model: str, system_message: str = "You are a Helpful AI."):
        # Model identifier as understood by DeepInfra.
        self.model = model
        # Seeded history; not sent automatically by chat().
        self.conversation_history = [{"role": "system", "content": system_message}]

    def chat(self, messages: List[Dict[str, Union[str, List[Dict[str, Union[str, Dict[str, str]]]]]]]) -> Union[str, None]:
        """Send *messages* (text and/or image parts) and return the reply text.

        Returns the assistant message content, or ``None`` on any failure
        (the error is printed, preserving the original best-effort behavior).
        """
        api_url = "https://api.deepinfra.com/v1/openai/chat/completions"
        # Browser-like headers; the endpoint is used via the web-embed source tag.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
            'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
            'Content-Type': 'application/json',
            'Origin': 'https://deepinfra.com',
            'Pragma': 'no-cache',
            'Referer': 'https://deepinfra.com/',
            'Sec-Fetch-Dest': 'empty',
            'Sec-Fetch-Mode': 'cors',
            'Sec-Fetch-Site': 'same-site',
            'X-Deepinfra-Source': 'web-embed',
            'accept': 'text/event-stream',
            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"'
        }
        payload = {
            "model": self.model,
            "messages": messages,
            "stream": False
        }
        try:
            # Timeout added so a stalled connection cannot hang the caller forever.
            response = requests.post(api_url, headers=headers, json=payload, timeout=30)
            return response.json()['choices'][0]['message']['content']
        except Exception as e:
            print(f"An error occurred: {e}")
            return None
97
+
98
def encode_image_to_base64(image_path: str) -> str:
    """Read the file at *image_path* and return its bytes as a base64 text string."""
    with open(image_path, "rb") as fh:
        raw = fh.read()
    encoded = base64.b64encode(raw)
    return encoded.decode("utf-8")
@@ -1,3 +1,3 @@
1
1
  from llama_cpp import __version__ as __llama_cpp_version__
2
2
 
3
- __version__ = '2.7'
3
+ __version__ = '2.9'
@@ -25,18 +25,19 @@ class _ArrayLike(Iterable):
25
25
  class _SupportsWriteAndFlush(TextIO):
26
26
  pass
27
27
 
28
- def download_model(repo_id: str, filename: str, cache_dir: str = ".cache") -> str:
28
+ def download_model(repo_id: str, filename: str, token: str, cache_dir: str = ".cache") -> str:
29
29
  """
30
30
  Downloads a GGUF model file from Hugging Face Hub.
31
31
 
32
32
  repo_id: The Hugging Face repository ID (e.g., 'facebook/bart-large-cnn').
33
33
  filename: The name of the GGUF file within the repository (e.g., 'model.gguf').
34
+ token: The Hugging Face token for authentication.
34
35
  cache_dir: The directory where the downloaded file should be stored.
35
36
 
36
37
  Returns: The path to the downloaded file.
37
38
  """
38
39
  url = hf_hub_url(repo_id, filename)
39
- filepath = cached_download(url, cache_dir=cache_dir, force_filename=filename)
40
+ filepath = cached_download(url, cache_dir=cache_dir, force_filename=filename, use_auth_token=token)
40
41
  return filepath
41
42
 
42
43
  class GGUFReader:
@@ -0,0 +1,2 @@
1
+ __version__ = "2.9"
2
+
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: webscout
3
- Version: 2.7
3
+ Version: 2.9
4
4
  Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs
5
5
  Author: OEvortex
6
6
  Author-email: helpingai5@gmail.com
@@ -52,6 +52,8 @@ Requires-Dist: tls_client
52
52
  Requires-Dist: clipman
53
53
  Requires-Dist: Helpingai-T2
54
54
  Requires-Dist: playsound
55
+ Requires-Dist: poe_api_wrapper
56
+ Requires-Dist: pyreqwest_impersonate
55
57
  Provides-Extra: dev
56
58
  Requires-Dist: ruff>=0.1.6; extra == "dev"
57
59
  Requires-Dist: pytest>=7.4.2; extra == "dev"
@@ -59,6 +61,7 @@ Provides-Extra: local
59
61
  Requires-Dist: llama-cpp-python; extra == "local"
60
62
  Requires-Dist: colorama; extra == "local"
61
63
  Requires-Dist: numpy; extra == "local"
64
+ Requires-Dist: huggingface_hub; extra == "local"
62
65
 
63
66
  <div align="center">
64
67
  <!-- Replace `#` with your actual links -->
@@ -26,6 +26,8 @@ tls_client
26
26
  clipman
27
27
  Helpingai-T2
28
28
  playsound
29
+ poe_api_wrapper
30
+ pyreqwest_impersonate
29
31
 
30
32
  [dev]
31
33
  ruff>=0.1.6
@@ -35,3 +37,4 @@ pytest>=7.4.2
35
37
  llama-cpp-python
36
38
  colorama
37
39
  numpy
40
+ huggingface_hub
@@ -1,45 +0,0 @@
1
- import argparse
2
- import requests
3
- import json
4
- from typing import List, Dict, Union
5
-
6
- class LLM:
7
- def __init__(self, model: str, system_message: str = "You are a Helpful AI."):
8
- self.model = model
9
- self.conversation_history = [{"role": "system", "content": system_message}]
10
-
11
- def chat(self, messages: List[Dict[str, str]]) -> Union[str, None]:
12
- url = "https://api.deepinfra.com/v1/openai/chat/completions"
13
- headers = {
14
- 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
15
- 'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
16
- 'Cache-Control': 'no-cache',
17
- 'Connection': 'keep-alive',
18
- 'Content-Type': 'application/json',
19
- 'Origin': 'https://deepinfra.com',
20
- 'Pragma': 'no-cache',
21
- 'Referer': 'https://deepinfra.com/',
22
- 'Sec-Fetch-Dest': 'empty',
23
- 'Sec-Fetch-Mode': 'cors',
24
- 'Sec-Fetch-Site': 'same-site',
25
- 'X-Deepinfra-Source': 'web-embed',
26
- 'accept': 'text/event-stream',
27
- 'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
28
- 'sec-ch-ua-mobile': '?0',
29
- 'sec-ch-ua-platform': '"macOS"'
30
- }
31
- data = json.dumps(
32
- {
33
- 'model': self.model,
34
- 'messages': messages,
35
- 'temperature': 0.7,
36
- 'max_tokens': 8028,
37
- 'stop': [],
38
- 'stream': False #dont change it
39
- }, separators=(',', ':')
40
- )
41
- try:
42
- result = requests.post(url=url, data=data, headers=headers)
43
- return result.json()['choices'][0]['message']['content']
44
- except:
45
- return None
@@ -1,2 +0,0 @@
1
- __version__ = "2.7"
2
-
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes