webscout 1.3.0__py3-none-any.whl → 1.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of webscout might be problematic.
- webscout/LLM.py +1 -24
- webscout/__init__.py +1 -0
- webscout/version.py +1 -1
- {webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/METADATA +99 -7
- {webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/RECORD +9 -9
- {webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/LICENSE.md +0 -0
- {webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/WHEEL +0 -0
- {webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/entry_points.txt +0 -0
- {webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/top_level.txt +0 -0
webscout/LLM.py
CHANGED
```diff
@@ -8,7 +8,7 @@ class LLM:
         self.model = model
         self.conversation_history = [{"role": "system", "content": system_message}]
 
-    def
+    def chat(self, messages: List[Dict[str, str]]) -> Union[str, None]:
         url = "https://api.deepinfra.com/v1/openai/chat/completions"
         headers = {
             'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
@@ -43,26 +43,3 @@ class LLM:
             return result.json()['choices'][0]['message']['content']
         except:
             return None
-
-    def chat(self):
-        while True:
-            prompt = input("👦: ")
-            user_message = {"role": "user", "content": prompt}
-            self.conversation_history.append(user_message)
-            try:
-                resp = self.mistral_chat(self.conversation_history)
-                print(f"🤖: {resp}")
-                self.conversation_history.append({"role": "assistant", "content": resp})
-            except Exception as e:
-                print(f"🤖: Oops, something went wrong: {e}! Looks like even AI needs some oiling sometimes.")
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description='LLM CLI', epilog='To use a specific model, run:\n'
-                                     'python -m webscout.LLM model_name\n'
-                                     'Replace "model_name" with the name of the model you wish to use It supports ALL text generation models on deepinfra.com.')
-    parser.add_argument('model', type=str, help='Model to use for text generation. Specify the full model name, e.g., "mistralai/Mistral-7B-Instruct-v0.1".')
-    parser.add_argument('--system-message', type=str, default="You are a Helpful AI.", help='Custom system prompt for the AI.')
-    args = parser.parse_args()
-
-    LLM = LLM(args.model, args.system_message)
-    LLM.chat()
```
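In 1.3.1 the interactive CLI loop and the `argparse` entry point are removed, and the request method is now exposed as `chat(messages)`, returning the reply string or `None` on failure. A minimal sketch of calling it directly, assuming the constructor still takes the model name and system message positionally as in the removed `__main__` block:

```python
from webscout.LLM import LLM

# Model name and system prompt as used by the removed CLI defaults;
# any deepinfra text-generation model name should be accepted here.
llm = LLM("mistralai/Mistral-7B-Instruct-v0.1", "You are a Helpful AI.")

# chat() now takes the OpenAI-style message list directly and returns the reply, or None on error.
reply = llm.chat([{"role": "user", "content": "Hello!"}])
print(reply)
```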
webscout/__init__.py
CHANGED
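The single added line is not shown in this diff. The new README examples in this release import the classes from the package root, which suggests the change is a top-level re-export; the imports those examples rely on are:

```python
# Imports used by the 1.3.1 README examples; the actual contents of the
# added __init__.py line are not shown in this diff.
from webscout import LLM
from webscout import WEBS
```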
webscout/version.py
CHANGED
```diff
@@ -1,2 +1,2 @@
-__version__ = "1.3.0"
+__version__ = "1.3.1"
 
```
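A quick way to confirm which release is installed after upgrading, reading the same attribute this file defines:

```python
# Prints "1.3.1" once the new wheel is installed.
from webscout.version import __version__
print(__version__)
```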
{webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/METADATA
CHANGED
````diff
@@ -1,7 +1,7 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 1.3.0
-Summary: Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models
+Version: 1.3.1
+Summary: Search for words, documents, images, videos, news, maps and text translation using the Google, DuckDuckGo.com, yep.com, phind.com, you.com, etc Also containes AI models, can transcribe yt videos and have TTS support
 Author: OEvortex
 Author-email: helpingai5@gmail.com
 License: HelpingAI Simplified Universal License
@@ -99,6 +99,7 @@ Search for words, documents, images, videos, news, maps and text translation usi
 - [9. `KOBOLDIA` -](#9-koboldia--)
 - [usage of special .LLM file from webscout (webscout.LLM)](#usage-of-special-llm-file-from-webscout-webscoutllm)
 - [`LLM`](#llm)
+- [`LLM` with internet](#llm-with-internet)
 
 ## Install
 ```python
@@ -704,11 +705,102 @@ print(message)
 ```python
 from webscout.LLM import LLM
 
-
-
-
+# Read the system message from the file
+with open('system.txt', 'r') as file:
+    system_message = file.read()
+
+# Initialize the LLM class with the model name and system message
+llm = LLM(model="microsoft/WizardLM-2-8x22B", system_message=system_message)
+
+while True:
+    # Get the user input
+    user_input = input("User: ")
+
+    # Define the messages to be sent
+    messages = [
+        {"role": "user", "content": user_input}
+    ]
+
+    # Use the mistral_chat method to get the response
+    response = llm.chat(messages)
+
+    # Print the response
+    print("AI: ", response)
+```
+### `LLM` with internet
+```python
+from __future__ import annotations
+from typing import List, Optional
+
+from webscout import LLM
+from webscout import WEBS
+import warnings
+
+system_message: str = (
+    "As AI, you possess internet access and are capable of executing real-time web searches based on user inputs. "
+    "You shall utilize this capability to enrich conversations, offer informed insights, and augment your ability to "
+    "respond accurately and thoroughly. However, refrain from stating 'You have provided a list of strings,' ensuring "
+    "seamless interactions with users. Embrace a responsive demeanor, harnessing available online resources to address "
+    "queries, share pertinent content, and facilitate meaningful exchanges. By doing so, you create value through "
+    "connection and engagement, ultimately enhancing overall user satisfaction and experience. Additionally, "
+    "continue upholding the principles of respect, impartiality, and intellectual integrity throughout all interactions."
+)
+
+# Ignore the specific UserWarning
+warnings.filterwarnings("ignore", category=UserWarning, module="curl_cffi.aio", lineno=205)
+LLM = LLM(model="meta-llama/Meta-Llama-3-70B-Instruct", system_message=system_message)
+
+
+def chat(
+    user_input: str, webs: WEBS, max_results: int = 10
+) -> Optional[str]:
+    """
+    Chat function to perform a web search based on the user input and generate a response using the LLM model.
+
+    Parameters
+    ----------
+    user_input : str
+        The user input to be used for the web search
+    webs : WEBS
+        The web search instance to be used to perform the search
+    max_results : int, optional
+        The maximum number of search results to include in the response, by default 10
+
+    Returns
+    -------
+    Optional[str]
+        The response generated by the LLM model, or None if there is no response
+    """
+    # Perform a web search based on the user input
+    search_results: List[str] = []
+    for r in webs.text(
+        user_input, region="wt-wt", safesearch="off", timelimit="y", max_results=max_results
+    ):
+        search_results.append(str(r))  # Convert each result to a string
+
+    # Define the messages to be sent, including the user input, search results, and system message
+    messages = [
+        {"role": "user", "content": user_input + "\n" + "websearch results are:" + "\n".join(search_results)},
+    ]
+
+    # Use the chat method to get the response
+    response = LLM.chat(messages)
+
+    return response
+
 
 if __name__ == "__main__":
-
-
+    while True:
+        # Get the user input
+        user_input = input("User: ")
+
+        # Perform a web search based on the user input
+        with WEBS() as webs:
+            response = chat(user_input, webs)
+
+        # Print the response
+        if response:
+            print("AI:", response)
+        else:
+            print("No response")
 ```
````
{webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/RECORD
CHANGED
```diff
@@ -15,21 +15,21 @@ webscout/AIbase.py,sha256=vQi2ougu5bG-QdmoYmxCQsOg7KTEgG7EF6nZh5qqUGw,2343
 webscout/AIutel.py,sha256=fNN4mmjXcxjJGq2CVJP1MU2oQ78p8OyExQBjVif6e-k,24123
 webscout/DWEBS.py,sha256=QT-7-dUgWhQ_H7EVZD53AVyXxyskoPMKCkFIpzkN56Q,7332
 webscout/HelpingAI.py,sha256=YeZw0zYVHMcBFFPNdd3_Ghpm9ebt_EScQjHO_IIs4lg,8103
-webscout/LLM.py,sha256=
-webscout/__init__.py,sha256=
+webscout/LLM.py,sha256=CiDz0okZNEoXuxMwadZnwRGSLpqk2zg0vzvXSxQZjcE,1910
+webscout/__init__.py,sha256=rgmTILV0qx0x2PVdMq7flk5nas102sQN5z8p_OZaTzg,572
 webscout/__main__.py,sha256=ZtTRgsRjUi2JOvYFLF1ZCh55Sdoz94I-BS-TlJC7WDU,126
 webscout/cli.py,sha256=F888fdrFUQgczMBN4yMOSf6Nh-IbvkqpPhDsbnA2FtQ,17059
 webscout/exceptions.py,sha256=4AOO5wexeL96nvUS-badcckcwrPS7UpZyAgB9vknHZE,276
 webscout/models.py,sha256=5iQIdtedT18YuTZ3npoG7kLMwcrKwhQ7928dl_7qZW0,692
 webscout/transcriber.py,sha256=EddvTSq7dPJ42V3pQVnGuEiYQ7WjJ9uyeR9kMSxN7uY,20622
 webscout/utils.py,sha256=c_98M4oqpb54pUun3fpGGlCerFD6ZHUbghyp5b7Mwgo,2605
-webscout/version.py,sha256=
+webscout/version.py,sha256=xBny7vFjm_5sB7eDRX5tmYecHaNfJlEkwuSeLudsFXo,25
 webscout/voice.py,sha256=1Ids_2ToPBMX0cH_UyPMkY_6eSE9H4Gazrl0ujPmFag,941
 webscout/webscout_search.py,sha256=3_lli-hDb8_kCGwscK29xuUcOS833ROgpNhDzrxh0dk,3085
 webscout/webscout_search_async.py,sha256=Y5frH0k3hLqBCR-8dn7a_b7EvxdYxn6wHiKl3jWosE0,40670
-webscout-1.3.
-webscout-1.3.
-webscout-1.3.
-webscout-1.3.
-webscout-1.3.
-webscout-1.3.
+webscout-1.3.1.dist-info/LICENSE.md,sha256=mRVwJuT4SXC5O93BFdsfWBjlXjGn2Np90Zm5SocUzM0,3150
+webscout-1.3.1.dist-info/METADATA,sha256=Lqt3sYRj9TpssiZ_mD3z0Sh_UrdVPPm5L9o0hPAIFWw,31547
+webscout-1.3.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+webscout-1.3.1.dist-info/entry_points.txt,sha256=8-93eRslYrzTHs5E-6yFRJrve00C9q-SkXJD113jzRY,197
+webscout-1.3.1.dist-info/top_level.txt,sha256=OD5YKy6Y3hldL7SmuxsiEDxAG4LgdSSWwzYk22MF9fk,18
+webscout-1.3.1.dist-info/RECORD,,
```
{webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/LICENSE.md
File without changes
{webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/WHEEL
File without changes
{webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/entry_points.txt
File without changes
{webscout-1.3.0.dist-info → webscout-1.3.1.dist-info}/top_level.txt
File without changes