webscout-6.0-py3-none-any.whl → webscout-6.2b0-py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
Potentially problematic release: this version of webscout might be problematic.
- webscout/AIauto.py +77 -259
- webscout/Agents/Onlinesearcher.py +22 -10
- webscout/Agents/functioncall.py +2 -2
- webscout/Bard.py +21 -21
- webscout/Extra/autollama.py +37 -20
- webscout/Local/__init__.py +6 -7
- webscout/Local/formats.py +404 -194
- webscout/Local/model.py +1074 -477
- webscout/Local/samplers.py +108 -144
- webscout/Local/thread.py +251 -410
- webscout/Local/ui.py +401 -0
- webscout/Local/utils.py +338 -136
- webscout/Provider/Amigo.py +51 -38
- webscout/Provider/Deepseek.py +7 -6
- webscout/Provider/EDITEE.py +2 -2
- webscout/Provider/GPTWeb.py +1 -1
- webscout/Provider/NinjaChat.py +200 -0
- webscout/Provider/OLLAMA.py +1 -1
- webscout/Provider/Perplexity.py +1 -1
- webscout/Provider/Reka.py +12 -5
- webscout/Provider/TTI/AIuncensored.py +103 -0
- webscout/Provider/TTI/Nexra.py +3 -3
- webscout/Provider/TTI/__init__.py +3 -2
- webscout/Provider/TTI/aiforce.py +2 -2
- webscout/Provider/TTI/imgninza.py +136 -0
- webscout/Provider/TeachAnything.py +0 -3
- webscout/Provider/Youchat.py +1 -1
- webscout/Provider/__init__.py +12 -11
- webscout/Provider/{ChatHub.py → aimathgpt.py} +72 -88
- webscout/Provider/cerebras.py +125 -118
- webscout/Provider/cleeai.py +1 -1
- webscout/Provider/felo_search.py +1 -1
- webscout/Provider/gaurish.py +207 -0
- webscout/Provider/geminiprorealtime.py +160 -0
- webscout/Provider/genspark.py +1 -1
- webscout/Provider/julius.py +8 -3
- webscout/Provider/learnfastai.py +1 -1
- webscout/Provider/promptrefine.py +3 -1
- webscout/Provider/turboseek.py +3 -8
- webscout/Provider/tutorai.py +1 -1
- webscout/__init__.py +2 -43
- webscout/exceptions.py +5 -1
- webscout/tempid.py +4 -73
- webscout/utils.py +3 -0
- webscout/version.py +1 -1
- webscout/webai.py +1 -1
- webscout/webscout_search.py +154 -123
- {webscout-6.0.dist-info → webscout-6.2b0.dist-info}/METADATA +156 -236
- {webscout-6.0.dist-info → webscout-6.2b0.dist-info}/RECORD +53 -54
- webscout/Local/rawdog.py +0 -946
- webscout/Provider/BasedGPT.py +0 -214
- webscout/Provider/TTI/amigo.py +0 -148
- webscout/Provider/aigames.py +0 -213
- webscout/Provider/bixin.py +0 -264
- webscout/Provider/xdash.py +0 -182
- webscout/websx_search.py +0 -19
- {webscout-6.0.dist-info → webscout-6.2b0.dist-info}/LICENSE.md +0 -0
- {webscout-6.0.dist-info → webscout-6.2b0.dist-info}/WHEEL +0 -0
- {webscout-6.0.dist-info → webscout-6.2b0.dist-info}/entry_points.txt +0 -0
- {webscout-6.0.dist-info → webscout-6.2b0.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: webscout
-Version: 6.0
+Version: 6.2b0
 Summary: Search for anything using Google, DuckDuckGo, phind.com, Contains AI models, can transcribe yt videos, temporary email and phone number generation, has TTS support, webai (terminal gpt and open interpreter) and offline LLMs and more
 Author: OEvortex
 Author-email: helpingai5@gmail.com
@@ -22,6 +22,7 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Topic :: Internet :: WWW/HTTP :: Indexing/Search
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
 Description-Content-Type: text/markdown
 License-File: LICENSE.md
 Requires-Dist: docstring-inheritance
@@ -53,6 +54,14 @@ Requires-Dist: emoji
 Requires-Dist: openai
 Requires-Dist: prompt-toolkit
 Requires-Dist: fake-useragent
+Requires-Dist: primp
+Requires-Dist: pyreqwest-impersonate
+Requires-Dist: lxml-html-clean
+Requires-Dist: gradio-client
+Requires-Dist: psutil
+Requires-Dist: colorlog
+Requires-Dist: yaspin
+Requires-Dist: cerebras-cloud-sdk
 Provides-Extra: dev
 Requires-Dist: ruff>=0.1.6; extra == "dev"
 Requires-Dist: pytest>=7.4.2; extra == "dev"
@@ -61,6 +70,7 @@ Requires-Dist: llama-cpp-python; extra == "local"
 Requires-Dist: colorama; extra == "local"
 Requires-Dist: numpy; extra == "local"
 Requires-Dist: huggingface-hub[cli]; extra == "local"
+Requires-Dist: unicorn; extra == "local"
 
 <div align="center">
 <!-- Replace `#` with your actual links -->
@@ -257,90 +267,41 @@ weather = w.get("Qazigund")
 print(weather)
 ```
 
-## ✉️
+## ✉️ TempMail and VNEngine
 
-### Temp Number
-```python
-from rich.console import Console
-from webscout import tempid
-
-def main():
-    console = Console()
-    phone = tempid.TemporaryPhoneNumber()
-
-    try:
-        # Get a temporary phone number for a specific country (or random)
-        number = phone.get_number(country="Finland")
-        console.print(f"Your temporary phone number: [bold cyan]{number}[/bold cyan]")
-
-        # Pause execution briefly (replace with your actual logic)
-        # import time module
-        import time
-        time.sleep(30)  # Adjust the waiting time as needed
-
-        # Retrieve and print messages
-        messages = phone.get_messages(number)
-        if messages:
-            # Access individual messages using indexing:
-            console.print(f"[bold green]{messages[0].frm}:[/] {messages[0].content}")
-            # (Add more lines if you expect multiple messages)
-        else:
-            console.print("No messages received.")
-
-    except Exception as e:
-        console.print(f"[bold red]An error occurred: {e}")
-
-if __name__ == "__main__":
-    main()
-
-```
-
-### Tempmail
 ```python
+import json
 import asyncio
-from
-from
-
-
-
-
-
-
+from webscout import VNEngine
+from webscout import TempMail
+
+async def main():
+    vn = VNEngine()
+    countries = vn.get_online_countries()
+    if countries:
+        country = countries[0]['country']
+        numbers = vn.get_country_numbers(country)
+        if numbers:
+            number = numbers[0]['full_number']
+            inbox = vn.get_number_inbox(country, number)
+
+            # Serialize inbox data to JSON string
+            json_data = json.dumps(inbox, ensure_ascii=False, indent=4)
+
+            # Print with UTF-8 encoding
+            print(json_data)
 
-
+    async with TempMail() as client:
         domains = await client.get_domains()
-
-
-
-
-
-
-
-
-        while True:
-            messages = await client.get_messages(email.email)
-            if messages is not None:
-                break
-
-        if messages:
-            table = Table(show_header=True, header_style="bold magenta")
-            table.add_column("From", style="bold cyan")
-            table.add_column("Subject", style="bold yellow")
-            table.add_column("Body", style="bold green")
-            for message in messages:
-                body_preview = Text(message.body_text if message.body_text else "No body")
-                table.add_row(message.email_from or "Unknown", message.subject or "No Subject", body_preview)
-            console.print(table)
-        else:
-            console.print("No messages found.")
-
-    except Exception as e:
-        console.print(f"[bold red]An error occurred: {e}")
-
-    finally:
-        await client.close()
+        print("Available Domains:", domains)
+        email_response = await client.create_email(alias="testuser")
+        print("Created Email:", email_response)
+        messages = await client.get_messages(email_response.email)
+        print("Messages:", messages)
+        await client.delete_email(email_response.email, email_response.token)
+        print("Email Deleted")
 
-if __name__ ==
+if __name__ == "__main__":
     asyncio.run(main())
 ```
 
@@ -599,16 +560,6 @@ with WEBS() as WEBS:
     print(r)
 ```
 
-## 🌐 WEBSX - Another Web Search Tool
-
-```python
-from webscout import WEBSX
-s = "Python development tools"
-
-result = WEBSX(s)
-
-print(result)
-```
 
 ## 🎭 ALL Acts
 
@@ -868,7 +819,7 @@ print(result)
 ___
 </details>
 
-### 🖼️ Text to Images - DeepInfraImager, PollinationsAI, BlackboxAIImager,
+### 🖼️ Text to Images - DeepInfraImager, PollinationsAI, BlackboxAIImager, AiForceimager, NexraImager, HFimager, ArtbitImager, NinjaImager, WebSimAI, AIUncensoredImager
 
 **Every TTI provider has the same usage code, you just need to change the import.**
 
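The hunk above only shows the expanded provider list and the note that every TTI provider shares the same usage code; the shared call pattern itself lies outside this hunk. A minimal sketch of that pattern, assuming the `NinjaImager` provider added in this release is importable from the top-level package and exposes `generate()`/`save()` methods (both assumptions, not confirmed by this diff):

```python
# Illustrative sketch only: the import path and the generate()/save() method
# names are assumptions; swap the import to target any other TTI provider.
from webscout import NinjaImager  # assumed top-level export

imager = NinjaImager()
images = imager.generate("a watercolor fox in the snow", amount=1)  # assumed signature
imager.save(images, dir="generated_images")  # assumed signature
```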
@@ -897,7 +848,7 @@ voicepods.play_audio(audio_file)
 
 ```python
 from webscout import WEBS as w
-R = w().chat("Who are you", model='gpt-4o-mini') #
+R = w().chat("Who are you", model='gpt-4o-mini') # mixtral-8x7b, llama-3.1-70b, claude-3-haiku, gpt-4o-mini
 print(R)
 ```
 
@@ -1125,29 +1076,6 @@ response_str = a.chat(prompt)
 print(response_str)
 ```
 
-### `BasedGPT` - Chat with GPT
-
-```python
-from webscout import BasedGPT
-
-# Initialize the BasedGPT provider
-basedgpt = BasedGPT(
-    is_conversation=True,  # Chat conversationally
-    max_tokens=600,  # Maximum tokens to generate
-    timeout=30,  # HTTP request timeout
-    intro="You are a helpful and friendly AI.",  # Introductory prompt
-    filepath="chat_history.txt",  # File to store conversation history
-    update_file=True,  # Update the chat history file
-)
-
-# Send a prompt to the AI
-prompt = "What is the meaning of life?"
-response = basedgpt.chat(prompt)
-
-# Print the AI's response
-print(response)
-```
-
 ### `DeepSeek` - Chat with DeepSeek
 
 ```python
@@ -1316,117 +1244,120 @@ print(a.chat("HelpingAI-9B"))
 ```python
 import json
 import logging
-from webscout import
+from webscout import Julius, WEBS
 from webscout.Agents.functioncall import FunctionCallingAgent
+from rich import print
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+class FunctionExecutor:
+    def __init__(self, llama):
+        self.llama = llama
+
+    def execute_web_search(self, arguments):
+        query = arguments.get("query")
+        if not query:
+            return "Please provide a search query."
+        with WEBS() as webs:
+            search_results = webs.text(query, max_results=5)
+        prompt = (
+            f"Based on the following search results:\n\n{search_results}\n\n"
+            f"Question: {query}\n\n"
+            "Please provide a comprehensive answer to the question based on the search results above. "
+            "Include relevant webpage URLs in your answer when appropriate. "
+            "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
+        )
+        return self.llama.chat(prompt)
+
+    def execute_general_ai(self, arguments):
+        question = arguments.get("question")
+        if not question:
+            return "Please provide a question."
+        return self.llama.chat(question)
+
+    def execute_UserDetail(self, arguments):
+        name = arguments.get("name")
+        age = arguments.get("age")
+        return f"User details - Name: {name}, Age: {age}"
+
+def main():
+    tools = [
+        {
+            "type": "function",
+            "function": {
+                "name": "UserDetail",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "name": {"title": "Name", "type": "string"},
+                        "age": {"title": "Age", "type": "integer"}
                     },
-                    "
-
-                        "type": "integer"
-                    }
-                },
-                "required": ["name", "age"]
+                    "required": ["name", "age"]
+                }
             }
-            }
-
-
-
-
-
-
-
-
-
-
-
-
-
-            }
-
+        },
+        {
+            "type": "function",
+            "function": {
+                "name": "web_search",
+                "description": "Search the web for information using Google Search.",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "query": {
+                            "type": "string",
+                            "description": "The search query to be executed."
+                        }
+                    },
+                    "required": ["query"]
+                }
             }
-            }
-
-
-
-
-
-
-
-
-
-
-
-
-            }
-            },
-            "required": ["question"]
+        },
+        {
+            "type": "function",
+            "function": {
+                "name": "general_ai",
+                "description": "Use general AI knowledge to answer the question",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "question": {"type": "string", "description": "The question to answer"}
+                    },
+                    "required": ["question"]
+                }
             }
         }
-
-]
+    ]
 
-
-
-
-from rich import print
-# Input message from the user
-user = input(">>> ")
-message = user
-function_call_data = agent.function_call_handler(message)
-print(f"Function Call Data: {function_call_data}")
-
-# Check for errors in the function call data
-if "error" not in function_call_data:
-    function_name = function_call_data.get("tool_name")  # Use 'tool_name' instead of 'name'
-    if function_name == "web_search":
-        arguments = function_call_data.get("tool_input", {})  # Get tool input arguments
-        query = arguments.get("query")
-        if query:
-            with WEBS() as webs:
-                search_results = webs.text(query, max_results=5)
-            prompt = (
-                f"Based on the following search results:\n\n{search_results}\n\n"
-                f"Question: {user}\n\n"
-                "Please provide a comprehensive answer to the question based on the search results above. "
-                "Include relevant webpage URLs in your answer when appropriate. "
-                "If the search results don't contain relevant information, please state that and provide the best answer you can based on your general knowledge."
-            )
-            response = llama.chat(prompt)
-            for c in response:
-                print(c, end="", flush=True)
+    agent = FunctionCallingAgent(tools=tools)
+    llama = Julius()
+    function_executor = FunctionExecutor(llama)
 
+    user_input = input(">>> ")
+    function_call_data = agent.function_call_handler(user_input)
+    print(f"Function Call Data: {function_call_data}")
+
+    try:
+        if "error" not in function_call_data:
+            function_name = function_call_data.get("tool_name")
+            arguments = function_call_data.get("tool_input", {})
+
+            execute_function = getattr(function_executor, f"execute_{function_name}", None)
+            if execute_function:
+                result = execute_function(arguments)
+                print("Function Execution Result:")
+                for c in result:
+                    print(c, end="", flush=True)
+            else:
+                print(f"Unknown function: {function_name}")
         else:
-            print("
-
-
-
-
-            for c in response:
-                print(c, end="", flush=True)
-        else:
-            print("Please provide a question.")
-    else:
-        result = agent.execute_function(function_call_data)
-        print(f"Function Execution Result: {result}")
-else:
-    print(f"Error: {function_call_data['error']}")
+            print(f"Error: {function_call_data['error']}")
+    except Exception as e:
+        print(f"An error occurred: {str(e)}")
+
+if __name__ == "__main__":
+    main()
 ```
 
-### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo,
+### LLAMA3, pizzagpt, RUBIKSAI, Koala, Darkai, AI4Chat, Farfalle, PIAI, Felo, Julius, YouChat, YEPCHAT, Cloudflare, TurboSeek, Editee, AI21, Chatify, Cerebras, X0GPT, Lepton, GEMINIAPI, Cleeai, Elmo, Genspark, Upstage, Free2GPT, Bing, DiscordRocks, GPTWeb, LlamaTutor, PromptRefine, AIUncensored, TutorAI, ChatGPTES, Bagoodex, ChatHub, AmigoChat, AIMathGPT, GaurishCerebras, NinjaChat, GeminiPro
 
 Code is similar to other providers.
 
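The "Code is similar to other providers" note above refers to the shared provider interface already visible in this diff: instantiate the provider class, then call `.chat()`. A minimal sketch using `Julius`, whose import and `.chat()` usage appear in the function-calling example in the previous hunk; the other providers named in the heading are assumed to follow the same pattern:

```python
# Minimal sketch of the shared provider pattern; Julius and .chat() are taken
# from the function-calling example above, other providers are assumed to
# expose the same interface.
from webscout import Julius

bot = Julius()
response = bot.chat("What is the capital of France?")
print(response)
```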
@@ -1465,24 +1396,16 @@ Webscout can now run GGUF models locally. You can download and run your favorite
 **Example:**
 
 ```python
-from webscout.Local
-
-
-
-
-
-
-
-
-
-# 2. Load the model
-model = Model(model_path, n_gpu_layers=4)
-
-# 3. Create a Thread for conversation
-thread = Thread(model, formats.phi3)
-
-# 4. Start interacting with the model
-thread.interact()
+from webscout.Local import *
+model_path = download_model("Qwen/Qwen2.5-0.5B-Instruct-GGUF", "qwen2.5-0.5b-instruct-q2_k.gguf", token=None)
+model = Model(model_path, n_gpu_layers=0, context_length=2048)
+thread = Thread(model, format=chatml)
+# print(thread.send("hi")) #send a single msg to ai
+
+# thread.interact() # interact with the model in terminal
+# start webui
+# webui = WebUI(thread)
+# webui.start(host="0.0.0.0", port=8080, ssl=True) #Use ssl=True and make cert and key for https
 ```
 
 ## 🐶 Local-rawdog
@@ -1585,7 +1508,7 @@ Webscout provides tools to convert and quantize Hugging Face models into the GGU
 **Example:**
 
 ```python
-from webscout import gguf
+from webscout.Extra import gguf
 """
 Valid quantization methods:
 "q2_k", "q3_k_l", "q3_k_m", "q3_k_s",
@@ -1606,7 +1529,7 @@ gguf.convert(
 Webscout's `autollama` utility downloads a model from Hugging Face and then automatically makes it Ollama-ready.
 
 ```python
-from webscout import autollama
+from webscout.Extra import autollama
 
 model_path = "Vortex4ai/Jarvis-0.5B"
 gguf_file = "test2-q4_k_m.gguf"
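The autollama hunk above shows only the changed import and the two input variables; the call that consumes them falls outside the diff context. A hedged sketch, in which the `autollama.main(...)` entry point and its keyword arguments are hypothetical names used purely for illustration:

```python
# Hypothetical sketch: the real autollama entry point is not visible in this
# hunk, so the function name and keyword arguments below are assumptions.
from webscout.Extra import autollama

model_path = "Vortex4ai/Jarvis-0.5B"
gguf_file = "test2-q4_k_m.gguf"

autollama.main(model_path=model_path, gguf_file=gguf_file)  # assumed entry point
```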
@@ -1667,9 +1590,6 @@ Contributions are welcome! If you'd like to contribute to Webscout, please follo
 4. Push your branch to your forked repository.
 5. Submit a pull request to the main repository.
 
-## 📜 License
-
-This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
 
 ## 🙏 Acknowledgments
 