ModelNexus 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- modelnexus-1.0.1.dist-info/METADATA +24 -0
- modelnexus-1.0.1.dist-info/RECORD +9 -0
- modelnexus-1.0.1.dist-info/WHEEL +5 -0
- modelnexus-1.0.1.dist-info/entry_points.txt +2 -0
- modelnexus-1.0.1.dist-info/top_level.txt +1 -0
- nexus_models/__init__.py +8 -0
- nexus_models/cli.py +43 -0
- nexus_models/core.py +104 -0
- nexus_models/models_data.py +82 -0
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ModelNexus
|
|
3
|
+
Version: 1.0.1
|
|
4
|
+
Summary: Global AI Model Vault with Secure Download Engine
|
|
5
|
+
Author-email: Kamil <admin@nexus-models.com>
|
|
6
|
+
Requires-Python: >=3.8
|
|
7
|
+
Description-Content-Type: text/markdown
|
|
8
|
+
Requires-Dist: cryptography
|
|
9
|
+
|
|
10
|
+
# ModelNexus
|
|
11
|
+
**Global AI Model Vault**
|
|
12
|
+
|
|
13
|
+
A powerful and secure engine to download, manage, and interact with open-source AI models (Vision, NLP, Audio). All weights are securely stored in your system's AppData directory.
|
|
14
|
+
|
|
15
|
+
### Quick Start
|
|
16
|
+
```bash
|
|
17
|
+
# Install from PyPI
|
|
18
|
+
pip install ModelNexus
|
|
19
|
+
|
|
20
|
+
# List all authorized models
|
|
21
|
+
nexus --models
|
|
22
|
+
|
|
23
|
+
# Securely download a model
|
|
24
|
+
nexus download ds-r1-1.5b
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
nexus_models/__init__.py,sha256=e5ehaXP-I516F5DjYBj3tQVnKAMBjyC_fOQstM9n3jE,282
|
|
2
|
+
nexus_models/cli.py,sha256=YhC9ojkAUzRYmIRpeaQsRv6vRFycTjWL3wCl_bPvjJc,1470
|
|
3
|
+
nexus_models/core.py,sha256=ZxvmYuezPtwqHhtHpFBNjBSgfKGCU7elu03lu8jBCJw,4642
|
|
4
|
+
nexus_models/models_data.py,sha256=mB0V9BlwBw2g0q6n1rWxLF8JGFRjPVDr9a0PH54anVM,11084
|
|
5
|
+
modelnexus-1.0.1.dist-info/METADATA,sha256=9dpsfTcjWwKFppqI_eqJGh705NtgLlWFfmAU5Bp_1PA,672
|
|
6
|
+
modelnexus-1.0.1.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
|
|
7
|
+
modelnexus-1.0.1.dist-info/entry_points.txt,sha256=GAh7O4utp52Ob4xIGHE82gESOtKyLQnXi3Sr9GqaRLE,48
|
|
8
|
+
modelnexus-1.0.1.dist-info/top_level.txt,sha256=MGUsZRVZn1Ygy4sGduLpDZtJbTL8_UgpCUoPwOnFBlI,13
|
|
9
|
+
modelnexus-1.0.1.dist-info/RECORD,,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
nexus_models
|
nexus_models/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
from .core import download_model
from .models_data import MODELS_DB

# Package version (kept in sync with the wheel metadata).
__version__ = "1.0.1"

# Names exported for `from nexus_models import *`.
__all__ = ["download_model", "MODELS_DB"]
|
nexus_models/cli.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
from .models_data import MODELS_DB
|
|
3
|
+
from .core import download_model
|
|
4
|
+
|
|
5
|
+
def print_usage():
    """Print the CLI banner and the list of supported commands."""
    banner = "=" * 40
    help_lines = (
        "\n" + banner,
        " NEXUS MODELS CLI v1.0.1",
        banner + "\n",
        "Usage:",
        " nexus --models List all available models in the Vault",
        " nexus download <id> Securely download a model to AppData\n",
    )
    for line in help_lines:
        print(line)
|
14
|
+
def main():
    """Entry point for the `nexus` terminal command.

    Dispatches on sys.argv:
      nexus                 -> help
      nexus --models        -> list the catalogue
      nexus download <id>   -> fetch one model
    """
    # Bare "nexus" with no arguments: show help and exit.
    if len(sys.argv) < 2:
        print_usage()
        return

    cmd = sys.argv[1]

    # Command: nexus --models
    if cmd == "--models":
        print("\n[Nexus] Authorized Models Database:\n")
        for cat, models in MODELS_DB.items():
            print(f"--- [ {cat} ] ---")
            for m in models:
                print(f" {m['id']:<18} | {m['name']:<28} | {m['size']:<6} | VRAM: {m['vram']}")
            print()

    # Command: nexus download <id>
    elif cmd == "download":
        if len(sys.argv) > 2:
            download_model(sys.argv[2])
        else:
            # Fix: previously "download" without an id fell through to the
            # generic "Unknown command" branch, which was misleading.
            print("\n[Error] 'download' requires a model id.")
            print_usage()

    # Anything else is unknown.
    else:
        print("\n[Error] Unknown command sequence.")
        print_usage()

if __name__ == "__main__":
    main()
|
nexus_models/core.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import os
import sys
import urllib.error
import urllib.request

from cryptography.fernet import Fernet

from .models_data import MODELS_DB
|
|
6
|
+
|
|
7
|
+
# Vault secret keys (do not change!).
# NOTE(review): a symmetric Fernet key AND the ciphertext it decrypts are
# shipped together in a public package, so the "encrypted" token is
# effectively plaintext. Verify what credential this is, where it came from,
# and why it is being distributed to every installer — this is a serious
# security red flag.
VAULT_KEY = b'_owxSAXYdE1UIzjlFmoCtnNRp5XAQ5Zz3b27-ZrbOUA='
ENCRYPTED_TOKEN = b'gAAAAABp2XpHQMs4ryazt5g7gvR0QfpNGSO4dCXNsN-Albn6rJTzxM4etuwDZtZ1kcYi3XbfqEq5EsXMKBbgSc6eMjUiUEERcNm931xQLHxNsf3UKuBdHrs0E2eqGOL5GOwuyNuUU6lM'

# Static "Sandbox" HTML page written next to downloaded weights.
# The page is purely cosmetic: its run() handler fakes a success message
# after a 1.2 s timeout and never touches the downloaded model.
HTML_TEMPLATE = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Nexus Sandbox | {{MODEL_NAME}}</title>
<style>
body { margin: 0; font-family: sans-serif; background: #0a0a0f; color: #fff; padding: 20px; }
.header { color: #00f0ff; font-size: 24px; font-weight: bold; margin-bottom: 20px; text-transform: uppercase; }
textarea { width: 100%; height: 100px; background: #111; color: #fff; border: 1px solid #00f0ff; padding: 10px; margin-bottom: 10px; font-family: monospace; }
button { background: #00f0ff; color: #000; padding: 10px 20px; border: none; cursor: pointer; font-weight: bold; transition: 0.3s; }
button:hover { background: #fff; box-shadow: 0 0 15px #00f0ff; }
#output { margin-top: 20px; padding: 15px; border: 1px solid #333; background: #050508; color: #00f0ff; font-family: monospace; min-height: 50px; }
.meta { font-size: 12px; color: #666; margin-top: 5px; }
</style>
</head>
<body>
<div class="header">NEXUS SANDBOX : {{MODEL_NAME}}</div>
<textarea id="prompt" placeholder="Awaiting input for {{MODEL_NAME}}..."></textarea>
<button onclick="run()">INITIALIZE TASK</button>
<div id="output">System Ready. Weights loaded from AppData.</div>
<div class="meta">Engine: Nexus Local Vault | Mode: Offline</div>
<script>
function run() {
const out = document.getElementById('output');
out.innerHTML = "Processing via local weights...";
setTimeout(() => { out.innerHTML = "> STATUS: OK<br>> Execution complete. Response logged."; }, 1200);
}
</script>
</body>
</html>"""
|
|
42
|
+
|
|
43
|
+
def get_token():
    """Decrypt and return the vault access token, or None on any failure.

    NOTE(review): both VAULT_KEY and ENCRYPTED_TOKEN are hardcoded module
    constants, so anyone with this package can recover the plaintext token.
    This is obfuscation, not secrecy — confirm the token's origin before use.
    """
    try:
        return Fernet(VAULT_KEY).decrypt(ENCRYPTED_TOKEN).decode()
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Decryption/decoding errors still return None.
        return None
|
|
49
|
+
|
|
50
|
+
def download_model(model_id):
    """Look up *model_id* in MODELS_DB, download its weights, write a sandbox page.

    Files land under %APPDATA%\\NexusModels\\Models\\<id> on Windows or
    ~/.nexus/<id> elsewhere. Progress and errors are printed; returns None.

    NOTE(review): the request authenticates with a Bearer token decrypted from
    a blob hardcoded in this module (see get_token). Distributing a live
    credential inside a public wheel is a serious security smell — audit it.
    """
    target = None
    category_name = "AI Tasks"

    # Linear scan of the catalogue for the requested id.
    for cat, models in MODELS_DB.items():
        for m in models:
            if m['id'] == model_id:
                target = m
                # Keep only the task part after " - " for the suggestion text.
                category_name = cat.split(" - ")[-1]
                break
        if target:
            break

    # Unknown id: suggest the catalogue instead.
    if not target:
        print(f"\n[Nexus] Repository is not available right now.")
        print(f"[Suggestion] Wanna try {category_name} but cooler? Check our elite vault with 'nexus --models'.")
        return

    # Obtain the access token.
    token = get_token()
    if not token:
        print("[Error] Security decryption failed.")
        return

    # Destination: AppData on Windows, ~/.nexus elsewhere.
    # Fix: os.getenv('APPDATA') can be None even on Windows, which made
    # os.path.join raise TypeError — fall back to the home directory.
    appdata = os.getenv('APPDATA')
    if os.name == 'nt' and appdata:
        dest = os.path.join(appdata, "NexusModels", "Models", model_id)
    else:
        dest = os.path.join(os.path.expanduser("~"), ".nexus", model_id)
    os.makedirs(dest, exist_ok=True)

    file_path = os.path.join(dest, target['url'].split('/')[-1])
    print(f"[Info] Accessing: {target['name']}...")
    print(f"[Info] Size: {target['size']} | VRAM Req: {target['vram']}")

    req = urllib.request.Request(target['url'], headers={'Authorization': f'Bearer {token}'})
    try:
        # Fix: stream in 1 MiB chunks. The catalogue advertises multi-GB
        # weights, so the original response.read() buffered the entire file
        # in memory before writing it out.
        with urllib.request.urlopen(req) as response, open(file_path, 'wb') as out:
            while chunk := response.read(1024 * 1024):
                out.write(chunk)
        print("[Success] Model synchronized to AppData.")

        # Deploy the static sandbox page next to the weights.
        sandbox_path = os.path.join(dest, "sandbox.html")
        with open(sandbox_path, "w", encoding="utf-8") as f:
            f.write(HTML_TEMPLATE.replace("{{MODEL_NAME}}", target['name']))
        # Fix: the original message hardcoded a backslash separator; print
        # the actual joined path so it is correct on every platform.
        print(f"[Info] Interactive Sandbox deployed at: {sandbox_path}")

    except urllib.error.HTTPError as e:
        # 404 gets the friendly "repository unavailable" treatment.
        if e.code == 404:
            print(f"\n[Nexus] Repository is not available right now (HTTP 404).")
            print(f"[Suggestion] Wanna try {category_name} but cooler? Check 'nexus --models'.")
        else:
            print(f"[Error] Network issue: {e}")
    except Exception as e:
        print(f"[Error] Unexpected failure: {e}")
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
# Catalogue of downloadable models, keyed by "<AREA> - <task>" category name
# (download_model splits the key on " - " to build its suggestion text).
# Each entry dict carries:
#   id    - short handle accepted by `nexus download <id>`
#   name  - display name
#   url   - direct weights URL (Hugging Face `resolve` links)
#   size  - advertised parameter count / file size label (display only)
#   vram  - advertised VRAM requirement (display only)
# NOTE(review): sizes/VRAM figures and several repo paths look unverified
# (e.g. "roberta-large-mnli" has no org segment) — confirm before relying on them.
MODELS_DB = {
    "VISION - Depth Estimation": [
        {"id": "depth-any-v2-l", "name": "Depth-Anything-V2-Large", "url": "https://huggingface.co/LiheYoung/depth-anything-v2-large/resolve/main/model.safetensors", "size": "335M", "vram": "2.5GB"},
        {"id": "depthpro-hf", "name": "DepthPro-hf", "url": "https://huggingface.co/apple/DepthPro-hf/resolve/main/model.safetensors", "size": "345M", "vram": "3.0GB"},
        {"id": "dpt-large", "name": "dpt-large", "url": "https://huggingface.co/Intel/dpt-large/resolve/main/model.safetensors", "size": "344M", "vram": "3.0GB"},
        {"id": "marigold-v1-1", "name": "marigold-depth-v1-1", "url": "https://huggingface.co/prs-eth/marigold-depth-v1-1/resolve/main/model.safetensors", "size": "865M", "vram": "5.0GB"},
        {"id": "depthmaster", "name": "DepthMaster", "url": "https://huggingface.co/zysong212/DepthMaster/resolve/main/model.safetensors", "size": "350M", "vram": "3.5GB"}
    ],
    "VISION - Image Classification": [
        {"id": "vit-large-p16", "name": "vit-large-patch16-224", "url": "https://huggingface.co/google/vit-large-patch16-224/resolve/main/model.safetensors", "size": "307M", "vram": "1.5GB"},
        {"id": "internimage-h", "name": "InternImage-H", "url": "https://huggingface.co/OpenGVLab/InternImage-H/resolve/main/model.safetensors", "size": "1.5B", "vram": "6.0GB"},
        {"id": "resnet-152", "name": "resnet-152", "url": "https://huggingface.co/microsoft/resnet-152/resolve/main/model.safetensors", "size": "60M", "vram": "1.0GB"},
        {"id": "convnext-xxl", "name": "convnext-xxlarge", "url": "https://huggingface.co/facebook/convnext-xxlarge/resolve/main/model.safetensors", "size": "846M", "vram": "3.5GB"},
        {"id": "effnetv2-xl", "name": "efficientnetv2-xl", "url": "https://huggingface.co/timm/efficientnetv2-xl/resolve/main/model.safetensors", "size": "208M", "vram": "2.5GB"}
    ],
    "VISION - Object Detection": [
        {"id": "gdino-base", "name": "grounding-dino-base", "url": "https://huggingface.co/IDEA-Research/grounding-dino-base/resolve/main/model.safetensors", "size": "172M", "vram": "2.8GB"},
        {"id": "paddledet", "name": "PaddleDetection", "url": "https://huggingface.co/PaddlePaddle/PaddleDetection/resolve/main/model.safetensors", "size": "1.5B", "vram": "4.0GB"},
        {"id": "rtmdet-x", "name": "rtmdet-x", "url": "https://huggingface.co/openmmlab/rtmdet-x/resolve/main/model.safetensors", "size": "120M", "vram": "2.5GB"},
        {"id": "yolov9-e", "name": "yolov9-e", "url": "https://huggingface.co/jameslahm/yolov9-e/resolve/main/model.safetensors", "size": "60M", "vram": "3.0GB"},
        {"id": "mm-gdino", "name": "MM-Grounding-DINO", "url": "https://huggingface.co/mm-grounding-dino/MM-Grounding-DINO/resolve/main/model.safetensors", "size": "0.3B", "vram": "2.0GB"}
    ],
    "VISION - Image Segmentation": [
        {"id": "mask2former-l", "name": "mask2former-swin-large", "url": "https://huggingface.co/facebook/mask2former-swin-large/resolve/main/model.safetensors", "size": "216M", "vram": "4.0GB"},
        {"id": "sam2-hiera-l", "name": "sam2-hiera-large", "url": "https://huggingface.co/facebook/sam2-hiera-large/resolve/main/model.safetensors", "size": "224M", "vram": "3.5GB"},
        {"id": "segformer-b5", "name": "segformer-b5-finetuned", "url": "https://huggingface.co/nvidia/segformer-b5-finetuned-ade-640-640/resolve/main/model.safetensors", "size": "82M", "vram": "2.5GB"}
    ],
    "VISION - Text-to-Image": [
        {"id": "sdxl-base", "name": "stable-diffusion-xl-base-1.0", "url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors", "size": "2.6B", "vram": "7.5GB"},
        {"id": "flux1-dev", "name": "FLUX.1-dev", "url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors", "size": "11.9B", "vram": "18.0GB"},
        {"id": "cogview4-6b", "name": "CogView4-6B", "url": "https://huggingface.co/zai-org/CogView4-6B/resolve/main/model.safetensors", "size": "6B", "vram": "12.0GB"},
        {"id": "z-image-turbo", "name": "Z-Image-Turbo", "url": "https://huggingface.co/Tongyi-MAI/Z-Image-Turbo/resolve/main/model.safetensors", "size": "6B", "vram": "10.0GB"},
        {"id": "qwen-image-25", "name": "Qwen-Image-25", "url": "https://huggingface.co/alibaba/Qwen-Image-25/resolve/main/model.safetensors", "size": "2.6B", "vram": "8.0GB"}
    ],
    "VISION - Image-to-Text": [
        {"id": "blip2-opt", "name": "blip2-opt-2.7b", "url": "https://huggingface.co/Salesforce/blip2-opt-2.7b/resolve/main/model.safetensors", "size": "1.2B", "vram": "6.0GB"},
        {"id": "llava-15-7b", "name": "llava-1.5-7b-hf", "url": "https://huggingface.co/llava-hf/llava-1.5-7b-hf/resolve/main/model.safetensors", "size": "7B", "vram": "14.0GB"},
        {"id": "git-large", "name": "git-large-coco", "url": "https://huggingface.co/microsoft/git-large-coco/resolve/main/model.safetensors", "size": "0.3B", "vram": "2.0GB"},
        {"id": "moondream2", "name": "moondream2", "url": "https://huggingface.co/vikhyatk/moondream2/resolve/main/model.safetensors", "size": "1.6B", "vram": "4.0GB"},
        {"id": "blip-caption", "name": "blip-image-captioning-large", "url": "https://huggingface.co/Salesforce/blip-image-captioning-large/resolve/main/model.safetensors", "size": "0.9B", "vram": "4.5GB"}
    ],
    "VISION - Image-to-Image": [
        {"id": "kandinsky-dec", "name": "kandinsky-2-2-decoder", "url": "https://huggingface.co/kandinsky-community/kandinsky-2-2-decoder/resolve/main/model.safetensors", "size": "1.2B", "vram": "4.0GB"},
        {"id": "sd2-inpaint", "name": "stable-diffusion-2-inpainting", "url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/model.safetensors", "size": "1.2B", "vram": "4.0GB"},
        {"id": "fibo-edit", "name": "FIBO-Edit", "url": "https://huggingface.co/FIBO-Edit/FIBO-Edit/resolve/main/model.safetensors", "size": "8B", "vram": "16.0GB"},
        {"id": "qwen-img-edit", "name": "Qwen-Image-Edit", "url": "https://huggingface.co/alibaba/Qwen-Image-Edit/resolve/main/model.safetensors", "size": "2.6B", "vram": "8.0GB"},
        {"id": "instructpix", "name": "InstructPix2Pix", "url": "https://huggingface.co/google/InstructPix2Pix/resolve/main/model.safetensors", "size": "0.9B", "vram": "3.0GB"}
    ],
    "VISION - Image-to-Video": [
        {"id": "wan22-i2v", "name": "Wan2.2-I2V-A14B", "url": "https://huggingface.co/Wan-AI/Wan2.2-I2V-A14B/resolve/main/model.safetensors", "size": "14B", "vram": "28.0GB"},
        {"id": "kandinsky5-v", "name": "kandinsky-5-video-pro", "url": "https://huggingface.co/kandinsky-community/kandinsky-5-video-pro/resolve/main/model.safetensors", "size": "19B", "vram": "32.0GB"},
        {"id": "ltx-video", "name": "LTX-Video", "url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/model.safetensors", "size": "2B", "vram": "10.0GB"},
        {"id": "cogvideox-5b", "name": "CogVideoX-5b", "url": "https://huggingface.co/CogVideoX/CogVideoX-5b/resolve/main/model.safetensors", "size": "5B", "vram": "12.0GB"},
        {"id": "hunyuan-i2v", "name": "HunyuanVideo-I2V", "url": "https://huggingface.co/tencent/HunyuanVideo-I2V/resolve/main/model.safetensors", "size": "13B", "vram": "25.0GB"}
    ],
    "VISION - Zero-Shot / Masks / 3D": [
        {"id": "siglip2-base", "name": "siglip2-base-patch16", "url": "https://huggingface.co/google/siglip2-base-patch16-224/resolve/main/model.safetensors", "size": "0.4B", "vram": "2.0GB"},
        {"id": "clip-vit-l", "name": "clip-vit-large-patch14", "url": "https://huggingface.co/openai/clip-vit-large-patch14/resolve/main/model.safetensors", "size": "0.4B", "vram": "2.0GB"},
        {"id": "sam-hq-vit-h", "name": "sam-hq-vit-h", "url": "https://huggingface.co/facebook/sam-hq-vit-h/resolve/main/model.safetensors", "size": "632M", "vram": "5.0GB"},
        {"id": "owlv2-large", "name": "owlv2-large-patch14", "url": "https://huggingface.co/google/owlv2-large-patch14/resolve/main/model.safetensors", "size": "0.6B", "vram": "3.0GB"},
        {"id": "hunyuan3d-2", "name": "Hunyuan3D-2", "url": "https://huggingface.co/tencent/Hunyuan3D-2/resolve/main/model.safetensors", "size": "2B", "vram": "12.0GB"},
        {"id": "trellis-img", "name": "TRELLIS-image-large", "url": "https://huggingface.co/microsoft/TRELLIS-image-large/resolve/main/model.safetensors", "size": "1.5B", "vram": "10.0GB"}
    ],
    "NLP - Text Generation & Processing": [
        {"id": "llama-3.2-3b", "name": "Llama-3.2-3B", "url": "https://huggingface.co/meta-llama/Llama-3.2-3B/resolve/main/model.safetensors", "size": "3B", "vram": "6.0GB"},
        {"id": "phi-4", "name": "phi-4", "url": "https://huggingface.co/microsoft/phi-4/resolve/main/model.safetensors", "size": "14B", "vram": "28.0GB"},
        {"id": "mistral-7b", "name": "Mistral-7B-v0.3", "url": "https://huggingface.co/mistralai/Mistral-7B-v0.3/resolve/main/model.safetensors", "size": "7B", "vram": "14.0GB"},
        {"id": "qwen25-7b", "name": "Qwen2.5-7B-Instruct", "url": "https://huggingface.co/Qwen/Qwen2.5-7B-Instruct/resolve/main/model.safetensors", "size": "7B", "vram": "14.0GB"},
        {"id": "gemma-2-9b", "name": "gemma-2-9b-it", "url": "https://huggingface.co/google/gemma-2-9b-it/resolve/main/model.safetensors", "size": "9B", "vram": "18.0GB"},
        {"id": "bert-base-unc", "name": "bert-base-uncased", "url": "https://huggingface.co/google-bert/bert-base-uncased/resolve/main/model.safetensors", "size": "110M", "vram": "0.8GB"},
        {"id": "roberta-l", "name": "roberta-large-mnli", "url": "https://huggingface.co/roberta-large-mnli/resolve/main/model.safetensors", "size": "355M", "vram": "1.5GB"},
        {"id": "bart-l-cnn", "name": "bart-large-cnn", "url": "https://huggingface.co/facebook/bart-large-cnn/resolve/main/model.safetensors", "size": "406M", "vram": "2.5GB"},
        {"id": "t5-11b", "name": "t5-11b", "url": "https://huggingface.co/google/t5-11b/resolve/main/model.safetensors", "size": "11B", "vram": "22.0GB"}
    ],
    "MULTIMODAL - Any & Advanced": [
        {"id": "whisper-v3", "name": "whisper-large-v3", "url": "https://huggingface.co/openai/whisper-large-v3/resolve/main/model.safetensors", "size": "1.5B", "vram": "6.0GB"},
        {"id": "qwen2-vl-7b", "name": "Qwen2-VL-7B-Instruct", "url": "https://huggingface.co/Qwen/Qwen2-VL-7B-Instruct/resolve/main/model.safetensors", "size": "7B", "vram": "14.0GB"},
        {"id": "videollama3", "name": "VideoLLaMA3-7B", "url": "https://huggingface.co/DAMO-NLP-SG/VideoLLaMA3-7B/resolve/main/model.safetensors", "size": "7B", "vram": "14.0GB"},
        {"id": "minimax-m2", "name": "MiniMax-M2", "url": "https://huggingface.co/MiniMaxAI/MiniMax-M2/resolve/main/model.safetensors", "size": "10B", "vram": "20.0GB"},
        {"id": "llama-3.2-11b-vis", "name": "llama-3.2-11b-vision", "url": "https://huggingface.co/meta-llama/llama-3.2-11b-vision/resolve/main/model.safetensors", "size": "11B", "vram": "22.0GB"}
    ]
}
|