pullama-cli 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pullama/__init__.py
ADDED
|
File without changes
|
pullama/__main__.py
ADDED
|
@@ -0,0 +1,818 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Pullama — Resumable Ollama Model Downloader & Installer
|
|
4
|
+
https://github.com/Steve-sy/pullama
|
|
5
|
+
---------------------------------------------------
|
|
6
|
+
Fix for ollama pull TLS handshake timeouts and disconnects.
|
|
7
|
+
Downloads models with resume support and installs them directly into Ollama.
|
|
8
|
+
|
|
9
|
+
pullama pull tinyllama:latest
|
|
10
|
+
pullama get gemma2:2b
|
|
11
|
+
pullama install --model gemma2:2b --blobsPath ./downloads
|
|
12
|
+
pullama list
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
import argparse
|
|
16
|
+
import sys
|
|
17
|
+
import os
|
|
18
|
+
import json
|
|
19
|
+
import urllib.request
|
|
20
|
+
import urllib.error
|
|
21
|
+
import shutil
|
|
22
|
+
import hashlib
|
|
23
|
+
import platform
|
|
24
|
+
import time
|
|
25
|
+
import subprocess
|
|
26
|
+
import datetime
|
|
27
|
+
|
|
28
|
+
# Constants
|
|
29
|
+
DEFAULT_REGISTRY = "registry.ollama.ai"
|
|
30
|
+
BLOBS_PATTERN = "blobs"
|
|
31
|
+
PULLAMA_DIR = os.path.expanduser("~/.pullama")
|
|
32
|
+
STATE_FILE = os.path.join(PULLAMA_DIR, "state.json")
|
|
33
|
+
VERSION = "1.0.0"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# ─── Colors ───────────────────────────────────────────────────────────────────
|
|
37
|
+
|
|
38
|
+
class Colors:
    """ANSI escape sequences used for colored terminal output."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset all attributes
    BOLD = '\033[1m'
    DIM = '\033[2m'
|
|
48
|
+
|
|
49
|
+
def print_success(msg):
    """Emit *msg* on stdout as a green success line with a check mark."""
    print("%s✔ %s%s" % (Colors.OKGREEN, msg, Colors.ENDC))
|
|
51
|
+
|
|
52
|
+
def print_info(msg):
    """Emit *msg* on stdout as a cyan informational line."""
    print("%sℹ %s%s" % (Colors.OKCYAN, msg, Colors.ENDC))
|
|
54
|
+
|
|
55
|
+
def print_warning(msg):
    """Emit *msg* on stdout as a yellow warning line."""
    print("%s⚠ %s%s" % (Colors.WARNING, msg, Colors.ENDC))
|
|
57
|
+
|
|
58
|
+
def print_error(msg):
    """Emit *msg* on stderr as a red error line."""
    print("%s✖ %s%s" % (Colors.FAIL, msg, Colors.ENDC), file=sys.stderr)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# ─── State ────────────────────────────────────────────────────────────────────
|
|
63
|
+
|
|
64
|
+
def ensure_pullama_dir():
    """Create the pullama state directory (~/.pullama) if missing."""
    if not os.path.isdir(PULLAMA_DIR):
        os.makedirs(PULLAMA_DIR, exist_ok=True)
|
|
66
|
+
|
|
67
|
+
def load_state():
    """Load the persisted model-tracking state.

    Any failure — missing file, unreadable file, invalid JSON — yields an
    empty dict so callers never have to handle errors themselves.
    """
    ensure_pullama_dir()
    try:
        with open(STATE_FILE, "r", encoding="utf-8") as fh:
            return json.load(fh)
    except Exception:
        # Missing or corrupt state is treated as "nothing tracked yet".
        return {}
|
|
76
|
+
|
|
77
|
+
def save_state(state):
    """Persist *state* to STATE_FILE as indented JSON."""
    ensure_pullama_dir()
    payload = json.dumps(state, indent=2)
    with open(STATE_FILE, "w", encoding="utf-8") as fh:
        fh.write(payload)
|
|
81
|
+
|
|
82
|
+
def update_model_state(state, model_key, **kwargs):
    """Merge *kwargs* into state[model_key], creating the entry if absent."""
    state.setdefault(model_key, {}).update(kwargs)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
# ─── Utilities ────────────────────────────────────────────────────────────────
|
|
89
|
+
|
|
90
|
+
def format_size(size_in_bytes):
    """Render a byte count as a human-readable string (e.g. '2.0 KB').

    Non-numeric or negative input returns '?'. Whole bytes are shown
    without a decimal; larger units get one decimal place.
    """
    if not isinstance(size_in_bytes, (int, float)) or size_in_bytes < 0:
        return "?"
    value = float(size_in_bytes)
    if value < 1024.0:
        return f"{int(value)} B"
    for unit in ('KB', 'MB', 'GB', 'TB'):
        value /= 1024.0
        if value < 1024.0:
            return f"{value:.1f} {unit}"
    return f"{value / 1024.0:.1f} PB"
|
|
100
|
+
|
|
101
|
+
def format_eta(seconds):
    """Format a remaining-time estimate in compact h/m/s notation.

    Negative input yields the placeholder '--:--'.
    """
    if seconds < 0:
        return "--:--"
    total = int(seconds)
    hours, rem = divmod(total, 3600)
    minutes, secs = divmod(rem, 60)
    if hours:
        return f"{hours}h{minutes:02d}m"
    if minutes:
        return f"{minutes}m{secs:02d}s"
    return f"{secs}s"
|
|
113
|
+
|
|
114
|
+
def parse_model_name(model_name_input):
    """Split 'namespace/model:tag' into (namespace, model, tag).

    The namespace defaults to 'library' and the tag to 'latest' when
    either part is omitted.
    """
    base, colon, tag = model_name_input.partition(":")
    if not colon:
        tag = "latest"
    namespace, slash, model = base.partition("/")
    if not slash:
        namespace, model = "library", base
    return namespace, model, tag
|
|
128
|
+
|
|
129
|
+
def get_default_models_path():
    """Auto-detect where Ollama actually stores models.

    Resolution order:
      1. The OLLAMA_MODELS environment variable, if set.
      2. The first platform-specific candidate directory that already
         contains a 'blobs' or 'manifests' subdirectory.
      3. A fallback path in the user's home directory.
    """
    # Priority 1: explicit env var
    env_path = os.environ.get("OLLAMA_MODELS")
    if env_path:
        return os.path.expanduser(env_path)

    system = platform.system().lower()
    if system == "windows":
        base = os.environ.get("USERPROFILE", os.path.expanduser("~"))
        candidates = [os.path.join(base, ".ollama", "models")]
    else:
        candidates = [
            "/usr/share/ollama/.ollama/models",  # system service (Linux official install)
            "/var/lib/ollama/.ollama/models",  # some Linux distros
            os.path.expanduser("~/.ollama/models"),  # user install / macOS
        ]

    # Return the first path that already has Ollama's directory structure
    for path in candidates:
        if os.path.isdir(os.path.join(path, "blobs")) or \
           os.path.isdir(os.path.join(path, "manifests")):
            return path

    # Fall back to user home path
    if system == "windows":
        return candidates[0]
    return os.path.expanduser("~/.ollama/models")
|
|
157
|
+
|
|
158
|
+
def verify_ollama_sees_model(model_key):
    """Check if the Ollama service can see the installed model via its API.

    Returns True/False when the local API (port 11434) answered, or None
    when the service is unreachable (e.g. not running). The check is a
    loose substring match on the model's base name, not an exact match.
    """
    try:
        req = urllib.request.Request("http://localhost:11434/api/tags")
        with urllib.request.urlopen(req, timeout=3) as r:
            data = json.loads(r.read())
        names = [m.get("name", "") for m in data.get("models", [])]
        # model_key may be "gemma2:2b" — check if it appears in any listed name
        return any(model_key.split(":")[0] in n for n in names)
    except Exception:
        return None  # Ollama not running or unreachable
|
|
169
|
+
|
|
170
|
+
def get_models_path(explicit_path=None):
    """Resolve the Ollama models directory, preferring an explicit override."""
    if not explicit_path:
        return get_default_models_path()
    return os.path.expanduser(explicit_path)
|
|
174
|
+
|
|
175
|
+
def get_file_hash(filepath):
    """Return the hex SHA-256 digest of the file at *filepath*.

    Reads in 4 MiB chunks so arbitrarily large blobs stay memory-bounded.
    """
    digest = hashlib.sha256()
    chunk_size = 4096 * 1024
    with open(filepath, 'rb') as fh:
        while True:
            block = fh.read(chunk_size)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
|
|
181
|
+
|
|
182
|
+
def verify_blob(filepath, expected_digest):
    """Return True when the file's SHA-256 matches *expected_digest*.

    *expected_digest* uses the registry form 'sha256:<hex>'.
    """
    want = expected_digest.replace("sha256:", "")
    have = get_file_hash(filepath)
    return have == want
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
# ─── Download Engine ──────────────────────────────────────────────────────────
|
|
190
|
+
|
|
191
|
+
def check_aria2():
    """Return True when the aria2c binary is available on PATH."""
    return bool(shutil.which("aria2c"))
|
|
193
|
+
|
|
194
|
+
def print_aria2_hint():
    """Suggest the platform-appropriate command for installing aria2."""
    hints = {
        "linux": "sudo apt install aria2",
        "darwin": "brew install aria2",
    }
    # Anything that is not Linux/macOS gets the Windows winget command.
    install_cmd = hints.get(platform.system().lower(), "winget install aria2")
    print(f"{Colors.DIM}⚡ Tip: Install aria2 for faster downloads: {install_cmd}{Colors.ENDC}\n")
|
|
203
|
+
|
|
204
|
+
def _render_progress(label, downloaded, total, speed_bps, elapsed):
    """Render a single-line, in-place progress bar sized to the terminal.

    Layout is "{prefix}{label}{sep}{bar}{right}" where *right* packs the
    size, speed and ETA. The label is truncated — and the bar shrunk — on
    narrow terminals so the line never wraps. The line is written with a
    leading '\r' and no newline so repeated calls overwrite each other.
    """
    try:
        cols = os.get_terminal_size().columns
    except OSError:
        cols = 80  # not attached to a TTY — assume a classic 80-col width

    pct = downloaded / total if total > 0 else 0

    # Build fixed right side: " 165/261 MB 2.6 MB/s ETA 58s"
    size_str = f"{format_size(downloaded)}/{format_size(total)}"
    speed_str = f"{format_size(speed_bps)}/s" if speed_bps > 0 else ""
    remaining = total - downloaded
    eta_str = f"ETA {format_eta(remaining / speed_bps)}" if speed_bps > 0 else ""
    right = f" {size_str} {speed_str} {eta_str}"

    # Calculate how much space is left for label + bar
    # Layout: "  {label}  {bar}{right}" — prefix=2, sep=2
    bar_width = 18
    prefix = "  "
    sep = "  "
    available = cols - len(prefix) - len(sep) - bar_width - len(right) - 1
    label_display = label[:max(0, available)]

    # Shrink bar if terminal is very narrow
    if available < 0:
        bar_width = max(5, cols - len(prefix) - len(right) - 2)
        label_display = ""

    filled = int(bar_width * pct)
    bar = "█" * filled + "░" * (bar_width - filled)

    # Build plain version to measure true visible length
    if label_display:
        plain = f"{prefix}{label_display}{sep}{bar}{right}"
    else:
        plain = f"{prefix}{bar}{right}"

    # Pad to the full terminal width so leftovers from a longer previous
    # line are erased.
    trailing = " " * max(0, cols - len(plain) - 1)

    # Build colored version (same structure, ANSI codes don't affect visible width)
    if label_display:
        colored = (
            f"{prefix}{Colors.DIM}{label_display}{Colors.ENDC}{sep}"
            f"{Colors.OKCYAN}{bar}{Colors.ENDC}"
            f"{Colors.BOLD}{right}{Colors.ENDC}"
        )
    else:
        colored = (
            f"{prefix}{Colors.OKCYAN}{bar}{Colors.ENDC}"
            f"{Colors.BOLD}{right}{Colors.ENDC}"
        )

    print(f"\r{colored}{trailing}", end="", flush=True)
|
|
257
|
+
|
|
258
|
+
def download_with_urllib(url, dest_path, expected_size, label=""):
    """Download with resume support using HTTP Range requests.

    Returns True when the file on disk reaches *expected_size* (or the
    server reports the range as already satisfied), False on any
    recoverable failure. KeyboardInterrupt is re-raised so the caller can
    abort the whole pull.
    """
    existing = os.path.getsize(dest_path) if os.path.exists(dest_path) else 0

    if existing >= expected_size:
        return True  # already complete

    headers = {}
    if existing > 0:
        # Ask the server for only the remaining bytes.
        headers["Range"] = f"bytes={existing}-"

    req = urllib.request.Request(url, headers=headers)

    try:
        response = urllib.request.urlopen(req, timeout=30)
    except urllib.error.HTTPError as e:
        if e.code == 416:
            # Range not satisfiable — file already fully downloaded
            return True
        print_error(f"HTTP {e.code} downloading {label}")
        return False
    except urllib.error.URLError as e:
        print_error(f"Connection error: {e.reason}")
        return False

    chunk_size = 65536  # 64 KB
    downloaded = existing
    start_time = time.time()
    last_print = start_time

    try:
        # FIX: close the HTTP response even when the copy loop fails or
        # completes — the original never closed it, leaking the socket.
        with response, open(dest_path, "ab" if existing > 0 else "wb") as f:
            while True:
                chunk = response.read(chunk_size)
                if not chunk:
                    break
                f.write(chunk)
                downloaded += len(chunk)

                now = time.time()
                elapsed = now - start_time
                # Throttle progress redraws to roughly 3 per second.
                if now - last_print >= 0.3:
                    speed = (downloaded - existing) / elapsed if elapsed > 0 else 0
                    _render_progress(label, downloaded, expected_size, speed, elapsed)
                    last_print = now
    except KeyboardInterrupt:
        print()
        raise
    except Exception as e:
        print()
        print_error(f"Download interrupted: {e}")
        return False

    # Final progress line
    elapsed = time.time() - start_time
    speed = (downloaded - existing) / elapsed if elapsed > 0 else 0
    _render_progress(label, downloaded, expected_size, speed, elapsed)
    print()
    return downloaded >= expected_size
|
|
317
|
+
|
|
318
|
+
def download_with_aria2(url, dest_path, expected_size, label=""):
    """Download using aria2c for maximum reliability on slow connections."""
    target_dir = os.path.dirname(dest_path)
    target_name = os.path.basename(dest_path)

    # Resume-friendly, multi-connection, retry-heavy aria2 invocation.
    cmd = ["aria2c"]
    cmd += [
        "--continue=true",
        "--max-connection-per-server=4",
        "--split=4",
        "--min-split-size=1M",
        "--timeout=60",
        "--retry-wait=5",
        "--max-tries=10",
    ]
    cmd += ["--dir", target_dir, "--out", target_name, url]

    print(f" {Colors.DIM}{label}{Colors.ENDC} {Colors.OKCYAN}[aria2]{Colors.ENDC} downloading...", flush=True)

    try:
        proc = subprocess.run(cmd, capture_output=True, text=True)
        if proc.returncode == 0:
            print(f"\r {Colors.DIM}{label}{Colors.ENDC} {Colors.OKGREEN}✔{Colors.ENDC}{' ' * 60}")
            return True
        print_error(f"aria2 failed: {proc.stderr.strip()}")
        return False
    except Exception as e:
        print_error(f"aria2 error: {e}")
        return False
|
|
350
|
+
|
|
351
|
+
def download_blob(url, dest_path, expected_size, label="", use_aria2=False):
    """Download a blob, skipping if already complete.

    Dispatches to aria2c when *use_aria2* is True, otherwise to the
    built-in urllib downloader; both resume partial files. Returns True
    on success.
    """
    existing = os.path.getsize(dest_path) if os.path.exists(dest_path) else 0
    if existing >= expected_size:
        print(f" {Colors.DIM}{label}{Colors.ENDC} {Colors.OKGREEN}✔ already complete{Colors.ENDC}")
        return True

    if existing > 0:
        print_info(f"Resuming {label} from {format_size(existing)} / {format_size(expected_size)}")

    os.makedirs(os.path.dirname(dest_path), exist_ok=True)

    if use_aria2:
        return download_with_aria2(url, dest_path, expected_size, label)
    else:
        return download_with_urllib(url, dest_path, expected_size, label)
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
# ─── Manifest Fetching ────────────────────────────────────────────────────────
|
|
370
|
+
|
|
371
|
+
def fetch_manifest(namespace, model, tag):
    """Fetch a model's manifest from the registry.

    Returns a tuple (parsed_json, raw_text, url). On HTTP or connection
    errors it prints a message and exits the process with status 1 — it
    never returns a failure value.
    """
    url = f"https://{DEFAULT_REGISTRY}/v2/{namespace}/{model}/manifests/{tag}"
    req = urllib.request.Request(url, headers={
        "Accept": "application/vnd.docker.distribution.manifest.v2+json"
    })
    try:
        with urllib.request.urlopen(req, timeout=30) as response:
            raw = response.read().decode("utf-8")
            return json.loads(raw), raw, url
    except urllib.error.HTTPError as e:
        if e.code == 404:
            print_error(f"Model not found on registry.")
        else:
            print_error(f"HTTP {e.code} fetching manifest.")
        sys.exit(1)
    except urllib.error.URLError as e:
        print_error(f"Connection error: {e.reason}")
        sys.exit(1)
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
# ─── Commands ─────────────────────────────────────────────────────────────────
|
|
392
|
+
|
|
393
|
+
def cmd_get(args):
    """Print the manifest URL, per-blob download URLs and ready-made curl
    commands for a model, without downloading anything.

    Fix: removed the unused locals `model_key` and the `out_name` computed
    (but never used) in the URL-listing loop.
    """
    namespace, model, tag = parse_model_name(args.model)

    print(f"\n{Colors.BOLD}Fetching {Colors.OKCYAN}{args.model}{Colors.ENDC}{Colors.BOLD} from registry...{Colors.ENDC}")

    data, _raw, manifest_url = fetch_manifest(namespace, model, tag)

    # The config blob is downloaded just like a layer, so treat it as one.
    layers = data.get("layers", [])
    config = data.get("config")
    if config:
        layers.append(config)

    total_size = sum(l.get("size", 0) for l in layers if l.get("digest"))

    print(f"\n {Colors.BOLD}Model:{Colors.ENDC} {Colors.OKCYAN}{args.model}{Colors.ENDC}")
    print(f" {Colors.BOLD}Files:{Colors.ENDC} {len(layers)} blobs • {Colors.BOLD}Total:{Colors.ENDC} {format_size(total_size)}")
    print(f"\n {Colors.BOLD}Manifest URL:{Colors.ENDC}")
    print(f" {Colors.DIM}{manifest_url}{Colors.ENDC}")
    print(f"\n {Colors.BOLD}Download URLs:{Colors.ENDC}")

    for i, layer in enumerate(layers, 1):
        digest = layer.get("digest")
        size = layer.get("size", 0)
        if digest:
            blob_url = f"https://{DEFAULT_REGISTRY}/v2/{namespace}/{model}/blobs/{digest}"
            print(f" [{i}] {Colors.DIM}{format_size(size):>10}{Colors.ENDC} {blob_url}")

    print(f"\n {Colors.BOLD}Curl commands:{Colors.ENDC}")
    print(f" {Colors.DIM}curl -L \"{manifest_url}\" -o \"manifest\"{Colors.ENDC}")
    for layer in layers:
        digest = layer.get("digest")
        if digest:
            blob_url = f"https://{DEFAULT_REGISTRY}/v2/{namespace}/{model}/blobs/{digest}"
            # Ollama stores blobs as 'sha256-<hex>' file names.
            out_name = digest.replace(":", "-")
            print(f" {Colors.DIM}curl -L \"{blob_url}\" -o \"{out_name}\"{Colors.ENDC}")

    print(f"\n {Colors.OKGREEN}Tip:{Colors.ENDC} Run {Colors.BOLD}pullama pull {args.model}{Colors.ENDC} to download & install automatically.\n")
|
|
432
|
+
|
|
433
|
+
|
|
434
|
+
def cmd_pull(args):
    """Download every blob of a model and, when possible, register it
    directly with a local Ollama installation.

    Two modes:
      * Mode A (Ollama installed, or --modelsPath given): blobs and the
        manifest are written straight into Ollama's models directory.
      * Mode B (no Ollama): everything is saved under ~/pullama-models/
        for a later `pullama install`.
    Exits with status 1 on permission, download or verification failures.
    """
    namespace, model, tag = parse_model_name(args.model)
    model_key = f"{namespace}/{model}:{tag}" if namespace != "library" else f"{model}:{tag}"

    # Detect whether Ollama is installed
    ollama_installed = shutil.which("ollama") is not None
    explicit_path = getattr(args, 'modelsPath', None)

    if ollama_installed or explicit_path:
        # Mode A: Ollama is installed — download directly into its models folder
        ollama_mode = True
        models_path = get_models_path(explicit_path)
        blobs_dir = os.path.join(models_path, "blobs")

        # Check write permission before doing anything
        os.makedirs(blobs_dir, exist_ok=True) if os.path.isdir(models_path) else None
        test_path = blobs_dir if os.path.isdir(blobs_dir) else models_path
        if not os.access(test_path, os.W_OK):
            print_error(f"No write permission to: {models_path}")
            print(f"\n Ollama's models folder requires elevated permissions.")
            if platform.system().lower() == "windows":
                print(f" Re-run this terminal as Administrator, then:")
                print(f"\n {Colors.BOLD}pullama pull {args.model}{Colors.ENDC}\n")
            else:
                print(f" Re-run with sudo:\n")
                print(f" {Colors.BOLD}sudo pullama pull {args.model}{Colors.ENDC}\n")
            sys.exit(1)
    else:
        # Mode B: Ollama not installed — download to ~/pullama-models/model-tag/
        ollama_mode = False
        safe_name = model_key.replace(":", "-").replace("/", "_")
        models_path = os.path.expanduser(f"~/pullama-models/{safe_name}")
        blobs_dir = models_path
        print_warning(f"Ollama not found — downloading to: {models_path}")
        print(f" {Colors.DIM}Install Ollama from https://ollama.com then run pullama install.{Colors.ENDC}\n")

    use_aria2 = check_aria2()
    if not use_aria2:
        print_aria2_hint()
        print()

    print(f"\n{Colors.BOLD}Pulling {Colors.OKCYAN}{args.model}{Colors.ENDC}{Colors.BOLD}...{Colors.ENDC}\n")

    # Fetch manifest
    print(f" Fetching manifest...", end="", flush=True)
    data, raw_manifest, manifest_url = fetch_manifest(namespace, model, tag)
    print(f"\r Fetching manifest...{' ' * 20} {Colors.OKGREEN}✔{Colors.ENDC}")

    # The config blob is fetched the same way as layers, so fold it in.
    layers = data.get("layers", [])
    config = data.get("config")
    if config:
        layers.append(config)

    total_size = sum(l.get("size", 0) for l in layers if l.get("digest"))
    blobs_count = len([l for l in layers if l.get("digest")])

    print(f" {Colors.DIM}{blobs_count} files • {format_size(total_size)} total{Colors.ENDC}\n")

    # Save state (so `pullama list` can report progress before completion)
    state = load_state()
    update_model_state(state, model_key,
                       namespace=namespace,
                       model=model,
                       tag=tag,
                       total_size=total_size,
                       manifest_url=manifest_url,
                       installed=False,
                       started_at=datetime.datetime.now().isoformat(),
                       blobs=[{"digest": l["digest"], "size": l.get("size", 0)} for l in layers if l.get("digest")]
                       )
    save_state(state)

    # Download blobs
    os.makedirs(blobs_dir, exist_ok=True)

    for i, layer in enumerate(layers, 1):
        digest = layer.get("digest")
        size = layer.get("size", 0)
        if not digest:
            continue

        blob_url = f"https://{DEFAULT_REGISTRY}/v2/{namespace}/{model}/blobs/{digest}"
        out_name = digest.replace(":", "-")
        dest_path = os.path.join(blobs_dir, out_name)
        label = f"[{i}/{blobs_count}] {out_name[:20]}..."

        success = download_blob(blob_url, dest_path, size, label=label, use_aria2=use_aria2)

        if not success:
            print_error(f"Failed to download blob {digest}.")
            print_info(f"Run the same command again to resume: pullama pull {args.model}")
            sys.exit(1)

        # Verify SHA256 — a corrupted blob is deleted so the next run
        # re-downloads it instead of resuming garbage.
        print(f" {Colors.DIM}Verifying...{Colors.ENDC}", end="", flush=True)
        if not verify_blob(dest_path, digest):
            print(f"\r {Colors.FAIL}✖ Verification failed for {out_name[:30]}{Colors.ENDC}")
            os.remove(dest_path)
            print_error("Corrupted file removed. Run the command again to re-download.")
            sys.exit(1)
        print(f"\r{' ' * 40}\r", end="")

    if ollama_mode:
        # Write manifest into Ollama's folder (makes model visible to Ollama)
        print(f"\n Installing into Ollama...", end="", flush=True)
        manifest_dir = os.path.join(models_path, "manifests", DEFAULT_REGISTRY, namespace, model)
        os.makedirs(manifest_dir, exist_ok=True)
        manifest_dest = os.path.join(manifest_dir, tag)
        with open(manifest_dest, "w", encoding="utf-8") as f:
            f.write(raw_manifest)
        print(f"\r Installing into Ollama...{' ' * 10} {Colors.OKGREEN}✔{Colors.ENDC}")

        # Update state
        state = load_state()
        update_model_state(state, model_key, installed=True, models_path=models_path)
        save_state(state)

        # Verify Ollama can actually see the model (None == service unreachable)
        seen = verify_ollama_sees_model(model_key)
        if seen:
            print(f"\n{Colors.OKGREEN}{Colors.BOLD}✔ {args.model} is ready!{Colors.ENDC}")
            print(f" {Colors.DIM}Installed to: {models_path}{Colors.ENDC}")
            print(f" Run: {Colors.BOLD}ollama run {args.model}{Colors.ENDC}\n")
        elif seen is False:
            print(f"\n{Colors.WARNING}⚠ Installed to: {models_path}{Colors.ENDC}")
            print(f" But Ollama can't see the model — it may use a different models directory.")
            print(f" Find the correct path with:")
            print(f" {Colors.BOLD}ls /usr/share/ollama/.ollama/models{Colors.ENDC} (common on Linux)")
            print(f" Then re-run with:")
            print(f" {Colors.BOLD}pullama pull {args.model} --modelsPath <correct-path>{Colors.ENDC}\n")
        else:
            print(f"\n{Colors.OKGREEN}{Colors.BOLD}✔ {args.model} is ready!{Colors.ENDC}")
            print(f" {Colors.DIM}Installed to: {models_path}{Colors.ENDC}")
            print(f" Run: {Colors.BOLD}ollama run {args.model}{Colors.ENDC}\n")
    else:
        # No Ollama — write manifest into the local folder alongside blobs
        manifest_dest = os.path.join(models_path, "manifest")
        with open(manifest_dest, "w", encoding="utf-8") as f:
            f.write(raw_manifest)

        state = load_state()
        update_model_state(state, model_key, installed=False, models_path=models_path)
        save_state(state)

        print(f"\n{Colors.OKGREEN}{Colors.BOLD}✔ {args.model} downloaded!{Colors.ENDC}")
        print(f" {Colors.DIM}Saved to: {models_path}{Colors.ENDC}")
        print(f"\n Once Ollama is installed, run:")
        print(f" {Colors.BOLD}pullama install --model {args.model} --blobsPath {models_path}{Colors.ENDC}")
        print(f"\n {Colors.DIM}Or copy that folder to another machine and run the same command.{Colors.ENDC}\n")
|
|
583
|
+
|
|
584
|
+
|
|
585
|
+
def cmd_list(args):
    """Print a table of all tracked models with size, download progress
    (measured from the blob files actually on disk) and install status
    (manifest present in the detected Ollama models directory)."""
    state = load_state()

    if not state:
        print_info("No models tracked yet. Run: pullama pull <model>")
        return

    models_path = get_default_models_path()

    # Fixed column widths for the table layout.
    col_model = 26
    col_size = 10
    col_dl = 18
    col_installed = 12

    header = (
        f" {'Model':<{col_model}}"
        f"{'Size':>{col_size}}"
        f" {'Downloaded':<{col_dl}}"
        f"{'Installed':<{col_installed}}"
    )
    sep = " " + "─" * (col_model + col_size + col_dl + col_installed + 2)

    print(f"\n{Colors.BOLD}{header}{Colors.ENDC}")
    print(sep)

    for model_key, info in sorted(state.items()):
        total_size = info.get("total_size", 0)
        blobs = info.get("blobs", [])
        namespace = info.get("namespace", "library")
        model_name = info.get("model", "")
        tag = info.get("tag", "latest")

        # Calculate actual downloaded bytes from disk
        blobs_dir = os.path.join(models_path, "blobs")
        downloaded = 0
        for blob in blobs:
            digest = blob.get("digest", "")
            expected = blob.get("size", 0)
            out_name = digest.replace(":", "-")
            fpath = os.path.join(blobs_dir, out_name)
            if os.path.exists(fpath):
                # Cap at the expected size so over-long files can't push
                # the total past 100%.
                downloaded += min(os.path.getsize(fpath), expected)

        # Check if actually installed (manifest exists on disk)
        manifest_path = os.path.join(models_path, "manifests", DEFAULT_REGISTRY, namespace, model_name, tag)
        is_installed = os.path.exists(manifest_path)

        # Format columns
        if total_size > 0:
            pct = downloaded / total_size * 100
            if downloaded >= total_size:
                dl_str = f"{format_size(downloaded)} {Colors.OKGREEN}✔{Colors.ENDC}"
            else:
                dl_str = f"{format_size(downloaded)} {Colors.WARNING}{pct:.0f}%{Colors.ENDC}"
        else:
            dl_str = Colors.DIM + "?" + Colors.ENDC

        inst_str = f"{Colors.OKGREEN}✔ yes{Colors.ENDC}" if is_installed else f"{Colors.FAIL}✗ no{Colors.ENDC}"
        size_str = format_size(total_size)

        print(
            f" {Colors.BOLD}{model_key:<{col_model}}{Colors.ENDC}"
            f"{size_str:>{col_size}}"
            f" {dl_str:<{col_dl + 10}}"  # extra for ANSI codes
            f"{inst_str}"
        )

    print()
    incomplete = [k for k, v in state.items() if not v.get("installed")]
    if incomplete:
        print(f" {Colors.DIM}Tip: Resume incomplete downloads with: pullama pull <model>{Colors.ENDC}\n")
|
|
656
|
+
|
|
657
|
+
|
|
658
|
+
def cmd_install(args):
    """Install a model from manually-downloaded files into Ollama's store.

    Expects args.blobsPath to contain a 'manifest' file plus the blob
    files; copies everything into the detected (or --modelsPath) Ollama
    models directory, normalizing blob names to 'sha256-<hex>', and
    records the install in pullama's state file. Exits with status 1 on
    missing paths, missing manifest, insufficient permissions, or when
    the user declines to overwrite an existing install.

    Fixes:
      * The overwrite prompt called print_warning(msg, end="") — a
        TypeError, since print_warning takes a single argument.
      * The per-file messages printed the literal "(unknown)" instead of
        the actual filename.
    """
    namespace, model, tag = parse_model_name(args.model)
    model_key = f"{namespace}/{model}:{tag}" if namespace != "library" else f"{model}:{tag}"
    blobs_path = os.path.expanduser(args.blobsPath)

    print_info(f"Installing model: {Colors.BOLD}{args.model}{Colors.ENDC}")

    if not os.path.exists(blobs_path):
        print_error(f"Path '{blobs_path}' does not exist.")
        sys.exit(1)

    manifest_source = os.path.join(blobs_path, "manifest")
    if not os.path.isfile(manifest_source):
        print_error(f"No 'manifest' file found in '{blobs_path}'.")
        sys.exit(1)

    models_path = get_models_path(getattr(args, 'modelsPath', None))

    # Check if we need elevated permissions (system path)
    if not models_path.startswith(os.path.expanduser("~")):
        system = platform.system().lower()
        if system != "windows" and os.geteuid() != 0:
            print_error("System models path requires sudo. Re-run with: sudo pullama install ...")
            sys.exit(1)

    # Copy manifest
    manifest_dest_dir = os.path.join(models_path, "manifests", DEFAULT_REGISTRY, namespace, model)
    os.makedirs(manifest_dest_dir, exist_ok=True)
    manifest_dest = os.path.join(manifest_dest_dir, tag)

    if os.path.exists(manifest_dest):
        # Print the prompt inline (print_warning takes only one argument
        # and always appends a newline).
        print(f"{Colors.WARNING}⚠ Model already installed. Overwrite? (Y/n) {Colors.ENDC}", end="")
        choice = input("").strip().upper()
        if choice not in ("Y", ""):
            print_error("Installation aborted.")
            sys.exit(1)

    shutil.copy2(manifest_source, manifest_dest)
    print_success("Manifest copied.")

    # Copy blobs
    blobs_dest_dir = os.path.join(models_path, "blobs")
    os.makedirs(blobs_dest_dir, exist_ok=True)

    print_info("Copying blobs (this may take a while)...")
    for filename in os.listdir(blobs_path):
        if filename == "manifest" or os.path.isdir(os.path.join(blobs_path, filename)):
            continue

        file_source = os.path.join(blobs_path, filename)

        # Normalize blob file names to Ollama's 'sha256-<hex>' convention.
        if "sha256" not in filename:
            print_info(f"Computing SHA256 for {filename}...")
            hashed_name = "sha256-" + get_file_hash(file_source)
        elif filename.startswith("sha256-"):
            hashed_name = filename
        elif filename.startswith("sha256:"):
            hashed_name = filename.replace("sha256:", "sha256-", 1)
        else:
            hashed_name = filename

        file_dest = os.path.join(blobs_dest_dir, hashed_name)
        print_info(f" {filename} → {hashed_name}")
        shutil.copy2(file_source, file_dest)

    # Update state
    state = load_state()
    update_model_state(state, model_key, installed=True,
                       namespace=namespace, model=model, tag=tag)
    save_state(state)

    print_success(f"Model installed successfully!")
    print_info(f"Run: {Colors.BOLD}ollama run {args.model}{Colors.ENDC}")
|
|
731
|
+
|
|
732
|
+
|
|
733
|
+
# ─── Help Banner ──────────────────────────────────────────────────────────────
|
|
734
|
+
|
|
735
|
+
def print_main_help():
    """Print the top-level CLI banner: usage, commands, quick-start examples,
    resume hint, and the accepted model-name formats.

    Used instead of argparse's auto-help (the root parser is built with
    add_help=False) so the output can be colorized and fully customized.
    """
    # One multi-line f-string so the whole banner is emitted in a single write.
    # NOTE(review): assumes Colors.DIM is defined alongside the other Colors
    # attributes (not visible in this chunk) — confirm against the class.
    print(f"""
{Colors.BOLD}{Colors.HEADER}╔══════════════════════════════════════╗
║           Pullama 🦙  v{VERSION}            ║
║   Resumable Ollama Model Downloader  ║
╚══════════════════════════════════════╝{Colors.ENDC}

{Colors.DIM}Fix for: ollama pull TLS handshake timeout, disconnects, slow connections{Colors.ENDC}

{Colors.BOLD}USAGE{Colors.ENDC}
  pullama <command> [options]

{Colors.BOLD}COMMANDS{Colors.ENDC}
  {Colors.OKGREEN}pull <model>{Colors.ENDC}       Download & install a model (resume supported)
  {Colors.OKGREEN}get <model>{Colors.ENDC}        Print direct download URLs only
  {Colors.OKGREEN}install{Colors.ENDC} [options]  Install from manually downloaded files
  {Colors.OKGREEN}list{Colors.ENDC}               Show all tracked models and their status

{Colors.BOLD}QUICK START{Colors.ENDC}
  {Colors.OKCYAN}pullama pull tinyllama:latest{Colors.ENDC}
  {Colors.OKCYAN}pullama pull gemma2:2b{Colors.ENDC}
  {Colors.OKCYAN}pullama pull deepseek-r1:7b{Colors.ENDC}

{Colors.BOLD}RESUME{Colors.ENDC}
  Just run the same pull command again — it resumes automatically.

{Colors.BOLD}MODEL NAME FORMAT{Colors.ENDC}
  <model>:<tag>              Official  → gemma2:2b
  <namespace>/<model>:<tag>  Community → huihui_ai/deepseek-r1:8b
""")
|
|
765
|
+
|
|
766
|
+
|
|
767
|
+
# ─── Main ─────────────────────────────────────────────────────────────────────
|
|
768
|
+
|
|
769
|
+
def main():
    """CLI entry point.

    Shows the custom banner for bare/`-h` invocations, otherwise builds the
    argparse command tree (pull / get / install / list) and dispatches to the
    subcommand's handler. A Ctrl-C during a download exits cleanly with a
    resume hint rather than a traceback.
    """
    # Bare invocation or explicit help → custom colorized banner, not argparse's.
    argv = sys.argv
    if len(argv) == 1 or argv[1] in ("-h", "--help"):
        print_main_help()
        sys.exit(0)

    root = argparse.ArgumentParser(
        prog="pullama",
        description="Pullama — Resumable Ollama Model Downloader",
        add_help=False,  # top-level help is handled above via print_main_help()
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    commands = root.add_subparsers(dest="command")
    commands.required = True  # a subcommand must be given

    # pull — download & install in one step
    pull_cmd = commands.add_parser("pull", help="Download & install a model")
    pull_cmd.add_argument("model", help="Model name, e.g. tinyllama:latest")
    pull_cmd.add_argument(
        "--modelsPath",
        default=None,
        metavar="PATH",
        help="Override Ollama models directory",
    )
    pull_cmd.set_defaults(func=cmd_pull)

    # get — print direct URLs only (for manual/external downloaders)
    get_cmd = commands.add_parser("get", help="Print direct download URLs")
    get_cmd.add_argument("model", help="Model name, e.g. gemma2:2b")
    get_cmd.set_defaults(func=cmd_get)

    # install — install from files fetched out-of-band
    install_cmd = commands.add_parser(
        "install", help="Install from downloaded files"
    )
    install_cmd.add_argument("--model", required=True, metavar="MODEL")
    install_cmd.add_argument(
        "--blobsPath",
        required=True,
        metavar="PATH",
        help="Folder with manifest + blob files",
    )
    install_cmd.add_argument(
        "--modelsPath",
        default=None,
        metavar="PATH",
        help="Override Ollama models directory",
    )
    install_cmd.set_defaults(func=cmd_install)

    # list — show tracked models and their download/install status
    list_cmd = commands.add_parser("list", help="Show all tracked models")
    list_cmd.set_defaults(func=cmd_list)

    args = root.parse_args()
    try:
        args.func(args)
    except KeyboardInterrupt:
        # Interrupted download: exit 0 with a resume hint; partial blobs
        # remain on disk so the next run can continue where it stopped.
        print(f"\n\n{Colors.WARNING}⏸ Download paused.{Colors.ENDC}")
        print(f"  Run the same command again to resume.\n")
        sys.exit(0)
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
# Run the CLI only when executed directly (e.g. `python -m pullama`),
# not when this module is imported.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: pullama-cli
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: Fix ollama pull TLS timeout and disconnects — resumable Ollama model downloader for slow connections
|
|
5
|
+
Project-URL: Homepage, https://github.com/Steve-sy/pullama
|
|
6
|
+
Project-URL: Issues, https://github.com/Steve-sy/pullama/issues
|
|
7
|
+
License: MIT
|
|
8
|
+
License-File: LICENSE
|
|
9
|
+
Keywords: ai,download,llm,model,offline,ollama,ollama-download-manager,ollama-offline,ollama-pull,ollama-pull-alternative,resume,slow-internet,tls-handshake-timeout,tls-timeout
|
|
10
|
+
Classifier: Development Status :: 4 - Beta
|
|
11
|
+
Classifier: Environment :: Console
|
|
12
|
+
Classifier: Intended Audience :: Developers
|
|
13
|
+
Classifier: Intended Audience :: Science/Research
|
|
14
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
15
|
+
Classifier: Operating System :: OS Independent
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
22
|
+
Classifier: Topic :: Utilities
|
|
23
|
+
Requires-Python: >=3.8
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
|
|
26
|
+
# Pullama 🦙 Ollama Model Downloader & Installer
|
|
27
|
+
|
|
28
|
+
**The ollama pull alternative built for slow, unstable, and limited internet connections.**
|
|
29
|
+
|
|
30
|
+
If `ollama pull` keeps restarting, times out, or disconnects mid-download — Pullama fixes that.
|
|
31
|
+
It resumes interrupted downloads automatically, supports parallel connections via aria2, and installs models directly into Ollama when done. It works on slow connections, unstable Wi-Fi, mobile data, and VPNs.
|
|
32
|
+
|
|
33
|
+
```
|
|
34
|
+
# Common Ollama pull errors Pullama solves:
|
|
35
|
+
net/http: TLS handshake timeout
|
|
36
|
+
context deadline exceeded
|
|
37
|
+
download interrupted, starting from scratch
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+

|
|
41
|
+
|
|
42
|
+
---
|
|
43
|
+
|
|
44
|
+
## Install
|
|
45
|
+
|
|
46
|
+
```bash
|
|
47
|
+
pip install pullama-cli
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
**For faster, more reliable downloads — install aria2 (optional but recommended):**
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
# Linux (Debian/Ubuntu)
|
|
54
|
+
sudo apt install aria2
|
|
55
|
+
|
|
56
|
+
# macOS
|
|
57
|
+
brew install aria2
|
|
58
|
+
|
|
59
|
+
# Windows
|
|
60
|
+
winget install aria2
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
With aria2, Pullama downloads Ollama models using multiple parallel connections — significantly faster and more resilient on slow or throttled connections.
|
|
64
|
+
|
|
65
|
+
---
|
|
66
|
+
|
|
67
|
+
## Quick Start
|
|
68
|
+
|
|
69
|
+
### Download & install ollama models in one command
|
|
70
|
+
```bash
|
|
71
|
+
pullama pull tinyllama:latest
|
|
72
|
+
pullama pull gemma2:2b
|
|
73
|
+
pullama pull deepseek-r1:7b
|
|
74
|
+
```
|
|
75
|
+
|
|
76
|
+
Pullama downloads the model and installs it into Ollama automatically. Then:
|
|
77
|
+
|
|
78
|
+
```bash
|
|
79
|
+
ollama run tinyllama:latest
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
---
|
|
83
|
+
|
|
84
|
+
## Resume interrupted Ollama model downloads
|
|
85
|
+
|
|
86
|
+
If your connection drops, just run the same command again — Pullama resumes from where it stopped:
|
|
87
|
+
|
|
88
|
+
```bash
|
|
89
|
+
pullama pull gemma2:2b
|
|
90
|
+
# ... connection drops at 60% ...
|
|
91
|
+
|
|
92
|
+
pullama pull gemma2:2b
|
|
93
|
+
# ℹ Resuming from 1.1 GB / 1.7 GB
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
No flags, no setup. Works after power cuts, network switches, sleep, or days later.
|
|
97
|
+
This is the core feature `ollama pull` is missing — once it disconnects, you lose everything.
|
|
98
|
+
|
|
99
|
+
---
|
|
100
|
+
|
|
101
|
+
## Pullama vs ollama pull
|
|
102
|
+
|
|
103
|
+
| Feature | `ollama pull` | `pullama` |
|
|
104
|
+
|---|---|---|
|
|
105
|
+
| Resume interrupted download | ❌ | ✅ |
|
|
106
|
+
| Parallel chunk downloads (aria2) | ❌ | ✅ |
|
|
107
|
+
| Offline / manual install | ❌ | ✅ |
|
|
108
|
+
| Download without Ollama installed | ❌ | ✅ |
|
|
109
|
+
| Export ollama model to another machine | ❌ | ✅ |
|
|
110
|
+
| Track download progress across sessions | ❌ | ✅ |
|
|
111
|
+
| Works on slow / unstable connections | ⚠️ unreliable | ✅ |
|
|
112
|
+
| SHA256 verification | ❌ | ✅ |
|
|
113
|
+
|
|
114
|
+
---
|
|
115
|
+
|
|
116
|
+
## Commands
|
|
117
|
+
|
|
118
|
+
### Track your downloads
|
|
119
|
+
|
|
120
|
+
```bash
|
|
121
|
+
pullama list
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
```
|
|
125
|
+
Model Size Downloaded Installed
|
|
126
|
+
────────────────────────────────────────────────────────────
|
|
127
|
+
tinyllama:latest 608 MB 608/608 MB ✔ ✔ yes
|
|
128
|
+
gemma2:2b 1.7 GB 856 MB/1.7 GB ✗ no
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
### Get direct download URLs
|
|
132
|
+
|
|
133
|
+
For users who prefer to download Ollama models manually with wget, curl, IDM, or any other download manager:
|
|
134
|
+
|
|
135
|
+
```bash
|
|
136
|
+
pullama get gemma2:2b
|
|
137
|
+
```
|
|
138
|
+
|
|
139
|
+
Prints direct blob URLs and ready-to-use curl commands — useful for downloading ollama models on a separate machine or through a proxy.
|
|
140
|
+
|
|
141
|
+
### Manual Ollama model installation
|
|
142
|
+
|
|
143
|
+
Already downloaded the files? Install them into Ollama without re-downloading:
|
|
144
|
+
|
|
145
|
+
```bash
|
|
146
|
+
pullama install --model gemma2:2b --blobsPath ./downloads
|
|
147
|
+
```
|
|
148
|
+
|
|
149
|
+
---
|
|
150
|
+
|
|
151
|
+
## Download Ollama models without ollama (offline install)
|
|
152
|
+
|
|
153
|
+
Pullama works even if Ollama isn't installed yet. It saves the model files locally so you can install them later — or copy them to another machine, or share them with a friend who has no internet:
|
|
154
|
+
|
|
155
|
+
```bash
|
|
156
|
+
pullama pull gemma2:2b
|
|
157
|
+
# ⚠ Ollama not found — downloading to: ~/pullama-models/gemma2-2b/
|
|
158
|
+
# ✔ gemma2:2b downloaded!
|
|
159
|
+
# Saved to: ~/pullama-models/gemma2-2b/
|
|
160
|
+
#
|
|
161
|
+
# Once Ollama is installed, run:
|
|
162
|
+
# pullama install --model gemma2:2b --blobsPath ~/pullama-models/gemma2-2b/
|
|
163
|
+
```
|
|
164
|
+
|
|
165
|
+
Copy the folder to a USB drive, give it to a friend, install on an air-gapped machine — it just works.
|
|
166
|
+
|
|
167
|
+
---
|
|
168
|
+
|
|
169
|
+
## How it works
|
|
170
|
+
|
|
171
|
+
Ollama stores models as SHA256-named blob files. Pullama downloads each blob directly into Ollama's models directory (`~/.ollama/models` or `/usr/share/ollama/.ollama/models` for system installs) and writes the manifest **last** — so Ollama only sees the model once everything is verified complete.
|
|
172
|
+
|
|
173
|
+
If a download is interrupted, the partial blob stays on disk. On the next run, Pullama checks the existing file size and sends an HTTP `Range: bytes=X-` request to continue exactly where it stopped — no re-downloading from zero.
|
|
174
|
+
|
|
175
|
+
**With aria2:** splits each file into 4 parallel chunks. Bypasses per-connection throttling and dramatically improves speed on slow connections.
|
|
176
|
+
|
|
177
|
+
**Without aria2:** uses Python's built-in HTTP client with the same resume logic.
|
|
178
|
+
|
|
179
|
+
---
|
|
180
|
+
|
|
181
|
+
## Model name format
|
|
182
|
+
|
|
183
|
+
```
|
|
184
|
+
tinyllama:latest # official model, explicit tag
|
|
185
|
+
gemma2:2b # official model
|
|
186
|
+
deepseek-r1:7b # official model
|
|
187
|
+
huihui_ai/deepseek-r1:8b # community model (namespace/model:tag)
|
|
188
|
+
```
|
|
189
|
+
|
|
190
|
+
---
|
|
191
|
+
|
|
192
|
+
## Platform support
|
|
193
|
+
|
|
194
|
+
| Platform | Supported |
|
|
195
|
+
|---|---|
|
|
196
|
+
| Linux | ✔ |
|
|
197
|
+
| macOS | ✔ |
|
|
198
|
+
| Windows | ✔ |
|
|
199
|
+
|
|
200
|
+
---
|
|
201
|
+
|
|
202
|
+
## License
|
|
203
|
+
|
|
204
|
+
MIT
|
|
205
|
+
|
|
206
|
+
---
|
|
207
|
+
|
|
208
|
+
## Credits
|
|
209
|
+
|
|
210
|
+
Pullama started as a fork of [oget](https://github.com/fr0stb1rd/oget) by [fr0stb1rd](https://github.com/fr0stb1rd). The original idea of fetching direct download URLs from the Ollama registry belongs to them. Pullama extends it with resumable downloads, automatic Ollama install, aria2 support, state tracking, smart path detection, and a fully rewritten CLI built for slow and unstable connections.
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
pullama/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
pullama/__main__.py,sha256=J82EcleKB8wM7fwnpnJFwhKx48kf4zwOGe1nCrV3hOY,31829
|
|
3
|
+
pullama_cli-1.0.0.dist-info/METADATA,sha256=gJCJWGIm_bTiPvaVoBUNRLEXhbWmvYI70mxo6YT9uZc,6729
|
|
4
|
+
pullama_cli-1.0.0.dist-info/WHEEL,sha256=QccIxa26bgl1E6uMy58deGWi-0aeIkkangHcxk2kWfw,87
|
|
5
|
+
pullama_cli-1.0.0.dist-info/entry_points.txt,sha256=WB3-UthotUDapzrKY4xaI6Iy8zWCS898QEgWst-WeOs,50
|
|
6
|
+
pullama_cli-1.0.0.dist-info/licenses/LICENSE,sha256=yaDSr7vwcYVrblvoJOyA74RW803Y1kIE6ueiRT9XnSE,1120
|
|
7
|
+
pullama_cli-1.0.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 fr0stb1rd (original oget)
|
|
4
|
+
Copyright (c) 2026 Steve-sy (pullama)
|
|
5
|
+
|
|
6
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
7
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
8
|
+
in the Software without restriction, including without limitation the rights
|
|
9
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
10
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
11
|
+
furnished to do so, subject to the following conditions:
|
|
12
|
+
|
|
13
|
+
The above copyright notice and this permission notice shall be included in all
|
|
14
|
+
copies or substantial portions of the Software.
|
|
15
|
+
|
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
17
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
18
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
19
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
20
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
21
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
22
|
+
SOFTWARE.
|