sideloader-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sideloader/__init__.py +0 -0
- sideloader/cli.py +956 -0
- sideloader/jsonbin_connector.py +345 -0
- sideloader/scripts/cleanup_pypi.py +352 -0
- sideloader/server.py +379 -0
- sideloader-2.0.0.dist-info/METADATA +140 -0
- sideloader-2.0.0.dist-info/RECORD +9 -0
- sideloader-2.0.0.dist-info/WHEEL +4 -0
- sideloader-2.0.0.dist-info/entry_points.txt +4 -0
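Nearly all of the package's behavior lives in sideloader/cli.py, reproduced in full below. In outline: the CLI files a download request in a JSONBin collection (optionally pinging Hookdeck's Publish API to trigger server-side processing), polls the bin until the server reports that the target file has been split and published to PyPI as one or more wheels, runs "pip download --no-deps" for each generated package, pulls the chunk stored at <pkg>-<version>.data/data/share/<pkg>/<pkg> out of each wheel, and concatenates the chunks back into the original file.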
sideloader/cli.py
ADDED
@@ -0,0 +1,956 @@
#!/usr/bin/env python3
"""
Sideload CLI Client
A beautiful command-line interface for downloading files via the Sideload service.
"""

import os
import sys
import time
import subprocess
import tempfile
import argparse
from pathlib import Path
from typing import List, Dict

from rich.console import Console
from rich.progress import (
    Progress,
    SpinnerColumn,
    TextColumn,
    BarColumn,
    TaskProgressColumn,
)
from rich.panel import Panel
from rich.table import Table
from rich.text import Text
from rich.align import Align
from rich.rule import Rule

from sideloader.jsonbin_connector import JSONBinConnector, SideloadBinManager

console = Console()


class SideloadClient:
    def __init__(
        self,
        jsonbin_token: str,
        collection_id: str,
        verify_ssl: bool = True,
        key_type: str = "master",
        hookdeck_source_id: str | None = None,
        hookdeck_api_key: str | None = None,
    ):
        self.collection_id = collection_id
        self.connector = JSONBinConnector(
            jsonbin_token, verify_ssl=verify_ssl, key_type=key_type
        )
        self.manager = SideloadBinManager(self.connector)
        self.hookdeck_source_id = hookdeck_source_id
        self.hookdeck_api_key = hookdeck_api_key

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.connector.close()

    def notify_hookdeck(self, bin_id: str):
        """Send an event to Hookdeck via the Publish API to trigger server-side processing"""
        if not self.hookdeck_source_id or not self.hookdeck_api_key:
            return
        import httpx

        try:
            response = httpx.post(
                "https://hkdk.events/v1/publish",
                headers={"x-hookdeck-source-id": self.hookdeck_source_id},
                auth=(self.hookdeck_api_key, ""),
                json={"request_id": bin_id},
            )
            response.raise_for_status()
            console.print(
                f"📡 Hookdeck event sent for request [bold cyan]{bin_id}[/bold cyan]"
            )
        except Exception as e:
            console.print(f"⚠️ Failed to send Hookdeck event: {e}", style="yellow")

    def create_request(self, url: str) -> str:
        """Create a new sideload request and return the bin ID"""
        with console.status("[bold green]Creating sideload request..."):
            bin_id = self.manager.create_sideload_request(url, self.collection_id)

        console.print(f"✅ Created sideload request: [bold cyan]{bin_id}[/bold cyan]")
        self.notify_hookdeck(bin_id)
        return bin_id

    def monitor_request(self, bin_id: str) -> Dict:
        """Monitor the sideload request progress"""

        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            console=console,
            transient=True,
        ) as progress:
            download_task = progress.add_task("Monitoring request...", total=100)

            while True:
                try:
                    data = self.manager.get_sideload_data(bin_id)

                    status = data.get("status", "UNKNOWN")
                    current_progress = data.get("progress", 0)

                    if status == "DOWNLOADING":
                        progress.update(
                            download_task,
                            description=f"📥 Downloading... ({current_progress}%)",
                            completed=current_progress,
                        )
                    elif status == "BUILDING":
                        progress.update(
                            download_task,
                            description="🔨 Building packages...",
                            completed=90,
                        )
                    elif status == "UPLOADING":
                        current_part = data.get("current_part", 1)
                        total_parts = data.get("total_parts", 1)
                        progress.update(
                            download_task,
                            description=f"📤 Uploading part {current_part}/{total_parts}...",
                            completed=95,
                        )
                    elif status == "UPLOADED":
                        progress.update(
                            download_task,
                            description="✅ Upload complete!",
                            completed=100,
                        )
                        break
                    elif status in ["FAILED", "REJECTED"]:
                        reason = data.get("reason", "Unknown error")
                        console.print(f"❌ Request failed: {reason}", style="bold red")
                        return data

                    time.sleep(2)

                except KeyboardInterrupt:
                    console.print("\n⚠️ Monitoring interrupted by user", style="yellow")
                    console.print(
                        f"💡 To resume this download, run: [bold cyan]sideload download --resume {bin_id}[/bold cyan]"
                    )
                    break
                except Exception as e:
                    console.print(f"❌ Error monitoring request: {e}", style="red")
                    break

        # Get final data
        return self.manager.get_sideload_data(bin_id)

    def download_packages(
        self, package_names: List[str], output_dir: Path, debug: bool = False
    ) -> List[Path]:
        """Download all packages to a temporary directory"""
        downloaded_files = []

        if debug:
            console.print(f"[dim]Package names to download: {package_names}[/dim]")

        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            console=console,
            disable=debug,  # Disable progress bar in debug mode
        ) as progress:
            download_task = progress.add_task(
                "Downloading packages...", total=len(package_names)
            )

            for i, package_name in enumerate(package_names):
                progress.update(
                    download_task,
                    description=f"📦 Downloading {package_name}...",
                    completed=i,
                )

                # Download using pip to temporary directory
                try:
                    cmd = [
                        sys.executable,
                        "-m",
                        "pip",
                        "download",
                        "--no-deps",
                        "--dest",
                        str(output_dir),
                        package_name,
                    ]

                    if debug:
                        console.print(f"[dim]Running: {' '.join(cmd)}[/dim]")

                    result = subprocess.run(
                        cmd,
                        capture_output=True,
                        text=True,
                    )

                    if result.returncode != 0:
                        console.print(
                            f"❌ Failed to download {package_name}", style="red"
                        )
                        if result.stderr:
                            # Show last few lines of stderr for context
                            stderr_lines = result.stderr.strip().split("\n")
                            for line in stderr_lines[-3:]:
                                console.print(f" [dim]{line}[/dim]")
                        continue

                    # Find the downloaded wheel file
                    # Pip normalizes package names: lowercase, underscores collapsed, hyphens become underscores
                    def normalize_package_name(name: str) -> str:
                        """Normalize package name to match pip's wheel naming convention"""
                        import re

                        # Replace runs of underscores/hyphens/dots with single underscore, then lowercase
                        return re.sub(r"[-_.]+", "_", name).lower()

                    normalized_name = normalize_package_name(package_name)
                    wheel_files = [
                        f
                        for f in output_dir.glob("*.whl")
                        if normalize_package_name(f.name.split("-")[0])
                        == normalized_name
                    ]
                    if wheel_files:
                        downloaded_files.append(wheel_files[0])
                        if debug:
                            console.print(
                                f"[green]✓ Downloaded: {wheel_files[0].name}[/green]"
                            )
                    else:
                        console.print(
                            f"[yellow]⚠ No wheel file found for {package_name}[/yellow]"
                        )
                        if debug:
                            all_wheels = list(output_dir.glob("*.whl"))
                            console.print(
                                f"[dim]Normalized package name: {normalized_name}[/dim]"
                            )
                            console.print(
                                f"[dim]Available wheels in dir: {[w.name for w in all_wheels]}[/dim]"
                            )

                except subprocess.CalledProcessError as e:
                    error_msg = (
                        e.stderr if hasattr(e, "stderr") and e.stderr else str(e)
                    )
                    console.print(
                        f"❌ Failed to download {package_name}: {error_msg}",
                        style="red",
                    )
                    continue

            progress.update(
                download_task,
                description="✅ Download complete!",
                completed=len(package_names),
            )

        return downloaded_files

    def extract_and_reassemble(
        self,
        wheel_files: List[Path],
        package_names: List[str],
        original_filename: str,
        output_path: Path,
        debug: bool = False,
        work_dir: Path = None,
    ):
        """Extract parts from wheel files and reassemble the original file"""
        # Use provided work directory or create a temporary one
        use_temp = work_dir is None
        if use_temp:
            temp_dir_obj = tempfile.TemporaryDirectory()
            temp_path = Path(temp_dir_obj.name)
        else:
            temp_dir_obj = None
            temp_path = work_dir
            temp_path.mkdir(parents=True, exist_ok=True)

        try:
            part_files = []

            if not use_temp:
                console.print(f"[yellow]⚠️ Using work directory: {temp_path}[/yellow]")
                console.print(
                    f"[yellow] Files will be kept after extraction for debugging[/yellow]"
                )

            with Progress(
                SpinnerColumn(),
                TextColumn("[progress.description]{task.description}"),
                BarColumn(),
                TaskProgressColumn(),
                console=console,
                disable=debug,  # Disable progress bar in debug mode
            ) as progress:
                extract_task = progress.add_task(
                    "Extracting packages...", total=len(wheel_files)
                )

                # Extract each wheel file
                for i, (wheel_file, package_name) in enumerate(
                    zip(wheel_files, package_names)
                ):
                    progress.update(
                        extract_task,
                        description=f"📂 Extracting {wheel_file.name}...",
                        completed=i,
                    )

                    if debug:
                        console.print(
                            f"\n[cyan]Extracting package {i + 1}/{len(wheel_files)}: {package_name}[/cyan]"
                        )

                    # Extract wheel file (it's just a zip)
                    import zipfile

                    with zipfile.ZipFile(wheel_file, "r") as zip_ref:
                        if debug:
                            console.print(
                                f"\n[dim]Contents of {wheel_file.name}:[/dim]"
                            )
                            for name in sorted(zip_ref.namelist()):
                                console.print(f"[dim] {name}[/dim]")
                        zip_ref.extractall(temp_path)

                    # Find the part file using the normalized package name
                    # Pattern: pkgname-version.data/data/share/pkgname/pkgname
                    # Need to find the .data directory that starts with normalized package_name
                    import re

                    def normalize_package_name(name: str) -> str:
                        """Normalize package name to match pip's wheel naming convention"""
                        return re.sub(r"[-_.]+", "_", name).lower()

                    normalized_name = normalize_package_name(package_name)
                    data_dir = None
                    actual_package_name = None

                    for d in temp_path.iterdir():
                        if d.is_dir() and d.name.endswith(".data"):
                            # Extract package name from directory (before -version)
                            dir_pkg_name = d.name.split("-")[0]
                            if normalize_package_name(dir_pkg_name) == normalized_name:
                                data_dir = d
                                actual_package_name = dir_pkg_name
                                break

                    if data_dir and actual_package_name:
                        # The share directory uses the original package name (with __ etc)
                        # Try both the actual_package_name (from .data dir) and the original package_name
                        share_dir = data_dir / "data" / "share"
                        part_file_path = None

                        # First try: use actual_package_name from wheel (normalized)
                        candidate = (
                            share_dir / actual_package_name / actual_package_name
                        )
                        if candidate.is_file():
                            part_file_path = candidate
                        else:
                            # Second try: use original package_name (with __)
                            candidate = share_dir / package_name / package_name
                            if candidate.is_file():
                                part_file_path = candidate
                            else:
                                # Third try: search for any subdirectory in share
                                if share_dir.exists():
                                    for subdir in share_dir.iterdir():
                                        if subdir.is_dir():
                                            inner_file = subdir / subdir.name
                                            if inner_file.is_file():
                                                part_file_path = inner_file
                                                break

                        if debug:
                            console.print(
                                f"[dim]Looking for part file in: {share_dir}[/dim]"
                            )
                            if share_dir.exists():
                                console.print(
                                    f"[dim]Share dir contents: {[x.name for x in share_dir.iterdir()]}[/dim]"
                                )

                        if part_file_path and part_file_path.is_file():
                            part_files.append(part_file_path)
                            if debug:
                                console.print(
                                    f"[green]✓ Found part file: {part_file_path.name} (size: {part_file_path.stat().st_size:,} bytes)[/green]"
                                )
                        else:
                            if debug:
                                console.print(
                                    f"[yellow]⚠ Part file not found at expected path[/yellow]"
                                )
                    else:
                        if debug:
                            console.print(
                                f"[yellow]⚠ Could not find .data directory for {package_name}[/yellow]"
                            )
                            console.print(
                                f"[dim]Available directories: {[d.name for d in temp_path.iterdir() if d.is_dir()]}[/dim]"
                            )

                progress.update(
                    extract_task,
                    description="✅ Extraction complete!",
                    completed=len(wheel_files),
                )

            # Sort part files to ensure correct order
            part_files.sort(key=lambda x: x.name)

            if len(part_files) == 1 and part_files[0].name == original_filename:
                # Single file, just copy it
                console.print("📄 Single file detected, copying...")
                import shutil

                shutil.copy2(part_files[0], output_path)
            else:
                # Multiple parts, concatenate them
                console.print(f"🔗 Reassembling {len(part_files)} parts...")

                with Progress(
                    SpinnerColumn(),
                    TextColumn("[progress.description]{task.description}"),
                    BarColumn(),
                    TaskProgressColumn(),
                    console=console,
                    disable=debug,  # Disable progress bar in debug mode
                ) as progress:
                    reassemble_task = progress.add_task(
                        "Reassembling file...", total=len(part_files)
                    )

                    with open(output_path, "wb") as output_file:
                        for i, part_file in enumerate(part_files):
                            progress.update(
                                reassemble_task,
                                description=f"🔗 Assembling part {i + 1}/{len(part_files)}...",
                                completed=i,
                            )

                            if debug:
                                console.print(
                                    f"[dim]Reading part {i + 1}/{len(part_files)}: {part_file.name} ({part_file.stat().st_size:,} bytes)[/dim]"
                                )

                            with open(part_file, "rb") as part:
                                output_file.write(part.read())

                    progress.update(
                        reassemble_task,
                        description="✅ Reassembly complete!",
                        completed=len(part_files),
                    )
        finally:
            # Clean up temporary directory if we created one
            if use_temp and temp_dir_obj:
                temp_dir_obj.cleanup()


def display_header():
    """Display the application header"""
    header = Text("🚀 SIDELOAD", style="bold magenta")
    subtitle = Text("Download large files via PyPI packages", style="dim")

    panel = Panel(
        Align.center(f"{header}\n{subtitle}"), border_style="magenta", padding=(1, 2)
    )
    console.print(panel)


def display_summary(data: Dict):
    """Display a summary of the completed request"""
    table = Table(title="📊 Download Summary", style="cyan")
    table.add_column("Property", style="bold")
    table.add_column("Value")

    table.add_row("Original Filename", data.get("filename", "Unknown"))
    table.add_row("File Size", f"{data.get('file_size', 0):,} bytes")
    table.add_row("Total Packages", str(data.get("total_packages", 0)))
    table.add_row("Status", f"✅ {data.get('status', 'Unknown')}")

    console.print(table)


def list_resumable_downloads(connector: JSONBinConnector, collection_id: str):
    """List all resumable downloads from the collection"""
    table = Table(title="📋 Resumable Downloads", style="cyan")
    table.add_column("Request ID", style="bold cyan")
    table.add_column("Status", style="yellow")
    table.add_column("Filename", style="white")
    table.add_column("Progress", style="green")
    table.add_column("Created", style="dim")

    # Resumable statuses (in-progress or completed but not yet downloaded)
    resumable_statuses = {"CREATED", "DOWNLOADING", "BUILDING", "UPLOADING", "UPLOADED"}

    found_count = 0
    last_bin_id = None

    with console.status("[bold green]Fetching downloads..."):
        while True:
            try:
                bins = connector.get_collection_bins(collection_id, last_bin_id)
                if not bins:
                    break

                for bin_data in bins:
                    bin_id = bin_data["record"]
                    last_bin_id = bin_id

                    try:
                        data = connector.get_bin(bin_id)
                        status = data.get("status", "UNKNOWN")

                        if status in resumable_statuses:
                            found_count += 1
                            filename = data.get("filename", data.get("url", "Unknown"))
                            # Truncate long filenames
                            if len(filename) > 40:
                                filename = filename[:37] + "..."

                            progress = data.get("progress", 0)
                            progress_str = (
                                f"{progress}%" if status == "DOWNLOADING" else "-"
                            )

                            # Format created time
                            created_at = data.get("created_at", 0)
                            if created_at:
                                from datetime import datetime

                                created_str = datetime.fromtimestamp(
                                    created_at
                                ).strftime("%Y-%m-%d %H:%M")
                            else:
                                created_str = "Unknown"

                            # Status with emoji
                            status_display = {
                                "CREATED": "⏳ CREATED",
                                "DOWNLOADING": "📥 DOWNLOADING",
                                "BUILDING": "🔨 BUILDING",
                                "UPLOADING": "📤 UPLOADING",
                                "UPLOADED": "✅ UPLOADED",
                            }.get(status, status)

                            table.add_row(
                                bin_id,
                                status_display,
                                filename,
                                progress_str,
                                created_str,
                            )
                    except Exception:
                        continue

            except Exception as e:
                console.print(f"[red]Error fetching bins: {e}[/red]")
                break

    if found_count > 0:
        console.print(table)
        console.print(
            f"\n💡 To resume a download: [bold cyan]sideload download --resume <REQUEST_ID>[/bold cyan]"
        )
    else:
        console.print("[yellow]No resumable downloads found.[/yellow]")


def cli_main():
    parser = argparse.ArgumentParser(
        description="Sideload CLI - Download large files via PyPI packages",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  sideload download https://example.com/largefile.zip
  sideload download https://example.com/file.zip --output ./downloads/
  sideload download --resume abc123xyz  # Resume an interrupted download
  sideload list                         # List resumable downloads
        """,
    )

    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # List command
    list_parser = subparsers.add_parser("list", help="List resumable downloads")
    list_parser.add_argument("--collection", help="JSONBin collection ID")
    list_parser.add_argument("--token", help="JSONBin API token")
    list_parser.add_argument(
        "--no-verify-ssl",
        action="store_true",
        help="Disable SSL certificate verification for JSONBin API",
    )
    list_parser.add_argument(
        "--jsonbin-key-type",
        choices=["master", "access"],
        default=None,
        help="JSONBin key type to use (default: from JSONBIN_KEY_TYPE env or 'master')",
    )

    # Download command
    download_parser = subparsers.add_parser(
        "download", help="Download a file via sideload"
    )
    download_parser.add_argument(
        "url",
        nargs="?",
        help="URL of the file to download (not required if --resume is used)",
    )
    download_parser.add_argument(
        "--resume",
        "-r",
        metavar="REQUEST_ID",
        help="Resume an interrupted download by providing the request ID",
    )
    download_parser.add_argument(
        "--output",
        "-o",
        type=Path,
        default=Path.cwd(),
        help="Output directory (default: current directory)",
    )
    download_parser.add_argument("--collection", help="JSONBin collection ID")
    download_parser.add_argument("--token", help="JSONBin API token")
    download_parser.add_argument(
        "--debug",
        action="store_true",
        help="Enable debug logging",
    )
    download_parser.add_argument(
        "--work-dir",
        type=Path,
        help="Working directory for extraction (for debugging, defaults to temp directory)",
    )
    download_parser.add_argument(
        "--no-verify-ssl",
        action="store_true",
        help="Disable SSL certificate verification for JSONBin API",
    )
    download_parser.add_argument(
        "--jsonbin-key-type",
        choices=["master", "access"],
        default=None,
        help="JSONBin key type to use (default: from JSONBIN_KEY_TYPE env or 'master')",
    )
    download_parser.add_argument(
        "--hookdeck-source-id",
        type=str,
        default=None,
        help="Hookdeck source ID for the Publish API (default: from HOOKDECK_SOURCE_ID env)",
    )
    download_parser.add_argument(
        "--hookdeck-api-key",
        type=str,
        default=None,
        help="Hookdeck API key for the Publish API (default: from HOOKDECK_API_KEY env)",
    )

    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return

    display_header()

    if args.command == "list":
        # Get credentials
        jsonbin_token = args.token or os.environ.get("JSONBIN_TOKEN")
        collection_id = args.collection or os.environ.get("SIDELOAD_COLLECTION_ID")

        # Get SSL verification setting
        verify_ssl = not args.no_verify_ssl
        if not args.no_verify_ssl:
            env_verify = os.environ.get("JSONBIN_VERIFY_SSL", "true").lower()
            verify_ssl = env_verify in ("true", "1", "yes")

        # Get key type
        key_type = (
            args.jsonbin_key_type
            or os.environ.get("JSONBIN_KEY_TYPE", "master").lower()
        )

        if not jsonbin_token:
            console.print(
                "❌ JSONBin token required. Set JSONBIN_TOKEN environment variable or use --token",
                style="red",
            )
            return

        if not collection_id:
            console.print(
                "❌ Collection ID required. Set SIDELOAD_COLLECTION_ID environment variable or use --collection",
                style="red",
            )
            return

        connector = JSONBinConnector(
            jsonbin_token, verify_ssl=verify_ssl, key_type=key_type
        )
        try:
            list_resumable_downloads(connector, collection_id)
        finally:
            connector.close()

    elif args.command == "download":
        # Get credentials
        jsonbin_token = args.token or os.environ.get("JSONBIN_TOKEN")
        collection_id = args.collection or os.environ.get("SIDELOAD_COLLECTION_ID")

        # Get SSL verification setting (CLI flag overrides env var)
        verify_ssl = not args.no_verify_ssl
        if args.no_verify_ssl:
            # If CLI flag is set, disable SSL verification
            verify_ssl = False
        else:
            # Otherwise check environment variable (default to True)
            env_verify = os.environ.get("JSONBIN_VERIFY_SSL", "true").lower()
            verify_ssl = env_verify in ("true", "1", "yes")

        # Get key type (CLI arg > env var > default)
        key_type = (
            args.jsonbin_key_type
            or os.environ.get("JSONBIN_KEY_TYPE", "master").lower()
        )

        if not jsonbin_token:
            console.print(
                "❌ JSONBin token required. Set JSONBIN_TOKEN environment variable or use --token",
                style="red",
            )
            return

        if not collection_id:
            console.print(
                "❌ Collection ID required. Set SIDELOAD_COLLECTION_ID environment variable or use --collection",
                style="red",
            )
            return

        # Ensure output directory exists
        args.output.mkdir(parents=True, exist_ok=True)

        # Show debug flags if enabled
        if args.debug:
            console.print("\n[bold cyan]Debug Mode Enabled[/bold cyan]")
            if args.resume:
                console.print(f" [dim]Resume Request ID:[/dim] {args.resume}")
            else:
                console.print(f" [dim]URL:[/dim] {args.url}")
            console.print(f" [dim]Output:[/dim] {args.output}")
            console.print(
                f" [dim]Work Directory:[/dim] {args.work_dir or 'temp (auto-cleanup)'}"
            )
            console.print(f" [dim]Collection ID:[/dim] {collection_id}")
            console.print(f" [dim]SSL Verification:[/dim] {verify_ssl}")
            console.print(f" [dim]JSONBin Key Type:[/dim] {key_type}")
            console.print()

        # Validate that either URL or --resume is provided
        if not args.url and not args.resume:
            console.print(
                "❌ Either a URL or --resume REQUEST_ID is required",
                style="red",
            )
            return

        if args.url and args.resume:
            console.print(
                "❌ Cannot specify both URL and --resume at the same time",
                style="red",
            )
            return

        # Get hookdeck config
        hookdeck_source_id = getattr(
            args, "hookdeck_source_id", None
        ) or os.environ.get("HOOKDECK_SOURCE_ID")
        hookdeck_api_key = getattr(args, "hookdeck_api_key", None) or os.environ.get(
            "HOOKDECK_API_KEY"
        )

        try:
            with SideloadClient(
                jsonbin_token,
                collection_id,
                verify_ssl,
                key_type,
                hookdeck_source_id,
                hookdeck_api_key,
            ) as client:
                if args.resume:
                    # Resume an existing request
                    bin_id = args.resume
                    console.print(
                        f"🔄 Resuming request: [bold cyan]{bin_id}[/bold cyan]"
                    )

                    # Check if request exists and get its current status
                    try:
                        data = client.manager.get_sideload_data(bin_id)
                        status = data.get("status", "UNKNOWN")
                        console.print(
                            f"📊 Current status: [bold yellow]{status}[/bold yellow]"
                        )

                        if status == "UPLOADED":
                            console.print(
                                "✅ Request already completed, proceeding to download packages..."
                            )
                        elif status in ["FAILED", "REJECTED"]:
                            reason = data.get(
                                "reason", data.get("details", "Unknown error")
                            )
                            console.print(
                                f"❌ Request failed: {reason}", style="bold red"
                            )
                            return
                        else:
                            # Continue monitoring if not yet complete
                            console.print(Rule("📡 Monitoring Progress"))
                            data = client.monitor_request(bin_id)
                    except Exception as e:
                        console.print(
                            f"❌ Failed to retrieve request {bin_id}: {e}", style="red"
                        )
                        return
                else:
                    # Create a new request
                    console.print(
                        f"🌐 Requesting download for: [bold blue]{args.url}[/bold blue]"
                    )
                    bin_id = client.create_request(args.url)

                    # Monitor the request
                    console.print(Rule("📡 Monitoring Progress"))
                    data = client.monitor_request(bin_id)

                if data.get("status") != "UPLOADED":
                    console.print(
                        "❌ Request did not complete successfully", style="red"
                    )
                    return

                # Display summary
                display_summary(data)

                # Download packages
                package_names = data.get("packages_names", [])
                if not package_names:
                    console.print("❌ No packages found in the response", style="red")
                    return

                console.print(Rule("📦 Downloading Packages"))

                # Use work_dir for downloads if provided, otherwise use temp directory
                use_work_dir = args.work_dir is not None
                if use_work_dir:
                    download_dir = args.work_dir / "wheels"
                    download_dir.mkdir(parents=True, exist_ok=True)
                    console.print(
                        f"[yellow]📥 Downloading packages to: {download_dir}[/yellow]"
                    )

                    wheel_files = client.download_packages(
                        package_names, download_dir, args.debug
                    )

                    if not wheel_files:
                        console.print(
                            "❌ No packages were downloaded successfully", style="red"
                        )
                        return

                    # Extract and reassemble
                    console.print(Rule("🔧 Reassembling File"))
                    original_filename = data.get("filename", "downloaded_file")
                    output_file = args.output / original_filename

                    client.extract_and_reassemble(
                        wheel_files,
                        package_names,
                        original_filename,
                        output_file,
                        args.debug,
                        args.work_dir,
                    )

                    # Success!
                    console.print(Rule("✨ Complete"))
                    console.print(
                        f"🎉 File successfully downloaded to: [bold green]{output_file}[/bold green]"
                    )
                    console.print(
                        f"📏 File size: [cyan]{output_file.stat().st_size:,} bytes[/cyan]"
                    )
                else:
                    # Use temporary directory for downloads
                    with tempfile.TemporaryDirectory() as temp_dir:
                        temp_path = Path(temp_dir)
                        wheel_files = client.download_packages(
                            package_names, temp_path, args.debug
                        )

                        if not wheel_files:
                            console.print(
                                "❌ No packages were downloaded successfully",
                                style="red",
                            )
                            return

                        # Extract and reassemble
                        console.print(Rule("🔧 Reassembling File"))
                        original_filename = data.get("filename", "downloaded_file")
                        output_file = args.output / original_filename

                        client.extract_and_reassemble(
                            wheel_files,
                            package_names,
                            original_filename,
                            output_file,
                            args.debug,
                            args.work_dir,
                        )

                        # Success!
                        console.print(Rule("✨ Complete"))
                        console.print(
                            f"🎉 File successfully downloaded to: [bold green]{output_file}[/bold green]"
                        )
                        console.print(
                            f"📏 File size: [cyan]{output_file.stat().st_size:,} bytes[/cyan]"
                        )

        except KeyboardInterrupt:
            console.print("\n⚠️ Download interrupted by user", style="yellow")
        except Exception as e:
            console.print(f"❌ Error: {e}", style="bold red")
            raise


if __name__ == "__main__":
    cli_main()
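A minimal sketch of driving the same flow programmatically with the classes defined above (the URL and paths here are placeholders; the environment variables are the same ones cli_main() reads; this is an illustration, not code shipped in the package):

    import os
    from pathlib import Path

    from sideloader.cli import SideloadClient

    # Credentials resolved the same way cli_main() resolves them:
    # JSONBIN_TOKEN / SIDELOAD_COLLECTION_ID, or --token/--collection flags.
    with SideloadClient(
        jsonbin_token=os.environ["JSONBIN_TOKEN"],
        collection_id=os.environ["SIDELOAD_COLLECTION_ID"],
    ) as client:
        bin_id = client.create_request("https://example.com/largefile.zip")
        data = client.monitor_request(bin_id)  # polls the bin every 2 seconds
        if data.get("status") == "UPLOADED":
            names = data.get("packages_names", [])
            # Each generated package is fetched via "pip download --no-deps"
            wheels = client.download_packages(names, Path("./wheels"))
            client.extract_and_reassemble(
                wheels, names, data["filename"], Path(".") / data["filename"]
            )

Two details worth noting for reviewers: part lookup depends on pip's name normalization, re.sub(r"[-_.]+", "_", name).lower(), so for example My-Pkg..part_01 becomes my_pkg_part_01; and reassembly restores byte order with part_files.sort(key=lambda x: x.name), which is only correct when the generated part names sort lexicographically in chunk order.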