sideload 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sideload might be problematic. Click here for more details.
- sideload/__init__.py +13 -0
- sideload/cli.py +416 -0
- sideload/jsonbin.py +0 -0
- sideload/jsonbin_connector.py +49 -0
- sideload/jsonbin_old.py +302 -0
- sideload/main.py +237 -0
- sideload-0.1.0.dist-info/METADATA +138 -0
- sideload-0.1.0.dist-info/RECORD +10 -0
- sideload-0.1.0.dist-info/WHEEL +4 -0
- sideload-0.1.0.dist-info/entry_points.txt +3 -0
sideload/__init__.py
ADDED
sideload/cli.py
ADDED
|
@@ -0,0 +1,416 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Sideload CLI Client
|
|
4
|
+
A beautiful command-line interface for downloading files via the Sideload service.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import os
|
|
8
|
+
import sys
|
|
9
|
+
import time
|
|
10
|
+
import subprocess
|
|
11
|
+
import tempfile
|
|
12
|
+
import argparse
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import List, Dict
|
|
15
|
+
|
|
16
|
+
from rich.console import Console
|
|
17
|
+
from rich.progress import (
|
|
18
|
+
Progress,
|
|
19
|
+
SpinnerColumn,
|
|
20
|
+
TextColumn,
|
|
21
|
+
BarColumn,
|
|
22
|
+
TaskProgressColumn,
|
|
23
|
+
)
|
|
24
|
+
from rich.panel import Panel
|
|
25
|
+
from rich.table import Table
|
|
26
|
+
from rich.text import Text
|
|
27
|
+
from rich.align import Align
|
|
28
|
+
from rich.rule import Rule
|
|
29
|
+
|
|
30
|
+
from sideload.jsonbin_old import JSONBinConnector, SideloadBinManager
|
|
31
|
+
|
|
32
|
+
console = Console()
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class SideloadClient:
    """High-level client for the Sideload service.

    Creates a sideload request in JSONBin, polls it while the server-side
    worker downloads/splits/uploads the file as PyPI packages, then fetches
    those packages with pip and reassembles the original file locally.
    Usable as a context manager; exiting closes the underlying HTTP client.
    """

    def __init__(self, jsonbin_token: str, collection_id: str):
        # collection_id selects the JSONBin collection the request bin lives in.
        self.collection_id = collection_id
        self.connector = JSONBinConnector(jsonbin_token)
        self.manager = SideloadBinManager(self.connector)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always release the HTTP client, even when the block raised.
        self.connector.close()

    def create_request(self, url: str) -> str:
        """Create a new sideload request and return the bin ID"""
        with console.status("[bold green]Creating sideload request..."):
            bin_id = self.manager.create_sideload_request(url, self.collection_id)

        console.print(f"โ Created sideload request: [bold cyan]{bin_id}[/bold cyan]")
        return bin_id

    def monitor_request(self, bin_id: str) -> Dict:
        """Monitor the sideload request progress.

        Polls the request bin every 2 seconds and mirrors the server-side
        status (DOWNLOADING/BUILDING/UPLOADING/UPLOADED) into a progress
        bar.  Returns the bin data; on FAILED/REJECTED it returns the
        failing record immediately, otherwise it re-fetches the final
        state after the loop ends.
        """

        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            console=console,
            transient=True,
        ) as progress:
            download_task = progress.add_task("Monitoring request...", total=100)

            while True:
                try:
                    data = self.manager.get_sideload_data(bin_id)

                    status = data.get("status", "UNKNOWN")
                    current_progress = data.get("progress", 0)

                    if status == "DOWNLOADING":
                        progress.update(
                            download_task,
                            description=f"๐ฅ Downloading... ({current_progress}%)",
                            completed=current_progress,
                        )
                    elif status == "BUILDING":
                        # Server-side wheel build exposes no fine-grained
                        # progress; pin the bar at 90%.
                        progress.update(
                            download_task,
                            description="๐จ Building packages...",
                            completed=90,
                        )
                    elif status == "UPLOADING":
                        current_part = data.get("current_part", 1)
                        total_parts = data.get("total_parts", 1)
                        progress.update(
                            download_task,
                            description=f"๐ค Uploading part {current_part}/{total_parts}...",
                            completed=95,
                        )
                    elif status == "UPLOADED":
                        progress.update(
                            download_task,
                            description="โ Upload complete!",
                            completed=100,
                        )
                        break
                    elif status in ["FAILED", "REJECTED"]:
                        reason = data.get("reason", "Unknown error")
                        console.print(f"โ Request failed: {reason}", style="bold red")
                        return data

                    time.sleep(2)

                except KeyboardInterrupt:
                    console.print("\nโ ๏ธ Monitoring interrupted by user", style="yellow")
                    break
                except Exception as e:
                    console.print(f"โ Error monitoring request: {e}", style="red")
                    break

        # Get final data (fresh fetch so the caller sees the terminal state).
        return self.manager.get_sideload_data(bin_id)

    def download_packages(
        self, package_names: List[str], output_dir: Path
    ) -> List[Path]:
        """Download all packages to a temporary directory.

        Each package is fetched with ``pip download --no-deps``; a failing
        package is reported and skipped, so the returned list may be
        shorter than ``package_names``.
        """
        downloaded_files = []

        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            console=console,
        ) as progress:
            download_task = progress.add_task(
                "Downloading packages...", total=len(package_names)
            )

            for i, package_name in enumerate(package_names):
                progress.update(
                    download_task,
                    description=f"๐ฆ Downloading {package_name}...",
                    completed=i,
                )

                # Download using pip to temporary directory
                try:
                    subprocess.run(
                        [
                            sys.executable,
                            "-m",
                            "pip",
                            "download",
                            "--no-deps",
                            "--dest",
                            str(output_dir),
                            package_name,
                        ],
                        capture_output=True,
                        text=True,
                        check=True,
                    )

                    # Find the downloaded wheel file
                    wheel_files = list(output_dir.glob(f"{package_name}*.whl"))
                    if wheel_files:
                        downloaded_files.append(wheel_files[0])

                except subprocess.CalledProcessError as e:
                    console.print(
                        f"โ Failed to download {package_name}: {e.stderr}", style="red"
                    )
                    continue

            progress.update(
                download_task,
                description="โ Download complete!",
                completed=len(package_names),
            )

        return downloaded_files

    def extract_and_reassemble(
        self, wheel_files: List[Path], original_filename: str, output_path: Path
    ):
        """Extract parts from wheel files and reassemble the original file.

        Each wheel carries one part of the payload under
        ``<pkg>.data/data/share/<pkg>/<pkg>``; the parts are extracted,
        ordered, and concatenated into ``output_path``.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            temp_path = Path(temp_dir)
            part_files = []

            with Progress(
                SpinnerColumn(),
                TextColumn("[progress.description]{task.description}"),
                BarColumn(),
                TaskProgressColumn(),
                console=console,
            ) as progress:
                extract_task = progress.add_task(
                    "Extracting packages...", total=len(wheel_files)
                )

                # Extract each wheel file
                for i, wheel_file in enumerate(wheel_files):
                    progress.update(
                        extract_task,
                        description=f"๐ Extracting {wheel_file.name}...",
                        completed=i,
                    )

                    # Extract wheel file (it's just a zip)
                    import zipfile

                    with zipfile.ZipFile(wheel_file, "r") as zip_ref:
                        zip_ref.extractall(temp_path)

                    # Find the part file in the wheel's data directory structure
                    # Pattern: pkgname.data/data/share/pkgname/pkgname
                    data_dirs = list(temp_path.glob("*.data/data/share/*/*"))
                    for part_file in data_dirs:
                        if part_file.is_file():
                            part_files.append(part_file)

                progress.update(
                    extract_task,
                    description="โ Extraction complete!",
                    completed=len(wheel_files),
                )

            # Sort part files to ensure correct order.
            # FIX: part names end in "_p<N>" (see the server's make_part_name),
            # so a plain lexicographic sort orders "..._p10" before "..._p2"
            # and corrupts any file split into 10+ parts.  All parts share the
            # same prefix, so sorting by (length, name) yields numeric order.
            part_files.sort(key=lambda x: (len(x.name), x.name))

            if len(part_files) == 1 and part_files[0].name == original_filename:
                # Single file, just copy it
                console.print("๐ Single file detected, copying...")
                import shutil

                shutil.copy2(part_files[0], output_path)
            else:
                # Multiple parts, concatenate them
                console.print(f"๐ Reassembling {len(part_files)} parts...")

                with Progress(
                    SpinnerColumn(),
                    TextColumn("[progress.description]{task.description}"),
                    BarColumn(),
                    TaskProgressColumn(),
                    console=console,
                ) as progress:
                    reassemble_task = progress.add_task(
                        "Reassembling file...", total=len(part_files)
                    )

                    with open(output_path, "wb") as output_file:
                        for i, part_file in enumerate(part_files):
                            progress.update(
                                reassemble_task,
                                description=f"๐ Assembling part {i + 1}/{len(part_files)}...",
                                completed=i,
                            )

                            with open(part_file, "rb") as part:
                                output_file.write(part.read())

                    progress.update(
                        reassemble_task,
                        description="โ Reassembly complete!",
                        completed=len(part_files),
                    )
|
|
268
|
+
def display_header():
    """Display the application header"""
    title = Text("๐ SIDELOAD", style="bold magenta")
    tagline = Text("Download large files via PyPI packages", style="dim")

    banner = Panel(
        Align.center(f"{title}\n{tagline}"),
        border_style="magenta",
        padding=(1, 2),
    )
    console.print(banner)
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def display_summary(data: Dict):
    """Display a summary of the completed request.

    Reads the fields the worker attaches on completion:
    original_filename, file_size, total_packages, status.
    """
    table = Table(title="๐ Download Summary", style="cyan")
    table.add_column("Property", style="bold")
    table.add_column("Value")

    # FIX: SideloadBinManager.mark_completed stores the name under
    # "original_filename"; the original code read only "filename" and so
    # always displayed "Unknown".  Keep "filename" as a fallback for
    # records written by other/older workers.
    table.add_row(
        "Original Filename",
        data.get("original_filename") or data.get("filename") or "Unknown",
    )
    table.add_row("File Size", f"{data.get('file_size', 0):,} bytes")
    table.add_row("Total Packages", str(data.get("total_packages", 0)))
    table.add_row("Status", f"โ {data.get('status', 'Unknown')}")

    console.print(table)
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
def main():
    """CLI entry point.

    Parses the command line and dispatches to the requested sub-command.
    Only ``download`` is implemented: it creates a sideload request, waits
    for the server-side worker to finish, downloads the generated PyPI
    packages, and reassembles the original file locally.
    """
    parser = argparse.ArgumentParser(
        description="Sideload CLI - Download large files via PyPI packages",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  sideload download https://example.com/largefile.zip
  sideload download https://example.com/file.zip --output ./downloads/
        """,
    )

    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # Download command
    download_parser = subparsers.add_parser(
        "download", help="Download a file via sideload"
    )
    download_parser.add_argument("url", help="URL of the file to download")
    download_parser.add_argument(
        "--output",
        "-o",
        type=Path,
        default=Path.cwd(),
        help="Output directory (default: current directory)",
    )
    download_parser.add_argument("--collection", help="JSONBin collection ID")
    download_parser.add_argument("--token", help="JSONBin API token")

    args = parser.parse_args()

    if not args.command:
        parser.print_help()
        return

    display_header()

    if args.command == "download":
        # Get credentials: CLI flags take precedence over the environment.
        jsonbin_token = args.token or os.environ.get("JSONBIN_TOKEN")
        collection_id = args.collection or os.environ.get("SIDELOAD_COLLECTION_ID")

        if not jsonbin_token:
            console.print(
                "โ JSONBin token required. Set JSONBIN_TOKEN environment variable or use --token",
                style="red",
            )
            return

        if not collection_id:
            console.print(
                "โ Collection ID required. Set SIDELOAD_COLLECTION_ID environment variable or use --collection",
                style="red",
            )
            return

        # Ensure output directory exists
        args.output.mkdir(parents=True, exist_ok=True)

        try:
            with SideloadClient(jsonbin_token, collection_id) as client:
                # Create the request
                console.print(
                    f"๐ Requesting download for: [bold blue]{args.url}[/bold blue]"
                )
                bin_id = client.create_request(args.url)

                # Monitor the request
                console.print(Rule("๐ก Monitoring Progress"))
                data = client.monitor_request(bin_id)

                if data.get("status") != "UPLOADED":
                    console.print(
                        "โ Request did not complete successfully", style="red"
                    )
                    return

                # Display summary
                display_summary(data)

                # Download packages.
                # FIX: the worker (SideloadBinManager.mark_completed) stores
                # the list under "package_names"; the original code read the
                # misspelled "packages_names" and therefore always aborted
                # with "No packages found".  The old key is kept as a
                # fallback for records written by other workers.
                package_names = (
                    data.get("package_names") or data.get("packages_names") or []
                )
                if not package_names:
                    console.print("โ No packages found in the response", style="red")
                    return

                console.print(Rule("๐ฆ Downloading Packages"))

                with tempfile.TemporaryDirectory() as temp_dir:
                    temp_path = Path(temp_dir)
                    wheel_files = client.download_packages(package_names, temp_path)

                    if not wheel_files:
                        console.print(
                            "โ No packages were downloaded successfully", style="red"
                        )
                        return

                    # Extract and reassemble.
                    # FIX: mark_completed writes "original_filename", not
                    # "filename"; try both before falling back to a default.
                    console.print(Rule("๐ง Reassembling File"))
                    original_filename = (
                        data.get("original_filename")
                        or data.get("filename")
                        or "downloaded_file"
                    )
                    output_file = args.output / original_filename

                    client.extract_and_reassemble(
                        wheel_files, original_filename, output_file
                    )

                    # Success!
                    console.print(Rule("โจ Complete"))
                    console.print(
                        f"๐ File successfully downloaded to: [bold green]{output_file}[/bold green]"
                    )
                    console.print(
                        f"๐ File size: [cyan]{output_file.stat().st_size:,} bytes[/cyan]"
                    )

        except KeyboardInterrupt:
            console.print("\nโ ๏ธ Download interrupted by user", style="yellow")
        except Exception as e:
            console.print(f"โ Error: {e}", style="bold red")
            raise
|
|
413
|
+
|
|
414
|
+
|
|
415
|
+
if __name__ == "__main__":
|
|
416
|
+
main()
|
sideload/jsonbin.py
ADDED
|
File without changes
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import httpx
|
|
3
|
+
|
|
4
|
+
JSONBIN_TOKEN = os.environ["JSONBIN_TOKEN"]
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class JSONBinConnector:
|
|
8
|
+
def __init__(self):
|
|
9
|
+
self.client = httpx.Client(
|
|
10
|
+
base_url="https://api.jsonbin.io/v3",
|
|
11
|
+
headers={"X-Master-Key": JSONBIN_TOKEN, "Content-Type": "application/json"},
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
def get_collections(self) -> list:
|
|
15
|
+
response = self.client.get("/c")
|
|
16
|
+
response.raise_for_status()
|
|
17
|
+
return response.json()
|
|
18
|
+
|
|
19
|
+
def get_collection_bins(
|
|
20
|
+
self, collection_id: str, last_bin_id: str | None = None
|
|
21
|
+
) -> list:
|
|
22
|
+
url = f"/c/{collection_id}/bins"
|
|
23
|
+
if last_bin_id:
|
|
24
|
+
url = f"{url}/{last_bin_id}"
|
|
25
|
+
response = self.client.get(
|
|
26
|
+
url,
|
|
27
|
+
headers={"X-Sort-Order": "ascending"},
|
|
28
|
+
)
|
|
29
|
+
response.raise_for_status()
|
|
30
|
+
return response.json()
|
|
31
|
+
|
|
32
|
+
def create_bin(self, collection_id: str, bin_name: str, data: dict) -> str:
|
|
33
|
+
headers = {"X-Collection-Id": collection_id, "X-Bin-Name": bin_name}
|
|
34
|
+
response = self.client.post("/b", json=data, headers=headers)
|
|
35
|
+
response.raise_for_status()
|
|
36
|
+
return response.json()["metadata"]["id"]
|
|
37
|
+
|
|
38
|
+
def get_bin(self, bin_id: str) -> dict:
|
|
39
|
+
response = self.client.get(f"/b/{bin_id}", headers={"X-Bin-Name": bin_id})
|
|
40
|
+
response.raise_for_status()
|
|
41
|
+
return response.json()["record"]
|
|
42
|
+
|
|
43
|
+
def update_bin(self, bin_id: str, data: dict) -> None:
|
|
44
|
+
existing_data = self.get_bin(bin_id)
|
|
45
|
+
updated_data = {**existing_data, **data}
|
|
46
|
+
response = self.client.put(
|
|
47
|
+
f"/b/{bin_id}", json=updated_data, headers={"X-Bin-Name": bin_id}
|
|
48
|
+
)
|
|
49
|
+
response.raise_for_status()
|
sideload/jsonbin_old.py
ADDED
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
"""
|
|
2
|
+
JSONBin API Connector
|
|
3
|
+
A reusable connector for interacting with JSONBin.io API
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import time
|
|
7
|
+
import httpx
|
|
8
|
+
from typing import Dict, List, Optional, Any
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class JSONBinConnector:
    """A connector for JSONBin.io API with httpx.

    Holds one persistent HTTP client authenticated via the X-Master-Key
    header; usable as a context manager (closing releases the client).
    """

    def __init__(self, api_token: str, base_url: str = "https://api.jsonbin.io/v3"):
        """Initialize the connector.

        Args:
            api_token: JSONBin API token
            base_url: JSONBin API base URL
        """
        self.api_token = api_token
        self.base_url = base_url
        self.client = httpx.Client(
            base_url=base_url,
            headers={"X-Master-Key": api_token, "Content-Type": "application/json"},
        )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.client.close()

    def create_bin(
        self, data: Dict[str, Any], collection_id: Optional[str] = None
    ) -> str:
        """Store *data* in a new bin (optionally inside *collection_id*)
        and return the new bin's id."""
        extra_headers = {"X-Collection-Id": collection_id} if collection_id else {}
        resp = self.client.post("/b", json=data, headers=extra_headers)
        resp.raise_for_status()
        return resp.json()["metadata"]["id"]

    def get_bin(self, bin_id: str) -> Dict[str, Any]:
        """Fetch and return the record stored in *bin_id*."""
        resp = self.client.get(f"/b/{bin_id}", headers={"X-Bin-Name": bin_id})
        resp.raise_for_status()
        return resp.json()["record"]

    def update_bin(self, bin_id: str, **data: Any) -> None:
        """Merge *data* into the bin's current record.

        Read-modify-write: fetches the existing record, overlays *data*,
        and PUTs the merged result back (not atomic across writers).
        """
        merged = {**self.get_bin(bin_id), **data}
        resp = self.client.put(
            f"/b/{bin_id}", json=merged, headers={"X-Bin-Name": bin_id}
        )
        resp.raise_for_status()

    def delete_bin(self, bin_id: str) -> None:
        """Permanently delete *bin_id*."""
        resp = self.client.delete(f"/b/{bin_id}", headers={"X-Bin-Name": bin_id})
        resp.raise_for_status()

    def get_collections(self) -> List[Dict[str, Any]]:
        """Return all collections for this account."""
        resp = self.client.get("/c")
        resp.raise_for_status()
        return resp.json()

    def create_collection(self, name: str) -> str:
        """Create a collection called *name* and return its id."""
        resp = self.client.post("/c", json={"name": name})
        resp.raise_for_status()
        return resp.json()["metadata"]["id"]

    def get_collection_bins(
        self, collection_id: str, after_bin_id: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List bins in *collection_id*, ascending, optionally paginated
        to entries after *after_bin_id*."""
        endpoint = f"/c/{collection_id}/bins"
        if after_bin_id:
            endpoint += f"/{after_bin_id}"

        resp = self.client.get(
            endpoint,
            headers={"X-Collection-Id": collection_id, "X-Sort-Order": "ascending"},
        )
        resp.raise_for_status()
        return resp.json()

    def close(self):
        """Close the HTTP client"""
        self.client.close()
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
class SideloadBinManager:
    """High-level manager for Sideload-specific JSONBin operations"""

    def __init__(self, connector: JSONBinConnector):
        """Wrap an existing JSONBin connector.

        Args:
            connector: JSONBin connector instance
        """
        self.connector = connector

    def create_sideload_request(
        self, url: str, collection_id: Optional[str] = None
    ) -> str:
        """Create a request bin for *url* in state CREATED and return its id."""
        payload = {"url": url, "status": "CREATED", "created_at": time.time()}
        return self.connector.create_bin(payload, collection_id)

    def update_sideload_status(
        self, bin_id: str, status: str, **additional_data: Any
    ) -> None:
        """Set the request's status plus any extra fields."""
        self.connector.update_bin(bin_id, status=status, **additional_data)

    def update_progress(self, bin_id: str, progress: int) -> None:
        """Record download progress (0-100) on the request bin."""
        self.connector.update_bin(bin_id, progress=progress)

    def mark_completed(
        self,
        bin_id: str,
        package_names: List[str],
        original_filename: str,
        file_size: int,
    ) -> None:
        """Mark the request UPLOADED and attach the resulting package
        metadata (package_names, total_packages, original_filename,
        file_size)."""
        self.connector.update_bin(
            bin_id,
            status="UPLOADED",
            package_names=package_names,
            total_packages=len(package_names),
            original_filename=original_filename,
            file_size=file_size,
        )

    def mark_failed(self, bin_id: str, reason: str) -> None:
        """Mark the request FAILED, recording *reason*."""
        self.connector.update_bin(bin_id, status="FAILED", reason=reason)

    def mark_rejected(self, bin_id: str, reason: str) -> None:
        """Mark the request REJECTED, recording *reason*."""
        self.connector.update_bin(bin_id, status="REJECTED", reason=reason)

    def get_sideload_data(self, bin_id: str) -> Dict[str, Any]:
        """Return the request bin's current record."""
        return self.connector.get_bin(bin_id)

    def find_sideload_collections(self) -> List[Dict[str, Any]]:
        """Return collections whose name starts with 'sideload_'."""
        return [
            entry
            for entry in self.connector.get_collections()
            if entry["collectionMeta"]["name"].startswith("sideload_")
        ]

    def get_pending_requests(
        self, collection_id: str, after_bin_id: Optional[str] = None
    ) -> List[str]:
        """Return ids of bins in *collection_id* still in state CREATED.

        Args:
            collection_id: The collection ID to check
            after_bin_id: Optional bin ID for pagination
        """
        # NOTE(review): each listing entry's "record" field is treated as a
        # bin id here — confirm against the JSONBin collection-listing schema.
        pending: List[str] = []
        for entry in self.connector.get_collection_bins(collection_id, after_bin_id):
            candidate = entry["record"]
            try:
                if self.get_sideload_data(candidate).get("status") == "CREATED":
                    pending.append(candidate)
            except Exception:
                # Skip bins that can't be read
                continue
        return pending
|
sideload/main.py
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
import subprocess
|
|
4
|
+
import tempfile
|
|
5
|
+
import threading
|
|
6
|
+
import time
|
|
7
|
+
|
|
8
|
+
import requests
|
|
9
|
+
|
|
10
|
+
from sideload.jsonbin_connector import JSONBinConnector
|
|
11
|
+
|
|
12
|
+
# Credentials are read at import time; the worker refuses to start without them.
JSONBIN_TOKEN = os.environ["JSONBIN_TOKEN"]
PYPI_TOKEN = os.environ["PYPI_TOKEN"]
# Maximum payload per generated package part.  NOTE(review): presumably chosen
# to stay under PyPI's per-file upload limit — confirm.
MAX_PACKAGE_SIZE = 95 * 1024 * 1024  # 95 MB

# Per-collection id of the last bin seen, used for paginated polling.
LAST_BINS: dict[str, str | None] = {}

# pyproject.toml written into each generated part-package.  Doubled braces
# ({{ }}) are literal TOML braces escaped for str.format; {package_name} is
# substituted per part.  The part payload ships as a setuptools data-file
# under share/<package_name>/.
PYPROJECT_TEMPLATE = """
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "{package_name}"
version = "1.0.0"
description = "Sideloaded package"
requires-python = ">=3.8"
authors = [
{{name = "Null Void" }}
]

[tool.setuptools.data-files]
"share/{package_name}" = ["{package_name}"]
"""

# Module-level singleton shared by all workers; created at import time
# (this also requires JSONBIN_TOKEN to be set before import).
jsonbin_connector = JSONBinConnector()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def package_build(directory: Path) -> bool:
    """Build a wheel from the project in *directory*.

    Runs ``python3 -m build --wheel`` in *directory* and reports success
    as a boolean.

    FIX: the original passed ``check=True``, which raises
    CalledProcessError on any non-zero exit, so ``returncode == 0`` could
    never be False and the declared bool contract was dead.  With
    ``check=False`` the return value is meaningful and callers can branch
    on failure.
    """
    result = subprocess.run(
        ["python3", "-m", "build", "--wheel"],
        cwd=str(directory),
        check=False,  # report failure via the return value instead of raising
    )
    return result.returncode == 0
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def twine_upload(directory: Path):
    """Upload everything under ``dist/`` in *directory* to PyPI via twine.

    Returns True on success, False on a non-zero twine exit.

    FIX: the original passed ``check=True`` together with
    ``return result.returncode == 0``, so the function either returned
    True or raised — it could never return False.  ``check=False`` makes
    the boolean return meaningful.

    SECURITY NOTE: the API token is passed on the command line (``-p``),
    which exposes it in the host's process list; prefer the
    TWINE_PASSWORD environment variable.  Left as-is here to avoid
    changing deployment behavior.
    """
    result = subprocess.run(
        [
            "twine",
            "upload",
            "dist/*",
            "-u",
            "__token__",
            "-p",
            PYPI_TOKEN,
        ],
        cwd=str(directory),
        check=False,  # report failure via the return value instead of raising
    )
    return result.returncode == 0
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def download_file(bin_id: str, url: str):
    """Download *url*, split it into PyPI-sized parts, then build and upload each part.

    Progress and state transitions are mirrored into the JSONBin record *bin_id*:
    DOWNLOADING -> DOWNLOADED -> BUILDING -> UPLOADING -> UPLOADED on success,
    or REJECTED / FAILED with a human-readable ``details`` message on error.

    Args:
        bin_id: JSONBin bin id used for status reporting.
        url: Source URL of the file to mirror.
    """
    try:
        # Stream so arbitrarily large files are never held in memory whole.
        response = requests.get(url, stream=True)
    except Exception as e:
        jsonbin_connector.update_bin(
            bin_id,
            {
                "status": "REJECTED",
                "details": f"Failed to download file: [{e.__class__.__name__}]: {e}",
            },
        )
        return
    if not response.ok:
        jsonbin_connector.update_bin(
            bin_id,
            {
                "status": "REJECTED",
                "details": f"URL returned code {response.status_code}: {response.reason}",
            },
        )
        return
    # Total size in bytes. Defaults to 1 so the progress division below
    # never divides by zero when Content-Length is absent.
    total_size = int(response.headers.get("content-length", 1))
    try:
        filename = response.headers["Content-Disposition"].split("; filename=")[1]
    except Exception:
        # No usable Content-Disposition: fall back to the last URL segment.
        filename = response.url.removesuffix("/").split("/")[-1]

    # Progress-tracking state.
    downloaded = 0
    chunk_size = 1024 * 1000  # Size of each chunk in bytes.
    last_progress = 0
    filename_root = filename.split(".")[0]
    package_name = f"sideload_{filename_root}_bin_{bin_id}"
    # Replace all non-alphanumeric characters with an underscore so the
    # result is a valid Python package name.
    package_name = "".join(c if c.isalnum() else "_" for c in package_name)
    parts: list[Path] = []

    def make_part_name():
        # Part index is simply how many parts exist so far.
        return f"{package_name}_p{len(parts)}"

    def make_new_part():
        # Creates <temp_dir>/<package_name>/<part>/<part>, records the path
        # and returns an open binary handle. `temp_dir` is a free variable
        # bound by the `with` block below before this closure is called.
        part_name = make_part_name()
        part_directory = Path(temp_dir) / package_name / part_name
        part_directory.mkdir(parents=True, exist_ok=False)
        part_path = part_directory / part_name
        parts.append(part_path)
        return open(part_path, "wb")

    # Open a local file for writing in binary mode.
    with tempfile.TemporaryDirectory() as temp_dir:
        # temp_dir = "./dumptmp3" # only for debugging
        os.mkdir(os.path.join(temp_dir, package_name))
        jsonbin_connector.update_bin(
            bin_id,
            {"status": "DOWNLOADING", "progress": 0},
        )
        current_part_fp = make_new_part()
        try:
            current_chunk_size = 0
            for data in response.iter_content(chunk_size=chunk_size):
                current_part_fp.write(data)
                downloaded += len(data)
                current_chunk_size += len(data)
                # Roll over to a new part once the size cap is reached.
                if current_chunk_size >= MAX_PACKAGE_SIZE:
                    current_part_fp.close()
                    current_part_fp = make_new_part()
                    current_chunk_size = 0
                if total_size < downloaded:
                    # Content-Length was missing or too small; track the
                    # observed size and hold progress at 99% until done.
                    total_size = downloaded
                    progress = 99
                else:
                    progress = int((downloaded / total_size) * 100)
                if progress != last_progress:
                    jsonbin_connector.update_bin(bin_id, {"progress": progress})
                    last_progress = progress
        finally:
            current_part_fp.close()
        jsonbin_connector.update_bin(bin_id, {"progress": 100, "status": "DOWNLOADED"})
        for part_idx, path_part in enumerate(parts):
            with open(
                path_part.parent / "pyproject.toml",
                "w",
                encoding="utf-8",
            ) as pyproject_file:
                pyproject_file.write(
                    PYPROJECT_TEMPLATE.format(package_name=path_part.name)
                )

            jsonbin_connector.update_bin(
                bin_id,
                {
                    "status": "BUILDING",
                    "details": f"Building package part {part_idx}/{len(parts)}.",
                },
            )
            if not package_build(path_part.parent):
                jsonbin_connector.update_bin(
                    bin_id,
                    {
                        # BUG FIX: original wrote the typo'd status "BULDING"
                        # here; a build failure is terminal and every other
                        # terminal error in this module reports "FAILED".
                        "status": "FAILED",
                        "details": f"Failed to build package part {part_idx}/{len(parts)}.",
                    },
                )
                return
            jsonbin_connector.update_bin(
                bin_id,
                {
                    "status": "UPLOADING",
                    "details": f"Uploading package part {part_idx}/{len(parts)}.",
                },
            )
            if not twine_upload(path_part.parent):
                jsonbin_connector.update_bin(
                    bin_id,
                    {
                        "status": "FAILED",
                        "details": f"Failed to upload package part {part_idx}/{len(parts)}.",
                    },
                )
                return
        jsonbin_connector.update_bin(
            bin_id,
            {
                "status": "UPLOADED",
                "packages_names": [path_part.name for path_part in parts],
                "filename": filename,
                "file_size": total_size,
                "total_packages": len(parts),
            },
        )
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def process_bin(bin_id: str):
    """Fetch one JSONBin record and dispatch on its status.

    CREATED bins are handed to :func:`download_file`; bins stuck in any
    intermediate state are marked FAILED (server was interrupted mid-run);
    UPLOADED bins are skipped.
    """
    endpoint = f"https://api.jsonbin.io/v3/b/{bin_id}"
    payload = requests.get(endpoint, headers={"X-Master-Key": JSONBIN_TOKEN}).json()
    record = payload["record"]
    status = record["status"]
    if status == "CREATED":
        print("Processing bin:", bin_id)
        download_file(bin_id, record["url"])
    elif status != "UPLOADED":
        jsonbin_connector.update_bin(
            bin_id, {"status": "FAILED", "details": "Server interruption"}
        )
    else:
        print("Bin already processed:", bin_id)
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def watch_collection(collection_id: str):
    """Poll a JSONBin collection forever, processing each newly seen bin.

    Remembers the most recent bin id per collection in the module-level
    LAST_BINS dict so subsequent polls only fetch newer entries.
    """
    print("Watching collection:", collection_id)
    while True:
        entries = jsonbin_connector.get_collection_bins(
            collection_id, LAST_BINS.get(collection_id)
        )
        newest_bin: str | None = None
        for entry in entries:
            current_id = entry["record"]
            process_bin(current_id)
            newest_bin = current_id
        if newest_bin is not None:
            LAST_BINS[collection_id] = newest_bin
        # Throttle the polling loop.
        time.sleep(3)
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def main():
    """Start one watcher thread per JSONBin collection named ``sideload_*``."""
    for coll in jsonbin_connector.get_collections():
        if not coll["collectionMeta"]["name"].startswith("sideload_"):
            continue
        watcher = threading.Thread(target=watch_collection, args=(coll["record"],))
        watcher.start()
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
main()
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: sideload
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Download large files via PyPI packages
|
|
5
|
+
Author: Sygmei
|
|
6
|
+
Author-email: Sygmei <3835355+Sygmei@users.noreply.github.com>
|
|
7
|
+
Requires-Dist: build>=1.3.0
|
|
8
|
+
Requires-Dist: twine>=6.2.0
|
|
9
|
+
Requires-Dist: wheel>=0.45.1
|
|
10
|
+
Requires-Dist: rich>=13.0.0
|
|
11
|
+
Requires-Dist: httpx>=0.28.1
|
|
12
|
+
Requires-Dist: pip>=25.2
|
|
13
|
+
Requires-Python: >=3.12
|
|
14
|
+
Description-Content-Type: text/markdown
|
|
15
|
+
|
|
16
|
+
# 🚀 Sideload
|
|
17
|
+
|
|
18
|
+
Download large files via PyPI packages! Sideload automatically splits large files into PyPI-compliant chunks and allows you to download them through a beautiful CLI interface.
|
|
19
|
+
|
|
20
|
+
## Features
|
|
21
|
+
|
|
22
|
+
- โจ **Beautiful CLI** powered by Rich with progress bars and status updates
|
|
23
|
+
- ๐ฆ **Automatic file splitting** into 99MB PyPI-compliant chunks
|
|
24
|
+
- ๐ **Automatic reassembly** of downloaded parts
|
|
25
|
+
- ๐ **JSONBin integration** for request tracking
|
|
26
|
+
- ๐ก๏ธ **Error handling** with detailed progress monitoring
|
|
27
|
+
|
|
28
|
+
## Installation
|
|
29
|
+
|
|
30
|
+
```bash
|
|
31
|
+
git clone <repository-url>
|
|
32
|
+
cd Sideload
|
|
33
|
+
uv install
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
## Usage
|
|
37
|
+
|
|
38
|
+
### Environment Variables
|
|
39
|
+
|
|
40
|
+
Set up your credentials:
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
export JSONBIN_TOKEN="your_jsonbin_token"
|
|
44
|
+
export SIDELOAD_COLLECTION_ID="your_collection_id"
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
### Download a File
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
# Basic usage
|
|
51
|
+
uv run sideload download https://example.com/largefile.zip
|
|
52
|
+
|
|
53
|
+
# Specify output directory
|
|
54
|
+
uv run sideload download https://example.com/largefile.zip --output ./downloads/
|
|
55
|
+
|
|
56
|
+
# Override credentials
|
|
57
|
+
uv run sideload download https://example.com/largefile.zip --token YOUR_TOKEN --collection YOUR_COLLECTION
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
### How it Works
|
|
61
|
+
|
|
62
|
+
1. **Submit Request**: The CLI creates a new request in your JSONBin collection
|
|
63
|
+
2. **Monitor Progress**: Real-time progress monitoring with beautiful progress bars
|
|
64
|
+
3. **Download Packages**: Automatically downloads all PyPI packages containing file parts
|
|
65
|
+
4. **Reassemble**: Extracts and concatenates parts to rebuild the original file
|
|
66
|
+
|
|
67
|
+
### CLI Interface
|
|
68
|
+
|
|
69
|
+
The CLI provides:
|
|
70
|
+
|
|
71
|
+
- ๐ **Colorful output** with status indicators
|
|
72
|
+
- ๐ **Progress bars** for downloads and processing
|
|
73
|
+
- ๐ **Real-time monitoring** of server-side processing
|
|
74
|
+
- โ
**Success/error reporting** with detailed information
|
|
75
|
+
- ๐ **Summary tables** showing download statistics
|
|
76
|
+
|
|
77
|
+
### Example Output
|
|
78
|
+
|
|
79
|
+
```
|
|
80
|
+
🚀 SIDELOAD
|
|
81
|
+
Download large files via PyPI packages
|
|
82
|
+
|
|
83
|
+
๐ Requesting download for: https://example.com/largefile.zip
|
|
84
|
+
โ
Created sideload request: abc123def456
|
|
85
|
+
|
|
86
|
+
๐ก Monitoring Progress
|
|
87
|
+
๐ฅ Downloading... (45%) โโโโโโโโโโโโโโโโโ
|
|
88
|
+
๐จ Building packages...
|
|
89
|
+
๐ค Uploading part 1/3...
|
|
90
|
+
|
|
91
|
+
๐ Download Summary
|
|
92
|
+
โโโโโโโโโโโโโโโโโโโโโณโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
93
|
+
โ Property โ Value โ
|
|
94
|
+
โกโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฉ
|
|
95
|
+
โ Original Filename โ largefile.zip โ
|
|
96
|
+
โ File Size โ 250,123,456 bytes โ
|
|
97
|
+
โ Total Packages โ 3 โ
|
|
98
|
+
โ Status โ โ
UPLOADED โ
|
|
99
|
+
โโโโโโโโโโโโโโโโโโโโโดโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
100
|
+
|
|
101
|
+
๐ฆ Downloading Packages
|
|
102
|
+
๐ฆ Downloading package 1/3... โโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
103
|
+
|
|
104
|
+
๐ง Reassembling File
|
|
105
|
+
๐ Assembling part 1/3... โโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
106
|
+
|
|
107
|
+
โจ Complete
|
|
108
|
+
๐ File successfully downloaded to: largefile.zip
|
|
109
|
+
๐ File size: 250,123,456 bytes
|
|
110
|
+
```
|
|
111
|
+
|
|
112
|
+
## Development
|
|
113
|
+
|
|
114
|
+
### Server Setup
|
|
115
|
+
|
|
116
|
+
The server component handles file processing and PyPI uploads:
|
|
117
|
+
|
|
118
|
+
```bash
|
|
119
|
+
# Set environment variables
|
|
120
|
+
export JSONBIN_TOKEN="your_token"
|
|
121
|
+
export PYPI_TOKEN="your_pypi_token"
|
|
122
|
+
|
|
123
|
+
# Run the server
|
|
124
|
+
uv run python src/sideload/main.py
|
|
125
|
+
```
|
|
126
|
+
|
|
127
|
+
### Project Structure
|
|
128
|
+
|
|
129
|
+
```
|
|
130
|
+
src/sideload/
|
|
131
|
+
โโโ __init__.py # Package initialization
|
|
132
|
+
โโโ main.py # Server component
|
|
133
|
+
โโโ cli.py # CLI client
|
|
134
|
+
```
|
|
135
|
+
|
|
136
|
+
## License
|
|
137
|
+
|
|
138
|
+
[Your License Here]
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
sideload/__init__.py,sha256=Y3rHLtR7n0sjLXrn-BEcrbIHx-9uE1tPZkooavo7xcA,222
|
|
2
|
+
sideload/cli.py,sha256=2UQ_DgvB7S0MXmBhXwjx2T78-ZZzCUO4hN2YfCJ2hf8,15061
|
|
3
|
+
sideload/jsonbin.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
4
|
+
sideload/jsonbin_connector.py,sha256=HtR1Pwnpm5jfYcmnvcug9HaKxFpsyJBXYYlLuzWPiTE,1680
|
|
5
|
+
sideload/jsonbin_old.py,sha256=ve21WsV7Ay60moFfrR4lSM_JRZNqo4z5v69cNw0Iqo0,8411
|
|
6
|
+
sideload/main.py,sha256=5iAQQlHeCdCHxOWE9OPKHF9TNs0k45n9rnUbruleT8w,7589
|
|
7
|
+
sideload-0.1.0.dist-info/WHEEL,sha256=-neZj6nU9KAMg2CnCY6T3w8J53nx1kFGw_9HfoSzM60,79
|
|
8
|
+
sideload-0.1.0.dist-info/entry_points.txt,sha256=7ULrIjaVhrxMhuddTeoPjeIrqmIvVc9cSU3lZU2_YqE,44
|
|
9
|
+
sideload-0.1.0.dist-info/METADATA,sha256=Ouf83o2V29NcOs2pEPWVxmhgzUBP1D-SvgDt69-jZlY,4247
|
|
10
|
+
sideload-0.1.0.dist-info/RECORD,,
|