sideloader 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sideloader/__init__.py +0 -0
- sideloader/cli.py +956 -0
- sideloader/jsonbin_connector.py +345 -0
- sideloader/scripts/cleanup_pypi.py +352 -0
- sideloader/server.py +379 -0
- sideloader-2.0.0.dist-info/METADATA +140 -0
- sideloader-2.0.0.dist-info/RECORD +9 -0
- sideloader-2.0.0.dist-info/WHEEL +4 -0
- sideloader-2.0.0.dist-info/entry_points.txt +4 -0
sideloader/server.py
ADDED
|
@@ -0,0 +1,379 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
import subprocess
|
|
4
|
+
import tempfile
|
|
5
|
+
import threading
|
|
6
|
+
import time
|
|
7
|
+
|
|
8
|
+
import requests
|
|
9
|
+
|
|
10
|
+
from sideloader.jsonbin_connector import JSONBinConnector
|
|
11
|
+
|
|
12
|
+
# Required credentials — os.environ[...] raises KeyError at import time if
# either variable is missing, so the server fails fast on misconfiguration.
JSONBIN_TOKEN = os.environ["JSONBIN_TOKEN"]
PYPI_TOKEN = os.environ["PYPI_TOKEN"]
# Per-part size cap: 92 MiB, kept below PyPI's per-file upload limit.
MAX_PACKAGE_SIZE = 92 * 1024 * 1024  # 92 MiB

# Pagination cursor per collection: ID of the newest bin already processed.
LAST_BINS: dict[str, str | None] = {}

# pyproject.toml written next to each downloaded file part so that
# `python -m build` can package the part; the raw part file is shipped as a
# data-file under share/<package_name>.  {package_name} is filled per part.
PYPROJECT_TEMPLATE = """
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "{package_name}"
version = "1.0.0"
description = "Sideloaded package"
requires-python = ">=3.8"
authors = [
    {{name = "Null Void" }}
]

[tool.setuptools.data-files]
"share/{package_name}" = ["{package_name}"]
"""

# Single shared connector instance used by all watcher threads in this module.
jsonbin_connector = JSONBinConnector()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def package_build(directory: Path) -> bool:
    """Build a wheel from the project rooted at *directory*.

    Runs ``python3 -m build --wheel`` with *directory* as the working
    directory.

    Returns:
        True when the build subprocess exits with code 0, False otherwise.
    """
    # check=False so a failed build is reported through the boolean return
    # value.  With check=True (the previous behavior) a non-zero exit raised
    # CalledProcessError, making `result.returncode == 0` dead code and
    # crashing callers that branch on `if not package_build(...)`.
    result = subprocess.run(
        ["python3", "-m", "build", "--wheel"], cwd=str(directory), check=False
    )
    return result.returncode == 0
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def twine_upload(directory: Path) -> bool:
    """Upload every artifact in *directory*/dist to PyPI via twine.

    Authenticates with the module-level PYPI_TOKEN (API-token auth, username
    ``__token__``).

    Returns:
        True when twine exits with code 0, False otherwise.
    """
    # check=False for the same reason as package_build: callers branch on the
    # boolean result, so an upload failure must return False rather than
    # raise CalledProcessError out of the caller's `if not twine_upload(...)`.
    # NOTE(review): passing the token via "-p" exposes it to other local
    # users through the process list; twine also honours the TWINE_PASSWORD
    # environment variable, which would be safer — confirm before changing.
    result = subprocess.run(
        [
            "twine",
            "upload",
            "dist/*",
            "-u",
            "__token__",
            "-p",
            PYPI_TOKEN,
        ],
        cwd=str(directory),
        check=False,
    )
    return result.returncode == 0
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def download_file(bin_id: str, url: str):
    """Download *url*, split it into PyPI-sized parts, build and upload them.

    Progress and state are reported through the JSONBin record *bin_id*:
    DOWNLOADING -> DOWNLOADED -> BUILDING -> UPLOADING -> UPLOADED, with
    REJECTED (bad URL/response) or FAILED (build/upload error) as terminal
    error states.
    """
    try:
        # Send a HTTP request to the server; stream=True so the body is
        # consumed chunk-by-chunk instead of loaded into memory.
        response = requests.get(url, stream=True)
    except Exception as e:
        jsonbin_connector.update_bin(
            bin_id,
            {
                "status": "REJECTED",
                "details": f"Failed to download file: [{e.__class__.__name__}]: {e}",
            },
        )
        return
    if not response.ok:
        jsonbin_connector.update_bin(
            bin_id,
            {
                "status": "REJECTED",
                "details": f"URL returned code {response.status_code}: {response.reason}",
            },
        )
        return
    # Total size in bytes.  Defaults to 1 so the progress division below can
    # never divide by zero; it is corrected on the fly once more bytes than
    # this have actually arrived.
    total_size = int(response.headers.get("content-length", 1))
    try:
        content_disposition = response.headers.get("Content-Disposition", "")
        filename = None

        # Try to extract filename from Content-Disposition header
        # Format can be: attachment; filename="file.ext"; filename*=utf-8''encoded%20name.ext
        if content_disposition:
            import re
            from urllib.parse import unquote

            # First try filename*= (RFC 5987 encoded, preferred)
            match = re.search(
                r"filename\*=(?:utf-8''|UTF-8'')([^;]+)", content_disposition
            )
            if match:
                filename = unquote(match.group(1).strip())
            else:
                # Fall back to filename= (may be quoted)
                match = re.search(
                    r'filename=(?:"([^"]+)"|([^;\s]+))', content_disposition
                )
                if match:
                    filename = match.group(1) or match.group(2)
                    filename = filename.strip('"').strip()

        if not filename:
            raise ValueError("No filename found in Content-Disposition")
    except Exception:
        # No usable header: fall back to the last path segment of the URL.
        filename = response.url.removesuffix("/").split("/")[-1]
    # URL decode and clean up the filename
    from urllib.parse import unquote

    filename = unquote(filename)

    # Initialize variables to track progress.
    downloaded = 0
    chunk_size = 1024 * 1024  # Size of each chunk in bytes.
    last_progress = 0
    filename_root = filename.split(".")[0]
    package_name = f"sideload_{filename_root}_bin_{bin_id}"
    # replace all non-alphanumeric characters with an underscore
    package_name = "".join(c if c.isalnum() else "_" for c in package_name)
    parts: list[Path] = []

    def make_part_name():
        # Part index is derived from how many parts exist so far (p0, p1, ...).
        return f"{package_name}_p{len(parts)}"

    def make_new_part():
        # Each part gets its own sub-directory so it can be built as an
        # independent PyPI project (pyproject.toml is written there later).
        part_name = make_part_name()
        part_directory = Path(temp_dir) / package_name / part_name
        part_directory.mkdir(parents=True, exist_ok=False)
        part_path = part_directory / part_name
        parts.append(part_path)
        return open(part_path, "wb")

    # Open a local file for writing in binary mode.
    with tempfile.TemporaryDirectory() as temp_dir:
        # temp_dir = "./dumptmp3" # only for debugging
        os.mkdir(os.path.join(temp_dir, package_name))
        jsonbin_connector.update_bin(
            bin_id,
            {"status": "DOWNLOADING", "progress": 0},
        )
        current_part_fp = make_new_part()
        try:
            current_chunk_size = 0
            for data in response.iter_content(chunk_size=chunk_size):
                # Roll over to a fresh part before exceeding the per-package cap.
                if current_chunk_size + len(data) > MAX_PACKAGE_SIZE:
                    current_part_fp.close()
                    current_part_fp = make_new_part()
                    current_chunk_size = 0
                current_part_fp.write(data)
                downloaded += len(data)
                current_chunk_size += len(data)
                if total_size < downloaded:
                    # content-length was missing or wrong; track the real size
                    # and hold progress at 99% until the stream ends.
                    total_size = downloaded
                    progress = 99
                else:
                    progress = int((downloaded / total_size) * 100)
                if progress != last_progress:
                    jsonbin_connector.update_bin(bin_id, {"progress": progress})
                    last_progress = progress
        finally:
            current_part_fp.close()
        jsonbin_connector.update_bin(bin_id, {"progress": 100, "status": "DOWNLOADED"})
        # 1-based part numbers so progress details read "part 1/3" (the old
        # 0-based messages reported "part 0/3" for the first part).
        for part_idx, path_part in enumerate(parts, start=1):
            with open(
                path_part.parent / "pyproject.toml",
                "w",
                encoding="utf-8",
            ) as pyproject_file:
                pyproject_file.write(
                    PYPROJECT_TEMPLATE.format(package_name=path_part.name)
                )

            jsonbin_connector.update_bin(
                bin_id,
                {
                    "status": "BUILDING",
                    "details": f"Building package part {part_idx}/{len(parts)}.",
                },
            )
            if not package_build(path_part.parent):
                jsonbin_connector.update_bin(
                    bin_id,
                    {
                        # Was "BULDING" (typo): an unknown, non-terminal status
                        # that left polling clients waiting forever.  Report
                        # FAILED, consistent with the upload-failure branch.
                        "status": "FAILED",
                        "details": f"Failed to build package part {part_idx}/{len(parts)}.",
                    },
                )
                return
            jsonbin_connector.update_bin(
                bin_id,
                {
                    "status": "UPLOADING",
                    "details": f"Uploading package part {part_idx}/{len(parts)}.",
                },
            )
            if not twine_upload(path_part.parent):
                jsonbin_connector.update_bin(
                    bin_id,
                    {
                        "status": "FAILED",
                        "details": f"Failed to upload package part {part_idx}/{len(parts)}.",
                    },
                )
                return
        jsonbin_connector.update_bin(
            bin_id,
            {
                "status": "UPLOADED",
                "packages_names": [path_part.name for path_part in parts],
                "filename": filename,
                "file_size": total_size,
                "total_packages": len(parts),
            },
        )
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def process_bin(bin_id: str):
    """Fetch a request bin and act on its current status.

    CREATED bins are downloaded and packaged; UPLOADED bins are skipped;
    any other status is treated as a request interrupted mid-processing
    and is marked FAILED.
    """
    endpoint = f"https://api.jsonbin.io/v3/b/{bin_id}"
    payload = requests.get(endpoint, headers={"X-Master-Key": JSONBIN_TOKEN}).json()
    record = payload["record"]
    status = record["status"]

    if status == "CREATED":
        print("Processing bin:", bin_id)
        download_file(bin_id, record["url"])
        return

    if status == "UPLOADED":
        print("Bin already processed:", bin_id)
        return

    # Any other status means the server died while this bin was in flight.
    jsonbin_connector.update_bin(
        bin_id, {"status": "FAILED", "details": "Server interruption"}
    )
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def watch_collection(collection_id: str):
    """Poll a JSONBin collection forever, processing each new bin once.

    After every sweep the newest processed bin ID is stored in LAST_BINS and
    used as the cursor for the next poll, so already-seen bins are skipped.
    """
    print("Watching collection:", collection_id)
    while True:
        fresh_bins = jsonbin_connector.get_collection_bins(
            collection_id, LAST_BINS.get(collection_id)
        )
        newest: str | None = None
        for entry in fresh_bins:
            newest = entry["record"]
            process_bin(newest)
        if newest is not None:
            LAST_BINS[collection_id] = newest
        time.sleep(3)
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
# Statuses that indicate a bin can be cleaned up (terminal states only)
CLEANUP_STATUSES = {"UPLOADED", "FAILED", "REJECTED"}
# Max age for bins in CREATED status (considered dead/stale) - 24 hours
MAX_CREATED_AGE_SECONDS = 24 * 60 * 60
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
def cleanup_collection(collection_id: str) -> tuple[int, int]:
    """
    Clean up finished and dead bins from a collection.
    Handles pagination to process ALL bins.

    Deletes bins whose status is in CLEANUP_STATUSES, plus CREATED bins
    older than MAX_CREATED_AGE_SECONDS (stuck/dead requests).

    Returns:
        Tuple of (deleted_count, error_count)
    """
    deleted = 0
    errors = 0
    current_time = time.time()
    last_bin_id: str | None = None

    try:
        while True:
            # Get bins in the collection (paginated)
            collection_data = jsonbin_connector.get_collection_bins(
                collection_id, last_bin_id
            )

            if not collection_data:
                break  # No more bins

            for bin_data in collection_data:
                bin_id = bin_data["record"]
                last_bin_id = bin_id  # Track for pagination
                # NOTE(review): if this bin is deleted below, the next page is
                # still requested relative to its ID — confirm the JSONBin API
                # accepts a deleted bin as a pagination cursor.

                try:
                    bin_record = jsonbin_connector.get_bin(bin_id)
                    status = bin_record.get("status", "UNKNOWN")
                    created_at = bin_record.get("created_at", 0)

                    should_delete = False
                    reason = ""

                    # Delete finished/failed bins
                    if status in CLEANUP_STATUSES:
                        should_delete = True
                        reason = f"status={status}"
                    # Delete stale CREATED bins (stuck/dead requests)
                    elif status == "CREATED" and created_at > 0:
                        age = current_time - created_at
                        if age > MAX_CREATED_AGE_SECONDS:
                            should_delete = True
                            reason = f"stale CREATED (age={age / 3600:.1f}h)"

                    if should_delete:
                        print(f" Deleting bin {bin_id}: {reason}")
                        jsonbin_connector.delete_bin(bin_id)
                        deleted += 1

                except Exception as e:
                    # Per-bin failures are counted but do not stop the sweep.
                    print(f" Error processing bin {bin_id}: {e}")
                    errors += 1

    except Exception as e:
        # A pagination/listing failure aborts the sweep for this collection.
        print(f" Error fetching collection bins: {e}")
        errors += 1

    return deleted, errors
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def cleanup_all_collections():
    """Clean up all sideload collections on startup.

    Sweeps every collection whose name starts with ``sideload_`` and
    returns the aggregated (total_deleted, total_errors) counts.
    """
    print("🧹 Cleaning up old bins...")
    total_deleted = 0
    total_errors = 0

    for entry in jsonbin_connector.get_collections():
        name = entry["collectionMeta"]["name"]
        if not name.startswith("sideload_"):
            continue
        cid = entry["record"]
        print(f" Cleaning collection: {name} ({cid})")

        removed, failed = cleanup_collection(cid)
        total_deleted += removed
        total_errors += failed

    print(f"✅ Cleanup complete: {total_deleted} bins deleted, {total_errors} errors")
    return total_deleted, total_errors
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
def server_main():
    """Entry point: one-shot mode (--request-id) or long-running polling mode."""
    import argparse

    parser = argparse.ArgumentParser(description="Sideload server")
    parser.add_argument(
        "--request-id",
        type=str,
        default=None,
        help="Process a single request by JSONBin ID and exit",
    )
    args = parser.parse_args()

    request_id = args.request_id
    if request_id:
        # Single-request mode: process one request and exit
        print(f"🎯 Processing single request: {request_id}")
        process_bin(request_id)
        print(f"✅ Finished processing request: {request_id}")
        return

    # Polling mode: clean up and watch all collections
    cleanup_all_collections()

    print("🚀 Starting sideload server...")
    for entry in jsonbin_connector.get_collections():
        name = entry["collectionMeta"]["name"]
        if not name.startswith("sideload_"):
            continue
        cid = entry["record"]
        print(f" Watching collection: {name} ({cid})")
        # One non-daemon watcher thread per collection keeps the process alive.
        threading.Thread(target=watch_collection, args=(cid,)).start()
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
# Allow running the server directly (python -m sideloader.server / script exec).
if __name__ == "__main__":
    server_main()
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: sideloader
|
|
3
|
+
Version: 2.0.0
|
|
4
|
+
Summary: Download large files via PyPI packages
|
|
5
|
+
Author: Sygmei
|
|
6
|
+
Author-email: Sygmei <3835355+Sygmei@users.noreply.github.com>
|
|
7
|
+
Requires-Dist: build>=1.3.0
|
|
8
|
+
Requires-Dist: twine>=6.2.0
|
|
9
|
+
Requires-Dist: wheel>=0.45.1
|
|
10
|
+
Requires-Dist: rich>=13.0.0
|
|
11
|
+
Requires-Dist: httpx>=0.28.1
|
|
12
|
+
Requires-Dist: pip>=25.2
|
|
13
|
+
Requires-Dist: playwright>=1.55.0
|
|
14
|
+
Requires-Dist: pyotp>=2.9.0
|
|
15
|
+
Requires-Python: >=3.12
|
|
16
|
+
Description-Content-Type: text/markdown
|
|
17
|
+
|
|
18
|
+
# 🚀 Sideload
|
|
19
|
+
|
|
20
|
+
Download large files via PyPI packages! Sideload automatically splits large files into PyPI-compliant chunks and allows you to download them through a beautiful CLI interface.
|
|
21
|
+
|
|
22
|
+
## Features
|
|
23
|
+
|
|
24
|
+
- โจ **Beautiful CLI** powered by Rich with progress bars and status updates
|
|
25
|
+
- ๐ฆ **Automatic file splitting** into 99MB PyPI-compliant chunks
|
|
26
|
+
- ๐ **Automatic reassembly** of downloaded parts
|
|
27
|
+
- ๐ **JSONBin integration** for request tracking
|
|
28
|
+
- ๐ก๏ธ **Error handling** with detailed progress monitoring
|
|
29
|
+
|
|
30
|
+
## Installation
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
git clone <repository-url>
|
|
34
|
+
cd Sideload
|
|
35
|
+
uv sync
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
## Usage
|
|
39
|
+
|
|
40
|
+
### Environment Variables
|
|
41
|
+
|
|
42
|
+
Set up your credentials:
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
export JSONBIN_TOKEN="your_jsonbin_token"
|
|
46
|
+
export SIDELOAD_COLLECTION_ID="your_collection_id"
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
### Download a File
|
|
50
|
+
|
|
51
|
+
```bash
|
|
52
|
+
# Basic usage
|
|
53
|
+
uv run sideload download https://example.com/largefile.zip
|
|
54
|
+
|
|
55
|
+
# Specify output directory
|
|
56
|
+
uv run sideload download https://example.com/largefile.zip --output ./downloads/
|
|
57
|
+
|
|
58
|
+
# Override credentials
|
|
59
|
+
uv run sideload download https://example.com/largefile.zip --token YOUR_TOKEN --collection YOUR_COLLECTION
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
### How it Works
|
|
63
|
+
|
|
64
|
+
1. **Submit Request**: The CLI creates a new request in your JSONBin collection
|
|
65
|
+
2. **Monitor Progress**: Real-time progress monitoring with beautiful progress bars
|
|
66
|
+
3. **Download Packages**: Automatically downloads all PyPI packages containing file parts
|
|
67
|
+
4. **Reassemble**: Extracts and concatenates parts to rebuild the original file
|
|
68
|
+
|
|
69
|
+
### CLI Interface
|
|
70
|
+
|
|
71
|
+
The CLI provides:
|
|
72
|
+
|
|
73
|
+
- ๐ **Colorful output** with status indicators
|
|
74
|
+
- ๐ **Progress bars** for downloads and processing
|
|
75
|
+
- ๐ **Real-time monitoring** of server-side processing
|
|
76
|
+
- โ
**Success/error reporting** with detailed information
|
|
77
|
+
- ๐ **Summary tables** showing download statistics
|
|
78
|
+
|
|
79
|
+
### Example Output
|
|
80
|
+
|
|
81
|
+
```
|
|
82
|
+
๐ SIDELOAD
|
|
83
|
+
Download large files via PyPI packages
|
|
84
|
+
|
|
85
|
+
๐ Requesting download for: https://example.com/largefile.zip
|
|
86
|
+
โ
Created sideload request: abc123def456
|
|
87
|
+
|
|
88
|
+
๐ก Monitoring Progress
|
|
89
|
+
๐ฅ Downloading... (45%) โโโโโโโโโโโโโโโโโ
|
|
90
|
+
๐จ Building packages...
|
|
91
|
+
๐ค Uploading part 1/3...
|
|
92
|
+
|
|
93
|
+
๐ Download Summary
|
|
94
|
+
โโโโโโโโโโโโโโโโโโโโโณโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
95
|
+
โ Property โ Value โ
|
|
96
|
+
โกโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฉ
|
|
97
|
+
โ Original Filename โ largefile.zip โ
|
|
98
|
+
โ File Size โ 250,123,456 bytes โ
|
|
99
|
+
โ Total Packages โ 3 โ
|
|
100
|
+
โ Status โ โ
UPLOADED โ
|
|
101
|
+
โโโโโโโโโโโโโโโโโโโโโดโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
102
|
+
|
|
103
|
+
๐ฆ Downloading Packages
|
|
104
|
+
๐ฆ Downloading package 1/3... โโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
105
|
+
|
|
106
|
+
๐ง Reassembling File
|
|
107
|
+
๐ Assembling part 1/3... โโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
108
|
+
|
|
109
|
+
โจ Complete
|
|
110
|
+
๐ File successfully downloaded to: largefile.zip
|
|
111
|
+
๐ File size: 250,123,456 bytes
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
## Development
|
|
115
|
+
|
|
116
|
+
### Server Setup
|
|
117
|
+
|
|
118
|
+
The server component handles file processing and PyPI uploads:
|
|
119
|
+
|
|
120
|
+
```bash
|
|
121
|
+
# Set environment variables
|
|
122
|
+
export JSONBIN_TOKEN="your_token"
|
|
123
|
+
export PYPI_TOKEN="your_pypi_token"
|
|
124
|
+
|
|
125
|
+
# Run the server
|
|
126
|
+
uv run python src/sideload/main.py
|
|
127
|
+
```
|
|
128
|
+
|
|
129
|
+
### Project Structure
|
|
130
|
+
|
|
131
|
+
```
|
|
132
|
+
src/sideload/
|
|
133
|
+
โโโ __init__.py # Package initialization
|
|
134
|
+
โโโ main.py # Server component
|
|
135
|
+
โโโ cli.py # CLI client
|
|
136
|
+
```
|
|
137
|
+
|
|
138
|
+
## License
|
|
139
|
+
|
|
140
|
+
[Your License Here]
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
sideloader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
|
+
sideloader/cli.py,sha256=2rLvz2N1BsEZOtCoXopIUAl5mIeFgerOujuu48uETPA,37994
|
|
3
|
+
sideloader/jsonbin_connector.py,sha256=rgaynojjVT3zK-me_IGssWMDt43w98IOnH8yLBX_HOk,10014
|
|
4
|
+
sideloader/scripts/cleanup_pypi.py,sha256=YPzYVsutFVsRoCi4qG6vZo8x5YiLzpAfJ7xKxZiZWgI,15264
|
|
5
|
+
sideloader/server.py,sha256=qUh4YUSKyGCzlzx_I5e2T8P-PmFFpr93sx8qx6yUfNU,12753
|
|
6
|
+
sideloader-2.0.0.dist-info/WHEEL,sha256=Jb20R3Ili4n9P1fcwuLup21eQ5r9WXhs4_qy7VTrgPI,79
|
|
7
|
+
sideloader-2.0.0.dist-info/entry_points.txt,sha256=XIfbsCo-UMQvQwAuocZo7d7o9YMJ6yNy1P2sK5OeGA0,106
|
|
8
|
+
sideloader-2.0.0.dist-info/METADATA,sha256=o8S-kWlUipN5n6iKw_Vj62gJagMQFeMyeytiDVB584c,4311
|
|
9
|
+
sideloader-2.0.0.dist-info/RECORD,,
|