sideloader 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sideloader/__init__.py +0 -0
- sideloader/cli.py +956 -0
- sideloader/jsonbin_connector.py +345 -0
- sideloader/scripts/cleanup_pypi.py +352 -0
- sideloader/server.py +379 -0
- sideloader-2.0.0.dist-info/METADATA +140 -0
- sideloader-2.0.0.dist-info/RECORD +9 -0
- sideloader-2.0.0.dist-info/WHEEL +4 -0
- sideloader-2.0.0.dist-info/entry_points.txt +4 -0
|
@@ -0,0 +1,345 @@
|
|
|
1
|
+
"""
|
|
2
|
+
JSONBin API Connector
|
|
3
|
+
A reusable connector for interacting with JSONBin.io API
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import os
|
|
7
|
+
import time
|
|
8
|
+
import httpx
|
|
9
|
+
from typing import Dict, List, Optional, Any
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def _get_env_token() -> str:
    """Return the JSONBin API token from the JSONBIN_TOKEN env var.

    Raises:
        ValueError: If JSONBIN_TOKEN is unset (or empty).
    """
    value = os.environ.get("JSONBIN_TOKEN")
    if value:
        return value
    raise ValueError("JSONBIN_TOKEN environment variable is not set")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _get_env_verify_ssl() -> bool:
    """Return the SSL-verification flag from JSONBIN_VERIFY_SSL.

    Defaults to True; "true", "1" and "yes" (any case) count as enabled.
    """
    raw = os.environ.get("JSONBIN_VERIFY_SSL", "true")
    return raw.lower() in {"true", "1", "yes"}
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _get_env_key_type() -> str:
    """Return the lower-cased key type from JSONBIN_KEY_TYPE (default "master")."""
    raw = os.environ.get("JSONBIN_KEY_TYPE", "master")
    return raw.lower()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class JSONBinConnector:
    """A connector for JSONBin.io API with httpx.

    Wraps a configured ``httpx.Client`` (auth header + JSON content type)
    and exposes CRUD helpers for bins and collections. Usable as a context
    manager so the underlying client is always closed.
    """

    def __init__(
        self,
        api_token: Optional[str] = None,
        base_url: str = "https://api.jsonbin.io/v3",
        verify_ssl: Optional[bool] = None,
        key_type: Optional[str] = None,
    ):
        """
        Initialize the JSONBin connector

        Args:
            api_token: JSONBin API token (defaults to JSONBIN_TOKEN env var)
            base_url: JSONBin API base URL
            verify_ssl: Whether to verify SSL certificates (defaults to JSONBIN_VERIFY_SSL env var or True)
            key_type: Type of key to use - "master" or "access" (defaults to JSONBIN_KEY_TYPE env var or "master")
        """
        # Use provided values or fall back to environment variables
        self.api_token = api_token or _get_env_token()
        self.base_url = base_url
        verify = _get_env_verify_ssl() if verify_ssl is None else verify_ssl
        key_type = key_type or _get_env_key_type()

        # JSONBin distinguishes master keys from scoped access keys;
        # each is sent in a different request header.
        if key_type == "access":
            key_header = "X-Access-Key"
        else:
            key_header = "X-Master-Key"

        self.client = httpx.Client(
            base_url=base_url,
            headers={key_header: self.api_token, "Content-Type": "application/json"},
            verify=verify,
        )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def close(self):
        """Close the HTTP client"""
        self.client.close()

    def create_bin(
        self, data: Dict[str, Any], collection_id: Optional[str] = None
    ) -> str:
        """
        Create a new bin

        Args:
            data: The data to store in the bin
            collection_id: Optional collection ID to add the bin to

        Returns:
            The created bin ID

        Raises:
            httpx.HTTPStatusError: If the API responds with an error status.
        """
        headers = {}
        if collection_id:
            headers["X-Collection-Id"] = collection_id

        response = self.client.post("/b", json=data, headers=headers)
        response.raise_for_status()
        return response.json()["metadata"]["id"]

    def get_bin(self, bin_id: str) -> Dict[str, Any]:
        """
        Get bin data by ID

        Args:
            bin_id: The bin ID to retrieve

        Returns:
            The bin data (the "record" payload, without metadata)

        Raises:
            httpx.HTTPStatusError: If the API responds with an error status.
        """
        # NOTE(review): the bin ID is also sent as X-Bin-Name — presumably to
        # tag/name the bin on read; confirm against the JSONBin API docs.
        response = self.client.get(f"/b/{bin_id}", headers={"X-Bin-Name": bin_id})
        response.raise_for_status()
        return response.json()["record"]

    def update_bin(
        self, bin_id: str, data: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> None:
        """
        Update bin data (read-merge-write)

        Args:
            bin_id: The bin ID to update
            data: Dictionary of key-value pairs to update
            **kwargs: Additional key-value pairs to update in the bin

        Raises:
            httpx.HTTPStatusError: If the API responds with an error status.
        """
        # Get existing data first
        existing_data = self.get_bin(bin_id)

        # Merge new values without mutating the caller's dict.
        # BUGFIX: the previous implementation called data.update(kwargs) on
        # the caller's own object, leaking kwargs back into their dict.
        updates = {**(data or {}), **kwargs}
        updated_data = {**existing_data, **updates}

        # Update the bin
        response = self.client.put(
            f"/b/{bin_id}", json=updated_data, headers={"X-Bin-Name": bin_id}
        )
        response.raise_for_status()

    def delete_bin(self, bin_id: str) -> None:
        """
        Delete a bin

        Args:
            bin_id: The bin ID to delete

        Raises:
            httpx.HTTPStatusError: If the API responds with an error status.
        """
        response = self.client.delete(f"/b/{bin_id}", headers={"X-Bin-Name": bin_id})
        response.raise_for_status()

    def get_collections(self) -> List[Dict[str, Any]]:
        """
        Get all collections

        Returns:
            List of collections (raw API response)

        Raises:
            httpx.HTTPStatusError: If the API responds with an error status.
        """
        response = self.client.get("/c")
        response.raise_for_status()
        return response.json()

    def create_collection(self, name: str) -> str:
        """
        Create a new collection

        Args:
            name: The collection name

        Returns:
            The created collection ID

        Raises:
            httpx.HTTPStatusError: If the API responds with an error status.
        """
        response = self.client.post("/c", json={"name": name})
        response.raise_for_status()
        return response.json()["metadata"]["id"]

    def get_collection_bins(
        self, collection_id: str, after_bin_id: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """
        Get bins from a collection

        Args:
            collection_id: The collection ID
            after_bin_id: Optional bin ID to get bins after (for pagination)

        Returns:
            List of bins in the collection

        Raises:
            httpx.HTTPStatusError: If the API responds with an error status.
        """
        endpoint = f"/c/{collection_id}/bins"
        if after_bin_id:
            endpoint += f"/{after_bin_id}"

        response = self.client.get(
            endpoint,
            headers={"X-Collection-Id": collection_id, "X-Sort-Order": "ascending"},
        )
        response.raise_for_status()
        return response.json()
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
class SideloadBinManager:
    """High-level manager for Sideload-specific JSONBin operations."""

    def __init__(self, connector: JSONBinConnector):
        """Keep a reference to the low-level connector.

        Args:
            connector: JSONBin connector instance used for every API call.
        """
        self.connector = connector

    def create_sideload_request(
        self, url: str, collection_id: Optional[str] = None
    ) -> str:
        """Register a new sideload request bin.

        Args:
            url: The URL to sideload.
            collection_id: Optional collection to file the bin under.

        Returns:
            The newly created bin ID.
        """
        request_record = {
            "url": url,
            "status": "CREATED",
            "created_at": time.time(),
        }
        return self.connector.create_bin(request_record, collection_id)

    def update_sideload_status(
        self, bin_id: str, status: str, **additional_data: Any
    ) -> None:
        """Set a new status (plus any extra fields) on a request bin.

        Args:
            bin_id: The bin ID to update.
            status: The new status value.
            **additional_data: Extra key-value pairs stored alongside it.
        """
        self.connector.update_bin(bin_id, status=status, **additional_data)

    def update_progress(self, bin_id: str, progress: int) -> None:
        """Record download progress on a request bin.

        Args:
            bin_id: The bin ID to update.
            progress: Progress percentage (0-100).
        """
        self.connector.update_bin(bin_id, progress=progress)

    def mark_completed(
        self,
        bin_id: str,
        package_names: List[str],
        original_filename: str,
        file_size: int,
    ) -> None:
        """Flag a request as uploaded and attach its result metadata.

        Args:
            bin_id: The bin ID to update.
            package_names: Names of the packages that were created.
            original_filename: Name of the original source file.
            file_size: Size of the original file.
        """
        self.connector.update_bin(
            bin_id,
            status="UPLOADED",
            package_names=package_names,
            total_packages=len(package_names),
            original_filename=original_filename,
            file_size=file_size,
        )

    def mark_failed(self, bin_id: str, reason: str) -> None:
        """Flag a request as failed with an explanation.

        Args:
            bin_id: The bin ID to update.
            reason: Why processing failed.
        """
        self.connector.update_bin(bin_id, status="FAILED", reason=reason)

    def mark_rejected(self, bin_id: str, reason: str) -> None:
        """Flag a request as rejected with an explanation.

        Args:
            bin_id: The bin ID to update.
            reason: Why the request was rejected.
        """
        self.connector.update_bin(bin_id, status="REJECTED", reason=reason)

    def get_sideload_data(self, bin_id: str) -> Dict[str, Any]:
        """Fetch the stored record for a sideload request.

        Args:
            bin_id: The bin ID to read.

        Returns:
            The sideload request data.
        """
        return self.connector.get_bin(bin_id)

    def find_sideload_collections(self) -> List[Dict[str, Any]]:
        """Return every collection whose name begins with 'sideload_'.

        Returns:
            The matching collections.
        """
        matches = []
        for entry in self.connector.get_collections():
            if entry["collectionMeta"]["name"].startswith("sideload_"):
                matches.append(entry)
        return matches

    def get_pending_requests(
        self, collection_id: str, after_bin_id: Optional[str] = None
    ) -> List[str]:
        """List bins in a collection that still await processing.

        Args:
            collection_id: The collection to scan.
            after_bin_id: Optional bin ID for pagination.

        Returns:
            IDs of bins whose status is still "CREATED".
        """
        listed = self.connector.get_collection_bins(collection_id, after_bin_id)
        pending = []

        for entry in listed:
            candidate_id = entry["record"]
            try:
                record = self.get_sideload_data(candidate_id)
                if record.get("status") == "CREATED":
                    pending.append(candidate_id)
            except Exception:
                # Bins that cannot be read are silently skipped.
                continue

        return pending
|
|
@@ -0,0 +1,352 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Admin script to delete all sideload-* packages from PyPI using browser automation
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import os
|
|
7
|
+
import sys
|
|
8
|
+
import asyncio
|
|
9
|
+
import random
|
|
10
|
+
from playwright.async_api import async_playwright
|
|
11
|
+
import pyotp
|
|
12
|
+
|
|
13
|
+
# Credentials are taken from the environment so they never appear in source.
PYPI_USER = os.environ.get("PYPI_USER")
PYPI_PASSWORD = os.environ.get("PYPI_PASSWORD")
PYPI_TOTP = os.environ.get("PYPI_TOTP")  # Optional: TOTP secret key

# Fail fast at import time: nothing below can work without credentials.
if not PYPI_USER or not PYPI_PASSWORD:
    print("ā PYPI_USER and PYPI_PASSWORD environment variables must be set")
    sys.exit(1)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
async def human_like_mouse_movement(page):
    """Simulate human-like mouse movements across the page.

    Args:
        page: Playwright page whose mouse is moved.
    """
    # Fall back to a 1280x720 area when Playwright reports no viewport size.
    viewport_size = page.viewport_size
    width = viewport_size.get('width', 1280) if viewport_size else 1280
    height = viewport_size.get('height', 720) if viewport_size else 720

    # A handful of random moves with short pauses, kept 50px from the edges.
    for _ in range(random.randint(3, 6)):
        x = random.randint(50, width - 50)
        y = random.randint(50, height - 50)
        await page.mouse.move(x, y)
        await asyncio.sleep(random.uniform(0.15, 0.4))
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
async def delete_project(page, package_name: str) -> bool:
    """Delete a project from PyPI using browser automation.

    Navigates to the project's settings page, confirms the deletion form
    (checkboxes + typed project name), and clicks the delete button.

    Args:
        page: Logged-in Playwright page.
        package_name: Name of the PyPI project to delete.

    Returns:
        True if the deletion flow completed, False on any failure.
    """
    try:
        print(f"šļø Deleting {package_name}...")

        # Go to project settings
        settings_url = f'https://pypi.org/manage/project/{package_name}/settings/'
        await page.goto(settings_url)

        # Check if project exists (if we get 404, project doesn't exist)
        title = await page.title()
        if "404" in title or "Not Found" in title:
            print(f" ā ļø Project {package_name} not found")
            return False

        # Scroll to the bottom of the page to find delete section
        await page.evaluate('window.scrollTo(0, document.body.scrollHeight)')
        await asyncio.sleep(0.5)

        # Extract the exact project name from the page to ensure correct case
        # (PyPI's confirmation check is exact-match, so the canonical name is
        # read from the delete-section label, with the URL as fallback).
        exact_project_name = await page.evaluate('''() => {
            // Find the label that mentions the project name
            const labels = Array.from(document.querySelectorAll('label'));
            for (const label of labels) {
                const text = label.textContent;
                if (text && text.includes('confirm by typing the project name')) {
                    // Extract the project name from something like "confirm by typing the project name (project-name) below"
                    const match = text.match(/\\(([^)]+)\\)/);
                    if (match) return match[1];
                }
            }
            // Fallback: get from URL
            const path = window.location.pathname;
            const urlMatch = path.match(/\\/manage\\/project\\/([^\\/]+)/);
            return urlMatch ? decodeURIComponent(urlMatch[1]) : null;
        }''')

        if not exact_project_name:
            print(f" ā ļø Could not extract exact project name from page, using: {package_name}")
            exact_project_name = package_name
        else:
            print(f" š Extracted exact project name: {exact_project_name}")

        # Find and check all "I understand..." checkboxes
        checkboxes = await page.query_selector_all('input[type="checkbox"]')
        print(f" ā Found {len(checkboxes)} checkboxes")
        for checkbox in checkboxes:
            await checkbox.check()

        # Find and type project name in confirmation field
        confirm_input = await page.query_selector('input[name="confirm_project_name"]')
        if not confirm_input:
            print(f" ā ļø Could not find confirmation input for {package_name}")
            return False

        # Clear and type the exact project name with human-like behavior
        await confirm_input.clear()
        await asyncio.sleep(random.uniform(0.2, 0.5))

        # Move mouse before typing
        await human_like_mouse_movement(page)

        # Type each character with varying delays
        for char in exact_project_name:
            await confirm_input.type(char, delay=random.randint(80, 200))
            await asyncio.sleep(random.uniform(0.05, 0.15))

        print(f" ā Typed project name: {exact_project_name}")

        # Wait a moment for the button to become enabled (human-like pause)
        await asyncio.sleep(random.uniform(1.0, 2.0))

        # Move mouse naturally
        await human_like_mouse_movement(page)

        # Find and click the delete link
        delete_link = await page.query_selector('[data-delete-confirm-target="button"]')
        if not delete_link:
            print(f" ā ļø Could not find delete button")
            return False

        # Check if the button is enabled (PyPI keeps it disabled until the
        # typed name matches); bail out with a screenshot if it is not.
        is_disabled = await delete_link.evaluate('(el) => el.classList.contains("button--disabled") || el.hasAttribute("disabled")')
        if is_disabled:
            print(f" ā ļø Delete button is still disabled")
            # Take a screenshot for debugging
            await page.screenshot(path=f"debug_{package_name}.png")
            print(f" šø Screenshot saved as debug_{package_name}.png")
            return False

        print(f" š±ļø Clicking delete button...")

        # Move mouse to the button area naturally
        box = await delete_link.bounding_box()
        if box:
            # Move to a random point within the button
            target_x = box['x'] + random.uniform(10, box['width'] - 10)
            target_y = box['y'] + random.uniform(10, box['height'] - 10)
            await page.mouse.move(target_x, target_y)
            await asyncio.sleep(random.uniform(0.2, 0.5))

        # Click and wait for navigation
        await delete_link.click()
        await asyncio.sleep(random.uniform(0.5, 1.0))
        await page.wait_for_load_state("networkidle")

        print(f" ā Deleted {package_name}")
        return True

    except Exception as e:
        print(f" ā Error deleting {package_name}: {e}")
        import traceback
        traceback.print_exc()
        # Take screenshot on error; best-effort, failures here are ignored.
        try:
            await page.screenshot(path=f"error_{package_name}.png")
            print(f" šø Error screenshot saved as error_{package_name}.png")
        except:
            pass
        return False
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
async def get_user_projects(page) -> list[str]:
    """Get all projects owned by the user that start with sideload-.

    Args:
        page: Logged-in Playwright page.

    Returns:
        Project names filtered to the 'sideload-' prefix; empty list on error.
    """
    try:
        print("š Fetching your PyPI projects...")

        # Go to projects page
        await page.goto('https://pypi.org/manage/projects/')
        await page.wait_for_load_state("networkidle")

        # Extract project names from the manage-project links on the page.
        projects = await page.evaluate('''() => {
            const links = Array.from(document.querySelectorAll('a[href*="/manage/project/"]'));
            return links.map(link => {
                const match = link.href.match(/\\/manage\\/project\\/([^\\/]+)\\//);
                return match ? match[1] : null;
            }).filter(name => name && name.startsWith('sideload-'));
        }''')

        return projects

    except Exception as e:
        print(f"ā Error fetching projects: {e}")
        return []
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
async def main():
    """Log in to PyPI in a visible browser and delete all sideload-* projects.

    Flow: launch Chromium (headed, so the operator can intervene), log in
    with env-var credentials, handle 2FA (auto-TOTP if PYPI_TOTP is set,
    otherwise wait for manual entry), then enumerate and delete projects.
    """
    print("š§¹ PyPI Sideload Package Cleanup Tool")
    print("=" * 50)
    if PYPI_TOTP:
        print("š TOTP auto-generation enabled")
    else:
        print("ā ļø TOTP auto-generation disabled (set PYPI_TOTP to enable)")
    print()

    async with async_playwright() as p:
        # Launch browser (headed so captchas/2FA can be completed by hand)
        browser = await p.chromium.launch(headless=False)
        context = await browser.new_context()
        page = await context.new_page()

        try:
            # Navigate to PyPI login page
            print("š Logging in to PyPI...")
            print(" Opening login page...")
            await page.goto('https://pypi.org/account/login/')

            # Fill in login credentials (fast, no need to be slow here)
            print(" Filling credentials...")
            await page.fill('#username', PYPI_USER)
            await asyncio.sleep(random.uniform(0.2, 0.4))
            await page.fill('#password', PYPI_PASSWORD)
            await asyncio.sleep(random.uniform(0.3, 0.6))

            # Submit login form
            await page.click('input[type="submit"]')
            await asyncio.sleep(2)

            # Wait for user to complete TOTP if required
            current_url = page.url
            if '/account/two-factor/' in current_url:
                if PYPI_TOTP:
                    print(" š Generating TOTP code...")

                    # Simulate human-like delay before interacting
                    await asyncio.sleep(random.uniform(1.5, 2.5))

                    # Move mouse around naturally across the page
                    print(" š±ļø Moving mouse naturally...")
                    await human_like_mouse_movement(page)

                    # Derive the current one-time code from the shared secret.
                    totp = pyotp.TOTP(PYPI_TOTP)
                    code = totp.now()
                    print(f" ā Generated TOTP code: {code}")

                    # Find TOTP input field and enter the code
                    totp_input = await page.query_selector('input[name="totp_value"]')
                    if not totp_input:
                        # Fallback selector if the named field isn't present.
                        totp_input = await page.query_selector('input[type="text"]')

                    if totp_input:
                        # Click on the input field naturally
                        await totp_input.click()
                        await asyncio.sleep(random.uniform(0.4, 0.8))

                        # Type each character slowly with human-like delays
                        print(" āØļø Typing TOTP code slowly...")
                        for i, char in enumerate(code):
                            await totp_input.type(char, delay=random.randint(100, 250))
                            await asyncio.sleep(random.uniform(0.1, 0.25))
                            # Occasionally move mouse during typing
                            if i % 2 == 0:
                                x = random.randint(200, 600)
                                y = random.randint(200, 500)
                                await page.mouse.move(x, y)

                        print(" ā Entered TOTP code")

                        # Wait a bit before submitting (like a human would)
                        await asyncio.sleep(random.uniform(0.8, 1.5))

                        # More mouse movements
                        print(" š±ļø Moving mouse before submit...")
                        await human_like_mouse_movement(page)

                        # Submit the form
                        submit_button = await page.query_selector('button[type="submit"]')
                        if not submit_button:
                            submit_button = await page.query_selector('input[type="submit"]')

                        if submit_button:
                            await submit_button.click()
                            print(" ā Submitted TOTP, waiting for response...")

                            # Wait for navigation
                            await asyncio.sleep(4)

                            # Check if we've successfully logged in
                            current_url = page.url
                            if '/account/' in current_url or '/manage/' in current_url:
                                print(" ā Successfully logged in!")
                            elif '/account/two-factor/' in current_url:
                                # Still on 2FA page - likely a captcha
                                print("\n" + "=" * 60)
                                print(" š¤ CAPTCHA DETECTED!")
                                print(" š¤ Please solve the captcha in the browser window")
                                print(" ā³ Waiting for you to complete it...")
                                print("=" * 60 + "\n")

                                # Wait for user to solve captcha - keep checking
                                # We need to wait until we're actually logged in (on account page or similar)
                                while True:
                                    await asyncio.sleep(2)
                                    current_url = page.url
                                    # Check if we've successfully logged in (not just left the 2FA page)
                                    if ('/account/' in current_url or '/manage/' in current_url) and '/account/two-factor/' not in current_url:
                                        break
                                    # Print status every 2 seconds to show we're still waiting
                                    print(" ā³ Still waiting for captcha completion...", end='\r')

                                print("\n ā Captcha solved and logged in! Continuing...")
                            else:
                                print(f" ā ļø Unexpected page: {current_url}")
                                print(" ā³ Waiting for login to complete...")
                                # Wait until we're on a known good page
                                while True:
                                    await asyncio.sleep(2)
                                    current_url = page.url
                                    if '/account/' in current_url or '/manage/' in current_url:
                                        break
                                    print(" ā³ Still waiting...", end='\r')
                                print("\n ā Login completed!")
                        else:
                            print(" ā ļø Could not find submit button")
                    else:
                        print(" ā ļø Could not find TOTP input field")
                else:
                    # No TOTP secret configured: the operator completes 2FA by hand.
                    print(" ā ļø TOTP required - please enter your 2FA code in the browser...")
                    print(" š¤ Waiting for you to complete 2FA (and captcha if present)...")
                    while True:
                        await asyncio.sleep(2)
                        current_url = page.url
                        # Wait until we're actually logged in
                        if '/account/two-factor/' not in current_url and ('/account/' in current_url or '/manage/' in current_url):
                            break
                        print(" ā³ Still waiting for 2FA completion...", end='\r')
                    print("\n ā 2FA completed!")

            print("\nā Login completed!")

            # Get all sideload projects
            projects = await get_user_projects(page)

            if not projects:
                print("ā No sideload-* projects found")
                return

            print(f"\nš Found {len(projects)} sideload projects:")
            for pkg in projects:
                print(f" - {pkg}")

            # Delete each project
            print("\nšļø Deleting projects...\n")
            deleted = 0
            for pkg in projects:
                if await delete_project(page, pkg):
                    deleted += 1

            print(f"\nā Deleted {deleted}/{len(projects)} projects")

        finally:
            # Always close the browser, even if login or deletion fails.
            await browser.close()
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
# Script entry point: run the async cleanup workflow.
if __name__ == "__main__":
    asyncio.run(main())
|