ytp-dl 0.6.7__py3-none-any.whl → 0.6.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- scripts/api.py +97 -157
- scripts/downloader.py +186 -128
- {ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/METADATA +5 -5
- ytp_dl-0.6.8.dist-info/RECORD +8 -0
- ytp_dl-0.6.7.dist-info/RECORD +0 -8
- {ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/WHEEL +0 -0
- {ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/entry_points.txt +0 -0
- {ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/top_level.txt +0 -0
scripts/api.py
CHANGED
@@ -3,21 +3,15 @@ from __future__ import annotations
 
 import json
 import os
-import secrets
 import shutil
 import tempfile
 import time
 from threading import BoundedSemaphore, Lock
-from typing import Optional
+from typing import Optional
 
-from flask import Flask, request, jsonify, Response,
+from flask import Flask, request, send_file, jsonify, Response, stream_with_context
 
-from .downloader import (
-    validate_environment,
-    prepare_network,
-    cleanup_network,
-    build_download_plan,
-)
+from .downloader import validate_environment, download_video_stream
 
 app = Flask(__name__)
 
@@ -77,7 +71,7 @@ def _job_meta_path(job_dir: str) -> str:
     return os.path.join(job_dir, "job.json")
 
 
-def _write_job_meta(job_dir: str, meta: Dict[str, Any]) -> None:
+def _write_job_meta(job_dir: str, meta: dict) -> None:
     try:
         with open(_job_meta_path(job_dir), "w", encoding="utf-8") as f:
             json.dump(meta, f, ensure_ascii=False)
@@ -85,36 +79,36 @@ def _write_job_meta(job_dir: str, meta: Dict[str, Any]) -> None:
         pass
 
 
-def _read_job_meta(job_dir: str) -> Optional[
-
-    if not os.path.exists(
+def _read_job_meta(job_dir: str) -> Optional[dict]:
+    path = _job_meta_path(job_dir)
+    if not os.path.exists(path):
         return None
     try:
-        with open(
+        with open(path, "r", encoding="utf-8") as f:
             return json.load(f)
     except Exception:
         return None
 
 
-def
-
-
-    except Exception:
-        pass
+def _sse_message(data: str) -> str:
+    # one "message" event
+    return f"data: {data}\n\n"
 
 
-def
-    #
-    return f"
+def _sse_event(event_name: str, data: str) -> str:
+    # custom event type
+    return f"event: {event_name}\ndata: {data}\n\n"
 
 
 @app.route("/api/download", methods=["POST"])
 def handle_download():
     """
-
-
-
-
+    Streams yt-dlp logs via SSE (real-time), then emits a final custom `result` event:
+      event: result
+      data: {"job_id":"ytpdl_xxx","filename":"file.mp4"}
+
+    The finished file is retrieved separately via:
+      GET /api/file/<job_id>
     """
     _cleanup_stale_jobs()
 
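For context, the two helpers added above emit standard Server-Sent Events frames: `_sse_message` produces an unnamed `message` event (`data: ...\n\n`) and `_sse_event` a named event such as `result` or `error`. A minimal sketch of how a consumer could split such a stream back into `(event, data)` pairs is shown below; the `iter_sse` helper is illustrative and not part of the package.

```python
from typing import Iterable, Iterator, Tuple

def iter_sse(lines: Iterable[str]) -> Iterator[Tuple[str, str]]:
    """Yield (event, data) pairs from decoded SSE lines (simplified parser)."""
    event, data = "message", []
    for line in lines:
        line = line.rstrip("\n")
        if not line:                       # a blank line terminates one frame
            if data:
                yield event, "\n".join(data)
            event, data = "message", []
        elif line.startswith("event:"):
            event = line[len("event:"):].strip()
        elif line.startswith("data:"):
            data.append(line[len("data:"):].strip())
```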
@@ -150,135 +144,87 @@ def handle_download():
 
         job_dir = tempfile.mkdtemp(prefix="ytpdl_", dir=BASE_DOWNLOAD_DIR)
         job_id = os.path.basename(job_dir)
-        token = secrets.token_urlsafe(24)
 
-        def stream()
+        def stream():
             nonlocal job_dir
-            assert job_dir is not None
-
-            # Internal control info (caller should NOT forward to browser).
-            yield _sse(f"[internal] job={job_id} token={token}")
-
             try:
-
-
-
-                out_dir = job_dir
-                out_dir_abs = os.path.abspath(out_dir)
-
-                # Track final file path without printing it to the client.
-                final_path: Optional[str] = None
-                # Track "pretty" filename from [download_complete] marker.
-                final_name: Optional[str] = None
-
-                plan = build_download_plan(
+                # ---- yt-dlp streamed logs ----
+                # download_video_stream yields yt-dlp stdout lines.
+                filename_path = yield from download_video_stream(
                     url=url,
                     resolution=resolution,
                     extension=extension,
-                    out_dir=
+                    out_dir=job_dir,
                 )
 
-
-
-
-
-
-                import subprocess
-
-                proc = subprocess.Popen(
-                    argv,
-                    stdout=subprocess.PIPE,
-                    stderr=subprocess.STDOUT,
-                    text=True,
-                    bufsize=1,
+                if not (filename_path and os.path.exists(filename_path)):
+                    yield _sse_message("ERROR: Download failed (file missing).")
+                    yield _sse_event(
+                        "error",
+                        json.dumps({"error": "Download failed (file missing)"}),
                     )
-
-                assert proc.stdout is not None
-
-                for raw in iter(proc.stdout.readline, ""):
-                    line = (raw or "").strip()
-                    if not line:
-                        continue
-
-                    # yt-dlp --print after_move:filepath outputs an absolute path.
-                    # Keep it internal so browser logs match local.
-                    if os.path.isabs(line) and line.startswith(out_dir_abs):
-                        final_path = line.strip("'\"")
-                        continue
-
-                    # Capture local-style marker for filename.
-                    if line.startswith("[download_complete]"):
-                        # same semantics as your local parser
-                        try:
-                            final_name = line.split("[download_complete]", 1)[1].strip()
-                        except Exception:
-                            final_name = None
-
-                    yield _sse(line)
-
-                proc.stdout.close()
-                rc = proc.wait()
-
-                if rc == 0:
-                    break
-
-                # Validate output
-                if not final_path:
-                    # Try to discover any output file in job dir if filepath line wasn't produced.
-                    try:
-                        for name in os.listdir(job_dir):
-                            if name.endswith((".part", ".ytdl", ".tmp")):
-                                continue
-                            p = os.path.join(job_dir, name)
-                            if os.path.isfile(p):
-                                final_path = os.path.abspath(p)
-                                break
-                    except Exception:
-                        pass
-
-                if not (final_path and os.path.exists(final_path)):
-                    yield _sse("ERROR: Download completed but output file not found")
                     return
 
-                #
-                meta = {
-                    "job_id": job_id,
-                    "token": token,
-                    "file_path": os.path.abspath(final_path),
-                    "file_name": final_name or os.path.basename(final_path),
-                    "created_at": time.time(),
-                }
-                _write_job_meta(job_dir, meta)
-                _touch(job_dir)
-
-                # Release slot as soon as yt-dlp is done (do NOT hold during fetch).
+                # Release slot as soon as yt-dlp is done.
                 _release_once()
 
-
-
+                out_name = os.path.basename(filename_path)
+
+                # Persist meta for /api/file/<job_id>
+                _write_job_meta(
+                    job_dir,
+                    {
+                        "job_id": job_id,
+                        "filename": out_name,
+                        "file_path": filename_path,
+                        "created_at": time.time(),
+                    },
+                )
+
+                # Result event (custom type) so your browser log UI won't display it
+                yield _sse_event(
+                    "result",
+                    json.dumps({"job_id": job_id, "filename": out_name}),
+                )
 
-
-
+                # keep-alive tail
+                yield _sse_message("All downloads complete.")
+                return
 
-
-
-
-
-                    "Cache-Control": "no-cache",
-                    "X-Accel-Buffering": "no",
-                    "Connection": "close",
-                },
-            )
+            except RuntimeError as e:
+                if job_dir:
+                    shutil.rmtree(job_dir, ignore_errors=True)
+                _release_once()
 
-
-
-
-
+                msg = str(e)
+                # make it visible in logs
+                yield _sse_message(f"ERROR: {msg}")
+                # also machine-readable
+                code = 503 if "Mullvad not logged in" in msg else 500
+                yield _sse_event("error", json.dumps({"error": msg, "code": code}))
+                return
+
+            except GeneratorExit:
+                # Client disconnected mid-stream; best-effort cleanup.
+                if job_dir:
+                    shutil.rmtree(job_dir, ignore_errors=True)
+                _release_once()
+                raise
 
-
-
-
-
+            except Exception as e:
+                if job_dir:
+                    shutil.rmtree(job_dir, ignore_errors=True)
+                _release_once()
+
+                msg = f"Download failed: {str(e)}"
+                yield _sse_message(f"ERROR: {msg}")
+                yield _sse_event("error", json.dumps({"error": msg, "code": 500}))
+                return
+
+        resp = Response(stream_with_context(stream()), content_type="text/event-stream")
+        resp.headers["Cache-Control"] = "no-cache"
+        resp.headers["X-Accel-Buffering"] = "no"
+        return resp
 
     except Exception as e:
         if job_dir:
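The response wiring at the end of the rewritten handler is the usual Flask pattern for SSE: wrap the generator in `stream_with_context`, declare `text/event-stream`, and disable caching and proxy buffering. A minimal standalone sketch of the same pattern (the `/ticks` route and its payload are hypothetical, not part of this package):

```python
import time

from flask import Flask, Response, stream_with_context

app = Flask(__name__)

@app.route("/ticks")
def ticks() -> Response:
    def gen():
        for i in range(3):
            # Each SSE frame is "data: ...\n\n"; the headers below keep
            # reverse proxies (e.g. nginx) from buffering the stream.
            yield f"data: tick {i}\n\n"
            time.sleep(1)

    resp = Response(stream_with_context(gen()), content_type="text/event-stream")
    resp.headers["Cache-Control"] = "no-cache"
    resp.headers["X-Accel-Buffering"] = "no"
    return resp
```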
@@ -287,17 +233,13 @@ def handle_download():
         return jsonify(error=f"Download failed: {str(e)}"), 500
 
 
-@app.route("/api/
-def
+@app.route("/api/file/<job_id>", methods=["GET"])
+def fetch_file(job_id: str):
     """
-
-
-    Requires:
-        /api/fetch/<job_id>?token=...
+    After /api/download SSE completes with `event: result`,
+    the caller fetches the finished file here.
     """
-
-    if not token:
-        return jsonify(error="Missing token"), 400
+    _cleanup_stale_jobs()
 
     job_dir = os.path.join(BASE_DOWNLOAD_DIR, job_id)
     if not os.path.isdir(job_dir):
@@ -307,26 +249,24 @@ def fetch(job_id: str):
     if not meta:
         return jsonify(error="Job metadata missing"), 404
 
-    if meta.get("token") != token:
-        return jsonify(error="Invalid token"), 403
-
     file_path = meta.get("file_path")
-
-        return jsonify(error="File not found"), 404
+    filename = meta.get("filename") or (os.path.basename(file_path) if file_path else None)
 
-
+    if not file_path or not os.path.exists(file_path):
+        shutil.rmtree(job_dir, ignore_errors=True)
+        return jsonify(error="File not found"), 404
 
-
+    response = send_file(file_path, as_attachment=True, download_name=filename)
 
-    # Cleanup after
+    # Cleanup directory after client finishes consuming the response.
     def _cleanup() -> None:
         try:
             shutil.rmtree(job_dir, ignore_errors=True)
         except Exception:
             pass
 
-
-    return
+    response.call_on_close(_cleanup)
+    return response
 
 
 @app.route("/healthz", methods=["GET"])
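Taken together, the two endpoints imply a two-step client flow: stream `/api/download` until the `result` event arrives, then fetch the file from `/api/file/<job_id>`. A rough client sketch under stated assumptions only — the server listens on `localhost:5000`, the request body is JSON with `url`/`resolution`/`extension` (the request parsing is outside the hunks shown), and the third-party `requests` library is available:

```python
import json

import requests  # assumption: third-party HTTP client, not shipped with this package

def download(url: str, out_path: str) -> None:
    job = None
    with requests.post(
        "http://localhost:5000/api/download",
        json={"url": url, "resolution": 1080, "extension": "mp4"},
        stream=True,
    ) as resp:
        event = "message"
        for raw in resp.iter_lines(decode_unicode=True):
            if not raw:
                event = "message"              # blank line ends an SSE frame
            elif raw.startswith("event:"):
                event = raw.split(":", 1)[1].strip()
            elif raw.startswith("data:") and event == "result":
                job = json.loads(raw.split(":", 1)[1])
            else:
                print(raw)                     # progress / log lines

    if job:
        # The server deletes the job directory once this response is consumed.
        blob = requests.get(f"http://localhost:5000/api/file/{job['job_id']}")
        blob.raise_for_status()
        with open(out_path, "wb") as fh:
            fh.write(blob.content)
```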
scripts/downloader.py
CHANGED
@@ -6,7 +6,7 @@ import shlex
 import shutil
 import subprocess
 import time
-from typing import Optional, List, Tuple,
+from typing import Optional, List, Tuple, Generator
 
 # =========================
 # Config / constants
@@ -29,8 +29,6 @@ DEFAULT_OUT_DIR = os.environ.get("YTPDL_DOWNLOAD_DIR", "/root")
 _MAX_ERR_LINES = 80
 _MAX_ERR_CHARS = 4000
 
-_ALLOWED_EXTENSIONS = {"mp3", "mp4", "best"}
-
 
 # =========================
 # Shell helpers
@@ -119,25 +117,6 @@ def mullvad_wait_connected(timeout: int = 20) -> bool:
     return False
 
 
-def prepare_network() -> None:
-    """
-    Called by VPS API server before launching yt-dlp.
-    """
-    validate_environment()
-    require_mullvad_login()
-    mullvad_connect(MULLVAD_LOCATION)
-    if not mullvad_wait_connected():
-        raise RuntimeError("Mullvad connection failed")
-
-
-def cleanup_network() -> None:
-    """
-    Called by VPS API server after yt-dlp finishes.
-    """
-    if _mullvad_present():
-        _run_argv(["mullvad", "disconnect"], check=False)
-
-
 # =========================
 # yt-dlp helpers
 # =========================
@@ -155,25 +134,6 @@ def _common_flags() -> List[str]:
     ]
 
 
-def _sanitize_title_flags() -> List[str]:
-    # Mirrors your local downloader's title sanitization intent.
-    return [
-        "--replace-in-metadata",
-        "title",
-        "[\\U0001F600-\\U0001F64F\\U0001F300-\\U0001F5FF"
-        "\\U0001F680-\\U0001F6FF\\U0001F700-\\U0001F77F"
-        "\\U0001F780-\\U0001F7FF\\U0001F800-\\U0001F8FF"
-        "\\U0001F900-\\U0001F9FF\\U0001FA00-\\U0001FA6F"
-        "\\U0001FA70-\\U0001FAFF\\U00002702-\\U000027B0"
-        "\\U000024C2-\\U0001F251]",
-        "",
-        "--replace-in-metadata",
-        "title",
-        r"[\\\/:*?\"<>|]|[\s.]+$",
-        "",
-    ]
-
-
 def _fmt_mp4_apple_safe(cap: int) -> str:
     # Always pick the best Apple-safe MP4/H.264 + M4A/AAC up to cap.
     return (
@@ -188,124 +148,222 @@ def _fmt_best(cap: int) -> str:
     return f"bv*[height<={cap}]+ba/b[height<={cap}]"
 
 
-def
+def _newest_non_temp_file(out_dir: str) -> Optional[str]:
+    try:
+        best_path = None
+        best_mtime = -1.0
+        for name in os.listdir(out_dir):
+            if name.endswith((".part", ".ytdl", ".tmp")):
+                continue
+            full = os.path.join(out_dir, name)
+            if not os.path.isfile(full):
+                continue
+            mt = os.path.getmtime(full)
+            if mt > best_mtime:
+                best_mtime = mt
+                best_path = full
+        return best_path
+    except Exception:
+        return None
+
+
+def _download_with_format_stream(
+    *,
+    url: str,
+    out_dir: str,
+    fmt: str,
+    merge_output_format: Optional[str] = None,
+    extract_mp3: bool = False,
+) -> Generator[str, None, str]:
+    """
+    Stream yt-dlp stdout lines (same style as local: --progress --newline),
+    while capturing final output path reliably.
+
+    Returns absolute file path (generator return value).
+    """
     out_dir = os.path.abspath(out_dir)
+    os.makedirs(out_dir, exist_ok=True)
+
     out_tpl = os.path.join(out_dir, "%(title)s.%(ext)s")
 
-
+    argv: List[str] = [
         YTDLP_BIN,
+        url,
         "--progress",
         "--newline",
         "--continue",
-
-        *
+        "-f", fmt,
+        *(_common_flags()),
         "--output", out_tpl,
-        #
+        # Absolute final path for internal capture (we do NOT emit this line).
         "--print", "after_move:filepath",
-        #
+        # Local parity signal:
         "--print", "after_move:[download_complete] %(title)s.%(ext)s",
     ]
 
+    if extract_mp3:
+        argv.extend(
+            [
+                "--extract-audio",
+                "--audio-format", "mp3",
+                "--audio-quality", "0",
+                "--embed-thumbnail",
+                "--add-metadata",
+            ]
+        )
 
-
-
-    resolution: Any | None = 1080,
-    extension: Optional[str] = None,
-    out_dir: str = DEFAULT_OUT_DIR,
-) -> List[List[str]]:
-    """
-    Returns a list of argv arrays to try in order.
-    The VPS API server will run them with Popen and stream stdout.
-    """
-    if not url:
-        raise RuntimeError("Missing URL")
+    if merge_output_format:
+        argv.extend(["--merge-output-format", merge_output_format])
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        "
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    proc = subprocess.Popen(
+        argv,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        text=True,
+        bufsize=1,
+        universal_newlines=True,
+    )
+
+    final_path: Optional[str] = None
+    tail_lines: List[str] = []
+
+    try:
+        assert proc.stdout is not None
+        for raw in iter(proc.stdout.readline, ""):
+            line = (raw or "").rstrip("\n").rstrip("\r")
+            if not line:
+                continue
+
+            # Capture absolute final path from after_move:filepath (do not emit to logs)
+            if os.path.isabs(line) and line.startswith(out_dir):
+                final_path = line.strip("'\"")
+                continue
+
+            # Keep a small tail buffer for error reporting
+            tail_lines.append(line)
+            if len(tail_lines) > _MAX_ERR_LINES:
+                tail_lines.pop(0)
+
+            # Emit everything else (yt-dlp progress + [download_complete] line)
+            yield line
+
+        proc.wait()
+    finally:
+        try:
+            if proc.stdout:
+                proc.stdout.close()
+        except Exception:
+            pass
+
+    if proc.returncode != 0:
+        tail = "\n".join(tail_lines)
+        tail = _tail(tail)
+        raise RuntimeError(f"yt-dlp failed (format: {fmt})\n{tail}")
+
+    # Resolve final path
+    if final_path and os.path.exists(final_path):
+        return os.path.abspath(final_path)
+
+    # Fallback: newest output in out_dir
+    newest = _newest_non_temp_file(out_dir)
+    if newest and os.path.exists(newest):
+        return os.path.abspath(newest)
+
+    tail = _tail("\n".join(tail_lines))
+    raise RuntimeError(f"Download completed but output file not found (format: {fmt})\n{tail}")
 
 
 # =========================
-#
+# Public APIs
 # =========================
-def
+def download_video_stream(
     url: str,
     resolution: int | None = 1080,
     extension: Optional[str] = None,
     out_dir: str = DEFAULT_OUT_DIR,
-) -> str:
+) -> Generator[str, None, str]:
     """
-
-    The VPS API server now uses build_download_plan + Popen streaming instead.
+    Streams yt-dlp logs and returns final file path (generator return value).
     """
-    import os
-
     if not url:
         raise RuntimeError("Missing URL")
 
     out_dir = os.path.abspath(out_dir)
     os.makedirs(out_dir, exist_ok=True)
 
-
-    try:
-        plan = build_download_plan(url=url, resolution=resolution, extension=extension, out_dir=out_dir)
-
-        last_out = ""
-        for argv in plan:
-            rc, out = _run_argv_capture(argv)
-            last_out = out or ""
-            # Find the printed after_move:filepath absolute path (first matching line).
-            for raw in (last_out or "").splitlines():
-                line = (raw or "").strip()
-                if os.path.isabs(line) and line.startswith(out_dir):
-                    if os.path.exists(line):
-                        return os.path.abspath(line)
+    validate_environment()
 
-
-
+    require_mullvad_login()
+    mullvad_connect(MULLVAD_LOCATION)
+    if not mullvad_wait_connected():
+        raise RuntimeError("Mullvad connection failed")
 
-
-
+    try:
+        mode = (extension or "mp4").lower().strip()
+
+        if mode == "mp3":
+            return (yield from _download_with_format_stream(
+                url=url,
+                out_dir=out_dir,
+                fmt="bestaudio",
+                merge_output_format=None,
+                extract_mp3=True,
+            ))
+
+        cap = int(resolution or 1080)
+
+        if mode == "best":
+            # Try best first (may produce webm/mkv/etc). If it fails, fall back to Apple-safe MP4.
+            try:
+                return (yield from _download_with_format_stream(
+                    url=url,
+                    out_dir=out_dir,
+                    fmt=_fmt_best(cap),
+                    merge_output_format=None,
+                    extract_mp3=False,
+                ))
+            except Exception:
+                return (yield from _download_with_format_stream(
+                    url=url,
+                    out_dir=out_dir,
+                    fmt=_fmt_mp4_apple_safe(cap),
+                    merge_output_format="mp4",
+                    extract_mp3=False,
+                ))
+
+        # Default / "mp4" mode
+        return (yield from _download_with_format_stream(
+            url=url,
+            out_dir=out_dir,
+            fmt=_fmt_mp4_apple_safe(cap),
+            merge_output_format="mp4",
+            extract_mp3=False,
+        ))
 
     finally:
-
+        if _mullvad_present():
+            _run_argv(["mullvad", "disconnect"], check=False)
+
+
+def download_video(
+    url: str,
+    resolution: int | None = 1080,
+    extension: Optional[str] = None,
+    out_dir: str = DEFAULT_OUT_DIR,
+) -> str:
+    """
+    Backward-compatible non-streaming wrapper.
+    """
+    gen = download_video_stream(
+        url=url,
+        resolution=resolution,
+        extension=extension,
+        out_dir=out_dir,
+    )
+    try:
+        for _ in gen:
+            pass
+    except StopIteration as si:
+        return si.value  # type: ignore[attr-defined]
+    # Should never happen
+    raise RuntimeError("Download failed (no result)")
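Both `download_video_stream` and the `download_video` wrapper rely on plain generator semantics: a `return value` inside a generator becomes `StopIteration.value`, and `yield from` re-yields the sub-generator's items while handing that value back to the delegating code. A small self-contained illustration (unrelated to yt-dlp; the names and paths are made up):

```python
from typing import Generator

def worker() -> Generator[str, None, str]:
    yield "step 1"
    yield "step 2"
    return "/tmp/result.mp4"      # becomes StopIteration.value

def delegate() -> Generator[str, None, str]:
    path = yield from worker()    # re-yields both steps, captures the return value
    yield f"done: {path}"
    return path

# Driving a generator by hand surfaces the return value explicitly:
gen = worker()
while True:
    try:
        print(next(gen))          # "step 1", "step 2"
    except StopIteration as stop:
        print(stop.value)         # "/tmp/result.mp4"
        break
```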
{ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ytp-dl
-Version: 0.6.7
+Version: 0.6.8
 Summary: YouTube video downloader with Mullvad VPN integration and Flask API
 Home-page: https://github.com/yourusername/ytp-dl
 Author: dumgum82
@@ -57,7 +57,7 @@ A lightweight YouTube downloader with Mullvad VPN integration and an HTTP API.
 ## Installation
 
 ```bash
-pip install ytp-dl==0.6.7
+pip install ytp-dl==0.6.8 yt-dlp[default]
 ```
 
 Requirements:
@@ -242,7 +242,7 @@ When Mullvad connects/disconnects, Linux routing can change in a way that breaks
 
 * Installs Python, FFmpeg, Mullvad CLI, and Deno
 * Creates a virtualenv at `/opt/yt-dlp-mullvad/venv`
-* Installs `ytp-dl==0.6.7`
+* Installs `ytp-dl==0.6.8` + `yt-dlp[default]` + `gunicorn`
 * Installs a policy-routing oneshot service to keep the public API reachable
 * Sets up a systemd service on port 5000
 * Runs Gunicorn with `gthread` (threaded) workers
@@ -258,7 +258,7 @@ Note: `gthread` is a built-in Gunicorn worker class (no extra Python dependency)
 # - Installs Deno system-wide (JS runtime required for modern YouTube extraction via yt-dlp)
 # - Configures policy routing so the public API stays reachable while Mullvad toggles
 # - Creates a virtualenv at /opt/yt-dlp-mullvad/venv
-# - Installs ytp-dl==0.6.7
+# - Installs ytp-dl==0.6.8 + yt-dlp[default] + gunicorn in that venv
 # - Creates a systemd service ytp-dl-api.service on port 5000
 #
 # Mullvad connect/disconnect is handled per-job by downloader.py.
@@ -394,7 +394,7 @@ mkdir -p "${APP_DIR}"
 python3 -m venv "${VENV_DIR}"
 source "${VENV_DIR}/bin/activate"
 pip install --upgrade pip
-pip install "ytp-dl==0.6.7"
+pip install "ytp-dl==0.6.8" "yt-dlp[default]" gunicorn
 deactivate
 
 echo "==> 3) API environment file (/etc/default/ytp-dl-api)"
ytp_dl-0.6.8.dist-info/RECORD
ADDED
@@ -0,0 +1,8 @@
+scripts/__init__.py,sha256=EbAplfCcyLD3Q_9sxemm6owCc5_UJv53vmlxy810p2s,152
+scripts/api.py,sha256=EMpD_vRX5FZQ-ICIxLuRqJxitvlbi1VnUflWMC4yvmw,8560
+scripts/downloader.py,sha256=LT7ANnpf7DRgVuRdNynqIXMKfKaeohy508OCXmltLtA,10529
+ytp_dl-0.6.8.dist-info/METADATA,sha256=g5Q33WgF9ZBJYLdFYSt2cAiyl9QPSD1YpOeXcFUw628,14547
+ytp_dl-0.6.8.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ytp_dl-0.6.8.dist-info/entry_points.txt,sha256=QqjqZZAEt3Y7RGrleqZ312sjjboUpbMLdo7qFxuCH30,48
+ytp_dl-0.6.8.dist-info/top_level.txt,sha256=rmzd5mewlrJy4sT608KPib7sM7edoY75AeqJeY3SPB4,8
+ytp_dl-0.6.8.dist-info/RECORD,,
ytp_dl-0.6.7.dist-info/RECORD
DELETED
@@ -1,8 +0,0 @@
-scripts/__init__.py,sha256=EbAplfCcyLD3Q_9sxemm6owCc5_UJv53vmlxy810p2s,152
-scripts/api.py,sha256=gHVQgAJE2x0V1UpE0UTuBr1HU5yleYv7uhLmjN-2HXA,10454
-scripts/downloader.py,sha256=NWPJPP2WcXUt4X2yr4EPEqHG5e6N2GMdNufDgzhwDDg,8768
-ytp_dl-0.6.7.dist-info/METADATA,sha256=JvG6aK5Z4VEshRBy0FWpDmh3Dcr3ro5c49kOsnOT3do,14547
-ytp_dl-0.6.7.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-ytp_dl-0.6.7.dist-info/entry_points.txt,sha256=QqjqZZAEt3Y7RGrleqZ312sjjboUpbMLdo7qFxuCH30,48
-ytp_dl-0.6.7.dist-info/top_level.txt,sha256=rmzd5mewlrJy4sT608KPib7sM7edoY75AeqJeY3SPB4,8
-ytp_dl-0.6.7.dist-info/RECORD,,
{ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/WHEEL
File without changes
{ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/entry_points.txt
File without changes
{ytp_dl-0.6.7.dist-info → ytp_dl-0.6.8.dist-info}/top_level.txt
File without changes