weeb-cli 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- weeb_cli/__init__.py +1 -0
- weeb_cli/__main__.py +4 -0
- weeb_cli/commands/downloads.py +126 -0
- weeb_cli/commands/search.py +428 -0
- weeb_cli/commands/settings.py +254 -0
- weeb_cli/commands/setup.py +26 -0
- weeb_cli/commands/watchlist.py +130 -0
- weeb_cli/config.py +50 -0
- weeb_cli/i18n.py +65 -0
- weeb_cli/locales/en.json +168 -0
- weeb_cli/locales/tr.json +168 -0
- weeb_cli/main.py +85 -0
- weeb_cli/providers/__init__.py +21 -0
- weeb_cli/providers/animecix.py +276 -0
- weeb_cli/providers/anizle.py +450 -0
- weeb_cli/providers/base.py +98 -0
- weeb_cli/providers/registry.py +45 -0
- weeb_cli/providers/turkanime.py +499 -0
- weeb_cli/services/__init__.py +0 -0
- weeb_cli/services/dependency_manager.py +321 -0
- weeb_cli/services/details.py +32 -0
- weeb_cli/services/downloader.py +308 -0
- weeb_cli/services/player.py +47 -0
- weeb_cli/services/progress.py +136 -0
- weeb_cli/services/scraper.py +91 -0
- weeb_cli/services/search.py +16 -0
- weeb_cli/services/updater.py +199 -0
- weeb_cli/services/watch.py +19 -0
- weeb_cli/ui/__init__.py +1 -0
- weeb_cli/ui/header.py +30 -0
- weeb_cli/ui/menu.py +59 -0
- weeb_cli/ui/prompt.py +120 -0
- weeb_cli-1.0.0.dist-info/METADATA +148 -0
- weeb_cli-1.0.0.dist-info/RECORD +38 -0
- weeb_cli-1.0.0.dist-info/WHEEL +5 -0
- weeb_cli-1.0.0.dist-info/entry_points.txt +2 -0
- weeb_cli-1.0.0.dist-info/licenses/LICENSE +390 -0
- weeb_cli-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,321 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import sys
|
|
3
|
+
import platform
|
|
4
|
+
import shutil
|
|
5
|
+
import requests
|
|
6
|
+
import zipfile
|
|
7
|
+
import tarfile
|
|
8
|
+
import stat
|
|
9
|
+
import subprocess
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from rich.console import Console
|
|
12
|
+
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, DownloadColumn, TransferSpeedColumn
|
|
13
|
+
from ..config import config
|
|
14
|
+
from ..i18n import i18n
|
|
15
|
+
|
|
16
|
+
# py7zr is optional: it is only needed to unpack the .7z archives used by
# the Windows mpv builds. Every other install path works without it.
try:
    import py7zr
except ImportError:
    py7zr = None

# Single shared rich console for all user-facing output in this module.
console = Console()
|
|
22
|
+
|
|
23
|
+
class DependencyManager:
    """Resolve and install the external tools weeb-cli relies on.

    Tools (yt-dlp, ffmpeg, mpv, aria2) are looked up first in the private
    ``~/.weeb-cli/bin`` directory, then on the system PATH.  Installation
    tries the platform's package managers before falling back to a direct
    download into the private bin directory (no admin rights required).
    """

    def __init__(self):
        self.os_type = platform.system().lower()
        self.arch = platform.machine().lower()
        # Private bin dir so tools can be installed without elevation.
        self.bin_dir = Path.home() / ".weeb-cli" / "bin"
        self._ensure_bin_dir()

        # Intel and Apple Silicon mpv builds live at different URLs.
        mpv_macos_url = "https://laboratory.stolendata.net/~djinn/mpv_osx/mpv-latest.tar.gz"
        if self.arch == "arm64":
            mpv_macos_url = "https://laboratory.stolendata.net/~djinn/mpv_osx/mpv-arm64-latest.tar.gz"

        # Per-OS metadata: "url" is a fallback list tried in order, "type"
        # selects the install strategy (single binary vs. archive to unpack),
        # and "pkg" maps package-manager name -> package id for that manager.
        self.dependencies = {
            "windows": {
                "yt-dlp": {
                    "url": ["https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp.exe"],
                    "type": "binary",
                    "filename": "yt-dlp.exe",
                    "pkg": {"winget": "yt-dlp", "choco": "yt-dlp", "scoop": "yt-dlp"}
                },
                "ffmpeg": {
                    "url": [
                        "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip",
                        "https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip"
                    ],
                    "type": "archive",
                    "files": ["ffmpeg.exe", "ffprobe.exe"],
                    "pkg": {"winget": "Gyan.FFmpeg", "choco": "ffmpeg", "scoop": "ffmpeg"}
                },
                "mpv": {
                    "url": [
                        "https://github.com/shinchiro/mpv-winbuild-cmake/releases/download/v20240114/mpv-x86_64-v3-20240114-git-07ec82e.7z",
                        "https://sourceforge.net/projects/mpv-player-windows/files/64bit/mpv-x86_64-20231224-git-0a30b42.7z/download"
                    ],
                    "type": "archive",
                    "filename": "mpv.exe",
                    "pkg": {"winget": "MutanteOz.mpv", "choco": "mpv", "scoop": "mpv"}
                },
                "aria2": {
                    "url": ["https://github.com/aria2/aria2/releases/download/release-1.37.0/aria2-1.37.0-win-64bit-build1.zip"],
                    "type": "archive",
                    "files": ["aria2c.exe"],
                    "pkg": {"winget": "aria2", "choco": "aria2", "scoop": "aria2"}
                }
            },
            "linux": {
                "yt-dlp": {
                    "url": ["https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp"],
                    "type": "binary",
                    "filename": "yt-dlp",
                    "pkg": {"brew": "yt-dlp", "yay": "yt-dlp", "pacman": "yt-dlp", "apt": "yt-dlp"}
                },
                "ffmpeg": {
                    "url": ["https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz"],
                    "type": "archive",
                    "files": ["ffmpeg", "ffprobe"],
                    "pkg": {"brew": "ffmpeg", "yay": "ffmpeg", "pacman": "ffmpeg", "apt": "ffmpeg"}
                },
                "aria2": {
                    "url": ["https://github.com/q3aql/aria2-static-builds/releases/download/v1.36.0/aria2-1.36.0-linux-gnu-64bit-build1.tar.bz2"],
                    "type": "archive",
                    "files": ["aria2c"],
                    "pkg": {"brew": "aria2", "yay": "aria2", "pacman": "aria2", "apt": "aria2"}
                },
                "mpv": {
                    "url": ["https://github.com/pkgforge-dev/mpv-AppImage/releases/latest/download/mpv-x86_64.AppImage"],
                    "type": "binary",
                    "filename": "mpv",
                    "pkg": {"brew": "mpv", "yay": "mpv-git", "pacman": "mpv", "apt": "mpv"}
                }
            },
            "darwin": {
                "yt-dlp": {
                    "url": ["https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp_macos"],
                    "type": "binary",
                    "filename": "yt-dlp",
                    "pkg": {"brew": "yt-dlp"}
                },
                "ffmpeg": {
                    "url": ["https://evermeet.cx/ffmpeg/getrelease/zip"],
                    "type": "archive",
                    "files": ["ffmpeg", "ffprobe"],
                    "pkg": {"brew": "ffmpeg"}
                },
                "mpv": {
                    "url": [mpv_macos_url],
                    "type": "archive",
                    "files": ["mpv"],
                    "pkg": {"brew": "mpv"}
                }
            }
        }

    def _ensure_bin_dir(self):
        """Create the private bin directory and append it to this process' PATH."""
        if not self.bin_dir.exists():
            self.bin_dir.mkdir(parents=True, exist_ok=True)
        # .get() instead of os.environ["PATH"]: stripped-down environments can
        # start with no PATH variable at all, which would raise KeyError here.
        current_path = os.environ.get("PATH", "")
        if str(self.bin_dir) not in current_path:
            os.environ["PATH"] = current_path + os.pathsep + str(self.bin_dir)

    def check_dependency(self, name):
        """Return the path to *name* if available, else None.

        The private bin directory wins over a system-wide install so a
        weeb-cli-managed copy is always preferred.
        """
        exe_name = f"{name}.exe" if self.os_type == "windows" else name
        local_path = self.bin_dir / exe_name

        if local_path.exists():
            return str(local_path)

        return shutil.which(name)

    def install_dependency(self, name):
        """Install *name* for the current OS.  Returns True on success.

        Tries the system package managers first, then a direct download.
        """
        if self.os_type not in self.dependencies or name not in self.dependencies[self.os_type]:
            console.print(f"[yellow]{i18n.t('setup.manual_required', tool=name)}[/yellow]")
            return False

        if self._try_package_managers(name):
            return True

        return self._install_direct(name)

    def _try_package_managers(self, name):
        """Try each known package manager that is installed and has a package id for *name*."""
        info = self.dependencies[self.os_type][name]
        pkg_map = info.get("pkg", {})

        # Command prefix per manager; the package id is appended at the end.
        managers = {
            "winget": ["winget", "install", "-e", "--id"],
            "choco": ["choco", "install", "-y"],
            "scoop": ["scoop", "install"],
            "brew": ["brew", "install"],
            "yay": ["yay", "-S", "--noconfirm"],
            "pacman": ["sudo", "pacman", "-S", "--noconfirm"],
            "apt": ["sudo", "apt", "install", "-y"],
        }

        for mgr, cmd_prefix in managers.items():
            if mgr in pkg_map and shutil.which(mgr):
                pkg_name = pkg_map[mgr]
                console.print(f"[cyan]{i18n.t('setup.pkg_manager_try', manager=mgr)}[/cyan]")

                full_cmd = cmd_prefix + [pkg_name]
                try:
                    subprocess.run(full_cmd, check=True)
                    console.print(f"[green]{i18n.t('setup.success', tool=name)}[/green]")
                    return True
                except subprocess.CalledProcessError:
                    # This manager failed; fall through to the next available one.
                    continue
        return False

    def _install_direct(self, name):
        """Download *name* from its fallback URL list and install it into bin_dir."""
        info = self.dependencies[self.os_type][name]
        urls = info["url"]

        console.print(f"[cyan]{i18n.t('setup.downloading', tool=name)}[/cyan]")

        downloaded_file = None
        for url in urls:
            try:
                downloaded_file = self._download_file(url, f"temp_{name}")
                break
            except Exception as e:
                # Mirror failed; report and try the next URL in the list.
                console.print(f"[red]{i18n.t('common.error')}: {e}[/red]")
                continue

        if not downloaded_file:
            console.print(f"[red]{i18n.t('setup.failed', tool=name)}[/red]")
            return False

        try:
            if info["type"] == "binary":
                target = self.bin_dir / info["filename"]

                if target.exists():
                    os.remove(target)

                shutil.move(downloaded_file, target)
                self._make_executable(target)

            elif info["type"] == "archive":
                self._extract_and_install(downloaded_file, info.get("files", []), name)

            console.print(f"[green]{i18n.t('setup.success', tool=name)}[/green]")
            return True

        except Exception as e:
            console.print(f"[red]{i18n.t('setup.failed', tool=name)}: {e}[/red]")
            return False
        finally:
            # Always clean up the downloaded artifact, success or not.
            if downloaded_file and os.path.exists(downloaded_file):
                os.remove(downloaded_file)

    def _get_temp_dir(self):
        """Return (creating if needed) the scratch directory used for downloads."""
        temp_dir = self.bin_dir / "temp"
        temp_dir.mkdir(exist_ok=True)
        return temp_dir

    def _download_file(self, url, prefix):
        """Download *url* into the temp dir with a progress bar; return the local path.

        Raises requests.HTTPError / requests.Timeout on failure so the caller
        can fall back to the next mirror.
        """
        # timeout so a dead mirror cannot hang the CLI forever; the context
        # manager releases the connection when the transfer completes.
        with requests.get(url, stream=True, timeout=30) as response:
            response.raise_for_status()

            total_size = int(response.headers.get('content-length', 0))
            filename = url.split("/")[-1]
            temp_path = self._get_temp_dir() / (prefix + "_" + filename)

            with Progress(
                SpinnerColumn(),
                TextColumn("[progress.description]{task.description}"),
                BarColumn(),
                DownloadColumn(),
                TransferSpeedColumn(),
            ) as progress:
                task = progress.add_task("[cyan]Downloading...", total=total_size)

                with open(temp_path, "wb") as f:
                    for chunk in response.iter_content(chunk_size=8192):
                        f.write(chunk)
                        progress.update(task, advance=len(chunk))

        return temp_path

    def _extract_and_install(self, archive_path, target_files, tool_name):
        """Unpack *archive_path* and move the wanted binaries into bin_dir.

        Raises if the archive format is unsupported or no binary was found.
        """
        archive_path = str(archive_path)

        temp_extract = self._get_temp_dir() / "extract"
        if temp_extract.exists():
            shutil.rmtree(temp_extract)
        temp_extract.mkdir()

        try:
            if archive_path.endswith(".zip"):
                with zipfile.ZipFile(archive_path, 'r') as zip_ref:
                    zip_ref.extractall(temp_extract)
            elif archive_path.endswith(".7z"):
                if not py7zr:
                    raise Exception("py7zr missing")
                with py7zr.SevenZipFile(archive_path, mode='r') as z:
                    z.extractall(path=temp_extract)
            elif archive_path.endswith((".tar.gz", ".tar.xz", ".tar.bz2", ".tgz")):
                with tarfile.open(archive_path, "r:*") as tar_ref:
                    try:
                        # filter="data" (PEP 706) blocks path-traversal and
                        # device members in untrusted tarballs.
                        tar_ref.extractall(temp_extract, filter="data")
                    except TypeError:
                        # Older Python without the filter argument.
                        tar_ref.extractall(temp_extract)
            else:
                raise Exception("Unsupported format")

            found_count = 0
            for root, dirs, files in os.walk(temp_extract):
                for file in files:
                    is_target = False

                    if file in target_files:
                        is_target = True

                    # Windows archives may list targets without the .exe suffix.
                    if self.os_type == "windows" and not is_target:
                        simple_name = file.lower().replace(".exe", "")
                        if f"{simple_name}.exe" == file.lower() and \
                           (simple_name in target_files or simple_name == tool_name):
                            is_target = True

                    # macOS mpv ships as an .app bundle; the real binary lives
                    # under Contents/MacOS (presumably mpv.app — TODO confirm).
                    if self.os_type == "darwin" and tool_name == "mpv" and file == "mpv":
                        if "Contents" in root and "MacOS" in root:
                            is_target = True

                    if is_target:
                        source = Path(root) / file
                        target = self.bin_dir / file

                        if target.exists():
                            os.remove(target)

                        shutil.move(str(source), str(target))
                        self._make_executable(target)
                        found_count += 1

            # Fallback heuristic: take the first file named like the tool.
            if found_count == 0:
                for root, dirs, files in os.walk(temp_extract):
                    for file in files:
                        if file.startswith(tool_name) and (file.endswith(".exe") or "." not in file):
                            source = Path(root) / file
                            target = self.bin_dir / file
                            if target.exists():
                                os.remove(target)
                            shutil.move(str(source), str(target))
                            self._make_executable(target)
                            found_count += 1
                            break

            if found_count == 0:
                raise Exception("Binary not found in archive")

        finally:
            if temp_extract.exists():
                shutil.rmtree(temp_extract)

    def _make_executable(self, path):
        """Set the executable bit on *path* (no-op on Windows)."""
        if self.os_type != "windows":
            st = os.stat(path)
            os.chmod(path, st.st_mode | stat.S_IEXEC)
|
|
320
|
+
|
|
321
|
+
# Module-level singleton shared across the CLI; importing this module
# creates ~/.weeb-cli/bin and extends PATH as a side effect.
dependency_manager = DependencyManager()
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from weeb_cli.services.scraper import scraper
|
|
2
|
+
|
|
3
|
+
def get_details(anime_id):
    """Fetch anime details through the scraper and flatten them into a dict.

    Several keys are deliberately duplicated (id/slug, title/name,
    description/synopsis, number/ep_num) so callers written against either
    naming convention keep working.  Returns None when nothing is found.
    """
    details = scraper.get_details(anime_id)
    if not details:
        return None

    def _episode_dict(ep):
        # "name" falls back to a generated label for untitled episodes
        # (user-facing string, intentionally left as-is).
        return {
            "id": ep.id,
            "number": ep.number,
            "ep_num": ep.number,
            "title": ep.title,
            "name": ep.title or f"Bölüm {ep.number}",
            "season": ep.season,
            "url": ep.url,
        }

    flattened = {
        "id": details.id,
        "slug": details.id,
        "title": details.title,
        "name": details.title,
        "description": details.description,
        "synopsis": details.description,
        "cover": details.cover,
        "genres": details.genres,
        "year": details.year,
        "status": details.status,
        "total_episodes": details.total_episodes,
        "episodes": [_episode_dict(ep) for ep in details.episodes],
    }
    return flattened
|
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import json
|
|
3
|
+
import re
|
|
4
|
+
import threading
|
|
5
|
+
import time
|
|
6
|
+
import subprocess
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from rich.console import Console
|
|
9
|
+
from weeb_cli.config import config
|
|
10
|
+
from weeb_cli.services.dependency_manager import dependency_manager
|
|
11
|
+
|
|
12
|
+
# Shared rich console for this module's output.
console = Console()
|
|
13
|
+
|
|
14
|
+
class QueueManager:
    """Background download queue persisted to ~/.weeb-cli/download_queue.json.

    Queue items are dicts with keys: anime_title, episode_number, episode_id,
    slug, status ("pending" | "processing" | "completed" | "failed"),
    added_at, progress and eta.  One manager thread promotes pending items and
    spawns a worker thread per download, up to the configured limit.
    """

    def __init__(self):
        self.config_dir = Path.home() / ".weeb-cli"
        self.queue_file = self.config_dir / "download_queue.json"
        self.queue = []
        self.active_downloads = 0
        # RLock, not Lock: public methods mutate the queue and then persist it
        # via _save_queue (which also acquires this lock), so the lock must be
        # re-entrant or those nested acquisitions would self-deadlock.
        self.lock = threading.RLock()
        self.running = False
        self.worker_thread = None
        self._load_queue()

    def _load_queue(self):
        """Load the persisted queue; a missing or corrupt file yields an empty queue."""
        if self.queue_file.exists():
            try:
                with open(self.queue_file, 'r', encoding='utf-8') as f:
                    self.queue = json.load(f)
            except (OSError, ValueError):
                # Corrupt or unreadable state file: start fresh rather than crash.
                self.queue = []

    def _save_queue(self):
        """Persist the current queue to disk (thread-safe)."""
        with self.lock:
            with open(self.queue_file, 'w', encoding='utf-8') as f:
                json.dump(self.queue, f, indent=2, ensure_ascii=False)

    def start_queue(self):
        """Start the manager thread if it is not already running."""
        if self.running:
            return
        self.running = True
        if self.worker_thread is None or not self.worker_thread.is_alive():
            self.worker_thread = threading.Thread(target=self._manage_queue, daemon=True)
            self.worker_thread.start()

    def stop_queue(self):
        """Signal the manager loop to exit after its current iteration."""
        self.running = False

    def is_running(self):
        """Return True while the manager thread is alive and processing."""
        return self.running and self.worker_thread is not None and self.worker_thread.is_alive()

    def has_incomplete_downloads(self):
        """Return True if any item is still pending or in flight."""
        return any(item["status"] in ["pending", "processing"] for item in self.queue)

    def get_incomplete_count(self):
        """Number of items that are pending or in flight."""
        return len([item for item in self.queue if item["status"] in ["pending", "processing"]])

    def get_pending_count(self):
        """Number of items waiting to start."""
        return len([item for item in self.queue if item["status"] == "pending"])

    def resume_incomplete(self):
        """Requeue items interrupted mid-download and restart the manager."""
        for item in self.queue:
            if item["status"] == "processing":
                # A "processing" item after restart was interrupted; retry it.
                item["status"] = "pending"
        self._save_queue()
        self.start_queue()

    def cancel_incomplete(self):
        """Drop all pending/in-flight items, keeping completed/failed history."""
        self.queue = [item for item in self.queue if item["status"] not in ["pending", "processing"]]
        self._save_queue()

    def is_downloading(self, slug, episode_id=None):
        """Return True if *slug* (optionally a specific episode) is queued or active."""
        for item in self.queue:
            if item["slug"] == slug and item["status"] in ["pending", "processing"]:
                if episode_id is None or item["episode_id"] == episode_id:
                    return True
        return False

    def add_to_queue(self, anime_title, episodes, slug):
        """Enqueue *episodes* for *anime_title*; returns how many were newly added.

        Episodes already pending or in flight are skipped.
        """
        added = 0
        with self.lock:
            for ep in episodes:
                ep_id = ep.get("id")
                if self.is_downloading(slug, ep_id):
                    continue

                item = {
                    "anime_title": anime_title,
                    "episode_number": ep.get("number") or ep.get("ep_num"),
                    "episode_id": ep_id,
                    "slug": slug,
                    "status": "pending",
                    "added_at": time.time(),
                    "progress": 0,
                    "eta": "?"
                }
                # Second dedup pass guards against duplicates within this batch.
                if not any(x['episode_id'] == item['episode_id'] and x['status'] in ['pending', 'processing'] for x in self.queue):
                    self.queue.append(item)
                    added += 1
            self._save_queue()
        return added

    def _sanitize_filename(self, name):
        """Strip characters that are invalid in Windows/Unix file names."""
        return re.sub(r'[<>:"/\\|?*]', '', name).strip()

    def _manage_queue(self):
        """Manager loop: promote pending items to workers; exit when drained."""
        while self.running:
            max_workers = config.get("max_concurrent_downloads", 3)

            with self.lock:
                active_count = len([x for x in self.queue if x["status"] == "processing"])
                pending = [x for x in self.queue if x["status"] == "pending"]

                if active_count < max_workers and pending:
                    to_start = pending[0]
                    to_start["status"] = "processing"
                    self._save_queue()

                    t = threading.Thread(target=self._run_task, args=(to_start,))
                    t.start()

            if not pending and active_count == 0:
                # Queue fully drained: stop the manager thread.
                self.running = False
                break

            time.sleep(1)

    def _run_task(self, item):
        """Worker entry point: download one item and record its final status."""
        try:
            self._download_item(item)
            item["status"] = "completed"
            item["progress"] = 100
            item["eta"] = "-"
        except Exception as e:
            item["status"] = "failed"
            item["error"] = str(e)
            item["eta"] = ""
        self._save_queue()

    def _download_item(self, item):
        """Resolve the stream for *item* and dispatch to the best downloader.

        Raises on any failure so _run_task can mark the item failed.
        """
        from weeb_cli.services.watch import get_streams
        download_dir = Path(config.get("download_dir"))
        safe_title = self._sanitize_filename(item["anime_title"])
        anime_dir = download_dir / safe_title
        anime_dir.mkdir(parents=True, exist_ok=True)

        ep_num = item["episode_number"]
        filename = f"{safe_title} - S1B{ep_num}.mp4"
        output_path = anime_dir / filename

        stream_data = get_streams(item["slug"], item["episode_id"])
        if not stream_data:
            raise Exception("No stream data")

        stream_url = self._extract_url(stream_data)
        if not stream_url:
            raise Exception("No stream URL")

        is_hls = ".m3u8" in stream_url

        # HLS needs a segment-aware tool (yt-dlp or ffmpeg); direct files go
        # through aria2 when available, plain HTTP otherwise.
        if is_hls:
            if config.get("ytdlp_enabled") and dependency_manager.check_dependency("yt-dlp"):
                self._download_ytdlp(stream_url, output_path, item)
            else:
                self._download_ffmpeg(stream_url, output_path, item)
        else:
            if config.get("aria2_enabled") and dependency_manager.check_dependency("aria2"):
                self._download_aria2(stream_url, output_path, item)
            else:
                self._download_generic(stream_url, output_path, item)

    def _extract_url(self, data):
        """Pick the best stream URL from a provider response.

        Handles up to three levels of "data" nesting, then either a list of
        source dicts (preferring servers in PRIORITY order) or a single
        {"url": ...} node.  Returns None when nothing usable is found.
        """
        PRIORITY = ["ALUCARD", "AMATERASU", "SIBNET", "MP4UPLOAD", "UQLOAD"]

        if isinstance(data, dict):
            node = data
            for _ in range(3):
                if "data" in node and isinstance(node["data"], (dict, list)):
                    node = node["data"]
                else:
                    break

            sources = node if isinstance(node, list) else node.get("links") or node.get("sources")
            if sources and isinstance(sources, list) and len(sources) > 0:
                def get_priority(s):
                    server = (s.get("server") or "").upper()
                    for i, p in enumerate(PRIORITY):
                        if p in server:
                            return i
                    return 999  # unknown servers sort last

                sorted_sources = sorted(sources, key=get_priority)

                for src in sorted_sources:
                    url = src.get("url")
                    if url:
                        return url

            elif isinstance(node, dict) and "url" in node:
                return node["url"]
        return None

    def _download_aria2(self, url, path, item):
        """Download with aria2c (multi-connection), parsing progress/ETA from its log."""
        aria2 = dependency_manager.check_dependency("aria2")
        conn = config.get("aria2_max_connections", 16)
        cmd = [
            aria2,
            url,
            "-d", str(path.parent),
            "-o", path.name,
            "-x", str(conn),
            "-s", str(conn),
            "-j", "1",
            "-c",
            "--summary-interval=2",
            "--console-log-level=warn"
        ]

        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, encoding='utf-8', errors='replace')

        while True:
            line = process.stdout.readline()
            if not line and process.poll() is not None:
                break
            if line:
                if "ETA:" in line:
                    try:
                        parts = line.split("ETA:")
                        eta_part = parts[1].split("]")[0]
                        item["eta"] = eta_part.strip()

                        match = re.search(r'\((\d+)%\)', line)
                        if match:
                            item["progress"] = int(match.group(1))
                    except (IndexError, ValueError):
                        # Progress parsing is best-effort; never kill the download.
                        pass

        if process.returncode != 0:
            raise Exception("Aria2 failed")

    def _download_ytdlp(self, url, path, item):
        """Download with yt-dlp, parsing progress/ETA from its --newline output."""
        ytdlp = dependency_manager.check_dependency("yt-dlp")
        fmt = config.get("ytdlp_format", "best")
        cmd = [
            ytdlp,
            "-f", fmt,
            "-o", str(path),
            "--no-part",
            "--newline",
            url
        ]
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, encoding='utf-8', errors='replace')
        while True:
            line = process.stdout.readline()
            if not line and process.poll() is not None:
                break
            if line:
                if "[download]" in line and "%" in line:
                    try:
                        p_str = line.split("%")[0].split()[-1]
                        item["progress"] = float(p_str)
                        if "ETA" in line:
                            item["eta"] = line.split("ETA")[-1].strip()
                    except (IndexError, ValueError):
                        # Progress parsing is best-effort; never kill the download.
                        pass
        if process.returncode != 0:
            raise Exception("yt-dlp failed")

    def _download_ffmpeg(self, url, path, item):
        """Remux an HLS stream to mp4 with ffmpeg (no progress reporting)."""
        item["eta"] = ""
        ffmpeg = dependency_manager.check_dependency("ffmpeg")
        cmd = [
            ffmpeg,
            "-i", url,
            "-c", "copy",
            "-bsf:a", "aac_adtstoasc",
            str(path),
            "-y"
        ]
        subprocess.run(cmd, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

    def _download_generic(self, url, path, item):
        """Plain streaming HTTP download with manual progress/ETA bookkeeping."""
        import requests
        item["eta"] = "..."
        # timeout so a stalled server cannot hang the worker thread forever.
        with requests.get(url, stream=True, timeout=30) as r:
            r.raise_for_status()
            total = int(r.headers.get('content-length', 0))
            if total > 0:
                downloaded = 0
                start_time = time.time()
                with open(path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)
                        downloaded += len(chunk)
                        item["progress"] = int((downloaded / total) * 100)

                        elapsed = time.time() - start_time
                        if elapsed > 0:
                            speed = downloaded / elapsed
                            # Guard: keep-alive chunks can be empty, leaving
                            # speed at 0 and dividing by zero below.
                            if speed > 0:
                                remaining = total - downloaded
                                eta_s = remaining / speed
                                item["eta"] = f"{int(eta_s)}s"
            else:
                # No content-length: just stream to disk without progress.
                with open(path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=8192):
                        f.write(chunk)
|
|
307
|
+
|
|
308
|
+
# Module-level singleton shared by the download commands; importing this
# module loads any persisted queue state from disk.
queue_manager = QueueManager()
|