rcdl-3.0.0b18-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of rcdl might be problematic.
- rcdl/__init__.py +10 -0
- rcdl/__main__.py +37 -0
- rcdl/core/__init__.py +0 -0
- rcdl/core/adapters.py +241 -0
- rcdl/core/api.py +76 -0
- rcdl/core/config.py +212 -0
- rcdl/core/db.py +283 -0
- rcdl/core/db_queries.py +97 -0
- rcdl/core/downloader.py +307 -0
- rcdl/core/downloader_subprocess.py +366 -0
- rcdl/core/file_io.py +41 -0
- rcdl/core/fuse.py +127 -0
- rcdl/core/models.py +105 -0
- rcdl/core/opti.py +90 -0
- rcdl/core/parser.py +282 -0
- rcdl/gui/__init__.py +0 -0
- rcdl/gui/__main__.py +5 -0
- rcdl/gui/db_viewer.py +41 -0
- rcdl/gui/gui.py +54 -0
- rcdl/gui/video_manager.py +170 -0
- rcdl/interface/__init__.py +0 -0
- rcdl/interface/cli.py +216 -0
- rcdl/interface/ui.py +194 -0
- rcdl/utils.py +180 -0
- rcdl-3.0.0b18.dist-info/METADATA +122 -0
- rcdl-3.0.0b18.dist-info/RECORD +28 -0
- rcdl-3.0.0b18.dist-info/WHEEL +4 -0
- rcdl-3.0.0b18.dist-info/entry_points.txt +3 -0
rcdl/__init__.py
ADDED
rcdl/__main__.py
ADDED
@@ -0,0 +1,37 @@
+# __main__.py
+
+"""
+__main__: entry point of the program
+Setup logging, create files/folders structures, check dependencies,
+init Config global variables, init database,
+Then call cli group
+"""
+
+import logging
+
+from rcdl.core.config import Config, setup_logging, check_dependencies
+
+# setup file structure
+Config.ensure_dirs()
+Config.ensure_files()
+
+# load config file settings
+Config.load_config()
+
+# setup logging
+setup_logging(Config.LOG_FILE, level=0)
+
+# check dependencies
+check_dependencies()
+
+logging.info("--- INIT ---")
+logging.info("Logger initialized")
+
+# init database
+from rcdl.core.db import DB  # noqa: E402
+
+db = DB()
+db.init_database()
+db.close()
+
+from rcdl.interface.cli import cli  # noqa: E402, F401
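The module above does all of its work as import-time side effects and only pulls in `cli` at the very end. A minimal invocation sketch, assuming the wheel is installed and that `cli` is a Click-style command group (the hunk imports it, but its definition lives in rcdl/interface/cli.py, not shown here):

    # Hypothetical usage, not part of the wheel. RCDL_BASE_DIR must be set
    # before the import because Config.BASE_DIR is read at class-definition time.
    import os

    os.environ["RCDL_BASE_DIR"] = "/tmp/rcdl-demo"  # assumed optional override

    # Importing the module runs ensure_dirs/ensure_files/load_config, the
    # logging setup, the dependency check and the database init shown above.
    from rcdl.__main__ import cli

    cli()  # dispatches to the command group defined in rcdl/interface/cli.py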
rcdl/core/__init__.py
ADDED
File without changes
rcdl/core/adapters.py
ADDED
@@ -0,0 +1,241 @@
+# core/adapters.py
+
+"""Convert [Any] into proper Models from models.py"""
+
+import json
+import sqlite3
+from dataclasses import fields
+
+from rcdl.core import parser
+from rcdl.interface.ui import UI
+from rcdl.core.models import Post, Media, Status, FusedMedia
+from rcdl.utils import get_date_now, get_json_hash
+
+VALID_POST_KEYS = set(
+    [
+        "id",
+        "user",
+        "service",
+        "title",
+        "substring",
+        "published",
+        "file",
+        "attachments",
+    ]
+)
+
+
+def _compute_json_metadata(raw: dict) -> tuple[str, str, str]:
+    """From a json dict, return:
+    - raw_json: str
+    - json_hash: str
+    - fetched_at: str (datetime)
+    """
+    raw_json, json_hash = get_json_hash(raw)
+    fetched_at = get_date_now()
+    return raw_json, json_hash, fetched_at
+
+
+def json_posts_to_posts(posts: list[dict]) -> list[Post]:
+    """Convert a list of json posts (dict) into a list of Post models.
+    Ignore a post if conversion failed"""
+    formatted_posts = []
+    for post in posts:
+        p = json_post_to_post(post)
+        if p is not None:
+            formatted_posts.append(p)
+    return formatted_posts
+
+
+def json_post_to_post(post: dict) -> Post | None:
+    """Convert a json post (dict) into a Post model
+    or return None if conversion failed"""
+    post_keys = set(post)
+    if post_keys != VALID_POST_KEYS:
+        UI.error(
+            f"Post id {post.get('id')} of {post.get('user')} "
+            f"has invalid schema. "
+            f"Missing: {VALID_POST_KEYS - post_keys}, "
+            f"Extra: {post_keys - VALID_POST_KEYS}"
+        )
+        return None
+
+    try:
+        domain = parser.get_domain(post["service"])
+        raw_json, json_hash, fetched_at = _compute_json_metadata(post)
+        return Post(
+            **post,
+            domain=domain,
+            json_hash=json_hash,
+            raw_json=raw_json,
+            fetched_at=fetched_at,
+        )
+    except TypeError as e:
+        UI.error(
+            f"Post id {post.get('id')} from {post.get('user')} could not be parsed: {e}"
+        )
+        return None
+
+
+def row_to_post(row: sqlite3.Row) -> Post | None:
+    """Convert a sqlite3 row into a Post model.
+    Return None if conversion failed"""
+    try:
+        raw = json.loads(row["raw_json"])
+        return Post(
+            id=row["id"],
+            user=row["user"],
+            service=row["service"],
+            domain=row["domain"],
+            published=row["published"],
+            json_hash=row["json_hash"],
+            raw_json=row["raw_json"],
+            fetched_at=row["fetched_at"],
+            title=raw["title"],
+            substring=raw["substring"],
+            file=raw["file"],
+            attachments=raw["attachments"],
+        )
+    except KeyError as e:
+        UI.error(
+            f"KeyError: Failed to convert {row['id']} (row_id) into Post model due to: {e}"
+        )
+        return None
+    except TypeError as e:
+        UI.error(
+            f"TypeError: Failed to convert {row['id']} (row_id) into Post model due to: {e}"
+        )
+        return None
+    except ValueError as e:
+        UI.error(
+            f"ValueError/JSONDecodeError: Failed to convert "
+            f"{row['id']} (row_id) into Post model due to: {e}"
+        )
+        return None
+
+
+def rows_to_posts(rows: list[sqlite3.Row]) -> list[Post]:
+    """Convert a list of sqlite3 rows. Return a list of Post models.
+    Ignore the row if conversion fails"""
+    posts: list[Post] = []
+    for row in rows:
+        post = row_to_post(row)
+        if post is not None:
+            posts.append(post)
+
+    if len(posts) != len(rows):
+        UI.error(
+            f"From {len(rows)} rows, only converted {len(posts)}."
+            f" {len(rows) - len(posts)} error."
+        )
+
+    return posts
+
+
+def row_to_media(row: sqlite3.Row) -> Media | None:
+    """Convert a sqlite3 row into a Media model.
+    Return None if conversion failed"""
+    try:
+        # create a dict to hold columns of the row that are present in Media.
+        # Ignore columns (like the default autoincrement ID) that are not a field in Media
+        media_data = {}
+        for field in fields(Media):
+            field_name = field.name
+            if field_name in row.keys():
+                value = row[field_name]
+                if field_name == "status" and value is not None:
+                    value = Status(value)
+                media_data[field_name] = value
+        return Media(**media_data)
+    except (KeyError, TypeError, ValueError) as e:
+        UI.error(
+            f"Key/Type/Value Error: Failed to convert row {row['id']} into Post model due to {e}"
+        )
+        return None
+
+
+def rows_to_medias(rows: list[sqlite3.Row]) -> list[Media]:
+    """Convert a list of sqlite3 rows. Return a list of Media models.
+    Ignore a row if conversion failed"""
+    medias: list[Media] = []
+    for row in rows:
+        media = row_to_media(row)
+        if media is not None:
+            medias.append(media)
+
+    if len(medias) != len(rows):
+        UI.error(
+            f"From {len(rows)} rows, only converted {len(medias)}."
+            f" {len(rows) - len(medias)} error."
+        )
+
+    return medias
+
+
+def row_to_fused_media(row: sqlite3.Row) -> FusedMedia | None:
+    """Convert a sqlite3 row into a FusedMedia model.
+    Return None if conversion fails"""
+    if row is None:
+        return None
+    try:
+        fuses_data = {}
+        for field in fields(FusedMedia):
+            field_name = field.name
+            if field_name in row.keys():
+                value = row[field_name]
+                if field_name == "status" and value is not None:
+                    value = Status(value)
+                fuses_data[field_name] = value
+        return FusedMedia(**fuses_data)
+    except (KeyError, TypeError, ValueError) as e:
+        UI.error(
+            f"Key/Type/Value Error: Failed to convert row "
+            f"{row['id']} into FusedMedia model due to {e}"
+        )
+        return None
+
+
+def rows_to_fuses(rows: list[sqlite3.Row]) -> list[FusedMedia]:
+    """Convert a list of sqlite3 rows into a list of FusedMedia models.
+    Ignore a row if conversion failed"""
+    fuses: list[FusedMedia] = []
+    for row in rows:
+        fuse = row_to_fused_media(row)
+        if fuse is not None:
+            fuses.append(fuse)
+
+    if len(fuses) != len(rows):
+        UI.error(
+            f"From {len(rows)} rows, only converted {len(fuses)}."
+            f" {len(rows) - len(fuses)} error."
+        )
+
+    return fuses
+
+
+def post_to_videos(post: Post) -> list[Media]:
+    """Extract a list of Media models from a Post model"""
+    json_post = json.loads(post.raw_json)
+
+    urls = parser.extract_video_urls(json_post)
+    sequence = 0
+    medias: list[Media] = []
+    for url in urls:
+        medias.append(
+            Media(
+                post_id=post.id,
+                service=post.service,
+                url=url,
+                duration=0.0,
+                sequence=sequence,
+                status=Status.PENDING,
+                checksum="",
+                file_path=parser.get_filename(json_post, url),
+                created_at="",
+                updated_at="",
+                file_size=0,
+                fail_count=0,
+            )
+        )
+        sequence += 1
+    return medias
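A short round-trip sketch of `json_post_to_post`: a dict whose keys match `VALID_POST_KEYS` exactly is turned into a `Post`, anything else is rejected with `None`. The values below are placeholders, and the sketch assumes `parser.get_domain` recognises the service string:

    from rcdl.core.adapters import json_post_to_post

    raw_post = {  # placeholder values for illustration only
        "id": "12345",
        "user": "example_creator",
        "service": "onlyfans",
        "title": "demo post",
        "substring": "",
        "published": "2025-01-01T00:00:00",
        "file": {},
        "attachments": [],
    }

    post = json_post_to_post(raw_post)        # Post instance on success
    bad = json_post_to_post({"id": "12345"})  # missing keys -> UI.error, returns None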
rcdl/core/api.py
ADDED
@@ -0,0 +1,76 @@
+# core/api.py
+
+"""Build real URL for api request"""
+
+from rcdl.core.models import Creator
+
+
+class URL:
+    """Build real URL for api request"""
+
+    DOMAINS_BASE_URL = {
+        "coomer": "https://coomer.st/api/v1/",
+        "kemono": "https://kemono.cr/api/v1/",
+    }
+
+    @staticmethod
+    def get_base_url(domain: str) -> str:
+        """Return https://domain.com"""
+        if domain not in URL.DOMAINS_BASE_URL:
+            raise KeyError(f"{domain} not in known domains urls")
+        return URL.DOMAINS_BASE_URL[domain]
+
+    @staticmethod
+    def get_post_revision(creator: Creator, post_id) -> str:
+        """Return post revision url"""
+        return (
+            f"{URL.get_base_url(creator.domain)}{creator.service}"
+            f"/user/{creator.id}/post/{post_id}/revisions"
+        )
+
+    @staticmethod
+    def get_headers() -> dict:
+        """Return necessary request headers for a successful request"""
+        return {
+            "User-Agent": (
+                "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
+                "AppleWebKit/537.36 (KHTML, like Gecko) "
+                "Chrome/117.0 Safari/537.36"
+            ),
+            "Accept": "text/css",
+        }
+
+    @staticmethod
+    def get_url_from_file(domain: str, path_url: str):
+        """Add path_url to the base domain url"""
+        if domain == "coomer":
+            return f"https://coomer.st{path_url}"
+        if domain == "kemono":
+            return f"https://kemono.cr{path_url}"
+
+        raise ValueError(
+            f"Domain {domain} is not an accepted value/does not exist. "
+            f"Please check your creators.json file"
+        )
+
+    @staticmethod
+    def add_params(url: str, params: dict):
+        """Create the parameters string (key=value&key=...)"""
+        url += "?"
+        for key in params:
+            url += f"{key}={params[key]}&"
+        return url[:-1]
+
+    @staticmethod
+    def get_creator_post_wo_param(creator: Creator) -> str:
+        """Get creator posts url without parameters"""
+        return (
+            f"{URL.get_base_url(creator.domain)}{creator.service}"
+            f"/user/{creator.id}/posts"
+        )
+
+    @staticmethod
+    def get_posts_page_url_wo_param():
+        """Get posts page url without parameters -> used in tag search"""
+        domain = URL.DOMAINS_BASE_URL["coomer"]
+        return f"{domain}posts"
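The URL builders are plain string helpers, so their behaviour can be read straight off the hunk. A small sketch using only the string-based methods (the `Creator`-based ones need the dataclass from models.py, whose fields are not shown here); the service and path values are placeholders:

    from rcdl.core.api import URL

    base = URL.get_base_url("coomer")  # "https://coomer.st/api/v1/"
    posts_url = URL.add_params(f"{base}onlyfans/user/example/posts", {"o": 0, "q": "tag"})
    # -> "https://coomer.st/api/v1/onlyfans/user/example/posts?o=0&q=tag"

    file_url = URL.get_url_from_file("kemono", "/data/ab/cd/video.mp4")
    # -> "https://kemono.cr/data/ab/cd/video.mp4"

    URL.get_base_url("unknown")  # raises KeyError: unknown domain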
rcdl/core/config.py
ADDED
@@ -0,0 +1,212 @@
+# core/config.py
+
+"""
+Handle Config init, dependencies check, logging setup,
+files and folders structures, config settings parameters init
+"""
+
+from pathlib import Path
+import logging
+import os
+import tomllib
+import subprocess
+
+from rcdl.core.file_io import write_txt
+
+
+class Config:
+    """Global app var/parameters"""
+
+    # paths
+    APP_NAME = "rcdl"
+
+    BASE_DIR = Path(os.environ.get("RCDL_BASE_DIR", Path.home() / "Videos/rcdl"))
+
+    CACHE_DIR = BASE_DIR / ".cache"
+    DB_PATH = CACHE_DIR / "cdl.db"
+    LOG_FILE = CACHE_DIR / "cdl.log"
+    CREATORS_FILE = CACHE_DIR / "creators.txt"
+    DISCOVER_DIR = CACHE_DIR / "discover"
+    CONFIG_FILE = CACHE_DIR / "config.toml"
+
+    DEBUG = False
+    DRY_RUN = False
+
+    # api settings
+    POST_PER_PAGE: int = 50
+    DEFAULT_MAX_PAGE: int = 10
+    MAX_FAIL_COUNT: int = 7
+    TIMEOUT: int = 10
+
+    # fuse settings
+    MAX_WIDTH: int = 1920
+    MAX_HEIGHT: int = 1080
+    FPS: int = 30
+    PRESET: str = "veryfast"
+    THREADS: int = 0
+
+    HANDBRAKE_RUN_CMD = "HandBrakeCLI"
+
+    CHECKSUM_RETRY = 2
+
+    @classmethod
+    def ensure_dirs(cls):
+        """Ensure directories exist"""
+        cls.CACHE_DIR.mkdir(parents=True, exist_ok=True)
+        cls.DISCOVER_DIR.mkdir(exist_ok=True)
+
+    @classmethod
+    def ensure_files(cls):
+        """Ensure files exist, populate defaults if necessary"""
+        files = [cls.DB_PATH, cls.CREATORS_FILE, cls.CONFIG_FILE]
+        for file in files:
+            if not file.exists():
+                file.touch()
+                logging.info("Created file %s", file)
+                if file == cls.CREATORS_FILE:
+                    write_txt(cls.CREATORS_FILE, DEFAULT_CREATORS, mode="w")
+                if file == cls.CONFIG_FILE:
+                    write_txt(cls.CONFIG_FILE, DEFAULT_CONFIG, mode="w")
+
+    @classmethod
+    def creator_folder(cls, creator_id: str) -> Path:
+        """Return creator folder path based on user/creator_id"""
+        folder = cls.BASE_DIR / creator_id
+        folder.mkdir(exist_ok=True)
+        return folder
+
+    @classmethod
+    def cache_file(cls, filename: str, ext: str = ".json") -> Path:
+        """Return filepath of a file in the .cache/ folder"""
+        file_name = filename + ext
+        file = cls.CACHE_DIR / file_name
+        return file
+
+    @classmethod
+    def set_debug(cls, debug: bool):
+        """Set class variable DEBUG"""
+        cls.DEBUG = debug
+
+    @classmethod
+    def set_dry_run(cls, dry_run: bool):
+        """Set class variable DRY_RUN"""
+        cls.DRY_RUN = dry_run
+
+    @classmethod
+    def load_config(cls):
+        """Load config.toml and set class vars with values from config.toml"""
+        with open(cls.CONFIG_FILE, "rb") as f:
+            data = tomllib.load(f)
+        app = data.get("app", {})
+        cls.DEFAULT_MAX_PAGE = app.get("default_max_page", cls.DEFAULT_MAX_PAGE)
+        cls.MAX_FAIL_COUNT = app.get("max_fail_count", cls.MAX_FAIL_COUNT)
+        cls.TIMEOUT = app.get("timeout", cls.TIMEOUT)
+        cls.CHECKSUM_RETRY = app.get("checksum_retry", cls.CHECKSUM_RETRY)
+
+        video = data.get("video", {})
+        cls.MAX_WIDTH = video.get("max_width", cls.MAX_WIDTH)
+        cls.MAX_HEIGHT = video.get("max_height", cls.MAX_HEIGHT)
+        cls.FPS = video.get("fps", cls.FPS)
+        cls.PRESET = video.get("preset", cls.PRESET)
+        cls.THREADS = video.get("threads", cls.THREADS)
+
+        paths = data.get("paths", {})
+        if "base_dir" in paths:
+            cls.BASE_DIR = Path(
+                os.environ.get("RCDL_BASE_DIR", os.path.expanduser(paths["base_dir"]))
+            )
+            cls.CACHE_DIR = cls.BASE_DIR / ".cache"
+        if "handbrake_run_cmd" in paths:
+            cls.HANDBRAKE_RUN_CMD = paths.get("handbrake_run_cmd")
+
+
+def setup_logging(log_file: Path, level: int = 0):
+    """Setup logging for rcdl"""
+    logger = logging.getLogger()
+    logger.setLevel(level)
+    logger.handlers.clear()  # avoid double handlers if called multiple times
+
+    # logging format & file handler
+    file_handler = logging.FileHandler(log_file, encoding="utf-8", mode="a")
+    file_handler.setFormatter(
+        logging.Formatter(
+            "{asctime} - {levelname} - {message}",
+            style="{",
+            datefmt="%Y-%m-%d %H:%M:%S",
+        )
+    )
+    logger.addHandler(file_handler)
+
+    # log library warnings/errors
+    stream = logging.StreamHandler()
+    stream.setLevel(logging.ERROR)  # only show warnings/errors from libraries
+    logger.addHandler(stream)
+
+
+def check_dependencies():
+    """Check external program versions against last tested working versions"""
+    for prgrm, info in DEPENDENCIES_TEST_VERSION.items():
+        try:
+            result = subprocess.run(
+                info["cmd"],
+                capture_output=True,
+                text=True,
+                shell=True,
+                check=False,
+            )
+            version = result.stdout.strip()
+
+            if version != info["version"]:
+                print(
+                    f"Last tested version for {prgrm}:"
+                    f" {info['version']} -> yours: {version}"
+                )
+                if version == "":
+                    print(f"{prgrm} is not installed.")
+                print(f"Check {prgrm} is installed if your version is empty.")
+        except (OSError, subprocess.SubprocessError) as e:
+            print(
+                f"Failed to check {prgrm} version due to: {e}\nCheck {prgrm} is installed."
+            )
+
+
+DEPENDENCIES_TEST_VERSION = {
+    "yt-dlp": {"cmd": "yt-dlp --version", "version": "2025.12.08"},
+    "aria2c": {
+        "cmd": "aria2c -v | head -n 1",
+        "version": "aria2 version 1.37.0",
+    },
+    "ffmpeg": {
+        "cmd": 'ffmpeg -version | sed -n "s/ffmpeg version \\([-0-9.]*\\).*/\\1/p;"',
+        "version": "7.1.1-1",
+    },
+    "handbrake": {
+        "cmd": Config.HANDBRAKE_RUN_CMD
+        + ' --version 2>&1 | sed -n "s/HandBrake \\([0-9.]*\\).*/\\1/p"',
+        "version": "1.9.2",
+    },
+}
+
+
+# default creators
+DEFAULT_CREATORS = ["boixd/onlyfans"]
+
+# default config params
+DEFAULT_CONFIG: str = """\
+[app]
+default_max_page = 10
+max_fail_count = 7
+timeout = 10
+checksum_retry = 2
+
+[fuse]
+max_width = 1920
+max_height = 1080
+fps = 30
+preset = "veryfast"
+threads = 0
+
+[paths]
+base_dir = "~/Videos/rcdl"
+handbrake_run_cmd = "HandBrakeCLI"
+"""
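Precedence sketch for the path settings, assuming the default config.toml above: the `RCDL_BASE_DIR` environment variable, when set before the module is imported, wins over `paths.base_dir` from the file, which in turn overrides the built-in `~/Videos/rcdl` default:

    # Hypothetical walkthrough, not part of the wheel.
    import os

    os.environ["RCDL_BASE_DIR"] = "/tmp/rcdl-demo"  # must be set before the import

    from rcdl.core.config import Config, check_dependencies

    Config.ensure_dirs()    # creates /tmp/rcdl-demo/.cache and .cache/discover
    Config.ensure_files()   # writes the default creators.txt and config.toml
    Config.load_config()    # env var still wins over paths.base_dir
    print(Config.BASE_DIR)  # /tmp/rcdl-demo

    check_dependencies()    # shells out to yt-dlp, aria2c, ffmpeg and HandBrakeCLI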