rcdl 3.0.0b18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rcdl might be problematic. Click here for more details.

rcdl/core/db.py ADDED
@@ -0,0 +1,283 @@
1
+ # core/db.py
2
+
3
+ """
4
+ Handle SQL Database
5
+ """
6
+
7
+ import sqlite3
8
+
9
+ from rcdl.core import adapters
10
+ from rcdl.core import db_queries as queries
11
+ from rcdl.core.config import Config
12
+ from rcdl.core.models import Post, Media, Status, FusedMedia, FusedStatus
13
+ from rcdl.utils import get_date_now
14
+
15
+ from rcdl.interface.ui import UI
16
+
17
+
18
class DB:
    """Handle all sqlite database commands (posts, medias, fuses tables)."""

    def __init__(self):
        # sqlite3.Row lets callers access columns by name (row["count"], ...).
        self.conn = sqlite3.connect(Config.DB_PATH)
        self.conn.row_factory = sqlite3.Row

    def __enter__(self):
        """Support `with DB() as db:` usage."""
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Close the connection when leaving the with-block."""
        self.close()

    def close(self):
        """Properly close database"""
        self.conn.close()

    def init_database(self):
        """Create tables (posts, medias, fuses) if they don't exist."""
        self.conn.execute(queries.CREATE_POSTS_TABLE)
        self.conn.execute(queries.CREATE_MEDIAS_TABLE)
        self.conn.execute(queries.CREATE_FUSE_TABLE)
        self.conn.commit()

    def get_nb_per_status(self):
        """Return an info dict per table with the number of entries per status.

        info["medias"][status] = X
        info["fuses"][status] = Y
        info["posts"] = total number of posts
        """
        info = {"medias": {}, "fuses": {}, "posts": 0}
        for status in Status:
            info["medias"][status] = len(self.query_media_by_status(status))
        for status in FusedStatus:
            info["fuses"][status] = len(self.query_fuses_by_status(status))

        cur = self.conn.execute("SELECT COUNT(*) AS count FROM posts")
        row = cur.fetchone()
        info["posts"] = row["count"] if row else 0
        return info

    def query_post_by_id(self, _id: str) -> Post | None:
        """Get a post from its post id, or None when not found."""
        row = self.conn.execute(queries.QUERY_POST_ID, (_id,)).fetchone()

        UI.debug(f"{queries.QUERY_POST_ID} {_id} returned {row}")

        if row is None:
            return None

        return adapters.row_to_post(row)

    def query_post_by_user(self, user: str) -> list[Post]:
        """Get all posts of a user."""
        cur = self.conn.cursor()
        cur.execute(queries.QUERY_POST_USER, (user,))
        rows = cur.fetchall()

        UI.debug(f"{queries.QUERY_POST_USER} {user} returned {len(rows)} results")

        return adapters.rows_to_posts(rows)

    def query_media_by_status(self, status: Status) -> list[Media]:
        """Get all medias with the specified status."""
        rows = self.conn.execute(queries.QUERY_MEDIA_STATUS, (status.value,)).fetchall()
        UI.debug(
            f"{queries.QUERY_MEDIA_STATUS} {status.value} returned {len(rows)} result"
        )

        return adapters.rows_to_medias(rows)

    # Columns callers may sort medias by; guards the ORDER BY interpolation below.
    _MEDIA_SORT_COLUMNS = frozenset(
        {
            "id",
            "post_id",
            "service",
            "url",
            "duration",
            "sequence",
            "status",
            "checksum",
            "file_path",
            "created_at",
            "updated_at",
            "file_size",
            "fail_count",
        }
    )

    def query_medias_by_status_sorted(
        self,
        status: Status | list[Status],
        sort_by: str | None = None,
        ascending: bool = True,
    ) -> list[Media]:
        """Get all medias with specified status (one or multiple),
        optionally sorted by a column, ascending or descending.

        Raises:
            ValueError: if sort_by is not a known medias column.
        """
        order_clause = ""
        if sort_by:
            if sort_by not in self._MEDIA_SORT_COLUMNS:
                # BUGFIX: the old code only logged the error and then
                # interpolated the unvalidated value into the SQL anyway,
                # leaving an injection path through ORDER BY.
                UI.error(f"Invalid sort column: {sort_by}")
                raise ValueError(f"Invalid sort column: {sort_by}")
            order_clause = f"ORDER BY {sort_by} {'ASC' if ascending else 'DESC'}"

        # status filter: accept a single Status or a list of them
        if isinstance(status, Status):
            status = [status]

        status_values = [s.value if isinstance(s, Status) else s for s in status]
        placeholders = ", ".join("?" for _ in status_values)

        # sort_by is validated above; status values go through placeholders.
        sql = f"SELECT * FROM medias WHERE status IN ({placeholders}) {order_clause}"
        rows = self.conn.execute(sql, status_values).fetchall()

        UI.debug(
            f"Queried medias with status={status_values}, sorted by {sort_by}, ascending={ascending}, {len(rows)} results"
        )

        return adapters.rows_to_medias(rows)

    def query_media_by_post_id(self, _id: str) -> list[Media]:
        """Get all medias from the same post by post id."""
        rows = self.conn.execute(queries.QUERY_MEDIA_ID, (_id,)).fetchall()
        UI.debug(f"{queries.QUERY_MEDIA_ID} {_id} returned {len(rows)} result")
        return adapters.rows_to_medias(rows)

    def query_fuses_by_status(self, status: FusedStatus) -> list[FusedMedia]:
        """Get all fused_media with the specified status."""
        rows = self.conn.execute(queries.QUERY_FUSES_STATUS, (status.value,)).fetchall()
        UI.debug(
            f"{queries.QUERY_FUSES_STATUS} {status.value} returned {len(rows)} result"
        )

        return adapters.rows_to_fuses(rows)

    def query_fuses_by_id(self, _id: str) -> FusedMedia | None:
        """Get a fuse group by its unique post id, or None when not found."""
        row = self.conn.execute(queries.QUERY_FUSES_ID, (_id,)).fetchone()
        UI.debug(f"{queries.QUERY_FUSES_ID} {_id} returned {row} result")
        # BUGFIX: guard the missing-row case instead of handing None to the
        # adapter (consistent with query_post_by_id).
        if row is None:
            return None
        return adapters.row_to_fused_media(row)

    def _executemany_counted(self, query: str, values: list[tuple]) -> int:
        """Run executemany inside a transaction and return how many rows changed.

        Connection.total_changes is cumulative over the connection lifetime,
        so only the before/after delta reflects this call.
        """
        before = self.conn.total_changes
        with self.conn:
            self.conn.executemany(query, values)
        return self.conn.total_changes - before

    def insert_posts(self, posts: list[Post] | Post):
        """Add post(s) to DB if they do not already exist (UNIQUE post_id)."""
        if isinstance(posts, Post):
            posts = [posts]

        values = [
            (
                post.id,
                post.user,
                post.service,
                post.domain,
                post.published,
                post.json_hash,
                post.raw_json,
                post.fetched_at,
            )
            for post in posts
        ]

        # BUGFIX: the old code logged conn.total_changes (a cumulative
        # counter), not the number of rows inserted by this call.
        inserted = self._executemany_counted(queries.INSERT_POST, values)
        UI.debug(f"Inserted {inserted} new posts out of {len(posts)} total posts")

    def insert_medias(self, medias: list[Media] | Media):
        """Insert media(s) into the db if they do not already exist (UNIQUE post_id, url)."""
        if isinstance(medias, Media):
            medias = [medias]

        values = [
            (
                media.post_id,
                media.service,
                media.url,
                media.duration,
                media.sequence,
                media.status.value,
                media.checksum,
                media.file_path,
                media.created_at,
                get_date_now(),  # updated_at stamped at insert time
                media.file_size,
                media.fail_count,
            )
            for media in medias
        ]

        # BUGFIX: count only this call's inserts, not the cumulative total.
        inserted = self._executemany_counted(queries.INSERT_MEDIA, values)
        UI.debug(f"Inserted {inserted} new media out of {len(medias)} total medias")

    def update_media(self, media: Media):
        """Update a media entry in the db. Found by post_id & url; updates:
        duration, file_size, checksum, status, created_at, updated_at, fail_count.
        """
        params = (
            media.duration,
            media.file_size,
            media.checksum,
            media.status.value,
            media.created_at,
            get_date_now(),  # updated_at refreshed on every update
            media.fail_count,
            media.post_id,
            media.url,
        )
        with self.conn:
            self.conn.execute(queries.UPDATE_MEDIA, params)
        UI.debug(f"Updated media {media.post_id} / {media.url}")

    def insert_fused_media(self, fuses: list[FusedMedia] | FusedMedia):
        """Insert fused_media into the db if they do not already exist (UNIQUE post_id)."""
        if isinstance(fuses, FusedMedia):
            fuses = [fuses]

        values = [
            (
                fuse.id,
                fuse.duration,
                fuse.total_parts,
                fuse.status.value,
                fuse.checksum,
                fuse.file_path,
                fuse.created_at,
                get_date_now(),  # updated_at stamped at insert time
                fuse.file_size,
                fuse.fail_count,
            )
            for fuse in fuses
        ]

        # BUGFIX: count only this call's inserts, not the cumulative total.
        inserted = self._executemany_counted(queries.INSERT_FUSED_MEDIA, values)
        UI.debug(
            f"Inserted {inserted} new fused_media out of {len(fuses)} total fused_media"
        )

    def update_fuse(self, fuse: FusedMedia):
        """Update a fuse group: duration, status, checksum,
        created_at, updated_at, file_size, fail_count.
        """
        params = (
            fuse.duration,
            fuse.status.value,
            fuse.checksum,
            fuse.created_at,
            get_date_now(),  # updated_at refreshed on every update
            fuse.file_size,
            fuse.fail_count,
            fuse.id,
        )
        with self.conn:
            self.conn.execute(queries.UPDATE_FUSE, params)
        UI.debug(f"Updated fuse {fuse.id} / {fuse.file_path}")
@@ -0,0 +1,97 @@
1
# core/db_queries.py
"""
Hold SQL STRING
"""

# Medias table: one row per downloadable media file.
# UNIQUE(post_id, url) makes INSERT OR IGNORE idempotent per media.
CREATE_MEDIAS_TABLE = """
CREATE TABLE IF NOT EXISTS medias (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    post_id TEXT,
    service TEXT,
    url TEXT,
    duration REAL,
    sequence INTEGER,
    status TEXT,
    checksum TEXT,
    file_path TEXT,
    created_at DATETIME,
    updated_at DATETIME,
    file_size INTEGER,
    fail_count INTEGER,
    UNIQUE(post_id, url)
)
"""

# Posts table: one row per fetched post; id is the post's own identifier
# (TEXT primary key), raw_json keeps the original API payload.
CREATE_POSTS_TABLE = """
CREATE TABLE IF NOT EXISTS posts (
    id TEXT PRIMARY KEY,
    user TEXT,
    service TEXT,
    domain TEXT,
    published DATETIME,
    json_hash TEXT,
    raw_json JSON,
    fetched_at DATETIME
)
"""

# Fuses table: one row per multi-part video group to be fused;
# id matches the owning post's id.
CREATE_FUSE_TABLE = """
CREATE TABLE IF NOT EXISTS fuses (
    id TEXT PRIMARY KEY,
    duration INTEGER,
    total_parts INTEGER,
    status TEXT,
    checksum TEXT,
    file_path TEXT,
    created_at DATETIME,
    updated_at DATETIME,
    file_size INTEGER,
    fail_count INTEGER
)
"""

# INSERT OR IGNORE: duplicates (by primary key / UNIQUE constraint) are
# silently skipped, so re-running an import is safe.
INSERT_POST = """
INSERT OR IGNORE INTO posts (
    id, user, service, domain, published,
    json_hash, raw_json, fetched_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
"""

INSERT_FUSED_MEDIA = """
INSERT OR IGNORE INTO fuses (
    id, duration, total_parts, status, checksum,
    file_path, created_at, updated_at, file_size, fail_count
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"""

INSERT_MEDIA = """
INSERT OR IGNORE INTO medias (
    post_id, service, url, duration, sequence, status,
    checksum, file_path, created_at, updated_at, file_size, fail_count
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"""

# Media rows are addressed by their (post_id, url) unique pair.
UPDATE_MEDIA = """
UPDATE medias
SET duration = ?, file_size = ?, checksum = ?, status = ?,
    created_at = ?, updated_at = ?, fail_count = ?
WHERE post_id = ? AND url = ?
"""

# Fuse rows are addressed by their primary key id.
UPDATE_FUSE = """
UPDATE fuses
SET duration = ?, status = ?, checksum = ?,
    created_at = ?, updated_at = ?, file_size = ?,
    fail_count = ?
WHERE id = ?
"""

# Simple parameterized lookups used by rcdl.core.db.DB.
QUERY_POST_ID = "SELECT * FROM posts WHERE id = ?"
QUERY_POST_USER = "SELECT * FROM posts WHERE user = ?"
QUERY_MEDIA_STATUS = "SELECT * FROM medias WHERE status = ?"
QUERY_MEDIA_ID = "SELECT * FROM medias WHERE post_id = ?"
QUERY_FUSES_STATUS = "SELECT * FROM fuses WHERE status = ?"
QUERY_FUSES_ID = "SELECT * FROM fuses WHERE id = ?"
@@ -0,0 +1,307 @@
1
+ # core/downloader.py
2
+
3
+ """
4
+ Handle post, media download to website
5
+ """
6
+
7
+ import logging
8
+ import os
9
+ import json
10
+
11
+ import requests
12
+
13
+ from rcdl.interface.ui import UI, NestedProgress
14
+ from rcdl.core import parser
15
+ from rcdl.core import adapters
16
+ from rcdl.core.api import URL
17
+ from rcdl.core.config import Config
18
+ from rcdl.core.models import (
19
+ Creator,
20
+ Status,
21
+ Media,
22
+ Post,
23
+ FusedMedia,
24
+ FusedStatus,
25
+ )
26
+ from rcdl.core.db import DB
27
+ from rcdl.core.downloader_subprocess import ytdlp_subprocess
28
+ from rcdl.core.file_io import write_json, load_json
29
+ from rcdl.utils import get_date_now, get_media_metadata
30
+
31
+
32
class PostsFetcher:
    """
    Fetch posts from api. Save as JSON. Handle multiple pages requests
    """

    def __init__(
        self, url: str, json_path: str, max_page: int = Config.DEFAULT_MAX_PAGE
    ):
        self.url = url
        self.json_path = json_path

        # Pagination state: the loop stops at max_page or on a non-200 status.
        self.page = 0
        self.max_page = max_page

        self.status = 200

    def _request_page(self, url: str) -> requests.Response:
        """Request a single page and return the raw response."""
        logging.info("RequestEngine url %s", url)
        headers = URL.get_headers()
        response = requests.get(url, headers=headers, timeout=Config.TIMEOUT)
        if response.status_code != 200:
            logging.warning("Failed request %s: %s", url, response.status_code)
        return response

    def request(self, params: dict | None = None):
        """Request multiple pages of self.url, persisting results after each page.

        Results are appended to self.json_path after every page so a crash
        loses at most the current page.
        """
        if params is None:
            params = {}

        with UI.progress_posts_fetcher(self.max_page) as progress:
            task = progress.add_task("Fetching posts", total=self.max_page)

            while self.status == 200 and self.page < self.max_page:
                # "o" is the API's offset parameter.
                params["o"] = self.page * Config.POST_PER_PAGE
                url = URL.add_params(self.url, params)

                try:
                    # Dry run: no request actually made
                    if Config.DRY_RUN:
                        logging.debug(
                            "DRY-RUN posts fetcher %s -> %s", url, self.json_path
                        )
                        # BUGFIX: do not increment self.page here; `continue`
                        # still runs the `finally` below, so the old explicit
                        # increment advanced two pages per dry-run iteration.
                        continue

                    response = self._request_page(url)
                    self.status = response.status_code

                    # If the program crashes while doing requests,
                    # previous requests are still saved and not overwritten.
                    if self.page > 0:
                        json_data = list(load_json(self.json_path))
                    else:
                        json_data = []

                    if self.status == 200:
                        # Parse the body once (the old code parsed it twice).
                        payload = response.json()
                        # For the discover command, the response json is in a
                        # different format and contains 'posts'.
                        if "posts" in payload:
                            json_data.extend(payload["posts"])
                        else:
                            json_data.extend(payload)

                    write_json(self.json_path, json_data, mode="w")

                    progress.update(
                        task,
                        advance=1,
                        description=(
                            f"Fetched {len(json_data)}"
                            f" posts (page {self.page + 1}/{self.max_page})"
                        ),
                    )
                except requests.RequestException as e:
                    # BUGFIX: typo "deu to" in the log message.
                    logging.error(
                        "Failed to request %s (page: %s) due to: %s", url, self.page, e
                    )
                except json.JSONDecodeError as e:
                    logging.error(
                        "Failed to decode JSON response of request %s due to: %s",
                        url,
                        e,
                    )
                finally:
                    # Single place where the page counter advances.
                    self.page += 1
119
+
120
+
121
class MediaDownloader:
    """Handle downloading a list of media and update DB status"""

    def __init__(self):
        # Stateless: everything needed for a download is passed to download().
        pass

    def _build_url(self, domain: str, url: str):
        """Return full url"""
        return URL.get_url_from_file(domain, url)

    def _build_full_path(self, user: str, media_path: str) -> str:
        """Return full path (creator folder joined with the media's relative path)"""
        return os.path.join(Config.creator_folder(user), media_path)

    def _media_exist(self, full_path: str) -> bool:
        """Check a file exist"""
        return os.path.exists(full_path)

    def _update_db(self, result: int, media: Media, full_path: str) -> None:
        """Update db information

        `result` is the downloader subprocess return code: non-zero means
        the download failed and only fail_count is bumped; zero means
        success, so the row is refreshed with on-disk metadata and marked
        DOWNLOADED.
        """

        # video failed to download
        if result != 0:
            media.fail_count += 1
        else:
            duration, file_size, checksum = get_media_metadata(full_path)
            media.duration = duration
            media.status = Status.DOWNLOADED
            media.checksum = checksum
            media.created_at = get_date_now()
            media.file_size = file_size

        with DB() as db:
            db.update_media(media)

    def download(self, medias: list[Media], max_fail_count: int | None = None):
        """Download all medias in media with PENDING stats

        Skips medias over the retry budget, medias whose post cannot be
        found, and files already on disk. Honors Config.DRY_RUN.
        """
        # init progress bar
        progress = NestedProgress(UI.console)
        progress.start(
            total=len(medias),
            total_label="Downloading videos",
            current_label="Current video",
        )

        # max_fail_count, when given, overrides the configured retry budget
        max_try = Config.MAX_FAIL_COUNT
        if max_fail_count is not None:
            max_try = max_fail_count
        for media in medias:
            progress.start_current("Downloading", total=2)
            if media.fail_count > max_try:
                UI.warning(
                    f"Video skipped due to too many failed download attempt ({media.fail_count})"
                )
                progress.advance_total()
                continue

            # match post info from db with post_id to get user/creator_id
            with DB() as db:
                post = db.query_post_by_id(media.post_id)
            if post is None:
                UI.error(f"Could not match media post_id {media.post_id} with a post")
                progress.advance_total()
                continue

            # build full url and full path
            url = self._build_url(post.domain, media.url)
            full_path = self._build_full_path(post.user, media.file_path)

            # update progress bar info (video in download info)
            progress.set_status(f"{post.user}@({post.service}) -> ", media.file_path)

            # check video does not already exist; if it does, record it as
            # downloaded (result 0) since the DB and disk are out of sync
            if self._media_exist(full_path):
                UI.warning(
                    f"Video {url} @ {full_path} already exists. Possible DB problem"
                )
                self._update_db(0, media, full_path)
                progress.advance_total()
                continue

            # dry run: no actual download, skip rest of loop body
            if Config.DRY_RUN:
                UI.debug(f"(dry-run) dl {post.user}@{full_path} from {url}")
                progress.advance_total()
                continue

            # delegate the actual download to yt-dlp; 0 means success
            result = ytdlp_subprocess(url, full_path)
            self._update_db(result, media, full_path)
            progress.advance_total()
        progress.close()
212
+
213
+
214
def fetch_posts_by_tag(tag: str, max_page: int = Config.DEFAULT_MAX_PAGE) -> dict:
    """Fetch every post matching *tag* and return the cached JSON payload."""
    cache_path = Config.cache_file(tag)
    fetcher = PostsFetcher(
        URL.get_posts_page_url_wo_param(), str(cache_path), max_page=max_page
    )
    fetcher.request(params={"tag": tag})
    return load_json(cache_path)
222
+
223
+
224
def fetch_posts_by_creator(creator: Creator) -> dict:
    """Fetch every post of *creator* and return the cached JSON payload."""
    cache_path = Config.cache_file(f"{creator.id}_{creator.service}")
    fetcher = PostsFetcher(URL.get_creator_post_wo_param(creator), str(cache_path))
    fetcher.request()
    return load_json(cache_path)
232
+
233
+
234
def get_fuses_from_post(posts: list[Post]) -> list[FusedMedia]:
    """Build a FusedMedia entry for every post whose JSON lists more than one video part."""
    result: list[FusedMedia] = []
    for entry in posts:
        # Count the video urls embedded in the post's raw JSON payload.
        parts = len(parser.extract_video_urls(json.loads(entry.raw_json)))
        if parts <= 1:
            # Single-part videos never need fusing.
            continue
        result.append(
            FusedMedia(
                id=entry.id,
                duration=0,
                total_parts=parts,
                status=FusedStatus.PENDING,
                checksum="",
                file_path=parser.get_filename_fuse(entry),
                created_at="",
                updated_at="",
                file_size=0,
                fail_count=0,
            )
        )
    return result
256
+
257
+
258
def refresh_creators_videos():
    """
    For each creator:
    - get posts with videos & update posts DB
    - extract all medias & update medias DB
    - extract fuses group & update fuses DB
    """
    for creator in parser.get_creators():
        UI.info(f"Creator {creator.id} from {creator.service}")

        # Pull every post for this creator into the JSON cache.
        fetch_posts_by_creator(creator)

        # Keep only the posts that reference a video url (mp4, m4v, ...).
        cache_path = str(Config.cache_file(f"{creator.id}_{creator.service}"))
        json_posts = parser.filter_posts_with_videos_from_json(cache_path)

        # Turn raw JSON dicts into Post models and persist them.
        posts = adapters.json_posts_to_posts(json_posts)
        with DB() as db:
            db.insert_posts(posts)

        # Record multi-part videos that will need fusing later.
        with DB() as db:
            db.insert_fused_media(get_fuses_from_post(posts))

        # Expand each post into its individual media entries and persist them.
        medias = []
        for post in posts:
            medias.extend(adapters.post_to_videos(post))
        with DB() as db:
            db.insert_medias(medias)
297
+
298
+
299
def download_videos_to_be_dl(max_fail_count: int | None):
    """
    Download every media row whose status is PENDING in the DB.
    """
    # Collect the pending work, then release the connection before
    # the (potentially long) download loop starts.
    with DB() as db:
        pending = db.query_media_by_status(Status.PENDING)

    MediaDownloader().download(pending, max_fail_count=max_fail_count)