plex-generate-previews 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plex_generate_previews/__init__.py +10 -0
- plex_generate_previews/__main__.py +11 -0
- plex_generate_previews/cli.py +474 -0
- plex_generate_previews/config.py +479 -0
- plex_generate_previews/gpu_detection.py +541 -0
- plex_generate_previews/media_processing.py +439 -0
- plex_generate_previews/plex_client.py +211 -0
- plex_generate_previews/utils.py +135 -0
- plex_generate_previews/version_check.py +178 -0
- plex_generate_previews/worker.py +478 -0
- plex_generate_previews-2.0.0.dist-info/METADATA +728 -0
- plex_generate_previews-2.0.0.dist-info/RECORD +15 -0
- plex_generate_previews-2.0.0.dist-info/WHEEL +5 -0
- plex_generate_previews-2.0.0.dist-info/entry_points.txt +2 -0
- plex_generate_previews-2.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,439 @@
|
|
1
|
+
"""
|
2
|
+
Media processing functions for video thumbnail generation.
|
3
|
+
|
4
|
+
Handles FFmpeg execution, BIF file generation, and all media processing
|
5
|
+
logic including HDR detection, skip frame heuristics, and GPU acceleration.
|
6
|
+
"""
|
7
|
+
|
8
|
+
import os
|
9
|
+
import re
|
10
|
+
import struct
|
11
|
+
import array
|
12
|
+
import glob
|
13
|
+
import time
|
14
|
+
import subprocess
|
15
|
+
import shutil
|
16
|
+
import sys
|
17
|
+
import http.client
|
18
|
+
import xml.etree.ElementTree
|
19
|
+
from typing import Optional
|
20
|
+
from loguru import logger
|
21
|
+
|
22
|
+
from .utils import sanitize_path
|
23
|
+
|
24
|
+
try:
|
25
|
+
from pymediainfo import MediaInfo
|
26
|
+
except ImportError:
|
27
|
+
print('MediaInfo not found. MediaInfo must be installed and available in PATH.')
|
28
|
+
sys.exit(1)
|
29
|
+
|
30
|
+
from .config import Config
|
31
|
+
from .plex_client import retry_plex_call
|
32
|
+
|
33
|
+
|
34
|
+
def parse_ffmpeg_progress_line(line: str, total_duration: float, progress_callback=None):
    """
    Parse a single FFmpeg progress line and call progress callback if provided.

    Args:
        line: FFmpeg output line to parse
        total_duration: Total video duration in seconds (None until a
            ``Duration:`` line has been seen)
        progress_callback: Callback function for progress updates

    Returns:
        The (possibly updated) total duration in seconds.
    """
    # Parse duration
    if 'Duration:' in line:
        duration_match = re.search(r'Duration: (\d{2}):(\d{2}):(\d{2}\.\d{2})', line)
        if duration_match:
            hours, minutes, seconds = duration_match.groups()
            return int(hours) * 3600 + int(minutes) * 60 + float(seconds)
        return total_duration

    # Parse FFmpeg progress line with all data
    elif 'time=' in line:
        # Extract all FFmpeg data fields
        frame_match = re.search(r'frame=\s*(\d+)', line)
        fps_match = re.search(r'fps=\s*([0-9.]+)', line)
        q_match = re.search(r'q=([0-9.]+)', line)
        size_match = re.search(r'size=\s*(\d+)kB', line)
        time_match = re.search(r'time=(\d{2}):(\d{2}):(\d{2}\.\d{2})', line)
        bitrate_match = re.search(r'bitrate=\s*([0-9.]+)kbits/s', line)
        speed_match = re.search(r'speed=\s*([0-9]+\.?[0-9]*|\.[0-9]+)x', line)

        # Extract values (defaults when a field is absent from the line)
        frame = int(frame_match.group(1)) if frame_match else 0
        fps = float(fps_match.group(1)) if fps_match else 0
        q = float(q_match.group(1)) if q_match else 0
        size = int(size_match.group(1)) if size_match else 0
        bitrate = float(bitrate_match.group(1)) if bitrate_match else 0
        speed = speed_match.group(1) + "x" if speed_match else "0.0x"

        if time_match:
            hours, minutes, seconds = time_match.groups()
            current_time = int(hours) * 3600 + int(minutes) * 60 + float(seconds)
            time_str = f"{hours}:{minutes}:{seconds}"

            # Update progress
            progress_percent = 0
            if total_duration and total_duration > 0:
                progress_percent = min(100, int((current_time / total_duration) * 100))

            # Calculate remaining time from FFmpeg data
            remaining_time = 0
            if total_duration and total_duration > 0 and current_time < total_duration:
                remaining_time = total_duration - current_time

            # Call progress callback with all FFmpeg data
            if progress_callback:
                progress_callback(progress_percent, current_time, total_duration, speed,
                                  remaining_time, frame, fps, q, size, time_str, bitrate)

        return total_duration

    # Bug fix: lines matching neither pattern (codec/stream metadata, filter
    # banners, etc.) previously fell off the end of the function and returned
    # None, wiping out the duration the caller had already accumulated via
    # `total_duration = parse_ffmpeg_progress_line(...)`. Preserve it instead.
    return total_duration
91
|
+
|
92
|
+
|
93
|
+
def heuristic_allows_skip(ffmpeg_path: str, video_file: str) -> bool:
    """
    Probe the first 10 frames of the file to decide if ``-skip_frame:v nokey``
    is safe for a full decode pass.

    Runs FFmpeg with ``-err_detect explode`` plus ``-xerror`` so the probe
    exits non-zero on the first decode error instead of limping along.

    Returns:
        True when the probe decodes cleanly, False otherwise. On failure the
        last stderr line is logged at debug level.
    """
    sink = "NUL" if os.name == "nt" else "/dev/null"
    probe_cmd = [
        ffmpeg_path,
        "-hide_banner", "-nostats",
        "-v", "error",                 # only errors
        "-xerror",                     # make errors set non-zero exit
        "-err_detect", "explode",      # fail fast on decode issues
        "-skip_frame:v", "nokey",
        "-threads:v", "1",
        "-i", video_file,
        "-an", "-sn", "-dn",
        "-frames:v", "10",             # bounded probe: at most 10 frames
        "-f", "null", sink,
    ]
    result = subprocess.run(probe_cmd, stdout=subprocess.DEVNULL,
                            stderr=subprocess.PIPE, text=True)
    if result.returncode != 0:
        # Keep only the last stderr line for a compact diagnostic.
        last = (result.stderr or "").strip().splitlines()[-1:]
        logger.debug(f"skip_frame probe FAILED at 0s: rc={result.returncode} msg={last}")
        return False
    logger.debug("skip_frame probe OK at 0s")
    return True
|
121
|
+
|
122
|
+
|
123
|
+
def _consume_ffmpeg_log(output_file, line_count, collected, total_duration, progress_callback):
    """Read any new lines from the FFmpeg log file.

    Appends stripped non-empty lines to *collected* and feeds each one through
    parse_ffmpeg_progress_line. Returns the updated (line_count, total_duration).
    """
    if os.path.exists(output_file):
        with open(output_file, 'r') as f:
            lines = f.readlines()
        for raw in lines[line_count:]:
            stripped = raw.strip()
            if stripped:
                collected.append(stripped)  # keep everything for debugging
                total_duration = parse_ffmpeg_progress_line(stripped, total_duration, progress_callback)
        line_count = len(lines)
    return line_count, total_duration


def generate_images(video_file: str, output_folder: str, gpu: Optional[str],
                    gpu_device_path: Optional[str], config: Config, progress_callback=None) -> None:
    """
    Generate thumbnail images from video file using FFmpeg.

    Args:
        video_file: Path to input video file
        output_folder: Directory to save thumbnail images
        gpu: GPU type ('NVIDIA', 'AMD', 'INTEL', 'WSL2', or None)
        gpu_device_path: GPU device path for VAAPI
        config: Configuration object
        progress_callback: Callback function for progress updates
    """
    import tempfile

    media_info = MediaInfo.parse(video_file)
    fps_value = round(1 / config.plex_bif_frame_interval, 6)
    vf_parameters = f"fps=fps={fps_value}:round=up,scale=w=320:h=240:force_original_aspect_ratio=decrease"

    # Check if we have a HDR Format. Note: Sometimes it can be returned as
    # "None" (string) hence the check for None type or "None" (String)
    if media_info.video_tracks:
        if media_info.video_tracks[0].hdr_format != "None" and media_info.video_tracks[0].hdr_format is not None:
            vf_parameters = f"fps=fps={fps_value}:round=up,zscale=t=linear:npl=100,format=gbrpf32le,zscale=p=bt709,tonemap=tonemap=hable:desat=0,zscale=t=bt709:m=bt709:r=tv,format=yuv420p,scale=w=320:h=240:force_original_aspect_ratio=decrease"

    args = [
        config.ffmpeg_path, "-loglevel", "info",
        "-threads:v", "1",  # fix: was '-threads:0 1'
    ]

    use_skip = heuristic_allows_skip(config.ffmpeg_path, video_file)
    if use_skip:
        args += ["-skip_frame:v", "nokey"]

    args += [
        "-i", video_file, "-an", "-sn", "-dn",
        "-q:v", str(config.thumbnail_quality),
        "-vf", vf_parameters,
        f'{output_folder}/img-%06d.jpg'
    ]

    start = time.time()
    hw = False

    # Determine GPU usage - if gpu is set, use GPU
    use_gpu = gpu is not None

    # Apply GPU acceleration if using GPU. The hwaccel flags are inserted at
    # index 5 so they land after the fixed prefix but before "-i".
    if use_gpu:
        hw = True

        if gpu == 'NVIDIA':
            args.insert(5, "-hwaccel")
            args.insert(6, "cuda")
        elif gpu == 'WSL2':
            args.insert(5, "-hwaccel")
            args.insert(6, "d3d11va")
        else:
            # AMD or Intel VAAPI
            args.insert(5, "-hwaccel")
            args.insert(6, "vaapi")
            args.insert(7, "-vaapi_device")
            args.insert(8, gpu_device_path)
            if gpu == 'INTEL':
                vf_parameters = vf_parameters.replace(
                    "scale=w=320:h=240:force_original_aspect_ratio=decrease",
                    "format=nv12,hwupload,scale_vaapi=w=320:h=240:force_original_aspect_ratio=decrease,hwdownload,format=nv12")
            else:
                # Adjust vf_parameters for AMD VAAPI
                vf_parameters = vf_parameters.replace(
                    "scale=w=320:h=240:force_original_aspect_ratio=decrease",
                    "format=nv12|vaapi,hwupload,scale_vaapi=w=320:h=240:force_original_aspect_ratio=decrease")

            args[args.index("-vf") + 1] = vf_parameters

    logger.debug(f'Executing: {" ".join(args)}')

    # Use file polling approach for non-blocking, high-frequency progress
    # monitoring. This is faster than subprocess.PIPE which would block on
    # readline() calls.
    # Fix: was hard-coded to '/tmp', which does not exist on Windows; the
    # platform temp dir keeps behavior identical on POSIX.
    output_file = os.path.join(tempfile.gettempdir(),
                               f'ffmpeg_output_{os.getpid()}_{int(time.time())}.log')

    # Fix: the stderr log handle was opened inline and leaked; keep a
    # reference and close it once FFmpeg has exited.
    stderr_handle = open(output_file, 'w')
    try:
        proc = subprocess.Popen(args, stderr=stderr_handle, stdout=subprocess.DEVNULL)

        # Signal that FFmpeg process has started
        if progress_callback:
            progress_callback(0, 0, 0, "0.0x")

        # Track progress
        total_duration = None
        # NOTE(review): this local is only the initial placeholder — real-time
        # speed is delivered through progress_callback, so the final log below
        # reports the placeholder unless FFmpeg never emitted progress.
        speed = "0.0x"
        ffmpeg_output_lines = []  # Store all FFmpeg output for debugging
        line_count = 0

        # Allow time for it to start
        time.sleep(0.05)

        # Parse FFmpeg output using file polling (much faster)
        while proc.poll() is None:
            line_count, total_duration = _consume_ffmpeg_log(
                output_file, line_count, ffmpeg_output_lines, total_duration, progress_callback)
            time.sleep(0.005)  # Poll every 5ms for very responsive updates

        # Process any remaining data in the output file
        line_count, total_duration = _consume_ffmpeg_log(
            output_file, line_count, ffmpeg_output_lines, total_duration, progress_callback)
    finally:
        stderr_handle.close()

    # Clean up the output file
    try:
        os.remove(output_file)
    except OSError:
        pass

    # Check for errors
    if proc.returncode != 0:
        logger.error(f'FFmpeg failed with return code {proc.returncode} for {video_file}')
        # Fix: previously reached into loguru internals
        # (logger._core.min_level) to gate these lines; logger.debug() is
        # already filtered by the configured sink level, so just emit them.
        logger.debug(f"FFmpeg output ({len(ffmpeg_output_lines)} lines):")
        for i, line in enumerate(ffmpeg_output_lines[-10:]):  # Show last 10 lines only
            logger.debug(f"  {i+1:3d}: {line}")

    # Final timing
    end = time.time()
    seconds = round(end - start, 1)

    # Optimize and Rename Images: img-000001.jpg -> <seconds-offset>.jpg
    # Fix: replaced the fragile str.strip('-img')/strip('.jpg') char-set
    # trick with an explicit digit extraction.
    for image in glob.glob(f'{output_folder}/img*.jpg'):
        frame_no = int(re.search(r'(\d+)', os.path.basename(image)).group(1)) - 1
        frame_second = frame_no * config.plex_bif_frame_interval
        os.rename(image, os.path.join(output_folder, f'{frame_second:010d}.jpg'))

    # Fix: message previously read 'SPEED={speed}x' — speed already carries
    # the 'x' suffix, which doubled it in the log.
    logger.info(f'Generated Video Preview for {video_file} HW={hw} TIME={seconds}seconds SPEED={speed}')
|
275
|
+
|
276
|
+
|
277
|
+
def generate_bif(bif_filename: str, images_path: str, config: Config) -> None:
    """
    Build a .bif file from thumbnail images.

    Writes the standard BIF layout: 8-byte magic, version, image count,
    frame-interval in milliseconds, zero padding up to byte 64, then the
    (timestamp, offset) index table terminated by 0xffffffff, followed by the
    raw JPEG payloads.

    Args:
        bif_filename: Path to output .bif file
        images_path: Directory containing .jpg thumbnail images
        config: Configuration object
    """
    BIF_MAGIC = [0x89, 0x42, 0x49, 0x46, 0x0d, 0x0a, 0x1a, 0x0a]
    BIF_VERSION = 0

    # Zero-padded filenames sort lexicographically into playback order.
    thumbs = sorted(name for name in os.listdir(images_path)
                    if os.path.splitext(name)[1] == '.jpg')

    with open(bif_filename, "wb") as out:
        array.array('B', BIF_MAGIC).tofile(out)
        out.write(struct.pack("<I", BIF_VERSION))
        out.write(struct.pack("<I", len(thumbs)))
        out.write(struct.pack("<I", 1000 * config.plex_bif_frame_interval))
        # Pad the header out to 64 bytes (bytes 20..63).
        array.array('B', [0x00 for _ in range(20, 64)]).tofile(out)

        # Index table: one (timestamp, byte-offset) pair per image, where the
        # first image starts right after the table.
        table_size = 8 + (8 * len(thumbs))
        offset = 64 + table_size
        for stamp, name in enumerate(thumbs):
            out.write(struct.pack("<I", stamp))
            out.write(struct.pack("<I", offset))
            offset += os.stat(os.path.join(images_path, name)).st_size

        # Terminator entry: sentinel timestamp plus total data end offset.
        out.write(struct.pack("<I", 0xffffffff))
        out.write(struct.pack("<I", offset))

        # Append the JPEG payloads in the same order as the table.
        for name in thumbs:
            with open(os.path.join(images_path, name), "rb") as img:
                out.write(img.read())
    logger.debug(f'Generated BIF file: {bif_filename}')
|
320
|
+
|
321
|
+
|
322
|
+
def process_item(item_key: str, gpu: Optional[str], gpu_device_path: Optional[str],
                 config: Config, plex, progress_callback=None) -> None:
    """
    Process a single media item: generate thumbnails and BIF file.

    This is the core processing function that handles:
    - Plex API queries
    - Path mapping for remote generation
    - Bundle hash generation
    - Plex directory structure creation
    - Thumbnail generation with FFmpeg
    - BIF file creation
    - Cleanup

    Args:
        item_key: Plex media item key
        gpu: GPU type for acceleration
        gpu_device_path: GPU device path
        config: Configuration object
        plex: Plex server instance
        progress_callback: Callback function for progress updates
    """
    try:
        data = retry_plex_call(plex.query, f'{item_key}/tree')
    except Exception as e:
        # Fix: the original had two handlers — the first caught
        # (Exception, BadStatusLine, ParseError), which already includes
        # everything, so the second generic `except Exception` was dead code.
        # Collapsed into a single handler with the same logging.
        logger.error(f"Failed to query Plex for item {item_key} after retries: {e}")
        logger.error(f"Exception type: {type(e).__name__}")
        # For connection errors, log more details
        if hasattr(e, 'request') and e.request:
            logger.error(f"Request URL: {e.request.url}")
            logger.error(f"Request method: {e.request.method}")
            logger.error(f"Request headers: {e.request.headers}")
        return

    for media_part in data.findall('.//MediaPart'):
        # Parts without a hash have no bundle directory; nothing to generate.
        if 'hash' in media_part.attrib:
            bundle_hash = media_part.attrib['hash']
            # Apply path mapping if both mapping parameters are provided (for remote generation)
            if config.plex_videos_path_mapping and config.plex_local_videos_path_mapping:
                media_file = sanitize_path(media_part.attrib['file'].replace(
                    config.plex_videos_path_mapping, config.plex_local_videos_path_mapping))
            else:
                # Use file path directly (for local generation)
                media_file = sanitize_path(media_part.attrib['file'])

            if not os.path.isfile(media_file):
                logger.warning(f'Skipping as file not found {media_file}')
                continue

            try:
                # Plex shards bundles by the first hash character.
                # Fix: `bundle_hash[1::1]` was an obscure spelling of `[1:]`.
                bundle_file = sanitize_path(f'{bundle_hash[0]}/{bundle_hash[1:]}.bundle')
            except Exception as e:
                logger.error(f'Error generating bundle_file for {media_file} due to {type(e).__name__}:{str(e)}')
                continue

            bundle_path = sanitize_path(os.path.join(config.plex_config_folder, 'Media', 'localhost', bundle_file))
            indexes_path = sanitize_path(os.path.join(bundle_path, 'Contents', 'Indexes'))
            index_bif = sanitize_path(os.path.join(indexes_path, 'index-sd.bif'))
            tmp_path = sanitize_path(os.path.join(config.tmp_folder, bundle_hash))

            # When regeneration is requested, remove the existing index so
            # the "missing BIF" branch below rebuilds it.
            if os.path.isfile(index_bif) and config.regenerate_thumbnails:
                logger.debug(f'Found existing thumbnails for {media_file}, deleting the thumbnail index at {index_bif} so we can regenerate')
                try:
                    os.remove(index_bif)
                    logger.debug(f'Successfully deleted existing BIF file: {index_bif}')
                except Exception as e:
                    logger.error(f'Error {type(e).__name__} deleting index file {media_file}: {str(e)}')
                    continue

            if not os.path.isfile(index_bif):
                logger.debug(f'Generating bundle_file for {media_file} at {index_bif}')

                if not os.path.isdir(indexes_path):
                    try:
                        os.makedirs(indexes_path)
                    except OSError as e:
                        logger.error(f'Error generating images for {media_file}. `{type(e).__name__}:{str(e)}` error when creating index path {indexes_path}')
                        continue

                try:
                    if not os.path.isdir(tmp_path):
                        os.makedirs(tmp_path)
                except OSError as e:
                    logger.error(f'Error generating images for {media_file}. `{type(e).__name__}:{str(e)}` error when creating tmp path {tmp_path}')
                    continue

                try:
                    generate_images(media_file, tmp_path, gpu, gpu_device_path, config, progress_callback)
                except Exception as e:
                    logger.error(f'Error generating images for {media_file}. `{type(e).__name__}:{str(e)}` error when generating images')
                    # Clean up temp directory on error
                    try:
                        if os.path.exists(tmp_path):
                            shutil.rmtree(tmp_path)
                    except Exception as cleanup_error:
                        logger.warning(f"Failed to clean up temp directory {tmp_path}: {cleanup_error}")
                    continue

                try:
                    generate_bif(index_bif, tmp_path, config)
                except Exception as e:
                    # Remove bif, as it prob failed to generate
                    try:
                        if os.path.exists(index_bif):
                            os.remove(index_bif)
                    except Exception as remove_error:
                        logger.warning(f"Failed to remove failed BIF file {index_bif}: {remove_error}")
                    logger.error(f'Error generating images for {media_file}. `{type(e).__name__}:{str(e)}` error when generating bif')
                    continue
                finally:
                    # Always clean up temp directory
                    try:
                        if os.path.exists(tmp_path):
                            shutil.rmtree(tmp_path)
                    except Exception as cleanup_error:
                        logger.warning(f"Failed to clean up temp directory {tmp_path}: {cleanup_error}")
|
@@ -0,0 +1,211 @@
|
|
1
|
+
"""
|
2
|
+
Plex Media Server client and API interactions.
|
3
|
+
|
4
|
+
Handles Plex server connection, XML parsing monkey patch for debugging,
|
5
|
+
library querying, and duplicate location filtering.
|
6
|
+
"""
|
7
|
+
|
8
|
+
import os
|
9
|
+
import time
|
10
|
+
import http.client
|
11
|
+
import xml.etree.ElementTree
|
12
|
+
import requests
|
13
|
+
import urllib3
|
14
|
+
from requests.adapters import HTTPAdapter
|
15
|
+
from urllib3.util.retry import Retry
|
16
|
+
from loguru import logger
|
17
|
+
|
18
|
+
# Disable SSL warnings
|
19
|
+
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
20
|
+
|
21
|
+
from .config import Config
|
22
|
+
|
23
|
+
|
24
|
+
def retry_plex_call(func, *args, max_retries=3, retry_delay=1.0, **kwargs):
    """
    Retry a Plex API call if it fails due to XML parsing errors.

    This handles cases where Plex returns incomplete XML due to being busy.
    Any other exception type is propagated immediately without a retry.

    Args:
        func: Function to call
        *args: Positional arguments for the function
        max_retries: Maximum number of retries (default: 3)
        retry_delay: Delay between retries in seconds (default: 1.0)
        **kwargs: Keyword arguments for the function

    Returns:
        Result of the function call

    Raises:
        Exception: If all retries fail
    """
    last_exception = None

    for attempt in range(max_retries + 1):
        try:
            return func(*args, **kwargs)
        except xml.etree.ElementTree.ParseError as parse_err:
            last_exception = parse_err
            if attempt >= max_retries:
                # Exhausted all attempts — report and fall through to raise.
                logger.error(f"XML parsing failed after {max_retries + 1} attempts: {parse_err}")
                logger.debug(f"Raw XML content (first 2000 chars):")
                # Try to get the raw XML for debugging
                try:
                    # This is a bit of a hack - we'll log what we can
                    logger.debug("XML content not available for debugging in retry wrapper")
                except:
                    pass
            else:
                logger.warning(f"XML parsing error on attempt {attempt + 1}/{max_retries + 1}: {parse_err}")
                logger.info(f"Retrying in {retry_delay} seconds... (Plex may be busy)")
                time.sleep(retry_delay)
                retry_delay *= 1.5  # Exponential backoff

    # Every attempt hit a ParseError; surface the most recent one.
    raise last_exception
|
70
|
+
|
71
|
+
|
72
|
+
def plex_server(config: Config):
    """
    Create Plex server connection with retry strategy and XML debugging.

    Args:
        config: Configuration object

    Returns:
        PlexServer: Configured Plex server instance

    Raises:
        ConnectionError: If unable to connect to Plex server
        requests.exceptions.RequestException: If connection fails after retries
    """
    # Session with automatic retries on transient 5xx responses.
    retry_policy = Retry(
        total=3,
        backoff_factor=0.3,
        status_forcelist=[500, 502, 503, 504],
    )
    http_adapter = HTTPAdapter(max_retries=retry_policy)
    session = requests.Session()
    # Self-signed certs are common on local Plex installs.
    session.verify = False
    for scheme in ("http://", "https://"):
        session.mount(scheme, http_adapter)

    # Imported lazily so module import does not require plexapi.
    from plexapi.server import PlexServer
    try:
        logger.info(f"Connecting to Plex server at {config.plex_url}...")
        server = PlexServer(config.plex_url, config.plex_token,
                            timeout=config.plex_timeout, session=session)
    except (requests.exceptions.ConnectionError, requests.exceptions.ConnectTimeout,
            requests.exceptions.ReadTimeout, requests.exceptions.RequestException) as e:
        logger.error(f"Failed to connect to Plex server at {config.plex_url}")
        logger.error(f"Connection error: {e}")
        logger.error("Please check:")
        logger.error(" - Plex server is running and accessible")
        logger.error(" - Plex URL is correct (including http:// or https://)")
        logger.error(" - Network connectivity to Plex server")
        logger.error(" - Firewall settings allow connections to port 32400")
        raise ConnectionError(f"Unable to connect to Plex server at {config.plex_url}: {e}") from e
    logger.info("Successfully connected to Plex server")
    return server
|
115
|
+
|
116
|
+
|
117
|
+
def filter_duplicate_locations(media_items):
    """
    Filter out duplicate media items based on file locations.

    This function prevents processing the same video file multiple times
    when it appears in multiple episodes (common with multi-part episodes).
    The first item to claim a location wins; later items sharing any of its
    locations are dropped.

    Args:
        media_items: List of tuples (key, locations, title, media_type)

    Returns:
        list: Filtered list of tuples (key, title, media_type) without duplicates
    """
    known_paths = set()
    unique_items = []

    for key, locations, title, media_type in media_items:
        # Drop the item if any of its file locations was already claimed.
        if not any(loc in known_paths for loc in locations):
            known_paths.update(locations)
            # Locations are intentionally dropped from the result tuple.
            unique_items.append((key, title, media_type))

    return unique_items
|
144
|
+
|
145
|
+
|
146
|
+
def get_library_sections(plex, config: Config):
    """
    Get all library sections from Plex server.

    Args:
        plex: Plex server instance
        config: Configuration object

    Yields:
        tuple: (section, media_items) for each library, where media_items is a
        list of (key, title, media_type) tuples.
    """
    # Fix: removed the redundant function-local `import time` — the module
    # already imports time at the top level.

    # Step 1: Get all library sections (1 API call)
    logger.info("Getting all Plex library sections...")
    start_time = time.time()

    try:
        sections = retry_plex_call(plex.library.sections)
    except (requests.exceptions.RequestException, http.client.BadStatusLine, xml.etree.ElementTree.ParseError) as e:
        logger.error(f"Failed to get Plex library sections after retries: {e}")
        logger.error(f"Exception type: {type(e).__name__}")
        logger.error("Cannot proceed without library access. Please check your Plex server status.")
        return

    sections_time = time.time() - start_time
    logger.info(f"Retrieved {len(sections)} library sections in {sections_time:.2f} seconds")

    # Step 2: Filter and process each library
    for section in sections:
        # Skip libraries that aren't in the PLEX_LIBRARIES list if it's not empty
        if config.plex_libraries and section.title.lower() not in config.plex_libraries:
            logger.info('Skipping library \'{}\' as it\'s not in the configured libraries list'.format(section.title))
            continue

        logger.info('Getting media files from library \'{}\'...'.format(section.title))
        library_start_time = time.time()

        try:
            if section.METADATA_TYPE == 'episode':
                # Get episodes with locations for duplicate filtering
                search_results = retry_plex_call(section.search, libtype='episode')
                media_with_locations = []
                for m in search_results:
                    # Format episode title as "Show Title S01E01"
                    show_title = m.grandparentTitle
                    season_episode = m.seasonEpisode.upper()
                    formatted_title = f"{show_title} {season_episode}"
                    media_with_locations.append((m.key, m.locations, formatted_title, 'episode'))
                # Filter out multi episode files based on file locations
                media = filter_duplicate_locations(media_with_locations)
            elif section.METADATA_TYPE == 'movie':
                search_results = retry_plex_call(section.search)
                media = [(m.key, m.title, 'movie') for m in search_results]
            else:
                logger.info('Skipping library {} as \'{}\' is unsupported'.format(section.title, section.METADATA_TYPE))
                continue
        except (requests.exceptions.RequestException, http.client.BadStatusLine, xml.etree.ElementTree.ParseError) as e:
            logger.error(f"Failed to search library '{section.title}' after retries: {e}")
            logger.error(f"Exception type: {type(e).__name__}")
            logger.warning(f"Skipping library '{section.title}' due to error")
            continue

        library_time = time.time() - library_start_time
        logger.info('Retrieved {} media files from library \'{}\' in {:.2f} seconds'.format(len(media), section.title, library_time))
        yield section, media
|