TonieToolbox 0.3.0__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -54,7 +54,7 @@ def check_identification_header(page):
54
54
  page: OggPage to check
55
55
 
56
56
  Raises:
57
- AssertionError: If the header is invalid or unsupported
57
+ RuntimeError: If the header is invalid or unsupported
58
58
  """
59
59
  segment = page.segments[0]
60
60
  unpacked = struct.unpack("<8sBBHLH", segment.data[0:18])
@@ -62,19 +62,19 @@ def check_identification_header(page):
62
62
 
63
63
  if unpacked[0] != b"OpusHead":
64
64
  logger.error("Invalid opus file: OpusHead signature not found")
65
- assert unpacked[0] == b"OpusHead", "Invalid opus file?"
65
+ raise RuntimeError("Invalid opus file: OpusHead signature not found")
66
66
 
67
67
  if unpacked[1] != 1:
68
68
  logger.error("Invalid opus file: Version mismatch")
69
- assert unpacked[1] == 1, "Invalid opus file?"
69
+ raise RuntimeError("Invalid opus file: Opus version mismatch")
70
70
 
71
71
  if unpacked[2] != 2:
72
72
  logger.error("Only stereo tracks are supported, found channel count: %d", unpacked[2])
73
- assert unpacked[2] == 2, "Only stereo tracks are supported"
73
+ raise RuntimeError(f"Only stereo tracks (2 channels) are supported. Found {unpacked[2]} channel(s). Please convert your audio to stereo format.")
74
74
 
75
75
  if unpacked[4] != SAMPLE_RATE_KHZ * 1000:
76
76
  logger.error("Sample rate needs to be 48 kHz, found: %d Hz", unpacked[4])
77
- assert unpacked[4] == SAMPLE_RATE_KHZ * 1000, "Sample rate needs to be 48 kHz"
77
+ raise RuntimeError(f"Sample rate needs to be 48 kHz. Found {unpacked[4]} Hz.")
78
78
 
79
79
  logger.debug("Opus identification header is valid")
80
80
 
@@ -223,21 +223,26 @@ def skip_first_two_pages(in_file):
223
223
  in_file: Input file handle
224
224
 
225
225
  Raises:
226
- RuntimeError: If OGG pages cannot be found
226
+ RuntimeError: If OGG pages cannot be found or are invalid
227
227
  """
228
228
  logger.debug("Skipping first two pages")
229
229
  found = OggPage.seek_to_page_header(in_file)
230
230
  if not found:
231
231
  logger.error("First OGG page not found in input file")
232
- raise RuntimeError("First ogg page not found")
232
+ raise RuntimeError("First OGG page not found in input file")
233
233
 
234
- page = OggPage(in_file)
235
- check_identification_header(page)
234
+ try:
235
+ page = OggPage(in_file)
236
+ check_identification_header(page)
237
+ except RuntimeError as e:
238
+ # The check_identification_header function already logs errors
239
+ # Just re-raise with the same message
240
+ raise RuntimeError(str(e))
236
241
 
237
242
  found = OggPage.seek_to_page_header(in_file)
238
243
  if not found:
239
244
  logger.error("Second OGG page not found in input file")
240
- raise RuntimeError("Second ogg page not found")
245
+ raise RuntimeError("Second OGG page not found in input file")
241
246
 
242
247
  OggPage(in_file)
243
248
  logger.debug("First two pages skipped successfully")
@@ -391,6 +396,11 @@ def create_tonie_file(output_file, input_files, no_tonie_header=False, user_time
391
396
  """
392
397
  from .audio_conversion import get_opus_tempfile
393
398
 
399
+ logger.trace("Entering create_tonie_file(output_file=%s, input_files=%s, no_tonie_header=%s, user_timestamp=%s, "
400
+ "bitrate=%d, vbr=%s, ffmpeg_binary=%s, opus_binary=%s, keep_temp=%s, auto_download=%s, use_custom_tags=%s)",
401
+ output_file, input_files, no_tonie_header, user_timestamp, bitrate, vbr, ffmpeg_binary,
402
+ opus_binary, keep_temp, auto_download, use_custom_tags)
403
+
394
404
  logger.info("Creating Tonie file from %d input files", len(input_files))
395
405
  logger.debug("Output file: %s, Bitrate: %d kbps, VBR: %s, No header: %s",
396
406
  output_file, bitrate, vbr, no_tonie_header)
@@ -473,6 +483,7 @@ def create_tonie_file(output_file, input_files, no_tonie_header=False, user_time
473
483
 
474
484
  logger.debug("Reading remaining pages from file")
475
485
  pages = read_all_remaining_pages(handle)
486
+ logger.debug("Read %d pages from file", len(pages))
476
487
 
477
488
  if template_page is None:
478
489
  template_page = OggPage.from_page(pages[0])
@@ -489,8 +500,10 @@ def create_tonie_file(output_file, input_files, no_tonie_header=False, user_time
489
500
  logger.debug("Resizing pages for track %d", index + 1)
490
501
  new_pages = resize_pages(pages, max_size, other_size, template_page,
491
502
  total_granule, next_page_no, last_track)
503
+ logger.debug("Resized to %d pages for track %d", len(new_pages), index + 1)
492
504
 
493
- for new_page in new_pages:
505
+ for i, new_page in enumerate(new_pages):
506
+ logger.trace("Writing page %d/%d (page number: %d)", i+1, len(new_pages), new_page.page_no)
494
507
  new_page.write_page(out_file, sha1)
495
508
 
496
509
  last_page = new_pages[len(new_pages) - 1]
@@ -498,11 +511,18 @@ def create_tonie_file(output_file, input_files, no_tonie_header=False, user_time
498
511
  next_page_no = last_page.page_no + 1
499
512
  logger.debug("Track %d processed, next page no: %d, total granule: %d",
500
513
  index + 1, next_page_no, total_granule)
514
+ except Exception as e:
515
+ logger.error("Error processing file %s: %s", fname, str(e))
516
+ raise
501
517
  finally:
502
518
  handle.close()
503
519
 
504
520
  if not no_tonie_header:
521
+ logger.debug("Writing Tonie header")
505
522
  fix_tonie_header(out_file, chapters, timestamp, sha1)
506
523
 
507
524
  if keep_temp and temp_files:
508
- logger.info("Kept %d temporary opus files in %s", len(temp_files), os.path.dirname(temp_files[0]))
525
+ logger.info("Kept %d temporary opus files in %s", len(temp_files), os.path.dirname(temp_files[0]))
526
+
527
+ logger.trace("Exiting create_tonie_file() successfully")
528
+ logger.info("Successfully created Tonie file: %s", output_file)
@@ -0,0 +1,502 @@
1
+ """
2
+ TonieToolbox module for handling the tonies.custom.json operations.
3
+
4
+ This module handles fetching, updating, and saving custom tonies JSON data,
5
+ which can be used to manage custom Tonies on TeddyCloud servers.
6
+ """
7
+
8
+ import os
9
+ import json
10
+ import time
11
+ import urllib.error
12
+ import ssl
13
+ import uuid
14
+ import locale
15
+ import re
16
+ from typing import Dict, Any, List, Optional, Tuple
17
+
18
+ from .logger import get_logger
19
+ from .media_tags import get_file_tags, extract_album_info
20
+ from .constants import LANGUAGE_MAPPING, GENRE_MAPPING
21
+ from .teddycloud import get_tonies_custom_json_from_server, put_tonies_custom_json_to_server
22
+
23
+ logger = get_logger('tonies_json')
24
+
25
+ class ToniesJsonHandler:
26
+ """Handler for tonies.custom.json operations."""
27
+
28
+ def __init__(self, teddycloud_url: Optional[str] = None, ignore_ssl_verify: bool = False):
29
+ """
30
+ Initialize the handler.
31
+
32
+ Args:
33
+ teddycloud_url: URL of the TeddyCloud instance (optional)
34
+ ignore_ssl_verify: If True, SSL certificate verification will be disabled
35
+ """
36
+ self.teddycloud_url = teddycloud_url.rstrip('/') if teddycloud_url else None
37
+ self.ignore_ssl_verify = ignore_ssl_verify
38
+ self.custom_json = []
39
+ self.is_loaded = False
40
+
41
+ def load_from_server(self) -> bool:
42
+ """
43
+ Load tonies.custom.json from the TeddyCloud server.
44
+
45
+ Returns:
46
+ True if successful, False otherwise
47
+ """
48
+ if not self.teddycloud_url:
49
+ logger.error("Cannot load from server: No TeddyCloud URL provided")
50
+ return False
51
+
52
+ try:
53
+ result = get_tonies_custom_json_from_server(self.teddycloud_url, self.ignore_ssl_verify)
54
+
55
+ if result is not None:
56
+ self.custom_json = result
57
+ self.is_loaded = True
58
+ logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
59
+ return True
60
+ else:
61
+ logger.error("Failed to load tonies.custom.json from server")
62
+ return False
63
+
64
+ except Exception as e:
65
+ logger.error("Error loading tonies.custom.json: %s", e)
66
+ return False
67
+
68
+ def load_from_file(self, file_path: str) -> bool:
69
+ """
70
+ Load tonies.custom.json from a local file.
71
+
72
+ Args:
73
+ file_path: Path to the tonies.custom.json file
74
+
75
+ Returns:
76
+ True if successful, False otherwise
77
+ """
78
+ try:
79
+ if os.path.exists(file_path):
80
+ logger.info("Loading tonies.custom.json from file: %s", file_path)
81
+ with open(file_path, 'r', encoding='utf-8') as f:
82
+ data = json.load(f)
83
+ if isinstance(data, list):
84
+ self.custom_json = data
85
+ self.is_loaded = True
86
+ logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
87
+ return True
88
+ else:
89
+ logger.error("Invalid tonies.custom.json format in file, expected list")
90
+ return False
91
+ else:
92
+ logger.info("tonies.custom.json file not found, starting with empty list")
93
+ self.custom_json = []
94
+ self.is_loaded = True
95
+ return True
96
+
97
+ except Exception as e:
98
+ logger.error("Error loading tonies.custom.json from file: %s", e)
99
+ return False
100
+
101
+ def save_to_server(self) -> bool:
102
+ """
103
+ Save tonies.custom.json to the TeddyCloud server.
104
+
105
+ Returns:
106
+ True if successful, False otherwise
107
+ """
108
+ if not self.teddycloud_url:
109
+ logger.error("Cannot save to server: No TeddyCloud URL provided")
110
+ return False
111
+
112
+ if not self.is_loaded:
113
+ logger.error("Cannot save tonies.custom.json: data not loaded")
114
+ return False
115
+
116
+ try:
117
+ result = put_tonies_custom_json_to_server(
118
+ self.teddycloud_url,
119
+ self.custom_json,
120
+ self.ignore_ssl_verify
121
+ )
122
+
123
+ if result:
124
+ logger.info("Successfully saved tonies.custom.json to server")
125
+ return True
126
+ else:
127
+ logger.error("Failed to save tonies.custom.json to server")
128
+ return False
129
+
130
+ except Exception as e:
131
+ logger.error("Error saving tonies.custom.json to server: %s", e)
132
+ return False
133
+
134
+ def save_to_file(self, file_path: str) -> bool:
135
+ """
136
+ Save tonies.custom.json to a local file.
137
+
138
+ Args:
139
+ file_path: Path where to save the tonies.custom.json file
140
+
141
+ Returns:
142
+ True if successful, False otherwise
143
+ """
144
+ if not self.is_loaded:
145
+ logger.error("Cannot save tonies.custom.json: data not loaded")
146
+ return False
147
+
148
+ try:
149
+ # Ensure the directory exists
150
+ os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
151
+
152
+ logger.info("Saving tonies.custom.json to file: %s", file_path)
153
+ with open(file_path, 'w', encoding='utf-8') as f:
154
+ json.dump(self.custom_json, f, indent=2, ensure_ascii=False)
155
+
156
+ logger.info("Successfully saved tonies.custom.json to file")
157
+ return True
158
+
159
+ except Exception as e:
160
+ logger.error("Error saving tonies.custom.json to file: %s", e)
161
+ return False
162
+
163
+ def add_entry_from_taf(self, taf_file: str, input_files: List[str], artwork_url: Optional[str] = None) -> bool:
164
+ """
165
+ Add an entry to the custom JSON from a TAF file.
166
+
167
+ Args:
168
+ taf_file: Path to the TAF file
169
+ input_files: List of input audio files used to create the TAF
170
+ artwork_url: URL of the uploaded artwork (if any)
171
+
172
+ Returns:
173
+ True if successful, False otherwise
174
+ """
175
+ logger.trace("Entering add_entry_from_taf() with taf_file=%s, input_files=%s, artwork_url=%s",
176
+ taf_file, input_files, artwork_url)
177
+
178
+ if not self.is_loaded:
179
+ logger.error("Cannot add entry: tonies.custom.json not loaded")
180
+ return False
181
+
182
+ try:
183
+ logger.info("Adding entry for %s to tonies.custom.json", taf_file)
184
+
185
+ logger.debug("Generating article ID")
186
+ article_id = self._generate_article_id()
187
+ logger.debug("Generated article ID: %s", article_id)
188
+
189
+ logger.debug("Extracting metadata from input files")
190
+ metadata = self._extract_metadata_from_files(input_files)
191
+ logger.debug("Extracted metadata: %s", metadata)
192
+
193
+ logger.debug("Creating JSON entry")
194
+ entry = self._create_json_entry(article_id, taf_file, metadata, input_files, artwork_url)
195
+ logger.debug("Created entry: %s", entry)
196
+
197
+ self.custom_json.append(entry)
198
+ logger.debug("Added entry to custom_json (new length: %d)", len(self.custom_json))
199
+
200
+ logger.info("Successfully added entry for %s", taf_file)
201
+ logger.trace("Exiting add_entry_from_taf() with success=True")
202
+ return True
203
+
204
+ except Exception as e:
205
+ logger.error("Error adding entry for %s: %s", taf_file, e)
206
+ logger.trace("Exiting add_entry_from_taf() with success=False due to exception: %s", str(e))
207
+ return False
208
+
209
+ def _generate_article_id(self) -> str:
210
+ """
211
+ Generate a unique article ID for a new entry.
212
+
213
+ Returns:
214
+ Unique article ID in the format "tt-42" followed by sequential number starting from 0
215
+ """
216
+ logger.trace("Entering _generate_article_id()")
217
+
218
+ # Find the highest sequential number for tt-42 IDs
219
+ highest_num = -1
220
+ pattern = re.compile(r'tt-42(\d+)')
221
+
222
+ logger.debug("Searching for highest tt-42 ID in %d existing entries", len(self.custom_json))
223
+ for entry in self.custom_json:
224
+ article = entry.get('article', '')
225
+ logger.trace("Checking article ID: %s", article)
226
+ match = pattern.match(article)
227
+ if match:
228
+ try:
229
+ num = int(match.group(1))
230
+ logger.trace("Found numeric part: %d", num)
231
+ highest_num = max(highest_num, num)
232
+ except (IndexError, ValueError) as e:
233
+ logger.trace("Failed to parse article ID: %s (%s)", article, str(e))
234
+ pass
235
+
236
+ logger.debug("Highest tt-42 ID number found: %d", highest_num)
237
+
238
+ # Generate the next sequential number
239
+ next_num = highest_num + 1
240
+
241
+ # Format the ID with leading zeros to make it 10 digits
242
+ result = f"tt-42{next_num:010d}"
243
+ logger.debug("Generated new article ID: %s", result)
244
+
245
+ logger.trace("Exiting _generate_article_id() with result=%s", result)
246
+ return result
247
+
248
+ def _extract_metadata_from_files(self, input_files: List[str]) -> Dict[str, Any]:
249
+ metadata = {}
250
+
251
+ # If there are multiple files in the same folder, use album info
252
+ if len(input_files) > 1 and os.path.dirname(input_files[0]) == os.path.dirname(input_files[-1]):
253
+ folder_path = os.path.dirname(input_files[0])
254
+ album_info = extract_album_info(folder_path)
255
+ metadata.update(album_info)
256
+
257
+ # For all files, collect tags to use for track descriptions
258
+ track_descriptions = []
259
+ for file_path in input_files:
260
+ tags = get_file_tags(file_path)
261
+ if 'title' in tags:
262
+ track_descriptions.append(tags['title'])
263
+ else:
264
+ # Use filename as fallback
265
+ filename = os.path.splitext(os.path.basename(file_path))[0]
266
+ track_descriptions.append(filename)
267
+
268
+ # Extract language and genre from the first file if not already present
269
+ if 'language' not in metadata and 'language' in tags:
270
+ metadata['language'] = tags['language']
271
+
272
+ if 'genre' not in metadata and 'genre' in tags:
273
+ metadata['genre'] = tags['genre']
274
+
275
+ metadata['track_descriptions'] = track_descriptions
276
+
277
+ return metadata
278
+
279
+ def _determine_language(self, metadata: Dict[str, Any]) -> str:
280
+ # Check for language tag in metadata
281
+ if 'language' in metadata:
282
+ lang_value = metadata['language'].lower().strip()
283
+ if lang_value in LANGUAGE_MAPPING:
284
+ return LANGUAGE_MAPPING[lang_value]
285
+
286
+ # If not found, try to use system locale
287
+ try:
288
+ system_lang, _ = locale.getdefaultlocale()
289
+ if system_lang:
290
+ lang_code = system_lang.split('_')[0].lower()
291
+ if lang_code in LANGUAGE_MAPPING:
292
+ return LANGUAGE_MAPPING[lang_code]
293
+ # Try to map system language code to tonie format
294
+ if lang_code == 'de':
295
+ return 'de-de'
296
+ elif lang_code == 'en':
297
+ return 'en-us'
298
+ elif lang_code == 'fr':
299
+ return 'fr-fr'
300
+ elif lang_code == 'it':
301
+ return 'it-it'
302
+ elif lang_code == 'es':
303
+ return 'es-es'
304
+ except Exception:
305
+ pass
306
+
307
+ # Default to German as it's most common for Tonies
308
+ return 'de-de'
309
+
310
+ def _determine_category(self, metadata: Dict[str, Any]) -> str:
311
+ # Check for genre tag in metadata
312
+ if 'genre' in metadata:
313
+ genre_value = metadata['genre'].lower().strip()
314
+
315
+ # Check for direct mapping
316
+ if genre_value in GENRE_MAPPING:
317
+ return GENRE_MAPPING[genre_value]
318
+
319
+ # Check for partial matching
320
+ for genre_key, category in GENRE_MAPPING.items():
321
+ if genre_key in genre_value:
322
+ return category
323
+
324
+ # Check for common keywords in the genre
325
+ if any(keyword in genre_value for keyword in ['musik', 'song', 'music', 'lied']):
326
+ return 'music'
327
+ elif any(keyword in genre_value for keyword in ['hörspiel', 'hörspiele', 'audio play']):
328
+ return 'Hörspiele & Hörbücher'
329
+ elif any(keyword in genre_value for keyword in ['hörbuch', 'audiobook', 'book']):
330
+ return 'Hörspiele & Hörbücher'
331
+ elif any(keyword in genre_value for keyword in ['märchen', 'fairy', 'tales']):
332
+ return 'Hörspiele & Hörbücher'
333
+ elif any(keyword in genre_value for keyword in ['wissen', 'knowledge', 'learn']):
334
+ return 'Wissen & Hörmagazine'
335
+ elif any(keyword in genre_value for keyword in ['schlaf', 'sleep', 'meditation']):
336
+ return 'Schlaflieder & Entspannung'
337
+
338
+ # Default to standard category for most custom content
339
+ return 'Hörspiele & Hörbücher'
340
+
341
+ def _estimate_age(self, metadata: Dict[str, Any]) -> int:
342
+ default_age = 3
343
+ if 'comment' in metadata:
344
+ comment = metadata['comment'].lower()
345
+ age_indicators = ['ab ', 'age ', 'alter ', 'Jahre']
346
+ for indicator in age_indicators:
347
+ if indicator in comment:
348
+ try:
349
+ idx = comment.index(indicator) + len(indicator)
350
+ age_str = ''.join(c for c in comment[idx:idx+2] if c.isdigit())
351
+ if age_str:
352
+ return int(age_str)
353
+ except (ValueError, IndexError):
354
+ pass
355
+ if 'genre' in metadata:
356
+ genre = metadata['genre'].lower()
357
+ if any(term in genre for term in ['kind', 'child', 'kids']):
358
+ return 3
359
+ if any(term in genre for term in ['jugend', 'teen', 'youth']):
360
+ return 10
361
+ if any(term in genre for term in ['erwachsen', 'adult']):
362
+ return 18
363
+
364
+ return default_age
365
+
366
+ def _create_json_entry(self, article_id: str, taf_file: str, metadata: Dict[str, Any],
367
+ input_files: List[str], artwork_url: Optional[str] = None) -> Dict[str, Any]:
368
+ # Calculate the size in bytes
369
+ taf_size = os.path.getsize(taf_file)
370
+
371
+ # Get current timestamp
372
+ timestamp = int(time.time())
373
+
374
+ # Create entry from metadata
375
+ series = metadata.get('albumartist', metadata.get('artist', 'Unknown Artist'))
376
+ episode = metadata.get('album', os.path.splitext(os.path.basename(taf_file))[0])
377
+ track_desc = metadata.get('track_descriptions', [])
378
+ language = self._determine_language(metadata)
379
+ category = self._determine_category(metadata)
380
+ age = self._estimate_age(metadata)
381
+
382
+ # Create a unique hash for the file
383
+ import hashlib
384
+ with open(taf_file, 'rb') as f:
385
+ taf_hash = hashlib.sha1(f.read()).hexdigest()
386
+
387
+ # Build the entry
388
+ entry = {
389
+ "article": article_id,
390
+ "data": [
391
+ {
392
+ "series": series,
393
+ "episode": episode,
394
+ "release": timestamp,
395
+ "language": language,
396
+ "category": category,
397
+ "runtime": 0, # Could calculate this with proper audio analysis
398
+ "age": age,
399
+ "origin": "custom",
400
+ "image": artwork_url if artwork_url else "",
401
+ "track-desc": track_desc,
402
+ "ids": [
403
+ {
404
+ "audio-id": timestamp,
405
+ "hash": taf_hash,
406
+ "size": taf_size,
407
+ "tracks": len(track_desc),
408
+ "confidence": 1
409
+ }
410
+ ]
411
+ }
412
+ ]
413
+ }
414
+
415
+ return entry
416
+
417
+
418
+ def fetch_and_update_tonies_json(teddycloud_url: Optional[str] = None, ignore_ssl_verify: bool = False,
419
+ taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
420
+ artwork_url: Optional[str] = None, output_dir: Optional[str] = None) -> bool:
421
+ """
422
+ Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry.
423
+
424
+ Args:
425
+ teddycloud_url: URL of the TeddyCloud instance (optional)
426
+ ignore_ssl_verify: If True, SSL certificate verification will be disabled
427
+ taf_file: Path to the TAF file to add
428
+ input_files: List of input audio files used to create the TAF
429
+ artwork_url: URL of the uploaded artwork (if any)
430
+ output_dir: Directory where to save the tonies.custom.json file (defaults to './output')
431
+
432
+ Returns:
433
+ True if successful, False otherwise
434
+ """
435
+ handler = ToniesJsonHandler(teddycloud_url, ignore_ssl_verify)
436
+
437
+ # Determine where to load from and save to
438
+ if not output_dir:
439
+ output_dir = './output'
440
+
441
+ # Ensure output directory exists
442
+ os.makedirs(output_dir, exist_ok=True)
443
+
444
+ # Create the full path for the JSON file
445
+ json_file_path = os.path.join(output_dir, 'tonies.custom.json')
446
+
447
+ loaded_from_server = False
448
+
449
+ # Step 1: Try to get live version from the server first
450
+ if teddycloud_url:
451
+ logger.info("Attempting to load tonies.custom.json from server")
452
+ loaded_from_server = handler.load_from_server()
453
+
454
+ # Step 2: If we have a local file, merge with the server content
455
+ if os.path.exists(json_file_path):
456
+ logger.info("Local tonies.custom.json file found, merging with server content")
457
+
458
+ # Create a temporary handler to load local content
459
+ local_handler = ToniesJsonHandler()
460
+ if local_handler.load_from_file(json_file_path):
461
+ if loaded_from_server:
462
+ # Merge local content with server content
463
+ # Use server-loaded content as base, then add any local entries not in server version
464
+ server_article_ids = {entry.get('article') for entry in handler.custom_json}
465
+ for local_entry in local_handler.custom_json:
466
+ local_article_id = local_entry.get('article')
467
+ if local_article_id not in server_article_ids:
468
+ logger.info(f"Adding local-only entry {local_article_id} to merged content")
469
+ handler.custom_json.append(local_entry)
470
+ else:
471
+ # Use local content as we couldn't load from server
472
+ handler.custom_json = local_handler.custom_json
473
+ handler.is_loaded = True
474
+ logger.info("Using local tonies.custom.json content")
475
+ elif not loaded_from_server:
476
+ # No server content and no local file, start with empty list
477
+ handler.custom_json = []
478
+ handler.is_loaded = True
479
+ logger.info("No tonies.custom.json found, starting with empty list")
480
+
481
+ # Add entry if needed
482
+ if taf_file and input_files and handler.is_loaded:
483
+ if not handler.add_entry_from_taf(taf_file, input_files, artwork_url):
484
+ logger.error("Failed to add entry to tonies.custom.json")
485
+ return False
486
+
487
+ # Save to file
488
+ if not handler.save_to_file(json_file_path):
489
+ logger.error("Failed to save tonies.custom.json to file")
490
+ return False
491
+
492
+ # Try to save to server if URL is provided
493
 + # For future use if the API endpoints are available
494
+ #if teddycloud_url and handler.is_loaded:
495
+ try:
496
+ if not handler.save_to_server():
497
+ logger.warning("Could not save tonies.custom.json to server")
498
+ except Exception as e:
499
+ logger.warning("Error when saving tonies.custom.json to server: %s", e)
500
+ # Don't fail the operation if server upload fails
501
+
502
+ return True