TonieToolbox 0.4.1__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- TonieToolbox/__init__.py +1 -1
- TonieToolbox/__main__.py +269 -349
- TonieToolbox/artwork.py +105 -0
- TonieToolbox/audio_conversion.py +48 -5
- TonieToolbox/media_tags.py +5 -4
- TonieToolbox/recursive_processor.py +24 -19
- TonieToolbox/tags.py +74 -0
- TonieToolbox/teddycloud.py +250 -593
- TonieToolbox/tonie_analysis.py +173 -13
- TonieToolbox/tonie_file.py +17 -29
- TonieToolbox/tonies_json.py +1036 -170
- TonieToolbox/version_handler.py +26 -22
- {tonietoolbox-0.4.1.dist-info → tonietoolbox-0.5.0.dist-info}/METADATA +147 -99
- tonietoolbox-0.5.0.dist-info/RECORD +26 -0
- {tonietoolbox-0.4.1.dist-info → tonietoolbox-0.5.0.dist-info}/WHEEL +1 -1
- tonietoolbox-0.4.1.dist-info/RECORD +0 -24
- {tonietoolbox-0.4.1.dist-info → tonietoolbox-0.5.0.dist-info}/entry_points.txt +0 -0
- {tonietoolbox-0.4.1.dist-info → tonietoolbox-0.5.0.dist-info}/licenses/LICENSE.md +0 -0
- {tonietoolbox-0.4.1.dist-info → tonietoolbox-0.5.0.dist-info}/top_level.txt +0 -0
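The bulk of this release is the rewrite of TonieToolbox/tonies_json.py shown below: TeddyCloud access moves from ad-hoc parameters (teddycloud_url, ignore_ssl_verify) to a single TeddyCloudClient object, and the module now ships two handlers, ToniesJsonHandlerv1 and ToniesJsonHandlerv2, one per tonies.custom.json format. A minimal sketch of calling the new v1 entry point, inferred from the signatures visible in this diff (the TeddyCloudClient constructor arguments are an assumption; they are not part of this changeset):

    from TonieToolbox.teddycloud import TeddyCloudClient
    from TonieToolbox.tonies_json import fetch_and_update_tonies_json_v1

    # Hypothetical constructor call -- the client's arguments are not shown in this diff.
    client = TeddyCloudClient("https://teddycloud.local")

    ok = fetch_and_update_tonies_json_v1(
        client,                                  # used for get_tonies_custom_json()
        taf_file="output/my-story.taf",          # TAF whose SHA1 hash keys the entry
        input_files=["01.mp3", "02.mp3"],        # sources for metadata and track names
        artwork_url="/custom_img/my-story.png",  # optional picture URL
        output_dir="./output",                   # tonies.custom.json is written here
    )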
TonieToolbox/tonies_json.py
CHANGED
@@ -8,52 +8,53 @@ which can be used to manage custom Tonies on TeddyCloud servers.
 import os
 import json
 import time
-import urllib.error
-import ssl
-import uuid
 import locale
 import re
-
+import hashlib
+import mutagen
+from typing import Dict, Any, List, Optional
 
 from .logger import get_logger
 from .media_tags import get_file_tags, extract_album_info
 from .constants import LANGUAGE_MAPPING, GENRE_MAPPING
-from .teddycloud import
+from .teddycloud import TeddyCloudClient
 
 logger = get_logger('tonies_json')
 
-class
-    """Handler for tonies.custom.json operations."""
+class ToniesJsonHandlerv1:
+    """Handler for tonies.custom.json operations using v1 format."""
 
-    def __init__(self,
+    def __init__(self, client: TeddyCloudClient = None):
         """
         Initialize the handler.
 
         Args:
-
-
-
-        self.teddycloud_url = teddycloud_url.rstrip('/') if teddycloud_url else None
-        self.ignore_ssl_verify = ignore_ssl_verify
+            client: TeddyCloudClient instance to use for API communication
+        """
+        self.client = client
         self.custom_json = []
         self.is_loaded = False
-
+
     def load_from_server(self) -> bool:
         """
         Load tonies.custom.json from the TeddyCloud server.
 
         Returns:
             True if successful, False otherwise
-        """
-        if
-            logger.error("Cannot load from server:
+        """
+        if self.client is None:
+            logger.error("Cannot load from server: no client provided")
             return False
 
         try:
-            result =
-
+            result = self.client.get_tonies_custom_json()
             if result is not None:
-
+                # Convert v2 format to v1 format if necessary
+                if len(result) > 0 and "data" in result[0]:
+                    logger.debug("Converting v2 format from server to v1 format")
+                    self.custom_json = self._convert_v2_to_v1(result)
+                else:
+                    self.custom_json = result
                 self.is_loaded = True
                 logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
                 return True
@@ -81,7 +82,12 @@ class ToniesJsonHandler:
                 with open(file_path, 'r', encoding='utf-8') as f:
                     data = json.load(f)
                     if isinstance(data, list):
-
+                        # Convert v2 format to v1 format if necessary
+                        if len(data) > 0 and "data" in data[0]:
+                            logger.debug("Converting v2 format from file to v1 format")
+                            self.custom_json = self._convert_v2_to_v1(data)
+                        else:
+                            self.custom_json = data
                         self.is_loaded = True
                         logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
                         return True
@@ -98,37 +104,675 @@ class ToniesJsonHandler:
             logger.error("Error loading tonies.custom.json from file: %s", e)
             return False
 
-    def
+    def save_to_file(self, file_path: str) -> bool:
         """
-        Save tonies.custom.json to
+        Save tonies.custom.json to a local file.
 
+        Args:
+            file_path: Path where to save the tonies.custom.json file
+
         Returns:
             True if successful, False otherwise
         """
-        if not self.
-            logger.error("Cannot save
+        if not self.is_loaded:
+            logger.error("Cannot save tonies.custom.json: data not loaded")
+            return False
+
+        try:
+            os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
+            logger.info("Saving tonies.custom.json to file: %s", file_path)
+            with open(file_path, 'w', encoding='utf-8') as f:
+                json.dump(self.custom_json, f, indent=2, ensure_ascii=False)
+
+            logger.info("Successfully saved tonies.custom.json to file")
+            return True
+
+        except Exception as e:
+            logger.error("Error saving tonies.custom.json to file: %s", e)
             return False
+
+    def renumber_series_entries(self, series: str):
+        """
+        Re-sort and re-number all entries for a series by year (chronological),
+        with entries without a year coming last.
+        """
+        # Collect all entries for the series
+        series_entries = [entry for entry in self.custom_json if entry.get('series') == series]
+        # Separate entries with and without year
+        with_year = []
+        without_year = []
+        for entry in series_entries:
+            year = self._extract_year_from_text(entry.get('title', ''))
+            if not year:
+                year = self._extract_year_from_text(entry.get('episodes', ''))
+            if year:
+                with_year.append((year, entry))
+            else:
+                without_year.append(entry)
+        # Sort entries with year
+        with_year.sort(key=lambda x: x[0])
+        # Assign new numbers
+        new_no = 1
+        for _, entry in with_year:
+            entry['no'] = str(new_no)
+            new_no += 1
+        for entry in without_year:
+            entry['no'] = str(new_no)
+            new_no += 1
+
+    def add_entry_from_taf(self, taf_file: str, input_files: List[str], artwork_url: Optional[str] = None) -> bool:
+        """
+        Add an entry to the custom JSON from a TAF file.
+        If an entry with the same hash exists, it will be updated.
+        If an entry with the same series+episodes exists, the new hash will be added to it.
+
+        Args:
+            taf_file: Path to the TAF file
+            input_files: List of input audio files used to create the TAF
+            artwork_url: URL of the uploaded artwork (if any)
 
+        Returns:
+            True if successful, False otherwise
+        """
+        logger.trace("Entering add_entry_from_taf() with taf_file=%s, input_files=%s, artwork_url=%s",
+                     taf_file, input_files, artwork_url)
+
         if not self.is_loaded:
-            logger.error("Cannot
+            logger.error("Cannot add entry: tonies.custom.json not loaded")
             return False
+
+        try:
+            logger.info("Adding entry for %s to tonies.custom.json", taf_file)
+            logger.debug("Extracting metadata from input files")
+            metadata = self._extract_metadata_from_files(input_files)
+            logger.debug("Extracted metadata: %s", metadata)
+            with open(taf_file, 'rb') as f:
+                taf_hash = hashlib.sha1(f.read()).hexdigest().upper()
+
+            timestamp = str(int(time.time()))
+            series = metadata.get('albumartist', metadata.get('artist', 'Unknown Artist'))
+            episodes = metadata.get('album', os.path.splitext(os.path.basename(taf_file))[0])
+            copyright = metadata.get('copyright', '')
+
+            # Extract year from metadata or from episode title
+            year = None
+            year_str = metadata.get('year', metadata.get('date', None))
+
+            # Try to convert metadata year to int if it exists
+            if year_str:
+                try:
+                    # Extract 4 digits if the date includes more information (e.g., "2022-05-01")
+                    import re
+                    year_match = re.search(r'(\d{4})', str(year_str))
+                    if year_match:
+                        year = int(year_match.group(1))
+                    else:
+                        # If year is just a number, try to format it properly
+                        year_val = int(year_str)
+                        if 0 <= year_val <= 99:  # Assume 2-digit year format
+                            if year_val <= 25:  # Arbitrary cutoff for 20xx vs 19xx
+                                year = 2000 + year_val
+                            else:
+                                year = 1900 + year_val
+                        else:
+                            year = year_val
+                except (ValueError, TypeError):
+                    logger.debug("Could not convert metadata year '%s' to integer", year_str)
+
+            if not year:
+                year_from_episodes = self._extract_year_from_text(episodes)
+                year_from_copyright = self._extract_year_from_text(copyright)
+                if year_from_episodes:
+                    year = year_from_episodes
+                else:
+                    year = year_from_copyright
+
+            # Ensure year is in YYYY format
+            year_formatted = None
+            if year:
+                # Validate the year is in the reasonable range
+                if 1900 <= year <= 2099:
+                    year_formatted = f"{year:04d}"  # Format as 4 digits
+                    logger.debug("Formatted year '%s' as '%s'", year, year_formatted)
+                else:
+                    logger.warning("Year '%s' outside reasonable range (1900-2099), ignoring", year)
+
+            if year_formatted:
+                title = f"{series} - {year_formatted} - {episodes}"
+            else:
+                title = f"{series} - {episodes}"
+
+            tracks = metadata.get('track_descriptions', [])
+            language = self._determine_language(metadata)
+            category = self._determine_category_v1(metadata)
+
+            existing_entry, entry_idx = self.find_entry_by_hash(taf_hash)
+            if existing_entry:
+                logger.info("Found existing entry with the same hash, updating it")
+                if artwork_url and artwork_url != existing_entry.get('pic', ''):
+                    logger.debug("Updating artwork URL")
+                    existing_entry['pic'] = artwork_url
+                if tracks and tracks != existing_entry.get('tracks', []):
+                    logger.debug("Updating track descriptions")
+                    existing_entry['tracks'] = tracks
+                if episodes and episodes != existing_entry.get('episodes', ''):
+                    logger.debug("Updating episodes")
+                    existing_entry['episodes'] = episodes
+                if series and series != existing_entry.get('series', ''):
+                    logger.debug("Updating series")
+                    existing_entry['series'] = series
+                logger.info("Successfully updated existing entry for %s", taf_file)
+                self.renumber_series_entries(series)
+                return True
+
+            existing_entry, entry_idx = self.find_entry_by_series_episodes(series, episodes)
+            if existing_entry:
+                logger.info("Found existing entry with the same series/episodes, adding hash to it")
+                if 'audio_id' not in existing_entry:
+                    existing_entry['audio_id'] = []
+                if 'hash' not in existing_entry:
+                    existing_entry['hash'] = []
+
+                existing_entry['audio_id'].append(timestamp)
+                existing_entry['hash'].append(taf_hash)
+
+                if artwork_url and artwork_url != existing_entry.get('pic', ''):
+                    logger.debug("Updating artwork URL")
+                    existing_entry['pic'] = artwork_url
+
+                logger.info("Successfully added new hash to existing entry for %s", taf_file)
+                self.renumber_series_entries(series)
+                return True
+
+            logger.debug("No existing entry found, creating new entry")
+
+            logger.debug("Generating entry number")
+            entry_no = self._generate_entry_no(series, episodes, year)
+            logger.debug("Generated entry number: %s", entry_no)
+
+            logger.debug("Generating model number")
+            model_number = self._generate_model_number()
+            logger.debug("Generated model number: %s", model_number)
+
+            entry = {
+                "no": entry_no,
+                "model": model_number,
+                "audio_id": [timestamp],
+                "hash": [taf_hash],
+                "title": title,
+                "series": series,
+                "episodes": episodes,
+                "tracks": tracks,
+                "release": timestamp,
+                "language": language,
+                "category": category,
+                "pic": artwork_url if artwork_url else ""
+            }
+
+            self.custom_json.append(entry)
+            logger.debug("Added entry to custom_json (new length: %d)", len(self.custom_json))
+
+            logger.info("Successfully added entry for %s", taf_file)
+            self.renumber_series_entries(series)
+            logger.trace("Exiting add_entry_from_taf() with success=True")
+            return True
+
+        except Exception as e:
+            logger.error("Error adding entry for %s: %s", taf_file, e)
+            logger.trace("Exiting add_entry_from_taf() with success=False due to exception: %s", str(e))
+            return False
+
+    def _generate_entry_no(self, series: str, episodes: str, year: Optional[int] = None) -> str:
+        """
+        Generate an entry number based on specific rules:
+        1. For series entries with years: assign numbers in chronological order (1, 2, 3, etc.)
+        2. For entries without years: assign the next available number after those with years
+
+        Args:
+            series: Series name
+            episodes: Episodes name
+            year: Release year from metadata, if available
+
+        Returns:
+            Generated entry number as string
+        """
+        logger.trace("Entering _generate_entry_no() with series='%s', episodes='%s', year=%s",
+                     series, episodes, year)
+
+        # If we don't have a series name, use a simple approach to get the next number
+        if not series:
+            max_no = 0
+            for entry in self.custom_json:
+                try:
+                    no_value = int(entry.get('no', '0'))
+                    max_no = max(max_no, no_value)
+                except (ValueError, TypeError):
+                    pass
+            return str(max_no + 1)
+
+        logger.debug("Generating entry number for series '%s'", series)
+
+        # Step 1: Collect all existing entries for this series and extract their years
+        series_entries = []
+        used_numbers = set()
+
+        for entry in self.custom_json:
+            entry_series = entry.get('series', '')
+            if entry_series == series:
+                entry_no = entry.get('no', '')
+                try:
+                    entry_no_int = int(entry_no)
+                    used_numbers.add(entry_no_int)
+                except (ValueError, TypeError):
+                    pass
+
+                entry_title = entry.get('title', '')
+                entry_episodes = entry.get('episodes', '')
+
+                # Extract year from title and episodes
+                entry_year = self._extract_year_from_text(entry_title)
+                if not entry_year:
+                    entry_year = self._extract_year_from_text(entry_episodes)
+
+                series_entries.append({
+                    'no': entry_no,
+                    'title': entry_title,
+                    'episodes': entry_episodes,
+                    'year': entry_year
+                })
+
+        # Try to extract year from episodes if not explicitly provided
+        if not year:
+            extracted_year = self._extract_year_from_text(episodes)
+            if extracted_year:
+                year = extracted_year
+                logger.debug("Extracted year %d from episodes '%s'", year, episodes)
+
+        # Step 2: Split entries into those with years and those without
+        entries_with_years = [e for e in series_entries if e['year'] is not None]
+        entries_without_years = [e for e in series_entries if e['year'] is None]
+
+        # Sort entries with years by year (oldest first)
+        entries_with_years.sort(key=lambda x: x['year'])
+
+        logger.debug("Found %d entries with years and %d entries without years",
+                     len(entries_with_years), len(entries_without_years))
+
+        # Step 3: If this entry has a year, determine where it should be inserted
+        if year:
+            # Find position based on chronological order
+            insertion_index = 0
+            while insertion_index < len(entries_with_years) and entries_with_years[insertion_index]['year'] < year:
+                insertion_index += 1
 
+            # Resulting position is 1-indexed
+            position = insertion_index + 1
+            logger.debug("For year %d, calculated position %d based on chronological order", year, position)
+
+            # Now adjust position if needed to avoid conflicts with existing entries
+            while position in used_numbers:
+                position += 1
+                logger.debug("Position %d already used, incrementing to %d", position-1, position)
+
+            logger.debug("Final assigned entry number: %d", position)
+            return str(position)
+        else:
+            # Step 4: If this entry has no year, it should come after all entries with years
+            # Find the highest number used by entries with years
+            years_highest_no = 0
+            if entries_with_years:
+                for i, entry in enumerate(entries_with_years):
+                    try:
+                        expected_no = i + 1  # 1-indexed
+                        actual_no = int(entry['no'])
+                        years_highest_no = max(years_highest_no, actual_no)
+                    except (ValueError, TypeError):
+                        pass
+
+            # Find the highest number used overall
+            highest_no = max(used_numbers) if used_numbers else 0
+
+            # Next number should be at least one more than the highest from entries with years
+            next_no = max(years_highest_no, highest_no) + 1
+
+            logger.debug("No year available, assigned next number: %d", next_no)
+            return str(next_no)
+
+    def _extract_year_from_text(self, text: str) -> Optional[int]:
+        """
+        Extract a year (1900-2099) from text.
+
+        Args:
+            text: The text to extract the year from
+
+        Returns:
+            The extracted year as int, or None if no valid year found
+        """
+        import re
+        year_pattern = re.compile(r'(19\d{2}|20\d{2})')
+        year_match = year_pattern.search(text)
+
+        if year_match:
+            try:
+                extracted_year = int(year_match.group(1))
+                if 1900 <= extracted_year <= 2099:
+                    return extracted_year
+            except (ValueError, TypeError):
+                pass
+
+        return None
+
+    def _format_number(self, number: int, existing_entries: List[Dict[str, Any]]) -> str:
+        """
+        Format a number to match the existing entry number format (e.g., with leading zeros).
+
+        Args:
+            number: The number to format
+            existing_entries: List of existing entries with their numbers
+
+        Returns:
+            Formatted number as string
+        """
+        max_digits = 1
+        for entry in existing_entries:
+            entry_no = entry.get('no', '')
+            if entry_no and isinstance(entry_no, str) and entry_no.isdigit():
+                leading_zeros = len(entry_no) - len(entry_no.lstrip('0'))
+                if leading_zeros > 0:
+                    digits = len(entry_no)
+                    max_digits = max(max_digits, digits)
+        if max_digits > 1:
+            logger.trace("Formatting with %d digits", max_digits)
+            return f"{number:0{max_digits}d}"
+
+        return str(number)
+
+    def _generate_model_number(self) -> str:
+        """
+        Generate a unique model number for a new entry.
+
+        Returns:
+            Unique model number in the format "model-" followed by sequential number with zero padding
+        """
+        logger.trace("Entering _generate_model_number()")
+        highest_num = -1
+        pattern = re.compile(r'tt-42(\d+)')
+
+        logger.debug("Searching for highest tt-42 ID in %d existing entries", len(self.custom_json))
+        for entry in self.custom_json:
+            model = entry.get('model', '')
+            logger.trace("Checking model ID: %s", model)
+            match = pattern.match(model)
+            if match:
+                try:
+                    num = int(match.group(1))
+                    logger.trace("Found numeric part: %d", num)
+                    highest_num = max(highest_num, num)
+                except (IndexError, ValueError) as e:
+                    logger.trace("Failed to parse model ID: %s (%s)", model, str(e))
+                    pass
+
+        logger.debug("Highest tt-42 ID number found: %d", highest_num)
+        next_num = highest_num + 1
+        result = f"tt-42{next_num:010d}"
+        logger.debug("Generated new model ID: %s", result)
+
+        logger.trace("Exiting _generate_model_number() with result=%s", result)
+        return result
+
+    def _determine_category_v1(self, metadata: Dict[str, Any]) -> str:
+        """
+        Determine the category in v1 format.
+
+        Args:
+            metadata: Dictionary containing file metadata
+
+        Returns:
+            Category string in v1 format
+        """
+        if 'genre' in metadata:
+            genre_value = metadata['genre'].lower().strip()
+
+            if any(keyword in genre_value for keyword in ['musik', 'song', 'music', 'lied']):
+                return "music"
+            elif any(keyword in genre_value for keyword in ['hörspiel', 'audio play', 'hörbuch', 'audiobook']):
+                return "audio-play"
+            elif any(keyword in genre_value for keyword in ['märchen', 'fairy', 'tales']):
+                return "fairy-tale"
+            elif any(keyword in genre_value for keyword in ['wissen', 'knowledge', 'learn']):
+                return "knowledge"
+            elif any(keyword in genre_value for keyword in ['schlaf', 'sleep', 'meditation']):
+                return "sleep"
+
+        return "audio-play"
+
+    def find_entry_by_hash(self, taf_hash: str) -> tuple[Optional[Dict[str, Any]], Optional[int]]:
+        """
+        Find an entry in the custom JSON by TAF hash.
+
+        Args:
+            taf_hash: SHA1 hash of the TAF file to find
+
+        Returns:
+            Tuple of (entry, entry_index) if found, or (None, None) if not found
+        """
+        logger.trace("Searching for entry with hash %s", taf_hash)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if 'hash' not in entry:
+                continue
+
+            for hash_value in entry['hash']:
+                if hash_value == taf_hash:
+                    logger.debug("Found existing entry with matching hash %s", taf_hash)
+                    return entry, entry_idx
+
+        logger.debug("No entry found with hash %s", taf_hash)
+        return None, None
+
+    def find_entry_by_series_episodes(self, series: str, episodes: str) -> tuple[Optional[Dict[str, Any]], Optional[int]]:
+        """
+        Find an entry in the custom JSON by series and episodes.
+
+        Args:
+            series: Series name to find
+            episodes: Episodes name to find
+
+        Returns:
+            Tuple of (entry, entry_index) if found, or (None, None) if not found
+        """
+        logger.trace("Searching for entry with series='%s', episodes='%s'", series, episodes)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if entry.get('series') == series and entry.get('episodes') == episodes:
+                logger.debug("Found existing entry with matching series/episodes: %s / %s", series, episodes)
+                return entry, entry_idx
+
+        logger.debug("No entry found with series/episodes: %s / %s", series, episodes)
+        return None, None
+
+    def _extract_metadata_from_files(self, input_files: List[str]) -> Dict[str, Any]:
+        """
+        Extract metadata from audio files to use in the custom JSON entry.
+
+        Args:
+            input_files: List of paths to audio files
+
+        Returns:
+            Dictionary containing metadata extracted from files
+        """
+        metadata = {}
+        track_descriptions = []
+        for file_path in input_files:
+            tags = get_file_tags(file_path)
+            if 'title' in tags:
+                track_descriptions.append(tags['title'])
+            else:
+                filename = os.path.splitext(os.path.basename(file_path))[0]
+                track_descriptions.append(filename)
+            for tag_name, tag_value in tags.items():
+                if tag_name not in metadata:
+                    metadata[tag_name] = tag_value
+
+        metadata['track_descriptions'] = track_descriptions
+
+        return metadata
+
+    def _determine_language(self, metadata: Dict[str, Any]) -> str:
+        if 'language' in metadata:
+            lang_value = metadata['language'].lower().strip()
+            if lang_value in LANGUAGE_MAPPING:
+                return LANGUAGE_MAPPING[lang_value]
         try:
-
-
-
-
-
-
-
-
+            system_lang, _ = locale.getdefaultlocale()
+            if system_lang:
+                lang_code = system_lang.split('_')[0].lower()
+                if lang_code in LANGUAGE_MAPPING:
+                    return LANGUAGE_MAPPING[lang_code]
+        except Exception:
+            pass
+        return 'de-de'
+
+    def _convert_v2_to_v1(self, v2_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+        """
+        Convert data from v2 format to v1 format.
+
+        Args:
+            v2_data: Data in v2 format
+
+        Returns:
+            Converted data in v1 format
+        """
+        v1_data = []
+
+        entry_no = 0
+        for v2_entry in v2_data:
+            if 'data' not in v2_entry:
+                continue
+
+            for v2_data_item in v2_entry['data']:
+                series = v2_data_item.get('series', '')
+                episodes = v2_data_item.get('episode', '')
+                model = v2_data_item.get('article', '')
+                title = f"{series} - {episodes}" if series and episodes else episodes
+
+                v1_entry = {
+                    "no": str(entry_no),
+                    "model": model,
+                    "audio_id": [],
+                    "hash": [],
+                    "title": title,
+                    "series": series,
+                    "episodes": episodes,
+                    "tracks": v2_data_item.get('track-desc', []),
+                    "release": str(v2_data_item.get('release', int(time.time()))),
+                    "language": v2_data_item.get('language', 'de-de'),
+                    "category": self._convert_category_v2_to_v1(v2_data_item.get('category', '')),
+                    "pic": v2_data_item.get('image', '')
+                }
+                if 'ids' in v2_data_item:
+                    for id_entry in v2_data_item['ids']:
+                        if 'audio-id' in id_entry:
+                            v1_entry['audio_id'].append(str(id_entry['audio-id']))
+                        if 'hash' in id_entry:
+                            v1_entry['hash'].append(id_entry['hash'].upper())
+
+                v1_data.append(v1_entry)
+                entry_no += 1
+
+        return v1_data
+
+    def _convert_category_v2_to_v1(self, v2_category: str) -> str:
+        """
+        Convert category from v2 format to v1 format.
+
+        Args:
+            v2_category: Category in v2 format
+
+        Returns:
+            Category in v1 format
+        """
+        v2_to_v1_mapping = {
+            "music": "music",
+            "Hörspiele & Hörbücher": "audio-play",
+            "Schlaflieder & Entspannung": "sleep",
+            "Wissen & Hörmagazine": "knowledge",
+            "Märchen": "fairy-tale"
+        }
+
+        return v2_to_v1_mapping.get(v2_category, "audio-play")
+
+class ToniesJsonHandlerv2:
+    """Handler for tonies.custom.json operations."""
+
+    def __init__(self, client: TeddyCloudClient = None):
+        """
+        Initialize the handler.
+
+        Args:
+            client: TeddyCloudClient instance to use for API communication
+        """
+        self.client = client
+        self.custom_json = []
+        self.is_loaded = False
+
+    def load_from_server(self) -> bool:
+        """
+        Load tonies.custom.json from the TeddyCloud server.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        if self.client is None:
+            logger.error("Cannot load from server: no client provided")
+            return False
+
+        try:
+            result = self.client.get_tonies_custom_json()
+            if result is not None:
+                self.custom_json = result
+                self.is_loaded = True
+                logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
                 return True
             else:
-                logger.error("Failed to
+                logger.error("Failed to load tonies.custom.json from server")
                 return False
 
         except Exception as e:
-            logger.error("Error
+            logger.error("Error loading tonies.custom.json: %s", e)
+            return False
+
+    def load_from_file(self, file_path: str) -> bool:
+        """
+        Load tonies.custom.json from a local file.
+
+        Args:
+            file_path: Path to the tonies.custom.json file
+
+        Returns:
+            True if successful, False otherwise
+        """
+        try:
+            if os.path.exists(file_path):
+                logger.info("Loading tonies.custom.json from file: %s", file_path)
+                with open(file_path, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                    if isinstance(data, list):
+                        self.custom_json = data
+                        self.is_loaded = True
+                        logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
+                        return True
+                    else:
+                        logger.error("Invalid tonies.custom.json format in file, expected list")
+                        return False
+            else:
+                logger.info("tonies.custom.json file not found, starting with empty list")
+                self.custom_json = []
+                self.is_loaded = True
+                return True
+
+        except Exception as e:
+            logger.error("Error loading tonies.custom.json from file: %s", e)
             return False
 
     def save_to_file(self, file_path: str) -> bool:
@@ -146,9 +790,7 @@ class ToniesJsonHandler:
             return False
 
         try:
-            # Ensure the directory exists
             os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
-
             logger.info("Saving tonies.custom.json to file: %s", file_path)
             with open(file_path, 'w', encoding='utf-8') as f:
                 json.dump(self.custom_json, f, indent=2, ensure_ascii=False)
@@ -163,6 +805,8 @@ class ToniesJsonHandler:
     def add_entry_from_taf(self, taf_file: str, input_files: List[str], artwork_url: Optional[str] = None) -> bool:
         """
         Add an entry to the custom JSON from a TAF file.
+        If an entry with the same hash exists, it will be updated.
+        If an entry with the same series+episode exists, the new hash will be added to it.
 
         Args:
             taf_file: Path to the TAF file
@@ -182,17 +826,77 @@ class ToniesJsonHandler:
         try:
             logger.info("Adding entry for %s to tonies.custom.json", taf_file)
 
-            logger.debug("Generating article ID")
-            article_id = self._generate_article_id()
-            logger.debug("Generated article ID: %s", article_id)
-
             logger.debug("Extracting metadata from input files")
             metadata = self._extract_metadata_from_files(input_files)
            logger.debug("Extracted metadata: %s", metadata)
+            with open(taf_file, 'rb') as f:
+                taf_hash = hashlib.sha1(f.read()).hexdigest()
 
-
-
-
+            taf_size = os.path.getsize(taf_file)
+            timestamp = int(time.time())
+            series = metadata.get('albumartist', metadata.get('artist', 'Unknown Artist'))
+            episode = metadata.get('album', os.path.splitext(os.path.basename(taf_file))[0])
+            track_desc = metadata.get('track_descriptions', [])
+            language = self._determine_language(metadata)
+            category = self._determine_category(metadata)
+            age = self._estimate_age(metadata)
+            new_id_entry = {
+                "audio-id": timestamp,
+                "hash": taf_hash,
+                "size": taf_size,
+                "tracks": len(track_desc),
+                "confidence": 1
+            }
+            existing_entry, entry_idx, data_idx = self.find_entry_by_hash(taf_hash)
+            if existing_entry:
+                logger.info("Found existing entry with the same hash, updating it")
+                data = existing_entry['data'][data_idx]
+                if artwork_url and artwork_url != data.get('image', ''):
+                    logger.debug("Updating artwork URL")
+                    data['image'] = artwork_url
+                if track_desc and track_desc != data.get('track-desc', []):
+                    logger.debug("Updating track descriptions")
+                    data['track-desc'] = track_desc
+
+                logger.info("Successfully updated existing entry for %s", taf_file)
+                return True
+            existing_entry, entry_idx, data_idx = self.find_entry_by_series_episode(series, episode)
+            if existing_entry:
+                logger.info("Found existing entry with the same series/episode, adding hash to it")
+                existing_data = existing_entry['data'][data_idx]
+                if 'ids' not in existing_data:
+                    existing_data['ids'] = []
+
+                existing_data['ids'].append(new_id_entry)
+                if artwork_url and artwork_url != existing_data.get('image', ''):
+                    logger.debug("Updating artwork URL")
+                    existing_data['image'] = artwork_url
+
+                logger.info("Successfully added new hash to existing entry for %s", taf_file)
+                return True
+            logger.debug("No existing entry found, creating new entry")
+            logger.debug("Generating article ID")
+            article_id = self._generate_article_id()
+            logger.debug("Generated article ID: %s", article_id)
+
+            entry = {
+                "article": article_id,
+                "data": [
+                    {
+                        "series": series,
+                        "episode": episode,
+                        "release": timestamp,
+                        "language": language,
+                        "category": category,
+                        "runtime": self._calculate_runtime(input_files),
+                        "age": age,
+                        "origin": "custom",
+                        "image": artwork_url if artwork_url else "",
+                        "track-desc": track_desc,
+                        "ids": [new_id_entry]
+                    }
+                ]
+            }
 
             self.custom_json.append(entry)
             logger.debug("Added entry to custom_json (new length: %d)", len(self.custom_json))
@@ -214,8 +918,6 @@ class ToniesJsonHandler:
             Unique article ID in the format "tt-42" followed by sequential number starting from 0
         """
         logger.trace("Entering _generate_article_id()")
-
-        # Find the highest sequential number for tt-42 IDs
         highest_num = -1
         pattern = re.compile(r'tt-42(\d+)')
 
@@ -234,11 +936,7 @@ class ToniesJsonHandler:
                     pass
 
         logger.debug("Highest tt-42 ID number found: %d", highest_num)
-
-        # Generate the next sequential number
         next_num = highest_num + 1
-
-        # Format the ID with leading zeros to make it 10 digits
         result = f"tt-42{next_num:010d}"
         logger.debug("Generated new article ID: %s", result)
 
@@ -246,82 +944,61 @@ class ToniesJsonHandler:
         return result
 
     def _extract_metadata_from_files(self, input_files: List[str]) -> Dict[str, Any]:
-
-
-        # If there are multiple files in the same folder, use album info
-        if len(input_files) > 1 and os.path.dirname(input_files[0]) == os.path.dirname(input_files[-1]):
-            folder_path = os.path.dirname(input_files[0])
-            album_info = extract_album_info(folder_path)
-            metadata.update(album_info)
+        """
+        Extract metadata from audio files to use in the custom JSON entry.
 
-
+        Args:
+            input_files: List of paths to audio files
+
+        Returns:
+            Dictionary containing metadata extracted from files
+        """
+        metadata = {}
         track_descriptions = []
         for file_path in input_files:
             tags = get_file_tags(file_path)
+            # Extract track descriptions
             if 'title' in tags:
                 track_descriptions.append(tags['title'])
             else:
-                # Use filename as fallback
                 filename = os.path.splitext(os.path.basename(file_path))[0]
                 track_descriptions.append(filename)
-
-            # Extract language and genre from the first file if not already present
-            if 'language' not in metadata and 'language' in tags:
-                metadata['language'] = tags['language']
 
-
-
+            # Copy all available tags, but don't overwrite existing ones
+            for tag_name, tag_value in tags.items():
+                if tag_name not in metadata:
+                    metadata[tag_name] = tag_value
 
         metadata['track_descriptions'] = track_descriptions
 
         return metadata
 
     def _determine_language(self, metadata: Dict[str, Any]) -> str:
-        # Check for language tag in metadata
         if 'language' in metadata:
            lang_value = metadata['language'].lower().strip()
            if lang_value in LANGUAGE_MAPPING:
                return LANGUAGE_MAPPING[lang_value]
-
-        # If not found, try to use system locale
         try:
            system_lang, _ = locale.getdefaultlocale()
            if system_lang:
                lang_code = system_lang.split('_')[0].lower()
                if lang_code in LANGUAGE_MAPPING:
                    return LANGUAGE_MAPPING[lang_code]
-                # Try to map system language code to tonie format
-                if lang_code == 'de':
-                    return 'de-de'
-                elif lang_code == 'en':
-                    return 'en-us'
-                elif lang_code == 'fr':
-                    return 'fr-fr'
-                elif lang_code == 'it':
-                    return 'it-it'
-                elif lang_code == 'es':
-                    return 'es-es'
        except Exception:
            pass
-
-        # Default to German as it's most common for Tonies
        return 'de-de'
 
     def _determine_category(self, metadata: Dict[str, Any]) -> str:
-        # Check for genre tag in metadata
        if 'genre' in metadata:
            genre_value = metadata['genre'].lower().strip()
 
-            # Check for direct mapping
            if genre_value in GENRE_MAPPING:
                return GENRE_MAPPING[genre_value]
 
-            # Check for partial matching
            for genre_key, category in GENRE_MAPPING.items():
                if genre_key in genre_value:
                    return category
-
-            # Check for common keywords in the genre
+
            if any(keyword in genre_value for keyword in ['musik', 'song', 'music', 'lied']):
                return 'music'
            elif any(keyword in genre_value for keyword in ['hörspiel', 'hörspiele', 'audio play']):
@@ -334,8 +1011,6 @@ class ToniesJsonHandler:
                return 'Wissen & Hörmagazine'
            elif any(keyword in genre_value for keyword in ['schlaf', 'sleep', 'meditation']):
                return 'Schlaflieder & Entspannung'
-
-        # Default to standard category for most custom content
        return 'Hörspiele & Hörbücher'
 
    def _estimate_age(self, metadata: Dict[str, Any]) -> int:
@@ -363,67 +1038,135 @@ class ToniesJsonHandler:
 
        return default_age
 
-    def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def find_entry_by_hash(self, taf_hash: str) -> tuple[Optional[Dict[str, Any]], Optional[int], Optional[int]]:
+        """
+        Find an entry in the custom JSON by TAF hash.
+
+        Args:
+            taf_hash: SHA1 hash of the TAF file to find
+
+        Returns:
+            Tuple of (entry, entry_index, data_index) if found, or (None, None, None) if not found
+        """
+        logger.trace("Searching for entry with hash %s", taf_hash)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if 'data' not in entry:
+                continue
+
+            for data_idx, data in enumerate(entry['data']):
+                if 'ids' not in data:
+                    continue
+
+                for id_entry in data['ids']:
+                    if id_entry.get('hash') == taf_hash:
+                        logger.debug("Found existing entry with matching hash %s", taf_hash)
+                        return entry, entry_idx, data_idx
+
+        logger.debug("No entry found with hash %s", taf_hash)
+        return None, None, None
+
+    def find_entry_by_series_episode(self, series: str, episode: str) -> tuple[Optional[Dict[str, Any]], Optional[int], Optional[int]]:
+        """
+        Find an entry in the custom JSON by series and episode.
+
+        Args:
+            series: Series name to find
+            episode: Episode name to find
+
+        Returns:
+            Tuple of (entry, entry_index, data_index) if found, or (None, None, None) if not found
+        """
+        logger.trace("Searching for entry with series='%s', episode='%s'", series, episode)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if 'data' not in entry:
+                continue
+
+            for data_idx, data in enumerate(entry['data']):
+                if data.get('series') == series and data.get('episode') == episode:
+                    logger.debug("Found existing entry with matching series/episode: %s / %s", series, episode)
+                    return entry, entry_idx, data_idx
 
-
+        logger.debug("No entry found with series/episode: %s / %s", series, episode)
+        return None, None, None
 
+    def _calculate_runtime(self, input_files: List[str]) -> int:
+        """
+        Calculate the total runtime in minutes from a list of audio files.
+
+        Args:
+            input_files: List of paths to audio files
+
+        Returns:
+            Total runtime in minutes (rounded to the nearest minute)
+        """
+        logger.trace("Entering _calculate_runtime() with %d input files", len(input_files))
+        total_runtime_seconds = 0
+        processed_files = 0
+
+        try:
+            logger.debug("Starting runtime calculation for %d audio files", len(input_files))
+
+            for i, file_path in enumerate(input_files):
+                logger.trace("Processing file %d/%d: %s", i+1, len(input_files), file_path)
+
+                if not os.path.exists(file_path):
+                    logger.warning("File does not exist: %s", file_path)
+                    continue
+
+                try:
+                    logger.trace("Loading audio file with mutagen: %s", file_path)
+                    audio = mutagen.File(file_path)
+
+                    if audio is None:
+                        logger.warning("Mutagen could not identify file format: %s", file_path)
+                        continue
+
+                    if not hasattr(audio, 'info'):
+                        logger.warning("Audio file has no info attribute: %s", file_path)
+                        continue
+
+                    if not hasattr(audio.info, 'length'):
+                        logger.warning("Audio info has no length attribute: %s", file_path)
+                        continue
+
+                    file_runtime_seconds = int(audio.info.length)
+                    total_runtime_seconds += file_runtime_seconds
+                    processed_files += 1
+
+                    logger.debug("File %s: runtime=%d seconds, format=%s",
+                                 file_path, file_runtime_seconds, audio.__class__.__name__)
+                    logger.trace("Current total runtime: %d seconds after %d/%d files",
+                                 total_runtime_seconds, i+1, len(input_files))
+
+                except Exception as e:
+                    logger.warning("Error processing file %s: %s", file_path, e)
+                    logger.trace("Exception details for %s: %s", file_path, str(e), exc_info=True)
 
-
-
+            total_runtime_minutes = round(total_runtime_seconds / 60)
+
+            logger.info("Calculated total runtime: %d seconds (%d minutes) from %d/%d files",
+                        total_runtime_seconds, total_runtime_minutes, processed_files, len(input_files))
+
+        except ImportError as e:
+            logger.warning("Mutagen library not available, cannot calculate runtime: %s", str(e))
+            return 0
+        except Exception as e:
+            logger.error("Unexpected error during runtime calculation: %s", str(e))
+            logger.trace("Exception details: %s", str(e), exc_info=True)
+            return 0
+
+        logger.trace("Exiting _calculate_runtime() with total runtime=%d minutes", total_runtime_minutes)
+        return total_runtime_minutes
+
+def fetch_and_update_tonies_json_v1(client: TeddyCloudClient, taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
                                     artwork_url: Optional[str] = None, output_dir: Optional[str] = None) -> bool:
     """
-    Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry.
+    Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry in v1 format.
 
     Args:
-
-        ignore_ssl_verify: If True, SSL certificate verification will be disabled
+        client: TeddyCloudClient instance to use for API communication
        taf_file: Path to the TAF file to add
        input_files: List of input audio files used to create the TAF
        artwork_url: URL of the uploaded artwork (if any)
@@ -432,71 +1175,194 @@ def fetch_and_update_tonies_json(teddycloud_url: Optional[str] = None, ignore_ss
    Returns:
        True if successful, False otherwise
    """
-
+    logger.trace("Entering fetch_and_update_tonies_json_v1 with client=%s, taf_file=%s, input_files=%s, artwork_url=%s, output_dir=%s",
+                 client, taf_file, input_files, artwork_url, output_dir)
 
-
+    handler = ToniesJsonHandlerv1(client)
    if not output_dir:
        output_dir = './output'
-
-
+        logger.debug("No output directory specified, using default: %s", output_dir)
+
    os.makedirs(output_dir, exist_ok=True)
+    logger.debug("Ensuring output directory exists: %s", output_dir)
 
-    # Create the full path for the JSON file
    json_file_path = os.path.join(output_dir, 'tonies.custom.json')
+    logger.debug("JSON file path: %s", json_file_path)
 
    loaded_from_server = False
+    if client:
+        logger.info("Attempting to load tonies.custom.json from server")
+        loaded_from_server = handler.load_from_server()
+        logger.debug("Load from server result: %s", "success" if loaded_from_server else "failed")
+    else:
+        logger.debug("No client provided, skipping server load")
+
+    if os.path.exists(json_file_path):
+        logger.info("Local tonies.custom.json file found, merging with server content")
+        logger.debug("Local file exists at %s, size: %d bytes", json_file_path, os.path.getsize(json_file_path))
+
+        local_handler = ToniesJsonHandlerv1()
+        if local_handler.load_from_file(json_file_path):
+            logger.debug("Successfully loaded local file with %d entries", len(local_handler.custom_json))
+
+            if loaded_from_server:
+                logger.debug("Merging local entries with server entries")
+                server_hashes = set()
+                for entry in handler.custom_json:
+                    if 'hash' in entry:
+                        for hash_value in entry['hash']:
+                            server_hashes.add(hash_value)
+
+                logger.debug("Found %d unique hash values from server", len(server_hashes))
+
+                added_count = 0
+                for local_entry in local_handler.custom_json:
+                    if 'hash' in local_entry:
+                        has_unique_hash = False
+                        for hash_value in local_entry['hash']:
+                            if hash_value not in server_hashes:
+                                has_unique_hash = True
+                                break
+
+                        if has_unique_hash:
+                            logger.trace("Adding local-only entry to merged content")
+                            handler.custom_json.append(local_entry)
+                            added_count += 1
+
+                logger.debug("Added %d local-only entries to merged content", added_count)
+            else:
+                logger.debug("Using only local entries (server load failed or no client)")
+                handler.custom_json = local_handler.custom_json
+                handler.is_loaded = True
+                logger.info("Using local tonies.custom.json content")
+    elif not loaded_from_server:
+        logger.debug("No local file found and server load failed, starting with empty list")
+        handler.custom_json = []
+        handler.is_loaded = True
+        logger.info("No tonies.custom.json found, starting with empty list")
+
+    if taf_file and input_files and handler.is_loaded:
+        logger.debug("Adding new entry for TAF file: %s", taf_file)
+        logger.debug("Using %d input files for metadata extraction", len(input_files))
+
+        if not handler.add_entry_from_taf(taf_file, input_files, artwork_url):
+            logger.error("Failed to add entry to tonies.custom.json")
+            logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=False (failed to add entry)")
+            return False
+
+        logger.debug("Successfully added new entry for %s", taf_file)
+    else:
+        if not taf_file:
+            logger.debug("No TAF file provided, skipping add entry step")
+        elif not input_files:
+            logger.debug("No input files provided, skipping add entry step")
+        elif not handler.is_loaded:
+            logger.debug("Handler not properly loaded, skipping add entry step")
+
+    logger.debug("Saving updated tonies.custom.json to %s", json_file_path)
+    if not handler.save_to_file(json_file_path):
+        logger.error("Failed to save tonies.custom.json to file")
+        logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=False (failed to save file)")
+        return False
 
-
-
+    logger.debug("Successfully saved tonies.custom.json with %d entries", len(handler.custom_json))
+    logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=True")
+    return True
+
+def fetch_and_update_tonies_json_v2(client: TeddyCloudClient, taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
+                                    artwork_url: Optional[str] = None, output_dir: Optional[str] = None) -> bool:
+    """
+    Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry.
+
+    Args:
+        client: TeddyCloudClient instance to use for API communication
+        taf_file: Path to the TAF file to add
+        input_files: List of input audio files used to create the TAF
+        artwork_url: URL of the uploaded artwork (if any)
+        output_dir: Directory where to save the tonies.custom.json file (defaults to './output')
+
+    Returns:
+        True if successful, False otherwise
+    """
+    logger.trace("Entering fetch_and_update_tonies_json with client=%s, taf_file=%s, input_files=%s, artwork_url=%s, output_dir=%s",
+                 client, taf_file, input_files, artwork_url, output_dir)
+
+    handler = ToniesJsonHandlerv2(client)
+    if not output_dir:
+        output_dir = './output'
+        logger.debug("No output directory specified, using default: %s", output_dir)
+
+    os.makedirs(output_dir, exist_ok=True)
+    logger.debug("Ensuring output directory exists: %s", output_dir)
+
+    json_file_path = os.path.join(output_dir, 'tonies.custom.json')
+    logger.debug("JSON file path: %s", json_file_path)
+
+    loaded_from_server = False
+    if client:
        logger.info("Attempting to load tonies.custom.json from server")
        loaded_from_server = handler.load_from_server()
+        logger.debug("Load from server result: %s", "success" if loaded_from_server else "failed")
+    else:
+        logger.debug("No client provided, skipping server load")
 
-    # Step 2: If we have a local file, merge with the server content
    if os.path.exists(json_file_path):
        logger.info("Local tonies.custom.json file found, merging with server content")
+        logger.debug("Local file exists at %s, size: %d bytes", json_file_path, os.path.getsize(json_file_path))
 
-
-        local_handler = ToniesJsonHandler()
+        local_handler = ToniesJsonHandlerv2()
        if local_handler.load_from_file(json_file_path):
+            logger.debug("Successfully loaded local file with %d entries", len(local_handler.custom_json))
+
            if loaded_from_server:
-
-                # Use server-loaded content as base, then add any local entries not in server version
+                logger.debug("Merging local entries with server entries")
                server_article_ids = {entry.get('article') for entry in handler.custom_json}
+                logger.debug("Found %d unique article IDs from server", len(server_article_ids))
+
+                added_count = 0
                for local_entry in local_handler.custom_json:
                    local_article_id = local_entry.get('article')
                    if local_article_id not in server_article_ids:
-                        logger.
+                        logger.trace("Adding local-only entry %s to merged content", local_article_id)
                        handler.custom_json.append(local_entry)
+                        added_count += 1
+
+                logger.debug("Added %d local-only entries to merged content", added_count)
            else:
-
+                logger.debug("Using only local entries (server load failed or no client)")
                handler.custom_json = local_handler.custom_json
                handler.is_loaded = True
                logger.info("Using local tonies.custom.json content")
    elif not loaded_from_server:
-
+        logger.debug("No local file found and server load failed, starting with empty list")
        handler.custom_json = []
        handler.is_loaded = True
        logger.info("No tonies.custom.json found, starting with empty list")
 
-    # Add entry if needed
    if taf_file and input_files and handler.is_loaded:
+        logger.debug("Adding new entry for TAF file: %s", taf_file)
+        logger.debug("Using %d input files for metadata extraction", len(input_files))
+
        if not handler.add_entry_from_taf(taf_file, input_files, artwork_url):
            logger.error("Failed to add entry to tonies.custom.json")
+            logger.trace("Exiting fetch_and_update_tonies_json with success=False (failed to add entry)")
            return False
+
+        logger.debug("Successfully added new entry for %s", taf_file)
+    else:
+        if not taf_file:
+            logger.debug("No TAF file provided, skipping add entry step")
+        elif not input_files:
+            logger.debug("No input files provided, skipping add entry step")
+        elif not handler.is_loaded:
+            logger.debug("Handler not properly loaded, skipping add entry step")
 
-
+    logger.debug("Saving updated tonies.custom.json to %s", json_file_path)
    if not handler.save_to_file(json_file_path):
        logger.error("Failed to save tonies.custom.json to file")
+        logger.trace("Exiting fetch_and_update_tonies_json with success=False (failed to save file)")
        return False
 
-
-
-    #if teddycloud_url and handler.is_loaded:
-    try:
-        if not handler.save_to_server():
-            logger.warning("Could not save tonies.custom.json to server")
-    except Exception as e:
-        logger.warning("Error when saving tonies.custom.json to server: %s", e)
-    # Don't fail the operation if server upload fails
-
+    logger.debug("Successfully saved tonies.custom.json with %d entries", len(handler.custom_json))
+    logger.trace("Exiting fetch_and_update_tonies_json with success=True")
    return True