TonieToolbox 0.5.0a1__py3-none-any.whl → 0.5.1__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- TonieToolbox/__init__.py +1 -1
- TonieToolbox/__main__.py +88 -68
- TonieToolbox/audio_conversion.py +75 -7
- TonieToolbox/media_tags.py +5 -4
- TonieToolbox/recursive_processor.py +10 -11
- TonieToolbox/tonie_file.py +17 -29
- TonieToolbox/tonies_json.py +799 -10
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.5.1.dist-info}/METADATA +141 -98
- tonietoolbox-0.5.1.dist-info/RECORD +26 -0
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.5.1.dist-info}/WHEEL +1 -1
- tonietoolbox-0.5.0a1.dist-info/RECORD +0 -26
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.5.1.dist-info}/entry_points.txt +0 -0
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.5.1.dist-info}/licenses/LICENSE.md +0 -0
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.5.1.dist-info}/top_level.txt +0 -0
TonieToolbox/tonies_json.py
CHANGED
@@ -21,7 +21,688 @@ from .teddycloud import TeddyCloudClient
 
 logger = get_logger('tonies_json')
 
-class ToniesJsonHandler:
+class ToniesJsonHandlerv1:
+    """Handler for tonies.custom.json operations using v1 format."""
+
+    def __init__(self, client: TeddyCloudClient = None):
+        """
+        Initialize the handler.
+
+        Args:
+            client: TeddyCloudClient instance to use for API communication
+        """
+        self.client = client
+        self.custom_json = []
+        self.is_loaded = False
+
+    def load_from_server(self) -> bool:
+        """
+        Load tonies.custom.json from the TeddyCloud server.
+
+        Returns:
+            True if successful, False otherwise
+        """
+        if self.client is None:
+            logger.error("Cannot load from server: no client provided")
+            return False
+
+        try:
+            result = self.client.get_tonies_custom_json()
+            if result is not None:
+                # Convert v2 format to v1 format if necessary
+                if len(result) > 0 and "data" in result[0]:
+                    logger.debug("Converting v2 format from server to v1 format")
+                    self.custom_json = self._convert_v2_to_v1(result)
+                else:
+                    self.custom_json = result
+                self.is_loaded = True
+                logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
+                return True
+            else:
+                logger.error("Failed to load tonies.custom.json from server")
+                return False
+
+        except Exception as e:
+            logger.error("Error loading tonies.custom.json: %s", e)
+            return False
+
+    def load_from_file(self, file_path: str) -> bool:
+        """
+        Load tonies.custom.json from a local file.
+
+        Args:
+            file_path: Path to the tonies.custom.json file
+
+        Returns:
+            True if successful, False otherwise
+        """
+        try:
+            if os.path.exists(file_path):
+                logger.info("Loading tonies.custom.json from file: %s", file_path)
+                with open(file_path, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                    if isinstance(data, list):
+                        # Convert v2 format to v1 format if necessary
+                        if len(data) > 0 and "data" in data[0]:
+                            logger.debug("Converting v2 format from file to v1 format")
+                            self.custom_json = self._convert_v2_to_v1(data)
+                        else:
+                            self.custom_json = data
+                        self.is_loaded = True
+                        logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
+                        return True
+                    else:
+                        logger.error("Invalid tonies.custom.json format in file, expected list")
+                        return False
+            else:
+                logger.info("tonies.custom.json file not found, starting with empty list")
+                self.custom_json = []
+                self.is_loaded = True
+                return True
+
+        except Exception as e:
+            logger.error("Error loading tonies.custom.json from file: %s", e)
+            return False
+
+    def save_to_file(self, file_path: str) -> bool:
+        """
+        Save tonies.custom.json to a local file.
+
+        Args:
+            file_path: Path where to save the tonies.custom.json file
+
+        Returns:
+            True if successful, False otherwise
+        """
+        if not self.is_loaded:
+            logger.error("Cannot save tonies.custom.json: data not loaded")
+            return False
+
+        try:
+            os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
+            logger.info("Saving tonies.custom.json to file: %s", file_path)
+            with open(file_path, 'w', encoding='utf-8') as f:
+                json.dump(self.custom_json, f, indent=2, ensure_ascii=False)
+
+            logger.info("Successfully saved tonies.custom.json to file")
+            return True
+
+        except Exception as e:
+            logger.error("Error saving tonies.custom.json to file: %s", e)
+            return False
+
+    def renumber_series_entries(self, series: str):
+        """
+        Re-sort and re-number all entries for a series by year (chronological),
+        with entries without a year coming last.
+        """
+        # Collect all entries for the series
+        series_entries = [entry for entry in self.custom_json if entry.get('series') == series]
+        # Separate entries with and without year
+        with_year = []
+        without_year = []
+        for entry in series_entries:
+            year = self._extract_year_from_text(entry.get('title', ''))
+            if not year:
+                year = self._extract_year_from_text(entry.get('episodes', ''))
+            if year:
+                with_year.append((year, entry))
+            else:
+                without_year.append(entry)
+        # Sort entries with year
+        with_year.sort(key=lambda x: x[0])
+        # Assign new numbers
+        new_no = 1
+        for _, entry in with_year:
+            entry['no'] = str(new_no)
+            new_no += 1
+        for entry in without_year:
+            entry['no'] = str(new_no)
+            new_no += 1
+
+    def add_entry_from_taf(self, taf_file: str, input_files: List[str], artwork_url: Optional[str] = None) -> bool:
+        """
+        Add an entry to the custom JSON from a TAF file.
+        If an entry with the same hash exists, it will be updated.
+        If an entry with the same series+episodes exists, the new hash will be added to it.
+
+        Args:
+            taf_file: Path to the TAF file
+            input_files: List of input audio files used to create the TAF
+            artwork_url: URL of the uploaded artwork (if any)
+
+        Returns:
+            True if successful, False otherwise
+        """
+        logger.trace("Entering add_entry_from_taf() with taf_file=%s, input_files=%s, artwork_url=%s",
+                     taf_file, input_files, artwork_url)
+
+        if not self.is_loaded:
+            logger.error("Cannot add entry: tonies.custom.json not loaded")
+            return False
+
+        try:
+            logger.info("Adding entry for %s to tonies.custom.json", taf_file)
+            logger.debug("Extracting metadata from input files")
+            metadata = self._extract_metadata_from_files(input_files)
+            logger.debug("Extracted metadata: %s", metadata)
+            with open(taf_file, 'rb') as f:
+                taf_hash = hashlib.sha1(f.read()).hexdigest().upper()
+
+            timestamp = str(int(time.time()))
+            series = metadata.get('albumartist', metadata.get('artist', 'Unknown Artist'))
+            episodes = metadata.get('album', os.path.splitext(os.path.basename(taf_file))[0])
+            copyright = metadata.get('copyright', '')
+
+            # Extract year from metadata or from episode title
+            year = None
+            year_str = metadata.get('year', metadata.get('date', None))
+
+            # Try to convert metadata year to int if it exists
+            if year_str:
+                try:
+                    # Extract 4 digits if the date includes more information (e.g., "2022-05-01")
+                    import re
+                    year_match = re.search(r'(\d{4})', str(year_str))
+                    if year_match:
+                        year = int(year_match.group(1))
+                    else:
+                        # If year is just a number, try to format it properly
+                        year_val = int(year_str)
+                        if 0 <= year_val <= 99:  # Assume 2-digit year format
+                            if year_val <= 25:  # Arbitrary cutoff for 20xx vs 19xx
+                                year = 2000 + year_val
+                            else:
+                                year = 1900 + year_val
+                        else:
+                            year = year_val
+                except (ValueError, TypeError):
+                    logger.debug("Could not convert metadata year '%s' to integer", year_str)
+
+            if not year:
+                year_from_episodes = self._extract_year_from_text(episodes)
+                year_from_copyright = self._extract_year_from_text(copyright)
+                if year_from_episodes:
+                    year = year_from_episodes
+                else:
+                    year = year_from_copyright
+
+            # Ensure year is in YYYY format
+            year_formatted = None
+            if year:
+                # Validate the year is in the reasonable range
+                if 1900 <= year <= 2099:
+                    year_formatted = f"{year:04d}"  # Format as 4 digits
+                    logger.debug("Formatted year '%s' as '%s'", year, year_formatted)
+                else:
+                    logger.warning("Year '%s' outside reasonable range (1900-2099), ignoring", year)
+
+            if year_formatted:
+                title = f"{series} - {year_formatted} - {episodes}"
+            else:
+                title = f"{series} - {episodes}"
+
+            tracks = metadata.get('track_descriptions', [])
+            language = self._determine_language(metadata)
+            category = self._determine_category_v1(metadata)
+
+            existing_entry, entry_idx = self.find_entry_by_hash(taf_hash)
+            if existing_entry:
+                logger.info("Found existing entry with the same hash, updating it")
+                if artwork_url and artwork_url != existing_entry.get('pic', ''):
+                    logger.debug("Updating artwork URL")
+                    existing_entry['pic'] = artwork_url
+                if tracks and tracks != existing_entry.get('tracks', []):
+                    logger.debug("Updating track descriptions")
+                    existing_entry['tracks'] = tracks
+                if episodes and episodes != existing_entry.get('episodes', ''):
+                    logger.debug("Updating episodes")
+                    existing_entry['episodes'] = episodes
+                if series and series != existing_entry.get('series', ''):
+                    logger.debug("Updating series")
+                    existing_entry['series'] = series
+                logger.info("Successfully updated existing entry for %s", taf_file)
+                self.renumber_series_entries(series)
+                return True
+
+            existing_entry, entry_idx = self.find_entry_by_series_episodes(series, episodes)
+            if existing_entry:
+                logger.info("Found existing entry with the same series/episodes, adding hash to it")
+                if 'audio_id' not in existing_entry:
+                    existing_entry['audio_id'] = []
+                if 'hash' not in existing_entry:
+                    existing_entry['hash'] = []
+
+                existing_entry['audio_id'].append(timestamp)
+                existing_entry['hash'].append(taf_hash)
+
+                if artwork_url and artwork_url != existing_entry.get('pic', ''):
+                    logger.debug("Updating artwork URL")
+                    existing_entry['pic'] = artwork_url
+
+                logger.info("Successfully added new hash to existing entry for %s", taf_file)
+                self.renumber_series_entries(series)
+                return True
+
+            logger.debug("No existing entry found, creating new entry")
+
+            logger.debug("Generating entry number")
+            entry_no = self._generate_entry_no(series, episodes, year)
+            logger.debug("Generated entry number: %s", entry_no)
+
+            logger.debug("Generating model number")
+            model_number = self._generate_model_number()
+            logger.debug("Generated model number: %s", model_number)
+
+            entry = {
+                "no": entry_no,
+                "model": model_number,
+                "audio_id": [timestamp],
+                "hash": [taf_hash],
+                "title": title,
+                "series": series,
+                "episodes": episodes,
+                "tracks": tracks,
+                "release": timestamp,
+                "language": language,
+                "category": category,
+                "pic": artwork_url if artwork_url else ""
+            }
+
+            self.custom_json.append(entry)
+            logger.debug("Added entry to custom_json (new length: %d)", len(self.custom_json))
+
+            logger.info("Successfully added entry for %s", taf_file)
+            self.renumber_series_entries(series)
+            logger.trace("Exiting add_entry_from_taf() with success=True")
+            return True
+
+        except Exception as e:
+            logger.error("Error adding entry for %s: %s", taf_file, e)
+            logger.trace("Exiting add_entry_from_taf() with success=False due to exception: %s", str(e))
+            return False
+
+    def _generate_entry_no(self, series: str, episodes: str, year: Optional[int] = None) -> str:
+        """
+        Generate an entry number based on specific rules:
+        1. For series entries with years: assign numbers in chronological order (1, 2, 3, etc.)
+        2. For entries without years: assign the next available number after those with years
+
+        Args:
+            series: Series name
+            episodes: Episodes name
+            year: Release year from metadata, if available
+
+        Returns:
+            Generated entry number as string
+        """
+        logger.trace("Entering _generate_entry_no() with series='%s', episodes='%s', year=%s",
+                     series, episodes, year)
+
+        # If we don't have a series name, use a simple approach to get the next number
+        if not series:
+            max_no = 0
+            for entry in self.custom_json:
+                try:
+                    no_value = int(entry.get('no', '0'))
+                    max_no = max(max_no, no_value)
+                except (ValueError, TypeError):
+                    pass
+            return str(max_no + 1)
+
+        logger.debug("Generating entry number for series '%s'", series)
+
+        # Step 1: Collect all existing entries for this series and extract their years
+        series_entries = []
+        used_numbers = set()
+
+        for entry in self.custom_json:
+            entry_series = entry.get('series', '')
+            if entry_series == series:
+                entry_no = entry.get('no', '')
+                try:
+                    entry_no_int = int(entry_no)
+                    used_numbers.add(entry_no_int)
+                except (ValueError, TypeError):
+                    pass
+
+                entry_title = entry.get('title', '')
+                entry_episodes = entry.get('episodes', '')
+
+                # Extract year from title and episodes
+                entry_year = self._extract_year_from_text(entry_title)
+                if not entry_year:
+                    entry_year = self._extract_year_from_text(entry_episodes)
+
+                series_entries.append({
+                    'no': entry_no,
+                    'title': entry_title,
+                    'episodes': entry_episodes,
+                    'year': entry_year
+                })
+
+        # Try to extract year from episodes if not explicitly provided
+        if not year:
+            extracted_year = self._extract_year_from_text(episodes)
+            if extracted_year:
+                year = extracted_year
+                logger.debug("Extracted year %d from episodes '%s'", year, episodes)
+
+        # Step 2: Split entries into those with years and those without
+        entries_with_years = [e for e in series_entries if e['year'] is not None]
+        entries_without_years = [e for e in series_entries if e['year'] is None]
+
+        # Sort entries with years by year (oldest first)
+        entries_with_years.sort(key=lambda x: x['year'])
+
+        logger.debug("Found %d entries with years and %d entries without years",
+                     len(entries_with_years), len(entries_without_years))
+
+        # Step 3: If this entry has a year, determine where it should be inserted
+        if year:
+            # Find position based on chronological order
+            insertion_index = 0
+            while insertion_index < len(entries_with_years) and entries_with_years[insertion_index]['year'] < year:
+                insertion_index += 1
+
+            # Resulting position is 1-indexed
+            position = insertion_index + 1
+            logger.debug("For year %d, calculated position %d based on chronological order", year, position)
+
+            # Now adjust position if needed to avoid conflicts with existing entries
+            while position in used_numbers:
+                position += 1
+                logger.debug("Position %d already used, incrementing to %d", position-1, position)
+
+            logger.debug("Final assigned entry number: %d", position)
+            return str(position)
+        else:
+            # Step 4: If this entry has no year, it should come after all entries with years
+            # Find the highest number used by entries with years
+            years_highest_no = 0
+            if entries_with_years:
+                for i, entry in enumerate(entries_with_years):
+                    try:
+                        expected_no = i + 1  # 1-indexed
+                        actual_no = int(entry['no'])
+                        years_highest_no = max(years_highest_no, actual_no)
+                    except (ValueError, TypeError):
+                        pass
+
+            # Find the highest number used overall
+            highest_no = max(used_numbers) if used_numbers else 0
+
+            # Next number should be at least one more than the highest from entries with years
+            next_no = max(years_highest_no, highest_no) + 1
+
+            logger.debug("No year available, assigned next number: %d", next_no)
+            return str(next_no)
+
+    def _extract_year_from_text(self, text: str) -> Optional[int]:
+        """
+        Extract a year (1900-2099) from text.
+
+        Args:
+            text: The text to extract the year from
+
+        Returns:
+            The extracted year as int, or None if no valid year found
+        """
+        import re
+        year_pattern = re.compile(r'(19\d{2}|20\d{2})')
+        year_match = year_pattern.search(text)
+
+        if year_match:
+            try:
+                extracted_year = int(year_match.group(1))
+                if 1900 <= extracted_year <= 2099:
+                    return extracted_year
+            except (ValueError, TypeError):
+                pass
+
+        return None
+
+    def _format_number(self, number: int, existing_entries: List[Dict[str, Any]]) -> str:
+        """
+        Format a number to match the existing entry number format (e.g., with leading zeros).
+
+        Args:
+            number: The number to format
+            existing_entries: List of existing entries with their numbers
+
+        Returns:
+            Formatted number as string
+        """
+        max_digits = 1
+        for entry in existing_entries:
+            entry_no = entry.get('no', '')
+            if entry_no and isinstance(entry_no, str) and entry_no.isdigit():
+                leading_zeros = len(entry_no) - len(entry_no.lstrip('0'))
+                if leading_zeros > 0:
+                    digits = len(entry_no)
+                    max_digits = max(max_digits, digits)
+        if max_digits > 1:
+            logger.trace("Formatting with %d digits", max_digits)
+            return f"{number:0{max_digits}d}"
+
+        return str(number)
+
+    def _generate_model_number(self) -> str:
+        """
+        Generate a unique model number for a new entry.
+
+        Returns:
+            Unique model number in the format "model-" followed by sequential number with zero padding
+        """
+        logger.trace("Entering _generate_model_number()")
+        highest_num = -1
+        pattern = re.compile(r'tt-42(\d+)')
+
+        logger.debug("Searching for highest tt-42 ID in %d existing entries", len(self.custom_json))
+        for entry in self.custom_json:
+            model = entry.get('model', '')
+            logger.trace("Checking model ID: %s", model)
+            match = pattern.match(model)
+            if match:
+                try:
+                    num = int(match.group(1))
+                    logger.trace("Found numeric part: %d", num)
+                    highest_num = max(highest_num, num)
+                except (IndexError, ValueError) as e:
+                    logger.trace("Failed to parse model ID: %s (%s)", model, str(e))
+                    pass
+
+        logger.debug("Highest tt-42 ID number found: %d", highest_num)
+        next_num = highest_num + 1
+        result = f"tt-42{next_num:010d}"
+        logger.debug("Generated new model ID: %s", result)
+
+        logger.trace("Exiting _generate_model_number() with result=%s", result)
+        return result
+
+    def _determine_category_v1(self, metadata: Dict[str, Any]) -> str:
+        """
+        Determine the category in v1 format.
+
+        Args:
+            metadata: Dictionary containing file metadata
+
+        Returns:
+            Category string in v1 format
+        """
+        if 'genre' in metadata:
+            genre_value = metadata['genre'].lower().strip()
+
+            if any(keyword in genre_value for keyword in ['musik', 'song', 'music', 'lied']):
+                return "music"
+            elif any(keyword in genre_value for keyword in ['hörspiel', 'audio play', 'hörbuch', 'audiobook']):
+                return "audio-play"
+            elif any(keyword in genre_value for keyword in ['märchen', 'fairy', 'tales']):
+                return "fairy-tale"
+            elif any(keyword in genre_value for keyword in ['wissen', 'knowledge', 'learn']):
+                return "knowledge"
+            elif any(keyword in genre_value for keyword in ['schlaf', 'sleep', 'meditation']):
+                return "sleep"
+
+        return "audio-play"
+
+    def find_entry_by_hash(self, taf_hash: str) -> tuple[Optional[Dict[str, Any]], Optional[int]]:
+        """
+        Find an entry in the custom JSON by TAF hash.
+
+        Args:
+            taf_hash: SHA1 hash of the TAF file to find
+
+        Returns:
+            Tuple of (entry, entry_index) if found, or (None, None) if not found
+        """
+        logger.trace("Searching for entry with hash %s", taf_hash)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if 'hash' not in entry:
+                continue
+
+            for hash_value in entry['hash']:
+                if hash_value == taf_hash:
+                    logger.debug("Found existing entry with matching hash %s", taf_hash)
+                    return entry, entry_idx
+
+        logger.debug("No entry found with hash %s", taf_hash)
+        return None, None
+
+    def find_entry_by_series_episodes(self, series: str, episodes: str) -> tuple[Optional[Dict[str, Any]], Optional[int]]:
+        """
+        Find an entry in the custom JSON by series and episodes.
+
+        Args:
+            series: Series name to find
+            episodes: Episodes name to find
+
+        Returns:
+            Tuple of (entry, entry_index) if found, or (None, None) if not found
+        """
+        logger.trace("Searching for entry with series='%s', episodes='%s'", series, episodes)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if entry.get('series') == series and entry.get('episodes') == episodes:
+                logger.debug("Found existing entry with matching series/episodes: %s / %s", series, episodes)
+                return entry, entry_idx
+
+        logger.debug("No entry found with series/episodes: %s / %s", series, episodes)
+        return None, None
+
+    def _extract_metadata_from_files(self, input_files: List[str]) -> Dict[str, Any]:
+        """
+        Extract metadata from audio files to use in the custom JSON entry.
+
+        Args:
+            input_files: List of paths to audio files
+
+        Returns:
+            Dictionary containing metadata extracted from files
+        """
+        metadata = {}
+        track_descriptions = []
+        for file_path in input_files:
+            tags = get_file_tags(file_path)
+            if 'title' in tags:
+                track_descriptions.append(tags['title'])
+            else:
+                filename = os.path.splitext(os.path.basename(file_path))[0]
+                track_descriptions.append(filename)
+            for tag_name, tag_value in tags.items():
+                if tag_name not in metadata:
+                    metadata[tag_name] = tag_value
+
+        metadata['track_descriptions'] = track_descriptions
+
+        return metadata
+
+    def _determine_language(self, metadata: Dict[str, Any]) -> str:
+        if 'language' in metadata:
+            lang_value = metadata['language'].lower().strip()
+            if lang_value in LANGUAGE_MAPPING:
+                return LANGUAGE_MAPPING[lang_value]
+        try:
+            system_lang, _ = locale.getdefaultlocale()
+            if system_lang:
+                lang_code = system_lang.split('_')[0].lower()
+                if lang_code in LANGUAGE_MAPPING:
+                    return LANGUAGE_MAPPING[lang_code]
+        except Exception:
+            pass
+        return 'de-de'
+
+    def _convert_v2_to_v1(self, v2_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+        """
+        Convert data from v2 format to v1 format.
+
+        Args:
+            v2_data: Data in v2 format
+
+        Returns:
+            Converted data in v1 format
+        """
+        v1_data = []
+
+        entry_no = 0
+        for v2_entry in v2_data:
+            if 'data' not in v2_entry:
+                continue
+
+            for v2_data_item in v2_entry['data']:
+                series = v2_data_item.get('series', '')
+                episodes = v2_data_item.get('episode', '')
+                model = v2_data_item.get('article', '')
+                title = f"{series} - {episodes}" if series and episodes else episodes
+
+                v1_entry = {
+                    "no": str(entry_no),
+                    "model": model,
+                    "audio_id": [],
+                    "hash": [],
+                    "title": title,
+                    "series": series,
+                    "episodes": episodes,
+                    "tracks": v2_data_item.get('track-desc', []),
+                    "release": str(v2_data_item.get('release', int(time.time()))),
+                    "language": v2_data_item.get('language', 'de-de'),
+                    "category": self._convert_category_v2_to_v1(v2_data_item.get('category', '')),
+                    "pic": v2_data_item.get('image', '')
+                }
+                if 'ids' in v2_data_item:
+                    for id_entry in v2_data_item['ids']:
+                        if 'audio-id' in id_entry:
+                            v1_entry['audio_id'].append(str(id_entry['audio-id']))
+                        if 'hash' in id_entry:
+                            v1_entry['hash'].append(id_entry['hash'].upper())
+
+                v1_data.append(v1_entry)
+                entry_no += 1
+
+        return v1_data
+
+    def _convert_category_v2_to_v1(self, v2_category: str) -> str:
+        """
+        Convert category from v2 format to v1 format.
+
+        Args:
+            v2_category: Category in v2 format
+
+        Returns:
+            Category in v1 format
+        """
+        v2_to_v1_mapping = {
+            "music": "music",
+            "Hörspiele & Hörbücher": "audio-play",
+            "Schlaflieder & Entspannung": "sleep",
+            "Wissen & Hörmagazine": "knowledge",
+            "Märchen": "fairy-tale"
+        }
+
+        return v2_to_v1_mapping.get(v2_category, "audio-play")
+
+class ToniesJsonHandlerv2:
     """Handler for tonies.custom.json operations."""
 
     def __init__(self, client: TeddyCloudClient = None):
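Review note: taken together, the new ToniesJsonHandlerv1 class forms a small load, mutate, save API. A minimal usage sketch under assumed inputs (the file names, the input list, and the offline no-client mode are illustrative, not part of this diff):

# Hypothetical driver for the ToniesJsonHandlerv1 class added above.
from TonieToolbox.tonies_json import ToniesJsonHandlerv1

handler = ToniesJsonHandlerv1()  # no TeddyCloud client: purely file-based
if handler.load_from_file('./output/tonies.custom.json'):
    # add_entry_from_taf() SHA1-hashes the TAF, pulls tags from the source
    # audio files, then either updates a same-hash entry, appends the hash
    # to a same-series/episodes entry, or creates a new entry with a
    # generated "no" and a "tt-42..." model number.
    if handler.add_entry_from_taf(
        'output/example.taf',                              # assumed path
        ['input/01 - Intro.ogg', 'input/02 - Story.ogg'],  # assumed inputs
        artwork_url=None,
    ):
        handler.save_to_file('./output/tonies.custom.json')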
@@ -276,17 +957,17 @@ class ToniesJsonHandler:
         track_descriptions = []
         for file_path in input_files:
             tags = get_file_tags(file_path)
+            # Extract track descriptions
             if 'title' in tags:
                 track_descriptions.append(tags['title'])
             else:
                 filename = os.path.splitext(os.path.basename(file_path))[0]
                 track_descriptions.append(filename)
-
-            if 'language' not in metadata and 'language' in tags:
-                metadata['language'] = tags['language']
 
-
-
+            # Copy all available tags, but don't overwrite existing ones
+            for tag_name, tag_value in tags.items():
+                if tag_name not in metadata:
+                    metadata[tag_name] = tag_value
 
         metadata['track_descriptions'] = track_descriptions
 
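Review note: this hunk replaces the special-cased copy of the 'language' tag with a generic first-file-wins merge of all tags. A standalone illustration of that behavior (tag values invented):

# First-file-wins tag merge, as in the rewritten loop above (values invented).
metadata = {}
for tags in [{'artist': 'A', 'album': 'X'}, {'artist': 'B', 'language': 'deu'}]:
    for tag_name, tag_value in tags.items():
        if tag_name not in metadata:
            metadata[tag_name] = tag_value
print(metadata)  # {'artist': 'A', 'album': 'X', 'language': 'deu'}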
@@ -463,7 +1144,6 @@ class ToniesJsonHandler:
             logger.warning("Error processing file %s: %s", file_path, e)
             logger.trace("Exception details for %s: %s", file_path, str(e), exc_info=True)
 
-        # Convert seconds to minutes, rounding to nearest minute
        total_runtime_minutes = round(total_runtime_seconds / 60)
 
         logger.info("Calculated total runtime: %d seconds (%d minutes) from %d/%d files",
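Review note: this hunk only drops an explanatory comment; the nearest-minute rounding itself is unchanged. A quick check of the behavior (illustrative values):

# Nearest-minute rounding as used above, not truncation.
print(round(95 / 60))  # 2 -- 1.58 minutes rounds up
print(round(29 / 60))  # 0 -- under half a minute rounds down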
@@ -480,7 +1160,116 @@ class ToniesJsonHandler:
         logger.trace("Exiting _calculate_runtime() with total runtime=%d minutes", total_runtime_minutes)
         return total_runtime_minutes
 
-def fetch_and_update_tonies_json(client: TeddyCloudClient, taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
+def fetch_and_update_tonies_json_v1(client: TeddyCloudClient, taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
+                                    artwork_url: Optional[str] = None, output_dir: Optional[str] = None) -> bool:
+    """
+    Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry in v1 format.
+
+    Args:
+        client: TeddyCloudClient instance to use for API communication
+        taf_file: Path to the TAF file to add
+        input_files: List of input audio files used to create the TAF
+        artwork_url: URL of the uploaded artwork (if any)
+        output_dir: Directory where to save the tonies.custom.json file (defaults to './output')
+
+    Returns:
+        True if successful, False otherwise
+    """
+    logger.trace("Entering fetch_and_update_tonies_json_v1 with client=%s, taf_file=%s, input_files=%s, artwork_url=%s, output_dir=%s",
+                 client, taf_file, input_files, artwork_url, output_dir)
+
+    handler = ToniesJsonHandlerv1(client)
+    if not output_dir:
+        output_dir = './output'
+        logger.debug("No output directory specified, using default: %s", output_dir)
+
+    os.makedirs(output_dir, exist_ok=True)
+    logger.debug("Ensuring output directory exists: %s", output_dir)
+
+    json_file_path = os.path.join(output_dir, 'tonies.custom.json')
+    logger.debug("JSON file path: %s", json_file_path)
+
+    loaded_from_server = False
+    if client:
+        logger.info("Attempting to load tonies.custom.json from server")
+        loaded_from_server = handler.load_from_server()
+        logger.debug("Load from server result: %s", "success" if loaded_from_server else "failed")
+    else:
+        logger.debug("No client provided, skipping server load")
+
+    if os.path.exists(json_file_path):
+        logger.info("Local tonies.custom.json file found, merging with server content")
+        logger.debug("Local file exists at %s, size: %d bytes", json_file_path, os.path.getsize(json_file_path))
+
+        local_handler = ToniesJsonHandlerv1()
+        if local_handler.load_from_file(json_file_path):
+            logger.debug("Successfully loaded local file with %d entries", len(local_handler.custom_json))
+
+            if loaded_from_server:
+                logger.debug("Merging local entries with server entries")
+                server_hashes = set()
+                for entry in handler.custom_json:
+                    if 'hash' in entry:
+                        for hash_value in entry['hash']:
+                            server_hashes.add(hash_value)
+
+                logger.debug("Found %d unique hash values from server", len(server_hashes))
+
+                added_count = 0
+                for local_entry in local_handler.custom_json:
+                    if 'hash' in local_entry:
+                        has_unique_hash = False
+                        for hash_value in local_entry['hash']:
+                            if hash_value not in server_hashes:
+                                has_unique_hash = True
+                                break
+
+                        if has_unique_hash:
+                            logger.trace("Adding local-only entry to merged content")
+                            handler.custom_json.append(local_entry)
+                            added_count += 1
+
+                logger.debug("Added %d local-only entries to merged content", added_count)
+            else:
+                logger.debug("Using only local entries (server load failed or no client)")
+                handler.custom_json = local_handler.custom_json
+                handler.is_loaded = True
+                logger.info("Using local tonies.custom.json content")
+    elif not loaded_from_server:
+        logger.debug("No local file found and server load failed, starting with empty list")
+        handler.custom_json = []
+        handler.is_loaded = True
+        logger.info("No tonies.custom.json found, starting with empty list")
+
+    if taf_file and input_files and handler.is_loaded:
+        logger.debug("Adding new entry for TAF file: %s", taf_file)
+        logger.debug("Using %d input files for metadata extraction", len(input_files))
+
+        if not handler.add_entry_from_taf(taf_file, input_files, artwork_url):
+            logger.error("Failed to add entry to tonies.custom.json")
+            logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=False (failed to add entry)")
+            return False
+
+        logger.debug("Successfully added new entry for %s", taf_file)
+    else:
+        if not taf_file:
+            logger.debug("No TAF file provided, skipping add entry step")
+        elif not input_files:
+            logger.debug("No input files provided, skipping add entry step")
+        elif not handler.is_loaded:
+            logger.debug("Handler not properly loaded, skipping add entry step")
+
+    logger.debug("Saving updated tonies.custom.json to %s", json_file_path)
+    if not handler.save_to_file(json_file_path):
+        logger.error("Failed to save tonies.custom.json to file")
+        logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=False (failed to save file)")
+        return False
+
+    logger.debug("Successfully saved tonies.custom.json with %d entries", len(handler.custom_json))
+    logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=True")
+    return True
+
+def fetch_and_update_tonies_json_v2(client: TeddyCloudClient, taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
                                     artwork_url: Optional[str] = None, output_dir: Optional[str] = None) -> bool:
     """
     Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry.
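Review note: the merge step in fetch_and_update_tonies_json_v1 treats the server copy as authoritative and appends only local entries that carry at least one hash the server does not know. The same logic, distilled into a standalone sketch (entries invented):

# Hash-based merge distilled from fetch_and_update_tonies_json_v1 (invented data).
server = [{'no': '1', 'hash': ['AAA']}]
local = [
    {'no': '1', 'hash': ['AAA']},         # all hashes known -> skipped
    {'no': '2', 'hash': ['BBB', 'AAA']},  # one unknown hash -> appended
]
server_hashes = {h for entry in server for h in entry.get('hash', [])}
merged = server + [
    entry for entry in local
    if any(h not in server_hashes for h in entry.get('hash', []))
]
print([entry['no'] for entry in merged])  # ['1', '2']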
@@ -498,7 +1287,7 @@ def fetch_and_update_tonies_json(client: TeddyCloudClient, taf_file: Optional[st
     logger.trace("Entering fetch_and_update_tonies_json with client=%s, taf_file=%s, input_files=%s, artwork_url=%s, output_dir=%s",
                  client, taf_file, input_files, artwork_url, output_dir)
 
-    handler = ToniesJsonHandler(client)
+    handler = ToniesJsonHandlerv2(client)
     if not output_dir:
         output_dir = './output'
         logger.debug("No output directory specified, using default: %s", output_dir)
@@ -521,7 +1310,7 @@ def fetch_and_update_tonies_json(client: TeddyCloudClient, taf_file: Optional[st
         logger.info("Local tonies.custom.json file found, merging with server content")
         logger.debug("Local file exists at %s, size: %d bytes", json_file_path, os.path.getsize(json_file_path))
 
-        local_handler = ToniesJsonHandler()
+        local_handler = ToniesJsonHandlerv2()
         if local_handler.load_from_file(json_file_path):
             logger.debug("Successfully loaded local file with %d entries", len(local_handler.custom_json))
 
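Review note: with this release there are two parallel entry points; the v1 function writes the flat list format introduced above, while the renamed fetch_and_update_tonies_json_v2 keeps the previous behavior. A hedged caller sketch (the TeddyCloudClient constructor arguments and all paths are assumptions, not shown in this diff):

# Hypothetical caller; client construction and paths are assumed.
from TonieToolbox.teddycloud import TeddyCloudClient
from TonieToolbox.tonies_json import (
    fetch_and_update_tonies_json_v1,
    fetch_and_update_tonies_json_v2,
)

client = TeddyCloudClient(...)  # constructor args are not part of this diff

ok = fetch_and_update_tonies_json_v1(
    client,
    taf_file='output/example.taf',         # assumed path
    input_files=['input/01 - Intro.ogg'],  # assumed inputs
    artwork_url=None,
    output_dir='./output',                 # default used by both functions
)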