TonieToolbox 0.5.0a1__py3-none-any.whl → 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- TonieToolbox/__init__.py +2 -1
- TonieToolbox/__main__.py +303 -141
- TonieToolbox/artwork.py +59 -10
- TonieToolbox/audio_conversion.py +106 -34
- TonieToolbox/constants.py +133 -10
- TonieToolbox/dependency_manager.py +679 -184
- TonieToolbox/filename_generator.py +57 -10
- TonieToolbox/integration.py +73 -0
- TonieToolbox/integration_macos.py +613 -0
- TonieToolbox/integration_ubuntu.py +2 -0
- TonieToolbox/integration_windows.py +445 -0
- TonieToolbox/logger.py +9 -10
- TonieToolbox/media_tags.py +24 -104
- TonieToolbox/ogg_page.py +41 -41
- TonieToolbox/opus_packet.py +15 -15
- TonieToolbox/recursive_processor.py +34 -34
- TonieToolbox/tags.py +4 -5
- TonieToolbox/teddycloud.py +164 -51
- TonieToolbox/tonie_analysis.py +26 -24
- TonieToolbox/tonie_file.py +88 -72
- TonieToolbox/tonies_json.py +830 -37
- TonieToolbox/version_handler.py +14 -20
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.6.0.dist-info}/METADATA +257 -177
- tonietoolbox-0.6.0.dist-info/RECORD +30 -0
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.6.0.dist-info}/WHEEL +1 -1
- tonietoolbox-0.5.0a1.dist-info/RECORD +0 -26
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.6.0.dist-info}/entry_points.txt +0 -0
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.6.0.dist-info}/licenses/LICENSE.md +0 -0
- {tonietoolbox-0.5.0a1.dist-info → tonietoolbox-0.6.0.dist-info}/top_level.txt +0 -0
TonieToolbox/tonies_json.py
CHANGED
@@ -1,3 +1,4 @@
+#!/usr/bin/python3
 """
 TonieToolbox module for handling the tonies.custom.json operations.
 
@@ -19,9 +20,693 @@ from .media_tags import get_file_tags, extract_album_info
 from .constants import LANGUAGE_MAPPING, GENRE_MAPPING
 from .teddycloud import TeddyCloudClient
 
-logger = get_logger(
+logger = get_logger(__name__)
 
-class ToniesJsonHandler:
+class ToniesJsonHandlerv1:
+    """Handler for tonies.custom.json operations using v1 format."""
+
+    def __init__(self, client: TeddyCloudClient = None):
+        """
+        Initialize the handler.
+
+        Args:
+            client (TeddyCloudClient | None): TeddyCloudClient instance to use for API communication
+        """
+        self.client = client
+        self.custom_json = []
+        self.is_loaded = False
+
+    def load_from_server(self) -> bool:
+        """
+        Load tonies.custom.json from the TeddyCloud server.
+
+        Returns:
+            bool: True if successful, False otherwise
+        """
+        if self.client is None:
+            logger.error("Cannot load from server: no client provided")
+            return False
+
+        try:
+            result = self.client.get_tonies_custom_json()
+            if result is not None:
+                # Convert v2 format to v1 format if necessary
+                if len(result) > 0 and "data" in result[0]:
+                    logger.debug("Converting v2 format from server to v1 format")
+                    self.custom_json = self._convert_v2_to_v1(result)
+                else:
+                    self.custom_json = result
+                self.is_loaded = True
+                logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
+                return True
+            else:
+                logger.error("Failed to load tonies.custom.json from server")
+                return False
+
+        except Exception as e:
+            logger.error("Error loading tonies.custom.json: %s", e)
+            return False
+
+    def load_from_file(self, file_path: str) -> bool:
+        """
+        Load tonies.custom.json from a local file.
+
+        Args:
+            file_path (str): Path to the tonies.custom.json file
+
+        Returns:
+            bool: True if successful, False otherwise
+        """
+        try:
+            if os.path.exists(file_path):
+                logger.info("Loading tonies.custom.json from file: %s", file_path)
+                with open(file_path, 'r', encoding='utf-8') as f:
+                    data = json.load(f)
+                    if isinstance(data, list):
+                        # Convert v2 format to v1 format if necessary
+                        if len(data) > 0 and "data" in data[0]:
+                            logger.debug("Converting v2 format from file to v1 format")
+                            self.custom_json = self._convert_v2_to_v1(data)
+                        else:
+                            self.custom_json = data
+                        self.is_loaded = True
+                        logger.info("Successfully loaded tonies.custom.json with %d entries", len(self.custom_json))
+                        return True
+                    else:
+                        logger.error("Invalid tonies.custom.json format in file, expected list")
+                        return False
+            else:
+                logger.info("tonies.custom.json file not found, starting with empty list")
+                self.custom_json = []
+                self.is_loaded = True
+                return True
+
+        except Exception as e:
+            logger.error("Error loading tonies.custom.json from file: %s", e)
+            return False
+
+    def save_to_file(self, file_path: str) -> bool:
+        """
+        Save tonies.custom.json to a local file.
+
+        Args:
+            file_path (str): Path where to save the tonies.custom.json file
+
+        Returns:
+            bool: True if successful, False otherwise
+        """
+        if not self.is_loaded:
+            logger.error("Cannot save tonies.custom.json: data not loaded")
+            return False
+
+        try:
+            os.makedirs(os.path.dirname(os.path.abspath(file_path)), exist_ok=True)
+            logger.info("Saving tonies.custom.json to file: %s", file_path)
+            with open(file_path, 'w', encoding='utf-8') as f:
+                json.dump(self.custom_json, f, indent=2, ensure_ascii=False)
+
+            logger.info("Successfully saved tonies.custom.json to file")
+            return True
+
+        except Exception as e:
+            logger.error("Error saving tonies.custom.json to file: %s", e)
+            return False
+
+    def renumber_series_entries(self, series: str) -> None:
+        """
+        Re-sort and re-number all entries for a series by year (chronological),
+        with entries without a year coming last.
+
+        Args:
+            series (str): Series name to renumber
+        """
+        # Collect all entries for the series
+        series_entries = [entry for entry in self.custom_json if entry.get('series') == series]
+        # Separate entries with and without year
+        with_year = []
+        without_year = []
+        for entry in series_entries:
+            year = self._extract_year_from_text(entry.get('title', ''))
+            if not year:
+                year = self._extract_year_from_text(entry.get('episodes', ''))
+            if year:
+                with_year.append((year, entry))
+            else:
+                without_year.append(entry)
+        # Sort entries with year
+        with_year.sort(key=lambda x: x[0])
+        # Assign new numbers
+        new_no = 1
+        for _, entry in with_year:
+            entry['no'] = str(new_no)
+            new_no += 1
+        for entry in without_year:
+            entry['no'] = str(new_no)
+            new_no += 1
+
+    def add_entry_from_taf(self, taf_file: str, input_files: List[str], artwork_url: Optional[str] = None) -> bool:
+        """
+        Add an entry to the custom JSON from a TAF file.
+        If an entry with the same hash exists, it will be updated.
+        If an entry with the same series+episodes exists, the new hash will be added to it.
+
+        Args:
+            taf_file (str): Path to the TAF file
+            input_files (list[str]): List of input audio files used to create the TAF
+            artwork_url (str | None): URL of the uploaded artwork (if any)
+
+        Returns:
+            bool: True if successful, False otherwise
+        """
+        logger.trace("Entering add_entry_from_taf() with taf_file=%s, input_files=%s, artwork_url=%s",
+                     taf_file, input_files, artwork_url)
+
+        if not self.is_loaded:
+            logger.error("Cannot add entry: tonies.custom.json not loaded")
+            return False
+
+        try:
+            logger.info("Adding entry for %s to tonies.custom.json", taf_file)
+            logger.debug("Extracting metadata from input files")
+            metadata = self._extract_metadata_from_files(input_files)
+            logger.debug("Extracted metadata: %s", metadata)
+            with open(taf_file, 'rb') as f:
+                taf_hash = hashlib.sha1(f.read()).hexdigest().upper()
+
+            timestamp = str(int(time.time()))
+            series = metadata.get('albumartist', metadata.get('artist', 'Unknown Artist'))
+            episodes = metadata.get('album', os.path.splitext(os.path.basename(taf_file))[0])
+            copyright = metadata.get('copyright', '')
+
+            # Extract year from metadata or from episode title
+            year = None
+            year_str = metadata.get('year', metadata.get('date', None))
+
+            # Try to convert metadata year to int if it exists
+            if year_str:
+                try:
+                    # Extract 4 digits if the date includes more information (e.g., "2022-05-01")
+                    import re
+                    year_match = re.search(r'(\d{4})', str(year_str))
+                    if year_match:
+                        year = int(year_match.group(1))
+                    else:
+                        # If year is just a number, try to format it properly
+                        year_val = int(year_str)
+                        if 0 <= year_val <= 99:  # Assume 2-digit year format
+                            if year_val <= 25:  # Arbitrary cutoff for 20xx vs 19xx
+                                year = 2000 + year_val
+                            else:
+                                year = 1900 + year_val
+                        else:
+                            year = year_val
+                except (ValueError, TypeError):
+                    logger.debug("Could not convert metadata year '%s' to integer", year_str)
+
+            if not year:
+                year_from_episodes = self._extract_year_from_text(episodes)
+                year_from_copyright = self._extract_year_from_text(copyright)
+                if year_from_episodes:
+                    year = year_from_episodes
+                else:
+                    year = year_from_copyright
+
+            # Ensure year is in YYYY format
+            year_formatted = None
+            if year:
+                # Validate the year is in the reasonable range
+                if 1900 <= year <= 2099:
+                    year_formatted = f"{year:04d}"  # Format as 4 digits
+                    logger.debug("Formatted year '%s' as '%s'", year, year_formatted)
+                else:
+                    logger.warning("Year '%s' outside reasonable range (1900-2099), ignoring", year)
+
+            if year_formatted:
+                title = f"{series} - {year_formatted} - {episodes}"
+            else:
+                title = f"{series} - {episodes}"
+
+            tracks = metadata.get('track_descriptions', [])
+            language = self._determine_language(metadata)
+            category = self._determine_category_v1(metadata)
+
+            existing_entry, entry_idx = self.find_entry_by_hash(taf_hash)
+            if existing_entry:
+                logger.info("Found existing entry with the same hash, updating it")
+                if artwork_url and artwork_url != existing_entry.get('pic', ''):
+                    logger.debug("Updating artwork URL")
+                    existing_entry['pic'] = artwork_url
+                if tracks and tracks != existing_entry.get('tracks', []):
+                    logger.debug("Updating track descriptions")
+                    existing_entry['tracks'] = tracks
+                if episodes and episodes != existing_entry.get('episodes', ''):
+                    logger.debug("Updating episodes")
+                    existing_entry['episodes'] = episodes
+                if series and series != existing_entry.get('series', ''):
+                    logger.debug("Updating series")
+                    existing_entry['series'] = series
+                logger.info("Successfully updated existing entry for %s", taf_file)
+                self.renumber_series_entries(series)
+                return True
+
+            existing_entry, entry_idx = self.find_entry_by_series_episodes(series, episodes)
+            if existing_entry:
+                logger.info("Found existing entry with the same series/episodes, adding hash to it")
+                if 'audio_id' not in existing_entry:
+                    existing_entry['audio_id'] = []
+                if 'hash' not in existing_entry:
+                    existing_entry['hash'] = []
+
+                existing_entry['audio_id'].append(timestamp)
+                existing_entry['hash'].append(taf_hash)
+
+                if artwork_url and artwork_url != existing_entry.get('pic', ''):
+                    logger.debug("Updating artwork URL")
+                    existing_entry['pic'] = artwork_url
+
+                logger.info("Successfully added new hash to existing entry for %s", taf_file)
+                self.renumber_series_entries(series)
+                return True
+
+            logger.debug("No existing entry found, creating new entry")
+
+            logger.debug("Generating entry number")
+            entry_no = self._generate_entry_no(series, episodes, year)
+            logger.debug("Generated entry number: %s", entry_no)
+
+            logger.debug("Generating model number")
+            model_number = self._generate_model_number()
+            logger.debug("Generated model number: %s", model_number)
+
+            entry = {
+                "no": entry_no,
+                "model": model_number,
+                "audio_id": [timestamp],
+                "hash": [taf_hash],
+                "title": title,
+                "series": series,
+                "episodes": episodes,
+                "tracks": tracks,
+                "release": timestamp,
+                "language": language,
+                "category": category,
+                "pic": artwork_url if artwork_url else ""
+            }
+
+            self.custom_json.append(entry)
+            logger.debug("Added entry to custom_json (new length: %d)", len(self.custom_json))
+
+            logger.info("Successfully added entry for %s", taf_file)
+            self.renumber_series_entries(series)
+            logger.trace("Exiting add_entry_from_taf() with success=True")
+            return True
+
+        except Exception as e:
+            logger.error("Error adding entry for %s: %s", taf_file, e)
+            logger.trace("Exiting add_entry_from_taf() with success=False due to exception: %s", str(e))
+            return False
+
+    def _generate_entry_no(self, series: str, episodes: str, year: Optional[int] = None) -> str:
+        """
+        Generate an entry number based on specific rules:
+        1. For series entries with years: assign numbers in chronological order (1, 2, 3, etc.)
+        2. For entries without years: assign the next available number after those with years
+
+        Args:
+            series (str): Series name
+            episodes (str): Episodes name
+            year (int | None): Release year from metadata, if available
+
+        Returns:
+            str: Generated entry number as string
+        """
+        logger.trace("Entering _generate_entry_no() with series='%s', episodes='%s', year=%s",
+                     series, episodes, year)
+
+        # If we don't have a series name, use a simple approach to get the next number
+        if not series:
+            max_no = 0
+            for entry in self.custom_json:
+                try:
+                    no_value = int(entry.get('no', '0'))
+                    max_no = max(max_no, no_value)
+                except (ValueError, TypeError):
+                    pass
+            return str(max_no + 1)
+
+        logger.debug("Generating entry number for series '%s'", series)
+
+        # Step 1: Collect all existing entries for this series and extract their years
+        series_entries = []
+        used_numbers = set()
+
+        for entry in self.custom_json:
+            entry_series = entry.get('series', '')
+            if entry_series == series:
+                entry_no = entry.get('no', '')
+                try:
+                    entry_no_int = int(entry_no)
+                    used_numbers.add(entry_no_int)
+                except (ValueError, TypeError):
+                    pass
+
+                entry_title = entry.get('title', '')
+                entry_episodes = entry.get('episodes', '')
+
+                # Extract year from title and episodes
+                entry_year = self._extract_year_from_text(entry_title)
+                if not entry_year:
+                    entry_year = self._extract_year_from_text(entry_episodes)
+
+                series_entries.append({
+                    'no': entry_no,
+                    'title': entry_title,
+                    'episodes': entry_episodes,
+                    'year': entry_year
+                })
+
+        # Try to extract year from episodes if not explicitly provided
+        if not year:
+            extracted_year = self._extract_year_from_text(episodes)
+            if extracted_year:
+                year = extracted_year
+                logger.debug("Extracted year %d from episodes '%s'", year, episodes)
+
+        # Step 2: Split entries into those with years and those without
+        entries_with_years = [e for e in series_entries if e['year'] is not None]
+        entries_without_years = [e for e in series_entries if e['year'] is None]
+
+        # Sort entries with years by year (oldest first)
+        entries_with_years.sort(key=lambda x: x['year'])
+
+        logger.debug("Found %d entries with years and %d entries without years",
+                     len(entries_with_years), len(entries_without_years))
+
+        # Step 3: If this entry has a year, determine where it should be inserted
+        if year:
+            # Find position based on chronological order
+            insertion_index = 0
+            while insertion_index < len(entries_with_years) and entries_with_years[insertion_index]['year'] < year:
+                insertion_index += 1
+
+            # Resulting position is 1-indexed
+            position = insertion_index + 1
+            logger.debug("For year %d, calculated position %d based on chronological order", year, position)
+
+            # Now adjust position if needed to avoid conflicts with existing entries
+            while position in used_numbers:
+                position += 1
+                logger.debug("Position %d already used, incrementing to %d", position-1, position)
+
+            logger.debug("Final assigned entry number: %d", position)
+            return str(position)
+        else:
+            # Step 4: If this entry has no year, it should come after all entries with years
+            # Find the highest number used by entries with years
+            years_highest_no = 0
+            if entries_with_years:
+                for i, entry in enumerate(entries_with_years):
+                    try:
+                        expected_no = i + 1  # 1-indexed
+                        actual_no = int(entry['no'])
+                        years_highest_no = max(years_highest_no, actual_no)
+                    except (ValueError, TypeError):
+                        pass
+
+            # Find the highest number used overall
+            highest_no = max(used_numbers) if used_numbers else 0
+
+            # Next number should be at least one more than the highest from entries with years
+            next_no = max(years_highest_no, highest_no) + 1
+
+            logger.debug("No year available, assigned next number: %d", next_no)
+            return str(next_no)
+
+    def _extract_year_from_text(self, text: str) -> Optional[int]:
+        """
+        Extract a year (1900-2099) from text.
+
+        Args:
+            text (str): The text to extract the year from
+
+        Returns:
+            int | None: The extracted year as int, or None if no valid year found
+        """
+        import re
+        year_pattern = re.compile(r'(19\d{2}|20\d{2})')
+        year_match = year_pattern.search(text)
+
+        if year_match:
+            try:
+                extracted_year = int(year_match.group(1))
+                if 1900 <= extracted_year <= 2099:
+                    return extracted_year
+            except (ValueError, TypeError):
+                pass
+
+        return None
+
+    def _format_number(self, number: int, existing_entries: List[Dict[str, Any]]) -> str:
+        """
+        Format a number to match the existing entry number format (e.g., with leading zeros).
+
+        Args:
+            number (int): The number to format
+            existing_entries (list[dict]): List of existing entries with their numbers
+
+        Returns:
+            str: Formatted number as string
+        """
+        max_digits = 1
+        for entry in existing_entries:
+            entry_no = entry.get('no', '')
+            if entry_no and isinstance(entry_no, str) and entry_no.isdigit():
+                leading_zeros = len(entry_no) - len(entry_no.lstrip('0'))
+                if leading_zeros > 0:
+                    digits = len(entry_no)
+                    max_digits = max(max_digits, digits)
+        if max_digits > 1:
+            logger.trace("Formatting with %d digits", max_digits)
+            return f"{number:0{max_digits}d}"
+
+        return str(number)
+
+    def _generate_model_number(self) -> str:
+        """
+        Generate a unique model number for a new entry.
+
+        Returns:
+            str: Unique model number in the format "tt-42" followed by sequential number with zero padding
+        """
+        logger.trace("Entering _generate_model_number()")
+        highest_num = -1
+        pattern = re.compile(r'tt-42(\d+)')
+
+        logger.debug("Searching for highest tt-42 ID in %d existing entries", len(self.custom_json))
+        for entry in self.custom_json:
+            model = entry.get('model', '')
+            logger.trace("Checking model ID: %s", model)
+            match = pattern.match(model)
+            if match:
+                try:
+                    num = int(match.group(1))
+                    logger.trace("Found numeric part: %d", num)
+                    highest_num = max(highest_num, num)
+                except (IndexError, ValueError) as e:
+                    logger.trace("Failed to parse model ID: %s (%s)", model, str(e))
+                    pass
+
+        logger.debug("Highest tt-42 ID number found: %d", highest_num)
+        next_num = highest_num + 1
+        result = f"tt-42{next_num:010d}"
+        logger.debug("Generated new model ID: %s", result)
+
+        logger.trace("Exiting _generate_model_number() with result=%s", result)
+        return result
+
+    def _determine_category_v1(self, metadata: Dict[str, Any]) -> str:
+        """
+        Determine the category in v1 format.
+
+        Args:
+            metadata (dict): Dictionary containing file metadata
+
+        Returns:
+            str: Category string in v1 format
+        """
+        if 'genre' in metadata:
+            genre_value = metadata['genre'].lower().strip()
+
+            if any(keyword in genre_value for keyword in ['musik', 'song', 'music', 'lied']):
+                return "music"
+            elif any(keyword in genre_value for keyword in ['hörspiel', 'audio play', 'hörbuch', 'audiobook']):
+                return "audio-play"
+            elif any(keyword in genre_value for keyword in ['märchen', 'fairy', 'tales']):
+                return "fairy-tale"
+            elif any(keyword in genre_value for keyword in ['wissen', 'knowledge', 'learn']):
+                return "knowledge"
+            elif any(keyword in genre_value for keyword in ['schlaf', 'sleep', 'meditation']):
+                return "sleep"
+
+        return "audio-play"
+
+    def find_entry_by_hash(self, taf_hash: str) -> tuple[Optional[Dict[str, Any]], Optional[int]]:
+        """
+        Find an entry in the custom JSON by TAF hash.
+
+        Args:
+            taf_hash (str): SHA1 hash of the TAF file to find
+
+        Returns:
+            tuple[dict | None, int | None]: Tuple of (entry, entry_index) if found, or (None, None) if not found
+        """
+        logger.trace("Searching for entry with hash %s", taf_hash)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if 'hash' not in entry:
+                continue
+
+            for hash_value in entry['hash']:
+                if hash_value == taf_hash:
+                    logger.debug("Found existing entry with matching hash %s", taf_hash)
+                    return entry, entry_idx
+
+        logger.debug("No entry found with hash %s", taf_hash)
+        return None, None
+
+    def find_entry_by_series_episodes(self, series: str, episodes: str) -> tuple[Optional[Dict[str, Any]], Optional[int]]:
+        """
+        Find an entry in the custom JSON by series and episodes.
+
+        Args:
+            series (str): Series name to find
+            episodes (str): Episodes name to find
+
+        Returns:
+            tuple[dict | None, int | None]: Tuple of (entry, entry_index) if found, or (None, None) if not found
+        """
+        logger.trace("Searching for entry with series='%s', episodes='%s'", series, episodes)
+
+        for entry_idx, entry in enumerate(self.custom_json):
+            if entry.get('series') == series and entry.get('episodes') == episodes:
+                logger.debug("Found existing entry with matching series/episodes: %s / %s", series, episodes)
+                return entry, entry_idx
+
+        logger.debug("No entry found with series/episodes: %s / %s", series, episodes)
+        return None, None
+
+    def _extract_metadata_from_files(self, input_files: List[str]) -> Dict[str, Any]:
+        """
+        Extract metadata from audio files to use in the custom JSON entry.
+
+        Args:
+            input_files (list[str]): List of paths to audio files
+
+        Returns:
+            dict: Dictionary containing metadata extracted from files
+        """
+        metadata = {}
+        track_descriptions = []
+        for file_path in input_files:
+            tags = get_file_tags(file_path)
+            if 'title' in tags:
+                track_descriptions.append(tags['title'])
+            else:
+                filename = os.path.splitext(os.path.basename(file_path))[0]
+                track_descriptions.append(filename)
+            for tag_name, tag_value in tags.items():
+                if tag_name not in metadata:
+                    metadata[tag_name] = tag_value
+
+        metadata['track_descriptions'] = track_descriptions
+
+        return metadata
+
+    def _determine_language(self, metadata: Dict[str, Any]) -> str:
+        if 'language' in metadata:
+            lang_value = metadata['language'].lower().strip()
+            if lang_value in LANGUAGE_MAPPING:
+                return LANGUAGE_MAPPING[lang_value]
+        try:
+            system_lang, _ = locale.getdefaultlocale()
+            if system_lang:
+                lang_code = system_lang.split('_')[0].lower()
+                if lang_code in LANGUAGE_MAPPING:
+                    return LANGUAGE_MAPPING[lang_code]
+        except Exception:
+            pass
+        return 'de-de'
+
+    def _convert_v2_to_v1(self, v2_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+        """
+        Convert data from v2 format to v1 format.
+
+        Args:
+            v2_data (list[dict]): Data in v2 format
+
+        Returns:
+            list[dict]: Converted data in v1 format
+        """
+        v1_data = []
+
+        entry_no = 0
+        for v2_entry in v2_data:
+            if 'data' not in v2_entry:
+                continue
+
+            for v2_data_item in v2_entry['data']:
+                series = v2_data_item.get('series', '')
+                episodes = v2_data_item.get('episode', '')
+                model = v2_data_item.get('article', '')
+                title = f"{series} - {episodes}" if series and episodes else episodes
+
+                v1_entry = {
+                    "no": str(entry_no),
+                    "model": model,
+                    "audio_id": [],
+                    "hash": [],
+                    "title": title,
+                    "series": series,
+                    "episodes": episodes,
+                    "tracks": v2_data_item.get('track-desc', []),
+                    "release": str(v2_data_item.get('release', int(time.time()))),
+                    "language": v2_data_item.get('language', 'de-de'),
+                    "category": self._convert_category_v2_to_v1(v2_data_item.get('category', '')),
+                    "pic": v2_data_item.get('image', '')
+                }
+                if 'ids' in v2_data_item:
+                    for id_entry in v2_data_item['ids']:
+                        if 'audio-id' in id_entry:
+                            v1_entry['audio_id'].append(str(id_entry['audio-id']))
+                        if 'hash' in id_entry:
+                            v1_entry['hash'].append(id_entry['hash'].upper())
+
+                v1_data.append(v1_entry)
+                entry_no += 1
+
+        return v1_data
+
+    def _convert_category_v2_to_v1(self, v2_category: str) -> str:
+        """
+        Convert category from v2 format to v1 format.
+
+        Args:
+            v2_category (str): Category in v2 format
+
+        Returns:
+            str: Category in v1 format
+        """
+        v2_to_v1_mapping = {
+            "music": "music",
+            "Hörspiele & Hörbücher": "audio-play",
+            "Schlaflieder & Entspannung": "sleep",
+            "Wissen & Hörmagazine": "knowledge",
+            "Märchen": "fairy-tale"
+        }
+
+        return v2_to_v1_mapping.get(v2_category, "audio-play")
+
+class ToniesJsonHandlerv2:
     """Handler for tonies.custom.json operations."""
 
     def __init__(self, client: TeddyCloudClient = None):
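The new ToniesJsonHandlerv1 above is self-contained: load, add, save. A minimal offline usage sketch, with illustrative file paths that are not part of the package:

    # Sketch: maintain a local tonies.custom.json in v1 format without
    # a TeddyCloud server. Paths below are illustrative.
    from TonieToolbox.tonies_json import ToniesJsonHandlerv1

    handler = ToniesJsonHandlerv1()  # no client: offline use
    if handler.load_from_file('./output/tonies.custom.json'):
        # add_entry_from_taf() hashes the TAF, pulls tags from the source
        # audio files, and either updates a matching entry (same hash),
        # extends one (same series/episodes), or appends a new one.
        handler.add_entry_from_taf(
            './output/my_story.taf',
            ['./input/01 - Chapter One.mp3', './input/02 - Chapter Two.mp3'],
            artwork_url=None,
        )
        handler.save_to_file('./output/tonies.custom.json')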
@@ -29,7 +714,7 @@ class ToniesJsonHandler:
         Initialize the handler.
 
         Args:
-            client: TeddyCloudClient instance to use for API communication
+            client (TeddyCloudClient | None): TeddyCloudClient instance to use for API communication
         """
         self.client = client
         self.custom_json = []
@@ -40,7 +725,7 @@
         Load tonies.custom.json from the TeddyCloud server.
 
         Returns:
-            True if successful, False otherwise
+            bool: True if successful, False otherwise
         """
         if self.client is None:
             logger.error("Cannot load from server: no client provided")
@@ -66,10 +751,10 @@
         Load tonies.custom.json from a local file.
 
         Args:
-            file_path: Path to the tonies.custom.json file
+            file_path (str): Path to the tonies.custom.json file
 
         Returns:
-            True if successful, False otherwise
+            bool: True if successful, False otherwise
         """
         try:
             if os.path.exists(file_path):
@@ -99,10 +784,10 @@
         Save tonies.custom.json to a local file.
 
         Args:
-            file_path: Path where to save the tonies.custom.json file
+            file_path (str): Path where to save the tonies.custom.json file
 
         Returns:
-            True if successful, False otherwise
+            bool: True if successful, False otherwise
         """
         if not self.is_loaded:
             logger.error("Cannot save tonies.custom.json: data not loaded")
@@ -128,12 +813,12 @@
         If an entry with the same series+episode exists, the new hash will be added to it.
 
         Args:
-            taf_file: Path to the TAF file
-            input_files: List of input audio files used to create the TAF
-            artwork_url: URL of the uploaded artwork (if any)
+            taf_file (str): Path to the TAF file
+            input_files (list[str]): List of input audio files used to create the TAF
+            artwork_url (str | None): URL of the uploaded artwork (if any)
 
         Returns:
-            True if successful, False otherwise
+            bool: True if successful, False otherwise
         """
         logger.trace("Entering add_entry_from_taf() with taf_file=%s, input_files=%s, artwork_url=%s",
                      taf_file, input_files, artwork_url)
@@ -234,7 +919,7 @@
         Generate a unique article ID for a new entry.
 
         Returns:
-            Unique article ID in the format "tt-42" followed by sequential number starting from 0
+            str: Unique article ID in the format "tt-42" followed by sequential number starting from 0
         """
         logger.trace("Entering _generate_article_id()")
         highest_num = -1
@@ -267,26 +952,26 @@
         Extract metadata from audio files to use in the custom JSON entry.
 
         Args:
-            input_files: List of paths to audio files
+            input_files (list[str]): List of paths to audio files
 
         Returns:
-            Dictionary containing metadata extracted from files
+            dict: Dictionary containing metadata extracted from files
         """
         metadata = {}
         track_descriptions = []
         for file_path in input_files:
             tags = get_file_tags(file_path)
+            # Extract track descriptions
             if 'title' in tags:
                 track_descriptions.append(tags['title'])
             else:
                 filename = os.path.splitext(os.path.basename(file_path))[0]
                 track_descriptions.append(filename)
-
-            if 'language' not in metadata and 'language' in tags:
-                metadata['language'] = tags['language']
 
-
-
+            # Copy all available tags, but don't overwrite existing ones
+            for tag_name, tag_value in tags.items():
+                if tag_name not in metadata:
+                    metadata[tag_name] = tag_value
 
         metadata['track_descriptions'] = track_descriptions
 
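The reworked loop replaces the old language-only special case: every tag from every input file is now copied into metadata, but only the first occurrence of each tag name is kept, so the first file's values win. A toy illustration:

    # Sketch of the "first occurrence wins" tag merge introduced above.
    metadata = {}
    per_file_tags = [
        {'title': 'Chapter One', 'artist': 'Narrator A', 'language': 'deu'},
        {'title': 'Chapter Two', 'artist': 'Narrator B'},
    ]
    for tags in per_file_tags:
        for tag_name, tag_value in tags.items():
            if tag_name not in metadata:
                metadata[tag_name] = tag_value
    print(metadata['artist'])    # -> Narrator A (later files cannot overwrite)
    print(metadata['language'])  # -> deu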
@@ -362,10 +1047,10 @@
         Find an entry in the custom JSON by TAF hash.
 
         Args:
-            taf_hash: SHA1 hash of the TAF file to find
+            taf_hash (str): SHA1 hash of the TAF file to find
 
         Returns:
-            Tuple of (entry, entry_index, data_index) if found, or (None, None, None) if not found
+            tuple[dict | None, int | None, int | None]: Tuple of (entry, entry_index, data_index) if found, or (None, None, None) if not found
         """
         logger.trace("Searching for entry with hash %s", taf_hash)
 
@@ -390,11 +1075,11 @@
         Find an entry in the custom JSON by series and episode.
 
         Args:
-            series: Series name to find
-            episode: Episode name to find
+            series (str): Series name to find
+            episode (str): Episode name to find
 
         Returns:
-            Tuple of (entry, entry_index, data_index) if found, or (None, None, None) if not found
+            tuple[dict | None, int | None, int | None]: Tuple of (entry, entry_index, data_index) if found, or (None, None, None) if not found
         """
         logger.trace("Searching for entry with series='%s', episode='%s'", series, episode)
 
@@ -415,10 +1100,10 @@
         Calculate the total runtime in minutes from a list of audio files.
 
         Args:
-            input_files: List of paths to audio files
+            input_files (list[str]): List of paths to audio files
 
         Returns:
-            Total runtime in minutes (rounded to the nearest minute)
+            int: Total runtime in minutes (rounded to the nearest minute)
         """
         logger.trace("Entering _calculate_runtime() with %d input files", len(input_files))
         total_runtime_seconds = 0
@@ -463,7 +1148,6 @@
                 logger.warning("Error processing file %s: %s", file_path, e)
                 logger.trace("Exception details for %s: %s", file_path, str(e), exc_info=True)
 
-        # Convert seconds to minutes, rounding to nearest minute
         total_runtime_minutes = round(total_runtime_seconds / 60)
 
         logger.info("Calculated total runtime: %d seconds (%d minutes) from %d/%d files",
@@ -480,25 +1164,134 @@
         logger.trace("Exiting _calculate_runtime() with total runtime=%d minutes", total_runtime_minutes)
         return total_runtime_minutes
 
-def
+def fetch_and_update_tonies_json_v1(client: TeddyCloudClient, taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
+                                    artwork_url: Optional[str] = None, output_dir: Optional[str] = None) -> bool:
+    """
+    Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry in v1 format.
+
+    Args:
+        client (TeddyCloudClient): TeddyCloudClient instance to use for API communication
+        taf_file (str | None): Path to the TAF file to add
+        input_files (list[str] | None): List of input audio files used to create the TAF
+        artwork_url (str | None): URL of the uploaded artwork (if any)
+        output_dir (str | None): Directory where to save the tonies.custom.json file (defaults to './output')
+
+    Returns:
+        bool: True if successful, False otherwise
+    """
+    logger.trace("Entering fetch_and_update_tonies_json_v1 with client=%s, taf_file=%s, input_files=%s, artwork_url=%s, output_dir=%s",
+                 client, taf_file, input_files, artwork_url, output_dir)
+
+    handler = ToniesJsonHandlerv1(client)
+    if not output_dir:
+        output_dir = './output'
+        logger.debug("No output directory specified, using default: %s", output_dir)
+
+    os.makedirs(output_dir, exist_ok=True)
+    logger.debug("Ensuring output directory exists: %s", output_dir)
+
+    json_file_path = os.path.join(output_dir, 'tonies.custom.json')
+    logger.debug("JSON file path: %s", json_file_path)
+
+    loaded_from_server = False
+    if client:
+        logger.info("Attempting to load tonies.custom.json from server")
+        loaded_from_server = handler.load_from_server()
+        logger.debug("Load from server result: %s", "success" if loaded_from_server else "failed")
+    else:
+        logger.debug("No client provided, skipping server load")
+
+    if os.path.exists(json_file_path):
+        logger.info("Local tonies.custom.json file found, merging with server content")
+        logger.debug("Local file exists at %s, size: %d bytes", json_file_path, os.path.getsize(json_file_path))
+
+        local_handler = ToniesJsonHandlerv1()
+        if local_handler.load_from_file(json_file_path):
+            logger.debug("Successfully loaded local file with %d entries", len(local_handler.custom_json))
+
+            if loaded_from_server:
+                logger.debug("Merging local entries with server entries")
+                server_hashes = set()
+                for entry in handler.custom_json:
+                    if 'hash' in entry:
+                        for hash_value in entry['hash']:
+                            server_hashes.add(hash_value)
+
+                logger.debug("Found %d unique hash values from server", len(server_hashes))
+
+                added_count = 0
+                for local_entry in local_handler.custom_json:
+                    if 'hash' in local_entry:
+                        has_unique_hash = False
+                        for hash_value in local_entry['hash']:
+                            if hash_value not in server_hashes:
+                                has_unique_hash = True
+                                break
+
+                        if has_unique_hash:
+                            logger.trace("Adding local-only entry to merged content")
+                            handler.custom_json.append(local_entry)
+                            added_count += 1
+
+                logger.debug("Added %d local-only entries to merged content", added_count)
+            else:
+                logger.debug("Using only local entries (server load failed or no client)")
+                handler.custom_json = local_handler.custom_json
+                handler.is_loaded = True
+                logger.info("Using local tonies.custom.json content")
+    elif not loaded_from_server:
+        logger.debug("No local file found and server load failed, starting with empty list")
+        handler.custom_json = []
+        handler.is_loaded = True
+        logger.info("No tonies.custom.json found, starting with empty list")
+
+    if taf_file and input_files and handler.is_loaded:
+        logger.debug("Adding new entry for TAF file: %s", taf_file)
+        logger.debug("Using %d input files for metadata extraction", len(input_files))
+
+        if not handler.add_entry_from_taf(taf_file, input_files, artwork_url):
+            logger.error("Failed to add entry to tonies.custom.json")
+            logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=False (failed to add entry)")
+            return False
+
+        logger.debug("Successfully added new entry for %s", taf_file)
+    else:
+        if not taf_file:
+            logger.debug("No TAF file provided, skipping add entry step")
+        elif not input_files:
+            logger.debug("No input files provided, skipping add entry step")
+        elif not handler.is_loaded:
+            logger.debug("Handler not properly loaded, skipping add entry step")
+
+    logger.debug("Saving updated tonies.custom.json to %s", json_file_path)
+    if not handler.save_to_file(json_file_path):
+        logger.error("Failed to save tonies.custom.json to file")
+        logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=False (failed to save file)")
+        return False
+
+    logger.debug("Successfully saved tonies.custom.json with %d entries", len(handler.custom_json))
+    logger.trace("Exiting fetch_and_update_tonies_json_v1 with success=True")
+    return True
+
+def fetch_and_update_tonies_json_v2(client: TeddyCloudClient, taf_file: Optional[str] = None, input_files: Optional[List[str]] = None,
                                     artwork_url: Optional[str] = None, output_dir: Optional[str] = None) -> bool:
     """
     Fetch tonies.custom.json from server and merge with local file if it exists, then update with new entry.
 
     Args:
-        client: TeddyCloudClient instance to use for API communication
-        taf_file: Path to the TAF file to add
-        input_files: List of input audio files used to create the TAF
-        artwork_url: URL of the uploaded artwork (if any)
-        output_dir: Directory where to save the tonies.custom.json file (defaults to './output')
+        client (TeddyCloudClient): TeddyCloudClient instance to use for API communication
+        taf_file (str | None): Path to the TAF file to add
+        input_files (list[str] | None): List of input audio files used to create the TAF
+        artwork_url (str | None): URL of the uploaded artwork (if any)
+        output_dir (str | None): Directory where to save the tonies.custom.json file (defaults to './output')
 
     Returns:
-        True if successful, False otherwise
+        bool: True if successful, False otherwise
     """
     logger.trace("Entering fetch_and_update_tonies_json with client=%s, taf_file=%s, input_files=%s, artwork_url=%s, output_dir=%s",
                  client, taf_file, input_files, artwork_url, output_dir)
 
-    handler =
+    handler = ToniesJsonHandlerv2(client)
     if not output_dir:
         output_dir = './output'
         logger.debug("No output directory specified, using default: %s", output_dir)
@@ -521,7 +1314,7 @@ def fetch_and_update_tonies_json(client: TeddyCloudClient, taf_file: Optional[st
         logger.info("Local tonies.custom.json file found, merging with server content")
         logger.debug("Local file exists at %s, size: %d bytes", json_file_path, os.path.getsize(json_file_path))
 
-        local_handler =
+        local_handler = ToniesJsonHandlerv2()
         if local_handler.load_from_file(json_file_path):
             logger.debug("Successfully loaded local file with %d entries", len(local_handler.custom_json))
 
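Putting it together, a one-call update in v1 format might look like the sketch below; the TeddyCloudClient constructor arguments are an assumption here, check TonieToolbox/teddycloud.py for the exact signature:

    # Sketch: fetch, merge, add one TAF entry, and save in v1 format.
    from TonieToolbox.teddycloud import TeddyCloudClient
    from TonieToolbox.tonies_json import fetch_and_update_tonies_json_v1

    client = TeddyCloudClient('https://teddycloud.local')  # assumed signature
    ok = fetch_and_update_tonies_json_v1(
        client,
        taf_file='./output/my_story.taf',
        input_files=['./input/01.mp3', './input/02.mp3'],
        artwork_url=None,
        output_dir='./output',
    )
    print('updated' if ok else 'failed')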