TonieToolbox 0.2.3__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,679 @@
+ #!/usr/bin/python3
+ """
+ TeddyCloud API client for TonieToolbox.
+ Handles uploading .taf files to a TeddyCloud instance and interacting with the TeddyCloud API.
+ """
+
+ import os
+ import sys
+ import json
+ import logging
+ import urllib.parse
+ import urllib.request
+ import urllib.error
+ import base64
+ import mimetypes
+ import ssl
+ import time
+ import socket
+ import glob
+ from typing import Optional, Dict, Any, Tuple, List
+
+ from .logger import get_logger
+
+ logger = get_logger('teddycloud')
+
+ # Default timeout settings (in seconds)
+ DEFAULT_CONNECTION_TIMEOUT = 10
+ DEFAULT_READ_TIMEOUT = 300  # 5 minutes
+ DEFAULT_MAX_RETRIES = 3
+ DEFAULT_RETRY_DELAY = 5  # seconds
+
+ # Get file paths for any file type (not just audio)
+ def get_file_paths(input_pattern):
+     """
+     Get file paths based on a pattern, without filtering by file type.
+     This is different from audio_conversion.get_input_files as it doesn't filter for audio files.
+
+     Args:
+         input_pattern: Input file pattern or direct file path
+
+     Returns:
+         list: List of file paths
+     """
+     logger.debug("Getting file paths for pattern: %s", input_pattern)
+
+     if input_pattern.endswith(".lst"):
+         logger.debug("Processing list file: %s", input_pattern)
+         list_dir = os.path.dirname(os.path.abspath(input_pattern))
+         file_paths = []
+
+         with open(input_pattern, 'r', encoding='utf-8') as file_list:
+             for line_num, line in enumerate(file_list, 1):
+                 fname = line.strip()
+                 if not fname or fname.startswith('#'):  # Skip empty lines and comments
+                     continue
+
+                 # Remove any quote characters from path
+                 fname = fname.strip('"\'')
+
+                 # Check if the path is absolute or has a drive letter (Windows)
+                 if os.path.isabs(fname) or (len(fname) > 1 and fname[1] == ':'):
+                     full_path = fname  # Use as is if it's an absolute path
+                     logger.trace("Using absolute path from list: %s", full_path)
+                 else:
+                     full_path = os.path.join(list_dir, fname)
+                     logger.trace("Using relative path from list: %s", full_path)
+
+                 # Handle directory paths by finding all files in the directory
+                 if os.path.isdir(full_path):
+                     logger.debug("Path is a directory, finding files in: %s", full_path)
+                     dir_glob = os.path.join(full_path, "*")
+                     dir_files = sorted(glob.glob(dir_glob))
+                     if dir_files:
+                         file_paths.extend([f for f in dir_files if os.path.isfile(f)])
+                         logger.debug("Found %d files in directory", len(dir_files))
+                     else:
+                         logger.warning("No files found in directory at line %d: %s", line_num, full_path)
+                 elif os.path.isfile(full_path):
+                     file_paths.append(full_path)
+                 else:
+                     logger.warning("File not found at line %d: %s", line_num, full_path)
+
+         logger.debug("Found %d files in list file", len(file_paths))
+         return file_paths
+     else:
+         # Process as glob pattern
+         logger.debug("Processing glob pattern: %s", input_pattern)
+         file_paths = sorted([f for f in glob.glob(input_pattern) if os.path.isfile(f)])
+
+         if not file_paths:
+             # Try with explicit directory if the pattern didn't work
+             # This is helpful for Windows paths with backslashes
+             dir_name = os.path.dirname(input_pattern)
+             file_name = os.path.basename(input_pattern)
+             if dir_name:
+                 alt_pattern = os.path.join(dir_name, file_name)
+                 file_paths = sorted([f for f in glob.glob(alt_pattern) if os.path.isfile(f)])
+
+             # If still no files, try with the literal path (no glob interpretation)
+             if not file_paths and os.path.isfile(input_pattern):
+                 file_paths = [input_pattern]
+
+         logger.debug("Found %d files matching pattern", len(file_paths))
+         return file_paths
+
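As a point of reference for the rest of the module, here is a minimal usage sketch of get_file_paths; the import path is inferred from the relative import at the top of the file, and the example paths are placeholders, not part of the diff.

    # Sketch: resolving input files with get_file_paths (paths are placeholders).
    from TonieToolbox.teddycloud import get_file_paths

    # A glob pattern returns every matching regular file, sorted.
    taf_files = get_file_paths("./output/*.taf")

    # A .lst file is read line by line: blank lines and '#' comments are skipped,
    # relative entries are resolved against the .lst file's directory, and
    # directory entries expand to the files they contain.
    listed_files = get_file_paths("./uploads.lst")

    for file_path in taf_files + listed_files:
        print(file_path)
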
+ class ProgressTracker:
+     """Helper class to track and display upload progress."""
+
+     def __init__(self, total_size, file_name):
+         """
+         Initialize progress tracker.
+
+         Args:
+             total_size: Total size of the file in bytes
+             file_name: Name of the file being uploaded
+         """
+         self.total_size = total_size
+         self.file_name = file_name
+         self.uploaded = 0
+         self.start_time = time.time()
+         self.last_update = 0
+         self.last_percent = 0
+
+     def update(self, chunk_size):
+         """
+         Update progress by the given chunk size.
+
+         Args:
+             chunk_size: Size of the chunk that was uploaded
+         """
+         self.uploaded += chunk_size
+         current_time = time.time()
+
+         # Limit updates to max 10 per second to avoid flooding console
+         if current_time - self.last_update >= 0.1:
+             percent = min(100, int((self.uploaded / self.total_size) * 100))
+
+             # Only update if percentage changed or it's been more than a second
+             if percent != self.last_percent or current_time - self.last_update >= 1:
+                 self.print_progress(percent)
+                 self.last_update = current_time
+                 self.last_percent = percent
+
+     def print_progress(self, percent):
+         """
+         Print progress bar.
+
+         Args:
+             percent: Current percentage of upload completed
+         """
+         bar_length = 30
+         filled_length = int(bar_length * percent // 100)
+         bar = '█' * filled_length + '-' * (bar_length - filled_length)
+
+         # Calculate speed
+         elapsed_time = max(0.1, time.time() - self.start_time)
+         speed = self.uploaded / elapsed_time / 1024  # KB/s
+
+         # Format speed based on magnitude
+         if speed >= 1024:
+             speed_str = f"{speed/1024:.2f} MB/s"
+         else:
+             speed_str = f"{speed:.2f} KB/s"
+
+         # Format size
+         if self.total_size >= 1048576:  # 1 MB
+             size_str = f"{self.uploaded/1048576:.2f}/{self.total_size/1048576:.2f} MB"
+         else:
+             size_str = f"{self.uploaded/1024:.2f}/{self.total_size/1024:.2f} KB"
+
+         # Calculate remaining time
+         if percent > 0:
+             remaining = (self.total_size - self.uploaded) / (self.uploaded / elapsed_time)
+             if remaining > 60:
+                 remaining_str = f"{int(remaining/60)}m {int(remaining%60)}s"
+             else:
+                 remaining_str = f"{int(remaining)}s"
+         else:
+             remaining_str = "calculating..."
+
+         # Print progress bar
+         sys.stdout.write(f"\r{self.file_name}: |{bar}| {percent}% {size_str} {speed_str} ETA: {remaining_str}")
+         sys.stdout.flush()
+
+         if percent >= 100:
+             sys.stdout.write("\n")
+             sys.stdout.flush()
+
+ class ProgressTrackerHandler(urllib.request.HTTPHandler):
+     """Custom HTTP handler to track upload progress."""
+
+     def __init__(self, tracker=None):
+         """
+         Initialize handler.
+
+         Args:
+             tracker: ProgressTracker instance to use for tracking
+         """
+         super().__init__()
+         self.tracker = tracker
+
+     def http_request(self, req):
+         """
+         Hook into HTTP request to track upload progress.
+
+         Args:
+             req: HTTP request object
+
+         Returns:
+             Modified request object
+         """
+         if self.tracker and req.data:
+             req.add_unredirected_header('Content-Length', str(len(req.data)))
+             old_data = req.data
+
+             # Replace data with an iterator that tracks progress
+             def data_iterator():
+                 chunk_size = 8192
+                 total_sent = 0
+                 data = old_data
+                 while total_sent < len(data):
+                     chunk = data[total_sent:total_sent + chunk_size]
+                     total_sent += len(chunk)
+                     self.tracker.update(len(chunk))
+                     yield chunk
+
+             req.data = data_iterator()
+
+         return req
+
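The two classes above are wired together exactly once in this module, inside upload_file further down: a ProgressTrackerHandler wrapping a ProgressTracker is installed as the global opener before the request is issued. A condensed sketch of that wiring (URL and payload are placeholders, and the request itself is left commented out):

    # Sketch: progress-tracked upload wiring, mirroring upload_file below.
    import urllib.request
    from TonieToolbox.teddycloud import ProgressTracker, ProgressTrackerHandler

    payload = b"x" * (1024 * 1024)  # stand-in for the multipart body
    tracker = ProgressTracker(total_size=len(payload), file_name="example.taf")
    handler = ProgressTrackerHandler(tracker=tracker)
    opener = urllib.request.build_opener(handler)
    urllib.request.install_opener(opener)  # the handler's http_request() hook is now active for http:// requests

    req = urllib.request.Request("http://teddycloud.local/api/fileUpload",
                                 data=payload, method='POST')
    # urllib.request.urlopen(req)  # each 8 KiB chunk yielded updates the progress bar
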
+ class TeddyCloudClient:
+     """Client for interacting with TeddyCloud API."""
+
+     def __init__(self, base_url: str, ignore_ssl_verify: bool = False,
+                  connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT,
+                  read_timeout: int = DEFAULT_READ_TIMEOUT,
+                  max_retries: int = DEFAULT_MAX_RETRIES,
+                  retry_delay: int = DEFAULT_RETRY_DELAY):
+         """
+         Initialize the TeddyCloud client.
+
+         Args:
+             base_url: Base URL of the TeddyCloud instance (e.g., https://teddycloud.example.com)
+             ignore_ssl_verify: If True, SSL certificate verification will be disabled (useful for self-signed certificates)
+             connection_timeout: Timeout for establishing a connection
+             read_timeout: Timeout for reading data from the server
+             max_retries: Maximum number of retries for failed requests
+             retry_delay: Delay between retries
+         """
+         self.base_url = base_url.rstrip('/')
+         self.ignore_ssl_verify = ignore_ssl_verify
+         self.connection_timeout = connection_timeout
+         self.read_timeout = read_timeout
+         self.max_retries = max_retries
+         self.retry_delay = retry_delay
+
+         # Create SSL context if needed
+         self.ssl_context = None
+         if ignore_ssl_verify:
+             logger.warning("SSL certificate verification is disabled. This is insecure!")
+             self.ssl_context = ssl._create_unverified_context()
+
+     def _urlopen(self, req):
+         """Helper method to open URLs with optional SSL verification bypass and retry logic."""
+         for attempt in range(self.max_retries):
+             try:
+                 if self.ignore_ssl_verify:
+                     return urllib.request.urlopen(req, context=self.ssl_context, timeout=self.connection_timeout)
+                 else:
+                     return urllib.request.urlopen(req, timeout=self.connection_timeout)
+             except (urllib.error.URLError, socket.timeout) as e:
+                 logger.warning("Request failed (attempt %d/%d): %s", attempt + 1, self.max_retries, e)
+                 if attempt < self.max_retries - 1:
+                     time.sleep(self.retry_delay)
+                 else:
+                     raise
+             except Exception as e:
+                 logger.error("Unexpected error during request: %s", e)
+                 raise
+
+     def get_tag_index(self) -> Optional[list]:
+         """
+         Get list of tags from TeddyCloud.
+
+         Returns:
+             List of tags or None if request failed
+         """
+         try:
+             url = f"{self.base_url}/api/getTagIndex"
+             headers = {'Content-Type': 'application/json'}
+
+             req = urllib.request.Request(url, headers=headers)
+
+             with self._urlopen(req) as response:
+                 tags = json.loads(response.read().decode('utf-8'))
+                 logger.debug("Found %d tags", len(tags))
+                 return tags
+
+         except urllib.error.HTTPError as e:
+             logger.error("Failed to get tags: %s", e)
+             return None
+         except Exception as e:
+             logger.error("Error getting tags: %s", e)
+             return None
+
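A short sketch of constructing the client and reading the tag index; the URL is a placeholder, and the dict-with-'tags' shape checked below mirrors the handling in get_tags_from_teddycloud at the end of this file.

    # Sketch: query the tag index of a TeddyCloud instance (URL is a placeholder).
    from TonieToolbox.teddycloud import TeddyCloudClient

    client = TeddyCloudClient("https://teddycloud.local", ignore_ssl_verify=True)
    result = client.get_tag_index()

    if isinstance(result, dict) and 'tags' in result:
        # get_tags_from_teddycloud() below expects this structure.
        for tag in result['tags']:
            print(tag.get('uid'), tag.get('type'))
    elif result is None:
        print("Request failed")
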
+     def upload_file(self, file_path: str, special_folder: str = None, path: str = None, show_progress: bool = True) -> bool:
+         """
+         Upload a .taf or image file to TeddyCloud.
+
+         Args:
+             file_path: Path to the file to upload (.taf, .jpg, .jpeg, .png)
+             special_folder: Special folder to upload to (currently only 'library' is supported)
+             path: Path where to write the file within the special folder
+             show_progress: Whether to show a progress bar during upload
+
+         Returns:
+             True if upload was successful, False otherwise
+         """
+         try:
+             if not os.path.exists(file_path):
+                 logger.error("File does not exist: %s", file_path)
+                 return False
+
+             # Check for supported file types
+             file_ext = os.path.splitext(file_path.lower())[1]
+             is_taf = file_ext == '.taf'
+             is_image = file_ext in ['.jpg', '.jpeg', '.png']
+
+             if not (is_taf or is_image):
+                 logger.error("Unsupported file type %s: %s", file_ext, file_path)
+                 return False
+
+             # Read file and prepare for upload
+             file_size = os.path.getsize(file_path)
+             logger.info("File size: %.2f MB", file_size / (1024 * 1024))
+
+             with open(file_path, 'rb') as f:
+                 file_content = f.read()
+
+             filename = os.path.basename(file_path)
+
+             # Build multipart form data
+             boundary = '----WebKitFormBoundary7MA4YWxkTrZu0gW'
+             headers = {
+                 'Content-Type': f'multipart/form-data; boundary={boundary}',
+                 'User-Agent': 'TonieToolbox/1.0'
+             }
+
+             # Start request data with boundary
+             body = []
+             body.append(f'--{boundary}'.encode())
+
+             # Set appropriate content type based on file extension
+             content_type = 'application/octet-stream'
+             if is_image:
+                 if file_ext == '.jpg' or file_ext == '.jpeg':
+                     content_type = 'image/jpeg'
+                 elif file_ext == '.png':
+                     content_type = 'image/png'
+
+             body.append(f'Content-Disposition: form-data; name="file"; filename="{filename}"'.encode())
+             body.append(f'Content-Type: {content_type}'.encode())
+             body.append(b'')
+             body.append(file_content)
+             body.append(f'--{boundary}--'.encode())
+             body.append(b'')
+
+             # Join all parts with CRLF
+             body = b'\r\n'.join(body)
+
+             # Build the upload URL with query parameters
+             if special_folder or path:
+                 query_params = []
+
+                 if special_folder:
+                     query_params.append(f"special={urllib.parse.quote(special_folder)}")
+
+                 if path:
+                     query_params.append(f"path={urllib.parse.quote(path)}")
+
+                 query_string = "&".join(query_params)
+                 upload_url = f"{self.base_url}/api/fileUpload?{query_string}"
+                 logger.debug("Using endpoint with params: %s", upload_url)
+             else:
+                 # Fallback to previous endpoint for backward compatibility
+                 upload_url = f"{self.base_url}/api/v1/audio"
+                 logger.debug("Using legacy endpoint: %s", upload_url)
+
+             # Set up progress tracking if requested
+             if show_progress:
+                 tracker = ProgressTracker(total_size=len(body), file_name=filename)
+                 handler = ProgressTrackerHandler(tracker=tracker)
+                 opener = urllib.request.build_opener(handler)
+                 urllib.request.install_opener(opener)
+
+             # Try upload with retries
+             for attempt in range(self.max_retries):
+                 try:
+                     # Create a fresh request for each attempt
+                     req = urllib.request.Request(upload_url, data=body, headers=headers, method='POST')
+
+                     # Set timeouts
+                     socket.setdefaulttimeout(self.read_timeout)
+
+                     with self._urlopen(req) as response:
+                         result_text = response.read().decode('utf-8')
+
+                         # Try to parse as JSON, but handle plain text responses too
+                         try:
+                             result = json.loads(result_text)
+                             logger.info("Upload successful: %s", result.get('name', 'Unknown'))
+                         except json.JSONDecodeError:
+                             logger.info("Upload successful, response: %s", result_text)
+
+                         return True
+
+                 except urllib.error.HTTPError as e:
+                     logger.error("HTTP error during upload (attempt %d/%d): %s",
+                                  attempt + 1, self.max_retries, e)
+
+                     # Try to parse error response
+                     try:
+                         error_msg = json.loads(e.read().decode('utf-8'))
+                         logger.error("Error details: %s", error_msg)
+                     except Exception:
+                         pass
+
+                     # Only retry on certain HTTP errors (e.g. 500, 502, 503, 504)
+                     if e.code >= 500 and attempt < self.max_retries - 1:
+                         time.sleep(self.retry_delay)
+                         continue
+
+                     return False
+
+                 except (urllib.error.URLError, socket.timeout) as e:
+                     # Network errors, timeout errors
+                     logger.error("Network error during upload (attempt %d/%d): %s",
+                                  attempt + 1, self.max_retries, e)
+
+                     if attempt < self.max_retries - 1:
+                         time.sleep(self.retry_delay)
+                         continue
+
+                     return False
+
+                 except Exception as e:
+                     logger.error("Unexpected error during upload: %s", e)
+                     return False
+
+             return False
+
+         except Exception as e:
+             logger.error("Error preparing file for upload: %s", e)
+             return False
+
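Calling upload_file directly looks like the sketch below; 'library' is the only special folder the docstring names, and the server URL, file names and target path are placeholders.

    # Sketch: upload a TAF and its cover image into the TeddyCloud library.
    from TonieToolbox.teddycloud import TeddyCloudClient

    client = TeddyCloudClient("https://teddycloud.local", ignore_ssl_verify=True)

    ok = client.upload_file("my_story.taf", special_folder="library",
                            path="audiobooks", show_progress=True)
    if ok:
        # Image uploads go through the same endpoint; only the Content-Type differs.
        client.upload_file("my_story.png", special_folder="library",
                           path="audiobooks", show_progress=False)
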
+     def get_tonies_custom_json(self) -> Optional[list]:
+         """
+         Get tonies.custom.json from the TeddyCloud server.
+
+         Returns:
+             List of custom tonie entries or None if request failed
+         """
+         try:
+             url = f"{self.base_url}/api/toniesCustomJson"
+             logger.info("Loading tonies.custom.json from %s", url)
+
+             req = urllib.request.Request(url)
+
+             with self._urlopen(req) as response:
+                 data = json.loads(response.read().decode('utf-8'))
+                 if isinstance(data, list):
+                     logger.info("Successfully loaded tonies.custom.json with %d entries", len(data))
+                     return data
+                 else:
+                     logger.error("Invalid tonies.custom.json format, expected list")
+                     return None
+
+         except urllib.error.HTTPError as e:
+             if e.code == 404:
+                 logger.info("tonies.custom.json not found on server, starting with empty list")
+                 return []
+             else:
+                 logger.error("HTTP error loading tonies.custom.json: %s", e)
+                 return None
+         except Exception as e:
+             logger.error("Error loading tonies.custom.json: %s", e)
+             return None
+
+     def put_tonies_custom_json(self, custom_json_data: List[Dict[str, Any]]) -> bool:
+         """
+         Save tonies.custom.json to the TeddyCloud server.
+
+         Args:
+             custom_json_data: List of custom tonie entries to save
+
+         Returns:
+             True if successful, False otherwise
+         """
+         try:
+             url = f"{self.base_url}/api/toniesCustomJson"
+             logger.info("Saving tonies.custom.json to %s", url)
+
+             data = json.dumps(custom_json_data, indent=2).encode('utf-8')
+             headers = {'Content-Type': 'application/json'}
+
+             req = urllib.request.Request(url, data=data, headers=headers, method='PUT')
+
+             with self._urlopen(req) as response:
+                 result = response.read().decode('utf-8')
+                 logger.info("Successfully saved tonies.custom.json to server")
+                 return True
+
+         except Exception as e:
+             logger.error("Error saving tonies.custom.json to server: %s", e)
+             return False
+
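The two methods above pair into a read-modify-write cycle for tonies.custom.json. A sketch follows; the URL is a placeholder and the appended entry's fields are illustrative, not a schema taken from this diff.

    # Sketch: read tonies.custom.json, append an entry, write it back.
    from TonieToolbox.teddycloud import TeddyCloudClient

    client = TeddyCloudClient("https://teddycloud.local")

    entries = client.get_tonies_custom_json()
    if entries is not None:  # None means the request failed; [] means no file on the server yet
        entries.append({"series": "My Series", "episode": "Episode 1"})  # illustrative fields
        if client.put_tonies_custom_json(entries):
            print("tonies.custom.json updated")
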
+ def upload_to_teddycloud(file_path: str, teddycloud_url: str, ignore_ssl_verify: bool = False,
+                          special_folder: str = None, path: str = None, show_progress: bool = True,
+                          connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT,
+                          read_timeout: int = DEFAULT_READ_TIMEOUT,
+                          max_retries: int = DEFAULT_MAX_RETRIES,
+                          retry_delay: int = DEFAULT_RETRY_DELAY) -> bool:
+     """
+     Upload a .taf file to TeddyCloud.
+
+     Args:
+         file_path: Path to the .taf file to upload
+         teddycloud_url: URL of the TeddyCloud instance
+         ignore_ssl_verify: If True, SSL certificate verification will be disabled
+         special_folder: Special folder to upload to (currently only 'library' is supported)
+         path: Path where to write the file within the special folder
+         show_progress: Whether to show a progress bar during upload
+         connection_timeout: Timeout for establishing a connection in seconds
+         read_timeout: Timeout for reading data from the server in seconds
+         max_retries: Maximum number of retry attempts
+         retry_delay: Delay between retry attempts in seconds
+
+     Returns:
+         True if upload was successful, False otherwise
+     """
+     logger.info("Uploading %s to TeddyCloud %s", file_path, teddycloud_url)
+
+     if special_folder:
+         logger.info("Using special folder: %s", special_folder)
+
+     if path:
+         logger.info("Using custom path: %s", path)
+
+     if max_retries > 1:
+         logger.info("Will retry up to %d times with %d second delay if upload fails",
+                     max_retries, retry_delay)
+
+     client = TeddyCloudClient(
+         teddycloud_url, ignore_ssl_verify,
+         connection_timeout=connection_timeout,
+         read_timeout=read_timeout,
+         max_retries=max_retries,
+         retry_delay=retry_delay
+     )
+
+     return client.upload_file(file_path, special_folder, path, show_progress)
+
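upload_to_teddycloud is the one-call wrapper around the client; combined with get_file_paths it covers the typical batch upload. A sketch with placeholder pattern and URL:

    # Sketch: batch-upload every .taf matched by a pattern (placeholders throughout).
    from TonieToolbox.teddycloud import get_file_paths, upload_to_teddycloud

    for taf in get_file_paths("./output/*.taf"):
        upload_to_teddycloud(
            taf,
            "https://teddycloud.local",
            ignore_ssl_verify=True,   # only for self-signed certificates
            special_folder="library",
            max_retries=3,
            retry_delay=5,
        )
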
+ def get_tags_from_teddycloud(teddycloud_url: str, ignore_ssl_verify: bool = False) -> bool:
+     """
+     Get and display tags from a TeddyCloud instance.
+
+     Args:
+         teddycloud_url: URL of the TeddyCloud instance
+         ignore_ssl_verify: If True, SSL certificate verification will be disabled
+
+     Returns:
+         True if tags were retrieved successfully, False otherwise
+     """
+     logger.info("Getting tags from TeddyCloud %s", teddycloud_url)
+
+     client = TeddyCloudClient(teddycloud_url, ignore_ssl_verify)
+     response = client.get_tag_index()
+
+     if not response:
+         logger.error("Failed to retrieve tags from TeddyCloud")
+         return False
+
+     # Handle the specific JSON structure returned by TeddyCloud API
+     if isinstance(response, dict) and 'tags' in response:
+         tags = response['tags']
+         logger.info("Successfully retrieved %d tags from TeddyCloud", len(tags))
+
+         print("\nAvailable Tags from TeddyCloud:")
+         print("-" * 60)
+
+         # Sort tags by type and then by uid for better organization
+         sorted_tags = sorted(tags, key=lambda x: (x.get('type', ''), x.get('uid', '')))
+
+         for tag in sorted_tags:
+             uid = tag.get('uid', 'Unknown UID')
+             tag_type = tag.get('type', 'Unknown')
+             valid = "✓" if tag.get('valid', False) else "✗"
+             series = tag.get('tonieInfo', {}).get('series', '')
+             episode = tag.get('tonieInfo', {}).get('episode', '')
+             source = tag.get('source', '')
+
+             # Format header with key information
+             print(f"UID: {uid} ({tag_type}) - Valid: {valid}")
+
+             # Show more detailed information
+             if series:
+                 print(f"Series: {series}")
+             if episode:
+                 print(f"Episode: {episode}")
+             if source:
+                 print(f"Source: {source}")
+
+             # Show track information if available
+             tracks = tag.get('tonieInfo', {}).get('tracks', [])
+             if tracks:
+                 print("Tracks:")
+                 for i, track in enumerate(tracks, 1):
+                     print(f" {i}. {track}")
+
+             # Show track duration information
+             track_seconds = tag.get('trackSeconds', [])
+             if track_seconds and len(track_seconds) > 1:
+                 total_seconds = track_seconds[-1]
+                 minutes = total_seconds // 60
+                 seconds = total_seconds % 60
+                 print(f"Duration: {minutes}:{seconds:02d} ({len(track_seconds)-1} tracks)")
+
+             print("-" * 60)
+     else:
+         # Fallback for unexpected formats
+         logger.info("Successfully retrieved tag data from TeddyCloud")
+         print("\nTag data from TeddyCloud:")
+         print("-" * 60)
+
+         # Pretty print JSON data
+         import json
+         print(json.dumps(response, indent=2))
+
+         print("-" * 60)
+
+     return True
+
+ def get_tonies_custom_json_from_server(teddycloud_url: str, ignore_ssl_verify: bool = False) -> Optional[list]:
+     """
+     Get tonies.custom.json from the TeddyCloud server.
+
+     Args:
+         teddycloud_url: URL of the TeddyCloud instance
+         ignore_ssl_verify: If True, SSL certificate verification will be disabled
+
+     Returns:
+         List of custom tonie entries or None if request failed
+     """
+     if not teddycloud_url:
+         logger.error("Cannot load from server: No TeddyCloud URL provided")
+         return None
+
+     client = TeddyCloudClient(teddycloud_url, ignore_ssl_verify)
+     return client.get_tonies_custom_json()
+
+ def put_tonies_custom_json_to_server(teddycloud_url: str, custom_json_data: List[Dict[str, Any]],
+                                      ignore_ssl_verify: bool = False) -> bool:
+     """
+     Save tonies.custom.json to the TeddyCloud server.
+
+     Args:
+         teddycloud_url: URL of the TeddyCloud instance
+         custom_json_data: List of custom tonie entries to save
+         ignore_ssl_verify: If True, SSL certificate verification will be disabled
+
+     Returns:
+         True if successful, False otherwise
+     """
+     if not teddycloud_url:
+         logger.error("Cannot save to server: No TeddyCloud URL provided")
+         return False
+
+     client = TeddyCloudClient(teddycloud_url, ignore_ssl_verify)
+     return client.put_tonies_custom_json(custom_json_data)
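Taken together, the module-level helpers support a simple end-to-end flow. A final sketch under the same assumptions as above (placeholder URL and file name, import path inferred from the relative import at the top of the file):

    # Sketch: upload, then verify by listing tags and re-saving tonies.custom.json.
    from TonieToolbox.teddycloud import (
        upload_to_teddycloud,
        get_tags_from_teddycloud,
        get_tonies_custom_json_from_server,
        put_tonies_custom_json_to_server,
    )

    URL = "https://teddycloud.local"  # placeholder

    if upload_to_teddycloud("my_story.taf", URL, special_folder="library"):
        get_tags_from_teddycloud(URL)  # prints the tag overview to stdout
        entries = get_tonies_custom_json_from_server(URL)
        if entries is not None:
            put_tonies_custom_json_to_server(URL, entries)  # write back (unchanged here)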