TonieToolbox 0.4.1__py3-none-any.whl → 0.5.0a1__py3-none-any.whl

This diff shows the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those versions.
@@ -5,229 +5,17 @@ Handles uploading .taf files to a TeddyCloud instance and interacting with the T
  """

  import os
- import sys
- import json
- import logging
- import urllib.parse
- import urllib.request
- import urllib.error
  import base64
- import mimetypes
  import ssl
- import time
  import socket
- import glob
- from typing import Optional, Dict, Any, Tuple, List
-
+ import requests
  from .logger import get_logger
-
  logger = get_logger('teddycloud')
-
- # Default timeout settings (in seconds)
  DEFAULT_CONNECTION_TIMEOUT = 10
- DEFAULT_READ_TIMEOUT = 300 # 5 minutes
+ DEFAULT_READ_TIMEOUT = 15 # seconds
  DEFAULT_MAX_RETRIES = 3
  DEFAULT_RETRY_DELAY = 5 # seconds

- # Add function to get file paths for any file type (not just audio)
- def get_file_paths(input_pattern):
- """
- Get file paths based on a pattern, without filtering by file type.
- This is different from audio_conversion.get_input_files as it doesn't filter for audio files.
-
- Args:
- input_pattern: Input file pattern or direct file path
-
- Returns:
- list: List of file paths
- """
- logger.debug("Getting file paths for pattern: %s", input_pattern)
-
- if input_pattern.endswith(".lst"):
- logger.debug("Processing list file: %s", input_pattern)
- list_dir = os.path.dirname(os.path.abspath(input_pattern))
- file_paths = []
-
- with open(input_pattern, 'r', encoding='utf-8') as file_list:
- for line_num, line in enumerate(file_list, 1):
- fname = line.strip()
- if not fname or fname.startswith('#'): # Skip empty lines and comments
- continue
-
- # Remove any quote characters from path
- fname = fname.strip('"\'')
-
- # Check if the path is absolute or has a drive letter (Windows)
- if os.path.isabs(fname) or (len(fname) > 1 and fname[1] == ':'):
- full_path = fname # Use as is if it's an absolute path
- logger.trace("Using absolute path from list: %s", full_path)
- else:
- full_path = os.path.join(list_dir, fname)
- logger.trace("Using relative path from list: %s", full_path)
-
- # Handle directory paths by finding all files in the directory
- if os.path.isdir(full_path):
- logger.debug("Path is a directory, finding files in: %s", full_path)
- dir_glob = os.path.join(full_path, "*")
- dir_files = sorted(glob.glob(dir_glob))
- if dir_files:
- file_paths.extend([f for f in dir_files if os.path.isfile(f)])
- logger.debug("Found %d files in directory", len(dir_files))
- else:
- logger.warning("No files found in directory at line %d: %s", line_num, full_path)
- elif os.path.isfile(full_path):
- file_paths.append(full_path)
- else:
- logger.warning("File not found at line %d: %s", line_num, full_path)
-
- logger.debug("Found %d files in list file", len(file_paths))
- return file_paths
- else:
- # Process as glob pattern
- logger.debug("Processing glob pattern: %s", input_pattern)
- file_paths = sorted([f for f in glob.glob(input_pattern) if os.path.isfile(f)])
-
- if not file_paths:
- # Try with explicit directory if the pattern didn't work
- # This is helpful for Windows paths with backslashes
- dir_name = os.path.dirname(input_pattern)
- file_name = os.path.basename(input_pattern)
- if dir_name:
- alt_pattern = os.path.join(dir_name, file_name)
- file_paths = sorted([f for f in glob.glob(alt_pattern) if os.path.isfile(f)])
-
- # If still no files, try with the literal path (no glob interpretation)
- if not file_paths and os.path.isfile(input_pattern):
- file_paths = [input_pattern]
-
- logger.debug("Found %d files matching pattern", len(file_paths))
- return file_paths
-
- class ProgressTracker:
- """Helper class to track and display upload progress."""
-
- def __init__(self, total_size, file_name):
- """
- Initialize progress tracker.
-
- Args:
- total_size: Total size of the file in bytes
- file_name: Name of the file being uploaded
- """
- self.total_size = total_size
- self.file_name = file_name
- self.uploaded = 0
- self.start_time = time.time()
- self.last_update = 0
- self.last_percent = 0
-
- def update(self, chunk_size):
- """
- Update progress by the given chunk size.
-
- Args:
- chunk_size: Size of the chunk that was uploaded
- """
- self.uploaded += chunk_size
- current_time = time.time()
-
- # Limit updates to max 10 per second to avoid flooding console
- if current_time - self.last_update >= 0.1:
- percent = min(100, int((self.uploaded / self.total_size) * 100))
-
- # Only update if percentage changed or it's been more than a second
- if percent != self.last_percent or current_time - self.last_update >= 1:
- self.print_progress(percent)
- self.last_update = current_time
- self.last_percent = percent
-
- def print_progress(self, percent):
- """
- Print progress bar.
-
- Args:
- percent: Current percentage of upload completed
- """
- bar_length = 30
- filled_length = int(bar_length * percent // 100)
- bar = '█' * filled_length + '-' * (bar_length - filled_length)
-
- # Calculate speed
- elapsed_time = max(0.1, time.time() - self.start_time)
- speed = self.uploaded / elapsed_time / 1024 # KB/s
-
- # Format speed based on magnitude
- if speed >= 1024:
- speed_str = f"{speed/1024:.2f} MB/s"
- else:
- speed_str = f"{speed:.2f} KB/s"
-
- # Format size
- if self.total_size >= 1048576: # 1 MB
- size_str = f"{self.uploaded/1048576:.2f}/{self.total_size/1048576:.2f} MB"
- else:
- size_str = f"{self.uploaded/1024:.2f}/{self.total_size/1024:.2f} KB"
-
- # Calculate remaining time
- if percent > 0:
- remaining = (self.total_size - self.uploaded) / (self.uploaded / elapsed_time)
- if remaining > 60:
- remaining_str = f"{int(remaining/60)}m {int(remaining%60)}s"
- else:
- remaining_str = f"{int(remaining)}s"
- else:
- remaining_str = "calculating..."
-
- # Print progress bar
- sys.stdout.write(f"\r{self.file_name}: |{bar}| {percent}% {size_str} {speed_str} ETA: {remaining_str}")
- sys.stdout.flush()
-
- if percent >= 100:
- sys.stdout.write("\n")
- sys.stdout.flush()
-
- class ProgressTrackerHandler(urllib.request.HTTPHandler):
- """Custom HTTP handler to track upload progress."""
-
- def __init__(self, tracker=None):
- """
- Initialize handler.
-
- Args:
- tracker: ProgressTracker instance to use for tracking
- """
- super().__init__()
- self.tracker = tracker
-
- def http_request(self, req):
- """
- Hook into HTTP request to track upload progress.
-
- Args:
- req: HTTP request object
-
- Returns:
- Modified request object
- """
- if self.tracker and req.data:
- req.add_unredirected_header('Content-Length', str(len(req.data)))
- old_data = req.data
-
- # Replace data with an iterator that tracks progress
- def data_iterator():
- chunk_size = 8192
- total_sent = 0
- data = old_data
- while total_sent < len(data):
- chunk = data[total_sent:total_sent + chunk_size]
- total_sent += len(chunk)
- self.tracker.update(len(chunk))
- yield chunk
-
- req.data = data_iterator()
-
- return req
-
  class TeddyCloudClient:
  """Client for interacting with TeddyCloud API."""

@@ -235,7 +23,9 @@ class TeddyCloudClient:
  connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT,
  read_timeout: int = DEFAULT_READ_TIMEOUT,
  max_retries: int = DEFAULT_MAX_RETRIES,
- retry_delay: int = DEFAULT_RETRY_DELAY):
+ retry_delay: int = DEFAULT_RETRY_DELAY,
+ username: str = None, password: str = None,
+ cert_file: str = None, key_file: str = None):
  """
  Initialize the TeddyCloud client.

@@ -246,6 +36,10 @@ class TeddyCloudClient:
  read_timeout: Timeout for reading data from the server
  max_retries: Maximum number of retries for failed requests
  retry_delay: Delay between retries
+ username: Username for basic authentication (optional)
+ password: Password for basic authentication (optional)
+ cert_file: Path to client certificate file for certificate-based authentication (optional)
+ key_file: Path to client private key file for certificate-based authentication (optional)
  """
  self.base_url = base_url.rstrip('/')
  self.ignore_ssl_verify = ignore_ssl_verify
@@ -253,427 +47,290 @@ class TeddyCloudClient:
  self.read_timeout = read_timeout
  self.max_retries = max_retries
  self.retry_delay = retry_delay
-
- # Create SSL context if needed
- self.ssl_context = None
+ self.username = username
+ self.password = password
+ self.cert_file = cert_file
+ self.key_file = key_file
+ self.ssl_context = ssl.create_default_context()
  if ignore_ssl_verify:
  logger.warning("SSL certificate verification is disabled. This is insecure!")
- self.ssl_context = ssl._create_unverified_context()
-
- def _urlopen(self, req):
- """Helper method to open URLs with optional SSL verification bypass and retry logic."""
- for attempt in range(self.max_retries):
+ self.ssl_context.check_hostname = False
+ self.ssl_context.verify_mode = ssl.CERT_NONE
+ if cert_file:
+ if not os.path.isfile(cert_file):
+ raise ValueError(f"Client certificate file not found: {cert_file}")
+ cert_key_file = key_file if key_file else cert_file
+ if not os.path.isfile(cert_key_file):
+ raise ValueError(f"Client key file not found: {cert_key_file}")
  try:
- if self.ignore_ssl_verify:
- return urllib.request.urlopen(req, context=self.ssl_context, timeout=self.connection_timeout)
- else:
- return urllib.request.urlopen(req, timeout=self.connection_timeout)
- except (urllib.error.URLError, socket.timeout) as e:
- logger.warning("Request failed (attempt %d/%d): %s", attempt + 1, self.max_retries, e)
- if attempt < self.max_retries - 1:
- time.sleep(self.retry_delay)
- else:
- raise
- except Exception as e:
- logger.error("Unexpected error during request: %s", e)
- raise
-
- def get_tag_index(self) -> Optional[list]:
+ logger.info("Using client certificate authentication")
+ try:
+ with open(cert_file, 'r') as f:
+ cert_content = f.read(50)
+ logger.debug(f"Certificate file starts with: {cert_content[:20]}...")
+ with open(cert_key_file, 'r') as f:
+ key_content = f.read(50)
+ logger.debug(f"Key file starts with: {key_content[:20]}...")
+ except Exception as e:
+ logger.warning(f"Error reading certificate files: {e}")
+ self.cert = (cert_file, cert_key_file)
+ logger.info(f"Client cert setup: {cert_file}, {cert_key_file}")
+ self.ssl_context.load_cert_chain(cert_file, cert_key_file)
+ logger.debug("Successfully loaded certificate into SSL context")
+
+ except ssl.SSLError as e:
+ raise ValueError(f"Failed to load client certificate: {e}")
+
+ def _create_request_kwargs(self):
  """
- Get list of tags from TeddyCloud.
+ Create common request keyword arguments for all API calls.

  Returns:
- List of tags or None if request failed
+ dict: Dictionary with common request kwargs
  """
- try:
- url = f"{self.base_url}/api/getTagIndex"
- headers = {'Content-Type': 'application/json'}
-
- req = urllib.request.Request(url, headers=headers)
-
- with self._urlopen(req) as response:
- tags = json.loads(response.read().decode('utf-8'))
- logger.debug("Found %d tags", len(tags))
- return tags
-
- except urllib.error.HTTPError as e:
- logger.error("Failed to get tags: %s", e)
- return None
- except Exception as e:
- logger.error("Error getting tags: %s", e)
- return None
-
- def upload_file(self, file_path: str, special_folder: str = None, path: str = None, show_progress: bool = True) -> bool:
+ kwargs = {
+ 'timeout': (self.connection_timeout, self.read_timeout),
+ 'verify': not self.ignore_ssl_verify
+ }
+ if self.username and self.password:
+ kwargs['auth'] = (self.username, self.password)
+ if self.cert_file:
+ kwargs['cert'] = self.cert
+ return kwargs
+
+ def _make_request(self, method, endpoint, **kwargs):
  """
- Upload a .taf or image file to TeddyCloud.
+ Make an HTTP request to the TeddyCloud API with retry logic.

  Args:
- file_path: Path to the file to upload (.taf, .jpg, .jpeg, .png)
- special_folder: Special folder to upload to (currently only 'library' is supported)
- path: Path where to write the file within the special folder
- show_progress: Whether to show a progress bar during upload
-
+ method: HTTP method (GET, POST, etc.)
+ endpoint: API endpoint (without base URL)
+ **kwargs: Additional arguments to pass to requests
+
  Returns:
- True if upload was successful, False otherwise
+ requests.Response: Response object
+
+ Raises:
+ requests.exceptions.RequestException: If request fails after all retries
  """
+ url = f"{self.base_url}/{endpoint.lstrip('/')}"
+ request_kwargs = self._create_request_kwargs()
+ request_kwargs.update(kwargs)
+ retry_count = 0
+ last_exception = None
+ old_timeout = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(self.connection_timeout * 2)
+
  try:
- if not os.path.exists(file_path):
- logger.error("File does not exist: %s", file_path)
- return False
-
- # Check for supported file types
- file_ext = os.path.splitext(file_path.lower())[1]
- is_taf = file_ext == '.taf'
- is_image = file_ext in ['.jpg', '.jpeg', '.png']
-
- if not (is_taf or is_image):
- logger.error("Unsupported file type %s: %s", file_ext, file_path)
- return False
-
- # Read file and prepare for upload
- file_size = os.path.getsize(file_path)
- logger.info("File size: %.2f MB", file_size / (1024 * 1024))
-
- with open(file_path, 'rb') as f:
- file_content = f.read()
-
- filename = os.path.basename(file_path)
-
- # Build multipart form data
- boundary = '----WebKitFormBoundary7MA4YWxkTrZu0gW'
- headers = {
- 'Content-Type': f'multipart/form-data; boundary={boundary}',
- 'User-Agent': 'TonieToolbox/1.0'
- }
-
- # Start request data with boundary
- body = []
- body.append(f'--{boundary}'.encode())
-
- # Set appropriate content type based on file extension
- content_type = 'application/octet-stream'
- if is_image:
- if file_ext == '.jpg' or file_ext == '.jpeg':
- content_type = 'image/jpeg'
- elif file_ext == '.png':
- content_type = 'image/png'
-
- body.append(f'Content-Disposition: form-data; name="file"; filename="{filename}"'.encode())
- body.append(f'Content-Type: {content_type}'.encode())
- body.append(b'')
- body.append(file_content)
- body.append(f'--{boundary}--'.encode())
- body.append(b'')
-
- # Join all parts with CRLF
- body = b'\r\n'.join(body)
-
- # Build the upload URL with query parameters
- if special_folder or path:
- query_params = []
-
- if special_folder:
- query_params.append(f"special={urllib.parse.quote(special_folder)}")
-
- if path:
- query_params.append(f"path={urllib.parse.quote(path)}")
-
- query_string = "&".join(query_params)
- upload_url = f"{self.base_url}/api/fileUpload?{query_string}"
- logger.debug("Using endpoint with params: %s", upload_url)
- else:
- # Fallback to previous endpoint for backward compatibility
- upload_url = f"{self.base_url}/api/v1/audio"
- logger.debug("Using legacy endpoint: %s", upload_url)
-
- # Setup progress tracking if requested
- if show_progress:
- tracker = ProgressTracker(total_size=len(body), file_name=filename)
- handler = ProgressTrackerHandler(tracker=tracker)
- opener = urllib.request.build_opener(handler)
- urllib.request.install_opener(opener)
-
- # Try upload with retries
- for attempt in range(self.max_retries):
+ while retry_count < self.max_retries:
  try:
- # Create a fresh request for each attempt
- req = urllib.request.Request(upload_url, data=body, headers=headers, method='POST')
-
- # Set timeouts
- socket.setdefaulttimeout(self.read_timeout)
-
- with self._urlopen(req) as response:
- result_text = response.read().decode('utf-8')
-
- # Try to parse as JSON, but handle plain text responses too
- try:
- result = json.loads(result_text)
- logger.info("Upload successful: %s", result.get('name', 'Unknown'))
- except json.JSONDecodeError:
- logger.info("Upload successful, response: %s", result_text)
-
- return True
-
- except urllib.error.HTTPError as e:
- logger.error("HTTP error during upload (attempt %d/%d): %s",
- attempt + 1, self.max_retries, e)
-
- # Try to parse error response
+ logger.debug(f"Making {method} request to {url}")
+ logger.debug(f"Using connection timeout: {self.connection_timeout}s, read timeout: {self.read_timeout}s")
+ session = requests.Session()
  try:
- error_msg = json.loads(e.read().decode('utf-8'))
- logger.error("Error details: %s", error_msg)
- except:
- pass
+ response = session.request(method, url, **request_kwargs)
+ logger.debug(f"Received response with status code {response.status_code}")
+ response.raise_for_status()
+ return response
+ finally:
+ session.close()
+
+ except requests.exceptions.Timeout as e:
+ retry_count += 1
+ last_exception = e
+ logger.warning(f"Request timed out (attempt {retry_count}/{self.max_retries}): {e}")

- # Only retry on certain HTTP errors (e.g. 500, 502, 503, 504)
- if e.code >= 500 and attempt < self.max_retries - 1:
+ if retry_count < self.max_retries:
+ import time
+ logger.info(f"Waiting {self.retry_delay} seconds before retrying...")
  time.sleep(self.retry_delay)
- continue
-
- return False
-
- except (urllib.error.URLError, socket.timeout) as e:
- # Network errors, timeout errors
- logger.error("Network error during upload (attempt %d/%d): %s",
- attempt + 1, self.max_retries, e)
+
+ except requests.exceptions.ConnectionError as e:
+ retry_count += 1
+ last_exception = e
+ logger.warning(f"Connection error (attempt {retry_count}/{self.max_retries}): {e}")

- if attempt < self.max_retries - 1:
+ if retry_count < self.max_retries:
+ import time
+ logger.info(f"Waiting {self.retry_delay} seconds before retrying...")
  time.sleep(self.retry_delay)
- continue
-
- return False
+
+ except requests.exceptions.RequestException as e:
+ retry_count += 1
+ last_exception = e
+ logger.warning(f"Request failed (attempt {retry_count}/{self.max_retries}): {e}")

- except Exception as e:
- logger.error("Unexpected error during upload: %s", e)
- return False
-
- return False
-
- except Exception as e:
- logger.error("Error preparing file for upload: %s", e)
- return False
+ if retry_count < self.max_retries:
+ import time
+ logger.info(f"Waiting {self.retry_delay} seconds before retrying...")
+ time.sleep(self.retry_delay)
+ logger.error(f"Request failed after {self.max_retries} attempts: {last_exception}")
+ raise last_exception
+ finally:
+ socket.setdefaulttimeout(old_timeout)

- def get_tonies_custom_json(self) -> Optional[list]:
+ # ------------- GET API Methods -------------
+
+ def get_tonies_custom_json(self):
  """
- Get tonies.custom.json from the TeddyCloud server.
+ Get custom Tonies JSON data from the TeddyCloud server.

  Returns:
- List of custom tonie entries or None if request failed
+ dict: JSON response containing custom Tonies data
  """
- try:
- url = f"{self.base_url}/api/toniesCustomJson"
- logger.info("Loading tonies.custom.json from %s", url)
-
- req = urllib.request.Request(url)
-
- with self._urlopen(req) as response:
- data = json.loads(response.read().decode('utf-8'))
- if isinstance(data, list):
- logger.info("Successfully loaded tonies.custom.json with %d entries", len(data))
- return data
- else:
- logger.error("Invalid tonies.custom.json format, expected list")
- return None
-
- except urllib.error.HTTPError as e:
- if e.code == 404:
- logger.info("tonies.custom.json not found on server, starting with empty list")
- return []
- else:
- logger.error("HTTP error loading tonies.custom.json: %s", e)
- return None
- except Exception as e:
- logger.error("Error loading tonies.custom.json: %s", e)
- return None
+ response = self._make_request('GET', '/api/toniesCustomJson')
+ return response.json()

- def put_tonies_custom_json(self, custom_json_data: List[Dict[str, Any]]) -> bool:
+ def get_tonies_json(self):
  """
- Save tonies.custom.json to the TeddyCloud server.
+ Get Tonies JSON data from the TeddyCloud server.

- Args:
- custom_json_data: List of custom tonie entries to save
-
  Returns:
- True if successful, False otherwise
+ dict: JSON response containing Tonies data
  """
- try:
- url = f"{self.base_url}/api/toniesCustomJson"
- logger.info("Saving tonies.custom.json to %s", url)
-
- data = json.dumps(custom_json_data, indent=2).encode('utf-8')
- headers = {'Content-Type': 'application/json'}
-
- req = urllib.request.Request(url, data=data, headers=headers, method='PUT')
-
- with self._urlopen(req) as response:
- result = response.read().decode('utf-8')
- logger.info("Successfully saved tonies.custom.json to server")
- return True
-
- except Exception as e:
- logger.error("Error saving tonies.custom.json to server: %s", e)
- return False
-
- def upload_to_teddycloud(file_path: str, teddycloud_url: str, ignore_ssl_verify: bool = False,
- special_folder: str = None, path: str = None, show_progress: bool = True,
- connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT,
- read_timeout: int = DEFAULT_READ_TIMEOUT,
- max_retries: int = DEFAULT_MAX_RETRIES,
- retry_delay: int = DEFAULT_RETRY_DELAY) -> bool:
- """
- Upload a .taf file to TeddyCloud.
+ response = self._make_request('GET', '/api/toniesJson')
+ return response.json()

- Args:
- file_path: Path to the .taf file to upload
- teddycloud_url: URL of the TeddyCloud instance
- ignore_ssl_verify: If True, SSL certificate verification will be disabled
- special_folder: Special folder to upload to (currently only 'library' is supported)
- path: Path where to write the file within the special folder
- show_progress: Whether to show a progress bar during upload
- connection_timeout: Timeout for establishing a connection in seconds
- read_timeout: Timeout for reading data from the server in seconds
- max_retries: Maximum number of retry attempts
- retry_delay: Delay between retry attempts in seconds
+ def get_tag_index(self):
+ """
+ Get tag index data from the TeddyCloud server.

- Returns:
- True if upload was successful, False otherwise
- """
- logger.info("Uploading %s to TeddyCloud %s", file_path, teddycloud_url)
-
- if special_folder:
- logger.info("Using special folder: %s", special_folder)
-
- if path:
- logger.info("Using custom path: %s", path)
-
- if max_retries > 1:
- logger.info("Will retry up to %d times with %d second delay if upload fails",
- max_retries, retry_delay)
-
- client = TeddyCloudClient(
- teddycloud_url, ignore_ssl_verify,
- connection_timeout=connection_timeout,
- read_timeout=read_timeout,
- max_retries=max_retries,
- retry_delay=retry_delay
- )
+ Returns:
+ dict: JSON response containing tag index data
+ """
+ response = self._make_request('GET', '/api/getTagIndex')
+ return response.json()

- return client.upload_file(file_path, special_folder, path, show_progress)
-
- def get_tags_from_teddycloud(teddycloud_url: str, ignore_ssl_verify: bool = False) -> bool:
- """
- Get and display tags from a TeddyCloud instance.
+ def get_file_index(self):
+ """
+ Get file index data from the TeddyCloud server.
+
+ Returns:
+ dict: JSON response containing file index data
+ """
+ response = self._make_request('GET', '/api/fileIndex')
+ return response.json()

- Args:
- teddycloud_url: URL of the TeddyCloud instance
- ignore_ssl_verify: If True, SSL certificate verification will be disabled
+ def get_file_index_v2(self):
+ """
+ Get version 2 file index data from the TeddyCloud server.

- Returns:
- True if tags were retrieved successfully, False otherwise
- """
- logger.info("Getting tags from TeddyCloud %s", teddycloud_url)
+ Returns:
+ dict: JSON response containing version 2 file index data
+ """
+ response = self._make_request('GET', '/api/fileIndexV2')
+ return response.json()

- client = TeddyCloudClient(teddycloud_url, ignore_ssl_verify)
- response = client.get_tag_index()
+ def get_tonieboxes_json(self):
+ """
+ Get Tonieboxes JSON data from the TeddyCloud server.
+
+ Returns:
+ dict: JSON response containing Tonieboxes data
+ """
+ response = self._make_request('GET', '/api/tonieboxesJson')
+ return response.json()

- if not response:
- logger.error("Failed to retrieve tags from TeddyCloud")
- return False
+ # ------------- POST API Methods -------------

- # Handle the specific JSON structure returned by TeddyCloud API
- if isinstance(response, dict) and 'tags' in response:
- tags = response['tags']
- logger.info("Successfully retrieved %d tags from TeddyCloud", len(tags))
-
- print("\nAvailable Tags from TeddyCloud:")
- print("-" * 60)
-
- # Sort tags by type and then by uid for better organization
- sorted_tags = sorted(tags, key=lambda x: (x.get('type', ''), x.get('uid', '')))
+ def create_directory(self, path, overlay=None, special=None):
+ """
+ Create a directory on the TeddyCloud server.

- for tag in sorted_tags:
- uid = tag.get('uid', 'Unknown UID')
- tag_type = tag.get('type', 'Unknown')
- valid = "✓" if tag.get('valid', False) else "✗"
- series = tag.get('tonieInfo', {}).get('series', '')
- episode = tag.get('tonieInfo', {}).get('episode', '')
- source = tag.get('source', '')
-
- # Format header with key information
- print(f"UID: {uid} ({tag_type}) - Valid: {valid}")
+ Args:
+ path: Directory path to create
+ overlay: Settings overlay ID (optional)
+ special: Special folder source, only 'library' supported yet (optional)

- # Show more detailed information
- if series:
- print(f"Series: {series}")
- if episode:
- print(f"Episode: {episode}")
- if source:
- print(f"Source: {source}")
-
- # Show track information if available
- tracks = tag.get('tonieInfo', {}).get('tracks', [])
- if tracks:
- print("Tracks:")
- for i, track in enumerate(tracks, 1):
- print(f" {i}. {track}")
-
- # Show track duration information
- track_seconds = tag.get('trackSeconds', [])
- if track_seconds and len(track_seconds) > 1:
- total_seconds = track_seconds[-1]
- minutes = total_seconds // 60
- seconds = total_seconds % 60
- print(f"Duration: {minutes}:{seconds:02d} ({len(track_seconds)-1} tracks)")
+ Returns:
+ str: Response message from server (usually "OK")
+ """
+ params = {}
+ if overlay:
+ params['overlay'] = overlay
+ if special:
+ params['special'] = special

- print("-" * 60)
- else:
- # Fallback for unexpected formats
- logger.info("Successfully retrieved tag data from TeddyCloud")
- print("\nTag data from TeddyCloud:")
- print("-" * 60)
-
- # Pretty print JSON data
- import json
- print(json.dumps(response, indent=2))
-
- print("-" * 60)
+ response = self._make_request('POST', '/api/dirCreate', params=params, data=path)
+ return response.text

- return True
-
- def get_tonies_custom_json_from_server(teddycloud_url: str, ignore_ssl_verify: bool = False) -> Optional[list]:
- """
- Get tonies.custom.json from the TeddyCloud server.
-
- Args:
- teddycloud_url: URL of the TeddyCloud instance
- ignore_ssl_verify: If True, SSL certificate verification will be disabled
-
- Returns:
- List of custom tonie entries or None if request failed
- """
- if not teddycloud_url:
- logger.error("Cannot load from server: No TeddyCloud URL provided")
- return None
+ def delete_directory(self, path, overlay=None, special=None):
+ """
+ Delete a directory from the TeddyCloud server.

- client = TeddyCloudClient(teddycloud_url, ignore_ssl_verify)
- return client.get_tonies_custom_json()
-
- def put_tonies_custom_json_to_server(teddycloud_url: str, custom_json_data: List[Dict[str, Any]],
- ignore_ssl_verify: bool = False) -> bool:
- """
- Save tonies.custom.json to the TeddyCloud server.
+ Args:
+ path: Directory path to delete
+ overlay: Settings overlay ID (optional)
+ special: Special folder source, only 'library' supported yet (optional)
+
+ Returns:
+ str: Response message from server (usually "OK")
+ """
+ params = {}
+ if overlay:
+ params['overlay'] = overlay
+ if special:
+ params['special'] = special
+
+ response = self._make_request('POST', '/api/dirDelete', params=params, data=path)
+ return response.text

- Args:
- teddycloud_url: URL of the TeddyCloud instance
- custom_json_data: List of custom tonie entries to save
- ignore_ssl_verify: If True, SSL certificate verification will be disabled
+ def delete_file(self, path, overlay=None, special=None):
+ """
+ Delete a file from the TeddyCloud server.

- Returns:
- True if successful, False otherwise
- """
- if not teddycloud_url:
- logger.error("Cannot save to server: No TeddyCloud URL provided")
- return False
+ Args:
+ path: File path to delete
+ overlay: Settings overlay ID (optional)
+ special: Special folder source, only 'library' supported yet (optional)
+
+ Returns:
+ str: Response message from server (usually "OK")
+ """
+ params = {}
+ if overlay:
+ params['overlay'] = overlay
+ if special:
+ params['special'] = special
+
+ response = self._make_request('POST', '/api/fileDelete', params=params, data=path)
+ return response.text
+
+ def upload_file(self, file_path, destination_path=None, overlay=None, special=None):
+ """
+ Upload a file to the TeddyCloud server.

- client = TeddyCloudClient(teddycloud_url, ignore_ssl_verify)
- return client.put_tonies_custom_json(custom_json_data)
+ Args:
+ file_path: Local path to the file to upload
+ destination_path: Server path where to write the file to (optional)
+ overlay: Settings overlay ID (optional)
+ special: Special folder source, only 'library' supported yet (optional)
+
+ Returns:
+ dict: JSON response from server
+ """
+ if not os.path.isfile(file_path):
+ raise FileNotFoundError(f"File to upload not found: {file_path}")
+
+ params = {}
+ if destination_path:
+ params['path'] = destination_path
+ if overlay:
+ params['overlay'] = overlay
+ if special:
+ params['special'] = special
+
+ with open(file_path, 'rb') as f:
+ files = {'file': (os.path.basename(file_path), f, 'application/octet-stream')}
+ response = self._make_request('POST', '/api/fileUpload', params=params, files=files)
+
+ try:
+ return response.json()
+ except ValueError:
+ return {
+ 'success': response.status_code == 200,
+ 'status_code': response.status_code,
+ 'message': response.text
+ }
+
+ # ------------- Custom API Methods -------------
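
For reference, a minimal usage sketch of the requests-based client introduced in 0.5.0a1, based only on the signatures visible in this diff. The import path TonieToolbox.teddycloud, the server URL, the credential values, and the destination folder name are assumptions for illustration and are not part of this diff:

    # Assumed import path for the module shown above.
    from TonieToolbox.teddycloud import TeddyCloudClient

    # Hypothetical server and basic-auth credentials; username/password and
    # cert_file/key_file are both optional constructor arguments in 0.5.0a1.
    client = TeddyCloudClient(
        "https://teddycloud.example",
        ignore_ssl_verify=False,
        username="admin",
        password="secret",
    )

    # GET /api/getTagIndex, returned as parsed JSON.
    tags = client.get_tag_index()

    # POST /api/dirCreate and /api/fileUpload; "new-folder" is a hypothetical path.
    client.create_directory("new-folder", special="library")
    result = client.upload_file("example.taf", destination_path="new-folder", special="library")

As shown in _make_request above, each call retries on requests timeouts, connection errors, and other request exceptions up to max_retries times, waits retry_delay seconds between attempts, and re-raises the last exception if every attempt fails.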