TonieToolbox 0.5.0a1__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
TonieToolbox/recursive_processor.py CHANGED
@@ -1,3 +1,4 @@
+ #!/usr/bin/python3
  """
  Recursive folder processing functionality for the TonieToolbox package
  """
@@ -11,19 +12,19 @@ import re
  from .audio_conversion import filter_directories
  from .logger import get_logger

- logger = get_logger('recursive_processor')
+ logger = get_logger(__name__)


- def find_audio_folders(root_path: str) -> List[Dict[str, any]]:
+ def find_audio_folders(root_path: str) -> list[dict[str, any]]:
  """
  Find and return all folders that contain audio files in a recursive manner,
  organized in a way that handles nested folder structures.

  Args:
- root_path: Root directory to start searching from
+ root_path (str): Root directory to start searching from

  Returns:
- List of dictionaries with folder information, including paths and relationships
+ list[dict[str, any]]: List of dictionaries with folder information, including paths and relationships
  """
  logger.info("Finding folders with audio files in: %s", root_path)

@@ -68,15 +69,15 @@ def find_audio_folders(root_path: str) -> List[Dict[str, any]]:
  return folder_list


- def determine_processing_folders(folders: List[Dict[str, any]]) -> List[Dict[str, any]]:
+ def determine_processing_folders(folders: list[dict[str, any]]) -> list[dict[str, any]]:
  """
  Determine which folders should be processed based on their position in the hierarchy.

  Args:
- folders: List of folder dictionaries with hierarchy information
+ folders (list[dict[str, any]]): List of folder dictionaries with hierarchy information

  Returns:
- List of folders that should be processed (filtered)
+ list[dict[str, any]]: List of folders that should be processed (filtered)
  """
  # We'll use a set to track which folders we've decided to process
  to_process = set()
@@ -120,15 +121,15 @@ def determine_processing_folders(folders: List[Dict[str, any]]) -> List[Dict[str, any]]:
  return result


- def get_folder_audio_files(folder_path: str) -> List[str]:
+ def get_folder_audio_files(folder_path: str) -> list[str]:
  """
  Get all audio files in a specific folder.

  Args:
- folder_path: Path to folder
+ folder_path (str): Path to folder

  Returns:
- List of paths to audio files in natural sort order
+ list[str]: List of paths to audio files in natural sort order
  """
  audio_files = glob.glob(os.path.join(folder_path, "*"))
  filtered_files = filter_directories(audio_files)
@@ -140,15 +141,15 @@ def get_folder_audio_files(folder_path: str) -> List[str]:
  return sorted_files


- def natural_sort(file_list: List[str]) -> List[str]:
+ def natural_sort(file_list: list[str]) -> list[str]:
  """
  Sort a list of files in natural order (so that 2 comes before 10).

  Args:
- file_list: List of file paths
+ file_list (list[str]): List of file paths

  Returns:
- Naturally sorted list of file paths
+ list[str]: Naturally sorted list of file paths
  """
  def convert(text):
  return int(text) if text.isdigit() else text.lower()
@@ -159,16 +160,16 @@ def natural_sort(file_list: List[str]) -> List[str]:
  return sorted(file_list, key=alphanum_key)


- def extract_folder_meta(folder_path: str) -> Dict[str, str]:
+ def extract_folder_meta(folder_path: str) -> dict[str, str]:
  """
  Extract metadata from folder name.
  Common format might be: "YYYY - NNN - Title"

  Args:
- folder_path: Path to folder
+ folder_path (str): Path to folder

  Returns:
- Dictionary with extracted metadata (year, number, title)
+ dict[str, str]: Dictionary with extracted metadata (year, number, title)
  """
  folder_name = os.path.basename(folder_path)
  logger.debug("Extracting metadata from folder: %s", folder_name)
@@ -210,21 +211,17 @@ def get_folder_name_from_metadata(folder_path: str, use_media_tags: bool = False
  and optionally audio file metadata.

  Args:
- folder_path: Path to folder
- use_media_tags: Whether to use media tags from audio files if available
- template: Optional template for formatting output name using media tags
+ folder_path (str): Path to folder
+ use_media_tags (bool): Whether to use media tags from audio files if available
+ template (str | None): Optional template for formatting output name using media tags

  Returns:
- String with cleaned output name
+ str: String with cleaned output name
  """
- # Start with folder name metadata
  folder_meta = extract_folder_meta(folder_path)
- output_name = None
-
- # Try to get metadata from audio files if requested
+ output_name = None
  if use_media_tags:
  try:
- # Import here to avoid circular imports
  from .media_tags import extract_album_info, format_metadata_filename, is_available, normalize_tag_value

  if is_available():
@@ -247,12 +244,15 @@ def get_folder_name_from_metadata(folder_path: str, use_media_tags: bool = False
  if 'album' not in album_info or not album_info['album']:
  album_info['album'] = normalize_tag_value(folder_meta['title'])

- # Use template or default format
- format_template = template or "{album}"
- if 'artist' in album_info and album_info['artist']:
- format_template = format_template + " - {artist}"
- if 'number' in folder_meta and folder_meta['number']:
- format_template = "{tracknumber} - " + format_template
+ if template:
+ format_template = template
+ logger.debug("Using provided name template: %s", format_template)
+ else:
+ format_template = "{album}"
+ if 'artist' in album_info and album_info['artist']:
+ format_template = format_template + " - {artist}"
+ if 'number' in folder_meta and folder_meta['number']:
+ format_template = "{tracknumber} - " + format_template

  formatted_name = format_metadata_filename(album_info, format_template)

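
The rewritten block above only builds the default naming pattern when no template is supplied; a provided template is now used verbatim and logged. A minimal standalone sketch of that fallback (the dict contents are illustrative, not TonieToolbox's real metadata structures):

    def build_default_template(album_info: dict, folder_meta: dict) -> str:
        # Mirrors the fallback in the diff: start from "{album}", append the
        # artist when one is known, and prefix the folder's number if present.
        template = "{album}"
        if album_info.get("artist"):
            template += " - {artist}"
        if folder_meta.get("number"):
            template = "{tracknumber} - " + template
        return template

    # e.g. {"artist": "Example Artist"} plus {"number": "03"} yields
    # "{tracknumber} - {album} - {artist}"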
@@ -290,17 +290,17 @@ def get_folder_name_from_metadata(folder_path: str, use_media_tags: bool = False
  return output_name


- def process_recursive_folders(root_path, use_media_tags=False, name_template=None):
+ def process_recursive_folders(root_path: str, use_media_tags: bool = False, name_template: str = None) -> list[tuple[str, str, list[str]]]:
  """
  Process folders recursively for audio files to create Tonie files.

  Args:
  root_path (str): The root path to start processing from
  use_media_tags (bool): Whether to use media tags for naming
- name_template (str): Template for naming files using media tags
+ name_template (str | None): Template for naming files using media tags

  Returns:
- list: A list of tuples (output_name, folder_path, audio_files)
+ list[tuple[str, str, list[str]]]: A list of tuples (output_name, folder_path, audio_files)
  """
  logger = get_logger("recursive_processor")
  logger.info("Processing folders recursively: %s", root_path)
TonieToolbox/tags.py CHANGED
@@ -8,17 +8,16 @@ from .teddycloud import TeddyCloudClient
  import json
  from typing import Optional, Union

- logger = get_logger('tags')
+ logger = get_logger(__name__)

- def get_tags(client: TeddyCloudClient) -> bool:
+ def get_tags(client: 'TeddyCloudClient') -> bool:
  """
  Get and display tags from a TeddyCloud instance.

  Args:
- client: TeddyCloudClient instance to use for API communication
-
+ client (TeddyCloudClient): TeddyCloudClient instance to use for API communication
  Returns:
- True if tags were retrieved successfully, False otherwise
+ bool: True if tags were retrieved successfully, False otherwise
  """
  logger.info("Getting tags from TeddyCloud using provided client")

TonieToolbox/teddycloud.py CHANGED
@@ -9,8 +9,9 @@ import base64
  import ssl
  import socket
  import requests
+ import json
  from .logger import get_logger
- logger = get_logger('teddycloud')
+ logger = get_logger(__name__)
  DEFAULT_CONNECTION_TIMEOUT = 10
  DEFAULT_READ_TIMEOUT = 15 # seconds
  DEFAULT_MAX_RETRIES = 3
@@ -19,27 +20,33 @@ DEFAULT_RETRY_DELAY = 5 # seconds
  class TeddyCloudClient:
  """Client for interacting with TeddyCloud API."""

- def __init__(self, base_url: str, ignore_ssl_verify: bool = False,
- connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT,
- read_timeout: int = DEFAULT_READ_TIMEOUT,
- max_retries: int = DEFAULT_MAX_RETRIES,
- retry_delay: int = DEFAULT_RETRY_DELAY,
- username: str = None, password: str = None,
- cert_file: str = None, key_file: str = None):
+ def __init__(
+ self,
+ base_url: str,
+ ignore_ssl_verify: bool = False,
+ connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT,
+ read_timeout: int = DEFAULT_READ_TIMEOUT,
+ max_retries: int = DEFAULT_MAX_RETRIES,
+ retry_delay: int = DEFAULT_RETRY_DELAY,
+ username: str = None,
+ password: str = None,
+ cert_file: str = None,
+ key_file: str = None
+ ) -> None:
  """
  Initialize the TeddyCloud client.

  Args:
- base_url: Base URL of the TeddyCloud instance (e.g., https://teddycloud.example.com)
- ignore_ssl_verify: If True, SSL certificate verification will be disabled (useful for self-signed certificates)
- connection_timeout: Timeout for establishing a connection
- read_timeout: Timeout for reading data from the server
- max_retries: Maximum number of retries for failed requests
- retry_delay: Delay between retries
- username: Username for basic authentication (optional)
- password: Password for basic authentication (optional)
- cert_file: Path to client certificate file for certificate-based authentication (optional)
- key_file: Path to client private key file for certificate-based authentication (optional)
+ base_url (str): Base URL of the TeddyCloud instance (e.g., https://teddycloud.example.com)
+ ignore_ssl_verify (bool): If True, SSL certificate verification will be disabled (useful for self-signed certificates)
+ connection_timeout (int): Timeout for establishing a connection
+ read_timeout (int): Timeout for reading data from the server
+ max_retries (int): Maximum number of retries for failed requests
+ retry_delay (int): Delay between retries
+ username (str | None): Username for basic authentication (optional)
+ password (str | None): Password for basic authentication (optional)
+ cert_file (str | None): Path to client certificate file for certificate-based authentication (optional)
+ key_file (str | None): Path to client private key file for certificate-based authentication (optional)
  """
  self.base_url = base_url.rstrip('/')
  self.ignore_ssl_verify = ignore_ssl_verify
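
With the reflowed signature, construction reads naturally with keyword arguments. A hedged usage sketch (host name and credentials are placeholders; only methods that appear elsewhere in this diff are called):

    from TonieToolbox.teddycloud import TeddyCloudClient

    client = TeddyCloudClient(
        base_url="https://teddycloud.example.com",  # trailing slash is stripped
        ignore_ssl_verify=True,                     # e.g. a self-signed certificate
        username="admin",                           # basic auth is optional
        password="secret",
    )
    tonies = client.get_tonies_json()               # GET /api/toniesJson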
@@ -81,7 +88,7 @@ class TeddyCloudClient:
  except ssl.SSLError as e:
  raise ValueError(f"Failed to load client certificate: {e}")

- def _create_request_kwargs(self):
+ def _create_request_kwargs(self) -> dict:
  """
  Create common request keyword arguments for all API calls.

@@ -98,18 +105,16 @@ class TeddyCloudClient:
  kwargs['cert'] = self.cert
  return kwargs

- def _make_request(self, method, endpoint, **kwargs):
+ def _make_request(self, method: str, endpoint: str, **kwargs) -> 'requests.Response':
  """
  Make an HTTP request to the TeddyCloud API with retry logic.

  Args:
- method: HTTP method (GET, POST, etc.)
- endpoint: API endpoint (without base URL)
+ method (str): HTTP method (GET, POST, etc.)
+ endpoint (str): API endpoint (without base URL)
  **kwargs: Additional arguments to pass to requests
-
  Returns:
  requests.Response: Response object
-
  Raises:
  requests.exceptions.RequestException: If request fails after all retries
  """
@@ -171,7 +176,7 @@ class TeddyCloudClient:

  # ------------- GET API Methods -------------

- def get_tonies_custom_json(self):
+ def get_tonies_custom_json(self) -> dict:
  """
  Get custom Tonies JSON data from the TeddyCloud server.

@@ -181,7 +186,7 @@ class TeddyCloudClient:
  response = self._make_request('GET', '/api/toniesCustomJson')
  return response.json()

- def get_tonies_json(self):
+ def get_tonies_json(self) -> dict:
  """
  Get Tonies JSON data from the TeddyCloud server.

@@ -191,7 +196,7 @@ class TeddyCloudClient:
  response = self._make_request('GET', '/api/toniesJson')
  return response.json()

- def get_tag_index(self):
+ def get_tag_index(self) -> dict:
  """
  Get tag index data from the TeddyCloud server.

@@ -201,7 +206,7 @@ class TeddyCloudClient:
  response = self._make_request('GET', '/api/getTagIndex')
  return response.json()

- def get_file_index(self):
+ def get_file_index(self) -> dict:
  """
  Get file index data from the TeddyCloud server.

@@ -211,7 +216,7 @@ class TeddyCloudClient:
  response = self._make_request('GET', '/api/fileIndex')
  return response.json()

- def get_file_index_v2(self):
+ def get_file_index_v2(self) -> dict:
  """
  Get version 2 file index data from the TeddyCloud server.

@@ -221,7 +226,7 @@ class TeddyCloudClient:
  response = self._make_request('GET', '/api/fileIndexV2')
  return response.json()

- def get_tonieboxes_json(self):
+ def get_tonieboxes_json(self) -> dict:
  """
  Get Tonieboxes JSON data from the TeddyCloud server.

@@ -233,15 +238,14 @@ class TeddyCloudClient:

  # ------------- POST API Methods -------------

- def create_directory(self, path, overlay=None, special=None):
+ def create_directory(self, path: str, overlay: str = None, special: str = None) -> str:
  """
  Create a directory on the TeddyCloud server.

  Args:
- path: Directory path to create
- overlay: Settings overlay ID (optional)
- special: Special folder source, only 'library' supported yet (optional)
-
+ path (str): Directory path to create
+ overlay (str | None): Settings overlay ID (optional)
+ special (str | None): Special folder source, only 'library' supported yet (optional)
  Returns:
  str: Response message from server (usually "OK")
  """
@@ -254,15 +258,14 @@ class TeddyCloudClient:
  response = self._make_request('POST', '/api/dirCreate', params=params, data=path)
  return response.text

- def delete_directory(self, path, overlay=None, special=None):
+ def delete_directory(self, path: str, overlay: str = None, special: str = None) -> str:
  """
  Delete a directory from the TeddyCloud server.

  Args:
- path: Directory path to delete
- overlay: Settings overlay ID (optional)
- special: Special folder source, only 'library' supported yet (optional)
-
+ path (str): Directory path to delete
+ overlay (str | None): Settings overlay ID (optional)
+ special (str | None): Special folder source, only 'library' supported yet (optional)
  Returns:
  str: Response message from server (usually "OK")
  """
@@ -275,15 +278,14 @@ class TeddyCloudClient:
  response = self._make_request('POST', '/api/dirDelete', params=params, data=path)
  return response.text

- def delete_file(self, path, overlay=None, special=None):
+ def delete_file(self, path: str, overlay: str = None, special: str = None) -> str:
  """
  Delete a file from the TeddyCloud server.

  Args:
- path: File path to delete
- overlay: Settings overlay ID (optional)
- special: Special folder source, only 'library' supported yet (optional)
-
+ path (str): File path to delete
+ overlay (str | None): Settings overlay ID (optional)
+ special (str | None): Special folder source, only 'library' supported yet (optional)
  Returns:
  str: Response message from server (usually "OK")
  """
@@ -296,16 +298,15 @@ class TeddyCloudClient:
  response = self._make_request('POST', '/api/fileDelete', params=params, data=path)
  return response.text

- def upload_file(self, file_path, destination_path=None, overlay=None, special=None):
+ def upload_file(self, file_path: str, destination_path: str = None, overlay: str = None, special: str = None) -> dict:
  """
  Upload a file to the TeddyCloud server.

  Args:
- file_path: Local path to the file to upload
- destination_path: Server path where to write the file to (optional)
- overlay: Settings overlay ID (optional)
- special: Special folder source, only 'library' supported yet (optional)
-
+ file_path (str): Local path to the file to upload
+ destination_path (str | None): Server path where to write the file to (optional)
+ overlay (str | None): Settings overlay ID (optional)
+ special (str | None): Special folder source, only 'library' supported yet (optional)
  Returns:
  dict: JSON response from server
  """
@@ -334,3 +335,115 @@ class TeddyCloudClient:
  }

  # ------------- Custom API Methods -------------
+
+ def _get_paths_cache_file(self) -> str:
+ """
+ Get the path to the paths cache file.
+
+ Returns:
+ str: Path to the paths cache file
+ """
+ cache_dir = os.path.join(os.path.expanduser("~"), ".tonietoolbox")
+ os.makedirs(cache_dir, exist_ok=True)
+ return os.path.join(cache_dir, "paths.json")
+
+ def _load_paths_cache(self) -> set:
+ """
+ Load the paths cache from the cache file.
+
+ Returns:
+ set: Set of existing directory paths
+ """
+ cache_file = self._get_paths_cache_file()
+ try:
+ if os.path.exists(cache_file):
+ with open(cache_file, 'r', encoding='utf-8') as f:
+ paths_data = json.load(f)
+ # Convert to set for faster lookups
+ return set(paths_data.get('paths', []))
+ return set()
+ except Exception as e:
+ logger.warning(f"Failed to load paths cache: {e}")
+ return set()
+
+ def _save_paths_cache(self, paths: set) -> None:
+ """
+ Save the paths cache to the cache file.
+
+ Args:
+ paths (set): Set of directory paths to save
+ """
+ cache_file = self._get_paths_cache_file()
+ try:
+ paths_data = {'paths': list(paths)}
+ with open(cache_file, 'w', encoding='utf-8') as f:
+ json.dump(paths_data, f, indent=2)
+ logger.debug(f"Saved {len(paths)} paths to cache file")
+ except Exception as e:
+ logger.warning(f"Failed to save paths cache: {e}")
+
+ def create_directories_recursive(self, path: str, overlay: str = None, special: str = "library") -> str:
+ """
+ Create directories recursively on the TeddyCloud server.
+
+ This function handles both cases:
+ - Directories that already exist (prevents 500 errors)
+ - Parent directories that don't exist yet (creates them first)
+
+ This optimized version uses a local paths cache instead of querying the file index,
+ since the file index might not represent the correct folders.
+
+ Args:
+ path (str): Directory path to create (can contain multiple levels)
+ overlay (str | None): Settings overlay ID (optional)
+ special (str | None): Special folder source, only 'library' supported yet (optional)
+
+ Returns:
+ str: Response message from server
+ """
+ path = path.replace('\\', '/').strip('/')
+ if not path:
+ return "Path is empty"
+ existing_dirs = self._load_paths_cache()
+ logger.debug(f"Loaded {len(existing_dirs)} existing paths from cache")
+ path_components = path.split('/')
+ current_path = ""
+ result = "OK"
+ paths_updated = False
+ for component in path_components:
+ if current_path:
+ current_path += f"/{component}"
+ else:
+ current_path = component
+ if current_path in existing_dirs:
+ logger.debug(f"Directory '{current_path}' exists in paths cache, skipping creation")
+ continue
+
+ try:
+ result = self.create_directory(current_path, overlay, special)
+ logger.debug(f"Created directory: {current_path}")
+ # Add the newly created directory to our cache
+ existing_dirs.add(current_path)
+ paths_updated = True
+ except requests.exceptions.HTTPError as e:
+ # If it's a 500 error, likely the directory already exists
+ if e.response.status_code == 500:
+ if "already exists" in e.response.text.lower():
+ logger.debug(f"Directory '{current_path}' already exists, continuing")
+ # Add to our cache for future operations
+ existing_dirs.add(current_path)
+ paths_updated = True
+ else:
+ # Log the actual error message but continue anyway
+ # This allows us to continue even if the error is something else
+ logger.warning(f"Warning while creating '{current_path}': {str(e)}")
+ else:
+ # Re-raise for other HTTP errors
+ logger.error(f"Failed to create directory '{current_path}': {str(e)}")
+ raise
+
+ # Save updated paths cache if any changes were made
+ if paths_updated:
+ self._save_paths_cache(existing_dirs)
+
+ return result
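
The new helper walks the path one component at a time, consulting a local cache in ~/.tonietoolbox/paths.json before issuing /api/dirCreate calls and tolerating 500 "already exists" responses. A hedged usage sketch (server URL, directory names, and file paths are placeholders; whether destination_path needs a leading slash is an assumption):

    client = TeddyCloudClient(base_url="https://teddycloud.example.com")

    # Creates "Audiobooks", then "Audiobooks/2024" via the library source,
    # skipping any component already recorded in the local paths cache.
    client.create_directories_recursive("Audiobooks/2024", special="library")

    # Upload a file into the directory that now exists on the server.
    client.upload_file("output/story.taf", destination_path="/Audiobooks/2024")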