TonieToolbox 0.5.1__py3-none-any.whl → 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- TonieToolbox/__init__.py +2 -1
- TonieToolbox/__main__.py +240 -98
- TonieToolbox/artwork.py +59 -10
- TonieToolbox/audio_conversion.py +33 -29
- TonieToolbox/constants.py +133 -10
- TonieToolbox/dependency_manager.py +679 -184
- TonieToolbox/filename_generator.py +57 -10
- TonieToolbox/integration.py +73 -0
- TonieToolbox/integration_macos.py +613 -0
- TonieToolbox/integration_ubuntu.py +2 -0
- TonieToolbox/integration_windows.py +445 -0
- TonieToolbox/logger.py +9 -10
- TonieToolbox/media_tags.py +19 -100
- TonieToolbox/ogg_page.py +41 -41
- TonieToolbox/opus_packet.py +15 -15
- TonieToolbox/recursive_processor.py +24 -23
- TonieToolbox/tags.py +4 -5
- TonieToolbox/teddycloud.py +164 -51
- TonieToolbox/tonie_analysis.py +26 -24
- TonieToolbox/tonie_file.py +73 -45
- TonieToolbox/tonies_json.py +71 -67
- TonieToolbox/version_handler.py +14 -20
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/METADATA +129 -92
- tonietoolbox-0.6.0.dist-info/RECORD +30 -0
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/WHEEL +1 -1
- tonietoolbox-0.5.1.dist-info/RECORD +0 -26
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/entry_points.txt +0 -0
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/licenses/LICENSE.md +0 -0
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/top_level.txt +0 -0
TonieToolbox/recursive_processor.py
CHANGED
@@ -1,3 +1,4 @@
+#!/usr/bin/python3
 """
 Recursive folder processing functionality for the TonieToolbox package
 """
@@ -11,19 +12,19 @@ import re
 from .audio_conversion import filter_directories
 from .logger import get_logger

-logger = get_logger(
+logger = get_logger(__name__)


-def find_audio_folders(root_path: str) ->
+def find_audio_folders(root_path: str) -> list[dict[str, any]]:
     """
     Find and return all folders that contain audio files in a recursive manner,
     organized in a way that handles nested folder structures.

     Args:
-        root_path: Root directory to start searching from
+        root_path (str): Root directory to start searching from

     Returns:
-        List of dictionaries with folder information, including paths and relationships
+        list[dict[str, any]]: List of dictionaries with folder information, including paths and relationships
     """
     logger.info("Finding folders with audio files in: %s", root_path)

@@ -68,15 +69,15 @@ def find_audio_folders(root_path: str) -> List[Dict[str, any]]:
     return folder_list


-def determine_processing_folders(folders:
+def determine_processing_folders(folders: list[dict[str, any]]) -> list[dict[str, any]]:
     """
     Determine which folders should be processed based on their position in the hierarchy.

     Args:
-        folders: List of folder dictionaries with hierarchy information
+        folders (list[dict[str, any]]): List of folder dictionaries with hierarchy information

     Returns:
-        List of folders that should be processed (filtered)
+        list[dict[str, any]]: List of folders that should be processed (filtered)
     """
     # We'll use a set to track which folders we've decided to process
     to_process = set()
@@ -120,15 +121,15 @@ def determine_processing_folders(folders: List[Dict[str, any]]) -> List[Dict[str
     return result


-def get_folder_audio_files(folder_path: str) ->
+def get_folder_audio_files(folder_path: str) -> list[str]:
     """
     Get all audio files in a specific folder.

     Args:
-        folder_path: Path to folder
+        folder_path (str): Path to folder

     Returns:
-        List of paths to audio files in natural sort order
+        list[str]: List of paths to audio files in natural sort order
     """
     audio_files = glob.glob(os.path.join(folder_path, "*"))
     filtered_files = filter_directories(audio_files)
@@ -140,15 +141,15 @@ def get_folder_audio_files(folder_path: str) -> List[str]:
     return sorted_files


-def natural_sort(file_list:
+def natural_sort(file_list: list[str]) -> list[str]:
     """
     Sort a list of files in natural order (so that 2 comes before 10).

     Args:
-        file_list: List of file paths
+        file_list (list[str]): List of file paths

     Returns:
-        Naturally sorted list of file paths
+        list[str]: Naturally sorted list of file paths
     """
     def convert(text):
         return int(text) if text.isdigit() else text.lower()
@@ -159,16 +160,16 @@ def natural_sort(file_list: List[str]) -> List[str]:
     return sorted(file_list, key=alphanum_key)


-def extract_folder_meta(folder_path: str) ->
+def extract_folder_meta(folder_path: str) -> dict[str, str]:
     """
     Extract metadata from folder name.
     Common format might be: "YYYY - NNN - Title"

     Args:
-        folder_path: Path to folder
+        folder_path (str): Path to folder

     Returns:
-        Dictionary with extracted metadata (year, number, title)
+        dict[str, str]: Dictionary with extracted metadata (year, number, title)
     """
     folder_name = os.path.basename(folder_path)
     logger.debug("Extracting metadata from folder: %s", folder_name)
@@ -210,12 +211,12 @@ def get_folder_name_from_metadata(folder_path: str, use_media_tags: bool = False
     and optionally audio file metadata.

     Args:
-        folder_path: Path to folder
-        use_media_tags: Whether to use media tags from audio files if available
-        template: Optional template for formatting output name using media tags
+        folder_path (str): Path to folder
+        use_media_tags (bool): Whether to use media tags from audio files if available
+        template (str | None): Optional template for formatting output name using media tags

     Returns:
-        String with cleaned output name
+        str: String with cleaned output name
     """
     folder_meta = extract_folder_meta(folder_path)
     output_name = None
@@ -289,17 +290,17 @@ def get_folder_name_from_metadata(folder_path: str, use_media_tags: bool = False
     return output_name


-def process_recursive_folders(root_path, use_media_tags=False, name_template=None):
+def process_recursive_folders(root_path: str, use_media_tags: bool = False, name_template: str = None) -> list[tuple[str, str, list[str]]]:
     """
     Process folders recursively for audio files to create Tonie files.

     Args:
         root_path (str): The root path to start processing from
         use_media_tags (bool): Whether to use media tags for naming
-        name_template (str): Template for naming files using media tags
+        name_template (str | None): Template for naming files using media tags

     Returns:
-        list: A list of tuples (output_name, folder_path, audio_files)
+        list[tuple[str, str, list[str]]]: A list of tuples (output_name, folder_path, audio_files)
     """
     logger = get_logger("recursive_processor")
     logger.info("Processing folders recursively: %s", root_path)
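Note on the natural_sort change above: the diff only shows the convert helper and the final sorted call, so the re.split-based key in the following stand-alone sketch is an assumption about how such a natural sort is usually completed, not code taken from the package.

import re

def natural_sort(file_list: list[str]) -> list[str]:
    # Digit runs become ints so "2" sorts before "10"
    def convert(text):
        return int(text) if text.isdigit() else text.lower()

    # Assumed key: split each path into digit and non-digit chunks
    def alphanum_key(key):
        return [convert(c) for c in re.split(r'(\d+)', key)]

    return sorted(file_list, key=alphanum_key)

print(natural_sort(["track10.mp3", "track2.mp3"]))  # ['track2.mp3', 'track10.mp3']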
TonieToolbox/tags.py
CHANGED
@@ -8,17 +8,16 @@ from .teddycloud import TeddyCloudClient
 import json
 from typing import Optional, Union

-logger = get_logger(
+logger = get_logger(__name__)

-def get_tags(client: TeddyCloudClient) -> bool:
+def get_tags(client: 'TeddyCloudClient') -> bool:
     """
     Get and display tags from a TeddyCloud instance.

     Args:
-        client: TeddyCloudClient instance to use for API communication
-
+        client (TeddyCloudClient): TeddyCloudClient instance to use for API communication
     Returns:
-        True if tags were retrieved successfully, False otherwise
+        bool: True if tags were retrieved successfully, False otherwise
     """
     logger.info("Getting tags from TeddyCloud using provided client")

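Taken together with the TeddyCloudClient changes below, the new get_tags signature can be exercised roughly as in this minimal sketch; the server URL is a placeholder and error handling is omitted.

from TonieToolbox.teddycloud import TeddyCloudClient
from TonieToolbox.tags import get_tags

# Placeholder host; ignore_ssl_verify=True is only for self-signed certificates
client = TeddyCloudClient("https://teddycloud.local", ignore_ssl_verify=True)
if get_tags(client):
    print("Tag index retrieved and displayed")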
TonieToolbox/teddycloud.py
CHANGED
@@ -9,8 +9,9 @@ import base64
 import ssl
 import socket
 import requests
+import json
 from .logger import get_logger
-logger = get_logger(
+logger = get_logger(__name__)
 DEFAULT_CONNECTION_TIMEOUT = 10
 DEFAULT_READ_TIMEOUT = 15 # seconds
 DEFAULT_MAX_RETRIES = 3
@@ -19,27 +20,33 @@ DEFAULT_RETRY_DELAY = 5 # seconds
 class TeddyCloudClient:
     """Client for interacting with TeddyCloud API."""

-    def __init__(
-
-
-
-
-
-
+    def __init__(
+        self,
+        base_url: str,
+        ignore_ssl_verify: bool = False,
+        connection_timeout: int = DEFAULT_CONNECTION_TIMEOUT,
+        read_timeout: int = DEFAULT_READ_TIMEOUT,
+        max_retries: int = DEFAULT_MAX_RETRIES,
+        retry_delay: int = DEFAULT_RETRY_DELAY,
+        username: str = None,
+        password: str = None,
+        cert_file: str = None,
+        key_file: str = None
+    ) -> None:
         """
         Initialize the TeddyCloud client.

         Args:
-            base_url: Base URL of the TeddyCloud instance (e.g., https://teddycloud.example.com)
-            ignore_ssl_verify: If True, SSL certificate verification will be disabled (useful for self-signed certificates)
-            connection_timeout: Timeout for establishing a connection
-            read_timeout: Timeout for reading data from the server
-            max_retries: Maximum number of retries for failed requests
-            retry_delay: Delay between retries
-            username: Username for basic authentication (optional)
-            password: Password for basic authentication (optional)
-            cert_file: Path to client certificate file for certificate-based authentication (optional)
-            key_file: Path to client private key file for certificate-based authentication (optional)
+            base_url (str): Base URL of the TeddyCloud instance (e.g., https://teddycloud.example.com)
+            ignore_ssl_verify (bool): If True, SSL certificate verification will be disabled (useful for self-signed certificates)
+            connection_timeout (int): Timeout for establishing a connection
+            read_timeout (int): Timeout for reading data from the server
+            max_retries (int): Maximum number of retries for failed requests
+            retry_delay (int): Delay between retries
+            username (str | None): Username for basic authentication (optional)
+            password (str | None): Password for basic authentication (optional)
+            cert_file (str | None): Path to client certificate file for certificate-based authentication (optional)
+            key_file (str | None): Path to client private key file for certificate-based authentication (optional)
         """
         self.base_url = base_url.rstrip('/')
         self.ignore_ssl_verify = ignore_ssl_verify
@@ -81,7 +88,7 @@ class TeddyCloudClient:
         except ssl.SSLError as e:
             raise ValueError(f"Failed to load client certificate: {e}")

-    def _create_request_kwargs(self):
+    def _create_request_kwargs(self) -> dict:
         """
         Create common request keyword arguments for all API calls.

@@ -98,18 +105,16 @@ class TeddyCloudClient:
             kwargs['cert'] = self.cert
         return kwargs

-    def _make_request(self, method, endpoint, **kwargs):
+    def _make_request(self, method: str, endpoint: str, **kwargs) -> 'requests.Response':
         """
         Make an HTTP request to the TeddyCloud API with retry logic.

         Args:
-            method: HTTP method (GET, POST, etc.)
-            endpoint: API endpoint (without base URL)
+            method (str): HTTP method (GET, POST, etc.)
+            endpoint (str): API endpoint (without base URL)
             **kwargs: Additional arguments to pass to requests
-
         Returns:
             requests.Response: Response object
-
         Raises:
             requests.exceptions.RequestException: If request fails after all retries
         """
@@ -171,7 +176,7 @@ class TeddyCloudClient:

     # ------------- GET API Methods -------------

-    def get_tonies_custom_json(self):
+    def get_tonies_custom_json(self) -> dict:
         """
         Get custom Tonies JSON data from the TeddyCloud server.

@@ -181,7 +186,7 @@ class TeddyCloudClient:
         response = self._make_request('GET', '/api/toniesCustomJson')
         return response.json()

-    def get_tonies_json(self):
+    def get_tonies_json(self) -> dict:
         """
         Get Tonies JSON data from the TeddyCloud server.

@@ -191,7 +196,7 @@ class TeddyCloudClient:
         response = self._make_request('GET', '/api/toniesJson')
         return response.json()

-    def get_tag_index(self):
+    def get_tag_index(self) -> dict:
         """
         Get tag index data from the TeddyCloud server.

@@ -201,7 +206,7 @@ class TeddyCloudClient:
         response = self._make_request('GET', '/api/getTagIndex')
         return response.json()

-    def get_file_index(self):
+    def get_file_index(self) -> dict:
         """
         Get file index data from the TeddyCloud server.

@@ -211,7 +216,7 @@ class TeddyCloudClient:
         response = self._make_request('GET', '/api/fileIndex')
         return response.json()

-    def get_file_index_v2(self):
+    def get_file_index_v2(self) -> dict:
         """
         Get version 2 file index data from the TeddyCloud server.

@@ -221,7 +226,7 @@ class TeddyCloudClient:
         response = self._make_request('GET', '/api/fileIndexV2')
         return response.json()

-    def get_tonieboxes_json(self):
+    def get_tonieboxes_json(self) -> dict:
         """
         Get Tonieboxes JSON data from the TeddyCloud server.

@@ -233,15 +238,14 @@ class TeddyCloudClient:

     # ------------- POST API Methods -------------

-    def create_directory(self, path, overlay=None, special=None):
+    def create_directory(self, path: str, overlay: str = None, special: str = None) -> str:
         """
         Create a directory on the TeddyCloud server.

         Args:
-            path: Directory path to create
-            overlay: Settings overlay ID (optional)
-            special: Special folder source, only 'library' supported yet (optional)
-
+            path (str): Directory path to create
+            overlay (str | None): Settings overlay ID (optional)
+            special (str | None): Special folder source, only 'library' supported yet (optional)
         Returns:
             str: Response message from server (usually "OK")
         """
@@ -254,15 +258,14 @@ class TeddyCloudClient:
         response = self._make_request('POST', '/api/dirCreate', params=params, data=path)
         return response.text

-    def delete_directory(self, path, overlay=None, special=None):
+    def delete_directory(self, path: str, overlay: str = None, special: str = None) -> str:
         """
         Delete a directory from the TeddyCloud server.

         Args:
-            path: Directory path to delete
-            overlay: Settings overlay ID (optional)
-            special: Special folder source, only 'library' supported yet (optional)
-
+            path (str): Directory path to delete
+            overlay (str | None): Settings overlay ID (optional)
+            special (str | None): Special folder source, only 'library' supported yet (optional)
         Returns:
             str: Response message from server (usually "OK")
         """
@@ -275,15 +278,14 @@ class TeddyCloudClient:
         response = self._make_request('POST', '/api/dirDelete', params=params, data=path)
         return response.text

-    def delete_file(self, path, overlay=None, special=None):
+    def delete_file(self, path: str, overlay: str = None, special: str = None) -> str:
         """
         Delete a file from the TeddyCloud server.

         Args:
-            path: File path to delete
-            overlay: Settings overlay ID (optional)
-            special: Special folder source, only 'library' supported yet (optional)
-
+            path (str): File path to delete
+            overlay (str | None): Settings overlay ID (optional)
+            special (str | None): Special folder source, only 'library' supported yet (optional)
         Returns:
             str: Response message from server (usually "OK")
         """
@@ -296,16 +298,15 @@ class TeddyCloudClient:
         response = self._make_request('POST', '/api/fileDelete', params=params, data=path)
         return response.text

-    def upload_file(self, file_path, destination_path=None, overlay=None, special=None):
+    def upload_file(self, file_path: str, destination_path: str = None, overlay: str = None, special: str = None) -> dict:
         """
         Upload a file to the TeddyCloud server.

         Args:
-            file_path: Local path to the file to upload
-            destination_path: Server path where to write the file to (optional)
-            overlay: Settings overlay ID (optional)
-            special: Special folder source, only 'library' supported yet (optional)
-
+            file_path (str): Local path to the file to upload
+            destination_path (str | None): Server path where to write the file to (optional)
+            overlay (str | None): Settings overlay ID (optional)
+            special (str | None): Special folder source, only 'library' supported yet (optional)
         Returns:
             dict: JSON response from server
         """
@@ -334,3 +335,115 @@ class TeddyCloudClient:
         }

     # ------------- Custom API Methods -------------
+
+    def _get_paths_cache_file(self) -> str:
+        """
+        Get the path to the paths cache file.
+
+        Returns:
+            str: Path to the paths cache file
+        """
+        cache_dir = os.path.join(os.path.expanduser("~"), ".tonietoolbox")
+        os.makedirs(cache_dir, exist_ok=True)
+        return os.path.join(cache_dir, "paths.json")
+
+    def _load_paths_cache(self) -> set:
+        """
+        Load the paths cache from the cache file.
+
+        Returns:
+            set: Set of existing directory paths
+        """
+        cache_file = self._get_paths_cache_file()
+        try:
+            if os.path.exists(cache_file):
+                with open(cache_file, 'r', encoding='utf-8') as f:
+                    paths_data = json.load(f)
+                    # Convert to set for faster lookups
+                    return set(paths_data.get('paths', []))
+            return set()
+        except Exception as e:
+            logger.warning(f"Failed to load paths cache: {e}")
+            return set()
+
+    def _save_paths_cache(self, paths: set) -> None:
+        """
+        Save the paths cache to the cache file.
+
+        Args:
+            paths (set): Set of directory paths to save
+        """
+        cache_file = self._get_paths_cache_file()
+        try:
+            paths_data = {'paths': list(paths)}
+            with open(cache_file, 'w', encoding='utf-8') as f:
+                json.dump(paths_data, f, indent=2)
+            logger.debug(f"Saved {len(paths)} paths to cache file")
+        except Exception as e:
+            logger.warning(f"Failed to save paths cache: {e}")
+
+    def create_directories_recursive(self, path: str, overlay: str = None, special: str = "library") -> str:
+        """
+        Create directories recursively on the TeddyCloud server.
+
+        This function handles both cases:
+        - Directories that already exist (prevents 500 errors)
+        - Parent directories that don't exist yet (creates them first)
+
+        This optimized version uses a local paths cache instead of querying the file index,
+        since the file index might not represent the correct folders.
+
+        Args:
+            path (str): Directory path to create (can contain multiple levels)
+            overlay (str | None): Settings overlay ID (optional)
+            special (str | None): Special folder source, only 'library' supported yet (optional)
+
+        Returns:
+            str: Response message from server
+        """
+        path = path.replace('\\', '/').strip('/')
+        if not path:
+            return "Path is empty"
+        existing_dirs = self._load_paths_cache()
+        logger.debug(f"Loaded {len(existing_dirs)} existing paths from cache")
+        path_components = path.split('/')
+        current_path = ""
+        result = "OK"
+        paths_updated = False
+        for component in path_components:
+            if current_path:
+                current_path += f"/{component}"
+            else:
+                current_path = component
+            if current_path in existing_dirs:
+                logger.debug(f"Directory '{current_path}' exists in paths cache, skipping creation")
+                continue
+
+            try:
+                result = self.create_directory(current_path, overlay, special)
+                logger.debug(f"Created directory: {current_path}")
+                # Add the newly created directory to our cache
+                existing_dirs.add(current_path)
+                paths_updated = True
+            except requests.exceptions.HTTPError as e:
+                # If it's a 500 error, likely the directory already exists
+                if e.response.status_code == 500:
+                    if "already exists" in e.response.text.lower():
+                        logger.debug(f"Directory '{current_path}' already exists, continuing")
+                        # Add to our cache for future operations
+                        existing_dirs.add(current_path)
+                        paths_updated = True
+                    else:
+                        # Log the actual error message but continue anyway
+                        # This allows us to continue even if the error is something else
+                        logger.warning(f"Warning while creating '{current_path}': {str(e)}")
+                else:
+                    # Re-raise for other HTTP errors
+                    logger.error(f"Failed to create directory '{current_path}': {str(e)}")
+                    raise
+
+        # Save updated paths cache if any changes were made
+        if paths_updated:
+            self._save_paths_cache(existing_dirs)
+
+        return result
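A minimal sketch of how the new create_directories_recursive and the typed upload_file might be used together; the host and library paths are placeholders, and the call pattern is inferred from the signatures shown above rather than taken from TonieToolbox's own CLI code.

from TonieToolbox.teddycloud import TeddyCloudClient

client = TeddyCloudClient("https://teddycloud.local", ignore_ssl_verify=True)

# Creates each missing path component in turn; components already recorded in
# the local ~/.tonietoolbox/paths.json cache are skipped without an API call.
client.create_directories_recursive("Audiobooks/Example Series/Season 1")

# Placeholder file and destination; returns the server's JSON response as a dict.
result = client.upload_file("example.taf", destination_path="/Audiobooks/Example Series/Season 1", special="library")
print(result)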
TonieToolbox/tonie_analysis.py
CHANGED
@@ -1,3 +1,4 @@
+#!/usr/bin/python3
 """
 Functions for analyzing Tonie files
 """
@@ -10,13 +11,14 @@ from . import tonie_header_pb2
 from .ogg_page import OggPage
 from .logger import get_logger

-logger = get_logger(
-
+logger = get_logger(__name__)
+
+def format_time(ts: float) -> str:
     """
     Format a timestamp as a human-readable date and time string.

     Args:
-        ts: Timestamp to format
+        ts (float): Timestamp to format

     Returns:
         str: Formatted date and time string
@@ -24,12 +26,12 @@ def format_time(ts):
     return datetime.datetime.fromtimestamp(ts, datetime.timezone.utc).strftime('%Y-%m-%d %H:%M:%S')


-def format_hex(data):
+def format_hex(data: bytes) -> str:
     """
     Format binary data as a hex string.

     Args:
-        data: Binary data to format
+        data (bytes): Binary data to format

     Returns:
         str: Formatted hex string
@@ -37,13 +39,13 @@ def format_hex(data):
     return "".join(format(x, "02X") for x in data)


-def granule_to_time_string(granule, sample_rate=1):
+def granule_to_time_string(granule: int, sample_rate: int = 1) -> str:
     """
     Convert a granule position to a time string.

     Args:
-        granule: Granule position
-        sample_rate: Sample rate in Hz
+        granule (int): Granule position
+        sample_rate (int): Sample rate in Hz

     Returns:
         str: Formatted time string (HH:MM:SS.FF)
@@ -56,7 +58,7 @@ def granule_to_time_string(granule, sample_rate=1):
     return "{:02d}:{:02d}:{:02d}.{:02d}".format(hours, minutes, seconds, fraction)


-def get_header_info(in_file):
+def get_header_info(in_file) -> tuple:
     """
     Get header information from a Tonie file.

@@ -164,15 +166,15 @@ def get_header_info(in_file):
     )


-def get_audio_info(in_file, sample_rate, tonie_header, header_size):
+def get_audio_info(in_file, sample_rate: int, tonie_header, header_size: int) -> tuple:
     """
     Get audio information from a Tonie file.

     Args:
         in_file: Input file handle
-        sample_rate: Sample rate in Hz
+        sample_rate (int): Sample rate in Hz
         tonie_header: Tonie header object
-        header_size: Header size in bytes
+        header_size (int): Header size in bytes

     Returns:
         tuple: Page count, alignment OK flag, page size OK flag, total time, chapter times
@@ -228,12 +230,12 @@ def get_audio_info(in_file, sample_rate, tonie_header, header_size):
     return page_count, alignment_okay, page_size_okay, total_time, chapter_times


-def check_tonie_file(filename):
+def check_tonie_file(filename: str) -> bool:
     """
     Check if a file is a valid Tonie file and display information about it.

     Args:
-        filename: Path to the file to check
+        filename (str): Path to the file to check

     Returns:
         bool: True if the file is valid, False otherwise
@@ -315,13 +317,13 @@ def check_tonie_file(filename):
     return all_ok


-def split_to_opus_files(filename, output=None):
+def split_to_opus_files(filename: str, output: str = None) -> None:
     """
     Split a Tonie file into individual Opus files.

     Args:
-        filename: Path to the Tonie file
-        output: Output directory path (optional)
+        filename (str): Path to the Tonie file
+        output (str | None): Output directory path (optional)
     """
     logger.info("Splitting Tonie file into individual Opus tracks: %s", filename)

@@ -412,14 +414,14 @@ def split_to_opus_files(filename, output=None):
     logger.info("Successfully split Tonie file into %d individual tracks", len(tonie_header.chapterPages))


-def compare_taf_files(file1, file2, detailed=False):
+def compare_taf_files(file1: str, file2: str, detailed: bool = False) -> bool:
     """
     Compare two .taf files for debugging purposes.

     Args:
-        file1: Path to the first .taf file
-        file2: Path to the second .taf file
-        detailed: Whether to show detailed comparison results
+        file1 (str): Path to the first .taf file
+        file2 (str): Path to the second .taf file
+        detailed (bool): Whether to show detailed comparison results

     Returns:
         bool: True if files are equivalent, False otherwise
@@ -572,7 +574,7 @@ def compare_taf_files(file1, file2, detailed=False):
     logger.info("Files comparison result: Equivalent")
     return True

-def get_header_info_cli(in_file):
+def get_header_info_cli(in_file) -> tuple:
     """
     Get header information from a Tonie file.

@@ -687,12 +689,12 @@ def get_header_info_cli(in_file):
     return (0, tonie_header_pb2.TonieHeader(), 0, 0, None, False, 0, 0, 0, 0, {}, False)


-def check_tonie_file_cli(filename):
+def check_tonie_file_cli(filename: str) -> bool:
     """
     Check if a file is a valid Tonie file

     Args:
-        filename: Path to the file to check
+        filename (str): Path to the file to check

     Returns:
         bool: True if the file is valid, False otherwise