audex 1.0.7a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (192) hide show
  1. audex/__init__.py +9 -0
  2. audex/__main__.py +7 -0
  3. audex/cli/__init__.py +189 -0
  4. audex/cli/apis/__init__.py +12 -0
  5. audex/cli/apis/init/__init__.py +34 -0
  6. audex/cli/apis/init/gencfg.py +130 -0
  7. audex/cli/apis/init/setup.py +330 -0
  8. audex/cli/apis/init/vprgroup.py +125 -0
  9. audex/cli/apis/serve.py +141 -0
  10. audex/cli/args.py +356 -0
  11. audex/cli/exceptions.py +44 -0
  12. audex/cli/helper/__init__.py +0 -0
  13. audex/cli/helper/ansi.py +193 -0
  14. audex/cli/helper/display.py +288 -0
  15. audex/config/__init__.py +64 -0
  16. audex/config/core/__init__.py +30 -0
  17. audex/config/core/app.py +29 -0
  18. audex/config/core/audio.py +45 -0
  19. audex/config/core/logging.py +163 -0
  20. audex/config/core/session.py +11 -0
  21. audex/config/helper/__init__.py +1 -0
  22. audex/config/helper/client/__init__.py +1 -0
  23. audex/config/helper/client/http.py +28 -0
  24. audex/config/helper/client/websocket.py +21 -0
  25. audex/config/helper/provider/__init__.py +1 -0
  26. audex/config/helper/provider/dashscope.py +13 -0
  27. audex/config/helper/provider/unisound.py +18 -0
  28. audex/config/helper/provider/xfyun.py +23 -0
  29. audex/config/infrastructure/__init__.py +31 -0
  30. audex/config/infrastructure/cache.py +51 -0
  31. audex/config/infrastructure/database.py +48 -0
  32. audex/config/infrastructure/recorder.py +32 -0
  33. audex/config/infrastructure/store.py +19 -0
  34. audex/config/provider/__init__.py +18 -0
  35. audex/config/provider/transcription.py +109 -0
  36. audex/config/provider/vpr.py +99 -0
  37. audex/container.py +40 -0
  38. audex/entity/__init__.py +468 -0
  39. audex/entity/doctor.py +109 -0
  40. audex/entity/doctor.pyi +51 -0
  41. audex/entity/fields.py +401 -0
  42. audex/entity/segment.py +115 -0
  43. audex/entity/segment.pyi +38 -0
  44. audex/entity/session.py +133 -0
  45. audex/entity/session.pyi +47 -0
  46. audex/entity/utterance.py +142 -0
  47. audex/entity/utterance.pyi +48 -0
  48. audex/entity/vp.py +68 -0
  49. audex/entity/vp.pyi +35 -0
  50. audex/exceptions.py +157 -0
  51. audex/filters/__init__.py +692 -0
  52. audex/filters/generated/__init__.py +21 -0
  53. audex/filters/generated/doctor.py +987 -0
  54. audex/filters/generated/segment.py +723 -0
  55. audex/filters/generated/session.py +978 -0
  56. audex/filters/generated/utterance.py +939 -0
  57. audex/filters/generated/vp.py +815 -0
  58. audex/helper/__init__.py +1 -0
  59. audex/helper/hash.py +33 -0
  60. audex/helper/mixin.py +65 -0
  61. audex/helper/net.py +19 -0
  62. audex/helper/settings/__init__.py +830 -0
  63. audex/helper/settings/fields.py +317 -0
  64. audex/helper/stream.py +153 -0
  65. audex/injectors/__init__.py +1 -0
  66. audex/injectors/config.py +12 -0
  67. audex/injectors/lifespan.py +7 -0
  68. audex/lib/__init__.py +1 -0
  69. audex/lib/cache/__init__.py +383 -0
  70. audex/lib/cache/inmemory.py +513 -0
  71. audex/lib/database/__init__.py +83 -0
  72. audex/lib/database/sqlite.py +406 -0
  73. audex/lib/exporter.py +189 -0
  74. audex/lib/injectors/__init__.py +1 -0
  75. audex/lib/injectors/cache.py +25 -0
  76. audex/lib/injectors/container.py +47 -0
  77. audex/lib/injectors/exporter.py +26 -0
  78. audex/lib/injectors/recorder.py +33 -0
  79. audex/lib/injectors/server.py +17 -0
  80. audex/lib/injectors/session.py +18 -0
  81. audex/lib/injectors/sqlite.py +24 -0
  82. audex/lib/injectors/store.py +13 -0
  83. audex/lib/injectors/transcription.py +42 -0
  84. audex/lib/injectors/usb.py +12 -0
  85. audex/lib/injectors/vpr.py +65 -0
  86. audex/lib/injectors/wifi.py +7 -0
  87. audex/lib/recorder.py +844 -0
  88. audex/lib/repos/__init__.py +149 -0
  89. audex/lib/repos/container.py +23 -0
  90. audex/lib/repos/database/__init__.py +1 -0
  91. audex/lib/repos/database/sqlite.py +672 -0
  92. audex/lib/repos/decorators.py +74 -0
  93. audex/lib/repos/doctor.py +286 -0
  94. audex/lib/repos/segment.py +302 -0
  95. audex/lib/repos/session.py +285 -0
  96. audex/lib/repos/tables/__init__.py +70 -0
  97. audex/lib/repos/tables/doctor.py +137 -0
  98. audex/lib/repos/tables/segment.py +113 -0
  99. audex/lib/repos/tables/session.py +140 -0
  100. audex/lib/repos/tables/utterance.py +131 -0
  101. audex/lib/repos/tables/vp.py +102 -0
  102. audex/lib/repos/utterance.py +288 -0
  103. audex/lib/repos/vp.py +286 -0
  104. audex/lib/restful.py +251 -0
  105. audex/lib/server/__init__.py +97 -0
  106. audex/lib/server/auth.py +98 -0
  107. audex/lib/server/handlers.py +248 -0
  108. audex/lib/server/templates/index.html.j2 +226 -0
  109. audex/lib/server/templates/login.html.j2 +111 -0
  110. audex/lib/server/templates/static/script.js +68 -0
  111. audex/lib/server/templates/static/style.css +579 -0
  112. audex/lib/server/types.py +123 -0
  113. audex/lib/session.py +503 -0
  114. audex/lib/store/__init__.py +238 -0
  115. audex/lib/store/localfile.py +411 -0
  116. audex/lib/transcription/__init__.py +33 -0
  117. audex/lib/transcription/dashscope.py +525 -0
  118. audex/lib/transcription/events.py +62 -0
  119. audex/lib/usb.py +554 -0
  120. audex/lib/vpr/__init__.py +38 -0
  121. audex/lib/vpr/unisound/__init__.py +185 -0
  122. audex/lib/vpr/unisound/types.py +469 -0
  123. audex/lib/vpr/xfyun/__init__.py +483 -0
  124. audex/lib/vpr/xfyun/types.py +679 -0
  125. audex/lib/websocket/__init__.py +8 -0
  126. audex/lib/websocket/connection.py +485 -0
  127. audex/lib/websocket/pool.py +991 -0
  128. audex/lib/wifi.py +1146 -0
  129. audex/lifespan.py +75 -0
  130. audex/service/__init__.py +27 -0
  131. audex/service/decorators.py +73 -0
  132. audex/service/doctor/__init__.py +652 -0
  133. audex/service/doctor/const.py +36 -0
  134. audex/service/doctor/exceptions.py +96 -0
  135. audex/service/doctor/types.py +54 -0
  136. audex/service/export/__init__.py +236 -0
  137. audex/service/export/const.py +17 -0
  138. audex/service/export/exceptions.py +34 -0
  139. audex/service/export/types.py +21 -0
  140. audex/service/injectors/__init__.py +1 -0
  141. audex/service/injectors/container.py +53 -0
  142. audex/service/injectors/doctor.py +34 -0
  143. audex/service/injectors/export.py +27 -0
  144. audex/service/injectors/session.py +49 -0
  145. audex/service/session/__init__.py +754 -0
  146. audex/service/session/const.py +34 -0
  147. audex/service/session/exceptions.py +67 -0
  148. audex/service/session/types.py +91 -0
  149. audex/types.py +39 -0
  150. audex/utils.py +287 -0
  151. audex/valueobj/__init__.py +81 -0
  152. audex/valueobj/common/__init__.py +1 -0
  153. audex/valueobj/common/auth.py +84 -0
  154. audex/valueobj/common/email.py +16 -0
  155. audex/valueobj/common/ops.py +22 -0
  156. audex/valueobj/common/phone.py +84 -0
  157. audex/valueobj/common/version.py +72 -0
  158. audex/valueobj/session.py +19 -0
  159. audex/valueobj/utterance.py +15 -0
  160. audex/view/__init__.py +51 -0
  161. audex/view/container.py +17 -0
  162. audex/view/decorators.py +303 -0
  163. audex/view/pages/__init__.py +1 -0
  164. audex/view/pages/dashboard/__init__.py +286 -0
  165. audex/view/pages/dashboard/wifi.py +407 -0
  166. audex/view/pages/login.py +110 -0
  167. audex/view/pages/recording.py +348 -0
  168. audex/view/pages/register.py +202 -0
  169. audex/view/pages/sessions/__init__.py +196 -0
  170. audex/view/pages/sessions/details.py +224 -0
  171. audex/view/pages/sessions/export.py +443 -0
  172. audex/view/pages/settings.py +374 -0
  173. audex/view/pages/voiceprint/__init__.py +1 -0
  174. audex/view/pages/voiceprint/enroll.py +195 -0
  175. audex/view/pages/voiceprint/update.py +195 -0
  176. audex/view/static/css/dashboard.css +452 -0
  177. audex/view/static/css/glass.css +22 -0
  178. audex/view/static/css/global.css +541 -0
  179. audex/view/static/css/login.css +386 -0
  180. audex/view/static/css/recording.css +439 -0
  181. audex/view/static/css/register.css +293 -0
  182. audex/view/static/css/sessions/styles.css +501 -0
  183. audex/view/static/css/settings.css +186 -0
  184. audex/view/static/css/voiceprint/enroll.css +43 -0
  185. audex/view/static/css/voiceprint/styles.css +209 -0
  186. audex/view/static/css/voiceprint/update.css +44 -0
  187. audex/view/static/images/logo.svg +95 -0
  188. audex/view/static/js/recording.js +42 -0
  189. audex-1.0.7a3.dist-info/METADATA +361 -0
  190. audex-1.0.7a3.dist-info/RECORD +192 -0
  191. audex-1.0.7a3.dist-info/WHEEL +4 -0
  192. audex-1.0.7a3.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,238 @@
1
+ from __future__ import annotations
2
+
3
+ import abc
4
+ import builtins
5
+ import typing as t
6
+
7
+ from audex import __title__
8
+ from audex.helper.mixin import LoggingMixin
9
+
10
+
11
class KeyBuilder:
    """Build and validate store keys that share a common prefix.

    Attributes:
        split_char: Separator placed between key components.
        prefix: Leading component prepended to every built key.
    """

    __slots__ = ("prefix", "split_char")

    def __init__(self, split_char: str = "/", prefix: str = __title__) -> None:
        self.split_char = split_char
        self.prefix = prefix

    def build(self, *parts: str) -> str:
        """Assemble a store key from the prefix and the given components.

        Args:
            *parts: Key components, joined in order after the prefix.

        Returns:
            The assembled store key.
        """
        components = (self.prefix, *parts)
        return self.split_char.join(components)

    def validate(self, key: str) -> bool:
        """Report whether *key* was built with this builder's prefix.

        Args:
            key: Candidate store key.

        Returns:
            True when the key begins with ``prefix`` followed by
            ``split_char``, False otherwise.
        """
        expected_head = f"{self.prefix}{self.split_char}"
        return key.startswith(expected_head)
46
+
47
+
48
class Store(LoggingMixin, abc.ABC):
    """Abstract base class for storage operations.

    This class defines the interface for storage backends, providing
    methods for uploading, downloading, deleting, and managing stored
    objects.
    """

    @property
    @abc.abstractmethod
    def key_builder(self) -> KeyBuilder:
        """Get a KeyBuilder instance for constructing store keys.

        Returns:
            An instance of KeyBuilder.
        """

    @abc.abstractmethod
    async def upload(
        self,
        data: bytes | t.IO[bytes],
        key: str,
        metadata: t.Mapping[str, t.Any] | None = None,
        **kwargs: t.Any,
    ) -> str:
        """Upload data to storage.

        Args:
            data: The data to upload, either as bytes or a file-like object.
            key: The unique identifier for the stored object.
            metadata: Optional metadata to associate with the object.
            **kwargs: Additional storage-specific parameters.

        Returns:
            The key of the uploaded object.

        Raises:
            Exception: If the upload fails.
        """

    @abc.abstractmethod
    async def upload_multipart(
        self,
        parts: t.AsyncIterable[bytes],
        key: str,
        metadata: t.Mapping[str, t.Any] | None = None,
        **kwargs: t.Any,
    ) -> str:
        """Upload data in multiple parts.

        Args:
            parts: An async iterable of byte chunks to upload.
            key: The unique identifier for the stored object.
            metadata: Optional metadata to associate with the object.
            **kwargs: Additional storage-specific parameters.

        Returns:
            The key of the uploaded object.

        Raises:
            Exception: If the multipart upload fails.
        """

    @abc.abstractmethod
    async def get_metadata(self, key: str) -> builtins.dict[str, t.Any]:
        """Retrieve metadata for a stored object.

        Args:
            key: The unique identifier of the object.

        Returns:
            A dictionary containing the object's metadata.

        Raises:
            Exception: If the object doesn't exist or metadata retrieval fails.
        """

    @t.overload
    async def download(
        self,
        key: str,
        *,
        stream: t.Literal[False] = False,
        chunk_size: int = 8192,
        **kwargs: t.Any,
    ) -> bytes: ...
    @t.overload
    async def download(
        self,
        key: str,
        *,
        stream: t.Literal[True],
        chunk_size: int = 8192,
        **kwargs: t.Any,
        # Fix: with stream=True the result is an async iterable of chunks,
        # not bytes — the previous overload contradicted the implementation
        # signature below and defeated the purpose of overloading.
    ) -> t.AsyncIterable[bytes]: ...
    @abc.abstractmethod
    async def download(
        self,
        key: str,
        *,
        stream: bool = False,
        chunk_size: int = 8192,
        **kwargs: t.Any,
    ) -> bytes | t.AsyncIterable[bytes]:
        """Download data from storage.

        Args:
            key: The unique identifier of the object to download.
            stream: If True, return an async iterable of chunks; otherwise return all bytes.
            chunk_size: Size of each chunk when streaming (in bytes).
            **kwargs: Additional storage-specific parameters.

        Returns:
            The object's data as bytes, or an async iterable of byte chunks if streaming.

        Raises:
            Exception: If the download fails or object doesn't exist.
        """

    @abc.abstractmethod
    async def delete(self, key: str) -> None:
        """Delete an object from storage.

        Args:
            key: The unique identifier of the object to delete.

        Raises:
            Exception: If the deletion fails.
        """

    @abc.abstractmethod
    def list(
        self,
        prefix: str = "",
        page_size: int = 10,
        **kwargs: t.Any,
    ) -> t.AsyncIterable[builtins.list[str]]:
        """List objects in storage with the given prefix.

        Args:
            prefix: Optional prefix to filter objects.
            page_size: Number of object keys to return per iteration.
            **kwargs: Additional storage-specific parameters.

        Yields:
            Lists of object keys matching the prefix.

        Raises:
            Exception: If listing fails.
        """

    @abc.abstractmethod
    async def exists(self, key: str) -> bool:
        """Check if an object exists in storage.

        Args:
            key: The unique identifier of the object.

        Returns:
            True if the object exists, False otherwise.

        Raises:
            Exception: If the existence check fails.
        """

    @abc.abstractmethod
    async def clear(self, prefix: str = "") -> None:
        """Delete all objects with the given prefix.

        Args:
            prefix: Optional prefix to filter objects for deletion.

        Raises:
            Exception: If the clear operation fails.
        """

    @abc.abstractmethod
    async def copy(self, source_key: str, dest_key: str, **kwargs: t.Any) -> str:
        """Copy an object to a new location.

        Args:
            source_key: The unique identifier of the source object.
            dest_key: The unique identifier for the destination object.
            **kwargs: Additional storage-specific parameters.

        Returns:
            The key of the copied object.

        Raises:
            Exception: If the copy operation fails.
        """
@@ -0,0 +1,411 @@
1
+ from __future__ import annotations
2
+
3
+ import builtins
4
+ import json
5
+ import pathlib
6
+ import typing as t
7
+
8
+ import aiofiles
9
+ import aiofiles.os
10
+
11
+ from audex.lib.store import KeyBuilder
12
+ from audex.lib.store import Store
13
+
14
+
15
+ class LocalFileStore(Store):
16
+ """File-based storage implementation using local filesystem.
17
+
18
+ Args:
19
+ base_path: Base directory path for storing files
20
+ """
21
+
22
+ __logtag__ = "audex.lib.store.localfile"
23
+
24
+ METADATA_SUFFIX: t.ClassVar[str] = ".metadata.json"
25
+ DEFAULT_CHUNK_SIZE: t.ClassVar[int] = 8192
26
+
27
+ def __init__(self, base_path: str | pathlib.Path):
28
+ super().__init__()
29
+ self.base_path = pathlib.Path(base_path).resolve()
30
+ self.base_path.mkdir(parents=True, exist_ok=True)
31
+ self._key_builder = KeyBuilder(split_char="/", prefix="")
32
+
33
+ @property
34
+ def key_builder(self) -> KeyBuilder:
35
+ return self._key_builder
36
+
37
+ def fullpath(self, key: str) -> pathlib.Path:
38
+ """Get the full file path for a given key.
39
+
40
+ Args:
41
+ key: File key
42
+
43
+ Returns:
44
+ Full resolved file path
45
+
46
+ Raises:
47
+ ValueError: If the key attempts to escape the base_path
48
+ """
49
+ # Remove leading slashes to prevent path injection
50
+ key = key.lstrip("/")
51
+ full_path = (self.base_path / key).resolve()
52
+
53
+ # Security check: ensure path is within base_path
54
+ if not str(full_path).startswith(str(self.base_path)):
55
+ raise ValueError(f"Invalid key: {key}")
56
+
57
+ return full_path
58
+
59
+ def metadata_path(self, key: str) -> pathlib.Path:
60
+ """Get the metadata file path for a given key.
61
+
62
+ Args:
63
+ key: File key
64
+
65
+ Returns:
66
+ Metadata file path
67
+ """
68
+ file_path = self.fullpath(key)
69
+ return file_path.parent / (file_path.name + self.METADATA_SUFFIX)
70
+
71
+ async def upload(
72
+ self,
73
+ data: bytes | t.IO[bytes],
74
+ key: str,
75
+ metadata: t.Mapping[str, t.Any] | None = None,
76
+ **_kwargs: t.Any,
77
+ ) -> str:
78
+ """Upload a file.
79
+
80
+ Args:
81
+ data: File data (bytes or file-like object)
82
+ key: File key
83
+ metadata: Additional metadata
84
+ **_kwargs: Additional arguments (unused)
85
+
86
+ Returns:
87
+ File key
88
+
89
+ Raises:
90
+ ValueError: If the key is invalid
91
+ """
92
+ file_path = self.fullpath(key)
93
+
94
+ # Ensure parent directory exists
95
+ file_path.parent.mkdir(parents=True, exist_ok=True)
96
+
97
+ # Write file
98
+ async with aiofiles.open(file_path, "wb") as f:
99
+ if isinstance(data, bytes):
100
+ await f.write(data)
101
+ else:
102
+ # Handle file object
103
+ while True:
104
+ chunk = data.read(self.DEFAULT_CHUNK_SIZE)
105
+ if not chunk:
106
+ break
107
+ await f.write(chunk)
108
+
109
+ # Write metadata if provided
110
+ if metadata:
111
+ metadata_file = self.metadata_path(key)
112
+ async with aiofiles.open(metadata_file, "w", encoding="utf-8") as f:
113
+ await f.write(json.dumps(metadata, ensure_ascii=False, indent=2))
114
+
115
+ return key
116
+
117
+ async def upload_multipart(
118
+ self,
119
+ parts: t.AsyncIterable[bytes],
120
+ key: str,
121
+ metadata: t.Mapping[str, t.Any] | None = None,
122
+ **_kwargs: t.Any,
123
+ ) -> str:
124
+ """Upload a file from multiple parts.
125
+
126
+ Args:
127
+ parts: Async iterable of file data parts
128
+ key: File key
129
+ metadata: Additional metadata
130
+ **_kwargs: Additional arguments (unused)
131
+
132
+ Returns:
133
+ File key
134
+
135
+ Raises:
136
+ ValueError: If the key is invalid
137
+ """
138
+ file_path = self.fullpath(key)
139
+
140
+ # Ensure parent directory exists
141
+ file_path.parent.mkdir(parents=True, exist_ok=True)
142
+
143
+ # Write file parts
144
+ async with aiofiles.open(file_path, "wb") as f:
145
+ async for part in parts:
146
+ await f.write(part)
147
+
148
+ # Write metadata if provided
149
+ if metadata:
150
+ metadata_file = self.metadata_path(key)
151
+ async with aiofiles.open(metadata_file, "w", encoding="utf-8") as f:
152
+ await f.write(json.dumps(metadata, ensure_ascii=False, indent=2))
153
+
154
+ return key
155
+
156
+ async def get_metadata(self, key: str) -> builtins.dict[str, t.Any]:
157
+ """Get metadata for a file.
158
+
159
+ Args:
160
+ key: File key
161
+
162
+ Returns:
163
+ Metadata dictionary (empty if no metadata exists)
164
+
165
+ Raises:
166
+ FileNotFoundError: If the file does not exist
167
+ """
168
+ file_path = self.fullpath(key)
169
+
170
+ if not file_path.exists():
171
+ raise FileNotFoundError(f"File not found: {key}")
172
+
173
+ if not file_path.is_file():
174
+ raise IsADirectoryError(f"Path is not a file: {key}")
175
+
176
+ metadata_file = self.metadata_path(key)
177
+
178
+ if not metadata_file.exists():
179
+ return {}
180
+
181
+ async with aiofiles.open(metadata_file, encoding="utf-8") as f:
182
+ content = await f.read()
183
+ return json.loads(content) # type: ignore
184
+
185
+ @t.overload
186
+ async def download(
187
+ self,
188
+ key: str,
189
+ *,
190
+ stream: t.Literal[False] = False,
191
+ chunk_size: int = 8192,
192
+ **kwargs: t.Any,
193
+ ) -> bytes: ...
194
+ @t.overload
195
+ async def download(
196
+ self,
197
+ key: str,
198
+ *,
199
+ stream: t.Literal[True],
200
+ chunk_size: int = 8192,
201
+ **kwargs: t.Any,
202
+ ) -> bytes: ...
203
+ async def download(
204
+ self,
205
+ key: str,
206
+ *,
207
+ stream: bool = False,
208
+ chunk_size: int = DEFAULT_CHUNK_SIZE,
209
+ **_kwargs: t.Any,
210
+ ) -> bytes | t.AsyncIterable[bytes]:
211
+ """Download a file.
212
+
213
+ Args:
214
+ key: File key
215
+ stream: Whether to return as a stream
216
+ chunk_size: Size of each chunk in bytes (used if stream is True)
217
+ **_kwargs: Additional arguments (unused)
218
+
219
+ Returns:
220
+ File data (bytes or async iterator)
221
+
222
+ Raises:
223
+ FileNotFoundError: If the file does not exist
224
+ IsADirectoryError: If the path is a directory
225
+ """
226
+ file_path = self.fullpath(key)
227
+
228
+ if not file_path.exists():
229
+ raise FileNotFoundError(f"File not found: {key}")
230
+
231
+ if not file_path.is_file():
232
+ raise IsADirectoryError(f"Path is not a file: {key}")
233
+
234
+ if stream:
235
+ return self.stream_file(file_path, chunk_size)
236
+ async with aiofiles.open(file_path, "rb") as f:
237
+ return await f.read()
238
+
239
+ async def stream_file(
240
+ self,
241
+ file_path: pathlib.Path,
242
+ chunk_size: int = DEFAULT_CHUNK_SIZE,
243
+ ) -> t.AsyncIterable[bytes]:
244
+ """Stream a file in chunks.
245
+
246
+ Args:
247
+ file_path: Path to the file
248
+ chunk_size: Size of each chunk in bytes
249
+
250
+ Yields:
251
+ File data chunks
252
+ """
253
+ async with aiofiles.open(file_path, "rb") as f:
254
+ while True:
255
+ chunk = await f.read(chunk_size)
256
+ if not chunk:
257
+ break
258
+ yield chunk
259
+
260
+ async def delete(self, key: str) -> None:
261
+ """Delete a file and its metadata.
262
+
263
+ Args:
264
+ key: File key
265
+
266
+ Raises:
267
+ IsADirectoryError: If the path is a directory
268
+ """
269
+ file_path = self.fullpath(key)
270
+
271
+ if file_path.exists():
272
+ if file_path.is_file():
273
+ await aiofiles.os.remove(file_path)
274
+
275
+ # Delete metadata file if exists
276
+ metadata_file = self.metadata_path(key)
277
+ if metadata_file.exists():
278
+ await aiofiles.os.remove(metadata_file)
279
+ else:
280
+ raise IsADirectoryError(f"Path is not a file: {key}")
281
+
282
+ async def list(
283
+ self,
284
+ prefix: str = "",
285
+ page_size: int = 10,
286
+ **_kwargs: t.Any,
287
+ ) -> t.AsyncIterable[builtins.list[str]]:
288
+ """List files with a given prefix.
289
+
290
+ Args:
291
+ prefix: Key prefix to filter files
292
+ page_size: Number of items per page
293
+ **_kwargs: Additional arguments (unused)
294
+
295
+ Returns:
296
+ List of file keys matching the prefix (excludes metadata files)
297
+ """
298
+ prefix = prefix.lstrip("/")
299
+ search_path = self.base_path / prefix if prefix else self.base_path
300
+
301
+ if not search_path.exists():
302
+ yield []
303
+ else:
304
+ current_page: builtins.list[str] = []
305
+ for item in search_path.rglob("*"):
306
+ if item.is_file() and not item.name.endswith(self.METADATA_SUFFIX):
307
+ relative_path = item.relative_to(self.base_path)
308
+ current_page.append(str(relative_path))
309
+
310
+ if len(current_page) >= page_size:
311
+ yield current_page
312
+ current_page = []
313
+
314
+ if current_page: # Yield any remaining items
315
+ yield current_page
316
+
317
+ async def exists(self, key: str) -> bool:
318
+ """Check if a file exists.
319
+
320
+ Args:
321
+ key: File key
322
+
323
+ Returns:
324
+ True if the file exists, False otherwise
325
+ """
326
+ file_path = self.fullpath(key)
327
+ return file_path.exists() and file_path.is_file()
328
+
329
+ async def clear(self, prefix: str = "") -> None:
330
+ """Clear files with a given prefix.
331
+
332
+ Args:
333
+ prefix: Key prefix to filter files
334
+ """
335
+ prefix = prefix.lstrip("/")
336
+ search_path = self.base_path / prefix if prefix else self.base_path
337
+
338
+ if not search_path.exists():
339
+ return
340
+
341
+ # If search_path is a file, delete it directly
342
+ if search_path.is_file():
343
+ await aiofiles.os.remove(search_path)
344
+ # Delete metadata file if exists
345
+ if not search_path.name.endswith(self.METADATA_SUFFIX):
346
+ metadata_file = search_path.parent / (search_path.name + self.METADATA_SUFFIX)
347
+ if metadata_file.exists():
348
+ await aiofiles.os.remove(metadata_file)
349
+ return
350
+
351
+ # Recursively traverse directory and delete files
352
+ for item in search_path.rglob("*"):
353
+ if item.is_file():
354
+ await aiofiles.os.remove(item)
355
+
356
+ async def copy(self, source_key: str, dest_key: str, **_kwargs: t.Any) -> str:
357
+ """Copy a file and its metadata.
358
+
359
+ Args:
360
+ source_key: Source file key
361
+ dest_key: Destination file key
362
+ **_kwargs: Additional arguments (unused)
363
+
364
+ Returns:
365
+ Destination file key
366
+
367
+ Raises:
368
+ FileNotFoundError: If the source file does not exist
369
+ IsADirectoryError: If the source path is a directory
370
+ """
371
+ source_path = self.fullpath(source_key)
372
+ dest_path = self.fullpath(dest_key)
373
+
374
+ if not source_path.exists():
375
+ raise FileNotFoundError(f"Source file not found: {source_key}")
376
+
377
+ if not source_path.is_file():
378
+ raise IsADirectoryError(f"Source path is not a file: {source_key}")
379
+
380
+ # Ensure parent directory of destination exists
381
+ dest_path.parent.mkdir(parents=True, exist_ok=True)
382
+
383
+ # Copy file
384
+ async with (
385
+ aiofiles.open(source_path, "rb") as src_file,
386
+ aiofiles.open(dest_path, "wb") as dest_file,
387
+ ):
388
+ while True:
389
+ chunk = await src_file.read(self.DEFAULT_CHUNK_SIZE)
390
+ if not chunk:
391
+ break
392
+ await dest_file.write(chunk)
393
+
394
+ # Copy metadata file if exists
395
+ source_metadata = self.metadata_path(source_key)
396
+ if source_metadata.exists():
397
+ dest_metadata = self.metadata_path(dest_key)
398
+ async with (
399
+ aiofiles.open(source_metadata, "rb") as src_meta,
400
+ aiofiles.open(dest_metadata, "wb") as dest_meta,
401
+ ):
402
+ while True:
403
+ chunk = await src_meta.read(self.DEFAULT_CHUNK_SIZE)
404
+ if not chunk:
405
+ break
406
+ await dest_meta.write(chunk)
407
+
408
+ return dest_key
409
+
410
+ def __repr__(self) -> str:
411
+ return f"FILE STORE <{self.__class__.__name__}(base_path={self.base_path})>"
@@ -0,0 +1,33 @@
1
+ from __future__ import annotations
2
+
3
+ import abc
4
+ import typing as t
5
+
6
+ from audex.exceptions import AudexError
7
+ from audex.lib.transcription.events import Delta
8
+ from audex.lib.transcription.events import Done
9
+ from audex.lib.transcription.events import Start
10
+ from audex.types import DuplexAbstractSession
11
+
12
+
13
class Transcription(abc.ABC):
    """Factory interface for opening duplex transcription sessions."""

    @abc.abstractmethod
    def session(
        self,
        *,
        fmt: t.Literal["pcm", "mp3"] = "pcm",
        sample_rate: int = 16000,
        silence_duration_ms: int | None = None,
        vocabulary_id: str | None = None,
    ) -> TranscriptSession:
        """Open a new transcription session.

        Args:
            fmt: Encoding of the audio frames that will be sent.
            sample_rate: Audio sample rate in Hz.
            silence_duration_ms: Optional silence duration in milliseconds
                (semantics are backend-specific — confirm with implementation).
            vocabulary_id: Optional vocabulary identifier for the backend.

        Returns:
            A duplex session that accepts audio bytes and yields events.
        """
        ...
24
+
25
+
26
# Union of event types a TranscriptSession can emit to the consumer.
ReceiveType: t.TypeAlias = Start | Delta | Done
27
+
28
+
29
class TranscriptSession(DuplexAbstractSession[bytes, ReceiveType], abc.ABC):
    """Duplex session that accepts raw audio bytes and yields ReceiveType events."""

    ...
30
+
31
+
32
class TranscriptionError(AudexError):
    """Error raised for failures in the transcription service layer."""

    default_message = "Transcription service error"