mycelium-ai 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (164)
  1. mycelium/__init__.py +0 -0
  2. mycelium/api/__init__.py +0 -0
  3. mycelium/api/app.py +1147 -0
  4. mycelium/api/client_app.py +170 -0
  5. mycelium/api/generated_sources/__init__.py +0 -0
  6. mycelium/api/generated_sources/server_schemas/__init__.py +97 -0
  7. mycelium/api/generated_sources/server_schemas/api/__init__.py +5 -0
  8. mycelium/api/generated_sources/server_schemas/api/default_api.py +2473 -0
  9. mycelium/api/generated_sources/server_schemas/api_client.py +766 -0
  10. mycelium/api/generated_sources/server_schemas/api_response.py +25 -0
  11. mycelium/api/generated_sources/server_schemas/configuration.py +434 -0
  12. mycelium/api/generated_sources/server_schemas/exceptions.py +166 -0
  13. mycelium/api/generated_sources/server_schemas/models/__init__.py +41 -0
  14. mycelium/api/generated_sources/server_schemas/models/api_section.py +71 -0
  15. mycelium/api/generated_sources/server_schemas/models/chroma_section.py +69 -0
  16. mycelium/api/generated_sources/server_schemas/models/clap_section.py +75 -0
  17. mycelium/api/generated_sources/server_schemas/models/compute_on_server200_response.py +79 -0
  18. mycelium/api/generated_sources/server_schemas/models/compute_on_server_request.py +67 -0
  19. mycelium/api/generated_sources/server_schemas/models/compute_text_search_request.py +69 -0
  20. mycelium/api/generated_sources/server_schemas/models/config_request.py +81 -0
  21. mycelium/api/generated_sources/server_schemas/models/config_response.py +107 -0
  22. mycelium/api/generated_sources/server_schemas/models/create_playlist_request.py +71 -0
  23. mycelium/api/generated_sources/server_schemas/models/get_similar_by_track200_response.py +143 -0
  24. mycelium/api/generated_sources/server_schemas/models/library_stats_response.py +77 -0
  25. mycelium/api/generated_sources/server_schemas/models/logging_section.py +67 -0
  26. mycelium/api/generated_sources/server_schemas/models/media_server_section.py +67 -0
  27. mycelium/api/generated_sources/server_schemas/models/playlist_response.py +73 -0
  28. mycelium/api/generated_sources/server_schemas/models/plex_section.py +71 -0
  29. mycelium/api/generated_sources/server_schemas/models/processing_response.py +90 -0
  30. mycelium/api/generated_sources/server_schemas/models/save_config_response.py +73 -0
  31. mycelium/api/generated_sources/server_schemas/models/scan_library_response.py +75 -0
  32. mycelium/api/generated_sources/server_schemas/models/search_result_response.py +75 -0
  33. mycelium/api/generated_sources/server_schemas/models/server_section.py +67 -0
  34. mycelium/api/generated_sources/server_schemas/models/stop_processing_response.py +71 -0
  35. mycelium/api/generated_sources/server_schemas/models/task_status_response.py +87 -0
  36. mycelium/api/generated_sources/server_schemas/models/track_database_stats.py +75 -0
  37. mycelium/api/generated_sources/server_schemas/models/track_response.py +77 -0
  38. mycelium/api/generated_sources/server_schemas/models/tracks_list_response.py +81 -0
  39. mycelium/api/generated_sources/server_schemas/rest.py +329 -0
  40. mycelium/api/generated_sources/server_schemas/test/__init__.py +0 -0
  41. mycelium/api/generated_sources/server_schemas/test/test_api_section.py +57 -0
  42. mycelium/api/generated_sources/server_schemas/test/test_chroma_section.py +55 -0
  43. mycelium/api/generated_sources/server_schemas/test/test_clap_section.py +60 -0
  44. mycelium/api/generated_sources/server_schemas/test/test_compute_on_server200_response.py +52 -0
  45. mycelium/api/generated_sources/server_schemas/test/test_compute_on_server_request.py +53 -0
  46. mycelium/api/generated_sources/server_schemas/test/test_compute_text_search_request.py +54 -0
  47. mycelium/api/generated_sources/server_schemas/test/test_config_request.py +66 -0
  48. mycelium/api/generated_sources/server_schemas/test/test_config_response.py +97 -0
  49. mycelium/api/generated_sources/server_schemas/test/test_create_playlist_request.py +60 -0
  50. mycelium/api/generated_sources/server_schemas/test/test_default_api.py +150 -0
  51. mycelium/api/generated_sources/server_schemas/test/test_get_similar_by_track200_response.py +61 -0
  52. mycelium/api/generated_sources/server_schemas/test/test_library_stats_response.py +63 -0
  53. mycelium/api/generated_sources/server_schemas/test/test_logging_section.py +53 -0
  54. mycelium/api/generated_sources/server_schemas/test/test_media_server_section.py +53 -0
  55. mycelium/api/generated_sources/server_schemas/test/test_playlist_response.py +58 -0
  56. mycelium/api/generated_sources/server_schemas/test/test_plex_section.py +56 -0
  57. mycelium/api/generated_sources/server_schemas/test/test_processing_response.py +61 -0
  58. mycelium/api/generated_sources/server_schemas/test/test_save_config_response.py +58 -0
  59. mycelium/api/generated_sources/server_schemas/test/test_scan_library_response.py +61 -0
  60. mycelium/api/generated_sources/server_schemas/test/test_search_result_response.py +69 -0
  61. mycelium/api/generated_sources/server_schemas/test/test_server_section.py +53 -0
  62. mycelium/api/generated_sources/server_schemas/test/test_stop_processing_response.py +55 -0
  63. mycelium/api/generated_sources/server_schemas/test/test_task_status_response.py +71 -0
  64. mycelium/api/generated_sources/server_schemas/test/test_track_database_stats.py +60 -0
  65. mycelium/api/generated_sources/server_schemas/test/test_track_response.py +63 -0
  66. mycelium/api/generated_sources/server_schemas/test/test_tracks_list_response.py +75 -0
  67. mycelium/api/generated_sources/worker_schemas/__init__.py +61 -0
  68. mycelium/api/generated_sources/worker_schemas/api/__init__.py +5 -0
  69. mycelium/api/generated_sources/worker_schemas/api/default_api.py +318 -0
  70. mycelium/api/generated_sources/worker_schemas/api_client.py +766 -0
  71. mycelium/api/generated_sources/worker_schemas/api_response.py +25 -0
  72. mycelium/api/generated_sources/worker_schemas/configuration.py +434 -0
  73. mycelium/api/generated_sources/worker_schemas/exceptions.py +166 -0
  74. mycelium/api/generated_sources/worker_schemas/models/__init__.py +23 -0
  75. mycelium/api/generated_sources/worker_schemas/models/save_config_response.py +73 -0
  76. mycelium/api/generated_sources/worker_schemas/models/worker_clap_section.py +75 -0
  77. mycelium/api/generated_sources/worker_schemas/models/worker_client_api_section.py +69 -0
  78. mycelium/api/generated_sources/worker_schemas/models/worker_client_section.py +79 -0
  79. mycelium/api/generated_sources/worker_schemas/models/worker_config_request.py +73 -0
  80. mycelium/api/generated_sources/worker_schemas/models/worker_config_response.py +89 -0
  81. mycelium/api/generated_sources/worker_schemas/models/worker_logging_section.py +67 -0
  82. mycelium/api/generated_sources/worker_schemas/rest.py +329 -0
  83. mycelium/api/generated_sources/worker_schemas/test/__init__.py +0 -0
  84. mycelium/api/generated_sources/worker_schemas/test/test_default_api.py +45 -0
  85. mycelium/api/generated_sources/worker_schemas/test/test_save_config_response.py +58 -0
  86. mycelium/api/generated_sources/worker_schemas/test/test_worker_clap_section.py +60 -0
  87. mycelium/api/generated_sources/worker_schemas/test/test_worker_client_api_section.py +55 -0
  88. mycelium/api/generated_sources/worker_schemas/test/test_worker_client_section.py +65 -0
  89. mycelium/api/generated_sources/worker_schemas/test/test_worker_config_request.py +59 -0
  90. mycelium/api/generated_sources/worker_schemas/test/test_worker_config_response.py +89 -0
  91. mycelium/api/generated_sources/worker_schemas/test/test_worker_logging_section.py +53 -0
  92. mycelium/api/worker_models.py +99 -0
  93. mycelium/application/__init__.py +11 -0
  94. mycelium/application/job_queue.py +323 -0
  95. mycelium/application/library_management_use_cases.py +292 -0
  96. mycelium/application/search_use_cases.py +96 -0
  97. mycelium/application/services.py +340 -0
  98. mycelium/client.py +554 -0
  99. mycelium/client_config.py +251 -0
  100. mycelium/client_frontend_dist/404.html +1 -0
  101. mycelium/client_frontend_dist/_next/static/a4iyRdfsvkjdyMAK9cE9Y/_buildManifest.js +1 -0
  102. mycelium/client_frontend_dist/_next/static/a4iyRdfsvkjdyMAK9cE9Y/_ssgManifest.js +1 -0
  103. mycelium/client_frontend_dist/_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js +1 -0
  104. mycelium/client_frontend_dist/_next/static/chunks/964-830f77d7ce1c2463.js +1 -0
  105. mycelium/client_frontend_dist/_next/static/chunks/app/_not-found/page-d25eede5a9099bd3.js +1 -0
  106. mycelium/client_frontend_dist/_next/static/chunks/app/layout-9b3d32f96dfe13b6.js +1 -0
  107. mycelium/client_frontend_dist/_next/static/chunks/app/page-cc6bad295789134e.js +1 -0
  108. mycelium/client_frontend_dist/_next/static/chunks/framework-7c95b8e5103c9e90.js +1 -0
  109. mycelium/client_frontend_dist/_next/static/chunks/main-6b37be50736577a2.js +1 -0
  110. mycelium/client_frontend_dist/_next/static/chunks/main-app-4153d115599d3126.js +1 -0
  111. mycelium/client_frontend_dist/_next/static/chunks/pages/_app-0a0020ddd67f79cf.js +1 -0
  112. mycelium/client_frontend_dist/_next/static/chunks/pages/_error-03529f2c21436739.js +1 -0
  113. mycelium/client_frontend_dist/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
  114. mycelium/client_frontend_dist/_next/static/chunks/webpack-c81e624915b2ea70.js +1 -0
  115. mycelium/client_frontend_dist/_next/static/css/1eb7f0e2c78e0734.css +1 -0
  116. mycelium/client_frontend_dist/favicon.ico +0 -0
  117. mycelium/client_frontend_dist/file.svg +1 -0
  118. mycelium/client_frontend_dist/globe.svg +1 -0
  119. mycelium/client_frontend_dist/index.html +1 -0
  120. mycelium/client_frontend_dist/index.txt +20 -0
  121. mycelium/client_frontend_dist/next.svg +1 -0
  122. mycelium/client_frontend_dist/vercel.svg +1 -0
  123. mycelium/client_frontend_dist/window.svg +1 -0
  124. mycelium/config.py +346 -0
  125. mycelium/domain/__init__.py +13 -0
  126. mycelium/domain/models.py +71 -0
  127. mycelium/domain/repositories.py +98 -0
  128. mycelium/domain/worker.py +77 -0
  129. mycelium/frontend_dist/404.html +1 -0
  130. mycelium/frontend_dist/_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js +1 -0
  131. mycelium/frontend_dist/_next/static/chunks/964-830f77d7ce1c2463.js +1 -0
  132. mycelium/frontend_dist/_next/static/chunks/app/_not-found/page-d25eede5a9099bd3.js +1 -0
  133. mycelium/frontend_dist/_next/static/chunks/app/layout-9b3d32f96dfe13b6.js +1 -0
  134. mycelium/frontend_dist/_next/static/chunks/app/page-a761463485e0540b.js +1 -0
  135. mycelium/frontend_dist/_next/static/chunks/framework-7c95b8e5103c9e90.js +1 -0
  136. mycelium/frontend_dist/_next/static/chunks/main-6b37be50736577a2.js +1 -0
  137. mycelium/frontend_dist/_next/static/chunks/main-app-4153d115599d3126.js +1 -0
  138. mycelium/frontend_dist/_next/static/chunks/pages/_app-0a0020ddd67f79cf.js +1 -0
  139. mycelium/frontend_dist/_next/static/chunks/pages/_error-03529f2c21436739.js +1 -0
  140. mycelium/frontend_dist/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
  141. mycelium/frontend_dist/_next/static/chunks/webpack-c81e624915b2ea70.js +1 -0
  142. mycelium/frontend_dist/_next/static/css/1eb7f0e2c78e0734.css +1 -0
  143. mycelium/frontend_dist/_next/static/glVJ0yJSL0zWN7anTTG3_/_buildManifest.js +1 -0
  144. mycelium/frontend_dist/_next/static/glVJ0yJSL0zWN7anTTG3_/_ssgManifest.js +1 -0
  145. mycelium/frontend_dist/favicon.ico +0 -0
  146. mycelium/frontend_dist/file.svg +1 -0
  147. mycelium/frontend_dist/globe.svg +1 -0
  148. mycelium/frontend_dist/index.html +10 -0
  149. mycelium/frontend_dist/index.txt +20 -0
  150. mycelium/frontend_dist/next.svg +1 -0
  151. mycelium/frontend_dist/vercel.svg +1 -0
  152. mycelium/frontend_dist/window.svg +1 -0
  153. mycelium/infrastructure/__init__.py +17 -0
  154. mycelium/infrastructure/chroma_adapter.py +232 -0
  155. mycelium/infrastructure/clap_adapter.py +280 -0
  156. mycelium/infrastructure/plex_adapter.py +145 -0
  157. mycelium/infrastructure/track_database.py +467 -0
  158. mycelium/main.py +183 -0
  159. mycelium_ai-0.5.0.dist-info/METADATA +312 -0
  160. mycelium_ai-0.5.0.dist-info/RECORD +164 -0
  161. mycelium_ai-0.5.0.dist-info/WHEEL +5 -0
  162. mycelium_ai-0.5.0.dist-info/entry_points.txt +2 -0
  163. mycelium_ai-0.5.0.dist-info/licenses/LICENSE +21 -0
  164. mycelium_ai-0.5.0.dist-info/top_level.txt +1 -0
mycelium/api/app.py ADDED
@@ -0,0 +1,1147 @@
1
+ """FastAPI application for Mycelium web interface."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import functools
6
+ import yaml
7
+ import logging
8
+ import os
9
+ import tempfile
10
+ import threading
11
+ from datetime import datetime
12
+ from pathlib import Path
13
+ from typing import List, Optional, Union
14
+
15
+ import uvicorn
16
+ from fastapi import BackgroundTasks, FastAPI, File, Form, HTTPException, Query, UploadFile
17
+ from fastapi.middleware.cors import CORSMiddleware
18
+ from fastapi.responses import FileResponse, RedirectResponse
19
+ from fastapi.staticfiles import StaticFiles
20
+ from mycelium.api.generated_sources.server_schemas.models import (
21
+ ConfigRequest,
22
+ ConfigResponse,
23
+ CreatePlaylistRequest,
24
+ LibraryStatsResponse,
25
+ PlaylistResponse,
26
+ ProcessingResponse,
27
+ SaveConfigResponse,
28
+ ScanLibraryResponse,
29
+ SearchResultResponse,
30
+ StopProcessingResponse,
31
+ TaskStatusResponse,
32
+ TrackDatabaseStats,
33
+ TrackResponse,
34
+ TracksListResponse,
35
+ )
36
+ from mycelium.domain import SearchResult
37
+
38
+ # Worker API request/response models
39
+ from .worker_models import (
40
+ WorkerRegistrationRequest,
41
+ WorkerRegistrationResponse,
42
+ JobRequest,
43
+ TaskResultRequest,
44
+ TaskResultResponse,
45
+ ComputeOnServerRequest,
46
+ ComputeSearchOnServerRequest,
47
+ )
48
+
49
+ from ..application.job_queue import JobQueueService
50
+ from ..application.services import MyceliumService
51
+ from ..config import (
52
+ APIConfig,
53
+ CLAPConfig,
54
+ ChromaConfig,
55
+ DatabaseConfig,
56
+ LoggingConfig,
57
+ MediaServerConfig,
58
+ MyceliumConfig,
59
+ PlexConfig,
60
+ ServerConfig,
61
+ )
62
+ from ..domain.worker import ContextType, TaskResult, TaskStatus, TaskType
63
+
64
# Setup logger for this module
logger = logging.getLogger(__name__)

# Load the YAML-backed application configuration at import time.
config = MyceliumConfig.load_from_yaml()

# Apply the configured logging level/handlers process-wide.
config.setup_logging()

logger.info("Initializing Mycelium service...")

# Initialize the main service facade (library, search, playlists).
service = MyceliumService(config=config)

# Initialize job queue service used to dispatch tasks to remote workers.
job_queue = JobQueueService()

# Wire the worker queue into the service; host/port are presumably what
# workers use to call back into this API — TODO confirm against worker client.
service.initialize_worker_processing(job_queue, config.api.host, config.api.port)

# Global lock for thread-safe config reloading. RLock so a handler already
# holding it can call helpers that re-acquire without deadlocking.
shared_resources_lock = threading.RLock()
86
+
87
+
88
def with_service_lock(func):
    """Wrap an async handler so it runs while holding the shared resources lock.

    Serializes access to the module-level ``service``/``config``/``job_queue``
    globals against concurrent hot-reloads.
    """

    @functools.wraps(func)
    async def locked(*args, **kwargs):
        # The lock is held for the whole await — reloads wait until we finish.
        with shared_resources_lock:
            return await func(*args, **kwargs)

    return locked
97
+
98
+
99
def reload_config() -> None:
    """Reload configuration from YAML and swap in freshly built services.

    Builds a new MyceliumService and JobQueueService from the on-disk config,
    then replaces the module-level globals together under the shared lock so
    readers never observe a partially-swapped state. Re-raises any failure so
    callers can report it.
    """
    global config, service, job_queue

    with shared_resources_lock:
        try:
            logger.info("Reloading configuration...")

            fresh_config = MyceliumConfig.load_from_yaml()

            # Only re-apply logging setup when the level actually changed.
            if fresh_config.logging.level != config.logging.level:
                fresh_config.setup_logging()
                logger.info(f"Updated logging level to {fresh_config.logging.level}")

            # Build the replacement service stack before touching globals.
            fresh_service = MyceliumService(config=fresh_config)
            fresh_queue = JobQueueService()
            fresh_service.initialize_worker_processing(
                fresh_queue, fresh_config.api.host, fresh_config.api.port
            )

            # Swap all three globals in one go (still under the lock).
            config = fresh_config
            service = fresh_service
            job_queue = fresh_queue

            logger.info("Configuration reloaded successfully")

        except Exception as e:
            logger.error(f"Failed to reload configuration: {e}", exc_info=True)
            raise
136
+
137
+
138
# Create FastAPI app
app = FastAPI(
    title="Mycelium API",
    description="Plex music collection and recommendation system using CLAP embeddings"
)

# Hand-written (API-first) OpenAPI spec; parents[3] resolves to the repo root
# relative to this file — TODO confirm this path survives installed layouts.
SERVER_SPEC_PATH = Path(__file__).resolve().parents[3] / "openapi" / "server_openapi.yaml"
# Lazily-populated cache for the parsed spec (filled by _custom_openapi).
app.state.external_openapi_cache = None
146
+
147
def _custom_openapi():
    """Return the API-first OpenAPI document, parsing the YAML spec only once."""
    if app.state.external_openapi_cache is None:
        # First call: read and parse the spec, then memoize it on app.state.
        with SERVER_SPEC_PATH.open("r", encoding="utf-8") as spec_file:
            app.state.external_openapi_cache = yaml.safe_load(spec_file)
    return app.state.external_openapi_cache


# Serve the external spec instead of FastAPI's auto-generated schema.
app.openapi = _custom_openapi
154
+
155
# Add CORS middleware for frontend
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    # credentials disabled — wildcard origins with credentials is rejected by browsers
    allow_credentials=False,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Serve static frontend files bundled alongside the package.
frontend_dist_path = Path(__file__).parent.parent / "frontend_dist"
if frontend_dist_path.exists():
    # Mount Next.js static assets at their expected path
    next_static_path = frontend_dist_path / "_next"
    if next_static_path.exists():
        app.mount("/_next", StaticFiles(directory=str(next_static_path)), name="next_static")

    # Mount frontend application under /app with SPA routing support
    app.mount("/app", StaticFiles(directory=str(frontend_dist_path), html=True), name="frontend")
174
+
175
+
176
# Serve the API-first OpenAPI YAML (for tooling and validation)
@app.get("/openapi.yaml")
async def get_openapi_yaml():
    """Serve the external API-first OpenAPI YAML if available."""
    # Guard clause: 404 when the spec file is missing from this installation.
    if not SERVER_SPEC_PATH.exists():
        raise HTTPException(status_code=404, detail="OpenAPI YAML not found")
    return FileResponse(path=str(SERVER_SPEC_PATH), media_type="application/yaml")
183
+
184
+
185
+ @app.get("/")
186
+ async def root():
187
+ """Redirect root to frontend application."""
188
+ return RedirectResponse("/app")
189
+
190
+ @app.get("/api/library/stats", response_model=LibraryStatsResponse)
191
+ @with_service_lock
192
+ async def get_library_stats():
193
+ """Get statistics about the current music library database."""
194
+ logger.debug("Getting library stats")
195
+ try:
196
+ stats = service.get_database_stats()
197
+ return LibraryStatsResponse(**stats)
198
+ except Exception as e:
199
+ raise HTTPException(status_code=500, detail=str(e))
200
+
201
+
202
+ @app.get("/api/search/text", response_model=ProcessingResponse)
203
+ async def search_by_text_get(
204
+ q: str = Query(...),
205
+ n_results: int = Query(10),
206
+ ):
207
+ """Search for music tracks by text description (GET endpoint)."""
208
+ logger.info(f"Text search GET request - q: '{q}', n_results: {n_results}")
209
+
210
+ try:
211
+ # Check if there are active workers
212
+ active_workers = job_queue.get_active_workers()
213
+ if active_workers:
214
+ logger.info(
215
+ f"Found {len(active_workers)} active workers, creating text search task"
216
+ )
217
+ # Create task for worker processing
218
+ task = job_queue.create_text_search_task(
219
+ text_query=q, n_results=n_results, prioritize=True
220
+ )
221
+
222
+ logger.info(f"Created text search task {task.task_id} for query '{q}'")
223
+ # Return processing response
224
+ return ProcessingResponse(
225
+ status="processing",
226
+ message=
227
+ "Text embedding computation has been sent to a worker. Please try again in a few moments.",
228
+ task_id=task.task_id,
229
+ query=q,
230
+ )
231
+
232
+ logger.info("No active workers available for text search")
233
+ # No active workers - return confirmation required
234
+ return ProcessingResponse(status="confirmation_required", query=q)
235
+
236
+ except HTTPException:
237
+ # Re-raise HTTP exceptions unchanged
238
+ raise
239
+ except Exception as e:
240
+ logger.error(f"Text search GET failed for q '{q}': {e}", exc_info=True)
241
+ raise HTTPException(status_code=500, detail=str(e))
242
+
243
+
244
+ @app.post("/api/search/audio", response_model=ProcessingResponse)
245
+ async def search_by_audio(
246
+ audio: UploadFile = File(..., description="Audio file to search with"),
247
+ n_results: int = Form(10, description="Number of results to return"),
248
+ ):
249
+ """Search for music tracks by audio file."""
250
+ logger.info(
251
+ f"Audio search request received - filename: {audio.filename}, content_type: {audio.content_type}"
252
+ )
253
+
254
+ try:
255
+ # Validate file type
256
+ if not audio.content_type or not any(
257
+ audio.content_type.startswith(mime)
258
+ for mime in ["audio/", "application/octet-stream"]
259
+ ):
260
+ logger.warning(f"Invalid file type: {audio.content_type}")
261
+ raise HTTPException(
262
+ status_code=400, detail="Invalid file type. Please upload an audio file."
263
+ )
264
+
265
+ # Read audio content
266
+ content = await audio.read()
267
+ logger.info(f"Audio file read successfully - size: {len(content)} bytes")
268
+
269
+ # Check if there are active workers
270
+ active_workers = job_queue.get_active_workers()
271
+ if active_workers:
272
+ logger.info(
273
+ f"Found {len(active_workers)} active workers, creating audio search task"
274
+ )
275
+ # Create task for worker processing
276
+ task = job_queue.create_task(
277
+ audio_data=content,
278
+ audio_filename=audio.filename or "upload.tmp",
279
+ n_results=n_results,
280
+ prioritize=True,
281
+ context_type=ContextType.AUDIO_SEARCH,
282
+ )
283
+
284
+ logger.info(
285
+ f"Created audio search task {task.task_id} for file '{audio.filename}'"
286
+ )
287
+ # Return processing response
288
+ return ProcessingResponse(
289
+ status="processing",
290
+ message=
291
+ "Audio embedding computation has been sent to a worker. Please try again in a few moments.",
292
+ task_id=task.task_id,
293
+ filename=audio.filename,
294
+ )
295
+
296
+ logger.info("No active workers available for audio search")
297
+ # No active workers - return confirmation required
298
+ return ProcessingResponse(status="confirmation_required", filename=audio.filename)
299
+
300
+ except HTTPException:
301
+ # Re-raise HTTP exceptions unchanged
302
+ raise
303
+ except Exception as e:
304
+ logger.error(f"Audio search failed: {e}", exc_info=True)
305
+ raise HTTPException(status_code=500, detail=f"Audio search failed: {str(e)}")
306
+
307
+
308
+ @app.get("/api/library/tracks", response_model=TracksListResponse)
309
+ async def get_library_tracks(
310
+ page: int = Query(1, ge=1, description="Page number (starting from 1)"),
311
+ limit: int = Query(50, ge=1, le=200, description="Number of tracks per page"),
312
+ search: Optional[str] = Query(
313
+ None, description="Search query for filtering tracks (simple search)"
314
+ ),
315
+ artist: Optional[str] = Query(None, description="Filter by artist name"),
316
+ album: Optional[str] = Query(None, description="Filter by album name"),
317
+ title: Optional[str] = Query(None, description="Filter by track title"),
318
+ ):
319
+ """Get tracks from the library with pagination and optional search.
320
+
321
+ Supports both simple search (search parameter) and advanced search (artist, album, title parameters).
322
+ Advanced search uses AND logic between fields, while simple search uses OR logic across all fields.
323
+ """
324
+ logger.info(
325
+ f"Library tracks request - page: {page}, limit: {limit}, search: {search}, artist: {artist}, album: {album}, title: {title}"
326
+ )
327
+
328
+ try:
329
+ # Determine search type and execute appropriate query
330
+ if artist or album or title:
331
+ # Use advanced search with AND logic
332
+ logger.info(
333
+ f"Performing advanced library search - artist: {artist}, album: {album}, title: {title}"
334
+ )
335
+ tracks = service.search_tracks_advanced(
336
+ artist=artist, album=album, title=title, limit=limit, offset=(page - 1) * limit
337
+ )
338
+ total_count = service.count_tracks_advanced(
339
+ artist=artist, album=album, title=title
340
+ )
341
+ elif search and search.strip():
342
+ # Simple search query
343
+ logger.info(f"Performing simple library search for: '{search.strip()}'")
344
+ tracks = service.search_tracks_in_database(
345
+ search.strip(), limit=limit, offset=(page - 1) * limit
346
+ )
347
+ total_count = service.count_tracks_in_database(search.strip())
348
+ else:
349
+ # Regular pagination with no search
350
+ offset = (page - 1) * limit
351
+ tracks = service.get_all_tracks(limit=limit, offset=offset)
352
+
353
+ # Get total count for pagination info
354
+ stats = service.get_database_stats()
355
+ total_count = stats.get("track_database_stats", {}).get("total_tracks", 0)
356
+
357
+ logger.info(f"Retrieved {len(tracks)} tracks from database")
358
+
359
+ return TracksListResponse(
360
+ tracks=[
361
+ TrackResponse(
362
+ artist=track.artist,
363
+ album=track.album,
364
+ title=track.title,
365
+ filepath=str(track.filepath),
366
+ media_server_rating_key=track.media_server_rating_key,
367
+ media_server_type=track.media_server_type.value,
368
+ )
369
+ for track in tracks
370
+ ],
371
+ total_count=total_count,
372
+ page=page,
373
+ limit=limit,
374
+ )
375
+ except Exception as e:
376
+ logger.error(f"Error getting library tracks: {e}", exc_info=True)
377
+ raise HTTPException(status_code=500, detail=f"Failed to get library tracks: {str(e)}")
378
+
379
+
380
+ @app.get("/api/config", response_model=ConfigResponse)
381
+ async def get_config():
382
+ """Get current configuration."""
383
+ try:
384
+ logger.info("Configuration get request received")
385
+ # Use thread-safe access to config
386
+ with shared_resources_lock:
387
+ config_dict = config.to_dict()
388
+ logger.info("Configuration retrieved successfully")
389
+ # Pydantic will validate and coerce into the typed shape
390
+ return ConfigResponse(**config_dict)
391
+ except Exception as e:
392
+ logger.error(f"Failed to get configuration: {e}", exc_info=True)
393
+ raise HTTPException(status_code=500, detail=f"Failed to get configuration: {str(e)}")
394
+
395
+
396
+ @app.post("/api/config", response_model=SaveConfigResponse)
397
+ async def save_config(config_request: ConfigRequest):
398
+ """Save configuration to YAML file and hot-reload the application."""
399
+ try:
400
+ logger.info("Configuration save request received")
401
+
402
+ media_server_config = MediaServerConfig(**config_request.media_server)
403
+ plex_config = PlexConfig(**config_request.plex)
404
+ clap_config = CLAPConfig(**config_request.clap)
405
+ chroma_config = ChromaConfig(**config_request.chroma)
406
+ database_config = DatabaseConfig()
407
+ api_config = APIConfig(**config_request.api)
408
+ logging_config = LoggingConfig(**config_request.logging)
409
+ server = ServerConfig(**config_request.server)
410
+
411
+ yaml_config = MyceliumConfig(
412
+ media_server=media_server_config,
413
+ plex=plex_config,
414
+ clap=clap_config,
415
+ chroma=chroma_config,
416
+ database=database_config,
417
+ api=api_config,
418
+ logging=logging_config,
419
+ server=server,
420
+ )
421
+
422
+ # Save to default YAML location
423
+ yaml_config.save_to_yaml()
424
+ logger.info("Configuration saved successfully to YAML file")
425
+
426
+ # Hot-reload the configuration and services
427
+ try:
428
+ reload_config()
429
+ logger.info("Configuration hot-reloaded successfully")
430
+ return SaveConfigResponse(
431
+ message="Configuration saved and reloaded successfully! Changes are now active.",
432
+ status="success",
433
+ reloaded=True,
434
+ )
435
+ except Exception as reload_error:
436
+ logger.error(
437
+ f"Configuration saved but hot-reload failed: {reload_error}",
438
+ exc_info=True,
439
+ )
440
+ return SaveConfigResponse(
441
+ message="Configuration saved successfully, but hot-reload failed. Please restart the server to apply changes.",
442
+ status="warning",
443
+ reloaded=False,
444
+ reload_error=str(reload_error),
445
+ )
446
+
447
+ except Exception as e:
448
+ logger.error(f"Failed to save configuration: {e}", exc_info=True)
449
+ raise HTTPException(status_code=500, detail=f"Failed to save configuration: {str(e)}")
450
+
451
+
452
+ @app.post("/api/library/scan", response_model=ScanLibraryResponse)
453
+ @with_service_lock
454
+ async def scan_library():
455
+ """Scan the Plex music library and save metadata to database."""
456
+ try:
457
+ result = service.scan_library_to_database()
458
+ return ScanLibraryResponse(
459
+ message="Successfully scanned library and saved to database",
460
+ total_tracks=result["total_tracks"],
461
+ new_tracks=result["new_tracks"],
462
+ updated_tracks=result["updated_tracks"],
463
+ scan_timestamp=result["scan_timestamp"],
464
+ )
465
+ except Exception as e:
466
+ raise HTTPException(status_code=500, detail=str(e))
467
+
468
+
469
+ @app.post("/api/library/process", response_model=ProcessingResponse)
470
+ @with_service_lock
471
+ async def process_library():
472
+ """Process embeddings - prioritize workers, fallback to server with confirmation."""
473
+ try:
474
+ # Check if processing is already running
475
+ if service.is_processing_active():
476
+ return ProcessingResponse(
477
+ status="already_running", message="Processing is already in progress"
478
+ )
479
+
480
+ # Check for active workers first
481
+ if service.can_use_workers():
482
+ # Use worker-based processing
483
+ result = service.create_worker_tasks()
484
+
485
+ if result["success"]:
486
+ return ProcessingResponse(
487
+ status="worker_processing_started",
488
+ message=f"Created {result['tasks_created']} tasks for worker processing",
489
+ tasks_created=result["tasks_created"],
490
+ active_workers=result["worker_info"]["active_workers"],
491
+ )
492
+ else:
493
+ return ProcessingResponse(
494
+ status="worker_error",
495
+ message=result["message"],
496
+ active_workers=0,
497
+ confirmation_required=False,
498
+ )
499
+ else:
500
+ # No workers available - require confirmation for server processing
501
+ return ProcessingResponse(
502
+ status="no_workers",
503
+ message=
504
+ "No client workers are available. The server hardware may not have sufficient resources for CLAP model processing. Do you want to proceed with server processing anyway?",
505
+ active_workers=0,
506
+ confirmation_required=True,
507
+ )
508
+
509
+ except Exception as e:
510
+ raise HTTPException(status_code=500, detail=str(e))
511
+
512
+
513
+ @app.post("/api/library/process/server", response_model=ProcessingResponse)
514
+ @with_service_lock
515
+ async def process_library_on_server(background_tasks: BackgroundTasks):
516
+ """Process embeddings on server after user confirmation."""
517
+ try:
518
+ # Check if processing is already running
519
+ if service.is_processing_active():
520
+ return ProcessingResponse(
521
+ message="Processing is already in progress", status="already_running"
522
+ )
523
+
524
+ # Start processing in background on server
525
+ background_tasks.add_task(service.process_embeddings_from_database)
526
+
527
+ return ProcessingResponse(
528
+ message="Server-side embedding processing started in background",
529
+ status="server_started",
530
+ )
531
+ except Exception as e:
532
+ raise HTTPException(status_code=500, detail=str(e))
533
+
534
+
535
@app.post("/api/library/process/stop", response_model=StopProcessingResponse)
@with_service_lock
async def stop_processing():
    """Stop the current embedding processing.

    Always signals the server-side processor to stop; when worker-based
    processing is also active, clears the worker queue as well.
    """
    try:
        service.stop_processing()

        # Worker-based processing needs its queued tasks cleared too.
        if service.has_active_worker_processing():
            worker_result = service.stop_worker_processing()
            return StopProcessingResponse(
                message=f"Processing stop requested. {worker_result['message']}",
                cleared_tasks=worker_result.get("cleared_tasks", 0),
                type="worker_processing",
            )

        # Server-side processing stops cooperatively after the current track.
        return StopProcessingResponse(
            message="Processing stop requested - will finish current track and stop",
            type="server_processing",
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
557
+
558
+
559
@app.get("/api/library/progress", response_model=TrackDatabaseStats)
@with_service_lock
async def get_processing_progress(
    model_id: Optional[str] = Query(None, description="Model ID to get progress for")
):
    """Get current processing progress and statistics."""
    logger.debug("Processing progress request received")
    try:
        # Fetch the raw stats dict and let the response model validate it.
        progress = service.get_processing_progress(model_id)
        return TrackDatabaseStats(**progress)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
571
+
572
+
573
@app.post("/api/playlists/create", response_model=PlaylistResponse)
@with_service_lock
async def create_playlist(request: CreatePlaylistRequest):
    """Create a playlist from a list of track IDs."""
    try:
        created = service.create_playlist(
            request.name, request.track_ids, request.batch_size
        )
        # created_at may be unset; serialize it to an empty string then.
        created_at_iso = created.created_at.isoformat() if created.created_at else ""
        return PlaylistResponse(
            name=created.name,
            track_count=created.track_count,
            created_at=created_at_iso,
            server_id=created.server_id,
        )
    except Exception as e:
        logger.error(f"Error creating playlist '{request.name}': {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
588
+
589
+
590
# Worker Coordination API
@app.post("/workers/register", response_model=WorkerRegistrationResponse)
async def register_worker(request: WorkerRegistrationRequest):
    """Register a worker with the server."""
    logger.info(f"Worker registration request received for worker ID {request.worker_id}")
    try:
        registered = job_queue.register_worker(request.worker_id, request.ip_address)
        response = WorkerRegistrationResponse(
            worker_id=registered.id,
            registration_time=registered.registration_time.isoformat(),
            message="Worker registered successfully",
        )
        return response
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
604
+
605
+
606
@app.get("/workers/get_job")
async def get_job(
    worker_id: str = Query(..., description="Worker ID"),
    ip_address: str = Query(..., description="Client IP address"),
):
    """Hand the next queued task to a polling worker.

    Returns a JobRequest payload when work is available. When the queue is
    empty this returns None, which FastAPI serializes as a JSON null body
    with HTTP 200 (not a true 204 No Content) — workers poll again later.
    """
    logger.debug(f"Worker job request received for worker ID {worker_id}")
    try:
        task = job_queue.get_next_job(worker_id=worker_id, ip_address=ip_address)
        if task is None:
            # Nothing queued right now.
            logger.debug(f"No job available for worker {worker_id}")
            return None

        logger.info(
            f"Assigning task {task.task_id} to worker {worker_id} for track {task.track_id}"
        )

        # The worker downloads the audio itself via download_url rather than
        # receiving file bytes inline.
        job = JobRequest(
            task_id=task.task_id,
            task_type=task.task_type,
            track_id=task.track_id,
            download_url=task.download_url,
            text_query=task.text_query,
            audio_filename=task.audio_filename,
            n_results=task.n_results,
        )
        return job
    except Exception as e:
        logger.error(f"Error getting job for worker {worker_id}: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=str(e))
637
+
638
+
639
@app.post("/workers/submit_result", response_model=TaskResultResponse)
async def submit_result(request: TaskResultRequest):
    """Submit the result of a completed task.

    Records the worker's result in the job queue, then performs type-specific
    follow-up work on the server:

    - COMPUTE_AUDIO_EMBEDDING: persist the embedding for the track and, when
      the task context is AUDIO_SEARCH, run the similarity search server-side
      and attach the results (as dicts) to the task.
    - COMPUTE_TEXT_EMBEDDING: run the similarity search server-side and attach
      the results to the task.

    Temporary task files are cleaned up after search tasks and after failed
    tasks. Returns a TaskResultResponse indicating whether the queue accepted
    the submission; unexpected errors surface as HTTP 500.
    """
    try:
        logger.info(
            f"Worker result submission for task {request.task_id}, track {request.track_id}, status: {request.status}"
        )

        # Repackage the request payload into the queue's domain object.
        task_result = TaskResult(
            task_id=request.task_id,
            track_id=request.track_id,
            status=request.status,
            embedding=request.embedding,
            error_message=request.error_message,
            search_results=request.search_results,
        )

        # success is False when the queue no longer knows this task.
        success = job_queue.submit_result(task_result)
        logger.info(f"Task result submission: success={success}")

        # Handle different task types
        if success and request.embedding:
            # Get the task to check its type
            task = job_queue.get_task_status(request.task_id)

            if task and task.task_type == TaskType.COMPUTE_AUDIO_EMBEDDING:
                # Track embedding task
                logger.info(
                    f"Saving worker-generated embedding for track {request.track_id}, size: {len(request.embedding)}"
                )
                # Only persist when a real track id is present (audio-search
                # uploads may have no associated library track).
                if (request.track_id is not None) and (request.track_id.strip() != ""):
                    service.save_embedding(request.track_id, request.embedding)
                    logger.info(
                        f"Successfully saved worker-generated embedding for track {request.track_id}"
                    )

                if task.context_type == ContextType.AUDIO_SEARCH:
                    # Audio search task - perform search on server
                    context_info = (
                        f"track '{request.track_id}'" if task.track_id else f"file '{task.audio_filename}'"
                    )
                    logger.info(
                        f"Performing audio search for task {request.task_id} with {context_info}"
                    )
                    try:
                        # Use the embedding to search directly
                        search_results = service.embedding_repository.search_by_embedding(
                            request.embedding, task.n_results or 10
                        )

                        # Convert search results to API response models
                        results_responses = [
                            map_search_result_to_response(result)
                            for result in search_results
                        ]

                        # Convert to dict format for storage in task
                        results_dict = [result.model_dump() for result in results_responses]

                        # Update task with search results - ensure task status is set to success
                        # NOTE(review): reaches into job_queue._lock (private
                        # attribute) to mutate the task atomically — presumably
                        # the queue offers no public mutation API; confirm.
                        with job_queue._lock:
                            task.search_results = results_dict
                            if task.status != TaskStatus.SUCCESS:
                                logger.info(
                                    f"Setting task {request.task_id} status to SUCCESS"
                                )
                                task.status = TaskStatus.SUCCESS
                                task.completed_at = datetime.now()

                        logger.info(
                            f"Audio search completed for task {request.task_id}, found {len(results_dict)} results"
                        )
                    except Exception as e:
                        logger.error(
                            f"Error performing audio search for task {request.task_id}: {e}",
                            exc_info=True,
                        )
                        # Set task status to failed
                        with job_queue._lock:
                            task.status = TaskStatus.FAILED
                            task.error_message = str(e)
                            task.completed_at = datetime.now()

                # Clean up temporary audio file for audio search tasks
                job_queue.cleanup_task_files(request.task_id)

            elif task and task.task_type == TaskType.COMPUTE_TEXT_EMBEDDING:
                # Text search task
                logger.info(
                    f"Performing text search for task {request.task_id} with query '{task.text_query}'"
                )
                try:
                    # Use the embedding to search directly
                    search_results = service.embedding_repository.search_by_embedding(
                        request.embedding, task.n_results or 10
                    )

                    # Update task with search results - ensure task status is set to success
                    # NOTE(review): unlike the audio branch above, the raw
                    # domain SearchResult objects are stored here (not dicts);
                    # get_task_status tolerates both shapes — confirm this
                    # asymmetry is intentional.
                    with job_queue._lock:
                        task.search_results = search_results
                        if task.status != TaskStatus.SUCCESS:
                            logger.info(
                                f"Setting task {request.task_id} status to SUCCESS"
                            )
                            task.status = TaskStatus.SUCCESS
                            task.completed_at = datetime.now()

                    logger.info(
                        f"Text search completed for task {request.task_id}, found {len(search_results)} results"
                    )
                except Exception as e:
                    logger.error(
                        f"Error performing text search for task {request.task_id}: {e}",
                        exc_info=True,
                    )
                    # Set task status to failed
                    with job_queue._lock:
                        task.status = TaskStatus.FAILED
                        task.error_message = str(e)
                        task.completed_at = datetime.now()

                # Clean up any temporary files (for consistency)
                job_queue.cleanup_task_files(request.task_id)
        elif request.error_message:
            logger.error(
                f"Worker task failed for track {request.track_id}: {request.error_message}"
            )
            # Clean up temporary files for failed tasks
            job_queue.cleanup_task_files(request.task_id)
        else:
            # NOTE(review): this branch also fires when success is False
            # (task unknown to the queue), not only when the embedding is
            # missing — the warning text covers just one of those cases.
            logger.warning(
                f"Task {request.task_id} completed but no embedding provided"
            )

        return TaskResultResponse(
            success=success,
            message="Result submitted successfully" if success else "Task not found",
        )
    except Exception as e:
        logger.error(
            f"Error submitting worker result for task {request.task_id}: {e}",
            exc_info=True,
        )
        raise HTTPException(status_code=500, detail=str(e))
783
+
784
+
785
# File Server for Audio Downloads
@app.get("/download_track/{track_id}")
async def download_track(track_id: str):
    """Download an audio file for processing.

    Streams the track's file from disk for worker downloads.

    Raises:
        HTTPException: 404 when the track or its file is missing,
            500 on unexpected errors.
    """
    logger.debug(f"Download request for track {track_id}")
    try:
        # Get track info from service
        track_info = service.get_track_by_id(track_id)
        if not track_info:
            raise HTTPException(status_code=404, detail="Track not found")

        # Verify file exists
        file_path = Path(track_info.filepath)
        if not file_path.exists():
            raise HTTPException(status_code=404, detail="Audio file not found")

        # Return file response
        return FileResponse(
            path=str(file_path),
            media_type="application/octet-stream",
            filename=file_path.name,
        )
    except HTTPException:
        # Bug fix: previously the generic handler below caught the 404s
        # raised above and re-surfaced them as 500s. Re-raise as-is, matching
        # the pattern used by the other endpoints in this module.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
809
+
810
+
811
@app.get("/download_audio/{task_id}")
async def download_audio(task_id: str):
    """Download an audio file for a search task.

    Serves the temporary upload associated with an audio-search task so a
    worker can fetch it.

    Raises:
        HTTPException: 404 when the task has no (existing) temp file,
            500 on unexpected errors.
    """
    logger.debug(f"Download request for audio task {task_id}")
    try:
        # Get the temporary file path for this task
        temp_file_path = job_queue.get_audio_task_file(task_id)
        if not temp_file_path or not temp_file_path.exists():
            raise HTTPException(status_code=404, detail="Audio task file not found")

        # Return file response
        return FileResponse(
            path=str(temp_file_path),
            media_type="application/octet-stream",
            filename=f"audio_task_{task_id}.tmp",
        )
    except HTTPException:
        # Bug fix: previously the generic handler below converted the
        # deliberate 404 above into a 500. Re-raise as-is.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
829
+
830
+
831
# Main API for Similar Tracks
@app.get(
    "/similar/by_track/{track_id}",
    response_model=Union[List[SearchResultResponse], ProcessingResponse],
)
async def get_similar_tracks(
    track_id: str, n_results: int = Query(10, description="Number of results")
):
    """Find tracks similar to a given track.

    Behavior depends on whether an embedding exists for the track:

    - embedding present: returns a list of SearchResultResponse.
    - no embedding, active workers: queues a prioritized worker task and
      returns ProcessingResponse(status="processing").
    - no embedding, no workers: returns
      ProcessingResponse(status="confirmation_required") so the client can
      confirm server-side computation.

    Raises:
        HTTPException: 500 on unexpected errors.
    """
    logger.info(f"Similar tracks request for track_id: {track_id}")

    try:
        # Check if embedding already exists
        has_emb = service.has_embedding(track_id=track_id)
        logger.info(f"Embedding check for track {track_id}: {has_emb}")

        if has_emb:
            logger.info(
                f"Embedding exists for track {track_id}, performing similarity search"
            )
            # Perform similarity search
            results = service.search_similar_by_track_id(track_id, n_results)
            logger.info(f"Found {len(results)} similar tracks for track {track_id}")

            # Use the shared mapper instead of duplicating the response
            # construction inline (keeps this endpoint consistent with the
            # other search endpoints in this module).
            response_data = [map_search_result_to_response(result) for result in results]

            # Leftover debugging output, reduced to a single debug-level line.
            if response_data:
                logger.debug(
                    f"First result - similarity_score: {response_data[0].similarity_score}, distance: {response_data[0].distance}"
                )

            return response_data

        logger.info(f"No embedding found for track {track_id}, checking for workers")

        # Check if there are active workers
        active_workers = job_queue.get_active_workers()
        if active_workers:
            logger.info(f"Found {len(active_workers)} active workers, creating task")
            # Create task for worker processing; prioritized because a user
            # is actively waiting on this result.
            download_url = f"/download_track/{track_id}"
            task = job_queue.create_task(
                track_id=track_id,
                download_url=download_url,
                prioritize=True,
                context_type=ContextType.SIMILAR_TRACKS,
            )

            logger.info(f"Created worker task {task.task_id} for track {track_id}")
            # Return processing response instead of blocking
            response = ProcessingResponse(
                status="processing",
                message=
                "Processing has been sent to a worker. Please try again in a few moments.",
                track_id=track_id,
                task_id=task.task_id,
            )
            logger.info(f"Returning worker processing response: {response.model_dump()}")
            return response

        logger.info(f"No active workers available for track {track_id}")
        # No active workers - return confirmation required
        return ProcessingResponse(
            status="confirmation_required",
            message=
            "The sonic signature for this song needs to be calculated, and no workers are active. Do you wish to continue on the server hardware?",
            track_id=track_id,
        )

    except HTTPException as e:
        logger.error(
            f"Error in similar tracks endpoint for track {track_id}: {e}",
            exc_info=True,
        )
        # Re-raise HTTP exceptions as they are
        raise
    except Exception as e:
        logger.error(
            f"Error in similar tracks endpoint for track {track_id}: {e}",
            exc_info=True,
        )
        raise HTTPException(status_code=500, detail=f"Similar tracks search failed: {str(e)}")
928
+
929
+
930
@app.post("/compute/on_server")
async def compute_on_server(request: ComputeOnServerRequest):
    """Compute embedding on server CPU after user confirmation.

    Looks up the track, computes its embedding locally, and persists it.
    No response body is returned on success.

    Raises:
        HTTPException: 404 for an unknown track, 500 when the embedding
            cannot be computed or on unexpected errors.
    """
    try:
        logger.info(f"Starting server-side computation for track {request.track_id}")
        track_info = service.get_track_by_id(request.track_id)
        if track_info is None:
            logger.warning(f"Track not found for ID: {request.track_id}")
            raise HTTPException(status_code=404, detail="Track not found")

        logger.info(
            f"Computing embedding for track {request.track_id}: {track_info.artist} - {track_info.title}"
        )

        embedding = service.compute_single_embedding(os.fspath(track_info.filepath))

        # A missing or zero-length vector counts as a computation failure.
        if embedding is None or not len(embedding):
            logger.error(f"Failed to compute embedding for track {request.track_id}")
            raise HTTPException(status_code=500, detail="Failed to compute embedding")

        logger.info(
            f"Successfully computed embedding for track {request.track_id}, size: {len(embedding)}"
        )

        # Persist the freshly computed embedding.
        service.save_embedding(request.track_id, embedding)
        logger.info(
            f"Successfully computed and saved embedding for track: {request.track_id}"
        )

    except HTTPException:
        # Re-raise HTTP exceptions as they are
        raise
    except Exception as e:
        logger.error(
            f"Error computing embedding on server for track {request.track_id}: {e}",
            exc_info=True,
        )
        raise HTTPException(status_code=500, detail=f"Server computation failed: {str(e)}")
969
+
970
+
971
@app.post("/compute/search/text", response_model=List[SearchResultResponse])
async def compute_text_search_on_server(request: ComputeSearchOnServerRequest):
    """Compute text search on server CPU after user confirmation.

    Raises:
        HTTPException: 400 when no query is supplied, 500 on unexpected
            errors.
    """
    try:
        if not request.query:
            raise HTTPException(
                status_code=400, detail="Query is required for text search"
            )

        logger.info(f"Starting server-side text search for query: '{request.query}'")

        # Perform text search directly on server
        results = service.search_similar_by_text(request.query, request.n_results)

        logger.info(
            f"Text search completed successfully - found {len(results)} results"
        )

        # Use the shared mapper instead of duplicating the response
        # construction inline (consistency with get_task_status and the
        # worker-result path).
        return [map_search_result_to_response(result) for result in results]

    except HTTPException:
        # Re-raise HTTP exceptions as they are
        raise
    except Exception as e:
        logger.error(
            f"Error computing text search on server for query '{request.query}': {e}",
            exc_info=True,
        )
        raise HTTPException(status_code=500, detail=f"Server text search failed: {str(e)}")
1014
+
1015
+
1016
@app.post("/compute/search/audio", response_model=List[SearchResultResponse])
async def compute_audio_search_on_server(
    audio: UploadFile = File(..., description="Audio file to search with"),
    n_results: int = Form(10, description="Number of results to return"),
):
    """Compute audio search on server CPU after user confirmation.

    Writes the upload to a temporary file, runs the similarity search, and
    always removes the temp file afterwards.

    Raises:
        HTTPException: 400 for a non-audio upload or empty body, 500 on
            unexpected errors.
    """
    try:
        # Validate file type (octet-stream is allowed for generic uploads).
        if not audio.content_type or not any(
            audio.content_type.startswith(mime)
            for mime in ["audio/", "application/octet-stream"]
        ):
            logger.warning(f"Invalid file type: {audio.content_type}")
            raise HTTPException(
                status_code=400, detail="Invalid file type. Please upload an audio file."
            )

        # Read audio content
        content = await audio.read()
        if not content:
            raise HTTPException(
                status_code=400, detail="Audio data is required for audio search"
            )

        logger.info(
            f"Starting server-side audio search for file: '{audio.filename}', size: {len(content)} bytes"
        )

        # Create temporary file for the audio data
        with tempfile.NamedTemporaryFile(delete=False, suffix=".tmp") as temp_file:
            temp_file.write(content)
            temp_file_path = temp_file.name

        try:
            # Perform audio search directly on server
            results = service.search_similar_by_audio(Path(temp_file_path), n_results)

            logger.info(
                f"Audio search completed successfully - found {len(results)} results"
            )

            # Use the shared mapper instead of duplicating the response
            # construction inline (consistency with the other search paths).
            return [map_search_result_to_response(result) for result in results]
        finally:
            # Clean up temporary file; ignore races where it is already gone.
            try:
                os.unlink(temp_file_path)
            except OSError:
                pass

    except HTTPException:
        # Re-raise HTTP exceptions as they are
        raise
    except Exception as e:
        logger.error(f"Audio search on server failed: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Audio search failed: {str(e)}")
1085
+
1086
+
1087
@app.get("/api/queue/task/{task_id}", response_model=TaskStatusResponse)
async def get_task_status(task_id: str):
    """Get status of a specific task.

    Raises:
        HTTPException: 404 when the task is unknown, 500 on unexpected
            errors.
    """
    try:
        task = job_queue.get_task_status(task_id)
        if task:
            # Build typed response; coerce search_results if present.
            # Entries may be dicts or domain objects depending on the task
            # type, so coercion is best-effort.
            search_results_typed: Optional[List[SearchResultResponse]] = None
            if task.search_results:
                try:
                    search_results_typed = [
                        map_search_result_to_response(sr) for sr in task.search_results
                    ]
                except Exception:
                    # Fallback to None if coercion fails
                    search_results_typed = None
                logger.debug(
                    f"Task {task_id} status: {task.status.value}, has search_results: {len(task.search_results)} results"
                )
            else:
                logger.debug(
                    f"Task {task_id} status: {task.status.value}, no search_results yet"
                )

            return TaskStatusResponse(
                task_id=task.task_id,
                status=task.status.value,
                track_id=task.track_id,
                started_at=task.started_at.isoformat() if task.started_at else None,
                completed_at=task.completed_at.isoformat() if task.completed_at else None,
                error_message=task.error_message,
                search_results=search_results_typed,
            )
        else:
            logger.warning(f"Task {task_id} not found in queue")
            raise HTTPException(status_code=404, detail="Task not found")
    except HTTPException:
        # Bug fix: previously the generic handler below caught the 404
        # raised above and converted it into a 500. Re-raise as-is.
        raise
    except Exception as e:
        logger.error(f"Error getting task status for {task_id}: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error getting task status: {str(e)}")
1126
+
1127
def map_search_result_to_response(result: SearchResult) -> SearchResultResponse:
    """Convert a domain SearchResult into its API response model."""
    track = result.track
    track_payload = TrackResponse(
        artist=track.artist,
        album=track.album,
        title=track.title,
        filepath=str(track.filepath),
        media_server_rating_key=track.media_server_rating_key,
        media_server_type=track.media_server_type.value,
    )
    return SearchResultResponse(
        track=track_payload,
        similarity_score=result.similarity_score,
        distance=result.distance,
    )
1140
+
1141
if __name__ == "__main__":
    # Direct-execution entry point: serve the app with uvicorn using the
    # host/port/reload settings from the loaded config. The app is referenced
    # by import string ("mycelium.api.app:app") so uvicorn's reload mode works.
    uvicorn.run(
        "mycelium.api.app:app",
        host=config.api.host,
        port=config.api.port,
        reload=config.api.reload,
    )