mycelium_ai-0.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mycelium/__init__.py +0 -0
- mycelium/api/__init__.py +0 -0
- mycelium/api/app.py +1147 -0
- mycelium/api/client_app.py +170 -0
- mycelium/api/generated_sources/__init__.py +0 -0
- mycelium/api/generated_sources/server_schemas/__init__.py +97 -0
- mycelium/api/generated_sources/server_schemas/api/__init__.py +5 -0
- mycelium/api/generated_sources/server_schemas/api/default_api.py +2473 -0
- mycelium/api/generated_sources/server_schemas/api_client.py +766 -0
- mycelium/api/generated_sources/server_schemas/api_response.py +25 -0
- mycelium/api/generated_sources/server_schemas/configuration.py +434 -0
- mycelium/api/generated_sources/server_schemas/exceptions.py +166 -0
- mycelium/api/generated_sources/server_schemas/models/__init__.py +41 -0
- mycelium/api/generated_sources/server_schemas/models/api_section.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/chroma_section.py +69 -0
- mycelium/api/generated_sources/server_schemas/models/clap_section.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/compute_on_server200_response.py +79 -0
- mycelium/api/generated_sources/server_schemas/models/compute_on_server_request.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/compute_text_search_request.py +69 -0
- mycelium/api/generated_sources/server_schemas/models/config_request.py +81 -0
- mycelium/api/generated_sources/server_schemas/models/config_response.py +107 -0
- mycelium/api/generated_sources/server_schemas/models/create_playlist_request.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/get_similar_by_track200_response.py +143 -0
- mycelium/api/generated_sources/server_schemas/models/library_stats_response.py +77 -0
- mycelium/api/generated_sources/server_schemas/models/logging_section.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/media_server_section.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/playlist_response.py +73 -0
- mycelium/api/generated_sources/server_schemas/models/plex_section.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/processing_response.py +90 -0
- mycelium/api/generated_sources/server_schemas/models/save_config_response.py +73 -0
- mycelium/api/generated_sources/server_schemas/models/scan_library_response.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/search_result_response.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/server_section.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/stop_processing_response.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/task_status_response.py +87 -0
- mycelium/api/generated_sources/server_schemas/models/track_database_stats.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/track_response.py +77 -0
- mycelium/api/generated_sources/server_schemas/models/tracks_list_response.py +81 -0
- mycelium/api/generated_sources/server_schemas/rest.py +329 -0
- mycelium/api/generated_sources/server_schemas/test/__init__.py +0 -0
- mycelium/api/generated_sources/server_schemas/test/test_api_section.py +57 -0
- mycelium/api/generated_sources/server_schemas/test/test_chroma_section.py +55 -0
- mycelium/api/generated_sources/server_schemas/test/test_clap_section.py +60 -0
- mycelium/api/generated_sources/server_schemas/test/test_compute_on_server200_response.py +52 -0
- mycelium/api/generated_sources/server_schemas/test/test_compute_on_server_request.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_compute_text_search_request.py +54 -0
- mycelium/api/generated_sources/server_schemas/test/test_config_request.py +66 -0
- mycelium/api/generated_sources/server_schemas/test/test_config_response.py +97 -0
- mycelium/api/generated_sources/server_schemas/test/test_create_playlist_request.py +60 -0
- mycelium/api/generated_sources/server_schemas/test/test_default_api.py +150 -0
- mycelium/api/generated_sources/server_schemas/test/test_get_similar_by_track200_response.py +61 -0
- mycelium/api/generated_sources/server_schemas/test/test_library_stats_response.py +63 -0
- mycelium/api/generated_sources/server_schemas/test/test_logging_section.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_media_server_section.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_playlist_response.py +58 -0
- mycelium/api/generated_sources/server_schemas/test/test_plex_section.py +56 -0
- mycelium/api/generated_sources/server_schemas/test/test_processing_response.py +61 -0
- mycelium/api/generated_sources/server_schemas/test/test_save_config_response.py +58 -0
- mycelium/api/generated_sources/server_schemas/test/test_scan_library_response.py +61 -0
- mycelium/api/generated_sources/server_schemas/test/test_search_result_response.py +69 -0
- mycelium/api/generated_sources/server_schemas/test/test_server_section.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_stop_processing_response.py +55 -0
- mycelium/api/generated_sources/server_schemas/test/test_task_status_response.py +71 -0
- mycelium/api/generated_sources/server_schemas/test/test_track_database_stats.py +60 -0
- mycelium/api/generated_sources/server_schemas/test/test_track_response.py +63 -0
- mycelium/api/generated_sources/server_schemas/test/test_tracks_list_response.py +75 -0
- mycelium/api/generated_sources/worker_schemas/__init__.py +61 -0
- mycelium/api/generated_sources/worker_schemas/api/__init__.py +5 -0
- mycelium/api/generated_sources/worker_schemas/api/default_api.py +318 -0
- mycelium/api/generated_sources/worker_schemas/api_client.py +766 -0
- mycelium/api/generated_sources/worker_schemas/api_response.py +25 -0
- mycelium/api/generated_sources/worker_schemas/configuration.py +434 -0
- mycelium/api/generated_sources/worker_schemas/exceptions.py +166 -0
- mycelium/api/generated_sources/worker_schemas/models/__init__.py +23 -0
- mycelium/api/generated_sources/worker_schemas/models/save_config_response.py +73 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_clap_section.py +75 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_client_api_section.py +69 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_client_section.py +79 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_config_request.py +73 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_config_response.py +89 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_logging_section.py +67 -0
- mycelium/api/generated_sources/worker_schemas/rest.py +329 -0
- mycelium/api/generated_sources/worker_schemas/test/__init__.py +0 -0
- mycelium/api/generated_sources/worker_schemas/test/test_default_api.py +45 -0
- mycelium/api/generated_sources/worker_schemas/test/test_save_config_response.py +58 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_clap_section.py +60 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_client_api_section.py +55 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_client_section.py +65 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_config_request.py +59 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_config_response.py +89 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_logging_section.py +53 -0
- mycelium/api/worker_models.py +99 -0
- mycelium/application/__init__.py +11 -0
- mycelium/application/job_queue.py +323 -0
- mycelium/application/library_management_use_cases.py +292 -0
- mycelium/application/search_use_cases.py +96 -0
- mycelium/application/services.py +340 -0
- mycelium/client.py +554 -0
- mycelium/client_config.py +251 -0
- mycelium/client_frontend_dist/404.html +1 -0
- mycelium/client_frontend_dist/_next/static/a4iyRdfsvkjdyMAK9cE9Y/_buildManifest.js +1 -0
- mycelium/client_frontend_dist/_next/static/a4iyRdfsvkjdyMAK9cE9Y/_ssgManifest.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/964-830f77d7ce1c2463.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/app/_not-found/page-d25eede5a9099bd3.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/app/layout-9b3d32f96dfe13b6.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/app/page-cc6bad295789134e.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/framework-7c95b8e5103c9e90.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/main-6b37be50736577a2.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/main-app-4153d115599d3126.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/pages/_app-0a0020ddd67f79cf.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/pages/_error-03529f2c21436739.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/webpack-c81e624915b2ea70.js +1 -0
- mycelium/client_frontend_dist/_next/static/css/1eb7f0e2c78e0734.css +1 -0
- mycelium/client_frontend_dist/favicon.ico +0 -0
- mycelium/client_frontend_dist/file.svg +1 -0
- mycelium/client_frontend_dist/globe.svg +1 -0
- mycelium/client_frontend_dist/index.html +1 -0
- mycelium/client_frontend_dist/index.txt +20 -0
- mycelium/client_frontend_dist/next.svg +1 -0
- mycelium/client_frontend_dist/vercel.svg +1 -0
- mycelium/client_frontend_dist/window.svg +1 -0
- mycelium/config.py +346 -0
- mycelium/domain/__init__.py +13 -0
- mycelium/domain/models.py +71 -0
- mycelium/domain/repositories.py +98 -0
- mycelium/domain/worker.py +77 -0
- mycelium/frontend_dist/404.html +1 -0
- mycelium/frontend_dist/_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/964-830f77d7ce1c2463.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/app/_not-found/page-d25eede5a9099bd3.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/app/layout-9b3d32f96dfe13b6.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/app/page-a761463485e0540b.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/framework-7c95b8e5103c9e90.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/main-6b37be50736577a2.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/main-app-4153d115599d3126.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/pages/_app-0a0020ddd67f79cf.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/pages/_error-03529f2c21436739.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/webpack-c81e624915b2ea70.js +1 -0
- mycelium/frontend_dist/_next/static/css/1eb7f0e2c78e0734.css +1 -0
- mycelium/frontend_dist/_next/static/glVJ0yJSL0zWN7anTTG3_/_buildManifest.js +1 -0
- mycelium/frontend_dist/_next/static/glVJ0yJSL0zWN7anTTG3_/_ssgManifest.js +1 -0
- mycelium/frontend_dist/favicon.ico +0 -0
- mycelium/frontend_dist/file.svg +1 -0
- mycelium/frontend_dist/globe.svg +1 -0
- mycelium/frontend_dist/index.html +10 -0
- mycelium/frontend_dist/index.txt +20 -0
- mycelium/frontend_dist/next.svg +1 -0
- mycelium/frontend_dist/vercel.svg +1 -0
- mycelium/frontend_dist/window.svg +1 -0
- mycelium/infrastructure/__init__.py +17 -0
- mycelium/infrastructure/chroma_adapter.py +232 -0
- mycelium/infrastructure/clap_adapter.py +280 -0
- mycelium/infrastructure/plex_adapter.py +145 -0
- mycelium/infrastructure/track_database.py +467 -0
- mycelium/main.py +183 -0
- mycelium_ai-0.5.0.dist-info/METADATA +312 -0
- mycelium_ai-0.5.0.dist-info/RECORD +164 -0
- mycelium_ai-0.5.0.dist-info/WHEEL +5 -0
- mycelium_ai-0.5.0.dist-info/entry_points.txt +2 -0
- mycelium_ai-0.5.0.dist-info/licenses/LICENSE +21 -0
- mycelium_ai-0.5.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,467 @@
"""SQLite database for storing track metadata and processing state."""

import sqlite3
import logging
from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import List, Optional, Dict, Any

from ..domain.models import Track, MediaServerType
from ..config import get_user_data_dir


@dataclass
class StoredTrack:
    """Track with additional metadata for database storage."""
    media_server_rating_key: str
    media_server_type: str
    artist: str
    album: str
    title: str
    filepath: str
    added_at: datetime
    last_scanned: datetime

    def to_track(self) -> Track:
        """Convert to domain Track model."""
        return Track(
            artist=self.artist,
            album=self.album,
            title=self.title,
            filepath=Path(self.filepath),
            media_server_rating_key=self.media_server_rating_key,
            media_server_type=MediaServerType(self.media_server_type)
        )

    @classmethod
    def from_track(cls, track: Track, added_at: datetime = None) -> "StoredTrack":
        """Create StoredTrack from domain Track model."""
        now = datetime.now(timezone.utc)
        return cls(
            media_server_rating_key=track.media_server_rating_key,
            media_server_type=track.media_server_type.value,
            artist=track.artist,
            album=track.album,
            title=track.title,
            filepath=str(track.filepath),
            added_at=added_at or now,
            last_scanned=now
        )


@dataclass
class TrackEmbeddingRecord:
    """Record of an embedding processed for a specific track and model."""
    id: Optional[int]
    media_server_rating_key: str
    media_server_type: str
    model_id: str
    processed_at: datetime


logger = logging.getLogger(__name__)


class TrackDatabase:
    """SQLite database for managing track metadata and processing state."""

    def __init__(self, db_path: Optional[str], media_server_type: MediaServerType) -> None:
        # Default to user data directory if path is not provided
        if not db_path:
            db_path = str(get_user_data_dir() / "mycelium_tracks.db")
        self.db_path = Path(db_path)
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.media_server_type = media_server_type
        logger.debug(f"Initializing TrackDatabase with path: {self.db_path}, media_server_type: {media_server_type}")
        self._init_database()

    def _init_database(self) -> None:
        """Initialize database tables."""
        logger.debug(f"Initializing database tables at {self.db_path}")
        with sqlite3.connect(self.db_path) as conn:
            # Create tracks table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS tracks (
                    media_server_rating_key TEXT NOT NULL,
                    media_server_type TEXT NOT NULL DEFAULT 'plex',
                    artist TEXT NOT NULL,
                    album TEXT NOT NULL,
                    title TEXT NOT NULL,
                    filepath TEXT NOT NULL,
                    added_at TIMESTAMP NOT NULL,
                    last_scanned TIMESTAMP NOT NULL,
                    PRIMARY KEY (media_server_rating_key, media_server_type)
                )
            """)

            # Create track_embeddings table for tracking processed models
            conn.execute("""
                CREATE TABLE IF NOT EXISTS track_embeddings (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    media_server_rating_key TEXT NOT NULL,
                    media_server_type TEXT NOT NULL,
                    model_id TEXT NOT NULL,
                    processed_at TIMESTAMP NOT NULL,
                    UNIQUE(media_server_rating_key, media_server_type, model_id),
                    FOREIGN KEY (media_server_rating_key, media_server_type)
                        REFERENCES tracks(media_server_rating_key, media_server_type)
                )
            """)

            # Create indexes for performance
            conn.execute("CREATE INDEX IF NOT EXISTS idx_tracks_media_server ON tracks(media_server_type)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_tracks_scanned ON tracks(last_scanned)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_track_embeddings_lookup ON track_embeddings(media_server_rating_key, media_server_type)")
            conn.execute("CREATE INDEX IF NOT EXISTS idx_track_embeddings_model ON track_embeddings(model_id)")
            conn.commit()
            logger.debug("Database tables and indexes created/verified successfully")

    def save_tracks(self, tracks: List[Track], scan_timestamp: datetime = None) -> Dict[str, int]:
        """Save tracks to database, return statistics."""
        if scan_timestamp is None:
            scan_timestamp = datetime.now(timezone.utc)

        logger.debug(f"Saving {len(tracks)} tracks to database with timestamp {scan_timestamp}")
        stats = {"new": 0, "updated": 0, "total": len(tracks)}

        with sqlite3.connect(self.db_path) as conn:
            for i, track in enumerate(tracks):
                if i % 100 == 0 and i > 0:
                    logger.debug(f"Processing track {i}/{len(tracks)}")

                # Check if track exists
                existing = conn.execute(
                    "SELECT media_server_rating_key, last_scanned FROM tracks WHERE media_server_rating_key = ? AND media_server_type = ?",
                    (track.media_server_rating_key, track.media_server_type.value)
                ).fetchone()

                if existing:
                    # Update existing track
                    conn.execute("""
                        UPDATE tracks
                        SET artist = ?, album = ?, title = ?, filepath = ?, last_scanned = ?
                        WHERE media_server_rating_key = ? AND media_server_type = ?
                    """, (track.artist, track.album, track.title, str(track.filepath),
                          scan_timestamp, track.media_server_rating_key, track.media_server_type.value))
                    stats["updated"] += 1
                    logger.debug(f"Updated track: {track.artist} - {track.title}")
                else:
                    # Insert new track
                    conn.execute("""
                        INSERT INTO tracks
                        (media_server_rating_key, media_server_type, artist, album, title, filepath, added_at, last_scanned)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                    """, (track.media_server_rating_key, track.media_server_type.value, track.artist, track.album, track.title,
                          str(track.filepath), scan_timestamp, scan_timestamp))
                    stats["new"] += 1
                    logger.debug(f"Added new track: {track.artist} - {track.title}")

            conn.commit()

        logger.debug(f"Track save operation completed: {stats}")
        return stats

    def get_unprocessed_tracks(self, model_id: str, limit: Optional[int] = None) -> List[StoredTrack]:
        """Get tracks that haven't been processed for embeddings with the specified model."""
        logger.debug(f"Getting unprocessed tracks for model: {model_id}, limit: {limit}")
        query = """
            SELECT t.media_server_rating_key, t.media_server_type, t.artist, t.album, t.title,
                   t.filepath, t.added_at, t.last_scanned
            FROM tracks t
            LEFT JOIN track_embeddings te ON (
                t.media_server_rating_key = te.media_server_rating_key
                AND t.media_server_type = te.media_server_type
                AND te.model_id = ?
            )
            WHERE te.id IS NULL
            ORDER BY t.added_at
        """

        params = [model_id]
        if limit:
            query += f" LIMIT {limit}"

        with sqlite3.connect(self.db_path) as conn:
            conn.row_factory = sqlite3.Row
            rows = conn.execute(query, params).fetchall()

        tracks = [
            StoredTrack(
                media_server_rating_key=row["media_server_rating_key"],
                media_server_type=row["media_server_type"],
                artist=row["artist"],
                album=row["album"],
                title=row["title"],
                filepath=row["filepath"],
                added_at=datetime.fromisoformat(row["added_at"]),
                last_scanned=datetime.fromisoformat(row["last_scanned"])
            )
            for row in rows
        ]

        logger.debug(f"Found {len(tracks)} unprocessed tracks for model {model_id}")
        return tracks

    def mark_track_processed(self, media_server_rating_key: str, model_id: str, processed_at: datetime = None) -> None:
        """Mark a track as processed for embeddings with a specific model."""
        if processed_at is None:
            processed_at = datetime.now(timezone.utc)

        logger.debug(f"Marking track {media_server_rating_key} as processed for model {model_id}")
        with sqlite3.connect(self.db_path) as conn:
            conn.execute("""
                INSERT OR REPLACE INTO track_embeddings
                (media_server_rating_key, media_server_type, model_id, processed_at)
                VALUES (?, ?, ?, ?)
            """, (media_server_rating_key, self.media_server_type.value, model_id, processed_at))
            conn.commit()
            logger.debug(f"Track {media_server_rating_key} marked as processed for model {model_id}")

    def get_processing_stats(self, model_id: Optional[str] = None) -> Dict[str, int]:
        """Get processing statistics, optionally filtered by model."""
        logger.debug(f"Getting processing stats for model: {model_id}")
        with sqlite3.connect(self.db_path) as conn:
            stats = {}

            result = conn.execute("SELECT COUNT(*) as total FROM tracks").fetchone()
            stats["total_tracks"] = result[0]

            if model_id:
                # Get stats for specific model
                result = conn.execute("""
                    SELECT COUNT(*) as processed
                    FROM track_embeddings
                    WHERE model_id = ?
                """, (model_id,)).fetchone()
                stats["processed_tracks"] = result[0]
            else:
                # Get stats for any model (at least one embedding exists)
                result = conn.execute("""
                    SELECT COUNT(DISTINCT t.media_server_rating_key, t.media_server_type) as processed
                    FROM tracks t
                    INNER JOIN track_embeddings te ON (
                        t.media_server_rating_key = te.media_server_rating_key
                        AND t.media_server_type = te.media_server_type
                    )
                """).fetchone()
                stats["processed_tracks"] = result[0]

            stats["unprocessed_tracks"] = stats["total_tracks"] - stats["processed_tracks"]

        logger.debug(f"Processing stats: {stats}")
        return stats

    def get_track_by_id(self, media_server_rating_key: str) -> Optional[StoredTrack]:
        """Get a specific track by media server rating key."""
        logger.debug(f"Getting track by ID: {media_server_rating_key}")
        with sqlite3.connect(self.db_path) as conn:
            conn.row_factory = sqlite3.Row
            row = conn.execute("""
                SELECT media_server_rating_key, media_server_type, artist, album, title, filepath, added_at, last_scanned
                FROM tracks
                WHERE media_server_rating_key = ? AND media_server_type = ?
            """, (media_server_rating_key, self.media_server_type.value)).fetchone()

            if row:
                track = StoredTrack(
                    media_server_rating_key=row["media_server_rating_key"],
                    media_server_type=row["media_server_type"],
                    artist=row["artist"],
                    album=row["album"],
                    title=row["title"],
                    filepath=row["filepath"],
                    added_at=datetime.fromisoformat(row["added_at"]),
                    last_scanned=datetime.fromisoformat(row["last_scanned"])
                )
                logger.debug(f"Found track: {track.artist} - {track.title}")
                return track

            logger.debug(f"Track not found: {media_server_rating_key}")
            return None

    def get_all_tracks(self, limit: Optional[int] = None, offset: int = 0) -> List[StoredTrack]:
        """Get all tracks from the database with optional pagination."""
        query = """
            SELECT media_server_rating_key, media_server_type, artist, album, title, filepath, added_at, last_scanned
            FROM tracks
            ORDER BY artist, album, title
        """

        if limit:
            query += f" LIMIT {limit} OFFSET {offset}"

        with sqlite3.connect(self.db_path) as conn:
            conn.row_factory = sqlite3.Row
            rows = conn.execute(query).fetchall()

        return [
            StoredTrack(
                media_server_rating_key=row["media_server_rating_key"],
                media_server_type=row["media_server_type"],
                artist=row["artist"],
                album=row["album"],
                title=row["title"],
                filepath=row["filepath"],
                added_at=datetime.fromisoformat(row["added_at"]),
                last_scanned=datetime.fromisoformat(row["last_scanned"])
            )
            for row in rows
        ]

    def search_tracks(self, search_query: str, limit: Optional[int] = None, offset: int = 0) -> List[StoredTrack]:
        """Search tracks by artist, album, or title."""
        logger.debug(f"Searching tracks with query: '{search_query}', limit: {limit}, offset: {offset}")
        query = """
            SELECT media_server_rating_key, media_server_type, artist, album, title, filepath, added_at, last_scanned
            FROM tracks
            WHERE artist LIKE ? OR album LIKE ? OR title LIKE ?
            ORDER BY artist, album, title
        """

        search_pattern = f"%{search_query}%"
        params = [search_pattern, search_pattern, search_pattern]

        if limit:
            query += f" LIMIT {limit} OFFSET {offset}"

        with sqlite3.connect(self.db_path) as conn:
            conn.row_factory = sqlite3.Row
            rows = conn.execute(query, params).fetchall()

        tracks = [
            StoredTrack(
                media_server_rating_key=row["media_server_rating_key"],
                media_server_type=row["media_server_type"],
                artist=row["artist"],
                album=row["album"],
                title=row["title"],
                filepath=row["filepath"],
                added_at=datetime.fromisoformat(row["added_at"]),
                last_scanned=datetime.fromisoformat(row["last_scanned"])
            )
            for row in rows
        ]

        logger.debug(f"Search found {len(tracks)} tracks matching '{search_query}'")
        return tracks

    def count_search_tracks(self, search_query: str) -> int:
        """Count tracks matching search query."""
        query = """
            SELECT COUNT(*) as count
            FROM tracks
            WHERE artist LIKE ? OR album LIKE ? OR title LIKE ?
        """

        search_pattern = f"%{search_query}%"
        params = [search_pattern, search_pattern, search_pattern]

        with sqlite3.connect(self.db_path) as conn:
            result = conn.execute(query, params).fetchone()
            return result[0]

    def search_tracks_advanced(
        self,
        artist: Optional[str] = None,
        album: Optional[str] = None,
        title: Optional[str] = None,
        limit: Optional[int] = None,
        offset: int = 0
    ) -> List[StoredTrack]:
        """Search tracks by specific artist, album, and/or title criteria using AND logic."""
        logger.debug(f"Advanced search: artist='{artist}', album='{album}', title='{title}', limit={limit}, offset={offset}")
        conditions = []
        params = []

        if artist and artist.strip():
            conditions.append("artist LIKE ?")
            params.append(f"%{artist.strip()}%")

        if album and album.strip():
            conditions.append("album LIKE ?")
            params.append(f"%{album.strip()}%")

        if title and title.strip():
            conditions.append("title LIKE ?")
            params.append(f"%{title.strip()}%")

        if not conditions:
            # If no search criteria provided, return all tracks
            logger.debug("No search criteria provided, returning all tracks")
            return self.get_all_tracks(limit=limit, offset=offset)

        query = f"""
            SELECT media_server_rating_key, media_server_type, artist, album, title, filepath, added_at, last_scanned
            FROM tracks
            WHERE {' AND '.join(conditions)}
            ORDER BY artist, album, title
        """

        if limit:
            query += f" LIMIT {limit} OFFSET {offset}"

        with sqlite3.connect(self.db_path) as conn:
            conn.row_factory = sqlite3.Row
            rows = conn.execute(query, params).fetchall()

        tracks = [
            StoredTrack(
                media_server_rating_key=row["media_server_rating_key"],
                media_server_type=row["media_server_type"],
                artist=row["artist"],
                album=row["album"],
                title=row["title"],
                filepath=row["filepath"],
                added_at=datetime.fromisoformat(row["added_at"]),
                last_scanned=datetime.fromisoformat(row["last_scanned"])
            )
            for row in rows
        ]

        logger.debug(f"Advanced search found {len(tracks)} tracks")
        return tracks

    def count_search_tracks_advanced(
        self,
        artist: Optional[str] = None,
        album: Optional[str] = None,
        title: Optional[str] = None
    ) -> int:
        """Count tracks matching advanced search criteria."""
        conditions = []
        params = []

        if artist and artist.strip():
            conditions.append("artist LIKE ?")
            params.append(f"%{artist.strip()}%")

        if album and album.strip():
            conditions.append("album LIKE ?")
            params.append(f"%{album.strip()}%")

        if title and title.strip():
            conditions.append("title LIKE ?")
            params.append(f"%{title.strip()}%")

        if not conditions:
            # If no search criteria provided, return total count
            return self.get_track_count()

        query = f"""
            SELECT COUNT(*) as count
            FROM tracks
            WHERE {' AND '.join(conditions)}
        """

        with sqlite3.connect(self.db_path) as conn:
            result = conn.execute(query, params).fetchone()
            return result[0]

    def get_track_count(self) -> int:
        """Get total number of tracks in the database."""
        with sqlite3.connect(self.db_path) as conn:
            result = conn.execute("SELECT COUNT(*) as count FROM tracks").fetchone()
            return result[0]
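
For orientation, the sketch below shows how the TrackDatabase API above might be exercised end to end: save a scanned track, list tracks that still lack embeddings for a model, and mark them processed. It is illustrative only and not part of the wheel; the Track keyword arguments mirror to_track() above, "plex" is assumed to be a valid MediaServerType value (it is the column default), and the file path and model id are made up.

from pathlib import Path

from mycelium.domain.models import MediaServerType, Track
from mycelium.infrastructure.track_database import TrackDatabase

# Open (or create) the SQLite file; passing None would fall back to the user data directory.
db = TrackDatabase(db_path="/tmp/mycelium_tracks.db", media_server_type=MediaServerType("plex"))

# Persist one scanned track; save_tracks() reports new/updated/total counts.
track = Track(
    artist="Some Artist",
    album="Some Album",
    title="Some Title",
    filepath=Path("/music/some_artist/some_album/01 Some Title.flac"),
    media_server_rating_key="12345",
    media_server_type=MediaServerType("plex"),
)
print(db.save_tracks([track]))  # {'new': 1, 'updated': 0, 'total': 1} on first run

# Tracks still missing embeddings for an (illustrative) model id...
pending = db.get_unprocessed_tracks(model_id="example-clap-model", limit=10)

# ...and marking them processed once their embeddings are stored elsewhere.
for stored in pending:
    db.mark_track_processed(stored.media_server_rating_key, model_id="example-clap-model")

print(db.get_processing_stats(model_id="example-clap-model"))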
mycelium/main.py
ADDED
@@ -0,0 +1,183 @@
"""Main entry point for the Mycelium application."""

import atexit
import logging
import threading
from typing import Optional

import typer
from typing_extensions import Annotated

app = typer.Typer(
    name="mycelium-ai",
    help="Mycelium AI - Plex Music Recommendation System",
    no_args_is_help=True
)

logger = logging.getLogger(__name__)

# Global reference for service cleanup
_server_service = None

# Register cleanup on exit
atexit.register(lambda: cleanup_server_resources())


def cleanup_server_resources():
    """Clean up server resources, including model unloading."""
    global _server_service
    if _server_service is not None:
        try:
            logger.info("Cleaning up server resources...")
            _server_service.cleanup()
            logger.info("Server resources cleaned up successfully")
        except Exception as e:
            logger.error(f"Error during server cleanup: {e}")
        finally:
            _server_service = None


def get_server_service():
    """Get the server service instance for cleanup."""
    global _server_service
    if _server_service is None:
        # Import here to get the service from app.py
        from mycelium.api.app import service
        try:
            _server_service = service
            logger.debug("Server service reference acquired for cleanup")
        except ImportError as e:
            logger.warning(f"Could not import service for cleanup: {e}")
        except Exception as e:
            logger.warning(f"Error getting service reference: {e}")
    return _server_service


def run_server_api(config) -> None:
    """Run the FastAPI server."""
    # Lazy import uvicorn only when needed
    import uvicorn

    logger.info(f"Starting API server on {config.api.host}:{config.api.port}")
    uvicorn.run(
        "mycelium.api.app:app",
        host=config.api.host,
        port=config.api.port,
        reload=config.api.reload
    )


def run_server_mode(config) -> None:
    """Run server mode (API + Frontend served by FastAPI)."""
    # Lazy import uvicorn only when needed
    import uvicorn

    logger.info("Starting Mycelium Server...")

    # Get service reference for cleanup
    get_server_service()

    try:
        logger.info(f"Starting server on {config.api.host}:{config.api.port}")
        logger.info("Frontend will be served at the same address")
        uvicorn.run(
            "mycelium.api.app:app",
            host=config.api.host,
            port=config.api.port
        )
    except KeyboardInterrupt:
        logger.info("Shutting down server...")
        cleanup_server_resources()
    except Exception as e:
        logger.error(f"Server error: {e}")
        cleanup_server_resources()
        raise


def run_client_mode(
    client_config
) -> None:
    """Run client mode (GPU worker + Client API with Frontend)."""
    # Lazy import client dependencies only when needed
    import uvicorn
    from mycelium.client import run_client

    logger.info("Starting Mycelium Client...")

    client_thread = threading.Thread(
        target=run_client
    )
    client_thread.daemon = True
    client_thread.start()

    # Start the client API server in main thread
    try:
        host = client_config.client_api.host
        port = client_config.client_api.port
        logger.info(f"Starting client API server on {host}:{port}")
        logger.info("Frontend will be served at the same address")
        uvicorn.run(
            "mycelium.api.client_app:app",
            host=host,
            port=port
        )
    except KeyboardInterrupt:
        logger.info("Shutting down client...")


@app.command()
def server() -> None:
    """Start server mode (API + Frontend)."""
    try:
        # Lazy import config only when needed
        from mycelium.config import MyceliumConfig

        config = MyceliumConfig.load_from_yaml()
        config.setup_logging()

        run_server_mode(config)
    except KeyboardInterrupt:
        logger.info("Server interrupted by user")
        cleanup_server_resources()
        typer.echo("\nServer stopped")
        raise typer.Exit(130)
    except Exception as e:
        cleanup_server_resources()
        typer.echo(f"Server error: {e}", err=True)
        raise typer.Exit(1)


@app.command()
def client() -> None:
    """Start client mode (GPU worker)."""
    try:
        # Lazy import client config only when needed
        from mycelium.client_config import MyceliumClientConfig

        client_config = MyceliumClientConfig.load_from_yaml()
        client_config.setup_logging()

        run_client_mode(
            client_config=client_config
        )
    except Exception as e:
        typer.echo(f"Client error: {e}", err=True)
        raise typer.Exit(1)


def main() -> None:
    """Main entry point for the CLI application."""
    try:
        app()
    except KeyboardInterrupt:
        logger.info("Operation cancelled by user")
        cleanup_server_resources()
        typer.echo("\nOperation cancelled by user")
        raise typer.Exit(130)
    except Exception:
        cleanup_server_resources()
        raise


if __name__ == "__main__":
    main()
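
Since the Typer application above is importable as mycelium.main:app, it can be exercised in-process with Typer's test runner. A minimal sketch, not part of the wheel; only the generated help text is requested here, so neither command body runs and no uvicorn server or config file is needed.

from typer.testing import CliRunner

from mycelium.main import app

runner = CliRunner()

# --help renders Typer's generated usage text without invoking either command body.
result = runner.invoke(app, ["--help"])
assert result.exit_code == 0
print(result.output)  # lists the `server` and `client` commands defined above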