mycelium_ai-0.5.0-py3-none-any.whl
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- mycelium/__init__.py +0 -0
- mycelium/api/__init__.py +0 -0
- mycelium/api/app.py +1147 -0
- mycelium/api/client_app.py +170 -0
- mycelium/api/generated_sources/__init__.py +0 -0
- mycelium/api/generated_sources/server_schemas/__init__.py +97 -0
- mycelium/api/generated_sources/server_schemas/api/__init__.py +5 -0
- mycelium/api/generated_sources/server_schemas/api/default_api.py +2473 -0
- mycelium/api/generated_sources/server_schemas/api_client.py +766 -0
- mycelium/api/generated_sources/server_schemas/api_response.py +25 -0
- mycelium/api/generated_sources/server_schemas/configuration.py +434 -0
- mycelium/api/generated_sources/server_schemas/exceptions.py +166 -0
- mycelium/api/generated_sources/server_schemas/models/__init__.py +41 -0
- mycelium/api/generated_sources/server_schemas/models/api_section.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/chroma_section.py +69 -0
- mycelium/api/generated_sources/server_schemas/models/clap_section.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/compute_on_server200_response.py +79 -0
- mycelium/api/generated_sources/server_schemas/models/compute_on_server_request.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/compute_text_search_request.py +69 -0
- mycelium/api/generated_sources/server_schemas/models/config_request.py +81 -0
- mycelium/api/generated_sources/server_schemas/models/config_response.py +107 -0
- mycelium/api/generated_sources/server_schemas/models/create_playlist_request.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/get_similar_by_track200_response.py +143 -0
- mycelium/api/generated_sources/server_schemas/models/library_stats_response.py +77 -0
- mycelium/api/generated_sources/server_schemas/models/logging_section.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/media_server_section.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/playlist_response.py +73 -0
- mycelium/api/generated_sources/server_schemas/models/plex_section.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/processing_response.py +90 -0
- mycelium/api/generated_sources/server_schemas/models/save_config_response.py +73 -0
- mycelium/api/generated_sources/server_schemas/models/scan_library_response.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/search_result_response.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/server_section.py +67 -0
- mycelium/api/generated_sources/server_schemas/models/stop_processing_response.py +71 -0
- mycelium/api/generated_sources/server_schemas/models/task_status_response.py +87 -0
- mycelium/api/generated_sources/server_schemas/models/track_database_stats.py +75 -0
- mycelium/api/generated_sources/server_schemas/models/track_response.py +77 -0
- mycelium/api/generated_sources/server_schemas/models/tracks_list_response.py +81 -0
- mycelium/api/generated_sources/server_schemas/rest.py +329 -0
- mycelium/api/generated_sources/server_schemas/test/__init__.py +0 -0
- mycelium/api/generated_sources/server_schemas/test/test_api_section.py +57 -0
- mycelium/api/generated_sources/server_schemas/test/test_chroma_section.py +55 -0
- mycelium/api/generated_sources/server_schemas/test/test_clap_section.py +60 -0
- mycelium/api/generated_sources/server_schemas/test/test_compute_on_server200_response.py +52 -0
- mycelium/api/generated_sources/server_schemas/test/test_compute_on_server_request.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_compute_text_search_request.py +54 -0
- mycelium/api/generated_sources/server_schemas/test/test_config_request.py +66 -0
- mycelium/api/generated_sources/server_schemas/test/test_config_response.py +97 -0
- mycelium/api/generated_sources/server_schemas/test/test_create_playlist_request.py +60 -0
- mycelium/api/generated_sources/server_schemas/test/test_default_api.py +150 -0
- mycelium/api/generated_sources/server_schemas/test/test_get_similar_by_track200_response.py +61 -0
- mycelium/api/generated_sources/server_schemas/test/test_library_stats_response.py +63 -0
- mycelium/api/generated_sources/server_schemas/test/test_logging_section.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_media_server_section.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_playlist_response.py +58 -0
- mycelium/api/generated_sources/server_schemas/test/test_plex_section.py +56 -0
- mycelium/api/generated_sources/server_schemas/test/test_processing_response.py +61 -0
- mycelium/api/generated_sources/server_schemas/test/test_save_config_response.py +58 -0
- mycelium/api/generated_sources/server_schemas/test/test_scan_library_response.py +61 -0
- mycelium/api/generated_sources/server_schemas/test/test_search_result_response.py +69 -0
- mycelium/api/generated_sources/server_schemas/test/test_server_section.py +53 -0
- mycelium/api/generated_sources/server_schemas/test/test_stop_processing_response.py +55 -0
- mycelium/api/generated_sources/server_schemas/test/test_task_status_response.py +71 -0
- mycelium/api/generated_sources/server_schemas/test/test_track_database_stats.py +60 -0
- mycelium/api/generated_sources/server_schemas/test/test_track_response.py +63 -0
- mycelium/api/generated_sources/server_schemas/test/test_tracks_list_response.py +75 -0
- mycelium/api/generated_sources/worker_schemas/__init__.py +61 -0
- mycelium/api/generated_sources/worker_schemas/api/__init__.py +5 -0
- mycelium/api/generated_sources/worker_schemas/api/default_api.py +318 -0
- mycelium/api/generated_sources/worker_schemas/api_client.py +766 -0
- mycelium/api/generated_sources/worker_schemas/api_response.py +25 -0
- mycelium/api/generated_sources/worker_schemas/configuration.py +434 -0
- mycelium/api/generated_sources/worker_schemas/exceptions.py +166 -0
- mycelium/api/generated_sources/worker_schemas/models/__init__.py +23 -0
- mycelium/api/generated_sources/worker_schemas/models/save_config_response.py +73 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_clap_section.py +75 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_client_api_section.py +69 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_client_section.py +79 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_config_request.py +73 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_config_response.py +89 -0
- mycelium/api/generated_sources/worker_schemas/models/worker_logging_section.py +67 -0
- mycelium/api/generated_sources/worker_schemas/rest.py +329 -0
- mycelium/api/generated_sources/worker_schemas/test/__init__.py +0 -0
- mycelium/api/generated_sources/worker_schemas/test/test_default_api.py +45 -0
- mycelium/api/generated_sources/worker_schemas/test/test_save_config_response.py +58 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_clap_section.py +60 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_client_api_section.py +55 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_client_section.py +65 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_config_request.py +59 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_config_response.py +89 -0
- mycelium/api/generated_sources/worker_schemas/test/test_worker_logging_section.py +53 -0
- mycelium/api/worker_models.py +99 -0
- mycelium/application/__init__.py +11 -0
- mycelium/application/job_queue.py +323 -0
- mycelium/application/library_management_use_cases.py +292 -0
- mycelium/application/search_use_cases.py +96 -0
- mycelium/application/services.py +340 -0
- mycelium/client.py +554 -0
- mycelium/client_config.py +251 -0
- mycelium/client_frontend_dist/404.html +1 -0
- mycelium/client_frontend_dist/_next/static/a4iyRdfsvkjdyMAK9cE9Y/_buildManifest.js +1 -0
- mycelium/client_frontend_dist/_next/static/a4iyRdfsvkjdyMAK9cE9Y/_ssgManifest.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/964-830f77d7ce1c2463.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/app/_not-found/page-d25eede5a9099bd3.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/app/layout-9b3d32f96dfe13b6.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/app/page-cc6bad295789134e.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/framework-7c95b8e5103c9e90.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/main-6b37be50736577a2.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/main-app-4153d115599d3126.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/pages/_app-0a0020ddd67f79cf.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/pages/_error-03529f2c21436739.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- mycelium/client_frontend_dist/_next/static/chunks/webpack-c81e624915b2ea70.js +1 -0
- mycelium/client_frontend_dist/_next/static/css/1eb7f0e2c78e0734.css +1 -0
- mycelium/client_frontend_dist/favicon.ico +0 -0
- mycelium/client_frontend_dist/file.svg +1 -0
- mycelium/client_frontend_dist/globe.svg +1 -0
- mycelium/client_frontend_dist/index.html +1 -0
- mycelium/client_frontend_dist/index.txt +20 -0
- mycelium/client_frontend_dist/next.svg +1 -0
- mycelium/client_frontend_dist/vercel.svg +1 -0
- mycelium/client_frontend_dist/window.svg +1 -0
- mycelium/config.py +346 -0
- mycelium/domain/__init__.py +13 -0
- mycelium/domain/models.py +71 -0
- mycelium/domain/repositories.py +98 -0
- mycelium/domain/worker.py +77 -0
- mycelium/frontend_dist/404.html +1 -0
- mycelium/frontend_dist/_next/static/chunks/4bd1b696-cf72ae8a39fa05aa.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/964-830f77d7ce1c2463.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/app/_not-found/page-d25eede5a9099bd3.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/app/layout-9b3d32f96dfe13b6.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/app/page-a761463485e0540b.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/framework-7c95b8e5103c9e90.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/main-6b37be50736577a2.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/main-app-4153d115599d3126.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/pages/_app-0a0020ddd67f79cf.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/pages/_error-03529f2c21436739.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- mycelium/frontend_dist/_next/static/chunks/webpack-c81e624915b2ea70.js +1 -0
- mycelium/frontend_dist/_next/static/css/1eb7f0e2c78e0734.css +1 -0
- mycelium/frontend_dist/_next/static/glVJ0yJSL0zWN7anTTG3_/_buildManifest.js +1 -0
- mycelium/frontend_dist/_next/static/glVJ0yJSL0zWN7anTTG3_/_ssgManifest.js +1 -0
- mycelium/frontend_dist/favicon.ico +0 -0
- mycelium/frontend_dist/file.svg +1 -0
- mycelium/frontend_dist/globe.svg +1 -0
- mycelium/frontend_dist/index.html +10 -0
- mycelium/frontend_dist/index.txt +20 -0
- mycelium/frontend_dist/next.svg +1 -0
- mycelium/frontend_dist/vercel.svg +1 -0
- mycelium/frontend_dist/window.svg +1 -0
- mycelium/infrastructure/__init__.py +17 -0
- mycelium/infrastructure/chroma_adapter.py +232 -0
- mycelium/infrastructure/clap_adapter.py +280 -0
- mycelium/infrastructure/plex_adapter.py +145 -0
- mycelium/infrastructure/track_database.py +467 -0
- mycelium/main.py +183 -0
- mycelium_ai-0.5.0.dist-info/METADATA +312 -0
- mycelium_ai-0.5.0.dist-info/RECORD +164 -0
- mycelium_ai-0.5.0.dist-info/WHEEL +5 -0
- mycelium_ai-0.5.0.dist-info/entry_points.txt +2 -0
- mycelium_ai-0.5.0.dist-info/licenses/LICENSE +21 -0
- mycelium_ai-0.5.0.dist-info/top_level.txt +1 -0
mycelium/api/generated_sources/worker_schemas/test/test_worker_config_response.py
@@ -0,0 +1,89 @@
+# coding: utf-8
+
+"""
+    Mycelium Worker (Client) API
+
+    Configuration API for Mycelium client workers
+
+    The version of the OpenAPI document: 0.1.0
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+import datetime
+
+from mycelium.api.generated_sources.worker_schemas.models.worker_config_response import WorkerConfigResponse # noqa: E501
+
+class TestWorkerConfigResponse(unittest.TestCase):
+    """WorkerConfigResponse unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional) -> WorkerConfigResponse:
+        """Test WorkerConfigResponse
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # uncomment below to create an instance of `WorkerConfigResponse`
+        """
+        model = WorkerConfigResponse() # noqa: E501
+        if include_optional:
+            return WorkerConfigResponse(
+                client = mycelium.api.generated_sources.worker_schemas.models.worker_client_section.WorkerClientSection(
+                    server_host = '',
+                    server_port = 56,
+                    download_queue_size = 56,
+                    job_queue_size = 56,
+                    poll_interval = 1.337,
+                    download_workers = 56,
+                    gpu_batch_size = 56, ),
+                client_api = mycelium.api.generated_sources.worker_schemas.models.worker_client_api_section.WorkerClientAPISection(
+                    host = '',
+                    port = 56, ),
+                clap = mycelium.api.generated_sources.worker_schemas.models.worker_clap_section.WorkerClapSection(
+                    model_id = '',
+                    target_sr = 56,
+                    chunk_duration_s = 56,
+                    num_chunks = 56,
+                    max_load_duration_s = 56, ),
+                logging = mycelium.api.generated_sources.worker_schemas.models.worker_logging_section.WorkerLoggingSection(
+                    level = '', )
+            )
+        else:
+            return WorkerConfigResponse(
+                client = mycelium.api.generated_sources.worker_schemas.models.worker_client_section.WorkerClientSection(
+                    server_host = '',
+                    server_port = 56,
+                    download_queue_size = 56,
+                    job_queue_size = 56,
+                    poll_interval = 1.337,
+                    download_workers = 56,
+                    gpu_batch_size = 56, ),
+                client_api = mycelium.api.generated_sources.worker_schemas.models.worker_client_api_section.WorkerClientAPISection(
+                    host = '',
+                    port = 56, ),
+                clap = mycelium.api.generated_sources.worker_schemas.models.worker_clap_section.WorkerClapSection(
+                    model_id = '',
+                    target_sr = 56,
+                    chunk_duration_s = 56,
+                    num_chunks = 56,
+                    max_load_duration_s = 56, ),
+                logging = mycelium.api.generated_sources.worker_schemas.models.worker_logging_section.WorkerLoggingSection(
+                    level = '', ),
+            )
+        """
+
+    def testWorkerConfigResponse(self):
+        """Test WorkerConfigResponse"""
+        # inst_req_only = self.make_instance(include_optional=False)
+        # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+    unittest.main()
mycelium/api/generated_sources/worker_schemas/test/test_worker_logging_section.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+
+"""
+    Mycelium Worker (Client) API
+
+    Configuration API for Mycelium client workers
+
+    The version of the OpenAPI document: 0.1.0
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+""" # noqa: E501
+
+
+import unittest
+import datetime
+
+from mycelium.api.generated_sources.worker_schemas.models.worker_logging_section import WorkerLoggingSection # noqa: E501
+
+class TestWorkerLoggingSection(unittest.TestCase):
+    """WorkerLoggingSection unit test stubs"""
+
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def make_instance(self, include_optional) -> WorkerLoggingSection:
+        """Test WorkerLoggingSection
+            include_option is a boolean, when False only required
+            params are included, when True both required and
+            optional params are included """
+        # uncomment below to create an instance of `WorkerLoggingSection`
+        """
+        model = WorkerLoggingSection() # noqa: E501
+        if include_optional:
+            return WorkerLoggingSection(
+                level = ''
+            )
+        else:
+            return WorkerLoggingSection(
+                level = '',
+            )
+        """
+
+    def testWorkerLoggingSection(self):
+        """Test WorkerLoggingSection"""
+        # inst_req_only = self.make_instance(include_optional=False)
+        # inst_req_and_optional = self.make_instance(include_optional=True)
+
+if __name__ == '__main__':
+    unittest.main()
mycelium/api/worker_models.py
@@ -0,0 +1,99 @@
+"""API models for worker coordination."""
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+from ..domain.worker import TaskType, TaskStatus
+
+
+class WorkerRegistrationRequest(BaseModel):
+    """Request model for worker registration."""
+    worker_id: str
+    ip_address: str
+
+
+class WorkerRegistrationResponse(BaseModel):
+    """Response model for worker registration."""
+    worker_id: str
+    registration_time: str
+    message: str
+
+
+class JobRequest(BaseModel):
+    """Response model for job requests."""
+    task_id: str
+    task_type: TaskType
+    track_id: str
+    download_url: str
+    text_query: Optional[str] = None  # For text search tasks
+    audio_filename: Optional[str] = None  # For audio search tasks
+    n_results: Optional[int] = None  # For search tasks
+
+
+class TaskResultRequest(BaseModel):
+    """Request model for task result submission."""
+    task_id: str
+    track_id: str
+    status: TaskStatus
+    embedding: Optional[List[float]] = None
+    error_message: Optional[str] = None
+    search_results: Optional[List[dict]] = None
+
+
+class TaskResultResponse(BaseModel):
+    """Response model for task result submission."""
+    success: bool
+    message: str
+
+
+class ConfirmationRequiredResponse(BaseModel):
+    """Response when user confirmation is required."""
+    status: str
+    message: str
+    track_id: str
+
+
+class ComputeOnServerRequest(BaseModel):
+    """Request model for server-side computation."""
+    track_id: str
+
+
+class WorkerProcessingResponse(BaseModel):
+    """Response when worker processing is initiated."""
+    status: str
+    message: str
+    track_id: Optional[str] = None
+    task_id: Optional[str] = None
+    tasks_created: Optional[int] = None
+    active_workers: Optional[int] = None
+
+
+class NoWorkersResponse(BaseModel):
+    """Response when no workers are available."""
+    status: str
+    message: str
+    active_workers: int
+    confirmation_required: bool
+
+
+class SearchProcessingResponse(BaseModel):
+    """Response when search task is processing on workers."""
+    status: str
+    message: str
+    task_id: str
+    query: Optional[str] = None  # For text search
+    filename: Optional[str] = None  # For audio search
+
+
+class SearchConfirmationRequiredResponse(BaseModel):
+    """Response when search requires user confirmation for server processing."""
+    status: str
+    query: Optional[str] = None  # For text search
+    filename: Optional[str] = None  # For audio search
+
+
+class ComputeSearchOnServerRequest(BaseModel):
+    """Request model for server-side search computation."""
+    query: Optional[str] = None  # For text search
+    n_results: int = 10
mycelium/application/job_queue.py
@@ -0,0 +1,323 @@
+"""Job queue and worker management service."""
+import logging
+import shutil
+import tempfile
+import uuid
+from datetime import datetime, timedelta
+from pathlib import Path
+from threading import Lock
+from typing import List, Optional, Dict
+
+from ..domain.worker import Worker, Task, TaskResult, TaskType, TaskStatus, ContextType
+
+logger = logging.getLogger(__name__)
+
+
+class JobQueueService:
+    """Service for managing job queue and worker coordination."""
+
+    def __init__(self):
+        self._workers: Dict[str, Worker] = {}
+        self._tasks: Dict[str, Task] = {}
+        self._pending_tasks: List[str] = []
+        self._lock = Lock()
+        # Temporary directory for audio files to avoid base64 encoding large files
+        self._temp_dir = Path(tempfile.mkdtemp(prefix="mycelium_audio_"))
+        self._cleanup_orphan_files()
+        self._temp_files: Dict[str, Path] = {}  # task_id -> temp_file_path
+
+    def _register_worker_internal(self, worker_id: str, ip_address: str) -> Worker:
+        """Internal worker registration without lock (assumes lock is already held)."""
+        now = datetime.now()
+        if worker_id in self._workers:
+            # Update existing worker
+            worker = self._workers[worker_id]
+            worker.last_heartbeat = now
+            worker.is_active = True
+        else:
+            # Create new worker
+            worker = Worker(
+                id=worker_id,
+                ip_address=ip_address,
+                registration_time=now,
+                last_heartbeat=now,
+                is_active=True
+            )
+            self._workers[worker_id] = worker
+
+        return worker
+
+    def register_worker(self, worker_id: str, ip_address: str) -> Worker:
+        """Register a new worker or update existing one."""
+        with self._lock:
+            return self._register_worker_internal(worker_id, ip_address)
+
+    def get_active_workers(self) -> List[Worker]:
+        """Get list of active workers."""
+        with self._lock:
+            # Clean up inactive workers
+            cutoff_time = datetime.now() - timedelta(seconds=10)
+            for worker in self._workers.values():
+                if worker.last_heartbeat < cutoff_time:
+                    worker.is_active = False
+
+            return [w for w in self._workers.values() if w.is_active]
+
+    def create_task(self, track_id: str = "", download_url: str = "",
+                    audio_data: bytes = None, audio_filename: str = "",
+                    n_results: int = 10, prioritize: bool = True,
+                    context_type: ContextType = None) -> Task:
+        """Create a new task and add it to the queue.
+
+        Can create either:
+        - Traditional embedding task: provide track_id and download_url
+        - Audio search task: provide audio_data and audio_filename
+        """
+        with self._lock:
+            task_id = str(uuid.uuid4())
+
+            # Determine task type based on provided parameters
+            if audio_data is not None:
+                # Audio search task - create temporary file and internal URL
+                task_type = TaskType.COMPUTE_AUDIO_EMBEDDING
+
+                # Create temporary file for audio data to avoid base64 encoding overhead
+                temp_file = self._temp_dir / f"audio_task_{task_id}.tmp"
+                temp_file.write_bytes(audio_data)
+                self._temp_files[task_id] = temp_file
+
+                # Create download URL for the worker (internal URL)
+                download_url = f"/download_audio/{task_id}"
+                track_id = ""  # Not needed for audio search
+
+                task = Task(
+                    task_id=task_id,
+                    task_type=task_type,
+                    track_id=track_id,
+                    download_url=download_url,
+                    audio_filename=audio_filename,
+                    n_results=n_results,
+                    context_type=context_type
+                )
+            else:
+                # Traditional embedding task
+                task_type = TaskType.COMPUTE_AUDIO_EMBEDDING
+
+                task = Task(
+                    task_id=task_id,
+                    task_type=task_type,
+                    track_id=track_id,
+                    download_url=download_url,
+                    context_type=context_type
+                )
+
+            self._tasks[task_id] = task
+            if prioritize:
+                self._pending_tasks.insert(0, task_id)
+            else:
+                self._pending_tasks.append(task_id)
+            return task
+
+    def create_text_search_task(self, text_query: str, n_results: int = 10, prioritize: bool = True) -> Task:
+        """Create a new text search task and add it to the queue."""
+        with self._lock:
+            task_id = str(uuid.uuid4())
+            task = Task(
+                task_id=task_id,
+                task_type=TaskType.COMPUTE_TEXT_EMBEDDING,
+                context_type=ContextType.TEXT_SEARCH,
+                track_id="",  # Not needed for text search
+                download_url="",  # Not needed for text search
+                text_query=text_query,
+                n_results=n_results
+            )
+            self._tasks[task_id] = task
+            if prioritize:
+                self._pending_tasks.insert(0, task_id)
+            else:
+                self._pending_tasks.append(task_id)
+            return task
+
+    def get_next_job(self, worker_id: str, ip_address: str) -> Optional[Task]:
+        """Get the next job for a worker."""
+        with self._lock:
+            # Update worker heartbeat
+            if worker_id in self._workers:
+                self._workers[worker_id].last_heartbeat = datetime.now()
+            else:
+                logger.warning(f"Received heartbeat from unknown worker, registering {worker_id}...")
+                self._register_worker_internal(worker_id=worker_id, ip_address=ip_address)
+
+            # Get next pending task
+            if not self._pending_tasks:
+                return None
+
+            task_id = self._pending_tasks.pop(0)
+            task = self._tasks[task_id]
+            task.status = TaskStatus.IN_PROGRESS
+            task.assigned_worker_id = worker_id
+            task.started_at = datetime.now()
+
+            return task
+
+    def submit_result(self, result: TaskResult) -> bool:
+        """Submit the result of a completed task."""
+        with self._lock:
+            if result.task_id not in self._tasks:
+                return False
+
+            task = self._tasks[result.task_id]
+            task.status = result.status
+            task.completed_at = datetime.now()
+
+            if result.error_message:
+                task.error_message = result.error_message
+
+            # Store search results for search tasks
+            if result.search_results:
+                task.search_results = result.search_results
+
+            return True
+
+    def get_task_status(self, task_id: str) -> Optional[Task]:
+        """Get the status of a specific task."""
+        with self._lock:
+            return self._tasks.get(task_id)
+
+    def wait_for_task_completion(self, task_id: str, timeout_seconds: int = 300) -> Optional[Task]:
+        """Wait for a task to complete with timeout."""
+        import time
+
+        start_time = time.time()
+        while time.time() - start_time < timeout_seconds:
+            task = self.get_task_status(task_id)
+            if task and task.status in [TaskStatus.SUCCESS, TaskStatus.FAILED]:
+                return task
+            time.sleep(1)  # Poll every second
+
+        return None
+
+    def get_queue_stats(self) -> Dict:
+        """Get statistics about the job queue."""
+        with self._lock:
+            active_workers = len([w for w in self._workers.values() if w.is_active])
+            pending_tasks = len(self._pending_tasks)
+            in_progress_tasks = len([t for t in self._tasks.values() if t.status == TaskStatus.IN_PROGRESS])
+            completed_tasks = len([t for t in self._tasks.values() if t.status == TaskStatus.SUCCESS])
+            failed_tasks = len([t for t in self._tasks.values() if t.status == TaskStatus.FAILED])
+
+            return {
+                "active_workers": active_workers,
+                "pending_tasks": pending_tasks,
+                "in_progress_tasks": in_progress_tasks,
+                "completed_tasks": completed_tasks,
+                "failed_tasks": failed_tasks,
+                "total_tasks": len(self._tasks)
+            }
+
+    def clear_pending_tasks(self) -> int:
+        """Clear all pending tasks from the queue. Returns number of tasks cleared."""
+        with self._lock:
+            cleared_count = len(self._pending_tasks)
+
+            # Mark all pending tasks as cancelled
+            for task_id in self._pending_tasks:
+                if task_id in self._tasks:
+                    self._tasks[task_id].status = TaskStatus.FAILED
+                    self._tasks[task_id].error_message = "Processing stopped by user"
+                    self._tasks[task_id].completed_at = datetime.now()
+
+            # Clear the pending tasks list
+            self._pending_tasks.clear()
+
+            # When stopping, clean up ALL in-progress tasks, not just from inactive workers
+            # This ensures processing state is properly cleared even if workers are still active
+            in_progress_cleaned = self._cleanup_all_in_progress_tasks()
+
+            return cleared_count + in_progress_cleaned
+
+    def _cleanup_stale_tasks(self) -> int:
+        """Clean up tasks assigned to inactive workers. Returns number of tasks cleaned up."""
+        active_worker_ids = {w.id for w in self._workers.values() if w.is_active}
+        cleaned_count = 0
+
+        for task in self._tasks.values():
+            # Mark IN_PROGRESS tasks from inactive workers as failed
+            if (task.status == TaskStatus.IN_PROGRESS and
+                    task.assigned_worker_id and
+                    task.assigned_worker_id not in active_worker_ids):
+                task.status = TaskStatus.FAILED
+                task.error_message = "Worker became inactive during processing"
+                task.completed_at = datetime.now()
+                cleaned_count += 1
+
+        return cleaned_count
+
+    def _cleanup_all_in_progress_tasks(self) -> int:
+        """Clean up ALL in-progress tasks when stopping processing. Returns number of tasks cleaned up."""
+        cleaned_count = 0
+
+        for task in self._tasks.values():
+            # Mark ALL IN_PROGRESS tasks as failed when explicitly stopping
+            if task.status == TaskStatus.IN_PROGRESS:
+                task.status = TaskStatus.FAILED
+                task.error_message = "Processing stopped by user request"
+                task.completed_at = datetime.now()
+                cleaned_count += 1
+
+        return cleaned_count
+
+    def cleanup_stale_tasks(self) -> int:
+        """Public method to clean up stale tasks. Returns number of tasks cleaned up."""
+        with self._lock:
+            return self._cleanup_stale_tasks()
+
+    def has_active_processing(self) -> bool:
+        """Check if there are any library processing tasks currently being processed or pending.
+
+        Note: This excludes search tasks (text/audio search) which have their own loading states.
+        Only counts tasks with AUDIO_PROCESSING context for library processing status.
+        """
+        with self._lock:
+            # Clean up stale in-progress tasks from inactive workers first
+            self._cleanup_stale_tasks()
+
+            # Only count library processing tasks, not search tasks
+            library_pending_tasks = [
+                task_id for task_id in self._pending_tasks
+                if self._tasks.get(task_id) and self._tasks[task_id].context_type == ContextType.AUDIO_PROCESSING
+            ]
+
+            library_in_progress_tasks = [
+                t for t in self._tasks.values()
+                if t.status == TaskStatus.IN_PROGRESS and t.context_type == ContextType.AUDIO_PROCESSING
+            ]
+
+            return len(library_pending_tasks) > 0 or len(library_in_progress_tasks) > 0
+
+    def get_audio_task_file(self, task_id: str) -> Optional[Path]:
+        """Get the temporary file path for an audio task."""
+        with self._lock:
+            return self._temp_files.get(task_id)
+
+    def cleanup_task_files(self, task_id: str) -> None:
+        """Clean up temporary files for a completed task."""
+        with self._lock:
+            if task_id in self._temp_files:
+                temp_file = self._temp_files[task_id]
+                try:
+                    if temp_file.exists():
+                        temp_file.unlink()
+                except OSError:
+                    pass  # Ignore cleanup errors
+                del self._temp_files[task_id]
+
+    def _cleanup_orphan_files(self):
+        """ Clean up any orphaned temporary files in the temp directory on startup. """
+        try:
+            if self._temp_dir.exists():
+                shutil.rmtree(self._temp_dir)
+            self._temp_dir.mkdir(parents=True, exist_ok=True)
+            logging.info(f"Temp dir recreated in: {self._temp_dir}")
+        except Exception as e:
+            logging.error(f"Failed to clean up temp dir {self._temp_dir}: {e}")
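
For orientation, here is a minimal usage sketch of how the server code appears to drive `JobQueueService`: a worker registers, a search task is queued, the worker polls for it, and the result is reported back. It assumes the `TaskResult` type in `mycelium/domain/worker.py` accepts, as keyword arguments, the fields that `submit_result` reads and that `TaskResultRequest` in `worker_models.py` mirrors (`task_id`, `track_id`, `status`, `error_message`, `search_results`); the worker id, address, and search payload are illustrative.

```python
# Usage sketch under the assumptions stated above; not part of the package.
from mycelium.application.job_queue import JobQueueService
from mycelium.domain.worker import TaskResult, TaskStatus

queue = JobQueueService()
queue.register_worker(worker_id="worker-1", ip_address="10.0.0.5")

# Queue a text search; prioritized tasks go to the front of the pending list.
task = queue.create_text_search_task("mellow jazz guitar", n_results=5)

# A polling worker picks up the next pending task and marks it IN_PROGRESS.
job = queue.get_next_job(worker_id="worker-1", ip_address="10.0.0.5")

# The worker reports its result (fields assumed, mirroring TaskResultRequest).
queue.submit_result(TaskResult(
    task_id=job.task_id,
    track_id=job.track_id,
    status=TaskStatus.SUCCESS,
    search_results=[{"track_id": "example-track-id", "score": 0.91}],
))
print(queue.get_queue_stats())
```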