nedo_vision_worker-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nedo_vision_worker/__init__.py +10 -0
- nedo_vision_worker/cli.py +195 -0
- nedo_vision_worker/config/ConfigurationManager.py +196 -0
- nedo_vision_worker/config/__init__.py +1 -0
- nedo_vision_worker/database/DatabaseManager.py +219 -0
- nedo_vision_worker/database/__init__.py +1 -0
- nedo_vision_worker/doctor.py +453 -0
- nedo_vision_worker/initializer/AppInitializer.py +78 -0
- nedo_vision_worker/initializer/__init__.py +1 -0
- nedo_vision_worker/models/__init__.py +15 -0
- nedo_vision_worker/models/ai_model.py +29 -0
- nedo_vision_worker/models/auth.py +14 -0
- nedo_vision_worker/models/config.py +9 -0
- nedo_vision_worker/models/dataset_source.py +30 -0
- nedo_vision_worker/models/logs.py +9 -0
- nedo_vision_worker/models/ppe_detection.py +39 -0
- nedo_vision_worker/models/ppe_detection_label.py +20 -0
- nedo_vision_worker/models/restricted_area_violation.py +20 -0
- nedo_vision_worker/models/user.py +10 -0
- nedo_vision_worker/models/worker_source.py +19 -0
- nedo_vision_worker/models/worker_source_pipeline.py +21 -0
- nedo_vision_worker/models/worker_source_pipeline_config.py +24 -0
- nedo_vision_worker/models/worker_source_pipeline_debug.py +15 -0
- nedo_vision_worker/models/worker_source_pipeline_detection.py +14 -0
- nedo_vision_worker/protos/AIModelService_pb2.py +46 -0
- nedo_vision_worker/protos/AIModelService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/DatasetSourceService_pb2.py +46 -0
- nedo_vision_worker/protos/DatasetSourceService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/HumanDetectionService_pb2.py +44 -0
- nedo_vision_worker/protos/HumanDetectionService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/PPEDetectionService_pb2.py +46 -0
- nedo_vision_worker/protos/PPEDetectionService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/VisionWorkerService_pb2.py +72 -0
- nedo_vision_worker/protos/VisionWorkerService_pb2_grpc.py +471 -0
- nedo_vision_worker/protos/WorkerSourcePipelineService_pb2.py +64 -0
- nedo_vision_worker/protos/WorkerSourcePipelineService_pb2_grpc.py +312 -0
- nedo_vision_worker/protos/WorkerSourceService_pb2.py +50 -0
- nedo_vision_worker/protos/WorkerSourceService_pb2_grpc.py +183 -0
- nedo_vision_worker/protos/__init__.py +1 -0
- nedo_vision_worker/repositories/AIModelRepository.py +44 -0
- nedo_vision_worker/repositories/DatasetSourceRepository.py +150 -0
- nedo_vision_worker/repositories/PPEDetectionRepository.py +112 -0
- nedo_vision_worker/repositories/RestrictedAreaRepository.py +88 -0
- nedo_vision_worker/repositories/WorkerSourcePipelineDebugRepository.py +90 -0
- nedo_vision_worker/repositories/WorkerSourcePipelineDetectionRepository.py +48 -0
- nedo_vision_worker/repositories/WorkerSourcePipelineRepository.py +174 -0
- nedo_vision_worker/repositories/WorkerSourceRepository.py +46 -0
- nedo_vision_worker/repositories/__init__.py +1 -0
- nedo_vision_worker/services/AIModelClient.py +362 -0
- nedo_vision_worker/services/ConnectionInfoClient.py +57 -0
- nedo_vision_worker/services/DatasetSourceClient.py +88 -0
- nedo_vision_worker/services/FileToRTMPServer.py +78 -0
- nedo_vision_worker/services/GrpcClientBase.py +155 -0
- nedo_vision_worker/services/GrpcClientManager.py +141 -0
- nedo_vision_worker/services/ImageUploadClient.py +82 -0
- nedo_vision_worker/services/PPEDetectionClient.py +108 -0
- nedo_vision_worker/services/RTSPtoRTMPStreamer.py +98 -0
- nedo_vision_worker/services/RestrictedAreaClient.py +100 -0
- nedo_vision_worker/services/SystemUsageClient.py +77 -0
- nedo_vision_worker/services/VideoStreamClient.py +161 -0
- nedo_vision_worker/services/WorkerSourceClient.py +215 -0
- nedo_vision_worker/services/WorkerSourcePipelineClient.py +393 -0
- nedo_vision_worker/services/WorkerSourceUpdater.py +134 -0
- nedo_vision_worker/services/WorkerStatusClient.py +65 -0
- nedo_vision_worker/services/__init__.py +1 -0
- nedo_vision_worker/util/HardwareID.py +104 -0
- nedo_vision_worker/util/ImageUploader.py +92 -0
- nedo_vision_worker/util/Networking.py +94 -0
- nedo_vision_worker/util/PlatformDetector.py +50 -0
- nedo_vision_worker/util/SystemMonitor.py +299 -0
- nedo_vision_worker/util/VideoProbeUtil.py +120 -0
- nedo_vision_worker/util/__init__.py +1 -0
- nedo_vision_worker/worker/CoreActionWorker.py +125 -0
- nedo_vision_worker/worker/DataSenderWorker.py +168 -0
- nedo_vision_worker/worker/DataSyncWorker.py +143 -0
- nedo_vision_worker/worker/DatasetFrameSender.py +208 -0
- nedo_vision_worker/worker/DatasetFrameWorker.py +412 -0
- nedo_vision_worker/worker/PPEDetectionManager.py +86 -0
- nedo_vision_worker/worker/PipelineActionWorker.py +129 -0
- nedo_vision_worker/worker/PipelineImageWorker.py +116 -0
- nedo_vision_worker/worker/RabbitMQListener.py +170 -0
- nedo_vision_worker/worker/RestrictedAreaManager.py +85 -0
- nedo_vision_worker/worker/SystemUsageManager.py +111 -0
- nedo_vision_worker/worker/VideoStreamWorker.py +139 -0
- nedo_vision_worker/worker/WorkerManager.py +155 -0
- nedo_vision_worker/worker/__init__.py +1 -0
- nedo_vision_worker/worker_service.py +264 -0
- nedo_vision_worker-1.0.0.dist-info/METADATA +563 -0
- nedo_vision_worker-1.0.0.dist-info/RECORD +92 -0
- nedo_vision_worker-1.0.0.dist-info/WHEEL +5 -0
- nedo_vision_worker-1.0.0.dist-info/entry_points.txt +2 -0
- nedo_vision_worker-1.0.0.dist-info/top_level.txt +1 -0
nedo_vision_worker/repositories/WorkerSourceRepository.py
@@ -0,0 +1,46 @@
+import logging
+from sqlalchemy.orm import Session
+from ..database.DatabaseManager import DatabaseManager
+from ..models.worker_source import WorkerSourceEntity
+
+logger = logging.getLogger(__name__)
+
+class WorkerSourceRepository:
+    def __init__(self):
+        self.db_manager = DatabaseManager()
+        self.session: Session = self.db_manager.get_session("config")
+
+    def get_all_worker_sources(self):
+        """Retrieve all worker sources from the database."""
+        try:
+            return self.session.query(WorkerSourceEntity).all()
+        except Exception as e:
+            logger.error(f"🚨 [APP] Database error while fetching worker sources: {e}", exc_info=True)
+            return []
+
+    def bulk_update_worker_sources(self, updated_records):
+        """Batch update worker sources in the database."""
+        try:
+            if not updated_records:
+                logger.info("✅ [APP] No worker sources to update.")
+                return
+
+            self.session.bulk_save_objects(updated_records)
+            self.session.commit()
+            logger.info(f"✅ [APP] Bulk updated {len(updated_records)} worker sources in the database.")
+        except Exception as e:
+            self.session.rollback()
+            logger.error(f"🚨 [APP] Database error while updating worker sources: {e}", exc_info=True)
+
+    def get_worker_source_by_id(self, worker_source_id: str):
+        """Retrieve a worker source by its ID from the database."""
+        try:
+            worker_source = self.session.query(WorkerSourceEntity).filter_by(id=worker_source_id).first()
+            if worker_source:
+                return worker_source
+            else:
+                logger.warning(f"⚠️ [APP] Worker Source ID {worker_source_id} not found.")
+                return None
+        except Exception as e:
+            logger.error(f"🚨 [APP] Database error while fetching worker source by ID {worker_source_id}: {e}", exc_info=True)
+            return None
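
For orientation, a minimal usage sketch of the repository above, assuming the worker's config database has already been set up by DatabaseManager; the ID and the field being edited are hypothetical:

    from nedo_vision_worker.repositories.WorkerSourceRepository import WorkerSourceRepository

    repo = WorkerSourceRepository()

    # Fetch everything; returns [] (after logging) on database errors.
    sources = repo.get_all_worker_sources()

    # Single lookup; returns None and logs a warning when the ID is missing.
    source = repo.get_worker_source_by_id("example-source-id")  # hypothetical ID
    if source is not None:
        source.name = "renamed source"  # hypothetical column
        repo.bulk_update_worker_sources([source])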
nedo_vision_worker/repositories/__init__.py
@@ -0,0 +1 @@
+
nedo_vision_worker/services/AIModelClient.py
@@ -0,0 +1,362 @@
+import os
+import logging
+import threading
+import time
+from pathlib import Path
+from enum import Enum
+from typing import Dict, Optional
+
+from ..models.ai_model import AIModelEntity
+from ..repositories.AIModelRepository import AIModelRepository
+from .GrpcClientBase import GrpcClientBase
+from ..protos.AIModelService_pb2_grpc import AIModelGRPCServiceStub
+from ..protos.AIModelService_pb2 import (
+    GetAIModelListRequest,
+    DownloadAIModelRequest
+)
+from ..database.DatabaseManager import _get_storage_paths
+
+
+class DownloadState(Enum):
+    """Enum for tracking download states."""
+    PENDING = "pending"
+    DOWNLOADING = "downloading"
+    COMPLETED = "completed"
+    FAILED = "failed"
+    CANCELLED = "cancelled"
+
+
+class DownloadInfo:
+    """Class to track download information."""
+    def __init__(self, model_id: str, model_name: str, version: str):
+        self.model_id = model_id
+        self.model_name = model_name
+        self.version = version
+        self.state = DownloadState.PENDING
+        self.start_time = None
+        self.end_time = None
+        self.error_message = None
+        self.thread = None
+        self.stop_event = threading.Event()
+
+
+class AIModelClient(GrpcClientBase):
+    """Client for interacting with AI models via gRPC with improved download tracking."""
+
+    def __init__(self, token, server_host: str, server_port: int = 50051):
+        super().__init__(server_host, server_port)
+        storage_paths = _get_storage_paths()
+        self.models_path = storage_paths["models"]
+        self.models_path.mkdir(parents=True, exist_ok=True)
+        self.repository = AIModelRepository()
+        self.token = token
+
+        # Download tracking
+        self.download_tracker: Dict[str, DownloadInfo] = {}
+        self.download_lock = threading.Lock()
+
+        try:
+            self.connect(AIModelGRPCServiceStub)
+        except Exception as e:
+            logging.error(f"Failed to connect to gRPC server: {e}")
+            self.stub = None
+
+    def _get_model_path(self, file: str) -> Path:
+        """Get the path to a local AI model file."""
+        return self.models_path / os.path.basename(file)
+
+    def _is_model_file_exists(self, file_path: str) -> bool:
+        """Check if the model file actually exists on disk."""
+        if not file_path:
+            return False
+        model_path = self._get_model_path(file_path)
+        return model_path.exists() and model_path.stat().st_size > 0
+
+    def _get_download_info(self, model_id: str) -> Optional[DownloadInfo]:
+        """Get download info for a model."""
+        with self.download_lock:
+            return self.download_tracker.get(model_id)
+
+    def _set_download_info(self, model_id: str, download_info: DownloadInfo):
+        """Set download info for a model."""
+        with self.download_lock:
+            self.download_tracker[model_id] = download_info
+
+    def _remove_download_info(self, model_id: str):
+        """Remove download info for a model."""
+        with self.download_lock:
+            self.download_tracker.pop(model_id, None)
+
+    def _is_downloading(self, model_id: str) -> bool:
+        """Check if a model is currently being downloaded."""
+        download_info = self._get_download_info(model_id)
+        if not download_info:
+            return False
+        return download_info.state in [DownloadState.PENDING, DownloadState.DOWNLOADING]
+
+    def _cancel_download(self, model_id: str):
+        """Cancel an ongoing download."""
+        download_info = self._get_download_info(model_id)
+        if download_info and download_info.state in [DownloadState.PENDING, DownloadState.DOWNLOADING]:
+            download_info.state = DownloadState.CANCELLED
+            download_info.stop_event.set()
+            if download_info.thread and download_info.thread.is_alive():
+                download_info.thread.join(timeout=5)
+            self._update_model_download_status(model_id, "cancelled", "Download cancelled")
+            logging.info(f"🛑 Cancelled download for model {download_info.model_name}")
+
+    def _update_model_download_status(self, model_id: str, status: str, error_message: str = None):
+        """Update the download status in the database."""
+        try:
+            from datetime import datetime
+            model = self.repository.get_model_by_id(model_id)
+            if model:
+                model.download_status = status
+                model.last_download_attempt = datetime.utcnow()
+                if error_message:
+                    model.download_error = error_message
+                self.repository.session.commit()
+        except Exception as e:
+            logging.error(f"❌ Error updating model download status: {e}")
+            self.repository.session.rollback()
+
+    def sync_ai_models(self, worker_id: str) -> dict:
+        """Fetch and sync AI model list from gRPC service using token authentication."""
+        if not self.stub:
+            return {"success": False, "message": "gRPC connection is not established."}
+
+        try:
+            # Get model list from server
+            response = self._fetch_model_list(worker_id)
+            if not response or not response.success:
+                return {"success": False, "message": response.message if response else "Unknown error"}
+
+            # Process models
+            self._process_server_models(response.data)
+
+            return {"success": True, "message": response.message, "data": response.data}
+
+        except Exception as e:
+            logging.error(f"Error fetching AI model list: {e}")
+            return {"success": False, "message": f"Error occurred: {e}"}
+
+    def _fetch_model_list(self, worker_id: str):
+        """Fetch model list from server using token authentication."""
+        request = GetAIModelListRequest(worker_id=worker_id, token=self.token)
+        return self.handle_rpc(self.stub.GetAIModelList, request)
+
+    def _process_server_models(self, server_models):
+        """Process server models, handling additions, updates, and deletions."""
+        local_models = {model.id: model for model in self.repository.get_models()}
+        server_model_ids = set()
+
+        new_models = []
+        updated_models = []
+
+        # Process each model from the server
+        for model in server_models:
+            server_model_ids.add(model.id)
+            existing_model = local_models.get(model.id)
+
+            if existing_model:
+                self._handle_existing_model(model, existing_model, updated_models)
+            else:
+                self._handle_new_model(model, new_models)
+
+        # Handle models that no longer exist on the server
+        models_to_delete = [
+            model for model_id, model in local_models.items()
+            if model_id not in server_model_ids
+        ]
+
+        self._save_changes(new_models, updated_models, models_to_delete)
+
+    def _handle_existing_model(self, server_model, local_model, updated_models):
+        """Handle model that exists locally but might need updates."""
+        # Check if model file actually exists
+        if not self._is_model_file_exists(local_model.file):
+            logging.warning(f"⚠️ Model file missing for {local_model.name}. Re-downloading...")
+            self._schedule_model_download(server_model)
+            return
+
+        # Check if version or type changed
+        if server_model.version == local_model.version and server_model.ai_model_type_code == local_model.type:
+            return
+
+        logging.info(f"🔄 Model update detected: {server_model.name} "
+                     f"(Version {local_model.version} -> {server_model.version}). Updating...")
+
+        # Cancel any ongoing download for this model
+        self._cancel_download(server_model.id)
+
+        # Delete old model file
+        self.delete_local_model(local_model.file)
+
+        # Schedule new download
+        self._schedule_model_download(server_model)
+
+        # Update properties regardless
+        local_model.name = server_model.name
+        local_model.type = server_model.ai_model_type_code
+        local_model.version = server_model.version
+        updated_models.append(local_model)
+
+    def _handle_new_model(self, server_model, new_models):
+        """Handle model that doesn't exist locally."""
+        # Check if already downloading this model
+        if self._is_downloading(server_model.id):
+            logging.info(f"⏳ Model {server_model.name} is already being downloaded. Skipping...")
+            return
+
+        new_model = AIModelEntity(
+            id=server_model.id,
+            name=server_model.name,
+            type=server_model.ai_model_type_code,
+            file=os.path.basename(server_model.file_path),
+            version=server_model.version
+        )
+        new_models.append(new_model)
+
+        logging.info(f"⬇️ New model detected: {server_model.name}. Scheduling download...")
+        self._schedule_model_download(server_model)
+
+    def _schedule_model_download(self, model):
+        """Schedule a model download in background thread."""
+        # Cancel any existing download for this model
+        self._cancel_download(model.id)
+
+        # Create new download info
+        download_info = DownloadInfo(
+            model_id=model.id,
+            model_name=model.name,
+            version=model.version
+        )
+        download_info.state = DownloadState.PENDING
+        download_info.start_time = time.time()
+
+        # Update database status
+        self._update_model_download_status(model.id, "pending", None)
+
+        # Start download in background thread
+        download_info.thread = threading.Thread(
+            target=self._download_model_worker,
+            args=(model, download_info),
+            daemon=True,
+            name=f"ModelDownload-{model.id}"
+        )
+        download_info.thread.start()
+
+        self._set_download_info(model.id, download_info)
+
+    def _download_model_worker(self, model, download_info):
+        """Background worker for downloading a model."""
+        try:
+            download_info.state = DownloadState.DOWNLOADING
+            self._update_model_download_status(model.id, "downloading", None)
+            logging.info(f"📥 Starting download for AI model '{model.name}'...")
+
+            if self.download_model(model, download_info):
+                download_info.state = DownloadState.COMPLETED
+                download_info.end_time = time.time()
+                duration = download_info.end_time - download_info.start_time
+                self._update_model_download_status(model.id, "completed", None)
+                logging.info(f"✅ AI Model '{model.name}' downloaded successfully in {duration:.2f}s")
+            else:
+                download_info.state = DownloadState.FAILED
+                download_info.error_message = "Download failed"
+                self._update_model_download_status(model.id, "failed", "Download failed")
+                logging.error(f"❌ Failed to download AI Model '{model.name}'")
+
+        except Exception as e:
+            download_info.state = DownloadState.FAILED
+            download_info.error_message = str(e)
+            self._update_model_download_status(model.id, "failed", str(e))
+            logging.error(f"❌ Error downloading AI Model '{model.name}': {e}")
+        finally:
+            # Clean up download info after a delay to allow status checking
+            threading.Timer(300, lambda: self._remove_download_info(model.id)).start()
+
+    def _save_changes(self, new_models, updated_models, models_to_delete):
+        """Save all changes to database in a single transaction."""
+        try:
+            if new_models:
+                self.repository.session.bulk_save_objects(new_models)
+
+            if updated_models:
+                self.repository.session.bulk_save_objects(updated_models)
+
+            for model in models_to_delete:
+                logging.info(f"🗑️ Model removed from server: {model.name}. Deleting local copy...")
+                # Cancel any ongoing download
+                self._cancel_download(model.id)
+                self.repository.session.delete(model)
+                self.delete_local_model(model.file)
+
+            self.repository.session.commit()
+        except Exception as e:
+            self.repository.session.rollback()
+            logging.error(f"Error saving model changes: {e}")
+            raise
+
+    def download_model(self, model, download_info=None) -> bool:
+        """Download the AI model and save it to the models directory."""
+        if not self.stub:
+            logging.error("gRPC connection is not established.")
+            return False
+
+        try:
+            request = DownloadAIModelRequest(ai_model_id=model.id, token=self.token)
+            file_path = self._get_model_path(model.file_path)
+
+            # Check if download was cancelled
+            if download_info and download_info.stop_event.is_set():
+                logging.info(f"🛑 Download cancelled for model '{model.name}'")
+                return False
+
+            with open(file_path, "wb") as f:
+                for chunk in self.stub.DownloadAIModel(request):
+                    # Check if download was cancelled during streaming
+                    if download_info and download_info.stop_event.is_set():
+                        logging.info(f"🛑 Download cancelled during streaming for model '{model.name}'")
+                        return False
+                    f.write(chunk.file_chunk)
+
+            return True
+
+        except Exception as e:
+            logging.error(f"❌ Error downloading AI Model '{model.name}': {e}")
+            return False
+
+    def delete_local_model(self, file: str) -> None:
+        """Delete a local AI model file."""
+        file_path = self._get_model_path(file)
+        try:
+            if file_path.exists():
+                file_path.unlink()
+                logging.info(f"🗑️ Model file deleted: {file}")
+        except Exception as e:
+            logging.error(f"❌ Error deleting model file: {e}")
+
+    def get_download_status(self, model_id: str) -> Optional[Dict]:
+        """Get the download status for a specific model."""
+        download_info = self._get_download_info(model_id)
+        if not download_info:
+            return None
+
+        return {
+            "model_id": download_info.model_id,
+            "model_name": download_info.model_name,
+            "version": download_info.version,
+            "state": download_info.state.value,
+            "start_time": download_info.start_time,
+            "end_time": download_info.end_time,
+            "error_message": download_info.error_message
+        }
+
+    def get_all_download_status(self) -> Dict[str, Dict]:
+        """Get download status for all models."""
+        with self.download_lock:
+            model_ids = list(self.download_tracker)
+        # Build outside the lock: get_download_status() re-acquires it (it is not reentrant).
+        return {model_id: self.get_download_status(model_id)
+                for model_id in model_ids}
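
A hedged usage sketch of the sync flow above; the host, token, and worker ID are placeholders. Downloads run on daemon threads, so a short-lived caller would poll the status API until nothing is pending or downloading:

    import time
    from nedo_vision_worker.services.AIModelClient import AIModelClient

    client = AIModelClient(token="WORKER_TOKEN", server_host="manager.example.com")  # placeholders

    result = client.sync_ai_models(worker_id="worker-123")  # hypothetical worker ID
    print(result["message"])

    # Downloads happen in background threads; wait until none are active.
    while any(status and status["state"] in ("pending", "downloading")
              for status in client.get_all_download_status().values()):
        time.sleep(1)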
nedo_vision_worker/services/ConnectionInfoClient.py
@@ -0,0 +1,57 @@
+import logging
+from .GrpcClientBase import GrpcClientBase
+from ..protos.VisionWorkerService_pb2_grpc import VisionWorkerServiceStub
+from ..protos.VisionWorkerService_pb2 import GetWorkerConnectionInfoRequest
+
+class ConnectionInfoClient(GrpcClientBase):
+    """
+    Client for fetching connection information using token-based authentication.
+    """
+
+    def __init__(self, host: str, port: int, token: str):
+        """
+        Initialize the connection info client.
+
+        Args:
+            host (str): The server hostname or IP address.
+            port (int): The server port.
+            token (str): Authentication token for the worker.
+        """
+        super().__init__(host, port)
+        self.token = token
+        self.connect(VisionWorkerServiceStub)
+
+    def get_connection_info(self) -> dict:
+        """
+        Fetch connection information from the server using token authentication.
+
+        Returns:
+            dict: A dictionary containing the connection information and result.
+        """
+        try:
+            if not self.stub:
+                raise Exception("Not connected to manager")
+
+            # Prepare the request
+            request = GetWorkerConnectionInfoRequest(token=self.token)
+
+            # Call the GetConnectionInfo RPC using base class error handling
+            response = self.handle_rpc(self.stub.GetConnectionInfo, request)
+
+            # Handle response
+            if response and response.success:
+                return {
+                    "success": True,
+                    "message": response.message,
+                    "rabbitmq_host": response.rabbitmq_host,
+                    "rabbitmq_port": response.rabbitmq_port,
+                    "rabbitmq_username": response.rabbitmq_username,
+                    "rabbitmq_password": response.rabbitmq_password,
+                    "id": getattr(response, "id", None)
+                }
+
+            return {"success": False, "message": response.message if response else "Unknown error"}
+
+        except Exception as e:
+            logging.error(f"Failed to fetch connection info: {e}")
+            return {"success": False, "message": f"Failed to fetch connection info: {e}"}
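
A short sketch of bootstrapping broker credentials with this client; the host, port, and token are placeholders:

    from nedo_vision_worker.services.ConnectionInfoClient import ConnectionInfoClient

    client = ConnectionInfoClient(host="manager.example.com", port=50051, token="WORKER_TOKEN")  # placeholders
    info = client.get_connection_info()

    if info["success"]:
        # These credentials would typically be handed to the RabbitMQ listener.
        print(info["rabbitmq_host"], info["rabbitmq_port"])
    else:
        print("Failed:", info["message"])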
nedo_vision_worker/services/DatasetSourceClient.py
@@ -0,0 +1,88 @@
+import logging
+from .GrpcClientBase import GrpcClientBase
+from ..protos.DatasetSourceService_pb2_grpc import DatasetSourceServiceStub
+from ..protos.DatasetSourceService_pb2 import (
+    GetDatasetSourceListRequest,
+    SendDatasetFrameRequest
+)
+
+logger = logging.getLogger(__name__)
+
+class DatasetSourceClient(GrpcClientBase):
+    def __init__(self, server_host: str, server_port: int = 50051):
+        """
+        Initialize the DatasetSource client.
+
+        Args:
+            server_host (str): The server hostname or IP address.
+            server_port (int): The server port. Default is 50051.
+        """
+        super().__init__(server_host, server_port)
+
+        try:
+            self.connect(DatasetSourceServiceStub)
+        except Exception as e:
+            logging.error(f"Failed to connect to gRPC server: {e}")
+            self.stub = None
+
+    def get_dataset_source_list(self, token: str) -> dict:
+        """
+        Get dataset source list from the server using token authentication.
+
+        Args:
+            token (str): Authentication token for the worker.
+
+        Returns:
+            dict: A dictionary containing the result and dataset source list.
+        """
+        if not self.stub:
+            return {"success": False, "message": "gRPC connection is not established."}
+
+        try:
+            request = GetDatasetSourceListRequest(token=token)
+            response = self.handle_rpc(self.stub.GetDatasetSourceList, request)
+
+            if response and response.success:
+                return {"success": True, "message": response.message, "data": response.data}
+
+            return {"success": False, "message": response.message if response else "Unknown error"}
+
+        except Exception as e:
+            logging.error(f"Error fetching dataset source list: {e}")
+            return {"success": False, "message": f"Error occurred: {e}"}
+
+    def send_dataset_frame(self, dataset_source_id: str, uuid: str, image: bytes, timestamp: int, token: str) -> dict:
+        """
+        Send a dataset frame to the server using token authentication.
+
+        Args:
+            dataset_source_id (str): The ID of the dataset source.
+            uuid (str): Unique identifier for the frame.
+            image (bytes): The image data as bytes.
+            timestamp (int): Unix timestamp of the frame.
+            token (str): Authentication token for the worker.
+
+        Returns:
+            dict: A dictionary containing the result of sending the frame.
+        """
+        if not self.stub:
+            return {"success": False, "message": "gRPC connection is not established."}
+
+        try:
+            request = SendDatasetFrameRequest(
+                dataset_source_id=dataset_source_id,
+                uuid=uuid,
+                image=image,
+                timestamp=timestamp,
+                token=token
+            )
+            response = self.handle_rpc(self.stub.SendDatasetFrame, request)
+
+            if response and response.success:
+                return {"success": True, "message": response.message}
+
+            return {"success": False, "message": response.message if response else "Unknown error"}
+
+        except Exception as e:
+            logging.error(f"Error sending dataset frame: {e}")
+            return {"success": False, "message": f"Error occurred: {e}"}
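
A sketch of pushing one frame through this client, assuming JPEG bytes are already available; the host, token, IDs, and file name are placeholders:

    import time
    import uuid
    from nedo_vision_worker.services.DatasetSourceClient import DatasetSourceClient

    client = DatasetSourceClient(server_host="manager.example.com")  # placeholder host

    listing = client.get_dataset_source_list(token="WORKER_TOKEN")   # placeholder token
    if listing["success"]:
        with open("frame.jpg", "rb") as f:  # hypothetical captured frame
            result = client.send_dataset_frame(
                dataset_source_id="dataset-source-1",  # hypothetical ID
                uuid=str(uuid.uuid4()),
                image=f.read(),
                timestamp=int(time.time()),
                token="WORKER_TOKEN",
            )
        print(result["message"])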
nedo_vision_worker/services/FileToRTMPServer.py
@@ -0,0 +1,78 @@
+import subprocess
+import logging
+import os
+
+class FileToRTMPStreamer:
+    def __init__(self, video_path, rtmp_url, stream_key, fps=30, resolution="1280x720", loop=False):
+        """
+        Initialize the file streamer.
+
+        Args:
+            video_path (str): Path to the local video file.
+            rtmp_url (str): The RTMP server URL (without stream key).
+            stream_key (str): The unique stream key for RTMP.
+            fps (int): Frames per second for output stream.
+            resolution (str): Resolution of the output stream.
+            loop (bool): Loop the video until manually stopped.
+        """
+        self.video_path = video_path
+        self.rtmp_url = f"{rtmp_url}/{stream_key}"
+        self.fps = fps
+        self.resolution = resolution
+        self.loop = loop
+        self.stream_key = stream_key
+        self.process = None
+
+    def start_stream(self):
+        """Start streaming video file to RTMP using FFmpeg."""
+        if not os.path.exists(self.video_path):
+            logging.error(f"❌ [APP] Video file not found: {self.video_path}")
+            return
+
+        logging.info(f"📼 [APP] Starting file stream: {self.video_path} → {self.rtmp_url}")
+
+        # FFmpeg command
+        ffmpeg_command = [
+            "ffmpeg",
+            "-re",  # Read input at native frame rate
+            "-stream_loop", "-1" if self.loop else "0",  # Loop if needed
+            "-i", self.video_path,
+
+            "-c:v", "libx264",
+            "-preset", "ultrafast",
+            "-tune", "zerolatency",
+            "-r", str(self.fps),
+            "-b:v", "1500k",
+            "-maxrate", "2000k",
+            "-bufsize", "4000k",
+            "-g", str(self.fps),
+            "-vf", f"scale={self.resolution}",
+
+            "-an",  # Disable audio
+
+            "-f", "flv",
+            self.rtmp_url
+        ]
+
+        try:
+            with open(os.devnull, "w") as devnull:
+                self.process = subprocess.Popen(
+                    ffmpeg_command,
+                    stdout=devnull,
+                    stderr=devnull,
+                    text=True
+                )
+
+            logging.info("✅ [APP] FFmpeg file stream process started successfully.")
+            self.process.wait()  # Block until process is terminated
+
+        except Exception as e:
+            logging.error(f"🚨 [APP] Failed to start FFmpeg file stream: {e}")
+            self.stop_stream()
+
+    def stop_stream(self):
+        """Stop the streaming process."""
+        if self.process:
+            self.process.terminate()
+            self.process.wait()
+            logging.info("🛑 [APP] FFmpeg file stream process terminated.")
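
Because start_stream() blocks on process.wait(), a caller that wants to stop the stream later would typically run it on its own thread, as in this sketch; the file path, URL, and key are placeholders:

    import threading
    import time
    from nedo_vision_worker.services.FileToRTMPServer import FileToRTMPStreamer

    streamer = FileToRTMPStreamer(
        video_path="/videos/sample.mp4",           # placeholder path
        rtmp_url="rtmp://media.example.com/live",  # placeholder URL
        stream_key="demo-key",                     # placeholder key
        loop=True,
    )

    thread = threading.Thread(target=streamer.start_stream, daemon=True)
    thread.start()

    time.sleep(60)          # stream for a while...
    streamer.stop_stream()  # ...then terminate the FFmpeg process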