nedo_vision_worker-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92) hide show
  1. nedo_vision_worker/__init__.py +10 -0
  2. nedo_vision_worker/cli.py +195 -0
  3. nedo_vision_worker/config/ConfigurationManager.py +196 -0
  4. nedo_vision_worker/config/__init__.py +1 -0
  5. nedo_vision_worker/database/DatabaseManager.py +219 -0
  6. nedo_vision_worker/database/__init__.py +1 -0
  7. nedo_vision_worker/doctor.py +453 -0
  8. nedo_vision_worker/initializer/AppInitializer.py +78 -0
  9. nedo_vision_worker/initializer/__init__.py +1 -0
  10. nedo_vision_worker/models/__init__.py +15 -0
  11. nedo_vision_worker/models/ai_model.py +29 -0
  12. nedo_vision_worker/models/auth.py +14 -0
  13. nedo_vision_worker/models/config.py +9 -0
  14. nedo_vision_worker/models/dataset_source.py +30 -0
  15. nedo_vision_worker/models/logs.py +9 -0
  16. nedo_vision_worker/models/ppe_detection.py +39 -0
  17. nedo_vision_worker/models/ppe_detection_label.py +20 -0
  18. nedo_vision_worker/models/restricted_area_violation.py +20 -0
  19. nedo_vision_worker/models/user.py +10 -0
  20. nedo_vision_worker/models/worker_source.py +19 -0
  21. nedo_vision_worker/models/worker_source_pipeline.py +21 -0
  22. nedo_vision_worker/models/worker_source_pipeline_config.py +24 -0
  23. nedo_vision_worker/models/worker_source_pipeline_debug.py +15 -0
  24. nedo_vision_worker/models/worker_source_pipeline_detection.py +14 -0
  25. nedo_vision_worker/protos/AIModelService_pb2.py +46 -0
  26. nedo_vision_worker/protos/AIModelService_pb2_grpc.py +140 -0
  27. nedo_vision_worker/protos/DatasetSourceService_pb2.py +46 -0
  28. nedo_vision_worker/protos/DatasetSourceService_pb2_grpc.py +140 -0
  29. nedo_vision_worker/protos/HumanDetectionService_pb2.py +44 -0
  30. nedo_vision_worker/protos/HumanDetectionService_pb2_grpc.py +140 -0
  31. nedo_vision_worker/protos/PPEDetectionService_pb2.py +46 -0
  32. nedo_vision_worker/protos/PPEDetectionService_pb2_grpc.py +140 -0
  33. nedo_vision_worker/protos/VisionWorkerService_pb2.py +72 -0
  34. nedo_vision_worker/protos/VisionWorkerService_pb2_grpc.py +471 -0
  35. nedo_vision_worker/protos/WorkerSourcePipelineService_pb2.py +64 -0
  36. nedo_vision_worker/protos/WorkerSourcePipelineService_pb2_grpc.py +312 -0
  37. nedo_vision_worker/protos/WorkerSourceService_pb2.py +50 -0
  38. nedo_vision_worker/protos/WorkerSourceService_pb2_grpc.py +183 -0
  39. nedo_vision_worker/protos/__init__.py +1 -0
  40. nedo_vision_worker/repositories/AIModelRepository.py +44 -0
  41. nedo_vision_worker/repositories/DatasetSourceRepository.py +150 -0
  42. nedo_vision_worker/repositories/PPEDetectionRepository.py +112 -0
  43. nedo_vision_worker/repositories/RestrictedAreaRepository.py +88 -0
  44. nedo_vision_worker/repositories/WorkerSourcePipelineDebugRepository.py +90 -0
  45. nedo_vision_worker/repositories/WorkerSourcePipelineDetectionRepository.py +48 -0
  46. nedo_vision_worker/repositories/WorkerSourcePipelineRepository.py +174 -0
  47. nedo_vision_worker/repositories/WorkerSourceRepository.py +46 -0
  48. nedo_vision_worker/repositories/__init__.py +1 -0
  49. nedo_vision_worker/services/AIModelClient.py +362 -0
  50. nedo_vision_worker/services/ConnectionInfoClient.py +57 -0
  51. nedo_vision_worker/services/DatasetSourceClient.py +88 -0
  52. nedo_vision_worker/services/FileToRTMPServer.py +78 -0
  53. nedo_vision_worker/services/GrpcClientBase.py +155 -0
  54. nedo_vision_worker/services/GrpcClientManager.py +141 -0
  55. nedo_vision_worker/services/ImageUploadClient.py +82 -0
  56. nedo_vision_worker/services/PPEDetectionClient.py +108 -0
  57. nedo_vision_worker/services/RTSPtoRTMPStreamer.py +98 -0
  58. nedo_vision_worker/services/RestrictedAreaClient.py +100 -0
  59. nedo_vision_worker/services/SystemUsageClient.py +77 -0
  60. nedo_vision_worker/services/VideoStreamClient.py +161 -0
  61. nedo_vision_worker/services/WorkerSourceClient.py +215 -0
  62. nedo_vision_worker/services/WorkerSourcePipelineClient.py +393 -0
  63. nedo_vision_worker/services/WorkerSourceUpdater.py +134 -0
  64. nedo_vision_worker/services/WorkerStatusClient.py +65 -0
  65. nedo_vision_worker/services/__init__.py +1 -0
  66. nedo_vision_worker/util/HardwareID.py +104 -0
  67. nedo_vision_worker/util/ImageUploader.py +92 -0
  68. nedo_vision_worker/util/Networking.py +94 -0
  69. nedo_vision_worker/util/PlatformDetector.py +50 -0
  70. nedo_vision_worker/util/SystemMonitor.py +299 -0
  71. nedo_vision_worker/util/VideoProbeUtil.py +120 -0
  72. nedo_vision_worker/util/__init__.py +1 -0
  73. nedo_vision_worker/worker/CoreActionWorker.py +125 -0
  74. nedo_vision_worker/worker/DataSenderWorker.py +168 -0
  75. nedo_vision_worker/worker/DataSyncWorker.py +143 -0
  76. nedo_vision_worker/worker/DatasetFrameSender.py +208 -0
  77. nedo_vision_worker/worker/DatasetFrameWorker.py +412 -0
  78. nedo_vision_worker/worker/PPEDetectionManager.py +86 -0
  79. nedo_vision_worker/worker/PipelineActionWorker.py +129 -0
  80. nedo_vision_worker/worker/PipelineImageWorker.py +116 -0
  81. nedo_vision_worker/worker/RabbitMQListener.py +170 -0
  82. nedo_vision_worker/worker/RestrictedAreaManager.py +85 -0
  83. nedo_vision_worker/worker/SystemUsageManager.py +111 -0
  84. nedo_vision_worker/worker/VideoStreamWorker.py +139 -0
  85. nedo_vision_worker/worker/WorkerManager.py +155 -0
  86. nedo_vision_worker/worker/__init__.py +1 -0
  87. nedo_vision_worker/worker_service.py +264 -0
  88. nedo_vision_worker-1.0.0.dist-info/METADATA +563 -0
  89. nedo_vision_worker-1.0.0.dist-info/RECORD +92 -0
  90. nedo_vision_worker-1.0.0.dist-info/WHEEL +5 -0
  91. nedo_vision_worker-1.0.0.dist-info/entry_points.txt +2 -0
  92. nedo_vision_worker-1.0.0.dist-info/top_level.txt +1 -0
from .GrpcClientBase import GrpcClientBase
from ..protos.VisionWorkerService_pb2_grpc import VisionWorkerServiceStub
from ..protos.VisionWorkerService_pb2 import SystemUsageRequest, GPUUsage
import logging


class SystemUsageClient(GrpcClientBase):
    """gRPC client that reports host system usage to the VisionWorker service."""

    def __init__(self, server_host: str, server_port: int = 50051):
        """
        Initialize the system usage client.

        Args:
            server_host (str): The server hostname or IP address.
            server_port (int): The server port. Default is 50051.
        """
        super().__init__(server_host, server_port)

        try:
            self.connect(VisionWorkerServiceStub)
        except Exception as e:
            logging.error(f"Failed to connect to gRPC server: {e}")
            # A dead stub makes every later call return a failure dict
            # instead of raising.
            self.stub = None

    @staticmethod
    def _to_gpu_message(entry: dict) -> GPUUsage:
        """Convert one GPU-usage dict into a GPUUsage protobuf message."""
        return GPUUsage(
            gpu_index=entry.get("gpu_index", 0),
            gpu_usage_percent=entry.get("gpu_usage_percent", 0.0),
            memory_usage_percent=entry.get("memory_usage_percent", 0.0),
            temperature_celsius=entry.get("temperature_celsius", 0.0),
            total_memory=entry.get("total_memory", 0),
            used_memory=entry.get("used_memory", 0),
            free_memory=entry.get("free_memory", 0),
        )

    def send_system_usage(self, device_id: str, cpu_usage: float, ram_usage: dict, gpu_usage: list, latency: float, token: str) -> dict:
        """
        Send system usage data to the server using token authentication.

        Args:
            device_id (str): The unique device ID.
            cpu_usage (float): CPU usage percentage.
            ram_usage (dict): RAM usage details.
            gpu_usage (list): GPU usage details.
            latency (float): Measured network latency in milliseconds.
            token (str): Authentication token for the worker.

        Returns:
            dict: A dictionary containing the result of sending system usage.
        """
        if not self.stub:
            return {"success": False, "message": "gRPC connection is not established."}

        try:
            payload = SystemUsageRequest(
                device_id=device_id,
                cpu_usage=cpu_usage,
                ram_usage_percent=ram_usage.get("percent", 0.0),
                ram_total=ram_usage.get("total", 0),
                ram_used=ram_usage.get("used", 0),
                ram_free=ram_usage.get("free", 0),
                latency_ms=latency,
                token=token,
                # gpu_usage may be None or empty; both yield an empty list.
                gpu=[self._to_gpu_message(entry) for entry in (gpu_usage or [])],
            )

            reply = self.handle_rpc(self.stub.SendSystemUsage, payload)

            if reply and reply.success:
                return {"success": True, "message": reply.message}

            return {"success": False, "message": reply.message if reply else "Unknown error"}

        except Exception as e:
            logging.error(f"Error sending system usage: {e}")
            return {"success": False, "message": f"Error occurred: {e}"}
import grpc
import time
import logging
import json
import subprocess
import ffmpeg
import fractions
from urllib.parse import urlparse
from .GrpcClientBase import GrpcClientBase
from ..protos.VisionWorkerService_pb2_grpc import VideoStreamServiceStub
from ..protos.VisionWorkerService_pb2 import VideoFrame


class VideoStreamClient(GrpcClientBase):
    """Streams raw video frames from an RTSP/HLS source to the gRPC VideoStream service."""

    def __init__(self, server_host: str, server_port: int = 50051):
        """Initialize the video stream client."""
        super().__init__(server_host, server_port)

    def _detect_stream_type(self, url):
        """Detect whether the stream is RTSP or HLS based on the URL scheme.

        Returns:
            str: "rtsp", "hls", or "unknown".
        """
        parsed_url = urlparse(url)
        if parsed_url.scheme == "rtsp":
            return "rtsp"
        if parsed_url.scheme in ("http", "https") and url.endswith(".m3u8"):
            return "hls"
        return "unknown"

    def _get_video_properties(self, url, stream_type):
        """Probe the stream with ffprobe and extract its basic properties.

        Args:
            url (str): Stream URL.
            stream_type (str): "rtsp" or "hls".

        Returns:
            tuple: (width, height, fps, pixel_format), or (None, None, None, None)
            if probing fails.
        """
        try:
            probe_cmd = [
                "ffprobe",
                "-i", url,
                "-select_streams", "v:0",
                "-show_entries", "stream=width,height,r_frame_rate,pix_fmt",
                "-of", "json",
                "-v", "quiet",
            ]

            if stream_type == "rtsp":
                # RTSP over TCP is more reliable than UDP for probing.
                probe_cmd.insert(1, "-rtsp_transport")
                probe_cmd.insert(2, "tcp")

            result = subprocess.run(probe_cmd, capture_output=True, text=True)
            # Guard before json.loads: ffprobe failures produce empty stdout.
            if result.returncode != 0 or not result.stdout:
                logging.error(f"ffprobe failed (rc={result.returncode}) for stream: {url}")
                return None, None, None, None

            probe_data = json.loads(result.stdout)

            if probe_data.get("streams"):
                stream = probe_data["streams"][0]
                width = int(stream["width"])
                height = int(stream["height"])
                # r_frame_rate is a fraction string like "30000/1001";
                # Fraction handles it safely.
                fps = float(fractions.Fraction(stream["r_frame_rate"]))
                pixel_format = stream["pix_fmt"]
                return width, height, fps, pixel_format

        except Exception as e:
            logging.error(f"Error extracting video properties: {e}")

        return None, None, None, None

    def _get_bytes_per_pixel(self, pixel_format):
        """Determine bytes per pixel based on pixel format (defaults to 3, i.e. RGB)."""
        pixel_map = {"rgb24": 3, "yuv420p": 1.5, "gray": 1}
        return pixel_map.get(pixel_format, 3)

    @staticmethod
    def _read_exact(stream, size):
        """Read exactly `size` bytes from a pipe, or fewer only at EOF.

        A single pipe read may return short; looping prevents yielding torn frames.
        """
        chunks = []
        remaining = size
        while remaining > 0:
            chunk = stream.read(remaining)
            if not chunk:
                break
            chunks.append(chunk)
            remaining -= len(chunk)
        return b"".join(chunks)

    def _generate_frames(self, url, worker_id, uuid, stream_duration):
        """Generator that yields VideoFrame messages decoded by FFmpeg.

        Args:
            url (str): Stream URL (RTSP or HLS).
            worker_id (str): Worker ID stamped on each frame.
            uuid (str): Stream session ID stamped on each frame.
            stream_duration (int): How long to stream, in seconds.
        """
        stream_type = self._detect_stream_type(url)
        if stream_type == "unknown":
            logging.error(f"Unsupported stream type: {url}")
            return

        width, height, fps, pixel_format = self._get_video_properties(url, stream_type)
        if not width or not height or not fps:
            logging.error("Failed to retrieve stream properties.")
            return

        bytes_per_pixel = self._get_bytes_per_pixel(pixel_format)
        frame_size = int(width * height * bytes_per_pixel)
        frame_interval = 1.0 / fps  # Time between frames
        start_time = time.time()
        empty_frame_count = 0

        logging.info(f"Streaming {stream_type.upper()} from: {url} for {stream_duration} seconds...")

        # stream_type can only be "rtsp" or "hls" here; "unknown" already returned.
        if stream_type == "rtsp":
            ffmpeg_input = ffmpeg.input(url, rtsp_transport="tcp", fflags="nobuffer", timeout="5000000")
        else:
            ffmpeg_input = ffmpeg.input(url, format="hls", analyzeduration="10000000", probesize="10000000")

        process = (
            ffmpeg_input
            .output("pipe:", format="rawvideo", pix_fmt=pixel_format, vsync="passthrough")
            .overwrite_output()
            .run_async(pipe_stdout=True, pipe_stderr=True)
        )

        try:
            while time.time() - start_time < stream_duration:
                frame_bytes = self._read_exact(process.stdout, frame_size)

                # Treat both empty and partial reads as a failed frame: a
                # partial read would otherwise be sent as a torn frame.
                if len(frame_bytes) < frame_size:
                    empty_frame_count += 1
                    logging.warning(f"Empty frame received ({empty_frame_count}), retrying...")

                    if empty_frame_count > 5:  # Stop if 5 consecutive empty frames
                        logging.error("Too many empty frames, stopping stream...")
                        break
                    continue  # Try reading the next frame

                empty_frame_count = 0  # Reset empty frame count
                yield VideoFrame(
                    worker_id=worker_id,
                    uuid=uuid,
                    frame_data=frame_bytes,
                    timestamp=int(time.time() * 1000),
                )

                time.sleep(frame_interval)  # Pace frames at roughly the source FPS

        except Exception as e:
            logging.error(f"Streaming error: {e}")

        finally:
            # Terminate FFmpeg *before* draining stderr: reading a live
            # process's stderr to EOF while its stdout pipe is full can
            # deadlock both sides.
            process.terminate()
            process.wait()  # Ensures cleanup
            stderr_output = process.stderr.read().decode()
            if stderr_output:
                logging.error(f"FFmpeg stderr: {stderr_output}")

    def stream_video(self, worker_id, uuid, url, stream_duration):
        """
        Stream video frames from RTSP or HLS to gRPC server.

        Args:
            worker_id (str): Worker ID
            uuid (str): Unique stream session ID
            url (str): Stream URL (RTSP or HLS)
            stream_duration (int): Duration in seconds to stream
        """
        self.connect(VideoStreamServiceStub)  # Ensure connection and stub are established

        try:
            for response in self.stub.StreamVideo(self._generate_frames(url, worker_id, uuid, stream_duration)):
                if response.success:
                    logging.info(f"Frame sent successfully: {response.message}")
                else:
                    logging.error(f"Frame rejected: {response.message}")

        except grpc.RpcError as e:
            logging.error(f"gRPC error: {e.code()} - {e.details()}")  # Log more details
        except Exception as e:
            logging.error(f"Unexpected streaming error: {e}")
import logging
import os
from pathlib import Path
from ..database.DatabaseManager import _get_storage_paths
from ..models.worker_source import WorkerSourceEntity
from .GrpcClientBase import GrpcClientBase
from ..protos.WorkerSourceService_pb2_grpc import WorkerSourceServiceStub
from ..protos.WorkerSourceService_pb2 import (
    GetWorkerSourceListRequest,
    UpdateWorkerSourceRequest,
    DownloadSourceFileRequest
)
from ..repositories.WorkerSourceRepository import WorkerSourceRepository

logger = logging.getLogger(__name__)


class WorkerSourceClient(GrpcClientBase):
    """gRPC client that syncs worker sources with the server and mirrors their files locally."""

    def __init__(self, server_host: str, server_port: int = 50051):
        """Connect to the WorkerSource service and prepare local source-file storage.

        Args:
            server_host (str): The server hostname or IP address.
            server_port (int): The server port. Default is 50051.
        """
        super().__init__(server_host, server_port)
        storage_paths = _get_storage_paths()
        self.source_file_path = storage_paths["files"] / "source_files"
        self.source_file_path.mkdir(parents=True, exist_ok=True)
        self.repo = WorkerSourceRepository()
        try:
            self.connect(WorkerSourceServiceStub)
        except Exception as e:
            logger.error(f"Failed to connect to gRPC server: {e}")
            self.stub = None

    def _get_path(self, file: str) -> Path:
        """Return the local path for a source file (basename only, under source_files/)."""
        return self.source_file_path / os.path.basename(file)

    def sync_worker_sources(self, worker_id: str, token: str) -> dict:
        """Fetch and sync worker source list from gRPC service using token authentication.

        Args:
            worker_id (str): Worker whose sources should be synced.
            token (str): Authentication token for the worker.

        Returns:
            dict: {"success": bool, "message": str, "data": ...} result summary.
        """
        if not self.stub:
            return {"success": False, "message": "gRPC connection is not established."}

        try:
            request = GetWorkerSourceListRequest(worker_id=worker_id, token=token)
            response = self.handle_rpc(self.stub.GetWorkerSourceList, request)

            if response and response.success:
                self._process_server_sources(response.data)
                return {"success": True, "message": response.message, "data": response.data}

            return {"success": False, "message": response.message if response else "Unknown error"}

        except Exception as e:
            logger.error(f"Error fetching worker source list: {e}")
            return {"success": False, "message": f"Error occurred: {e}"}

    def _process_server_sources(self, server_sources):
        """Process server sources, handling additions, updates, and deletions."""
        local_sources = {source.id: source for source in self.repo.get_all_worker_sources()}
        server_source_ids = set()

        new_sources = []
        updated_sources = []
        changed_records = []

        # Process each source from the server
        for source in server_sources:
            server_source_ids.add(source.id)
            existing_source = local_sources.get(source.id)

            if existing_source:
                self._handle_existing_source(source, existing_source, updated_sources, changed_records)
            else:
                self._handle_new_source(source, new_sources)

        # Handle sources that no longer exist on the server
        sources_to_delete = [
            source for source_id, source in local_sources.items()
            if source_id not in server_source_ids
        ]

        self._save_changes(new_sources, updated_sources, sources_to_delete, changed_records)

    def _handle_existing_source(self, source, existing_source, updated_sources, changed_records):
        """Handle source that exists locally but might need updates."""
        changes = []

        # Check for field changes and log them
        if existing_source.name != source.name:
            changes.append(f"name: '{existing_source.name}' → '{source.name}'")
        if existing_source.worker_id != source.worker_id:
            changes.append(f"worker_id: {existing_source.worker_id} → {source.worker_id}")
        if existing_source.type_code != source.type_code:
            if source.type_code == "live":
                # Switched to a live stream: the local file copy is obsolete.
                self.delete_local_source_file(existing_source.file_path)
            elif existing_source.file_path == source.file_path:
                # NOTE(review): re-downloads when the type changed but the path
                # did not (the path-change branch below handles the other case).
                self.download_source_file(source)

            changes.append(f"type_code: {existing_source.type_code} → {source.type_code}")
        if existing_source.url != source.url:
            changes.append(f"url: {existing_source.url} → {source.url}")
        if existing_source.file_path != source.file_path:
            self.delete_local_source_file(existing_source.file_path)
            self.download_source_file(source)

            changes.append(f"file_path: {existing_source.file_path} → {source.file_path}")

        if changes:
            # Update existing record
            existing_source.name = source.name
            existing_source.worker_id = source.worker_id
            existing_source.type_code = source.type_code
            existing_source.url = source.url
            existing_source.file_path = source.file_path

            updated_sources.append(existing_source)
            changed_records.append(f"🔄 [APP] [UPDATE] Worker Source ID {source.id}: " + ", ".join(changes))

    def _handle_new_source(self, source, new_sources):
        """Handle source that doesn't exist locally."""
        new_record = WorkerSourceEntity(
            id=source.id,
            name=source.name,
            worker_id=source.worker_id,
            type_code=source.type_code,
            file_path=source.file_path,
            url=source.url
        )
        new_sources.append(new_record)
        logger.info(f"🆕 [APP] [INSERT] Added Worker Source ID {source.id} - {source.name}")
        if source.type_code == "file":
            self.download_source_file(source)

    def _save_changes(self, new_sources, updated_sources, sources_to_delete, changed_records):
        """Save all changes to database in a single transaction."""
        try:
            if new_sources:
                self.repo.session.bulk_save_objects(new_sources)

            if updated_sources:
                # NOTE(review): these objects are already session-attached, so
                # commit() alone would flush them; bulk_save_objects may issue
                # redundant UPDATEs — confirm intent before changing.
                self.repo.session.bulk_save_objects(updated_sources)

            for source in sources_to_delete:
                logger.info(f"❌ [APP] [DELETE] Removing Worker Source ID {source.id} - {source.name}")
                self.repo.session.delete(source)
                self.delete_local_source_file(source.file_path)

            self.repo.session.commit()

            for change in changed_records:
                logger.info(change)

        except Exception as e:
            self.repo.session.rollback()
            logger.error(f"Error saving sources changes: {e}")
            raise

    def update_worker_source(
        self, worker_source_id: str, resolution: str, status_code: str, frame_rate: float, worker_timestamp: str, token: str
    ) -> dict:
        """Updates a worker source entry using gRPC with token authentication."""
        if not self.stub:
            return {"success": False, "message": "gRPC connection is not established."}

        try:
            request = UpdateWorkerSourceRequest(
                worker_source_id=worker_source_id,
                resolution=resolution,
                status_code=status_code,
                frame_rate=frame_rate,
                worker_timestamp=worker_timestamp,
                token=token
            )
            response = self.handle_rpc(self.stub.Update, request)

            if response and response.success:
                return {"success": True, "message": response.message}

            return {"success": False, "message": response.message if response else "Unknown error"}

        except Exception as e:
            logger.error(f"Error updating worker source: {e}")
            return {"success": False, "message": f"Error occurred: {e}"}

    def download_source_file(self, source) -> bool:
        """Download a source file via the streaming RPC and save it locally.

        Args:
            source: Server-side source record (needs .id, .name, .file_path).

        Returns:
            bool: True if the file was downloaded, False otherwise.
        """
        if not self.stub:
            logger.error("gRPC connection is not established.")
            return False
        if not source.file_path:
            return False

        try:
            logger.info(f"📥 Downloading source file '{source.name}'...")
            request = DownloadSourceFileRequest(source_id=source.id)
            file_path = self._get_path(source.file_path)

            with open(file_path, "wb") as f:
                for chunk in self.stub.DownloadSourceFile(request):
                    f.write(chunk.file_chunk)

            logger.info(f"✅ Source File '{source.name}' downloaded successfully")
            return True

        except Exception as e:
            logger.error(f"❌ Error downloading Source File '{source.name}': {e}")
            return False

    def delete_local_source_file(self, file: str) -> None:
        """Delete a local Source file; a missing file is logged, not raised."""
        if not file:
            return

        file_path = self._get_path(file)
        try:
            file_path.unlink()
            logger.info(f"🗑️ File deleted")
        except Exception as e:
            logger.error(f"❌ Error deleting file: {e}")