nedo-vision-worker 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nedo_vision_worker/__init__.py +10 -0
- nedo_vision_worker/cli.py +195 -0
- nedo_vision_worker/config/ConfigurationManager.py +196 -0
- nedo_vision_worker/config/__init__.py +1 -0
- nedo_vision_worker/database/DatabaseManager.py +219 -0
- nedo_vision_worker/database/__init__.py +1 -0
- nedo_vision_worker/doctor.py +453 -0
- nedo_vision_worker/initializer/AppInitializer.py +78 -0
- nedo_vision_worker/initializer/__init__.py +1 -0
- nedo_vision_worker/models/__init__.py +15 -0
- nedo_vision_worker/models/ai_model.py +29 -0
- nedo_vision_worker/models/auth.py +14 -0
- nedo_vision_worker/models/config.py +9 -0
- nedo_vision_worker/models/dataset_source.py +30 -0
- nedo_vision_worker/models/logs.py +9 -0
- nedo_vision_worker/models/ppe_detection.py +39 -0
- nedo_vision_worker/models/ppe_detection_label.py +20 -0
- nedo_vision_worker/models/restricted_area_violation.py +20 -0
- nedo_vision_worker/models/user.py +10 -0
- nedo_vision_worker/models/worker_source.py +19 -0
- nedo_vision_worker/models/worker_source_pipeline.py +21 -0
- nedo_vision_worker/models/worker_source_pipeline_config.py +24 -0
- nedo_vision_worker/models/worker_source_pipeline_debug.py +15 -0
- nedo_vision_worker/models/worker_source_pipeline_detection.py +14 -0
- nedo_vision_worker/protos/AIModelService_pb2.py +46 -0
- nedo_vision_worker/protos/AIModelService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/DatasetSourceService_pb2.py +46 -0
- nedo_vision_worker/protos/DatasetSourceService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/HumanDetectionService_pb2.py +44 -0
- nedo_vision_worker/protos/HumanDetectionService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/PPEDetectionService_pb2.py +46 -0
- nedo_vision_worker/protos/PPEDetectionService_pb2_grpc.py +140 -0
- nedo_vision_worker/protos/VisionWorkerService_pb2.py +72 -0
- nedo_vision_worker/protos/VisionWorkerService_pb2_grpc.py +471 -0
- nedo_vision_worker/protos/WorkerSourcePipelineService_pb2.py +64 -0
- nedo_vision_worker/protos/WorkerSourcePipelineService_pb2_grpc.py +312 -0
- nedo_vision_worker/protos/WorkerSourceService_pb2.py +50 -0
- nedo_vision_worker/protos/WorkerSourceService_pb2_grpc.py +183 -0
- nedo_vision_worker/protos/__init__.py +1 -0
- nedo_vision_worker/repositories/AIModelRepository.py +44 -0
- nedo_vision_worker/repositories/DatasetSourceRepository.py +150 -0
- nedo_vision_worker/repositories/PPEDetectionRepository.py +112 -0
- nedo_vision_worker/repositories/RestrictedAreaRepository.py +88 -0
- nedo_vision_worker/repositories/WorkerSourcePipelineDebugRepository.py +90 -0
- nedo_vision_worker/repositories/WorkerSourcePipelineDetectionRepository.py +48 -0
- nedo_vision_worker/repositories/WorkerSourcePipelineRepository.py +174 -0
- nedo_vision_worker/repositories/WorkerSourceRepository.py +46 -0
- nedo_vision_worker/repositories/__init__.py +1 -0
- nedo_vision_worker/services/AIModelClient.py +362 -0
- nedo_vision_worker/services/ConnectionInfoClient.py +57 -0
- nedo_vision_worker/services/DatasetSourceClient.py +88 -0
- nedo_vision_worker/services/FileToRTMPServer.py +78 -0
- nedo_vision_worker/services/GrpcClientBase.py +155 -0
- nedo_vision_worker/services/GrpcClientManager.py +141 -0
- nedo_vision_worker/services/ImageUploadClient.py +82 -0
- nedo_vision_worker/services/PPEDetectionClient.py +108 -0
- nedo_vision_worker/services/RTSPtoRTMPStreamer.py +98 -0
- nedo_vision_worker/services/RestrictedAreaClient.py +100 -0
- nedo_vision_worker/services/SystemUsageClient.py +77 -0
- nedo_vision_worker/services/VideoStreamClient.py +161 -0
- nedo_vision_worker/services/WorkerSourceClient.py +215 -0
- nedo_vision_worker/services/WorkerSourcePipelineClient.py +393 -0
- nedo_vision_worker/services/WorkerSourceUpdater.py +134 -0
- nedo_vision_worker/services/WorkerStatusClient.py +65 -0
- nedo_vision_worker/services/__init__.py +1 -0
- nedo_vision_worker/util/HardwareID.py +104 -0
- nedo_vision_worker/util/ImageUploader.py +92 -0
- nedo_vision_worker/util/Networking.py +94 -0
- nedo_vision_worker/util/PlatformDetector.py +50 -0
- nedo_vision_worker/util/SystemMonitor.py +299 -0
- nedo_vision_worker/util/VideoProbeUtil.py +120 -0
- nedo_vision_worker/util/__init__.py +1 -0
- nedo_vision_worker/worker/CoreActionWorker.py +125 -0
- nedo_vision_worker/worker/DataSenderWorker.py +168 -0
- nedo_vision_worker/worker/DataSyncWorker.py +143 -0
- nedo_vision_worker/worker/DatasetFrameSender.py +208 -0
- nedo_vision_worker/worker/DatasetFrameWorker.py +412 -0
- nedo_vision_worker/worker/PPEDetectionManager.py +86 -0
- nedo_vision_worker/worker/PipelineActionWorker.py +129 -0
- nedo_vision_worker/worker/PipelineImageWorker.py +116 -0
- nedo_vision_worker/worker/RabbitMQListener.py +170 -0
- nedo_vision_worker/worker/RestrictedAreaManager.py +85 -0
- nedo_vision_worker/worker/SystemUsageManager.py +111 -0
- nedo_vision_worker/worker/VideoStreamWorker.py +139 -0
- nedo_vision_worker/worker/WorkerManager.py +155 -0
- nedo_vision_worker/worker/__init__.py +1 -0
- nedo_vision_worker/worker_service.py +264 -0
- nedo_vision_worker-1.0.0.dist-info/METADATA +563 -0
- nedo_vision_worker-1.0.0.dist-info/RECORD +92 -0
- nedo_vision_worker-1.0.0.dist-info/WHEEL +5 -0
- nedo_vision_worker-1.0.0.dist-info/entry_points.txt +2 -0
- nedo_vision_worker-1.0.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
import cv2
|
|
2
|
+
import subprocess
|
|
3
|
+
import logging
|
|
4
|
+
import json
|
|
5
|
+
import fractions
|
|
6
|
+
import shutil
|
|
7
|
+
from urllib.parse import urlparse
|
|
8
|
+
|
|
9
|
+
class VideoProbeUtil:
    """Utility to extract metadata (resolution, frame rate) from video URLs.

    ffprobe is the primary backend; an OpenCV-based probe is kept as an
    alternative. All probe methods return ``None`` on failure instead of
    raising, so callers can treat a missing result as "unknown".
    """

    @staticmethod
    def get_video_metadata(video_url: str) -> dict:
        """Extract resolution and frame rate from a video URL.

        Args:
            video_url: RTSP/HTTP URL or local file path.

        Returns:
            dict with keys "resolution" (e.g. "1920x1080" or None),
            "frame_rate" (float or None) and "timestamp" (always None),
            or None when probing fails entirely.
        """
        try:
            # ffprobe is preferred over the OpenCV probe: it reads the
            # stream metadata directly without decoding a frame.
            return VideoProbeUtil._get_metadata_ffmpeg(video_url)
        except Exception as e:
            logging.error(f"🚨 [APP] Error probing video {video_url}: {e}", exc_info=True)
            return None

    @staticmethod
    def _get_metadata_opencv(video_url: str) -> dict:
        """Probe ``video_url`` with OpenCV; returns None if it cannot be read."""
        cap = cv2.VideoCapture(video_url)
        if not cap.isOpened():
            logging.warning(f"⚠️ [APP] OpenCV failed to open video: {video_url}")
            return None

        # Read the first frame to ensure the source actually delivers data.
        ret, frame = cap.read()
        if not ret or frame is None:
            logging.warning(f"⚠️ [APP] OpenCV failed to read a frame from {video_url}")
            cap.release()
            return None

        height, width = frame.shape[:2]
        frame_rate = round(cap.get(cv2.CAP_PROP_FPS), 2)
        cap.release()

        return {
            "resolution": f"{width}x{height}" if width and height else None,
            "frame_rate": frame_rate if frame_rate > 0 else None,
            "timestamp": None
        }

    @staticmethod
    def _detect_stream_type(video_url: str) -> str:
        """Classify the URL as "rtsp", "http" or "file" based on its scheme."""
        # Accept path-like objects (e.g. pathlib.Path) as well as strings.
        # (The previous `hasattr(video_url, '__str__')` guard was always true.)
        video_url = str(video_url)

        scheme = urlparse(video_url).scheme
        if scheme == "rtsp":
            return "rtsp"
        if scheme in ("http", "https"):
            return "http"
        return "file"

    @staticmethod
    def _get_metadata_ffmpeg(video_url: str) -> dict:
        """Probe ``video_url`` with ffprobe; returns None on any failure.

        Returns:
            dict with "resolution", "frame_rate" and "timestamp" keys, or
            None if ffprobe is missing, fails, times out, or emits bad JSON.
        """
        # ffprobe must be on PATH; bail out early with a clear log otherwise.
        if not shutil.which("ffprobe"):
            logging.error("⚠️ [APP] ffprobe is not installed or not found in PATH.")
            return None

        stream_type = VideoProbeUtil._detect_stream_type(video_url)

        cmd = ["ffprobe", "-v", "error", "-select_streams", "v:0",
               "-show_entries", "stream=width,height,avg_frame_rate", "-of", "json"]

        # RTSP over TCP avoids the packet loss UDP transport can suffer from.
        if stream_type == "rtsp":
            cmd.insert(1, "-rtsp_transport")
            cmd.insert(2, "tcp")

        cmd.append(video_url)

        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)

            if result.returncode != 0 or not result.stdout.strip():
                logging.warning(f"⚠️ [APP] ffprobe failed for {video_url}: {result.stderr.strip()}")
                return None

            metadata = json.loads(result.stdout)
            streams = metadata.get("streams", [{}])[0]

            width = streams.get("width")
            height = streams.get("height")
            avg_fps = streams.get("avg_frame_rate", "0/1")

            # avg_frame_rate is a fraction string like "30000/1001"; an
            # unknown rate ("0/0") raises ZeroDivisionError and maps to None.
            try:
                frame_rate = round(float(fractions.Fraction(avg_fps)), 2)
            except (ValueError, ZeroDivisionError):
                frame_rate = None

            return {
                "resolution": f"{width}x{height}" if width and height else None,
                "frame_rate": frame_rate,
                "timestamp": None  # Placeholder if you need a timestamp later
            }

        except subprocess.TimeoutExpired:
            logging.warning(f"⚠️ [APP] ffprobe timeout for {video_url}")
        except json.JSONDecodeError:
            logging.error(f"❌ [APP] Failed to parse ffprobe output for {video_url}")

        return None
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
import logging
|
|
3
|
+
import json
|
|
4
|
+
from .RabbitMQListener import RabbitMQListener
|
|
5
|
+
|
|
6
|
+
logger = logging.getLogger(__name__)
|
|
7
|
+
|
|
8
|
+
def safe_join_thread(thread, timeout=5):
    """Safely join a thread, avoiding RuntimeError when joining current thread.

    Args:
        thread: The thread to join; None is a no-op.
        timeout: Maximum seconds to wait for the join.
    """
    # Threads are compared by identity (`is`), not equality.
    if thread and thread is not threading.current_thread():
        thread.join(timeout=timeout)
    elif thread is threading.current_thread():
        logging.info("🛑 [APP] Thread stopping from within itself, skipping join.")
|
|
14
|
+
|
|
15
|
+
class CoreActionWorker:
    """Listens for core action messages (start/stop/restart/debug) over
    RabbitMQ and dispatches them to the supplied callbacks."""

    # Class-scoped logger so the worker is self-contained.
    _logger = logging.getLogger(__name__)

    def __init__(self, config: dict, start_cb, stop_cb):
        """
        Initialize Core Action Worker.

        Args:
            config (dict): Configuration object containing settings.
            start_cb: Callback function to start the worker.
            stop_cb: Callback function to stop the worker.

        Raises:
            ValueError: If config is not a dict or lacks 'worker_id'.
        """
        if not isinstance(config, dict):
            raise ValueError("⚠️ [APP] config must be a dictionary.")

        self.config = config
        self.worker_id = self.config.get("worker_id")
        self.start_cb = start_cb
        self.stop_cb = stop_cb

        if not self.worker_id:
            raise ValueError("⚠️ [APP] Configuration is missing 'worker_id'.")

        self.thread = None
        self.stop_event = threading.Event()
        self.lock = threading.Lock()

        # RabbitMQ listener delivering messages to _process_core_action_message.
        self.listener = RabbitMQListener(
            self.config, self.worker_id, self.stop_event, self._process_core_action_message
        )

    def start(self):
        """Start the Core Action Worker (no-op if already running)."""
        with self.lock:
            if self.thread and self.thread.is_alive():
                self._logger.warning("⚠️ [APP] Core Action Worker is already running.")
                return

            self.stop_event.clear()
            self.thread = threading.Thread(target=self._run, daemon=True)
            self.thread.start()
            self._logger.info(f"🚀 [APP] Core Action Worker started (Device: {self.worker_id}).")

    def stop(self):
        """Stop the Core Action Worker (no-op if not running)."""
        with self.lock:
            if not self.thread or not self.thread.is_alive():
                self._logger.warning("⚠️ [APP] Core Action Worker is not running.")
                return

            self.stop_event.set()
            self.listener.stop_listening()

            safe_join_thread(self.thread)
            self.thread = None
            self._logger.info(f"🛑 [APP] Core Action Worker stopped (Device: {self.worker_id}).")

    def _run(self):
        """Main loop: (re)start the RabbitMQ listener until stopped."""
        try:
            while not self.stop_event.is_set():
                self._logger.info("📡 [APP] Waiting for core action messages...")
                self.listener.start_listening(
                    exchange_name="nedo.worker.core.action",
                    queue_name=f"nedo.worker.core.{self.worker_id}"
                )
                safe_join_thread(self.listener.listener_thread)
        except Exception:
            self._logger.error("🚨 [APP] Unexpected error in Core Action Worker loop.", exc_info=True)

    def _process_core_action_message(self, message):
        """
        Process received core action messages.

        Args:
            message (str): JSON message containing action and timestamp
        """
        try:
            data = json.loads(message)
            action = data.get('action')
            timestamp = data.get('timestamp')

            self._logger.info(f"📥 [APP] Received core action: {action} at {timestamp}")

            if action == "start":
                self._logger.info("🚀 [APP] Starting processing workers")
                self.start_cb()
                self._logger.info("✅ [APP] Started processing workers")

            elif action == "stop":
                self._logger.info("🛑 [APP] Stopping processing workers")
                self.stop_cb()
                self._logger.info("✅ [APP] Stopped processing workers")

            elif action == "restart":
                self._logger.info("🔄 [APP] Restarting processing workers")
                # BUG FIX: a restart must stop the workers first and then start
                # them; the previous order (start, then stop) left them stopped.
                self.stop_cb()
                self.start_cb()
                self._logger.info("✅ [APP] Restarted processing workers")

            elif action == "debug":
                # TODO: do something on debugging, not now
                self._logger.info("🔍 [APP] Debugging")

            else:
                self._logger.warning(f"⚠️ [APP] Unknown core action received: {action}")

        except json.JSONDecodeError:
            self._logger.error("🚨 [APP] Failed to parse core action message JSON")
        except Exception as e:
            self._logger.error(f"🚨 [APP] Error processing core action: {str(e)}")
|
|
125
|
+
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
import time
|
|
3
|
+
import logging
|
|
4
|
+
from .RestrictedAreaManager import RestrictedAreaManager
|
|
5
|
+
from .SystemUsageManager import SystemUsageManager
|
|
6
|
+
from .PPEDetectionManager import PPEDetectionManager
|
|
7
|
+
from .DatasetFrameSender import DatasetFrameSender
|
|
8
|
+
from ..util.ImageUploader import ImageUploader
|
|
9
|
+
from ..services.ImageUploadClient import ImageUploadClient
|
|
10
|
+
from ..services.WorkerSourceUpdater import WorkerSourceUpdater
|
|
11
|
+
|
|
12
|
+
def safe_join_thread(thread, timeout=5):
    """Safely join a thread, avoiding RuntimeError when joining current thread."""
    if not thread:
        return
    if thread == threading.current_thread():
        # Joining the running thread itself would raise RuntimeError.
        logging.info("🛑 [APP] Thread stopping from within itself, skipping join.")
        return
    thread.join(timeout=timeout)
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
class DataSenderWorker:
    """Periodically pushes collected data (system usage, detections, images,
    dataset frames) to the server and keeps worker sources up to date.

    Two daemon threads are used: the main sender loop, ticking every
    ``send_interval`` seconds, and the worker-source updater loop, ticking
    every ``update_interval`` seconds.
    """

    def __init__(self, config: dict, send_interval=5, update_interval=10):
        """
        Initializes the Data Sender Worker.

        Args:
            config (dict): Configuration dictionary.
            send_interval (int): Interval (in seconds) for sending system usage & images.
            update_interval (int): Interval (in seconds) for updating worker sources.

        Raises:
            ValueError: If config is not a dict or lacks 'worker_id',
                'server_host' or 'token'.
        """
        if not isinstance(config, dict):
            raise ValueError("⚠️ [APP] config must be a dictionary.")

        self.config = config
        self.worker_id = self.config.get("worker_id")
        self.server_host = self.config.get("server_host")
        self.token = self.config.get("token")

        if not self.worker_id:
            raise ValueError("⚠️ [APP] Configuration is missing 'worker_id'.")
        if not self.server_host:
            raise ValueError("⚠️ [APP] Configuration is missing 'server_host'.")
        if not self.token:
            raise ValueError("⚠️ [APP] Configuration is missing 'token'.")

        # Worker-source updating can be paused independently of the threads.
        self.should_update = True

        self.send_interval = send_interval
        self.update_interval = update_interval

        self.main_thread = None
        self.worker_update_thread = None
        self.stop_event = threading.Event()
        self.lock = threading.Lock()

        # Initialize services
        self.system_usage_manager = SystemUsageManager(self.server_host, self.worker_id, self.token)
        self.image_upload_client = ImageUploadClient(self.server_host)
        self.image_uploader = ImageUploader(self.image_upload_client, self.worker_id)
        # NOTE(review): the literal "worker_source_id" looks like a placeholder
        # rather than a real source id — confirm against the manager signatures.
        self.ppe_detection_manager = PPEDetectionManager(self.server_host, self.worker_id, "worker_source_id", self.token)
        self.restricted_area_manager = RestrictedAreaManager(self.server_host, self.worker_id, "worker_source_id", self.token)
        self.dataset_frame_sender = DatasetFrameSender(self.server_host, self.token)

        self.source_updater = WorkerSourceUpdater(self.server_host, self.token)

    def start(self):
        """Start the Data Sender Worker threads (no-op if already running)."""
        with self.lock:
            if self.main_thread and self.main_thread.is_alive():
                logger.warning("⚠️ [APP] Data Sender Worker is already running.")
                return

            self.stop_event.clear()

            # Main worker thread: system usage, detections, image upload.
            self.main_thread = threading.Thread(target=self._run_main_worker, daemon=True)
            self.main_thread.start()

            # Separate thread so source updates run on their own interval.
            self.worker_update_thread = threading.Thread(target=self._run_worker_source_updater, daemon=True)
            self.worker_update_thread.start()

            logger.info(f"🚀 [APP] Data Sender Worker started (Device: {self.worker_id}).")

    def stop(self):
        """Stop the Data Sender Worker and Worker Source Updater threads."""
        with self.lock:
            if not self.main_thread or not self.main_thread.is_alive():
                logger.warning("⚠️ [APP] Data Sender Worker is not running.")
                return

            self.stop_event.set()

            if self.main_thread:
                safe_join_thread(self.main_thread, timeout=5)

            if self.worker_update_thread:
                safe_join_thread(self.worker_update_thread, timeout=5)

            self.main_thread = None
            self.worker_update_thread = None

            logger.info(f"🛑 [APP] Data Sender Worker stopped (Device: {self.worker_id}).")

    def start_updating(self):
        """Resume updating worker sources."""
        self.should_update = True

    def stop_updating(self):
        """Pause updating worker sources and stop the active ones."""
        self.should_update = False
        self.source_updater.stop_worker_sources()

    def _run_main_worker(self):
        """Main loop for sending system usage and uploading images."""
        try:
            while not self.stop_event.is_set():
                self.system_usage_manager.process_system_usage()
                self.ppe_detection_manager.send_ppe_detection_batch()
                self.restricted_area_manager.send_violation_batch()
                self._process_image_upload()
                self._process_dataset_frames()
                time.sleep(self.send_interval)
        except Exception:
            logger.error("🚨 [APP] Unexpected error in main worker loop.", exc_info=True)

    def _run_worker_source_updater(self):
        """Dedicated loop for updating worker sources at a different interval."""
        try:
            while not self.stop_event.is_set():
                if self.should_update:
                    self._update_worker_sources()

                time.sleep(self.update_interval)
        except Exception:
            logger.error("🚨 [APP] Unexpected error in Worker Source Updater loop.", exc_info=True)

    def _process_image_upload(self):
        """Check and upload images to the server."""
        try:
            self.image_uploader.check_and_upload_images()
        except Exception:
            logger.error("🚨 [APP] Error uploading images.", exc_info=True)

    def _process_dataset_frames(self):
        """Send pending dataset frames to the server."""
        try:
            stats = self.dataset_frame_sender.send_pending_frames(max_batch_size=5)

            if stats:
                total_sent = sum(stats.values())
                logger.info(f"📤 [APP] Sent {total_sent} dataset frames: {stats}")
            else:
                pending_count = self.dataset_frame_sender.get_pending_frame_count()
                if pending_count > 0:
                    logger.debug(f"📋 [APP] {pending_count} dataset frames pending")

        except Exception:
            logger.error("🚨 [APP] Error processing dataset frames.", exc_info=True)

    def _update_worker_sources(self):
        """Synchronize and update worker sources."""
        try:
            self.source_updater.update_worker_sources()
        except Exception:
            logger.error("🚨 [APP] Error updating worker sources.", exc_info=True)
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
import time
|
|
3
|
+
import logging
|
|
4
|
+
from ..services.AIModelClient import AIModelClient
|
|
5
|
+
from ..services.WorkerSourceClient import WorkerSourceClient
|
|
6
|
+
from ..services.WorkerSourcePipelineClient import WorkerSourcePipelineClient
|
|
7
|
+
from ..services.GrpcClientBase import GrpcClientBase
|
|
8
|
+
|
|
9
|
+
def safe_join_thread(thread, timeout=5):
    """Safely join a thread, avoiding RuntimeError when joining current thread."""
    current = threading.current_thread()
    if thread == current:
        # A thread cannot join itself; log and bail out.
        logging.info("🛑 [APP] Thread stopping from within itself, skipping join.")
    elif thread:
        thread.join(timeout=timeout)
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
class DataSyncWorker:
    """Periodically pulls AI models, worker sources and pipeline data from
    the server so the local state stays in sync."""

    def __init__(self, config: dict, sync_interval=10):
        """
        Initializes the Data Sync Worker.

        Args:
            config (dict): Configuration dictionary.
            sync_interval (int): Interval (in seconds) for synchronization.

        Raises:
            ValueError: If config is not a dict or lacks 'worker_id',
                'server_host' or 'token'.
        """
        if not isinstance(config, dict):
            raise ValueError("⚠️ [APP] config must be a dictionary.")

        self.config = config
        self.worker_id = self.config.get("worker_id")
        self.server_host = self.config.get("server_host")
        self.token = self.config.get("token")

        if not self.worker_id:
            raise ValueError("⚠️ [APP] Configuration is missing 'worker_id'.")
        if not self.server_host:
            raise ValueError("⚠️ [APP] Configuration is missing 'server_host'.")
        if not self.token:
            raise ValueError("⚠️ [APP] Configuration is missing 'token'.")

        self.ai_model_client = AIModelClient(self.token, self.server_host)
        self.worker_source_client = WorkerSourceClient(self.server_host)
        self.worker_source_pipeline_client = WorkerSourcePipelineClient(self.server_host)

        self.sync_interval = sync_interval
        self.thread = None
        self.stop_event = threading.Event()
        self.lock = threading.Lock()

    def start(self):
        """Start the data synchronization worker thread."""
        with self.lock:
            if self.thread and self.thread.is_alive():
                logger.warning("⚠️ [APP] Sync Worker is already running.")
                return

            self.stop_event.clear()
            self.thread = threading.Thread(target=self._run, daemon=True)
            self.thread.start()
            logger.info(f"🚀 [APP] Sync Worker started (Device: {self.worker_id}).")

    def stop(self):
        """Stop the data sync worker."""
        logging.info("🛑 [DATA SYNC] Stopping DataSyncWorker.")
        # BUG FIX: the run loop watches stop_event, but stop() previously set
        # an unused `self.running` flag, so the worker never actually stopped.
        self.stop_event.set()
        # Join without risking a self-join RuntimeError from the worker thread.
        if self.thread and self.thread is not threading.current_thread():
            self.thread.join(timeout=5)
        logging.info("🛑 [DATA SYNC] DataSyncWorker stopped.")

    def _run(self):
        """Main loop for syncing worker sources and pipelines."""
        try:
            while not self.stop_event.is_set():
                self._sync_ai_models()
                self._sync_worker_sources()
                self._sync_worker_source_pipelines()
                self._sync_worker_source_pipelines_debug()
                self._sync_worker_source_pipelines_detection()
                time.sleep(self.sync_interval)
        except Exception:
            logger.error("🚨 [APP] Unexpected error in Sync Worker main loop.", exc_info=True)

    def _sync_ai_models(self):
        """Synchronize AI models from the server."""
        try:
            response = self.ai_model_client.sync_ai_models(self.worker_id)

            if not response or not response.get("success"):
                error_message = GrpcClientBase.get_error_message(response)
                logger.error(f"❌ [APP] Failed to sync AI Models: {error_message}")

        except Exception:
            logger.error("🚨 [APP] Error syncing AI Models.", exc_info=True)

    def _sync_worker_sources(self):
        """Synchronize worker sources from the server."""
        try:
            response = self.worker_source_client.sync_worker_sources(self.worker_id, self.token)

            if not response or not response.get("success"):
                error_message = GrpcClientBase.get_error_message(response)
                logger.error(f"❌ [APP] Failed to sync worker sources: {error_message}")

        except Exception:
            logger.error("🚨 [APP] Error syncing worker sources.", exc_info=True)

    def _sync_worker_source_pipelines(self):
        """Synchronize worker source pipelines from the server."""
        try:
            response = self.worker_source_pipeline_client.get_worker_source_pipeline_list(self.worker_id, self.token)

            if not response or not response.get("success"):
                error_message = GrpcClientBase.get_error_message(response)
                logger.error(f"❌ [APP] Failed to sync worker source pipelines: {error_message}")

        except Exception:
            logger.error("🚨 [APP] Error syncing worker source pipelines.", exc_info=True)

    def _sync_worker_source_pipelines_debug(self):
        """Synchronize worker source pipelines debug with the server."""
        try:
            response = self.worker_source_pipeline_client.sync_pipeline_debug(self.token)

            if not response or not response.get("success"):
                error_message = GrpcClientBase.get_error_message(response)
                # Fixed copy-pasted log text ("restricted area violations").
                logger.error(f"❌ [APP] Failed to sync worker source pipelines debug: {error_message}")

        except Exception:
            logger.error("🚨 [APP] Error syncing worker source pipelines debug.", exc_info=True)

    def _sync_worker_source_pipelines_detection(self):
        """Synchronize worker source pipelines detection with the server."""
        try:
            response = self.worker_source_pipeline_client.sync_pipeline_detection(self.token)

            if not response or not response.get("success"):
                error_message = GrpcClientBase.get_error_message(response)
                # Fixed copy-pasted log text ("dataset sources").
                logger.error(f"❌ [APP] Failed to sync worker source pipelines detection: {error_message}")

        except Exception:
            logger.error("🚨 [APP] Error syncing worker source pipelines detection.", exc_info=True)
|