nedo_vision_worker-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. nedo_vision_worker/__init__.py +10 -0
  2. nedo_vision_worker/cli.py +195 -0
  3. nedo_vision_worker/config/ConfigurationManager.py +196 -0
  4. nedo_vision_worker/config/__init__.py +1 -0
  5. nedo_vision_worker/database/DatabaseManager.py +219 -0
  6. nedo_vision_worker/database/__init__.py +1 -0
  7. nedo_vision_worker/doctor.py +453 -0
  8. nedo_vision_worker/initializer/AppInitializer.py +78 -0
  9. nedo_vision_worker/initializer/__init__.py +1 -0
  10. nedo_vision_worker/models/__init__.py +15 -0
  11. nedo_vision_worker/models/ai_model.py +29 -0
  12. nedo_vision_worker/models/auth.py +14 -0
  13. nedo_vision_worker/models/config.py +9 -0
  14. nedo_vision_worker/models/dataset_source.py +30 -0
  15. nedo_vision_worker/models/logs.py +9 -0
  16. nedo_vision_worker/models/ppe_detection.py +39 -0
  17. nedo_vision_worker/models/ppe_detection_label.py +20 -0
  18. nedo_vision_worker/models/restricted_area_violation.py +20 -0
  19. nedo_vision_worker/models/user.py +10 -0
  20. nedo_vision_worker/models/worker_source.py +19 -0
  21. nedo_vision_worker/models/worker_source_pipeline.py +21 -0
  22. nedo_vision_worker/models/worker_source_pipeline_config.py +24 -0
  23. nedo_vision_worker/models/worker_source_pipeline_debug.py +15 -0
  24. nedo_vision_worker/models/worker_source_pipeline_detection.py +14 -0
  25. nedo_vision_worker/protos/AIModelService_pb2.py +46 -0
  26. nedo_vision_worker/protos/AIModelService_pb2_grpc.py +140 -0
  27. nedo_vision_worker/protos/DatasetSourceService_pb2.py +46 -0
  28. nedo_vision_worker/protos/DatasetSourceService_pb2_grpc.py +140 -0
  29. nedo_vision_worker/protos/HumanDetectionService_pb2.py +44 -0
  30. nedo_vision_worker/protos/HumanDetectionService_pb2_grpc.py +140 -0
  31. nedo_vision_worker/protos/PPEDetectionService_pb2.py +46 -0
  32. nedo_vision_worker/protos/PPEDetectionService_pb2_grpc.py +140 -0
  33. nedo_vision_worker/protos/VisionWorkerService_pb2.py +72 -0
  34. nedo_vision_worker/protos/VisionWorkerService_pb2_grpc.py +471 -0
  35. nedo_vision_worker/protos/WorkerSourcePipelineService_pb2.py +64 -0
  36. nedo_vision_worker/protos/WorkerSourcePipelineService_pb2_grpc.py +312 -0
  37. nedo_vision_worker/protos/WorkerSourceService_pb2.py +50 -0
  38. nedo_vision_worker/protos/WorkerSourceService_pb2_grpc.py +183 -0
  39. nedo_vision_worker/protos/__init__.py +1 -0
  40. nedo_vision_worker/repositories/AIModelRepository.py +44 -0
  41. nedo_vision_worker/repositories/DatasetSourceRepository.py +150 -0
  42. nedo_vision_worker/repositories/PPEDetectionRepository.py +112 -0
  43. nedo_vision_worker/repositories/RestrictedAreaRepository.py +88 -0
  44. nedo_vision_worker/repositories/WorkerSourcePipelineDebugRepository.py +90 -0
  45. nedo_vision_worker/repositories/WorkerSourcePipelineDetectionRepository.py +48 -0
  46. nedo_vision_worker/repositories/WorkerSourcePipelineRepository.py +174 -0
  47. nedo_vision_worker/repositories/WorkerSourceRepository.py +46 -0
  48. nedo_vision_worker/repositories/__init__.py +1 -0
  49. nedo_vision_worker/services/AIModelClient.py +362 -0
  50. nedo_vision_worker/services/ConnectionInfoClient.py +57 -0
  51. nedo_vision_worker/services/DatasetSourceClient.py +88 -0
  52. nedo_vision_worker/services/FileToRTMPServer.py +78 -0
  53. nedo_vision_worker/services/GrpcClientBase.py +155 -0
  54. nedo_vision_worker/services/GrpcClientManager.py +141 -0
  55. nedo_vision_worker/services/ImageUploadClient.py +82 -0
  56. nedo_vision_worker/services/PPEDetectionClient.py +108 -0
  57. nedo_vision_worker/services/RTSPtoRTMPStreamer.py +98 -0
  58. nedo_vision_worker/services/RestrictedAreaClient.py +100 -0
  59. nedo_vision_worker/services/SystemUsageClient.py +77 -0
  60. nedo_vision_worker/services/VideoStreamClient.py +161 -0
  61. nedo_vision_worker/services/WorkerSourceClient.py +215 -0
  62. nedo_vision_worker/services/WorkerSourcePipelineClient.py +393 -0
  63. nedo_vision_worker/services/WorkerSourceUpdater.py +134 -0
  64. nedo_vision_worker/services/WorkerStatusClient.py +65 -0
  65. nedo_vision_worker/services/__init__.py +1 -0
  66. nedo_vision_worker/util/HardwareID.py +104 -0
  67. nedo_vision_worker/util/ImageUploader.py +92 -0
  68. nedo_vision_worker/util/Networking.py +94 -0
  69. nedo_vision_worker/util/PlatformDetector.py +50 -0
  70. nedo_vision_worker/util/SystemMonitor.py +299 -0
  71. nedo_vision_worker/util/VideoProbeUtil.py +120 -0
  72. nedo_vision_worker/util/__init__.py +1 -0
  73. nedo_vision_worker/worker/CoreActionWorker.py +125 -0
  74. nedo_vision_worker/worker/DataSenderWorker.py +168 -0
  75. nedo_vision_worker/worker/DataSyncWorker.py +143 -0
  76. nedo_vision_worker/worker/DatasetFrameSender.py +208 -0
  77. nedo_vision_worker/worker/DatasetFrameWorker.py +412 -0
  78. nedo_vision_worker/worker/PPEDetectionManager.py +86 -0
  79. nedo_vision_worker/worker/PipelineActionWorker.py +129 -0
  80. nedo_vision_worker/worker/PipelineImageWorker.py +116 -0
  81. nedo_vision_worker/worker/RabbitMQListener.py +170 -0
  82. nedo_vision_worker/worker/RestrictedAreaManager.py +85 -0
  83. nedo_vision_worker/worker/SystemUsageManager.py +111 -0
  84. nedo_vision_worker/worker/VideoStreamWorker.py +139 -0
  85. nedo_vision_worker/worker/WorkerManager.py +155 -0
  86. nedo_vision_worker/worker/__init__.py +1 -0
  87. nedo_vision_worker/worker_service.py +264 -0
  88. nedo_vision_worker-1.0.0.dist-info/METADATA +563 -0
  89. nedo_vision_worker-1.0.0.dist-info/RECORD +92 -0
  90. nedo_vision_worker-1.0.0.dist-info/WHEEL +5 -0
  91. nedo_vision_worker-1.0.0.dist-info/entry_points.txt +2 -0
  92. nedo_vision_worker-1.0.0.dist-info/top_level.txt +1 -0

nedo_vision_worker/repositories/DatasetSourceRepository.py
@@ -0,0 +1,150 @@
+ import logging
+ from typing import List, Optional
+ from sqlalchemy.orm import Session
+ from sqlalchemy.exc import SQLAlchemyError
+ from ..models.dataset_source import DatasetSourceEntity
+ from ..database.DatabaseManager import DatabaseManager
+
+ logger = logging.getLogger(__name__)
+
+ class DatasetSourceRepository:
+     """Repository for managing dataset source data using SQLAlchemy."""
+
+     def __init__(self):
+         """Initialize the repository."""
+         self.db_manager = DatabaseManager()
+         self.session: Session = self.db_manager.get_session("default")
+
+     def sync_dataset_sources(self, dataset_sources_data, update_callback=None):
+         """
+         Synchronize dataset sources from server data with intelligent change detection.
+
+         Args:
+             dataset_sources_data: List of dataset source data from server
+             update_callback: Optional callback function for status updates
+         """
+         try:
+             # Get existing dataset sources from local database
+             local_dataset_sources = {ds.id: ds for ds in self.session.query(DatasetSourceEntity).all()}
+
+             new_dataset_sources = []
+             updated_dataset_sources = []
+             changed_dataset_sources = []
+             server_dataset_source_ids = set()
+
+             for dataset_source_data in dataset_sources_data:
+                 server_dataset_source_ids.add(dataset_source_data.id)
+                 existing_dataset_source = local_dataset_sources.get(dataset_source_data.id)
+                 changes = []
+
+                 if existing_dataset_source:
+                     # Check for changes in each field
+                     if existing_dataset_source.dataset_id != dataset_source_data.dataset_id:
+                         changes.append(f"dataset_id: {existing_dataset_source.dataset_id} → {dataset_source_data.dataset_id}")
+                     if existing_dataset_source.worker_source_id != dataset_source_data.worker_source_id:
+                         changes.append(f"worker_source_id: {existing_dataset_source.worker_source_id} → {dataset_source_data.worker_source_id}")
+                     if existing_dataset_source.sampling_interval != dataset_source_data.sampling_interval:
+                         changes.append(f"sampling_interval: {existing_dataset_source.sampling_interval} → {dataset_source_data.sampling_interval}")
+                     if existing_dataset_source.dataset_name != dataset_source_data.dataset_name:
+                         changes.append(f"dataset_name: '{existing_dataset_source.dataset_name}' → '{dataset_source_data.dataset_name}'")
+                     if existing_dataset_source.worker_source_name != dataset_source_data.worker_source_name:
+                         changes.append(f"worker_source_name: '{existing_dataset_source.worker_source_name}' → '{dataset_source_data.worker_source_name}'")
+                     if existing_dataset_source.worker_source_url != dataset_source_data.worker_source_url:
+                         changes.append(f"worker_source_url: '{existing_dataset_source.worker_source_url}' → '{dataset_source_data.worker_source_url}'")
+
+                     if changes:
+                         # Update existing record
+                         existing_dataset_source.dataset_id = dataset_source_data.dataset_id
+                         existing_dataset_source.worker_source_id = dataset_source_data.worker_source_id
+                         existing_dataset_source.sampling_interval = dataset_source_data.sampling_interval
+                         existing_dataset_source.dataset_name = dataset_source_data.dataset_name
+                         existing_dataset_source.worker_source_name = dataset_source_data.worker_source_name
+                         existing_dataset_source.worker_source_url = dataset_source_data.worker_source_url
+                         updated_dataset_sources.append(existing_dataset_source)
+                         changed_dataset_sources.append(f"🔄 [APP] [UPDATE] Dataset Source ID {dataset_source_data.id}: " + ", ".join(changes))
+                 else:
+                     # Create new record
+                     new_dataset_source = DatasetSourceEntity(
+                         id=dataset_source_data.id,
+                         dataset_id=dataset_source_data.dataset_id,
+                         worker_source_id=dataset_source_data.worker_source_id,
+                         sampling_interval=dataset_source_data.sampling_interval,
+                         dataset_name=dataset_source_data.dataset_name,
+                         worker_source_name=dataset_source_data.worker_source_name,
+                         worker_source_url=dataset_source_data.worker_source_url
+                     )
+                     new_dataset_sources.append(new_dataset_source)
+                     logger.info(f"🆕 [APP] [INSERT] Added Dataset Source ID {dataset_source_data.id} - {dataset_source_data.dataset_name}")
+
+             # Identify and delete dataset sources not in the server response
+             records_to_delete = [
+                 dataset_source for dataset_source_id, dataset_source in local_dataset_sources.items()
+                 if dataset_source_id not in server_dataset_source_ids
+             ]
+
+             # Perform batch operations in a single transaction
+             if new_dataset_sources:
+                 self.session.bulk_save_objects(new_dataset_sources)  # Bulk insert
+
+             if updated_dataset_sources:
+                 self.session.bulk_save_objects(updated_dataset_sources)  # Bulk update
+
+             if records_to_delete:
+                 for record in records_to_delete:
+                     self.session.delete(record)  # Mark for deletion
+                     logger.info(f"❌ [APP] [DELETE] Dataset Source ID {record.id} - {record.dataset_name}")
+
+             self.session.commit()  # Commit once (reducing DB round trips)
+
+             # Log all changes
+             for change in changed_dataset_sources:
+                 logger.info(change)
+
+             if new_dataset_sources or updated_dataset_sources or records_to_delete:
+                 logger.info(f"✅ [APP] Synced {len(dataset_sources_data)} dataset sources (Added: {len(new_dataset_sources)}, Updated: {len(updated_dataset_sources)}, Deleted: {len(records_to_delete)})")
+
+         except SQLAlchemyError as e:
+             self.session.rollback()
+             logger.error(f"🚨 [APP] Database error while syncing dataset sources: {e}", exc_info=True)
+         except Exception as e:
+             self.session.rollback()
+             logger.error(f"🚨 [APP] Error syncing dataset sources: {e}")
+
+     def get_all_dataset_sources(self) -> List[DatasetSourceEntity]:
+         """Get all dataset sources from local database."""
+         try:
+             dataset_sources = self.session.query(DatasetSourceEntity).all()
+             return dataset_sources
+
+         except SQLAlchemyError as e:
+             logger.error(f"🚨 [APP] Database error while fetching dataset sources: {e}", exc_info=True)
+             return []
+         except Exception as e:
+             logger.error(f"🚨 [APP] Error fetching dataset sources: {e}")
+             return []
+
+     def get_dataset_source_by_id(self, dataset_source_id: str) -> Optional[DatasetSourceEntity]:
+         """Get a specific dataset source by ID."""
+         try:
+             dataset_source = self.session.query(DatasetSourceEntity).filter_by(id=dataset_source_id).first()
+             return dataset_source
+
+         except SQLAlchemyError as e:
+             logger.error(f"🚨 [APP] Database error while fetching dataset source by ID {dataset_source_id}: {e}", exc_info=True)
+             return None
+         except Exception as e:
+             logger.error(f"🚨 [APP] Error fetching dataset source by ID {dataset_source_id}: {e}")
+             return None
+
+     def get_dataset_sources_by_worker_source_id(self, worker_source_id: str) -> List[DatasetSourceEntity]:
+         """Get all dataset sources for a specific worker source."""
+         try:
+             dataset_sources = self.session.query(DatasetSourceEntity).filter_by(worker_source_id=worker_source_id).all()
+             return dataset_sources
+
+         except SQLAlchemyError as e:
+             logger.error(f"🚨 [APP] Database error while fetching dataset sources by worker source ID {worker_source_id}: {e}", exc_info=True)
+             return []
+         except Exception as e:
+             logger.error(f"🚨 [APP] Error fetching dataset sources by worker source ID {worker_source_id}: {e}")
+             return []
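
For orientation, a minimal sketch of how sync_dataset_sources might be driven. The server-side objects only need the attributes the repository reads (id, dataset_id, worker_source_id, sampling_interval, dataset_name, worker_source_name, worker_source_url), so SimpleNamespace is used here as a stand-in for the gRPC dataset-source messages; the IDs and URL are hypothetical, and the sketch assumes DatabaseManager can reach the worker's default SQLite database.

    from types import SimpleNamespace
    from nedo_vision_worker.repositories.DatasetSourceRepository import DatasetSourceRepository

    # Stand-in for the server payload; values are hypothetical.
    server_payload = [
        SimpleNamespace(
            id="ds-src-1",
            dataset_id="dataset-42",
            worker_source_id="worker-src-7",
            sampling_interval=30,
            dataset_name="helmet-dataset",
            worker_source_name="gate-camera",
            worker_source_url="rtsp://camera.local/stream1",
        )
    ]

    repo = DatasetSourceRepository()
    repo.sync_dataset_sources(server_payload)   # insert / update / delete to mirror the server
    for ds in repo.get_all_dataset_sources():   # read back the local copy
        print(ds.id, ds.dataset_name, ds.sampling_interval)

Note that anything present locally but missing from the payload is deleted, so the caller should always pass the complete list returned by the server.
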

nedo_vision_worker/repositories/PPEDetectionRepository.py
@@ -0,0 +1,112 @@
+ import os
+ import logging
+ from pathlib import Path
+ from sqlalchemy.orm import Session, joinedload
+ from sqlalchemy import asc
+ from sqlalchemy.exc import SQLAlchemyError
+ from ..database.DatabaseManager import DatabaseManager, get_storage_path
+ from ..models.ppe_detection import PPEDetectionEntity
+ from ..models.ppe_detection_label import PPEDetectionLabelEntity
+
+ class PPEDetectionRepository:
+     """Handles storage of PPE detections into SQLite using SQLAlchemy."""
+
+     def __init__(self):
+
+         self.storage_dir = get_storage_path("files")
+         self.db_manager = DatabaseManager()
+         self.session: Session = self.db_manager.get_session("default")
+         os.makedirs(self.storage_dir, exist_ok=True)
+
+
+     def get_latest_5_detections(self) -> list:
+         """
+         Retrieves up to 5 pending PPE detections ordered by the 'created_at' timestamp (oldest first).
+
+         Returns:
+             list: A list of request dictionaries, one per detection, each including its PPE labels.
+         """
+         try:
+             latest_detections = (
+                 self.session.query(PPEDetectionEntity)
+                 .options(joinedload(PPEDetectionEntity.ppe_labels))
+                 .order_by(asc(PPEDetectionEntity.created_at))
+                 .limit(5)
+                 .all()
+             )
+
+             # Prepare the list of UpsertPPEDetectionRequest messages
+             ppe_detection_requests = []
+             for detection in latest_detections:
+                 ppe_detection_labels = [
+                     {
+                         'code': label.code,
+                         'confidence_score': label.confidence_score,
+                         'b_box_x1': label.b_box_x1,
+                         'b_box_y1': label.b_box_y1,
+                         'b_box_x2': label.b_box_x2,
+                         'b_box_y2': label.b_box_y2,
+                     }
+                     for label in detection.ppe_labels
+                 ]
+
+                 worker_timestamp = detection.created_at.strftime('%Y-%m-%dT%H:%M:%SZ')  # Remove microseconds and add Z for UTC
+
+                 # Create request object
+                 request = {
+                     'person_id': detection.person_id,
+                     'image': detection.image_path,
+                     'image_tile': detection.image_tile_path,
+                     'worker_source_id': detection.worker_source_id,
+                     'worker_timestamp': worker_timestamp,
+                     'ppe_detection_labels': ppe_detection_labels
+                 }
+                 ppe_detection_requests.append(request)
+
+             return ppe_detection_requests
+
+         except SQLAlchemyError as e:
+             self.session.rollback()
+             logging.error(f"❌ Database error while retrieving latest 5 detections: {e}")
+             return []
+
+     def delete_records_from_db(self, detection_data: list):
+         """
+         Deletes PPE detection records from the database based on detection data.
+
+         Args:
+             detection_data (list): List of dictionaries containing the detection data.
+         """
+         try:
+             # Extract person_id from detection data to delete the corresponding records
+             person_ids_to_delete = [data['person_id'] for data in detection_data]
+
+             # Delete corresponding PPEDetectionEntity records
+             detections_to_delete = (
+                 self.session.query(PPEDetectionEntity)
+                 .filter(PPEDetectionEntity.person_id.in_(person_ids_to_delete))
+                 .all()
+             )
+
+             for detection in detections_to_delete:
+                 # Also delete related PPE detection labels from PPEDetectionLabelEntity
+                 self.session.query(PPEDetectionLabelEntity).filter(PPEDetectionLabelEntity.detection_id == detection.id).delete()
+
+                 # Delete the image file associated with the detection if it exists
+                 image_path = detection.image_path
+                 if os.path.exists(image_path):
+                     os.remove(image_path)
+                     logging.info(f"Deleted image file: {image_path}")
+                 else:
+                     logging.warning(f"Image file not found for detection {detection.id}: {image_path}")
+
+                 # Delete the detection record
+                 self.session.delete(detection)
+
+             # Commit the transaction
+             self.session.commit()
+             logging.info(f"Successfully deleted {len(detections_to_delete)} PPE detection records.")
+
+         except SQLAlchemyError as e:
+             self.session.rollback()
+             logging.error(f"❌ Error occurred while deleting records from DB: {e}")
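
get_latest_5_detections and delete_records_from_db are written to be used as a fetch-then-purge pair: the query returns the oldest pending detections first (created_at ascending), and once those have been uploaded the same dictionaries are passed back in to delete the rows, their labels, and the image files. A hedged sketch of that loop follows; upload_batch is a hypothetical placeholder for the real upload call (the PPEDetectionClient API is not shown in this hunk). RestrictedAreaRepository below follows the same pattern for restricted-area violations.

    from nedo_vision_worker.repositories.PPEDetectionRepository import PPEDetectionRepository

    def upload_batch(requests: list) -> bool:
        """Hypothetical stand-in for the real upload call (e.g. via PPEDetectionClient)."""
        return True

    repo = PPEDetectionRepository()
    pending = repo.get_latest_5_detections()   # oldest pending detections first
    if pending and upload_batch(pending):
        repo.delete_records_from_db(pending)   # purge rows, labels, and image files only after a successful send
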

nedo_vision_worker/repositories/RestrictedAreaRepository.py
@@ -0,0 +1,88 @@
+ import os
+ import logging
+ from pathlib import Path
+ from sqlalchemy.orm import Session
+ from sqlalchemy import asc
+ from sqlalchemy.exc import SQLAlchemyError
+ from ..database.DatabaseManager import DatabaseManager, get_storage_path
+ from ..models.restricted_area_violation import RestrictedAreaViolationEntity
+
+ class RestrictedAreaRepository:
+     """Handles storage and retrieval of restricted area violations in SQLite using SQLAlchemy."""
+
+     def __init__(self):
+         self.storage_dir = get_storage_path("restricted_violations")
+         self.db_manager = DatabaseManager()
+         self.session: Session = self.db_manager.get_session("default")
+         os.makedirs(self.storage_dir, exist_ok=True)
+
+     def get_latest_5_violations(self) -> list:
+         """
+         Retrieves the latest 5 restricted area violations ordered by the 'created_at' timestamp.
+
+         Returns:
+             list: A list of dictionaries representing restricted area violations.
+         """
+         try:
+             latest_violations = (
+                 self.session.query(RestrictedAreaViolationEntity)
+                 .order_by(asc(RestrictedAreaViolationEntity.created_at))
+                 .limit(5)
+                 .all()
+             )
+
+             result = []
+             for violation in latest_violations:
+                 timestamp = violation.created_at.strftime('%Y-%m-%dT%H:%M:%SZ')
+                 result.append({
+                     'person_id': violation.person_id,
+                     'image': violation.image_path,
+                     'image_tile': violation.image_tile_path,
+                     'worker_source_id': violation.worker_source_id,
+                     'worker_timestamp': timestamp,
+                     'confidence_score': violation.confidence_score,
+                     'b_box_x1': violation.b_box_x1,
+                     'b_box_y1': violation.b_box_y1,
+                     'b_box_x2': violation.b_box_x2,
+                     'b_box_y2': violation.b_box_y2,
+                 })
+
+             return result
+
+         except SQLAlchemyError as e:
+             self.session.rollback()
+             logging.error(f"❌ Database error while retrieving latest 5 violations: {e}")
+             return []
+
+     def delete_records_from_db(self, violation_data: list):
+         """
+         Deletes restricted area violation records from the database based on the provided data.
+
+         Args:
+             violation_data (list): List of dictionaries containing the violation data.
+         """
+         try:
+             person_ids_to_delete = [data['person_id'] for data in violation_data]
+
+             violations_to_delete = (
+                 self.session.query(RestrictedAreaViolationEntity)
+                 .filter(RestrictedAreaViolationEntity.person_id.in_(person_ids_to_delete))
+                 .all()
+             )
+
+             for violation in violations_to_delete:
+                 image_path = violation.image_path
+                 if os.path.exists(image_path):
+                     os.remove(image_path)
+                     logging.info(f"Deleted image file: {image_path}")
+                 else:
+                     logging.warning(f"Image file not found for violation {violation.id}: {image_path}")
+
+                 self.session.delete(violation)
+
+             self.session.commit()
+             logging.info(f"Successfully deleted {len(violations_to_delete)} violation records.")
+
+         except SQLAlchemyError as e:
+             self.session.rollback()
+             logging.error(f"❌ Error occurred while deleting violation records: {e}")

nedo_vision_worker/repositories/WorkerSourcePipelineDebugRepository.py
@@ -0,0 +1,90 @@
+ from datetime import datetime, timedelta, timezone
+ import os
+ from sqlalchemy.orm import Session
+ from ..database.DatabaseManager import DatabaseManager
+ from ..models.worker_source_pipeline_debug import WorkerSourcePipelineDebugEntity
+
+
+ class WorkerSourcePipelineDebugRepository:
+     def __init__(self):
+         self.db_manager = DatabaseManager()
+         self.session: Session = self.db_manager.get_session("default")
+
+     def create_debug_entry(self, uuid: str, worker_source_pipeline_id: str) -> WorkerSourcePipelineDebugEntity:
+         """
+         Create a new debug entry for a worker source pipeline.
+
+         Args:
+             uuid (str): The requester ID
+             worker_source_pipeline_id (str): The ID of the worker source pipeline
+
+         Returns:
+             WorkerSourcePipelineDebugEntity: The created debug entry
+         """
+         debug_entry = WorkerSourcePipelineDebugEntity(
+             uuid=uuid,
+             worker_source_pipeline_id=worker_source_pipeline_id,
+         )
+         self.session.add(debug_entry)
+         self.session.commit()
+         return debug_entry
+
+     def get_debug_entries_with_data(self) -> list[WorkerSourcePipelineDebugEntity]:
+         """
+         Fetch all debug entries that have non-null data, ordered by creation date (oldest first).
+
+         Returns:
+             list[WorkerSourcePipelineDebugEntity]: List of debug entries with data
+         """
+         self.session.expire_all()
+         cutoff_time = datetime.now(timezone.utc) - timedelta(minutes=1)
+
+         # Delete old entries
+         old_entries = self.session.query(WorkerSourcePipelineDebugEntity)\
+             .filter(
+                 WorkerSourcePipelineDebugEntity.data.isnot(None),
+                 WorkerSourcePipelineDebugEntity.created_at < cutoff_time
+             ).all()
+
+         for entry in old_entries:
+             if entry.image_path and os.path.exists(entry.image_path):
+                 try:
+                     os.remove(entry.image_path)
+                 except Exception as e:
+                     print(f"Warning: Failed to delete image at {entry.image_path} - {e}")
+             self.session.delete(entry)
+
+         self.session.commit()
+
+         # Fetch new entries
+         entries = self.session.query(WorkerSourcePipelineDebugEntity)\
+             .filter(WorkerSourcePipelineDebugEntity.data.isnot(None))\
+             .order_by(WorkerSourcePipelineDebugEntity.created_at.asc())\
+             .all()
+
+         return entries
+
+     def delete_entry_by_id(self, id: str):
+         """
+         Delete a debug entry by its ID, including the associated image file (if it exists).
+
+         :param id: The ID of the entry to delete.
+         :return: None. If no entry with the given ID exists, the call is a no-op.
+         """
+         entry = self.session.query(WorkerSourcePipelineDebugEntity).filter_by(id=id).first()
+
+         if not entry:
+             return
+
+         # Delete image file if it exists
+         if entry.image_path and os.path.exists(entry.image_path):
+             try:
+                 os.remove(entry.image_path)
+             except Exception as e:
+                 # Optional: Log or handle file deletion error
+                 print(f"Failed to delete image file: {entry.image_path}, error: {e}")
+
+         # Delete DB entry
+         self.session.delete(entry)
+         self.session.commit()
+
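
A debug entry is created first (registering a requester for a pipeline), and some other part of the worker is then expected to fill in the entry's data and image_path columns; get_debug_entries_with_data both prunes filled entries older than one minute (including their image files) and returns whatever is currently ready. A minimal polling sketch, with hypothetical IDs and the assumption that a pipeline worker populates the entries in the background:

    import time
    from nedo_vision_worker.repositories.WorkerSourcePipelineDebugRepository import WorkerSourcePipelineDebugRepository

    repo = WorkerSourcePipelineDebugRepository()
    repo.create_debug_entry(uuid="requester-123", worker_source_pipeline_id="pipeline-9")

    for _ in range(10):                            # poll for a bounded amount of time
        for entry in repo.get_debug_entries_with_data():
            print(entry.id, entry.image_path)      # hand the frame to whoever requested it
            repo.delete_entry_by_id(entry.id)      # also removes the image file, if any
        time.sleep(1)

WorkerSourcePipelineDetectionRepository below applies the same delete-by-id logic to detection entries, minus the one-minute pruning.
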

nedo_vision_worker/repositories/WorkerSourcePipelineDetectionRepository.py
@@ -0,0 +1,48 @@
+ import os
+ from sqlalchemy.orm import Session
+ from ..database.DatabaseManager import _get_storage_paths, DatabaseManager
+ from ..models.worker_source_pipeline_detection import WorkerSourcePipelineDetectionEntity
+
+
+ class WorkerSourcePipelineDetectionRepository:
+     def __init__(self):
+         self.db_manager = DatabaseManager()
+         self.session: Session = self.db_manager.get_session("default")
+         storage_paths = _get_storage_paths()
+         self.storage_dir = storage_paths["files"] / "detection_image"
+         os.makedirs(self.storage_dir, exist_ok=True)
+
+     def get_entries(self) -> list[WorkerSourcePipelineDetectionEntity]:
+         self.session.expire_all()
+
+         # Fetch new entries
+         entries = self.session.query(WorkerSourcePipelineDetectionEntity)\
+             .order_by(WorkerSourcePipelineDetectionEntity.created_at.asc())\
+             .all()
+
+         return entries
+
+     def delete_entry_by_id(self, id: str):
+         """
+         Delete a detection entry by its ID, including the associated image file (if it exists).
+
+         :param id: The ID of the entry to delete.
+         :return: None. If no entry with the given ID exists, the call is a no-op.
+         """
+         entry = self.session.query(WorkerSourcePipelineDetectionEntity).filter_by(id=id).first()
+
+         if not entry:
+             return
+
+         # Delete image file if it exists
+         if entry.image_path and os.path.exists(entry.image_path):
+             try:
+                 os.remove(entry.image_path)
+             except Exception as e:
+                 # Optional: Log or handle file deletion error
+                 print(f"Failed to delete image file: {entry.image_path}, error: {e}")
+
+         # Delete DB entry
+         self.session.delete(entry)
+         self.session.commit()
+

nedo_vision_worker/repositories/WorkerSourcePipelineRepository.py
@@ -0,0 +1,174 @@
+ import logging
+ from sqlalchemy.orm import Session
+ from sqlalchemy.exc import SQLAlchemyError
+ from ..database.DatabaseManager import DatabaseManager
+ from ..protos.WorkerSourcePipelineService_pb2 import WorkerSourcePipelineListResponse
+ from ..models.worker_source_pipeline import WorkerSourcePipelineEntity
+ from ..models.worker_source_pipeline_config import WorkerSourcePipelineConfigEntity
+
+ logger = logging.getLogger(__name__)
+
+ class WorkerSourcePipelineRepository:
+     def __init__(self):
+         self.db_manager = DatabaseManager()
+         self.session: Session = self.db_manager.get_session("config")
+
+     def sync_worker_source_pipelines(self, response: WorkerSourcePipelineListResponse, on_status_update):
+         """
+         Synchronize worker source pipelines from the server with the local database.
+         This includes both WorkerSourcePipelineEntity and WorkerSourcePipelineConfigEntity.
+
+         Args:
+             response (WorkerSourcePipelineListResponse): The gRPC response containing worker source pipeline data.
+         """
+         try:
+             local_pipelines = {pipeline.id: pipeline for pipeline in self.session.query(WorkerSourcePipelineEntity).all()}
+             local_pipeline_configs = {config.id: config for config in self.session.query(WorkerSourcePipelineConfigEntity).all()}
+
+             new_pipelines = []
+             updated_pipelines = []
+             new_pipeline_configs = []
+             updated_pipeline_configs = []
+             changed_pipelines = []
+             changed_configs = []
+             server_pipeline_ids = set()
+             server_pipeline_config_ids = set()
+
+             for pipeline in response.data:
+                 server_pipeline_ids.add(pipeline.id)
+                 existing_pipeline = local_pipelines.get(pipeline.id)
+                 changes = []
+
+                 if existing_pipeline:
+                     if existing_pipeline.name != pipeline.name:
+                         changes.append(f"name: '{existing_pipeline.name}' → '{pipeline.name}'")
+                     if existing_pipeline.worker_source_id != pipeline.worker_source_id:
+                         changes.append(f"worker_source_id: {existing_pipeline.worker_source_id} → {pipeline.worker_source_id}")
+                     if existing_pipeline.worker_id != pipeline.worker_id:
+                         changes.append(f"worker_id: {existing_pipeline.worker_id} → {pipeline.worker_id}")
+                     if existing_pipeline.ai_model_id != pipeline.ai_model_id:
+                         changes.append(f"ai_model_id: {existing_pipeline.ai_model_id} → {pipeline.ai_model_id}")
+                     if existing_pipeline.location_name != pipeline.location_name:
+                         changes.append(f"location_name: {existing_pipeline.location_name} → {pipeline.location_name}")
+                     if existing_pipeline.pipeline_status_code != pipeline.pipeline_status_code and existing_pipeline.pipeline_status_code != "restart":
+                         on_status_update(pipeline.id, existing_pipeline.pipeline_status_code)
+
+                     if changes:
+                         existing_pipeline.name = pipeline.name
+                         existing_pipeline.worker_source_id = pipeline.worker_source_id
+                         existing_pipeline.worker_id = pipeline.worker_id
+                         existing_pipeline.ai_model_id = pipeline.ai_model_id
+                         existing_pipeline.location_name = pipeline.location_name
+                         updated_pipelines.append(existing_pipeline)
+                         changed_pipelines.append(f"🔄 [APP] [UPDATE] Worker Source Pipeline ID {pipeline.id}: " + ", ".join(changes))
+                 else:
+                     new_pipelines.append(WorkerSourcePipelineEntity(
+                         id=pipeline.id,
+                         name=pipeline.name,
+                         worker_source_id=pipeline.worker_source_id,
+                         worker_id=pipeline.worker_id,
+                         ai_model_id=pipeline.ai_model_id,
+                         pipeline_status_code=pipeline.pipeline_status_code,
+                         location_name=pipeline.location_name
+                     ))
+                     logger.info(f"🆕 [APP] [INSERT] Added Worker Source Pipeline ID {pipeline.id} - {pipeline.name}")
+
+                 for config in pipeline.worker_source_pipeline_configs:
+                     server_pipeline_config_ids.add(config.id)
+                     existing_config = local_pipeline_configs.get(config.id)
+                     config_changes = []
+
+                     if existing_config:
+                         if existing_config.worker_source_pipeline_id != config.worker_source_pipeline_id:
+                             config_changes.append(f"worker_source_pipeline_id: {existing_config.worker_source_pipeline_id} → {config.worker_source_pipeline_id}")
+                         if existing_config.pipeline_config_id != config.pipeline_config_id:
+                             config_changes.append(f"pipeline_config_id: {existing_config.pipeline_config_id} → {config.pipeline_config_id}")
+                         if existing_config.is_enabled != config.is_enabled:
+                             config_changes.append(f"is_enabled: {existing_config.is_enabled} → {config.is_enabled}")
+                         if existing_config.value != config.value:
+                             config_changes.append(f"value: '{existing_config.value}' → '{config.value}'")
+                         if existing_config.pipeline_config_name != config.pipeline_config.name:
+                             config_changes.append(f"pipeline_config_name: '{existing_config.pipeline_config_name}' → '{config.pipeline_config.name}'")
+                         if existing_config.pipeline_config_code != config.pipeline_config.code:
+                             config_changes.append(f"pipeline_config_code: '{existing_config.pipeline_config_code}' → '{config.pipeline_config.code}'")
+
+                         if config_changes:
+                             existing_config.worker_source_pipeline_id = config.worker_source_pipeline_id
+                             existing_config.pipeline_config_id = config.pipeline_config_id
+                             existing_config.is_enabled = config.is_enabled
+                             existing_config.value = config.value
+                             existing_config.pipeline_config_name = config.pipeline_config.name
+                             existing_config.pipeline_config_code = config.pipeline_config.code
+                             updated_pipeline_configs.append(existing_config)
+                             changed_configs.append(f"🔄 [APP] [UPDATE] Worker Source Pipeline Config ID {config.id}: " + ", ".join(config_changes))
+                     else:
+                         new_pipeline_configs.append(WorkerSourcePipelineConfigEntity(
+                             id=config.id,
+                             worker_source_pipeline_id=config.worker_source_pipeline_id,
+                             pipeline_config_id=config.pipeline_config_id,
+                             is_enabled=config.is_enabled,
+                             value=config.value,
+                             pipeline_config_name=config.pipeline_config.name,
+                             pipeline_config_code=config.pipeline_config.code
+                         ))
+                         logger.info(f"🆕 [APP] [INSERT] Added Worker Source Pipeline Config ID {config.id}")
+
+             self.session.commit()
+
+             for change in changed_pipelines:
+                 logger.info(change)
+             for change in changed_configs:
+                 logger.info(change)
+
+             # Identify and delete pipelines not in the server response
+             records_to_delete = [
+                 pipeline for pipeline_id, pipeline in local_pipelines.items()
+                 if pipeline_id not in server_pipeline_ids
+             ]
+
+             # Identify and delete pipeline configs not in the server response
+             configs_to_delete = [
+                 config for config_id, config in local_pipeline_configs.items()
+                 if config_id not in server_pipeline_config_ids
+             ]
+
+             # Perform batch insert, update, and delete in a single transaction
+             if new_pipelines:
+                 self.session.bulk_save_objects(new_pipelines)  # Bulk insert
+
+             if updated_pipelines:
+                 self.session.bulk_save_objects(updated_pipelines)  # Bulk update
+
+             if new_pipeline_configs:
+                 self.session.bulk_save_objects(new_pipeline_configs)  # Bulk insert configs
+
+             if updated_pipeline_configs:
+                 self.session.bulk_save_objects(updated_pipeline_configs)  # Bulk update configs
+
+             if records_to_delete:
+                 for record in records_to_delete:
+                     self.session.delete(record)  # Mark for deletion
+                     logger.info(f"❌ [APP] [DELETE] Worker Source Pipeline ID {record.id} - {record.name}")
+
+             if configs_to_delete:
+                 for config in configs_to_delete:
+                     self.session.delete(config)  # Mark for deletion
+
+             self.session.commit()  # Commit once (reducing DB round trips)
+
+         except SQLAlchemyError as e:
+             self.session.rollback()
+             logger.error(f"❌ [APP] [DATABASE ERROR] Error during sync: {e}", exc_info=True)
+
+     def get_worker_source_pipelines(self):
+         try:
+             return self.session.query(WorkerSourcePipelineEntity).all()
+         except Exception as e:
+             logger.error(f"🚨 [APP] Database error while fetching worker source pipelines: {e}", exc_info=True)
+             return []
+
+     def get_worker_source_pipeline(self, pipeline_id):
+         try:
+             return self.session.query(WorkerSourcePipelineEntity).filter_by(id=pipeline_id).first()
+         except Exception as e:
+             logger.error(f"🚨 [APP] Database error while fetching worker source pipeline: {e}", exc_info=True)
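
sync_worker_source_pipelines takes the raw WorkerSourcePipelineListResponse plus a status callback: when a pipeline's local pipeline_status_code differs from the server's (and the local value is not "restart"), the callback receives the pipeline ID and the local status code, presumably so the caller can report it back. A minimal sketch, assuming the response has already been fetched elsewhere (for example via the worker's WorkerSourcePipelineClient, whose API is not part of this hunk):

    from nedo_vision_worker.repositories.WorkerSourcePipelineRepository import WorkerSourcePipelineRepository

    def on_status_update(pipeline_id: str, local_status_code: str) -> None:
        # Invoked only when the local status differs from the server's
        # and the local status is not "restart".
        print(f"status mismatch for pipeline {pipeline_id}: local={local_status_code}")

    def sync_from_server(response) -> None:
        """`response` is a WorkerSourcePipelineListResponse obtained from the server."""
        repo = WorkerSourcePipelineRepository()
        repo.sync_worker_source_pipelines(response, on_status_update)
        for pipeline in repo.get_worker_source_pipelines():
            print(pipeline.id, pipeline.name, pipeline.pipeline_status_code)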