nedo-vision-worker 1.0.0 (nedo_vision_worker-1.0.0-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. nedo_vision_worker/__init__.py +10 -0
  2. nedo_vision_worker/cli.py +195 -0
  3. nedo_vision_worker/config/ConfigurationManager.py +196 -0
  4. nedo_vision_worker/config/__init__.py +1 -0
  5. nedo_vision_worker/database/DatabaseManager.py +219 -0
  6. nedo_vision_worker/database/__init__.py +1 -0
  7. nedo_vision_worker/doctor.py +453 -0
  8. nedo_vision_worker/initializer/AppInitializer.py +78 -0
  9. nedo_vision_worker/initializer/__init__.py +1 -0
  10. nedo_vision_worker/models/__init__.py +15 -0
  11. nedo_vision_worker/models/ai_model.py +29 -0
  12. nedo_vision_worker/models/auth.py +14 -0
  13. nedo_vision_worker/models/config.py +9 -0
  14. nedo_vision_worker/models/dataset_source.py +30 -0
  15. nedo_vision_worker/models/logs.py +9 -0
  16. nedo_vision_worker/models/ppe_detection.py +39 -0
  17. nedo_vision_worker/models/ppe_detection_label.py +20 -0
  18. nedo_vision_worker/models/restricted_area_violation.py +20 -0
  19. nedo_vision_worker/models/user.py +10 -0
  20. nedo_vision_worker/models/worker_source.py +19 -0
  21. nedo_vision_worker/models/worker_source_pipeline.py +21 -0
  22. nedo_vision_worker/models/worker_source_pipeline_config.py +24 -0
  23. nedo_vision_worker/models/worker_source_pipeline_debug.py +15 -0
  24. nedo_vision_worker/models/worker_source_pipeline_detection.py +14 -0
  25. nedo_vision_worker/protos/AIModelService_pb2.py +46 -0
  26. nedo_vision_worker/protos/AIModelService_pb2_grpc.py +140 -0
  27. nedo_vision_worker/protos/DatasetSourceService_pb2.py +46 -0
  28. nedo_vision_worker/protos/DatasetSourceService_pb2_grpc.py +140 -0
  29. nedo_vision_worker/protos/HumanDetectionService_pb2.py +44 -0
  30. nedo_vision_worker/protos/HumanDetectionService_pb2_grpc.py +140 -0
  31. nedo_vision_worker/protos/PPEDetectionService_pb2.py +46 -0
  32. nedo_vision_worker/protos/PPEDetectionService_pb2_grpc.py +140 -0
  33. nedo_vision_worker/protos/VisionWorkerService_pb2.py +72 -0
  34. nedo_vision_worker/protos/VisionWorkerService_pb2_grpc.py +471 -0
  35. nedo_vision_worker/protos/WorkerSourcePipelineService_pb2.py +64 -0
  36. nedo_vision_worker/protos/WorkerSourcePipelineService_pb2_grpc.py +312 -0
  37. nedo_vision_worker/protos/WorkerSourceService_pb2.py +50 -0
  38. nedo_vision_worker/protos/WorkerSourceService_pb2_grpc.py +183 -0
  39. nedo_vision_worker/protos/__init__.py +1 -0
  40. nedo_vision_worker/repositories/AIModelRepository.py +44 -0
  41. nedo_vision_worker/repositories/DatasetSourceRepository.py +150 -0
  42. nedo_vision_worker/repositories/PPEDetectionRepository.py +112 -0
  43. nedo_vision_worker/repositories/RestrictedAreaRepository.py +88 -0
  44. nedo_vision_worker/repositories/WorkerSourcePipelineDebugRepository.py +90 -0
  45. nedo_vision_worker/repositories/WorkerSourcePipelineDetectionRepository.py +48 -0
  46. nedo_vision_worker/repositories/WorkerSourcePipelineRepository.py +174 -0
  47. nedo_vision_worker/repositories/WorkerSourceRepository.py +46 -0
  48. nedo_vision_worker/repositories/__init__.py +1 -0
  49. nedo_vision_worker/services/AIModelClient.py +362 -0
  50. nedo_vision_worker/services/ConnectionInfoClient.py +57 -0
  51. nedo_vision_worker/services/DatasetSourceClient.py +88 -0
  52. nedo_vision_worker/services/FileToRTMPServer.py +78 -0
  53. nedo_vision_worker/services/GrpcClientBase.py +155 -0
  54. nedo_vision_worker/services/GrpcClientManager.py +141 -0
  55. nedo_vision_worker/services/ImageUploadClient.py +82 -0
  56. nedo_vision_worker/services/PPEDetectionClient.py +108 -0
  57. nedo_vision_worker/services/RTSPtoRTMPStreamer.py +98 -0
  58. nedo_vision_worker/services/RestrictedAreaClient.py +100 -0
  59. nedo_vision_worker/services/SystemUsageClient.py +77 -0
  60. nedo_vision_worker/services/VideoStreamClient.py +161 -0
  61. nedo_vision_worker/services/WorkerSourceClient.py +215 -0
  62. nedo_vision_worker/services/WorkerSourcePipelineClient.py +393 -0
  63. nedo_vision_worker/services/WorkerSourceUpdater.py +134 -0
  64. nedo_vision_worker/services/WorkerStatusClient.py +65 -0
  65. nedo_vision_worker/services/__init__.py +1 -0
  66. nedo_vision_worker/util/HardwareID.py +104 -0
  67. nedo_vision_worker/util/ImageUploader.py +92 -0
  68. nedo_vision_worker/util/Networking.py +94 -0
  69. nedo_vision_worker/util/PlatformDetector.py +50 -0
  70. nedo_vision_worker/util/SystemMonitor.py +299 -0
  71. nedo_vision_worker/util/VideoProbeUtil.py +120 -0
  72. nedo_vision_worker/util/__init__.py +1 -0
  73. nedo_vision_worker/worker/CoreActionWorker.py +125 -0
  74. nedo_vision_worker/worker/DataSenderWorker.py +168 -0
  75. nedo_vision_worker/worker/DataSyncWorker.py +143 -0
  76. nedo_vision_worker/worker/DatasetFrameSender.py +208 -0
  77. nedo_vision_worker/worker/DatasetFrameWorker.py +412 -0
  78. nedo_vision_worker/worker/PPEDetectionManager.py +86 -0
  79. nedo_vision_worker/worker/PipelineActionWorker.py +129 -0
  80. nedo_vision_worker/worker/PipelineImageWorker.py +116 -0
  81. nedo_vision_worker/worker/RabbitMQListener.py +170 -0
  82. nedo_vision_worker/worker/RestrictedAreaManager.py +85 -0
  83. nedo_vision_worker/worker/SystemUsageManager.py +111 -0
  84. nedo_vision_worker/worker/VideoStreamWorker.py +139 -0
  85. nedo_vision_worker/worker/WorkerManager.py +155 -0
  86. nedo_vision_worker/worker/__init__.py +1 -0
  87. nedo_vision_worker/worker_service.py +264 -0
  88. nedo_vision_worker-1.0.0.dist-info/METADATA +563 -0
  89. nedo_vision_worker-1.0.0.dist-info/RECORD +92 -0
  90. nedo_vision_worker-1.0.0.dist-info/WHEEL +5 -0
  91. nedo_vision_worker-1.0.0.dist-info/entry_points.txt +2 -0
  92. nedo_vision_worker-1.0.0.dist-info/top_level.txt +1 -0
nedo_vision_worker/util/HardwareID.py
@@ -0,0 +1,104 @@
+ import os
+ import platform
+ import subprocess
+ import uuid
+
+ class HardwareID:
+     @staticmethod
+     def get_unique_id():
+         """Returns a unique hardware identifier based on the OS."""
+         system = platform.system().lower()
+         try:
+             if system == "windows":
+                 return HardwareID._get_windows_uuid()
+             elif system == "darwin":
+                 return HardwareID._get_mac_uuid()
+             elif system == "linux":
+                 return HardwareID._get_linux_uuid()
+             else:
+                 return HardwareID._get_fallback_uuid()
+         except Exception as e:
+             return f"Error: {str(e)}"
+
+     @staticmethod
+     def _get_windows_uuid():
+         """Fetches the hardware UUID on Windows using WMIC, with registry and PowerShell fallbacks."""
+         try:
+             # First attempt: Use wmic
+             output = subprocess.check_output("wmic csproduct get UUID", shell=True)
+             uuid_str = output.decode().split("\n")[1].strip()
+             if uuid_str:
+                 return uuid_str
+             else:
+                 # Fallback: Machine GUID from Registry
+                 output = subprocess.check_output("reg query HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Cryptography /v MachineGuid", shell=True)
+                 return HardwareID.convert_to_uuid(output.decode().split()[-1])
+         except subprocess.CalledProcessError:
+             try:
+                 # Fallback attempt: Use PowerShell if wmic fails
+                 output = subprocess.check_output(['powershell', 'Get-WmiObject -Class Win32_ComputerSystemProduct | Select-Object -ExpandProperty UUID'], shell=True, text=True)
+                 return output.strip()  # Clean up the result
+             except subprocess.CalledProcessError:
+                 # Both methods failed, return fallback UUID
+                 return HardwareID._get_fallback_uuid()
+
+     @staticmethod
+     def _get_mac_uuid():
+         """Fetches the hardware UUID from macOS using ioreg."""
+         try:
+             output = subprocess.check_output("ioreg -rd1 -c IOPlatformExpertDevice | awk '/IOPlatformUUID/ { print $3; }'", shell=True)
+             return output.decode().strip().replace('"', '')
+         except Exception:
+             return HardwareID._get_fallback_uuid()
+
+     @staticmethod
+     def _get_linux_uuid():
+         """Fetches the hardware UUID from Linux or NVIDIA Jetson."""
+         try:
+             # Check for Jetson Serial Number
+             if os.path.exists("/sys/devices/soc0/serial_number"):
+                 with open("/sys/devices/soc0/serial_number", "r") as f:
+                     return HardwareID.convert_to_uuid(f.read().strip())
+             elif os.path.exists("/proc/device-tree/serial-number"):
+                 with open("/proc/device-tree/serial-number", "r") as f:
+                     return HardwareID.convert_to_uuid(f.read().strip())
+
+             # Standard Linux machine-id
+             if os.path.exists("/etc/machine-id"):
+                 with open("/etc/machine-id", "r") as f:
+                     return HardwareID.convert_to_uuid(f.read().strip())
+             elif os.path.exists("/var/lib/dbus/machine-id"):
+                 with open("/var/lib/dbus/machine-id", "r") as f:
+                     return HardwareID.convert_to_uuid(f.read().strip())
+
+             # Fallback to DMI Product UUID
+             output = subprocess.check_output("cat /sys/class/dmi/id/product_uuid", shell=True)
+             return HardwareID.convert_to_uuid(output.decode().strip())
+         except Exception:
+             return HardwareID._get_fallback_uuid()
+
+     @staticmethod
+     def _get_fallback_uuid():
+         """Fallback method based on the MAC address (uuid.getnode())."""
+         return str(uuid.getnode())
+
+     @staticmethod
+     def convert_to_uuid(hardware_id: str) -> str:
+         """
+         Converts a hardware ID to a valid UUID format.
+
+         Args:
+             hardware_id (str): The raw hardware ID string.
+
+         Returns:
+             str: A valid UUID string.
+         """
+         # Remove null characters, dashes, and surrounding whitespace
+         cleaned_id = hardware_id.strip().replace("\x00", "").replace("-", "")
+
+         # Ensure the length is 32 characters (UUID hex format)
+         hex_id = cleaned_id.ljust(32, "0")[:32]  # Pad with zeros if needed
+
+         # Convert to UUID format
+         return str(uuid.UUID(hex_id))
+
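For orientation, a minimal usage sketch of the class above (not part of the published diff; the import path follows the file list, and the printed label is illustrative):

    from nedo_vision_worker.util.HardwareID import HardwareID

    # Resolve a stable identifier for this machine (WMIC/ioreg/machine-id, MAC-based fallback).
    device_id = HardwareID.get_unique_id()
    print(f"Hardware ID: {device_id}")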
nedo_vision_worker/util/ImageUploader.py
@@ -0,0 +1,92 @@
+ import os
+ import time
+ import json
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+ class ImageUploader:
+     def __init__(self, image_client, device_id, image_dir="images"):
+         """
+         Initialize the ImageUploader.
+
+         Args:
+             image_client: The ImageUploadClient instance.
+             device_id (str): The unique device ID.
+             image_dir (str): Directory containing images to upload.
+         """
+         self.image_client = image_client
+         self.device_id = device_id
+         self.image_dir = image_dir
+
+     def check_and_upload_images(self):
+         """
+         Check the last uploaded image date and upload new images.
+         """
+         try:
+             response = self.image_client.get_last_uploaded_date(device_id=self.device_id)
+
+             # Ensure response is a dictionary
+             if not response or not isinstance(response, dict):
+                 logger.error("🚨 [APP] Invalid response from server.")
+                 return {"success": False, "message": "Invalid response from server."}
+
+             if not response.get("success"):
+                 error_message = response.get("message", "Unknown error")
+                 logger.error(f"⚠️ [APP] Failed to get last uploaded image date: {error_message}")
+                 return {"success": False, "message": error_message}
+
+             last_uploaded_date = response.get("last_uploaded_date", "1970-01-01T00:00:00")
+             # logger.info(f"📸 [APP] Last uploaded date: {last_uploaded_date}")
+
+             self._upload_new_images(last_uploaded_date)
+             return response
+
+         except Exception as e:
+             logger.error("🚨 [APP] Unexpected error while checking/uploading images.", exc_info=True)
+             return {"success": False, "message": str(e)}
+
+     def _upload_new_images(self, last_uploaded_date):
+         """
+         Upload images newer than the last uploaded date and delete them after successful upload.
+
+         Args:
+             last_uploaded_date (str): The last uploaded image date.
+         """
+         try:
+             images_uploaded = 0
+             for root, _, files in os.walk(self.image_dir):
+                 for file in sorted(files):
+                     file_path = os.path.join(root, file)
+                     file_mod_time = time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime(os.path.getmtime(file_path)))
+
+                     if file_mod_time > last_uploaded_date:
+                         metadata = json.dumps({"file_name": file})
+                         response = self.image_client.upload_image(self.device_id, metadata, file_path)
+
+                         if response and response.get("success"):
+                             logger.info(f"✅ [APP] Image '{file}' uploaded successfully: {response.get('message')}")
+                             self._delete_file(file_path)
+                             images_uploaded += 1
+                         else:
+                             error_message = response.get("message", "Unknown error") if response else "No response from server"
+                             logger.error(f"❌ [APP] Failed to upload image '{file}': {error_message}")
+
+             if images_uploaded > 0:
+                 logger.info(f"📊 [APP] Total images uploaded: {images_uploaded}")
+
+         except Exception as e:
+             logger.error("🚨 [APP] Unexpected error during image upload process.", exc_info=True)
+
+     def _delete_file(self, file_path):
+         """
+         Delete the specified file.
+
+         Args:
+             file_path (str): Path to the file to delete.
+         """
+         try:
+             os.remove(file_path)
+             logger.info(f"🗑️ [APP] File deleted: {file_path}")
+         except Exception as e:
+             logger.error(f"⚠️ [APP] Failed to delete file '{file_path}': {e}")
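A minimal usage sketch (not part of the diff). It assumes an already-constructed image client exposing the get_last_uploaded_date() and upload_image() methods called above (the docstring names ImageUploadClient); image_client and the device ID below are placeholders:

    from nedo_vision_worker.util.ImageUploader import ImageUploader

    uploader = ImageUploader(image_client, device_id="my-device", image_dir="images")
    result = uploader.check_and_upload_images()  # uploads files newer than the server's last date
    if not result.get("success", False):
        print(result.get("message"))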
nedo_vision_worker/util/Networking.py
@@ -0,0 +1,94 @@
+ import requests
+ import socket
+ import logging
+ import time
+ import grpc
+ from ..protos.VisionWorkerService_pb2_grpc import HealthCheckServiceStub
+ from ..protos.VisionWorkerService_pb2 import HealthCheckRequest
+
+
+
+ class Networking:
+
+     @staticmethod
+     def check_grpc_latency(server_host: str, server_port: int = 50051) -> float:
+         try:
+             channel = grpc.insecure_channel(f"{server_host}:{server_port}")
+             stub = HealthCheckServiceStub(channel)
+             request = HealthCheckRequest()
+
+             start_time = time.time()
+             response = stub.HealthCheck(request)
+             end_time = time.time()
+
+             latency = (end_time - start_time) * 1000
+             return latency
+
+         except grpc.RpcError as e:
+             print(f"gRPC error: {e}")
+             return -1
+
+     @staticmethod
+     def check_latency(server_url: str) -> float:
+         """
+         Measures the latency to the specified server URL.
+
+         Args:
+             server_url (str): The URL of the server to check latency for.
+
+         Returns:
+             float: The latency in milliseconds.
+         """
+         import time
+
+         try:
+             start_time = time.time()  # Record the start time
+             response = requests.get(server_url)  # Send the GET request
+             end_time = time.time()  # Record the end time
+
+             if response.status_code == 200:
+                 latency = (end_time - start_time) * 1000  # Convert to milliseconds
+                 return latency
+             else:
+                 raise Exception(f"Server responded with status code: {response.status_code}")
+         except requests.exceptions.RequestException as e:
+             raise Exception(f"Error occurred while checking latency: {e}")
+
+     @staticmethod
+     def get_public_ip() -> str:
+         """
+         Gets the current public IP address.
+
+         Returns:
+             str: The public IP address as a string.
+         """
+         try:
+             response = requests.get("https://api.ipify.org?format=json")
+             if response.status_code == 200:
+                 return response.json().get("ip", "Unable to retrieve IP address")
+             else:
+                 raise Exception(f"Failed to get public IP. Status code: {response.status_code}")
+         except requests.exceptions.RequestException as e:
+             raise Exception(f"Error occurred while retrieving public IP: {e}")
+
+     @staticmethod
+     def get_local_ip() -> str:
+         """
+         Gets the local IP address assigned to the system's network interface.
+
+         Returns:
+             str: The local (non-loopback) IP address, or "127.0.0.1" if it cannot be determined.
+         """
+         try:
+             # Create a temporary socket to determine the local IP
+             with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
+                 # Connect to a public IP (Google's DNS server) on port 80
+                 s.connect(("8.8.8.8", 80))
+                 local_ip = s.getsockname()[0]  # Retrieve the local IP address
+                 return local_ip
+         except OSError as e:
+             # Handle network unreachable or other OS-related errors
+             logging.warning(f"Could not determine local IP address: {e}")
+             return "127.0.0.1"
+
+
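A short usage sketch of these helpers (not part of the diff; the host and URL below are placeholders):

    from nedo_vision_worker.util.Networking import Networking

    grpc_ms = Networking.check_grpc_latency("192.168.1.10", 50051)   # -1 if the gRPC call fails
    http_ms = Networking.check_latency("http://192.168.1.10:8080")   # raises on HTTP failure
    print(Networking.get_local_ip(), Networking.get_public_ip(), grpc_ms, http_ms)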
nedo_vision_worker/util/PlatformDetector.py
@@ -0,0 +1,50 @@
+ import os
+ import platform
+
+ class PlatformDetector:
+     """
+     A class to detect whether the application is running on a Jetson device or a PC.
+     """
+
+     @staticmethod
+     def is_jetson():
+         """
+         Determines if the platform is an NVIDIA Jetson device.
+
+         Returns:
+             bool: True if running on a Jetson device, False otherwise.
+         """
+         try:
+             # Check for Jetson-specific device tree file
+             if os.path.exists("/proc/device-tree/model"):
+                 with open("/proc/device-tree/model", "r") as f:
+                     model = f.read().strip()
+                     if "NVIDIA Jetson" in model:
+                         return True
+
+             # Check for Jetson-specific libraries
+             jetson_libraries = ["/usr/lib/aarch64-linux-gnu/tegra"]
+             if any(os.path.exists(lib) for lib in jetson_libraries):
+                 return True
+
+             # Check architecture (note: this also matches other ARM64 Linux devices)
+             arch = platform.machine()
+             if arch == "aarch64":  # Jetson typically runs on ARM64 (aarch64)
+                 return True
+
+         except Exception as e:
+             print(f"Error detecting Jetson platform: {e}")
+
+         return False
+
+     @staticmethod
+     def get_platform_type():
+         """
+         Gets the platform type as a string.
+
+         Returns:
+             str: "jetson" if running on an NVIDIA Jetson device, "pc" otherwise.
+         """
+         if PlatformDetector.is_jetson():
+             return "jetson"
+         return "pc"
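A minimal usage sketch (not part of the diff):

    from nedo_vision_worker.util.PlatformDetector import PlatformDetector

    # Returns "jetson" or "pc"; callers can branch monitoring or encoding choices on it.
    if PlatformDetector.get_platform_type() == "jetson":
        print("Running on Jetson hardware")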
nedo_vision_worker/util/SystemMonitor.py
@@ -0,0 +1,299 @@
+ import platform
+ import psutil
+ import os
+ import re
+ import subprocess
+ from pynvml import (
+     nvmlInit,
+     nvmlDeviceGetHandleByIndex,
+     nvmlDeviceGetUtilizationRates,
+     nvmlDeviceGetCount,
+     nvmlDeviceGetMemoryInfo,
+     nvmlDeviceGetTemperature,
+     nvmlShutdown,
+     NVML_TEMPERATURE_GPU,
+ )
+
+
+ class SystemMonitor:
+     def __init__(self):
+         """
+         Initialize the System Monitor.
+         """
+         try:
+             nvmlInit()  # Initialize NVIDIA Management Library
+             self.gpu_available = True
+             self.gpu_count = nvmlDeviceGetCount()  # Get the number of GPUs
+         except Exception:
+             self.gpu_available = False
+             self.gpu_count = 0
+
+         # Detect platform (Windows, Linux, macOS, Jetson, etc.)
+         self.system = platform.system().lower()
+         self.is_jetson = self._detect_jetson()
+
+         # Jetson always has an integrated GPU
+         if self.is_jetson:
+             self.gpu_available = True
+             self.gpu_count = 1
+
+
+     def get_cpu_usage(self):
+         """
+         Get CPU usage percentage.
+         """
+         return psutil.cpu_percent(interval=1)
+
+     def get_cpu_temperature(self):
+         """
+         Get CPU temperature (if supported).
+         """
+         try:
+             if self.system == "windows":
+                 try:
+                     import wmi
+                     w = wmi.WMI(namespace="root\\OpenHardwareMonitor")  # Use OpenHardwareMonitor
+                     sensors = w.Sensor()
+                     cpu_temps = [s.Value for s in sensors if s.SensorType == "Temperature" and "CPU" in s.Name]
+                     return cpu_temps[0]
+                 except Exception:
+                     return 0
+
+             elif self.is_jetson:
+                 # For NVIDIA Jetson, read CPU temperature from thermal zones
+                 with open("/sys/class/thermal/thermal_zone1/temp", "r") as f:
+                     return float(f.read().strip()) / 1000  # Convert millidegree to Celsius
+             elif self.system == "darwin":  # macOS
+                 try:
+                     temp = subprocess.check_output(["osx-cpu-temp"], text=True).strip()
+                     return float(temp.replace("°C", ""))
+                 except FileNotFoundError:
+                     return {"error": "Install 'osx-cpu-temp' for macOS temperature monitoring"}
+                 except ValueError:
+                     return {"error": "Invalid temperature value retrieved from osx-cpu-temp"}
+             else:  # Linux
+                 sensors = psutil.sensors_temperatures()
+                 if "coretemp" in sensors:
+                     core_temps = sensors["coretemp"]
+                     return [temp.current for temp in core_temps if hasattr(temp, "current")]
+                 elif "cpu-thermal" in sensors:
+                     return [sensors["cpu-thermal"][0].current]
+                 else:
+                     return {"error": "CPU temperature sensor not found"}
+         except Exception as e:
+             return {"error": str(e)}
+
+     def get_ram_usage(self):
+         """
+         Get RAM usage details (used memory is adjusted to exclude cache on macOS).
+         """
+         memory = psutil.virtual_memory()
+         cached_memory = memory.cached if hasattr(memory, "cached") else 0  # Check if cached memory is available
+
+         # Adjust "used" to exclude cached memory on macOS
+         adjusted_used = memory.used - cached_memory if self.system == "darwin" else memory.used
+         adjusted_free = memory.total - adjusted_used if self.system == "darwin" else memory.available
+
+         return {
+             "total": memory.total,
+             "used": adjusted_used,
+             "free": adjusted_free,
+             "percent": round((adjusted_used / memory.total) * 100, 2),
+         }
+
+     def get_gpu_usage(self):
+         """
+         Get usage details for all available GPUs.
+         """
+         if self.is_jetson:
+             return self.get_jetson_gpu_usage()  # Use Jetson-specific method
+
+         if not self.gpu_available:
+             return {"error": "GPU monitoring is not supported on this system."}
+
+         gpu_data = []
+         try:
+             for i in range(self.gpu_count):
+                 handle = nvmlDeviceGetHandleByIndex(i)
+                 utilization = nvmlDeviceGetUtilizationRates(handle)
+                 memory = nvmlDeviceGetMemoryInfo(handle)
+                 temperature = nvmlDeviceGetTemperature(handle, NVML_TEMPERATURE_GPU)
+
+                 gpu_data.append({
+                     "gpu_index": i,
+                     "gpu_usage_percent": utilization.gpu,
+                     "memory_usage_percent": (memory.used / memory.total) * 100 if memory.total > 0 else 0,
+                     "temperature_celsius": temperature,
+                     "total_memory": memory.total,
+                     "used_memory": memory.used,
+                     "free_memory": memory.free,
+                 })
+         except Exception as e:
+             return {"error": str(e)}
+
+         return gpu_data
+
+     def get_system_usage(self):
+         """
+         Get combined system usage (CPU, RAM, GPU, and temperatures).
+         """
+         system_usage = {
+             "cpu": {
+                 "usage_percent": self.get_cpu_usage(),
+                 "temperature_celsius": self.get_cpu_temperature(),
+             },
+             "ram": self.get_ram_usage(),
+         }
+
+         if self.gpu_available:
+             system_usage["gpu"] = self.get_gpu_usage()
+         else:
+             system_usage["gpu"] = []
+
+         return system_usage
+
+     def print_usage(self):
+         """
+         Print the system usage details (CPU, RAM, GPU).
+         """
+         usage = self.get_system_usage()
+
+         # CPU Usage
+         print("CPU Usage:")
+         print(f" Usage Percent: {usage['cpu']['usage_percent']}%")
+         cpu_temp = usage['cpu']['temperature_celsius']
+         if isinstance(cpu_temp, dict) and "error" in cpu_temp:
+             print(f" Temperature: {cpu_temp['error']}")
+         else:
+             print(f" Temperature: {cpu_temp}°C")
+
+         # RAM Usage
+         print("RAM Usage:")
+         ram = usage['ram']
+         print(f" Total: {ram['total'] / (1024**3):.2f} GB")
+         print(f" Used: {ram['used'] / (1024**3):.2f} GB")
+         print(f" Free: {ram['free'] / (1024**3):.2f} GB")
+         print(f" Usage Percent: {ram['percent']}%")
+
+         # GPU Usage
+         gpu = usage.get("gpu", {})
+         if isinstance(gpu, list):
+             for gpu_info in gpu:
+                 print(f"GPU {gpu_info['gpu_index']} Usage:")
+                 print(f" GPU Utilization: {gpu_info['gpu_usage_percent']}%")
+                 print(f" Memory Usage Percent: {gpu_info['memory_usage_percent']:.2f}%")
+                 print(f" Temperature: {gpu_info['temperature_celsius']}°C")
+                 print(f" Total Memory: {gpu_info['total_memory'] / (1024**2):.2f} MB")
+                 print(f" Used Memory: {gpu_info['used_memory'] / (1024**2):.2f} MB")
+                 print(f" Free Memory: {gpu_info['free_memory'] / (1024**2):.2f} MB")
+         else:
+             print("GPU Usage:")
+             print(f" {gpu}")
+
+     def shutdown(self):
+         """
+         Shutdown the NVIDIA Management Library (for cleanup).
+         """
+         if self.gpu_available:
+             try:
+                 nvmlShutdown()
+             except Exception:
+                 pass  # Ignore shutdown errors
+
+     def _detect_jetson(self):
+         """
+         Check if the system is an NVIDIA Jetson by reading /proc/device-tree/compatible.
+         """
+         try:
+             if os.path.exists("/proc/device-tree/compatible"):
+                 with open("/proc/device-tree/compatible", "r") as f:
+                     compatible = f.read().lower()
+                     return "nvidia,jetson" in compatible or "tegra" in compatible
+         except Exception:
+             pass
+         return False
+
+     def _get_jetson_gpu_stats(self):
+         """
+         Get Jetson GPU utilization, memory usage, and temperature using 'tegrastats'.
+         """
+         try:
+             process = subprocess.Popen("tegrastats", shell=True, stdout=subprocess.PIPE, text=True)
+             output = process.stdout.readline().strip()  # Read only the first line
+             process.terminate()  # Stop the process
+
+             # Extract GPU Utilization from "GR3D_FREQ X%"
+             gpu_match = re.search(r"GR3D_FREQ\s+(\d+)%", output)
+             gpu_usage_percent = float(gpu_match.group(1)) if gpu_match else 0.0
+
+             # Extract GPU Memory Usage from "RAM X/YMB"
+             mem_match = re.search(r"RAM\s+(\d+)/(\d+)MB", output)
+             if mem_match:
+                 used_memory = int(mem_match.group(1)) * 1024 * 1024  # Convert MB to Bytes
+                 total_memory = int(mem_match.group(2)) * 1024 * 1024  # Convert MB to Bytes
+                 free_memory = total_memory - used_memory
+                 memory_usage_percent = (used_memory / total_memory) * 100 if total_memory > 0 else 0
+             else:
+                 total_memory = used_memory = free_memory = memory_usage_percent = 0
+
+             # Extract GPU Temperature from "GPU@XXC"
+             temp_match = re.search(r"GPU@(\d+\.?\d*)C", output)
+             temperature_celsius = float(temp_match.group(1)) if temp_match else 0.0
+
+             return {
+                 "gpu_index": 0,  # Jetson has only 1 integrated GPU
+                 "gpu_usage_percent": gpu_usage_percent,
+                 "memory_usage_percent": memory_usage_percent,
+                 "temperature_celsius": temperature_celsius,
+                 "total_memory": total_memory,
+                 "used_memory": used_memory,
+                 "free_memory": free_memory,
+             }
+         except Exception as e:
+             return {"error": str(e)}
+
+     def get_jetson_gpu_usage(self):
+         """
+         Get Jetson GPU usage percentage, memory usage, and temperature using 'tegrastats'.
+         """
+         try:
+             gpu_stats = self._get_jetson_gpu_stats()
+             return [gpu_stats]  # Return as a list
+         except Exception as e:
+             return [{"error": str(e)}]
+
+
+     def _get_jetson_memory_usage(self):
+         """
+         Get Jetson memory usage (shared RAM) using 'jtop' (if installed).
+         """
+         try:
+             from jtop import jtop
+             with jtop() as jetson:
+                 ram_info = jetson.memory["RAM"]
+                 return {
+                     "total": ram_info["tot"],  # Total RAM in KB
+                     "used": ram_info["used"],  # Used RAM in KB
+                     "free": ram_info["free"],  # Free RAM in KB
+                 }
+         except ImportError:
+             return {"error": "Install 'jtop' (from jetson-stats): pip install jetson-stats"}
+         except Exception as e:
+             return {"error": str(e)}
+
+     def _get_jetson_gpu_temperature(self):
+         """
+         Get GPU temperature from 'tegrastats'.
+         """
+         try:
+             process = subprocess.Popen("tegrastats", shell=True, stdout=subprocess.PIPE, text=True)
+             output = process.stdout.readline().strip()  # Read only one line
+             process.terminate()
+
+             # Extract GPU temperature
+             match = re.search(r"GPU@(\d+\.?\d*)C", output)
+             if match:
+                 return float(match.group(1))
+             return 0.0  # Default if temperature not found
+         except Exception as e:
+             return {"error": str(e)}
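Finally, a minimal usage sketch for the monitor (not part of the diff). NVML is optional at runtime; on machines without an NVIDIA GPU the class reports an empty GPU list:

    from nedo_vision_worker.util.SystemMonitor import SystemMonitor

    monitor = SystemMonitor()
    try:
        usage = monitor.get_system_usage()   # {"cpu": {...}, "ram": {...}, "gpu": [...]}
        monitor.print_usage()
    finally:
        monitor.shutdown()                   # releases NVML if it was initialized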