isar 1.20.2__py3-none-any.whl → 1.34.13__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- isar/apis/api.py +135 -86
- isar/apis/models/__init__.py +0 -1
- isar/apis/models/models.py +21 -11
- isar/apis/models/start_mission_definition.py +115 -170
- isar/apis/robot_control/robot_controller.py +41 -0
- isar/apis/schedule/scheduling_controller.py +123 -187
- isar/apis/security/authentication.py +5 -5
- isar/config/certs/ca-cert.pem +33 -31
- isar/config/keyvault/keyvault_service.py +4 -2
- isar/config/log.py +45 -40
- isar/config/logging.conf +16 -31
- isar/config/open_telemetry.py +102 -0
- isar/config/settings.py +74 -117
- isar/eventhandlers/eventhandler.py +123 -0
- isar/models/events.py +184 -0
- isar/models/status.py +22 -0
- isar/modules.py +117 -200
- isar/robot/robot.py +383 -0
- isar/robot/robot_battery.py +60 -0
- isar/robot/robot_monitor_mission.py +357 -0
- isar/robot/robot_pause_mission.py +74 -0
- isar/robot/robot_resume_mission.py +67 -0
- isar/robot/robot_start_mission.py +66 -0
- isar/robot/robot_status.py +61 -0
- isar/robot/robot_stop_mission.py +68 -0
- isar/robot/robot_upload_inspection.py +75 -0
- isar/script.py +58 -41
- isar/services/service_connections/mqtt/mqtt_client.py +47 -11
- isar/services/service_connections/mqtt/robot_heartbeat_publisher.py +5 -2
- isar/services/service_connections/mqtt/robot_info_publisher.py +3 -3
- isar/services/service_connections/persistent_memory.py +69 -0
- isar/services/utilities/mqtt_utilities.py +93 -0
- isar/services/utilities/robot_utilities.py +20 -0
- isar/services/utilities/scheduling_utilities.py +386 -100
- isar/state_machine/state_machine.py +242 -539
- isar/state_machine/states/__init__.py +0 -8
- isar/state_machine/states/await_next_mission.py +114 -0
- isar/state_machine/states/blocked_protective_stop.py +60 -0
- isar/state_machine/states/going_to_lockdown.py +95 -0
- isar/state_machine/states/going_to_recharging.py +92 -0
- isar/state_machine/states/home.py +115 -0
- isar/state_machine/states/intervention_needed.py +77 -0
- isar/state_machine/states/lockdown.py +38 -0
- isar/state_machine/states/maintenance.py +43 -0
- isar/state_machine/states/monitor.py +137 -247
- isar/state_machine/states/offline.py +51 -53
- isar/state_machine/states/paused.py +92 -23
- isar/state_machine/states/pausing.py +48 -0
- isar/state_machine/states/pausing_return_home.py +48 -0
- isar/state_machine/states/recharging.py +80 -0
- isar/state_machine/states/resuming.py +57 -0
- isar/state_machine/states/resuming_return_home.py +64 -0
- isar/state_machine/states/return_home_paused.py +109 -0
- isar/state_machine/states/returning_home.py +217 -0
- isar/state_machine/states/stopping.py +69 -0
- isar/state_machine/states/stopping_due_to_maintenance.py +61 -0
- isar/state_machine/states/stopping_go_to_lockdown.py +60 -0
- isar/state_machine/states/stopping_go_to_recharge.py +51 -0
- isar/state_machine/states/stopping_paused_mission.py +36 -0
- isar/state_machine/states/stopping_paused_return_home.py +59 -0
- isar/state_machine/states/stopping_return_home.py +59 -0
- isar/state_machine/states/unknown_status.py +74 -0
- isar/state_machine/states_enum.py +23 -5
- isar/state_machine/transitions/mission.py +225 -0
- isar/state_machine/transitions/return_home.py +108 -0
- isar/state_machine/transitions/robot_status.py +87 -0
- isar/state_machine/utils/common_event_handlers.py +138 -0
- isar/storage/blob_storage.py +70 -52
- isar/storage/local_storage.py +25 -12
- isar/storage/storage_interface.py +28 -7
- isar/storage/uploader.py +174 -55
- isar/storage/utilities.py +32 -29
- {isar-1.20.2.dist-info → isar-1.34.13.dist-info}/METADATA +119 -123
- isar-1.34.13.dist-info/RECORD +120 -0
- {isar-1.20.2.dist-info → isar-1.34.13.dist-info}/WHEEL +1 -1
- {isar-1.20.2.dist-info → isar-1.34.13.dist-info}/entry_points.txt +1 -0
- robot_interface/models/exceptions/robot_exceptions.py +91 -41
- robot_interface/models/inspection/__init__.py +0 -13
- robot_interface/models/inspection/inspection.py +42 -33
- robot_interface/models/mission/mission.py +14 -15
- robot_interface/models/mission/status.py +20 -26
- robot_interface/models/mission/task.py +154 -121
- robot_interface/models/robots/battery_state.py +6 -0
- robot_interface/models/robots/media.py +13 -0
- robot_interface/models/robots/robot_model.py +7 -7
- robot_interface/robot_interface.py +119 -84
- robot_interface/telemetry/mqtt_client.py +74 -12
- robot_interface/telemetry/payloads.py +91 -13
- robot_interface/utilities/json_service.py +7 -1
- isar/config/configuration_error.py +0 -2
- isar/config/keyvault/keyvault_error.py +0 -2
- isar/config/predefined_mission_definition/__init__.py +0 -0
- isar/config/predefined_mission_definition/default_exr.json +0 -51
- isar/config/predefined_mission_definition/default_mission.json +0 -91
- isar/config/predefined_mission_definition/default_turtlebot.json +0 -124
- isar/config/predefined_missions/__init__.py +0 -0
- isar/config/predefined_missions/default.json +0 -92
- isar/config/predefined_missions/default_turtlebot.json +0 -110
- isar/config/predefined_poses/__init__.py +0 -0
- isar/config/predefined_poses/predefined_poses.py +0 -616
- isar/config/settings.env +0 -25
- isar/mission_planner/__init__.py +0 -0
- isar/mission_planner/local_planner.py +0 -82
- isar/mission_planner/mission_planner_interface.py +0 -26
- isar/mission_planner/sequential_task_selector.py +0 -23
- isar/mission_planner/task_selector_interface.py +0 -31
- isar/models/communication/__init__.py +0 -0
- isar/models/communication/message.py +0 -12
- isar/models/communication/queues/__init__.py +0 -4
- isar/models/communication/queues/queue_io.py +0 -12
- isar/models/communication/queues/queue_timeout_error.py +0 -2
- isar/models/communication/queues/queues.py +0 -19
- isar/models/communication/queues/status_queue.py +0 -20
- isar/models/mission_metadata/__init__.py +0 -0
- isar/services/auth/__init__.py +0 -0
- isar/services/auth/azure_credentials.py +0 -14
- isar/services/readers/__init__.py +0 -0
- isar/services/readers/base_reader.py +0 -37
- isar/services/service_connections/request_handler.py +0 -153
- isar/services/service_connections/stid/__init__.py +0 -0
- isar/services/utilities/queue_utilities.py +0 -39
- isar/services/utilities/threaded_request.py +0 -68
- isar/state_machine/states/idle.py +0 -85
- isar/state_machine/states/initialize.py +0 -71
- isar/state_machine/states/initiate.py +0 -142
- isar/state_machine/states/off.py +0 -18
- isar/state_machine/states/stop.py +0 -95
- isar/storage/slimm_storage.py +0 -191
- isar-1.20.2.dist-info/RECORD +0 -116
- robot_interface/models/initialize/__init__.py +0 -1
- robot_interface/models/initialize/initialize_params.py +0 -9
- robot_interface/models/mission/step.py +0 -234
- {isar-1.20.2.dist-info → isar-1.34.13.dist-info/licenses}/LICENSE +0 -0
- {isar-1.20.2.dist-info → isar-1.34.13.dist-info}/top_level.txt +0 -0
isar/storage/blob_storage.py
CHANGED
@@ -1,83 +1,101 @@
 import logging
 from pathlib import Path
-from typing import Union
 
 from azure.core.exceptions import ResourceExistsError
-from azure.storage.blob import
-from injector import inject
+from azure.storage.blob import BlobServiceClient, ContainerClient
 
 from isar.config.keyvault.keyvault_service import Keyvault
 from isar.config.settings import settings
-from isar.storage.storage_interface import
+from isar.storage.storage_interface import (
+    BlobStoragePath,
+    StorageException,
+    StorageInterface,
+    StoragePaths,
+)
 from isar.storage.utilities import construct_metadata_file, construct_paths
-from robot_interface.models.inspection.inspection import
+from robot_interface.models.inspection.inspection import InspectionBlob
 from robot_interface.models.mission.mission import Mission
 
 
 class BlobStorage(StorageInterface):
-
-
-
-
-
-
-
-
-        self.container_name = container_name
-
-        self.blob_service_client = self._get_blob_service_client()
-        self.container_client = self._get_container_client(
-            blob_service_client=self.blob_service_client
+    def __init__(self, keyvault: Keyvault) -> None:
+        self.logger = logging.getLogger("uploader")
+
+        self.container_client_data = self._get_container_client(
+            keyvault, "AZURE-STORAGE-CONNECTION-STRING-DATA"
+        )
+        self.container_client_metadata = self._get_container_client(
+            keyvault, "AZURE-STORAGE-CONNECTION-STRING-METADATA"
         )
 
-
+    def _get_container_client(self, keyvault: Keyvault, secret_name: str):
+        storage_connection_string = keyvault.get_secret(secret_name).value
 
-
-
+        if storage_connection_string is None:
+            raise RuntimeError(f"{secret_name} from keyvault is None")
+
+        try:
+            blob_service_client = BlobServiceClient.from_connection_string(
+                storage_connection_string
+            )
+        except Exception as e:
+            self.logger.error("Unable to retrieve blob service client. Error: %s", e)
+            raise e
+
+        container_client = blob_service_client.get_container_client(
+            settings.BLOB_CONTAINER
+        )
+
+        if not container_client.exists():
+            raise RuntimeError(
+                "The configured blob container %s does not exist",
+                settings.BLOB_CONTAINER,
+            )
+        return container_client
+
+    def store(
+        self, inspection: InspectionBlob, mission: Mission
+    ) -> StoragePaths[BlobStoragePath]:
+        if inspection.data is None:
+            raise StorageException("Nothing to store. The inspection data is empty")
+
+        data_filename, metadata_filename = construct_paths(
             inspection=inspection, mission=mission
         )
 
         metadata_bytes: bytes = construct_metadata_file(
-            inspection=inspection, mission=mission, filename=
+            inspection=inspection, mission=mission, filename=data_filename.name
         )
 
-        self._upload_file(
-
+        data_path = self._upload_file(
+            filename=data_filename,
+            data=inspection.data,
+            container_client=self.container_client_data,
+        )
+        metadata_path = self._upload_file(
+            filename=metadata_filename,
+            data=metadata_bytes,
+            container_client=self.container_client_metadata,
+        )
+        return StoragePaths(data_path=data_path, metadata_path=metadata_path)
 
-    def _upload_file(
-
+    def _upload_file(
+        self, filename: Path, data: bytes, container_client: ContainerClient
+    ) -> BlobStoragePath:
+        blob_client = container_client.get_blob_client(filename.as_posix())
         try:
-
+            blob_client.upload_blob(data=data)
         except ResourceExistsError as e:
             self.logger.error(
-
+                "Blob %s already exists in container. Error: %s", filename.as_posix(), e
             )
             raise StorageException from e
         except Exception as e:
             self.logger.error("An unexpected error occurred while uploading blob")
             raise StorageException from e
 
-
-
-
-
-
-            }
-        return absolute_inspection_path
-
-    def _get_blob_service_client(self) -> BlobServiceClient:
-        try:
-            return BlobServiceClient.from_connection_string(
-                self.storage_connection_string
-            )
-        except Exception as e:
-            self.logger.error(f"Unable to retrieve blob service client. Error: {e}")
-            raise e
-
-    def _get_container_client(
-        self, blob_service_client: BlobServiceClient
-    ) -> ContainerClient:
-        return blob_service_client.get_container_client(self.container_name)
-
-    def _get_blob_client(self, path_to_blob: Path) -> BlobClient:
-        return self.container_client.get_blob_client(path_to_blob.as_posix())
+        return BlobStoragePath(
+            storage_account=settings.BLOB_STORAGE_ACCOUNT,
+            blob_container=settings.BLOB_CONTAINER,
+            blob_name=blob_client.blob_name,
+        )
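The rewritten BlobStorage resolves one container client per connection-string secret (data and metadata) and uploads through a blob client. For reference, a minimal stand-alone sketch of the azure-storage-blob calls used by the new _get_container_client/_upload_file pair; the connection string, container name and blob path below are placeholders, not values from this package.

# Sketch only: placeholders must be replaced with a real connection string,
# container name and blob path before this will run.
from azure.storage.blob import BlobServiceClient

connection_string = "<storage-account-connection-string>"  # placeholder
service_client = BlobServiceClient.from_connection_string(connection_string)
container_client = service_client.get_container_client("inspections")  # placeholder container

if not container_client.exists():
    raise RuntimeError("The configured blob container does not exist")

blob_client = container_client.get_blob_client("mission-1/inspection-1.jpg")  # placeholder blob name
blob_client.upload_blob(data=b"...")  # raises ResourceExistsError if the blob already exists
print(blob_client.blob_name)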
isar/storage/local_storage.py
CHANGED
@@ -2,9 +2,14 @@ import logging
 from pathlib import Path
 
 from isar.config.settings import settings
-from isar.storage.storage_interface import
+from isar.storage.storage_interface import (
+    LocalStoragePath,
+    StorageException,
+    StorageInterface,
+    StoragePaths,
+)
 from isar.storage.utilities import construct_metadata_file, construct_paths
-from robot_interface.models.inspection.inspection import
+from robot_interface.models.inspection.inspection import InspectionBlob
 from robot_interface.models.mission.mission import Mission
 
 
@@ -13,30 +18,35 @@ class LocalStorage(StorageInterface):
         self.root_folder: Path = Path(settings.LOCAL_STORAGE_PATH)
         self.logger = logging.getLogger("uploader")
 
-    def store(
-
+    def store(
+        self, inspection: InspectionBlob, mission: Mission
+    ) -> StoragePaths[LocalStoragePath]:
+        if inspection.data is None:
+            raise StorageException("Nothing to store. The inspection data is empty")
+
+        local_filename, local_metadata_filename = construct_paths(
             inspection=inspection, mission=mission
         )
 
-
-
+        data_path: Path = self.root_folder.joinpath(local_filename)
+        metadata_path: Path = self.root_folder.joinpath(local_metadata_filename)
 
-
+        data_path.parent.mkdir(parents=True, exist_ok=True)
 
         metadata_bytes: bytes = construct_metadata_file(
-            inspection=inspection, mission=mission, filename=
+            inspection=inspection, mission=mission, filename=local_filename.name
         )
         try:
             with (
-                open(
-                open(
+                open(data_path, "wb") as file,
+                open(metadata_path, "wb") as metadata_file,
             ):
                 file.write(inspection.data)
                 metadata_file.write(metadata_bytes)
         except IOError as e:
             self.logger.warning(
                 f"Failed open/write for one of the following files: \n"
-                f"{
+                f"{data_path}\n{metadata_path}"
             )
             raise StorageException from e
         except Exception as e:
@@ -44,4 +54,7 @@ class LocalStorage(StorageInterface):
                 "An unexpected error occurred while writing to local storage"
             )
             raise StorageException from e
-        return
+        return StoragePaths(
+            data_path=LocalStoragePath(file_path=data_path),
+            metadata_path=LocalStoragePath(file_path=metadata_path),
+        )
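The new store() joins the relative filenames onto the configured root folder and creates any missing parent directories before opening the files. A stand-alone sketch of that pattern, with illustrative paths rather than package settings:

# Sketch only: root folder and filename are illustrative stand-ins.
from pathlib import Path

root_folder = Path("./local_storage")  # stand-in for settings.LOCAL_STORAGE_PATH
data_path = root_folder.joinpath("mission-1/inspection-1.jpg")

data_path.parent.mkdir(parents=True, exist_ok=True)  # create nested folders if missing
with open(data_path, "wb") as file:
    file.write(b"...")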
isar/storage/storage_interface.py
CHANGED
@@ -1,25 +1,46 @@
 from abc import ABCMeta, abstractmethod
-from
+from pathlib import Path
+from typing import Generic, TypeVar
 
-from
+from pydantic import BaseModel
+
+from robot_interface.models.inspection.inspection import InspectionBlob
 from robot_interface.models.mission.mission import Mission
 
 
+class BlobStoragePath(BaseModel):
+    storage_account: str
+    blob_container: str
+    blob_name: str
+
+
+class LocalStoragePath(BaseModel):
+    file_path: Path
+
+
+TPath = TypeVar("TPath", BlobStoragePath, LocalStoragePath)
+
+
+class StoragePaths(BaseModel, Generic[TPath]):
+    data_path: TPath
+    metadata_path: TPath
+
+
 class StorageInterface(metaclass=ABCMeta):
     @abstractmethod
-    def store(self, inspection:
+    def store(self, inspection: InspectionBlob, mission: Mission) -> StoragePaths:
         """
         Parameters
         ----------
+        inspection : InspectionBlob
+            The inspection object to be stored.
         mission : Mission
             Mission the inspection is a part of.
-        inspection : Inspection
-            The inspection object to be stored.
 
         Returns
         ----------
-
-
+        StoragePaths
+            Paths to the data and metadata
 
         Raises
         ----------
isar/storage/uploader.py
CHANGED
@@ -1,33 +1,64 @@
 import json
 import logging
 from dataclasses import dataclass
-from datetime import
+from datetime import datetime, timedelta, timezone
 from queue import Empty, Queue
-from
-
-from injector import inject
+from threading import Event
+from typing import List
 
 from isar.config.settings import settings
-from isar.models.
-from isar.storage.storage_interface import
-
+from isar.models.events import Events
+from isar.storage.storage_interface import (
+    BlobStoragePath,
+    LocalStoragePath,
+    StorageException,
+    StorageInterface,
+    StoragePaths,
+)
+from robot_interface.models.inspection.inspection import (
+    Inspection,
+    InspectionBlob,
+    InspectionValue,
+)
 from robot_interface.models.mission.mission import Mission
 from robot_interface.telemetry.mqtt_client import MqttClientInterface
+from robot_interface.telemetry.payloads import (
+    InspectionResultPayload,
+    InspectionValuePayload,
+)
 from robot_interface.utilities.json_service import EnhancedJSONEncoder
 
 
+def has_empty_blob_storage_path(storage_paths: StoragePaths):
+    for path in (storage_paths.data_path, storage_paths.metadata_path):
+        for value in (path.storage_account, path.blob_container, path.blob_name):
+            if not (value and value.strip()):
+                return True
+    return False
+
+
 @dataclass
 class UploaderQueueItem:
     inspection: Inspection
     mission: Mission
+
+
+@dataclass
+class ValueItem(UploaderQueueItem):
+    inspection: InspectionValue
+
+
+@dataclass
+class BlobItem(UploaderQueueItem):
+    inspection: InspectionBlob
     storage_handler: StorageInterface
     _retry_count: int
-    _next_retry_time: datetime = datetime.now(
+    _next_retry_time: datetime = datetime.now(timezone.utc)
 
     def increment_retry(self, max_wait_time: int) -> None:
         self._retry_count += 1
         seconds_until_retry: int = min(2**self._retry_count, max_wait_time)
-        self._next_retry_time = datetime.now(
+        self._next_retry_time = datetime.now(timezone.utc) + timedelta(
             seconds=seconds_until_retry
         )
 
@@ -35,17 +66,18 @@ class UploaderQueueItem:
         return self._retry_count
 
     def is_ready_for_upload(self) -> bool:
-        return datetime.now(
+        return datetime.now(timezone.utc) >= self._next_retry_time
 
     def seconds_until_retry(self) -> int:
-        return max(
+        return max(
+            0, int((self._next_retry_time - datetime.now(timezone.utc)).total_seconds())
+        )
 
 
 class Uploader:
-    @inject
     def __init__(
         self,
-
+        events: Events,
         storage_handlers: List[StorageInterface],
         mqtt_publisher: MqttClientInterface,
         max_wait_time: int = settings.UPLOAD_FAILURE_MAX_WAIT,
@@ -55,8 +87,8 @@ class Uploader:
 
         Parameters
         ----------
-
-
+        events : Events
+            Events used for cross-thread communication.
         storage_handlers : List[StorageInterface]
             List of handlers for different upload options
         max_wait_time : float
@@ -64,7 +96,7 @@ class Uploader:
         max_retry_attempts : int
             Maximum attempts to retry an upload when it fails
         """
-        self.upload_queue: Queue =
+        self.upload_queue: Queue = events.upload_queue
         self.storage_handlers: List[StorageInterface] = storage_handlers
         self.mqtt_publisher = mqtt_publisher
 
@@ -72,11 +104,16 @@ class Uploader:
         self.max_retry_attempts = max_retry_attempts
         self._internal_upload_queue: List[UploaderQueueItem] = []
 
+        self.signal_thread_quitting: Event = Event()
+
         self.logger = logging.getLogger("uploader")
 
+    def stop(self) -> None:
+        self.signal_thread_quitting.set()
+
     def run(self) -> None:
         self.logger.info("Started uploader")
-        while
+        while not self.signal_thread_quitting.wait(0):
             inspection: Inspection
             mission: Mission
             try:
@@ -91,76 +128,158 @@
                 )
                 continue
 
-
-
-                new_item
-                    inspection, mission, storage_handler, _retry_count=-1
-                )
+                new_item: UploaderQueueItem
+                if isinstance(inspection, InspectionValue):
+                    new_item = ValueItem(inspection, mission)
                     self._internal_upload_queue.append(new_item)
+
+                elif isinstance(inspection, InspectionBlob):
+                    # If new item from thread queue, add one per handler to internal queue:
+                    for storage_handler in self.storage_handlers:
+                        new_item = BlobItem(
+                            inspection, mission, storage_handler, _retry_count=-1
+                        )
+                        self._internal_upload_queue.append(new_item)
+                else:
+                    self.logger.warning(
+                        f"Unable to add UploaderQueueItem as its type {type(inspection).__name__} is unsupported"
+                    )
             except Empty:
                 continue
+            except Exception as e:
+                self.logger.error(f"Unexpected error in uploader thread: {e}")
+                continue
 
-    def _upload(self,
-
+    def _upload(self, item: BlobItem) -> StoragePaths:
+        inspection_paths: StoragePaths
         try:
-
-                inspection=
+            inspection_paths = item.storage_handler.store(
+                inspection=item.inspection, mission=item.mission
             )
             self.logger.info(
-                f"Storage handler: {type(
-                f"uploaded inspection {str(
+                f"Storage handler: {type(item.storage_handler).__name__} "
+                f"uploaded inspection {str(item.inspection.id)[:8]}"
             )
-            self._internal_upload_queue.remove(
-        except StorageException:
-            if
-
+            self._internal_upload_queue.remove(item)
+        except StorageException as e:
+            if item.get_retry_count() < self.max_retry_attempts:
+                item.increment_retry(self.max_wait_time)
                 self.logger.warning(
-                    f"Storage handler: {type(
+                    f"Storage handler: {type(item.storage_handler).__name__} "
                     f"failed to upload inspection: "
-                    f"{str(
-                    f"Retrying in {
+                    f"{str(item.inspection.id)[:8]}. "
+                    f"Retrying in {item.seconds_until_retry()}s."
                 )
             else:
-                self._internal_upload_queue.remove(upload_item)
                 self.logger.error(
-                    f"Storage handler: {type(
+                    f"Storage handler: {type(item.storage_handler).__name__} "
                     f"exceeded max retries to upload inspection: "
-                    f"{str(
+                    f"{str(item.inspection.id)[:8]}. Aborting upload."
                 )
-
+                self._internal_upload_queue.remove(item)
+                raise e
+        return inspection_paths
 
     def _process_upload_queue(self) -> None:
+        def should_upload(_item):
+            if isinstance(_item, ValueItem):
+                return True
+            if _item.is_ready_for_upload():
+                return True
+            return False
+
         ready_items: List[UploaderQueueItem] = [
-            item for item in self._internal_upload_queue if item
+            item for item in self._internal_upload_queue if should_upload(item)
        ]
         for item in ready_items:
-
-
-
+            if isinstance(item, ValueItem):
+                self._publish_inspection_value(item.inspection)
+                self.logger.info(
+                    f"Published value for inspection {str(item.inspection.id)[:8]}"
+                )
+                self._internal_upload_queue.remove(item)
+            elif isinstance(item, BlobItem):
+                try:
+                    inspection_paths = self._upload(item)
+                    if isinstance(inspection_paths.data_path, LocalStoragePath):
+                        self.logger.info("Skipping publishing when using local storage")
+                    elif isinstance(
+                        inspection_paths.data_path, BlobStoragePath
+                    ) and has_empty_blob_storage_path(inspection_paths):
+                        self.logger.warning(
+                            "Skipping publishing: Blob storage paths are empty for inspection %s",
+                            str(item.inspection.id)[:8],
+                        )
+                    else:
+                        self._publish_inspection_result(
+                            inspection=item.inspection,
+                            inspection_paths=inspection_paths,
+                        )
+                except StorageException:
+                    pass
+            else:
+                self.logger.warning(
+                    f"Unable to process upload item as its type {type(item).__name__} is not supported"
+                )
+
+    def _publish_inspection_value(self, inspection: InspectionValue) -> None:
+        if not self.mqtt_publisher:
+            return
+
+        if not isinstance(inspection, InspectionValue):
+            logging.warning(
+                f"Excpected type InspectionValue but got {type(inspection).__name__} instead"
             )
+            return
+
+        payload: InspectionValuePayload = InspectionValuePayload(
+            isar_id=settings.ISAR_ID,
+            robot_name=settings.ROBOT_NAME,
+            inspection_id=inspection.id,
+            installation_code=settings.PLANT_SHORT_NAME,
+            tag_id=inspection.metadata.tag_id,
+            inspection_type=type(inspection).__name__,
+            inspection_description=inspection.metadata.inspection_description,
+            value=inspection.value,
+            unit=inspection.unit,
+            x=inspection.metadata.robot_pose.position.x,
+            y=inspection.metadata.robot_pose.position.y,
+            z=inspection.metadata.robot_pose.position.z,
+            timestamp=inspection.metadata.start_time,
+        )
+        self.mqtt_publisher.publish(
+            topic=settings.TOPIC_ISAR_INSPECTION_VALUE,
+            payload=json.dumps(payload, cls=EnhancedJSONEncoder),
+            qos=1,
+            retain=True,
+        )
 
     def _publish_inspection_result(
-        self,
+        self,
+        inspection: InspectionBlob,
+        inspection_paths: StoragePaths[BlobStoragePath],
     ) -> None:
         """Publishes the reference of the inspection result to the MQTT Broker
         along with the analysis type
         """
         if not self.mqtt_publisher:
             return
-
-
-
-
-
-
-
-
-
-
+
+        payload: InspectionResultPayload = InspectionResultPayload(
+            isar_id=settings.ISAR_ID,
+            robot_name=settings.ROBOT_NAME,
+            inspection_id=inspection.id,
+            blob_storage_data_path=inspection_paths.data_path,
+            blob_storage_metadata_path=inspection_paths.metadata_path,
+            installation_code=settings.PLANT_SHORT_NAME,
+            tag_id=inspection.metadata.tag_id,
+            inspection_type=type(inspection).__name__,
+            inspection_description=inspection.metadata.inspection_description,
+            timestamp=inspection.metadata.start_time,
         )
         self.mqtt_publisher.publish(
             topic=settings.TOPIC_ISAR_INSPECTION_RESULT,
-            payload=payload,
+            payload=json.dumps(payload, cls=EnhancedJSONEncoder),
             qos=1,
             retain=True,
         )