digitalkin 0.3.2.dev2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- base_server/__init__.py +1 -0
- base_server/mock/__init__.py +5 -0
- base_server/mock/mock_pb2.py +39 -0
- base_server/mock/mock_pb2_grpc.py +102 -0
- base_server/server_async_insecure.py +125 -0
- base_server/server_async_secure.py +143 -0
- base_server/server_sync_insecure.py +103 -0
- base_server/server_sync_secure.py +122 -0
- digitalkin/__init__.py +8 -0
- digitalkin/__version__.py +8 -0
- digitalkin/core/__init__.py +1 -0
- digitalkin/core/common/__init__.py +9 -0
- digitalkin/core/common/factories.py +156 -0
- digitalkin/core/job_manager/__init__.py +1 -0
- digitalkin/core/job_manager/base_job_manager.py +288 -0
- digitalkin/core/job_manager/single_job_manager.py +354 -0
- digitalkin/core/job_manager/taskiq_broker.py +311 -0
- digitalkin/core/job_manager/taskiq_job_manager.py +541 -0
- digitalkin/core/task_manager/__init__.py +1 -0
- digitalkin/core/task_manager/base_task_manager.py +539 -0
- digitalkin/core/task_manager/local_task_manager.py +108 -0
- digitalkin/core/task_manager/remote_task_manager.py +87 -0
- digitalkin/core/task_manager/surrealdb_repository.py +266 -0
- digitalkin/core/task_manager/task_executor.py +249 -0
- digitalkin/core/task_manager/task_session.py +406 -0
- digitalkin/grpc_servers/__init__.py +1 -0
- digitalkin/grpc_servers/_base_server.py +486 -0
- digitalkin/grpc_servers/module_server.py +208 -0
- digitalkin/grpc_servers/module_servicer.py +516 -0
- digitalkin/grpc_servers/utils/__init__.py +1 -0
- digitalkin/grpc_servers/utils/exceptions.py +29 -0
- digitalkin/grpc_servers/utils/grpc_client_wrapper.py +88 -0
- digitalkin/grpc_servers/utils/grpc_error_handler.py +53 -0
- digitalkin/grpc_servers/utils/utility_schema_extender.py +97 -0
- digitalkin/logger.py +157 -0
- digitalkin/mixins/__init__.py +19 -0
- digitalkin/mixins/base_mixin.py +10 -0
- digitalkin/mixins/callback_mixin.py +24 -0
- digitalkin/mixins/chat_history_mixin.py +110 -0
- digitalkin/mixins/cost_mixin.py +76 -0
- digitalkin/mixins/file_history_mixin.py +93 -0
- digitalkin/mixins/filesystem_mixin.py +46 -0
- digitalkin/mixins/logger_mixin.py +51 -0
- digitalkin/mixins/storage_mixin.py +79 -0
- digitalkin/models/__init__.py +8 -0
- digitalkin/models/core/__init__.py +1 -0
- digitalkin/models/core/job_manager_models.py +36 -0
- digitalkin/models/core/task_monitor.py +70 -0
- digitalkin/models/grpc_servers/__init__.py +1 -0
- digitalkin/models/grpc_servers/models.py +275 -0
- digitalkin/models/grpc_servers/types.py +24 -0
- digitalkin/models/module/__init__.py +25 -0
- digitalkin/models/module/module.py +40 -0
- digitalkin/models/module/module_context.py +149 -0
- digitalkin/models/module/module_types.py +393 -0
- digitalkin/models/module/utility.py +146 -0
- digitalkin/models/services/__init__.py +10 -0
- digitalkin/models/services/cost.py +54 -0
- digitalkin/models/services/registry.py +42 -0
- digitalkin/models/services/storage.py +44 -0
- digitalkin/modules/__init__.py +11 -0
- digitalkin/modules/_base_module.py +517 -0
- digitalkin/modules/archetype_module.py +23 -0
- digitalkin/modules/tool_module.py +23 -0
- digitalkin/modules/trigger_handler.py +48 -0
- digitalkin/modules/triggers/__init__.py +12 -0
- digitalkin/modules/triggers/healthcheck_ping_trigger.py +45 -0
- digitalkin/modules/triggers/healthcheck_services_trigger.py +63 -0
- digitalkin/modules/triggers/healthcheck_status_trigger.py +52 -0
- digitalkin/py.typed +0 -0
- digitalkin/services/__init__.py +30 -0
- digitalkin/services/agent/__init__.py +6 -0
- digitalkin/services/agent/agent_strategy.py +19 -0
- digitalkin/services/agent/default_agent.py +13 -0
- digitalkin/services/base_strategy.py +22 -0
- digitalkin/services/communication/__init__.py +7 -0
- digitalkin/services/communication/communication_strategy.py +76 -0
- digitalkin/services/communication/default_communication.py +101 -0
- digitalkin/services/communication/grpc_communication.py +223 -0
- digitalkin/services/cost/__init__.py +14 -0
- digitalkin/services/cost/cost_strategy.py +100 -0
- digitalkin/services/cost/default_cost.py +114 -0
- digitalkin/services/cost/grpc_cost.py +138 -0
- digitalkin/services/filesystem/__init__.py +7 -0
- digitalkin/services/filesystem/default_filesystem.py +417 -0
- digitalkin/services/filesystem/filesystem_strategy.py +252 -0
- digitalkin/services/filesystem/grpc_filesystem.py +317 -0
- digitalkin/services/identity/__init__.py +6 -0
- digitalkin/services/identity/default_identity.py +15 -0
- digitalkin/services/identity/identity_strategy.py +14 -0
- digitalkin/services/registry/__init__.py +27 -0
- digitalkin/services/registry/default_registry.py +141 -0
- digitalkin/services/registry/exceptions.py +47 -0
- digitalkin/services/registry/grpc_registry.py +306 -0
- digitalkin/services/registry/registry_models.py +43 -0
- digitalkin/services/registry/registry_strategy.py +98 -0
- digitalkin/services/services_config.py +200 -0
- digitalkin/services/services_models.py +65 -0
- digitalkin/services/setup/__init__.py +1 -0
- digitalkin/services/setup/default_setup.py +219 -0
- digitalkin/services/setup/grpc_setup.py +343 -0
- digitalkin/services/setup/setup_strategy.py +145 -0
- digitalkin/services/snapshot/__init__.py +6 -0
- digitalkin/services/snapshot/default_snapshot.py +39 -0
- digitalkin/services/snapshot/snapshot_strategy.py +30 -0
- digitalkin/services/storage/__init__.py +7 -0
- digitalkin/services/storage/default_storage.py +228 -0
- digitalkin/services/storage/grpc_storage.py +214 -0
- digitalkin/services/storage/storage_strategy.py +273 -0
- digitalkin/services/user_profile/__init__.py +12 -0
- digitalkin/services/user_profile/default_user_profile.py +55 -0
- digitalkin/services/user_profile/grpc_user_profile.py +69 -0
- digitalkin/services/user_profile/user_profile_strategy.py +40 -0
- digitalkin/utils/__init__.py +29 -0
- digitalkin/utils/arg_parser.py +92 -0
- digitalkin/utils/development_mode_action.py +51 -0
- digitalkin/utils/dynamic_schema.py +483 -0
- digitalkin/utils/llm_ready_schema.py +75 -0
- digitalkin/utils/package_discover.py +357 -0
- digitalkin-0.3.2.dev2.dist-info/METADATA +602 -0
- digitalkin-0.3.2.dev2.dist-info/RECORD +131 -0
- digitalkin-0.3.2.dev2.dist-info/WHEEL +5 -0
- digitalkin-0.3.2.dev2.dist-info/licenses/LICENSE +430 -0
- digitalkin-0.3.2.dev2.dist-info/top_level.txt +4 -0
- modules/__init__.py +0 -0
- modules/cpu_intensive_module.py +280 -0
- modules/dynamic_setup_module.py +338 -0
- modules/minimal_llm_module.py +347 -0
- modules/text_transform_module.py +203 -0
- services/filesystem_module.py +200 -0
- services/storage_module.py +206 -0

digitalkin/services/setup/setup_strategy.py
@@ -0,0 +1,145 @@
+"""This module contains the abstract base class for setup strategies."""
+
+import datetime
+from abc import ABC, abstractmethod
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class SetupServiceError(Exception):
+    """Base exception for Setup service errors."""
+
+
+class SetupVersionData(BaseModel):
+    """Pydantic model for SetupVersion data validation."""
+
+    id: str
+    setup_id: str
+    version: str
+    content: dict[str, Any]
+    creation_date: datetime.datetime
+
+
+class SetupData(BaseModel):
+    """Pydantic model for Setup data validation."""
+
+    id: str
+    name: str
+    organisation_id: str
+    owner_id: str
+    module_id: str
+    current_setup_version: SetupVersionData
+
+
+class SetupStrategy(ABC):
+    """Abstract base class for setup strategies."""
+
+    def __init__(self) -> None:
+        """Initialize the setup strategy."""
+
+    def __post_init__(self, *args, **kwargs) -> None:  # noqa: ANN002, ANN003
+        """Initialize the setup strategy."""
+
+    @abstractmethod
+    def create_setup(self, setup_dict: dict[str, Any]) -> str:
+        """Create a new setup with comprehensive validation.
+
+        Args:
+            setup_dict: Dictionary containing setup details.
+
+        Returns:
+            str: The identifier of the created setup.
+
+        Raises:
+            ValidationError: If setup data is invalid.
+            GrpcOperationError: If gRPC operation fails.
+        """
+
+    @abstractmethod
+    def get_setup(self, setup_dict: dict[str, Any]) -> SetupData:
+        """Retrieve a setup by its unique identifier.
+
+        Args:
+            setup_dict: Dictionary with 'name' and optional 'version'.
+
+        Returns:
+            SetupData: Setup details including the current setup version.
+        """
+
+    @abstractmethod
+    def update_setup(self, setup_dict: dict[str, Any]) -> bool:
+        """Update an existing setup.
+
+        Args:
+            setup_dict: Dictionary with setup update details.
+
+        Returns:
+            bool: Success status of the update operation.
+        """
+
+    @abstractmethod
+    def delete_setup(self, setup_dict: dict[str, Any]) -> bool:
+        """Delete a setup by its unique identifier.
+
+        Args:
+            setup_dict: Dictionary with the setup 'name'.
+
+        Returns:
+            bool: Success status of deletion.
+        """
+
+    @abstractmethod
+    def create_setup_version(self, setup_version_dict: dict[str, Any]) -> str:
+        """Create a new setup version.
+
+        Args:
+            setup_version_dict: Dictionary with setup version details.
+
+        Returns:
+            str: The identifier of the created setup version.
+        """
+
+    @abstractmethod
+    def get_setup_version(self, setup_version_dict: dict[str, Any]) -> SetupVersionData:
+        """Retrieve a setup version by its unique identifier.
+
+        Args:
+            setup_version_dict: Dictionary with the setup version 'name'.
+
+        Returns:
+            SetupVersionData: Setup version details.
+        """
+
+    @abstractmethod
+    def search_setup_versions(self, setup_version_dict: dict[str, Any]) -> list[SetupVersionData]:
+        """Search for setup versions based on filters.
+
+        Args:
+            setup_version_dict: Dictionary with optional 'name' and 'version' filters.
+
+        Returns:
+            list[SetupVersionData]: A list of matching setup versions.
+        """
+
+    @abstractmethod
+    def update_setup_version(self, setup_version_dict: dict[str, Any]) -> bool:
+        """Update an existing setup version.
+
+        Args:
+            setup_version_dict: Dictionary with setup version update details.
+
+        Returns:
+            bool: Success status of the update operation.
+        """
+
+    @abstractmethod
+    def delete_setup_version(self, setup_version_dict: dict[str, Any]) -> bool:
+        """Delete a setup version by its unique identifier.
+
+        Args:
+            setup_version_dict: Dictionary with the setup version 'name'.
+
+        Returns:
+            bool: Success status of version deletion.
+        """
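
A minimal sketch of how this interface might be implemented, assuming only the classes defined in setup_strategy.py above. The in-memory dictionaries, the keying by 'name'/'id', and the class name InMemorySetupStrategy are illustrative and are not part of the package:

import datetime
import uuid
from typing import Any

from digitalkin.services.setup.setup_strategy import SetupData, SetupStrategy, SetupVersionData


class InMemorySetupStrategy(SetupStrategy):
    """Illustrative, non-persistent SetupStrategy that keeps everything in dicts."""

    def __init__(self) -> None:
        super().__init__()
        self._setups: dict[str, SetupData] = {}
        self._versions: dict[str, SetupVersionData] = {}

    def create_setup(self, setup_dict: dict[str, Any]) -> str:
        setup = SetupData.model_validate(setup_dict)  # raises ValidationError on bad input
        self._setups[setup.name] = setup
        return setup.id

    def get_setup(self, setup_dict: dict[str, Any]) -> SetupData:
        return self._setups[setup_dict["name"]]

    def update_setup(self, setup_dict: dict[str, Any]) -> bool:
        current = self._setups.get(setup_dict.get("name", ""))
        if current is None:
            return False
        # Re-validate the merged payload so partial updates still satisfy the model.
        self._setups[current.name] = SetupData.model_validate({**current.model_dump(), **setup_dict})
        return True

    def delete_setup(self, setup_dict: dict[str, Any]) -> bool:
        return self._setups.pop(setup_dict["name"], None) is not None

    def create_setup_version(self, setup_version_dict: dict[str, Any]) -> str:
        version = SetupVersionData.model_validate({
            "id": str(uuid.uuid4()),
            "creation_date": datetime.datetime.now(datetime.timezone.utc),
            **setup_version_dict,
        })
        self._versions[version.id] = version
        return version.id

    def get_setup_version(self, setup_version_dict: dict[str, Any]) -> SetupVersionData:
        return self._versions[setup_version_dict["id"]]

    def search_setup_versions(self, setup_version_dict: dict[str, Any]) -> list[SetupVersionData]:
        wanted = setup_version_dict.get("version")
        return [v for v in self._versions.values() if wanted is None or v.version == wanted]

    def update_setup_version(self, setup_version_dict: dict[str, Any]) -> bool:
        current = self._versions.get(setup_version_dict.get("id", ""))
        if current is None:
            return False
        self._versions[current.id] = SetupVersionData.model_validate({**current.model_dump(), **setup_version_dict})
        return True

    def delete_setup_version(self, setup_version_dict: dict[str, Any]) -> bool:
        return self._versions.pop(setup_version_dict["id"], None) is not None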

digitalkin/services/snapshot/__init__.py
@@ -0,0 +1,6 @@
+"""This module is responsible for handling the snapshot service."""
+
+from digitalkin.services.snapshot.default_snapshot import DefaultSnapshot
+from digitalkin.services.snapshot.snapshot_strategy import SnapshotStrategy
+
+__all__ = ["DefaultSnapshot", "SnapshotStrategy"]

digitalkin/services/snapshot/default_snapshot.py
@@ -0,0 +1,39 @@
+"""Default snapshot."""
+
+from typing import Any
+
+from digitalkin.services.snapshot.snapshot_strategy import SnapshotStrategy
+
+
+class DefaultSnapshot(SnapshotStrategy):
+    """Default snapshot strategy."""
+
+    def create(self, data: dict[str, Any]) -> str:  # noqa: ARG002, PLR6301
+        """Create a new snapshot in the file system.
+
+        Returns:
+            str: The ID of the new snapshot
+        """
+        return "1"
+
+    def get(self, data: dict[str, Any]) -> None:
+        """Get snapshots from the file system."""
+
+    def update(self, data: dict[str, Any]) -> int:  # noqa: ARG002, PLR6301
+        """Update snapshots in the file system.
+
+        Returns:
+            int: The number of snapshots updated
+        """
+        return 1
+
+    def delete(self, data: dict[str, Any]) -> int:  # noqa: ARG002, PLR6301
+        """Delete snapshots from the file system.
+
+        Returns:
+            int: The number of snapshots deleted
+        """
+        return 1
+
+    def get_all(self) -> None:
+        """Get all snapshots from the file system."""

digitalkin/services/snapshot/snapshot_strategy.py
@@ -0,0 +1,30 @@
+"""This module contains the abstract base class for snapshot strategies."""
+
+from abc import ABC, abstractmethod
+from typing import Any
+
+from digitalkin.services.base_strategy import BaseStrategy
+
+
+class SnapshotStrategy(BaseStrategy, ABC):
+    """Abstract base class for snapshot strategies."""
+
+    @abstractmethod
+    def create(self, data: dict[str, Any]) -> str:
+        """Create a new snapshot in the file system."""
+
+    @abstractmethod
+    def get(self, data: dict[str, Any]) -> None:
+        """Get snapshots from the file system."""
+
+    @abstractmethod
+    def update(self, data: dict[str, Any]) -> int:
+        """Update snapshots in the file system."""
+
+    @abstractmethod
+    def delete(self, data: dict[str, Any]) -> int:
+        """Delete snapshots from the file system."""
+
+    @abstractmethod
+    def get_all(self) -> None:
+        """Get all snapshots from the file system."""

digitalkin/services/storage/__init__.py
@@ -0,0 +1,7 @@
+"""This module is responsible for handling the storage service."""
+
+from digitalkin.services.storage.default_storage import DefaultStorage
+from digitalkin.services.storage.grpc_storage import GrpcStorage
+from digitalkin.services.storage.storage_strategy import StorageStrategy
+
+__all__ = ["DefaultStorage", "GrpcStorage", "StorageStrategy"]

digitalkin/services/storage/default_storage.py
@@ -0,0 +1,228 @@
+"""This module implements the default storage strategy."""
+
+import datetime
+import json
+import tempfile
+from pathlib import Path
+from typing import Any
+
+from pydantic import BaseModel
+
+from digitalkin.logger import logger
+from digitalkin.services.storage.storage_strategy import (
+    DataType,
+    StorageRecord,
+    StorageStrategy,
+)
+
+
+class DefaultStorage(StorageStrategy):
+    """Persist records in a local JSON file for quick local development.
+
+    File format: a JSON object of
+    { "<collection>:<record_id>": { ... StorageRecord fields ... }, ... }
+    """
+
+    @staticmethod
+    def _json_default(o: Any) -> str:  # noqa: ANN401
+        """JSON serializer for non-standard types (datetime → ISO).
+
+        Args:
+            o: The object to serialize
+
+        Returns:
+            str: The serialized object
+
+        Raises:
+            TypeError: If the object is not serializable
+        """
+        if isinstance(o, datetime.datetime):
+            return o.isoformat()
+        msg = f"Type {o.__class__.__name__} not serializable"
+        raise TypeError(msg)
+
+    def _load_from_file(self) -> dict[str, StorageRecord]:
+        """Load storage data from the file.
+
+        Returns:
+            A dictionary containing the loaded storage records
+        """
+        if not self.storage_file.exists():
+            return {}
+
+        try:
+            raw = json.loads(self.storage_file.read_text(encoding="utf-8"))
+            out: dict[str, StorageRecord] = {}
+
+            for key, rd in raw.items():
+                # rd is a dict with the StorageRecord fields
+                model_cls = self.config.get(rd["collection"])
+                if not model_cls:
+                    logger.warning("No model for collection '%s'", rd["collection"])
+                    continue
+                data_model = model_cls.model_validate(rd["data"])
+                rec = StorageRecord(
+                    mission_id=rd["mission_id"],
+                    collection=rd["collection"],
+                    record_id=rd["record_id"],
+                    data=data_model,
+                    data_type=DataType[rd["data_type"]],
+                    creation_date=datetime.datetime.fromisoformat(rd["creation_date"])
+                    if rd.get("creation_date")
+                    else None,
+                    update_date=datetime.datetime.fromisoformat(rd["update_date"]) if rd.get("update_date") else None,
+                )
+                out[key] = rec
+        except Exception:
+            logger.exception("Failed to load default storage file")
+            return {}
+        return out
+
+    def _save_to_file(self) -> None:
+        """Atomically write `self.storage` back to disk as JSON."""
+        self.storage_file.parent.mkdir(parents=True, exist_ok=True)
+        with tempfile.NamedTemporaryFile(
+            mode="w",
+            encoding="utf-8",
+            delete=False,
+            dir=str(self.storage_file.parent),
+            suffix=".tmp",
+        ) as temp:
+            try:
+                # Convert storage to a serializable format
+                serial: dict[str, dict] = {}
+                for key, record in self.storage.items():
+                    serial[key] = {
+                        "mission_id": record.mission_id,
+                        "collection": record.collection,
+                        "record_id": record.record_id,
+                        "data_type": record.data_type.name,
+                        "data": record.data.model_dump(),
+                        "creation_date": record.creation_date.isoformat() if record.creation_date else None,
+                        "update_date": record.update_date.isoformat() if record.update_date else None,
+                    }
+                json.dump(serial, temp, indent=2, default=self._json_default)
+                temp.flush()
+                Path(temp.name).replace(self.storage_file)
+            except Exception:
+                logger.exception("Unexpected error saving storage")
+
+    def _store(self, record: StorageRecord) -> StorageRecord:
+        """Store a new record in the database and persist to file.
+
+        Args:
+            record: The record to store
+
+        Returns:
+            StorageRecord: The stored record
+
+        Raises:
+            ValueError: If the record already exists
+        """
+        key = f"{record.collection}:{record.record_id}"
+        if key in self.storage:
+            msg = f"Document {key!r} already exists"
+            raise ValueError(msg)
+        now = datetime.datetime.now(datetime.timezone.utc)
+        record.creation_date = now
+        record.update_date = now
+        self.storage[key] = record
+        self._save_to_file()
+        logger.debug("Created %s", key)
+        return record
+
+    def _read(self, collection: str, record_id: str) -> StorageRecord | None:
+        """Get records from the database.
+
+        Args:
+            collection: The unique name to retrieve data for
+            record_id: The unique ID of the record
+
+        Returns:
+            StorageRecord | None: The corresponding record, or None if missing
+        """
+        key = f"{collection}:{record_id}"
+        return self.storage.get(key)
+
+    def _update(self, collection: str, record_id: str, data: BaseModel) -> StorageRecord | None:
+        """Update records in the database and persist to file.
+
+        Args:
+            collection: The unique name to retrieve data for
+            record_id: The unique ID of the record
+            data: The data to modify
+
+        Returns:
+            StorageRecord | None: The modified record, or None if missing
+        """
+        key = f"{collection}:{record_id}"
+        rec = self.storage.get(key)
+        if not rec:
+            return None
+        rec.data = data
+        rec.update_date = datetime.datetime.now(datetime.timezone.utc)
+        self._save_to_file()
+        logger.debug("Modified %s", key)
+        return rec
+
+    def _remove(self, collection: str, record_id: str) -> bool:
+        """Delete records from the database and update file.
+
+        Args:
+            collection: The unique name to retrieve data for
+            record_id: The unique ID of the record
+
+        Returns:
+            bool: True if the record was removed, False otherwise
+        """
+        key = f"{collection}:{record_id}"
+        if key not in self.storage:
+            return False
+        del self.storage[key]
+        self._save_to_file()
+        logger.debug("Removed %s", key)
+        return True
+
+    def _list(self, collection: str) -> list[StorageRecord]:
+        """Implements StorageStrategy._list.
+
+        Args:
+            collection: The unique name to retrieve data for
+
+        Returns:
+            A list of storage records
+        """
+        prefix = f"{collection}:"
+        return [r for k, r in self.storage.items() if k.startswith(prefix)]
+
+    def _remove_collection(self, collection: str) -> bool:
+        """Implements StorageStrategy._remove_collection.
+
+        Args:
+            collection: The unique name to retrieve data for
+
+        Returns:
+            bool: True if the collection was removed, False otherwise
+        """
+        prefix = f"{collection}:"
+        to_delete = [k for k in self.storage if k.startswith(prefix)]
+        for k in to_delete:
+            del self.storage[k]
+        self._save_to_file()
+        logger.debug("Removed collection %s (%d docs)", collection, len(to_delete))
+        return True
+
+    def __init__(
+        self,
+        mission_id: str,
+        setup_id: str,
+        setup_version_id: str,
+        config: dict[str, type[BaseModel]],
+        storage_file_path: str = "local_storage",
+        **kwargs,  # noqa: ANN003, ARG002
+    ) -> None:
+        """Initialize the storage."""
+        super().__init__(mission_id=mission_id, setup_id=setup_id, setup_version_id=setup_version_id, config=config)
+        self.storage_file_path = f"{self.mission_id}_{storage_file_path}.json"
+        self.storage_file = Path(self.storage_file_path)
+        self.storage = self._load_from_file()
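
To make the on-disk layout concrete, a small sketch of what one _save_to_file entry looks like. The NoteModel collection and the "OUTPUT" data-type name are hypothetical placeholders; only the "<collection>:<record_id>" key convention and the _json_default datetime handling come from the class above:

import datetime
import json

from pydantic import BaseModel

from digitalkin.services.storage.default_storage import DefaultStorage


class NoteModel(BaseModel):  # hypothetical user-defined collection model
    text: str


# Mirrors one entry of the dict built in _save_to_file: the storage key is
# "<collection>:<record_id>", the payload is the model_dump of the data model,
# and the datetime passed here is turned into an ISO-8601 string by _json_default.
entry = {
    "notes:note-1": {
        "mission_id": "mission-42",
        "collection": "notes",
        "record_id": "note-1",
        "data_type": "OUTPUT",  # assumed DataType member name, purely illustrative
        "data": NoteModel(text="hello").model_dump(),
        "creation_date": datetime.datetime.now(datetime.timezone.utc),
        "update_date": None,
    }
}
print(json.dumps(entry, indent=2, default=DefaultStorage._json_default))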

digitalkin/services/storage/grpc_storage.py
@@ -0,0 +1,214 @@
+"""This module implements the gRPC storage strategy."""
+
+from agentic_mesh_protocol.storage.v1 import data_pb2, storage_service_pb2_grpc
+from google.protobuf import json_format
+from google.protobuf.struct_pb2 import Struct
+from pydantic import BaseModel
+
+from digitalkin.grpc_servers.utils.grpc_client_wrapper import GrpcClientWrapper
+from digitalkin.logger import logger
+from digitalkin.models.grpc_servers.models import ClientConfig
+from digitalkin.services.storage.storage_strategy import (
+    DataType,
+    StorageRecord,
+    StorageServiceError,
+    StorageStrategy,
+)
+
+
+class GrpcStorage(StorageStrategy, GrpcClientWrapper):
+    """This class implements the gRPC storage strategy."""
+
+    def _build_record_from_proto(self, proto: data_pb2.StorageRecord) -> StorageRecord:
+        """Convert a protobuf StorageRecord message into our Pydantic model.
+
+        Args:
+            proto: gRPC StorageRecord
+
+        Returns:
+            A fully validated StorageRecord.
+        """
+        raw = json_format.MessageToDict(
+            proto,
+            preserving_proto_field_name=True,
+            always_print_fields_with_no_presence=True,
+        )
+        mission = raw["mission_id"]
+        coll = raw["collection"]
+        rid = raw["record_id"]
+        dtype = DataType[raw["data_type"]]
+        payload = raw.get("data", {})
+
+        validated = self._validate_data(coll, payload)
+        return StorageRecord(
+            mission_id=mission,
+            collection=coll,
+            record_id=rid,
+            data=validated,
+            data_type=dtype,
+            creation_date=raw.get("creation_date"),
+            update_date=raw.get("update_date"),
+        )
+
+    def _store(self, record: StorageRecord) -> StorageRecord:
+        """Create a new record in the database.
+
+        Args:
+            record: The record to store
+
+        Returns:
+            StorageRecord: The corresponding record
+
+        Raises:
+            StorageServiceError: If there is an error while storing the record
+        """
+        try:
+            data_struct = Struct()
+            data_struct.update(record.data.model_dump())
+            req = data_pb2.StoreRecordRequest(
+                data=data_struct,
+                mission_id=record.mission_id,
+                collection=record.collection,
+                record_id=record.record_id,
+                data_type=record.data_type.name,
+            )
+            resp = self.exec_grpc_query("StoreRecord", req)
+            return self._build_record_from_proto(resp.stored_data)
+        except Exception as e:
+            logger.exception(
+                "gRPC StoreRecord failed for %s:%s",
+                record.collection,
+                record.record_id,
+            )
+            raise StorageServiceError(str(e)) from e
+
+    def _read(self, collection: str, record_id: str) -> StorageRecord | None:
+        """Fetch a single document by collection + record_id.
+
+        Returns:
+            StorageRecord | None: The record, or None if the read fails
+        """
+        try:
+            req = data_pb2.ReadRecordRequest(
+                mission_id=self.mission_id,
+                collection=collection,
+                record_id=record_id,
+            )
+            resp = self.exec_grpc_query("ReadRecord", req)
+            return self._build_record_from_proto(resp.stored_data)
+        except Exception:
+            logger.warning("gRPC ReadRecord failed for %s:%s", collection, record_id)
+            return None
+
+    def _update(
+        self,
+        collection: str,
+        record_id: str,
+        data: BaseModel,
+    ) -> StorageRecord | None:
+        """Overwrite a document via gRPC.
+
+        Args:
+            collection: The unique name for the record type
+            record_id: The unique ID for the record
+            data: The validated data model
+
+        Returns:
+            StorageRecord | None: The updated record, or None on failure
+        """
+        try:
+            struct = Struct()
+            struct.update(data.model_dump())
+            req = data_pb2.UpdateRecordRequest(
+                data=struct,
+                mission_id=self.mission_id,
+                collection=collection,
+                record_id=record_id,
+            )
+            resp = self.exec_grpc_query("UpdateRecord", req)
+            return self._build_record_from_proto(resp.stored_data)
+        except Exception:
+            logger.warning("gRPC UpdateRecord failed for %s:%s", collection, record_id)
+            return None
+
+    def _remove(self, collection: str, record_id: str) -> bool:
+        """Delete a document via gRPC.
+
+        Args:
+            collection: The unique name for the record type
+            record_id: The unique ID for the record
+
+        Returns:
+            bool: True if the record was deleted, False otherwise
+        """
+        try:
+            req = data_pb2.RemoveRecordRequest(
+                mission_id=self.mission_id,
+                collection=collection,
+                record_id=record_id,
+            )
+            self.exec_grpc_query("RemoveRecord", req)
+        except Exception:
+            logger.warning(
+                "gRPC RemoveRecord failed for %s:%s",
+                collection,
+                record_id,
+            )
+            return False
+        return True
+
+    def _list(self, collection: str) -> list[StorageRecord]:
+        """List all documents in a collection via gRPC.
+
+        Args:
+            collection: The unique name for the record type
+
+        Returns:
+            list[StorageRecord]: A list of storage records
+        """
+        try:
+            req = data_pb2.ListRecordsRequest(
+                mission_id=self.mission_id,
+                collection=collection,
+            )
+            resp = self.exec_grpc_query("ListRecords", req)
+            return [self._build_record_from_proto(r) for r in resp.records]
+        except Exception:
+            logger.warning("gRPC ListRecords failed for %s", collection)
+            return []
+
+    def _remove_collection(self, collection: str) -> bool:
+        """Delete an entire collection via gRPC.
+
+        Args:
+            collection: The unique name for the record type
+
+        Returns:
+            bool: True if the collection was deleted, False otherwise
+        """
+        try:
+            req = data_pb2.RemoveCollectionRequest(
+                mission_id=self.mission_id,
+                collection=collection,
+            )
+            self.exec_grpc_query("RemoveCollection", req)
+        except Exception:
+            logger.warning("gRPC RemoveCollection failed for %s", collection)
+            return False
+        return True
+
+    def __init__(
+        self,
+        mission_id: str,
+        setup_id: str,
+        setup_version_id: str,
+        config: dict[str, type[BaseModel]],
+        client_config: ClientConfig,
+        **kwargs,  # noqa: ANN003, ARG002
+    ) -> None:
+        """Initialize the storage."""
+        super().__init__(mission_id=mission_id, setup_id=setup_id, setup_version_id=setup_version_id, config=config)
+
+        channel = self._init_channel(client_config)
+        self.stub = storage_service_pb2_grpc.StorageServiceStub(channel)
+        logger.debug("Channel client 'storage' initialized successfully")
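
The Struct round trip that _store and _build_record_from_proto rely on can be exercised without a gRPC channel. In this sketch the NoteModel is a hypothetical collection model; everything else uses only the protobuf well-known types and pydantic:

from google.protobuf import json_format
from google.protobuf.struct_pb2 import Struct
from pydantic import BaseModel


class NoteModel(BaseModel):  # hypothetical collection model
    text: str
    priority: int


# Outbound: _store packs the Pydantic payload into a protobuf Struct
# before attaching it to StoreRecordRequest.
payload = Struct()
payload.update(NoteModel(text="hello", priority=2).model_dump())

# Inbound: _build_record_from_proto turns proto messages back into plain dicts
# with json_format.MessageToDict before re-validating them against the model.
raw = json_format.MessageToDict(payload, preserving_proto_field_name=True)
print(NoteModel.model_validate(raw))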