digitalkin 0.2.25rc0__py3-none-any.whl → 0.3.2.dev14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- base_server/server_async_insecure.py +6 -5
- base_server/server_async_secure.py +6 -5
- base_server/server_sync_insecure.py +5 -4
- base_server/server_sync_secure.py +5 -4
- digitalkin/__version__.py +1 -1
- digitalkin/core/__init__.py +1 -0
- digitalkin/core/common/__init__.py +9 -0
- digitalkin/core/common/factories.py +156 -0
- digitalkin/core/job_manager/__init__.py +1 -0
- digitalkin/{modules → core}/job_manager/base_job_manager.py +138 -32
- digitalkin/core/job_manager/single_job_manager.py +373 -0
- digitalkin/{modules → core}/job_manager/taskiq_broker.py +121 -26
- digitalkin/core/job_manager/taskiq_job_manager.py +541 -0
- digitalkin/core/task_manager/__init__.py +1 -0
- digitalkin/core/task_manager/base_task_manager.py +539 -0
- digitalkin/core/task_manager/local_task_manager.py +108 -0
- digitalkin/core/task_manager/remote_task_manager.py +87 -0
- digitalkin/core/task_manager/surrealdb_repository.py +266 -0
- digitalkin/core/task_manager/task_executor.py +249 -0
- digitalkin/core/task_manager/task_session.py +368 -0
- digitalkin/grpc_servers/__init__.py +1 -19
- digitalkin/grpc_servers/_base_server.py +3 -3
- digitalkin/grpc_servers/module_server.py +120 -195
- digitalkin/grpc_servers/module_servicer.py +81 -44
- digitalkin/grpc_servers/utils/__init__.py +1 -0
- digitalkin/grpc_servers/utils/exceptions.py +0 -8
- digitalkin/grpc_servers/utils/grpc_client_wrapper.py +25 -9
- digitalkin/grpc_servers/utils/grpc_error_handler.py +53 -0
- digitalkin/grpc_servers/utils/utility_schema_extender.py +100 -0
- digitalkin/logger.py +64 -27
- digitalkin/mixins/__init__.py +19 -0
- digitalkin/mixins/base_mixin.py +10 -0
- digitalkin/mixins/callback_mixin.py +24 -0
- digitalkin/mixins/chat_history_mixin.py +110 -0
- digitalkin/mixins/cost_mixin.py +76 -0
- digitalkin/mixins/file_history_mixin.py +93 -0
- digitalkin/mixins/filesystem_mixin.py +46 -0
- digitalkin/mixins/logger_mixin.py +51 -0
- digitalkin/mixins/storage_mixin.py +79 -0
- digitalkin/models/__init__.py +1 -1
- digitalkin/models/core/__init__.py +1 -0
- digitalkin/{modules/job_manager → models/core}/job_manager_models.py +3 -11
- digitalkin/models/core/task_monitor.py +74 -0
- digitalkin/models/grpc_servers/__init__.py +1 -0
- digitalkin/{grpc_servers/utils → models/grpc_servers}/models.py +92 -7
- digitalkin/models/module/__init__.py +18 -11
- digitalkin/models/module/base_types.py +61 -0
- digitalkin/models/module/module.py +9 -1
- digitalkin/models/module/module_context.py +282 -6
- digitalkin/models/module/module_types.py +29 -105
- digitalkin/models/module/setup_types.py +490 -0
- digitalkin/models/module/tool_cache.py +68 -0
- digitalkin/models/module/tool_reference.py +117 -0
- digitalkin/models/module/utility.py +167 -0
- digitalkin/models/services/__init__.py +9 -0
- digitalkin/models/services/cost.py +1 -0
- digitalkin/models/services/registry.py +35 -0
- digitalkin/models/services/storage.py +39 -5
- digitalkin/modules/__init__.py +5 -1
- digitalkin/modules/_base_module.py +265 -167
- digitalkin/modules/archetype_module.py +6 -1
- digitalkin/modules/tool_module.py +16 -3
- digitalkin/modules/trigger_handler.py +7 -6
- digitalkin/modules/triggers/__init__.py +8 -0
- digitalkin/modules/triggers/healthcheck_ping_trigger.py +45 -0
- digitalkin/modules/triggers/healthcheck_services_trigger.py +63 -0
- digitalkin/modules/triggers/healthcheck_status_trigger.py +52 -0
- digitalkin/services/__init__.py +4 -0
- digitalkin/services/communication/__init__.py +7 -0
- digitalkin/services/communication/communication_strategy.py +76 -0
- digitalkin/services/communication/default_communication.py +101 -0
- digitalkin/services/communication/grpc_communication.py +234 -0
- digitalkin/services/cost/__init__.py +9 -2
- digitalkin/services/cost/grpc_cost.py +9 -42
- digitalkin/services/filesystem/default_filesystem.py +0 -2
- digitalkin/services/filesystem/grpc_filesystem.py +10 -39
- digitalkin/services/registry/__init__.py +22 -1
- digitalkin/services/registry/default_registry.py +135 -4
- digitalkin/services/registry/exceptions.py +47 -0
- digitalkin/services/registry/grpc_registry.py +306 -0
- digitalkin/services/registry/registry_models.py +15 -0
- digitalkin/services/registry/registry_strategy.py +88 -4
- digitalkin/services/services_config.py +25 -3
- digitalkin/services/services_models.py +5 -1
- digitalkin/services/setup/default_setup.py +6 -7
- digitalkin/services/setup/grpc_setup.py +52 -15
- digitalkin/services/storage/grpc_storage.py +4 -4
- digitalkin/services/user_profile/__init__.py +12 -0
- digitalkin/services/user_profile/default_user_profile.py +55 -0
- digitalkin/services/user_profile/grpc_user_profile.py +69 -0
- digitalkin/services/user_profile/user_profile_strategy.py +25 -0
- digitalkin/utils/__init__.py +28 -0
- digitalkin/utils/arg_parser.py +1 -1
- digitalkin/utils/development_mode_action.py +2 -2
- digitalkin/utils/dynamic_schema.py +483 -0
- digitalkin/utils/package_discover.py +1 -2
- digitalkin/utils/schema_splitter.py +207 -0
- {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/METADATA +11 -30
- digitalkin-0.3.2.dev14.dist-info/RECORD +143 -0
- {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/top_level.txt +1 -0
- modules/archetype_with_tools_module.py +244 -0
- modules/cpu_intensive_module.py +1 -1
- modules/dynamic_setup_module.py +338 -0
- modules/minimal_llm_module.py +1 -1
- modules/text_transform_module.py +1 -1
- monitoring/digitalkin_observability/__init__.py +46 -0
- monitoring/digitalkin_observability/http_server.py +150 -0
- monitoring/digitalkin_observability/interceptors.py +176 -0
- monitoring/digitalkin_observability/metrics.py +201 -0
- monitoring/digitalkin_observability/prometheus.py +137 -0
- monitoring/tests/test_metrics.py +172 -0
- services/filesystem_module.py +7 -5
- services/storage_module.py +4 -2
- digitalkin/grpc_servers/registry_server.py +0 -65
- digitalkin/grpc_servers/registry_servicer.py +0 -456
- digitalkin/grpc_servers/utils/factory.py +0 -180
- digitalkin/modules/job_manager/single_job_manager.py +0 -294
- digitalkin/modules/job_manager/taskiq_job_manager.py +0 -290
- digitalkin-0.2.25rc0.dist-info/RECORD +0 -89
- /digitalkin/{grpc_servers/utils → models/grpc_servers}/types.py +0 -0
- {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/WHEEL +0 -0
- {digitalkin-0.2.25rc0.dist-info → digitalkin-0.3.2.dev14.dist-info}/licenses/LICENSE +0 -0

digitalkin/grpc_servers/utils/grpc_client_wrapper.py
CHANGED

@@ -6,8 +6,8 @@ from typing import Any
 import grpc
 
 from digitalkin.grpc_servers.utils.exceptions import ServerError
-from digitalkin.grpc_servers.utils.models import ClientConfig, SecurityMode
 from digitalkin.logger import logger
+from digitalkin.models.grpc_servers.models import ClientConfig, SecurityMode
 
 
 class GrpcClientWrapper:
@@ -43,9 +43,9 @@ class GrpcClientWrapper:
                 private_key=private_key,
             )
 
-            return grpc.secure_channel(config.address, channel_credentials, options=config.
+            return grpc.secure_channel(config.address, channel_credentials, options=config.grpc_options)
         # Insecure channel
-        return grpc.insecure_channel(config.address, options=config.
+        return grpc.insecure_channel(config.address, options=config.grpc_options)
 
     def exec_grpc_query(self, query_endpoint: str, request: Any) -> Any:  # noqa: ANN401
         """Execute a gRPC query with from the query's rpc endpoint name.
@@ -58,15 +58,31 @@
             corresponding gRPC reponse.
 
         Raises:
-            ServerError: gRPC error catching
+            ServerError: gRPC error catching with status code and details
         """
+        service_name = getattr(self, "service_name", "unknown")
         try:
-
-
+            logger.debug(
+                "Sending gRPC request to %s",
+                query_endpoint,
+                extra={"request": str(request), "service": service_name},
+            )
             response = getattr(self.stub, query_endpoint)(request)
-            logger.debug(
+            logger.debug(
+                "Received gRPC response from %s",
+                query_endpoint,
+                extra={"response": str(response), "service": service_name},
+            )
         except grpc.RpcError as e:
-
-
+            status_code = e.code().name if hasattr(e, "code") else "UNKNOWN"
+            details = e.details() if hasattr(e, "details") else str(e)
+            msg = f"[{status_code}] {details}"
+            logger.error(
+                "gRPC %s failed: %s",
+                query_endpoint,
+                msg,
+                extra={"service": service_name},
+            )
+            raise ServerError(msg) from e
         else:
             return response
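
For callers of the wrapper, the practical effect of this change is that exec_grpc_query now raises a ServerError whose message embeds the gRPC status code and details. A minimal consumption sketch follows; the client object and the "GetRecord" endpoint name are invented for illustration and are not part of the diff.

from digitalkin.grpc_servers.utils.exceptions import ServerError


def fetch_record(client, request):
    """Run a unary RPC through the wrapper; failures surface as ServerError."""
    try:
        # "GetRecord" is a hypothetical RPC endpoint name.
        return client.exec_grpc_query("GetRecord", request)
    except ServerError as err:
        # The message now carries "[STATUS_CODE] details" from the wrapped RpcError.
        print(f"RPC failed: {err}")
        raise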
digitalkin/grpc_servers/utils/grpc_error_handler.py
ADDED

@@ -0,0 +1,53 @@
+"""Shared error handling utilities for gRPC services."""
+
+from collections.abc import Generator
+from contextlib import contextmanager
+from typing import Any
+
+from digitalkin.grpc_servers.utils.exceptions import ServerError
+from digitalkin.logger import logger
+
+
+class GrpcErrorHandlerMixin:
+    """Mixin class providing common gRPC error handling functionality."""
+
+    @contextmanager
+    def handle_grpc_errors(  # noqa: PLR6301
+        self,
+        operation: str,
+        service_error_class: type[Exception] | None = None,
+    ) -> Generator[Any, Any, Any]:
+        """Handle gRPC errors for the given operation.
+
+        Args:
+            operation: Name of the operation being performed.
+            service_error_class: Optional specific service exception class to raise.
+                If not provided, uses the generic ServerError.
+
+        Yields:
+            Context for the operation.
+
+        Raises:
+            ServerError: For gRPC-related errors.
+            service_error_class: For service-specific errors if provided.
+        """
+        if service_error_class is None:
+            service_error_class = ServerError
+
+        try:
+            yield
+        except service_error_class as e:
+            # Re-raise service-specific errors as-is
+            msg = f"{service_error_class.__name__} in {operation}: {e}"
+            logger.exception(msg)
+            raise service_error_class(msg) from e
+        except ServerError as e:
+            # Handle gRPC server errors
+            msg = f"gRPC {operation} failed: {e}"
+            logger.exception(msg)
+            raise ServerError(msg) from e
+        except Exception as e:
+            # Handle unexpected errors
+            msg = f"Unexpected error in {operation}: {e}"
+            logger.exception(msg)
+            raise service_error_class(msg) from e
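
As a rough illustration of how the new mixin is meant to be composed, the sketch below wraps a backend call in handle_grpc_errors; the ExampleServicer class and its backend argument are invented for this example and are not taken from the diff.

from digitalkin.grpc_servers.utils.grpc_error_handler import GrpcErrorHandlerMixin


class ExampleServicer(GrpcErrorHandlerMixin):
    """Hypothetical servicer composing the shared error handling."""

    def list_entries(self, backend):
        # ServerError and unexpected exceptions raised inside the block are
        # logged and re-raised with the operation name attached.
        with self.handle_grpc_errors("list_entries"):
            return backend.list_entries()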
digitalkin/grpc_servers/utils/utility_schema_extender.py
ADDED

@@ -0,0 +1,100 @@
+"""Utility schema extender for gRPC API responses.
+
+This module extends module schemas with SDK utility protocols for API responses.
+"""
+
+import types
+from typing import Annotated, Union, get_args, get_origin
+
+from pydantic import Field, create_model
+
+from digitalkin.models.module.module_types import DataModel
+from digitalkin.models.module.utility import (
+    EndOfStreamOutput,
+    HealthcheckPingInput,
+    HealthcheckPingOutput,
+    HealthcheckServicesInput,
+    HealthcheckServicesOutput,
+    HealthcheckStatusInput,
+    HealthcheckStatusOutput,
+)
+
+
+class UtilitySchemaExtender:
+    """Extends module schemas with SDK utility protocols for API responses.
+
+    This class provides methods to create extended Pydantic models that include
+    both user-defined protocols and SDK utility protocols in their schemas.
+    """
+
+    _output_protocols = (
+        EndOfStreamOutput,
+        HealthcheckPingOutput,
+        HealthcheckServicesOutput,
+        HealthcheckStatusOutput,
+    )
+
+    _input_protocols = (
+        HealthcheckPingInput,
+        HealthcheckServicesInput,
+        HealthcheckStatusInput,
+    )
+
+    @classmethod
+    def _extract_union_types(cls, annotation: type) -> tuple:
+        """Extract individual types from a Union or Annotated[Union, ...] annotation.
+
+        Returns:
+            A tuple of individual types contained in the Union.
+        """
+        if get_origin(annotation) is Annotated:
+            inner_args = get_args(annotation)
+            if inner_args:
+                return cls._extract_union_types(inner_args[0])
+        if get_origin(annotation) is Union or isinstance(annotation, types.UnionType):
+            return get_args(annotation)
+        return (annotation,)
+
+    @classmethod
+    def create_extended_output_model(cls, base_model: type[DataModel]) -> type[DataModel]:
+        """Create an extended output model that includes utility output protocols.
+
+        Args:
+            base_model: The module's output_format class (a DataModel subclass).
+
+        Returns:
+            A new DataModel subclass with root typed as Union[original_types, utility_types].
+        """
+        original_annotation = base_model.model_fields["root"].annotation
+        original_types = cls._extract_union_types(original_annotation)
+        extended_types = (*original_types, *cls._output_protocols)
+        union_type = Union[extended_types]  # type: ignore[valid-type] # noqa: UP007
+        extended_root = Annotated[union_type, Field(discriminator="protocol")]  # type: ignore[valid-type]
+        return create_model(
+            f"{base_model.__name__}Utilities",
+            __base__=DataModel,
+            root=(extended_root, ...),
+            annotations=(dict[str, str], Field(default={})),
+        )
+
+    @classmethod
+    def create_extended_input_model(cls, base_model: type[DataModel]) -> type[DataModel]:
+        """Create an extended input model that includes utility input protocols.
+
+        Args:
+            base_model: The module's input_format class (a DataModel subclass).
+
+        Returns:
+            A new DataModel subclass with root typed as Union[original_types, utility_types].
+        """
+        original_annotation = base_model.model_fields["root"].annotation
+        original_types = cls._extract_union_types(original_annotation)
+        extended_types = (*original_types, *cls._input_protocols)
+        union_type = Union[extended_types]  # type: ignore[valid-type] # noqa: UP007
+        extended_root = Annotated[union_type, Field(discriminator="protocol")]  # type: ignore[valid-type]
+        return create_model(
+            f"{base_model.__name__}Utilities",
+            __base__=DataModel,
+            root=(extended_root, ...),
+            annotations=(dict[str, str], Field(default={})),
+        )
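
A hedged usage sketch of the extender follows. It builds a stand-in output_format with create_model, mirroring how the extender itself constructs models; ChatReply and MyOutput are invented names, and the sketch assumes that module payloads carry a "protocol" discriminator field, as the SDK utility protocols evidently do.

from typing import Literal

from pydantic import BaseModel, Field, create_model

from digitalkin.grpc_servers.utils.utility_schema_extender import UtilitySchemaExtender
from digitalkin.models.module.module_types import DataModel


class ChatReply(BaseModel):
    """Hypothetical module payload, invented for this sketch."""

    protocol: Literal["chat_reply"] = "chat_reply"
    text: str


# Stand-in output_format, built the same way the extender builds its results.
MyOutput = create_model(
    "MyOutput",
    __base__=DataModel,
    root=(ChatReply, ...),
    annotations=(dict[str, str], Field(default={})),
)

# The extended model's root also accepts EndOfStreamOutput and the healthcheck outputs.
ExtendedOutput = UtilitySchemaExtender.create_extended_output_model(MyOutput)
print(ExtendedOutput.model_json_schema())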
digitalkin/logger.py
CHANGED

@@ -48,12 +48,10 @@ class ColorJSONFormatter(logging.Formatter):
         log_obj: dict[str, Any] = {
             "timestamp": datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat(),
             "level": record.levelname.lower(),
-            "logger": record.name,
             "message": record.getMessage(),
-            "
-            "
+            "module": record.module,
+            "location": f"{record.pathname}:{record.lineno}:{record.funcName}",
         }
-
         # Add exception info if present
         if record.exc_info:
             log_obj["exception"] = self.formatException(record.exc_info)
@@ -91,30 +89,69 @@ class ColorJSONFormatter(logging.Formatter):
         # Pretty print with color
         color = self.COLORS.get(record.levelno, self.grey)
         if self.is_production:
-
-
-
-
+            log_obj["message"] = f"{color}{log_obj.get('message', '')}{self.reset}"
+            return json.dumps(log_obj, default=str, separators=(",", ":"))
+        json_str = json.dumps(log_obj, indent=2, default=str)
+        json_str = json_str.replace("\\n", "\n")
         return f"{color}{json_str}{self.reset}"
 
 
-
-
-
-
+def setup_logger(
+    name: str,
+    level: int = logging.INFO,
+    additional_loggers: dict[str, int] | None = None,
+    *,
+    is_production: bool | None = None,
+    configure_root: bool = True,
+) -> logging.Logger:
+    """Set up a logger with the ColorJSONFormatter.
+
+    Args:
+        name: Name of the logger to create
+        level: Logging level (default: logging.INFO)
+        is_production: Whether running in production. If None, checks RAILWAY_SERVICE_NAME env var
+        configure_root: Whether to configure root logger (default: True)
+        additional_loggers: Dict of additional logger names and their levels to configure
+
+    Returns:
+        logging.Logger: Configured logger instance
+    """
+    # Determine if we're in production
+    if is_production is None:
+        is_production = os.getenv("RAILWAY_SERVICE_NAME") is not None
+
+    # Configure root logger if requested
+    if configure_root:
+        logging.basicConfig(
+            level=logging.DEBUG,
+            stream=sys.stdout,
+            datefmt="%Y-%m-%d %H:%M:%S",
+        )
+
+    # Configure additional loggers
+    if additional_loggers:
+        for logger_name, logger_level in additional_loggers.items():
+            logging.getLogger(logger_name).setLevel(logger_level)
+
+    # Create and configure the main logger
+    logger = logging.getLogger(name)
+    logger.setLevel(level)
+    # Only add handler if not already configured
+    if not logger.handlers:
+        ch = logging.StreamHandler()
+        ch.setLevel(level)
+        ch.setFormatter(ColorJSONFormatter(is_production=is_production))
+        logger.addHandler(ch)
+        logger.propagate = False
+
+    return logger
+
+
+logger = setup_logger(
+    "digitalkin",
+    level=logging.INFO,
+    additional_loggers={
+        "grpc": logging.DEBUG,
+        "asyncio": logging.DEBUG,
+    },
 )
-
-logging.getLogger("grpc").setLevel(logging.DEBUG)
-logging.getLogger("asyncio").setLevel(logging.DEBUG)
-
-
-logger = logging.getLogger("digitalkin")
-is_production = os.getenv("RAILWAY_SERVICE_NAME") is not None
-
-if not logger.handlers:
-    ch = logging.StreamHandler()
-    ch.setLevel(logging.INFO)
-    ch.setFormatter(ColorJSONFormatter(is_production=is_production))
-
-    logger.addHandler(ch)
-    logger.propagate = False
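
With setup_logger exposed, downstream modules can obtain a consistently formatted logger without duplicating handler setup. A small sketch; the logger name and level choices below are illustrative, not SDK defaults.

import logging

from digitalkin.logger import setup_logger

log = setup_logger(
    "my_module",                                   # any consumer logger name
    level=logging.DEBUG,
    additional_loggers={"grpc": logging.WARNING},
    is_production=False,                           # pretty-printed, colorized JSON output
    configure_root=False,                          # leave the root logger untouched
)
log.info("module started")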
digitalkin/mixins/__init__.py
ADDED

@@ -0,0 +1,19 @@
+"""Mixin definitions."""
+
+from digitalkin.mixins.base_mixin import BaseMixin
+from digitalkin.mixins.callback_mixin import UserMessageMixin
+from digitalkin.mixins.chat_history_mixin import ChatHistoryMixin
+from digitalkin.mixins.cost_mixin import CostMixin
+from digitalkin.mixins.filesystem_mixin import FilesystemMixin
+from digitalkin.mixins.logger_mixin import LoggerMixin
+from digitalkin.mixins.storage_mixin import StorageMixin
+
+__all__ = [
+    "BaseMixin",
+    "ChatHistoryMixin",
+    "CostMixin",
+    "FilesystemMixin",
+    "LoggerMixin",
+    "StorageMixin",
+    "UserMessageMixin",
+]
digitalkin/mixins/base_mixin.py
ADDED

@@ -0,0 +1,10 @@
+"""Simple toolkit class with basic and simple API access in the Triggers."""
+
+from digitalkin.mixins.chat_history_mixin import ChatHistoryMixin
+from digitalkin.mixins.cost_mixin import CostMixin
+from digitalkin.mixins.file_history_mixin import FileHistoryMixin
+from digitalkin.mixins.logger_mixin import LoggerMixin
+
+
+class BaseMixin(CostMixin, ChatHistoryMixin, FileHistoryMixin, LoggerMixin):
+    """Base Mixin to access to minimum Module Context functionnalities in the Triggers."""
digitalkin/mixins/callback_mixin.py
ADDED

@@ -0,0 +1,24 @@
+"""User callback to send a message from the Trigger."""
+
+from typing import Generic
+
+from digitalkin.models.module.module_context import ModuleContext
+from digitalkin.models.module.module_types import OutputModelT
+
+
+class UserMessageMixin(Generic[OutputModelT]):
+    """Mixin providing callback operations through the callbacks .
+
+    This mixin wraps callback strategy calls to provide a cleaner API
+    for direct messaging in trigger handlers.
+    """
+
+    @staticmethod
+    async def send_message(context: ModuleContext, output: OutputModelT) -> None:
+        """Send a message using the callbacks strategy.
+
+        Args:
+            context: Module context containing the callbacks strategy.
+            output: Message to send with the Module defined output Type.
+        """
+        await context.callbacks.send_message(output)
digitalkin/mixins/chat_history_mixin.py
ADDED

@@ -0,0 +1,110 @@
+"""Context mixins providing ergonomic access to service strategies.
+
+This module provides mixins that wrap service strategy calls with cleaner APIs,
+following Django/FastAPI patterns where context is passed explicitly to each method.
+"""
+
+from typing import Any, Generic
+
+from digitalkin.mixins.callback_mixin import UserMessageMixin
+from digitalkin.mixins.logger_mixin import LoggerMixin
+from digitalkin.mixins.storage_mixin import StorageMixin
+from digitalkin.models.module.module_context import ModuleContext
+from digitalkin.models.module.module_types import InputModelT, OutputModelT
+from digitalkin.models.services.storage import BaseMessage, ChatHistory, Role
+
+
+class ChatHistoryMixin(UserMessageMixin, StorageMixin, LoggerMixin, Generic[InputModelT, OutputModelT]):
+    """Mixin providing chat history operations through storage strategy.
+
+    This mixin provides a higher-level API for managing chat history,
+    using the storage strategy as the underlying persistence mechanism.
+    """
+
+    CHAT_HISTORY_COLLECTION = "chat_history"
+    CHAT_HISTORY_RECORD_ID = "full_chat_history"
+
+    def _get_history_key(self, context: ModuleContext) -> str:
+        """Get session-specific history key.
+
+        Args:
+            context: Module context containing session information
+
+        Returns:
+            Unique history key for the current session
+        """
+        # TODO: define mission-specific chat history key not dependant on mission_id
+        # or need customization by user
+        mission_id = getattr(context.session, "mission_id", None) or "default"
+        return f"{self.CHAT_HISTORY_RECORD_ID}_{mission_id}"
+
+    def load_chat_history(self, context: ModuleContext) -> ChatHistory:
+        """Load chat history for the current session.
+
+        Args:
+            context: Module context containing storage strategy
+
+        Returns:
+            Chat history object, empty if none exists or loading fails
+        """
+        history_key = self._get_history_key(context)
+
+        if (raw_history := self.read_storage(context, self.CHAT_HISTORY_COLLECTION, history_key)) is not None:
+            return ChatHistory.model_validate(raw_history.data)
+        return ChatHistory(messages=[])
+
+    def append_chat_history_message(
+        self,
+        context: ModuleContext,
+        role: Role,
+        content: Any,  # noqa: ANN401
+    ) -> None:
+        """Append a message to chat history.
+
+        Args:
+            context: Module context containing storage strategy
+            role: Message role (user, assistant, system)
+            content: Message content
+
+        Raises:
+            StorageServiceError: If history update fails
+        """
+        history_key = self._get_history_key(context)
+        chat_history = self.load_chat_history(context)
+
+        chat_history.messages.append(BaseMessage(role=role, content=content))
+        if len(chat_history.messages) == 1:
+            # Create new record
+            self.log_debug(context, f"Creating new chat history for session: {history_key}")
+            self.store_storage(
+                context,
+                self.CHAT_HISTORY_COLLECTION,
+                history_key,
+                chat_history.model_dump(),
+                data_type="OUTPUT",
+            )
+        else:
+            self.log_debug(context, f"Updating chat history for session: {history_key}")
+            self.update_storage(
+                context,
+                self.CHAT_HISTORY_COLLECTION,
+                history_key,
+                chat_history.model_dump(),
+            )
+
+    async def save_send_message(
+        self,
+        context: ModuleContext,
+        output: OutputModelT,
+        role: Role,
+    ) -> None:
+        """Save the output message to the chat history and send a response to the Module request.
+
+        Args:
+            context: Module context containing storage strategy
+            role: Message role (user, assistant, system)
+            output: Message content as Pydantic Class
+        """
+        # TO-DO: we should define a default output message type to ease user experience
+        self.append_chat_history_message(context=context, role=role, content=output.root)
+        await self.send_message(context=context, output=output)
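
A rough sketch of the intended call pattern in a trigger handler; the ExampleChatTrigger class, the reply_output argument, and the Role member names (USER, ASSISTANT) are assumptions made for illustration and should be checked against digitalkin.models.services.storage.

from digitalkin.mixins.chat_history_mixin import ChatHistoryMixin
from digitalkin.models.module.module_context import ModuleContext
from digitalkin.models.services.storage import Role


class ExampleChatTrigger(ChatHistoryMixin):
    """Hypothetical trigger handler persisting one conversation turn."""

    async def handle(self, context: ModuleContext, user_text: str, reply_output) -> None:
        # Role.USER / Role.ASSISTANT member names are assumed, not shown in this diff.
        self.append_chat_history_message(context, role=Role.USER, content=user_text)
        # Store the assistant reply and stream it back to the caller in one call.
        await self.save_send_message(context, output=reply_output, role=Role.ASSISTANT)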
digitalkin/mixins/cost_mixin.py
ADDED

@@ -0,0 +1,76 @@
+"""Cost Mixin to ease trigger deveolpment."""
+
+from typing import Literal
+
+from digitalkin.models.module.module_context import ModuleContext
+from digitalkin.services.cost.cost_strategy import CostData
+
+
+class CostMixin:
+    """Mixin providing cost tracking operations through the cost strategy.
+
+    This mixin wraps cost strategy calls to provide a cleaner API
+    for cost tracking in trigger handlers.
+    """
+
+    @staticmethod
+    def add_cost(context: ModuleContext, name: str, cost_config_name: str, quantity: float) -> None:
+        """Add a cost entry using the cost strategy.
+
+        Args:
+            context: Module context containing the cost strategy
+            name: Name/identifier for this cost entry
+            cost_config_name: Name of the cost configuration to use
+            quantity: Quantity of units consumed
+
+        Raises:
+            CostServiceError: If cost addition fails
+        """
+        return context.cost.add(name, cost_config_name, quantity)
+
+    @staticmethod
+    def get_cost(context: ModuleContext, name: str) -> list[CostData]:
+        """Get cost entries for a specific name.
+
+        Args:
+            context: Module context containing the cost strategy
+            name: Name/identifier to get costs for
+
+        Returns:
+            List of cost data entries
+
+        Raises:
+            CostServiceError: If cost retrieval fails
+        """
+        return context.cost.get(name)
+
+    @staticmethod
+    def get_costs(
+        context: ModuleContext,
+        names: list[str] | None = None,
+        cost_types: list[
+            Literal[
+                "TOKEN_INPUT",
+                "TOKEN_OUTPUT",
+                "API_CALL",
+                "STORAGE",
+                "TIME",
+                "OTHER",
+            ]
+        ]
+        | None = None,
+    ) -> list[CostData]:
+        """Get filtered cost entries.
+
+        Args:
+            context: Module context containing the cost strategy
+            names: Optional list of names to filter by
+            cost_types: Optional list of cost types to filter by
+
+        Returns:
+            List of filtered cost data entries
+
+        Raises:
+            CostServiceError: If cost retrieval fails
+        """
+        return context.cost.get_filtered(names, cost_types)
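
A minimal sketch of cost tracking from a trigger; the ExampleTrigger class and the "gpt-4o-input" cost configuration name are invented, the assumption being that a matching configuration has been registered with the cost service.

from digitalkin.mixins.cost_mixin import CostMixin
from digitalkin.models.module.module_context import ModuleContext
from digitalkin.services.cost.cost_strategy import CostData


class ExampleTrigger(CostMixin):
    """Hypothetical trigger recording token consumption."""

    def record_llm_usage(self, context: ModuleContext, prompt_tokens: int) -> list[CostData]:
        # Register consumption against a pre-declared cost configuration.
        self.add_cost(context, name="llm_call", cost_config_name="gpt-4o-input", quantity=prompt_tokens)
        # Read back only the token-input entries recorded under this name.
        return self.get_costs(context, names=["llm_call"], cost_types=["TOKEN_INPUT"])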
digitalkin/mixins/file_history_mixin.py
ADDED

@@ -0,0 +1,93 @@
+"""Context mixins providing ergonomic access to service strategies.
+
+This module provides mixins that wrap service strategy calls with cleaner APIs,
+following Django/FastAPI patterns where context is passed explicitly to each method.
+"""
+
+from digitalkin.mixins.logger_mixin import LoggerMixin
+from digitalkin.mixins.storage_mixin import StorageMixin
+from digitalkin.models.module.module_context import ModuleContext
+from digitalkin.models.services.storage import FileHistory, FileModel
+
+
+class FileHistoryMixin(StorageMixin, LoggerMixin):
+    """Mixin providing File history operations through storage strategy.
+
+    This mixin provides a higher-level API for managing File history,
+    using the storage strategy as the underlying persistence mechanism.
+    """
+
+    file_history_front: FileHistory = FileHistory(files=[])
+    FILE_HISTORY_COLLECTION = "file_history"
+    FILE_HISTORY_RECORD_ID = "full_file_history"
+
+    def _get_history_key(self, context: ModuleContext) -> str:
+        """Get session-specific history key.
+
+        Args:
+            context: Module context containing session information
+
+        Returns:
+            Unique history key for the current session
+        """
+        # TODO: define mission-specific chat history key not dependant on mission_id
+        # or need customization by user
+        mission_id = getattr(context.session, "mission_id", None) or "default"
+        return f"{self.FILE_HISTORY_RECORD_ID}_{mission_id}"
+
+    def load_file_history(self, context: ModuleContext) -> FileHistory:
+        """Load File history for the current session.
+
+        Args:
+            context: Module context containing storage strategy
+
+        Returns:
+            File history object, empty if none exists or loading fails
+        """
+        history_key = self._get_history_key(context)
+
+        if self.file_history_front is None:
+            try:
+                record = self.read_storage(
+                    context,
+                    self.FILE_HISTORY_COLLECTION,
+                    history_key,
+                )
+                if record and record.data:
+                    return FileHistory.model_validate(record.data)
+            except Exception as e:
+                self.log_warning(context, f"Failed to load File history: {e}")
+        return self.file_history_front
+
+    def append_files_history(self, context: ModuleContext, files: list[FileModel]) -> None:
+        """Append a message to File history.
+
+        Args:
+            context: Module context containing storage strategy
+            files: list of files model
+
+        Raises:
+            StorageServiceError: If history update fails
+        """
+        history_key = self._get_history_key(context)
+        file_history = self.load_file_history(context)
+
+        file_history.files.extend(files)
+        if len(file_history.files) == len(files):
+            # Create new record
+            self.log_debug(context, f"Creating new file history for session: {history_key}")
+            self.store_storage(
+                context,
+                self.FILE_HISTORY_COLLECTION,
+                history_key,
+                file_history.model_dump(),
+                data_type="OUTPUT",
+            )
+        else:
+            self.log_debug(context, f"Updating file history for session: {history_key}")
+            self.update_storage(
+                context,
+                self.FILE_HISTORY_COLLECTION,
+                history_key,
+                file_history.model_dump(),
+            )
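
To close out the mixin additions, a hedged sketch of file tracking from a trigger; ExampleFileTrigger is invented, and the FileModel instances are assumed to be produced elsewhere (for example by the filesystem service), since their fields are not shown in this diff.

from digitalkin.mixins.file_history_mixin import FileHistoryMixin
from digitalkin.models.module.module_context import ModuleContext
from digitalkin.models.services.storage import FileModel


class ExampleFileTrigger(FileHistoryMixin):
    """Hypothetical trigger keeping a per-mission record of produced files."""

    def track_files(self, context: ModuleContext, produced: list[FileModel]) -> None:
        # Append the new files to the per-mission history record.
        self.append_files_history(context, produced)
        # The accumulated history can be reloaded later in the same session.
        history = self.load_file_history(context)
        self.log_debug(context, f"{len(history.files)} files tracked so far")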