digitalkin 0.2.23__py3-none-any.whl → 0.3.1.dev2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- digitalkin/__version__.py +1 -1
- digitalkin/core/__init__.py +1 -0
- digitalkin/core/common/__init__.py +9 -0
- digitalkin/core/common/factories.py +156 -0
- digitalkin/core/job_manager/__init__.py +1 -0
- digitalkin/{modules → core}/job_manager/base_job_manager.py +137 -31
- digitalkin/core/job_manager/single_job_manager.py +354 -0
- digitalkin/{modules → core}/job_manager/taskiq_broker.py +116 -22
- digitalkin/core/job_manager/taskiq_job_manager.py +541 -0
- digitalkin/core/task_manager/__init__.py +1 -0
- digitalkin/core/task_manager/base_task_manager.py +539 -0
- digitalkin/core/task_manager/local_task_manager.py +108 -0
- digitalkin/core/task_manager/remote_task_manager.py +87 -0
- digitalkin/core/task_manager/surrealdb_repository.py +266 -0
- digitalkin/core/task_manager/task_executor.py +249 -0
- digitalkin/core/task_manager/task_session.py +406 -0
- digitalkin/grpc_servers/__init__.py +1 -19
- digitalkin/grpc_servers/_base_server.py +3 -3
- digitalkin/grpc_servers/module_server.py +27 -43
- digitalkin/grpc_servers/module_servicer.py +51 -36
- digitalkin/grpc_servers/registry_server.py +2 -2
- digitalkin/grpc_servers/registry_servicer.py +4 -4
- digitalkin/grpc_servers/utils/__init__.py +1 -0
- digitalkin/grpc_servers/utils/exceptions.py +0 -8
- digitalkin/grpc_servers/utils/grpc_client_wrapper.py +4 -4
- digitalkin/grpc_servers/utils/grpc_error_handler.py +53 -0
- digitalkin/logger.py +73 -24
- digitalkin/mixins/__init__.py +19 -0
- digitalkin/mixins/base_mixin.py +10 -0
- digitalkin/mixins/callback_mixin.py +24 -0
- digitalkin/mixins/chat_history_mixin.py +110 -0
- digitalkin/mixins/cost_mixin.py +76 -0
- digitalkin/mixins/file_history_mixin.py +93 -0
- digitalkin/mixins/filesystem_mixin.py +46 -0
- digitalkin/mixins/logger_mixin.py +51 -0
- digitalkin/mixins/storage_mixin.py +79 -0
- digitalkin/models/core/__init__.py +1 -0
- digitalkin/{modules/job_manager → models/core}/job_manager_models.py +3 -3
- digitalkin/models/core/task_monitor.py +70 -0
- digitalkin/models/grpc_servers/__init__.py +1 -0
- digitalkin/{grpc_servers/utils → models/grpc_servers}/models.py +5 -5
- digitalkin/models/module/__init__.py +2 -0
- digitalkin/models/module/module.py +9 -1
- digitalkin/models/module/module_context.py +122 -6
- digitalkin/models/module/module_types.py +307 -19
- digitalkin/models/services/__init__.py +9 -0
- digitalkin/models/services/cost.py +1 -0
- digitalkin/models/services/storage.py +39 -5
- digitalkin/modules/_base_module.py +123 -118
- digitalkin/modules/tool_module.py +10 -2
- digitalkin/modules/trigger_handler.py +7 -6
- digitalkin/services/cost/__init__.py +9 -2
- digitalkin/services/cost/grpc_cost.py +9 -42
- digitalkin/services/filesystem/default_filesystem.py +0 -2
- digitalkin/services/filesystem/grpc_filesystem.py +10 -39
- digitalkin/services/setup/default_setup.py +5 -6
- digitalkin/services/setup/grpc_setup.py +52 -15
- digitalkin/services/storage/grpc_storage.py +4 -4
- digitalkin/services/user_profile/__init__.py +1 -0
- digitalkin/services/user_profile/default_user_profile.py +55 -0
- digitalkin/services/user_profile/grpc_user_profile.py +69 -0
- digitalkin/services/user_profile/user_profile_strategy.py +40 -0
- digitalkin/utils/__init__.py +28 -0
- digitalkin/utils/arg_parser.py +1 -1
- digitalkin/utils/development_mode_action.py +2 -2
- digitalkin/utils/dynamic_schema.py +483 -0
- digitalkin/utils/package_discover.py +1 -2
- {digitalkin-0.2.23.dist-info → digitalkin-0.3.1.dev2.dist-info}/METADATA +11 -30
- digitalkin-0.3.1.dev2.dist-info/RECORD +119 -0
- modules/dynamic_setup_module.py +362 -0
- digitalkin/grpc_servers/utils/factory.py +0 -180
- digitalkin/modules/job_manager/single_job_manager.py +0 -294
- digitalkin/modules/job_manager/taskiq_job_manager.py +0 -290
- digitalkin-0.2.23.dist-info/RECORD +0 -89
- /digitalkin/{grpc_servers/utils → models/grpc_servers}/types.py +0 -0
- {digitalkin-0.2.23.dist-info → digitalkin-0.3.1.dev2.dist-info}/WHEEL +0 -0
- {digitalkin-0.2.23.dist-info → digitalkin-0.3.1.dev2.dist-info}/licenses/LICENSE +0 -0
- {digitalkin-0.2.23.dist-info → digitalkin-0.3.1.dev2.dist-info}/top_level.txt +0 -0
digitalkin/grpc_servers/module_servicer.py
CHANGED

@@ -5,7 +5,7 @@ from collections.abc import AsyncGenerator
 from typing import Any

 import grpc
-from digitalkin_proto.
+from digitalkin_proto.agentic_mesh_protocol.module.v1 import (
     information_pb2,
     lifecycle_pb2,
     module_service_pb2_grpc,
@@ -13,12 +13,12 @@ from digitalkin_proto.digitalkin.module.v2 import (
 )
 from google.protobuf import json_format, struct_pb2

+from digitalkin.core.job_manager.base_job_manager import BaseJobManager
 from digitalkin.grpc_servers.utils.exceptions import ServicerError
 from digitalkin.logger import logger
+from digitalkin.models.core.job_manager_models import JobManagerMode
 from digitalkin.models.module.module import ModuleStatus
 from digitalkin.modules._base_module import BaseModule
-from digitalkin.modules.job_manager.base_job_manager import BaseJobManager
-from digitalkin.modules.job_manager.job_manager_models import JobManagerMode
 from digitalkin.services.services_models import ServicesMode
 from digitalkin.services.setup.default_setup import DefaultSetup
 from digitalkin.services.setup.grpc_setup import GrpcSetup
@@ -76,9 +76,9 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         self.job_manager = job_manager_class(module_class, self.args.services_mode)

         logger.debug(
-            "ModuleServicer initialized with job manager: %s
+            "ModuleServicer initialized with job manager: %s",
             self.args.job_manager_mode,
-            self.job_manager,
+            extra={"job_manager": self.job_manager},
         )
         self.setup = GrpcSetup() if self.args.services_mode == ServicesMode.REMOTE else DefaultSetup()

@@ -112,7 +112,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # TODO: Secret should be used here as well
         setup_version = request.setup_version
         config_setup_data = self.module_class.create_config_setup_model(json_format.MessageToDict(request.content))
-        setup_version_data = self.module_class.create_setup_model(
+        setup_version_data = await self.module_class.create_setup_model(
             json_format.MessageToDict(request.setup_version.content),
             config_fields=True,
         )
@@ -172,7 +172,8 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         )
         # Process the module input
         # TODO: Check failure of input data format
-        input_data = self.module_class.create_input_model(
+        input_data = self.module_class.create_input_model(json_format.MessageToDict(request.input))
+
         setup_data_class = self.setup.get_setup(
             setup_dict={
                 "setup_id": request.setup_id,
@@ -184,7 +185,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
             msg = "No setup data returned."
             raise ServicerError(msg)

-        setup_data = self.module_class.create_setup_model(setup_data_class.current_setup_version.content)
+        setup_data = await self.module_class.create_setup_model(setup_data_class.current_setup_version.content)

         # create a task to run the module in background
         job_id = await self.job_manager.create_module_instance_job(
@@ -201,27 +202,37 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
             yield lifecycle_pb2.StartModuleResponse(success=False)
             return

-
-        async
[... 19 more deleted lines are not rendered in this diff view ...]
+        try:
+            async with self.job_manager.generate_stream_consumer(job_id) as stream:  # type: ignore
+                async for message in stream:
+                    if message.get("error", None) is not None:
+                        logger.error("Error in output_data", extra={"message": message})
+                        context.set_code(message["error"]["code"])
+                        context.set_details(message["error"]["error_message"])
+                        yield lifecycle_pb2.StartModuleResponse(success=False, job_id=job_id)
+                        break
+
+                    if message.get("exception", None) is not None:
+                        logger.error("Exception in output_data", extra={"message": message})
+                        context.set_code(message["short_description"])
+                        context.set_details(message["exception"])
+                        yield lifecycle_pb2.StartModuleResponse(success=False, job_id=job_id)
+                        break
+
+                    if message.get("code", None) is not None and message.get("code") == "__END_OF_STREAM__":
+                        logger.info(
+                            "End of stream via __END_OF_STREAM__",
+                            extra={"job_id": job_id, "mission_id": request.mission_id},
+                        )
+                        break
+
+                    logger.info("Yielding message from job %s: %s", job_id, message)
+                    proto = json_format.ParseDict(message, struct_pb2.Struct(), ignore_unknown_fields=True)
+                    yield lifecycle_pb2.StartModuleResponse(success=True, output=proto, job_id=job_id)
+        finally:
+            await self.job_manager.wait_for_completion(job_id)
+            await self.job_manager.clean_session(job_id, mission_id=request.mission_id)
+
         logger.info("Job %s finished", job_id)

     async def StopModule(  # noqa: N802
@@ -248,7 +259,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
             context.set_details(message)
             return lifecycle_pb2.StopModuleResponse(success=False)

-        logger.debug("Job %s stopped successfully", request.job_id)
+        logger.debug("Job %s stopped successfully", request.job_id, extra={"job_id": request.job_id})
         return lifecycle_pb2.StopModuleResponse(success=True)

     async def GetModuleStatus(  # noqa: N802
@@ -339,7 +350,9 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get input schema if available
         try:
             # Convert schema to proto format
-            input_schema_proto = self.module_class.get_input_format(
+            input_schema_proto = await self.module_class.get_input_format(
+                llm_format=request.llm_format,
+            )
             input_format_struct = json_format.Parse(
                 text=input_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -375,7 +388,9 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get output schema if available
         try:
             # Convert schema to proto format
-            output_schema_proto = self.module_class.get_output_format(
+            output_schema_proto = await self.module_class.get_output_format(
+                llm_format=request.llm_format,
+            )
             output_format_struct = json_format.Parse(
                 text=output_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -411,7 +426,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get setup schema if available
         try:
             # Convert schema to proto format
-            setup_schema_proto = self.module_class.get_setup_format(llm_format=request.llm_format)
+            setup_schema_proto = await self.module_class.get_setup_format(llm_format=request.llm_format)
             setup_format_struct = json_format.Parse(
                 text=setup_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -428,7 +443,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
             setup_schema=setup_format_struct,
         )

-    def GetModuleSecret(  # noqa: N802
+    async def GetModuleSecret(  # noqa: N802
         self,
         request: information_pb2.GetModuleSecretRequest,
         context: grpc.ServicerContext,
@@ -447,7 +462,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get secret schema if available
         try:
             # Convert schema to proto format
-            secret_schema_proto = self.module_class.get_secret_format(llm_format=request.llm_format)
+            secret_schema_proto = await self.module_class.get_secret_format(llm_format=request.llm_format)
             secret_format_struct = json_format.Parse(
                 text=secret_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
@@ -483,7 +498,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         # Get setup schema if available
         try:
             # Convert schema to proto format
-            config_setup_schema_proto = self.module_class.get_config_setup_format(llm_format=request.llm_format)
+            config_setup_schema_proto = await self.module_class.get_config_setup_format(llm_format=request.llm_format)
             config_setup_format_struct = json_format.Parse(
                 text=config_setup_schema_proto,
                 message=struct_pb2.Struct(),  # pylint: disable=no-member
digitalkin/grpc_servers/registry_server.py
CHANGED

@@ -1,14 +1,14 @@
 """Registry gRPC server implementation for DigitalKin."""

-from digitalkin_proto.
+from digitalkin_proto.agentic_mesh_protocol.module_registry.v1 import (
     module_registry_service_pb2,
     module_registry_service_pb2_grpc,
 )

 from digitalkin.grpc_servers._base_server import BaseServer
 from digitalkin.grpc_servers.registry_servicer import RegistryModule, RegistryServicer
-from digitalkin.grpc_servers.utils.models import RegistryServerConfig
 from digitalkin.logger import logger
+from digitalkin.models.grpc_servers.models import RegistryServerConfig


 class RegistryServer(BaseServer):
digitalkin/grpc_servers/registry_servicer.py
CHANGED

@@ -9,7 +9,7 @@ from collections.abc import Iterator
 from enum import Enum

 import grpc
-from digitalkin_proto.
+from digitalkin_proto.agentic_mesh_protocol.module_registry.v1 import (
     discover_pb2,
     metadata_pb2,
     module_registry_service_pb2_grpc,
@@ -344,7 +344,7 @@ class RegistryServicer(module_registry_service_pb2_grpc.ModuleRegistryServiceSer
             return status_pb2.ModuleStatusResponse()

         module = self.registered_modules[request.module_id]
-        return status_pb2.ModuleStatusResponse(module_id=module.module_id, status=module.status.
+        return status_pb2.ModuleStatusResponse(module_id=module.module_id, status=module.status.value)

     def ListModuleStatus(  # noqa: N802
         self,
@@ -379,7 +379,7 @@ class RegistryServicer(module_registry_service_pb2_grpc.ModuleRegistryServiceSer
         list_size = len(self.registered_modules)

         modules_statuses = [
-            status_pb2.ModuleStatusResponse(module_id=module.module_id, status=module.status.
+            status_pb2.ModuleStatusResponse(module_id=module.module_id, status=module.status.value)
             for module in list(self.registered_modules.values())[request.offset : request.offset + list_size]
         ]

@@ -409,7 +409,7 @@ class RegistryServicer(module_registry_service_pb2_grpc.ModuleRegistryServiceSer
         for module in self.registered_modules.values():
             yield status_pb2.ModuleStatusResponse(
                 module_id=module.module_id,
-                status=module.status.
+                status=module.status.value,
             )

     def UpdateModuleStatus(  # noqa: N802
digitalkin/grpc_servers/utils/__init__.py
CHANGED

@@ -0,0 +1 @@
+"""gRPC servers utilities package."""
digitalkin/grpc_servers/utils/exceptions.py
CHANGED

@@ -27,11 +27,3 @@ class ServerStateError(ServerError):

 class ReflectionError(ServerError):
     """Error related to gRPC reflection service."""
-
-
-class HealthCheckError(ServerError):
-    """Error related to gRPC health check service."""
-
-
-class OptionalFeatureNotImplementedError(NotImplementedError):
-    """Raised when an optional feature is not implemented, but was requested."""
digitalkin/grpc_servers/utils/grpc_client_wrapper.py
CHANGED

@@ -6,8 +6,8 @@ from typing import Any
 import grpc

 from digitalkin.grpc_servers.utils.exceptions import ServerError
-from digitalkin.grpc_servers.utils.models import ClientConfig, SecurityMode
 from digitalkin.logger import logger
+from digitalkin.models.grpc_servers.models import ClientConfig, SecurityMode


 class GrpcClientWrapper:
@@ -62,11 +62,11 @@ class GrpcClientWrapper:
         """
         try:
             # Call the register method
-            logger.debug("send request to %s", query_endpoint)
+            logger.debug("send request to %s", query_endpoint, extra={"request": request})
             response = getattr(self.stub, query_endpoint)(request)
-            logger.debug("receive response from request to
+            logger.debug("receive response from request to %s", query_endpoint, extra={"response": response})
         except grpc.RpcError as e:
-            logger.exception("RPC error during %s
+            logger.exception("RPC error during %s", query_endpoint, extra={"error": e.details()})
             raise ServerError
         else:
             return response
digitalkin/grpc_servers/utils/grpc_error_handler.py
ADDED

@@ -0,0 +1,53 @@
+"""Shared error handling utilities for gRPC services."""
+
+from collections.abc import Generator
+from contextlib import contextmanager
+from typing import Any
+
+from digitalkin.grpc_servers.utils.exceptions import ServerError
+from digitalkin.logger import logger
+
+
+class GrpcErrorHandlerMixin:
+    """Mixin class providing common gRPC error handling functionality."""
+
+    @contextmanager
+    def handle_grpc_errors(  # noqa: PLR6301
+        self,
+        operation: str,
+        service_error_class: type[Exception] | None = None,
+    ) -> Generator[Any, Any, Any]:
+        """Handle gRPC errors for the given operation.
+
+        Args:
+            operation: Name of the operation being performed.
+            service_error_class: Optional specific service exception class to raise.
+                If not provided, uses the generic ServerError.
+
+        Yields:
+            Context for the operation.
+
+        Raises:
+            ServerError: For gRPC-related errors.
+            service_error_class: For service-specific errors if provided.
+        """
+        if service_error_class is None:
+            service_error_class = ServerError
+
+        try:
+            yield
+        except service_error_class as e:
+            # Re-raise service-specific errors as-is
+            msg = f"{service_error_class.__name__} in {operation}: {e}"
+            logger.exception(msg)
+            raise service_error_class(msg) from e
+        except ServerError as e:
+            # Handle gRPC server errors
+            msg = f"gRPC {operation} failed: {e}"
+            logger.exception(msg)
+            raise ServerError(msg) from e
+        except Exception as e:
+            # Handle unexpected errors
+            msg = f"Unexpected error in {operation}: {e}"
+            logger.exception(msg)
+            raise service_error_class(msg) from e
digitalkin/logger.py
CHANGED

@@ -11,6 +11,15 @@ from typing import Any, ClassVar
 class ColorJSONFormatter(logging.Formatter):
     """Color JSON formatter for development (pretty-printed with colors)."""

+    def __init__(self, *, is_production: bool = False) -> None:
+        """Initialize the formatter.
+
+        Args:
+            is_production: Whether the application is running in production.
+        """
+        self.is_production = is_production
+        super().__init__()
+
     grey = "\x1b[38;20m"
     green = "\x1b[32;20m"
     blue = "\x1b[34;20m"
@@ -39,12 +48,10 @@ class ColorJSONFormatter(logging.Formatter):
         log_obj: dict[str, Any] = {
             "timestamp": datetime.fromtimestamp(record.created, tz=timezone.utc).isoformat(),
             "level": record.levelname.lower(),
-            "logger": record.name,
             "message": record.getMessage(),
-            "
-            "
+            "module": record.module,
+            "location": f"{record.pathname}:{record.lineno}:{record.funcName}",
         }
-
         # Add exception info if present
         if record.exc_info:
             log_obj["exception"] = self.formatException(record.exc_info)
@@ -81,28 +88,70 @@ class ColorJSONFormatter(logging.Formatter):

         # Pretty print with color
         color = self.COLORS.get(record.levelno, self.grey)
+        if self.is_production:
+            log_obj["message"] = f"{color}{log_obj.get('message', '')}{self.reset}"
+            return json.dumps(log_obj, default=str, separators=(",", ":"))
         json_str = json.dumps(log_obj, indent=2, default=str)
-
-        json_str.replace("\\n", "\n")
+        json_str = json_str.replace("\\n", "\n")
         return f"{color}{json_str}{self.reset}"


[... 4 deleted lines are not rendered in this diff view ...]
+def setup_logger(
+    name: str,
+    level: int = logging.INFO,
+    additional_loggers: dict[str, int] | None = None,
+    *,
+    is_production: bool | None = None,
+    configure_root: bool = True,
+) -> logging.Logger:
+    """Set up a logger with the ColorJSONFormatter.
+
+    Args:
+        name: Name of the logger to create
+        level: Logging level (default: logging.INFO)
+        is_production: Whether running in production. If None, checks RAILWAY_SERVICE_NAME env var
+        configure_root: Whether to configure root logger (default: True)
+        additional_loggers: Dict of additional logger names and their levels to configure
+
+    Returns:
+        logging.Logger: Configured logger instance
+    """
+    # Determine if we're in production
+    if is_production is None:
+        is_production = os.getenv("RAILWAY_SERVICE_NAME") is not None
+
+    # Configure root logger if requested
+    if configure_root:
+        logging.basicConfig(
+            level=logging.DEBUG,
+            stream=sys.stdout,
+            datefmt="%Y-%m-%d %H:%M:%S",
+        )
+
+    # Configure additional loggers
+    if additional_loggers:
+        for logger_name, logger_level in additional_loggers.items():
+            logging.getLogger(logger_name).setLevel(logger_level)
+
+    # Create and configure the main logger
+    logger = logging.getLogger(name)
+    logger.setLevel(level)
+    # Only add handler if not already configured
+    if not logger.handlers:
+        ch = logging.StreamHandler()
+        ch.setLevel(level)
+        ch.setFormatter(ColorJSONFormatter(is_production=is_production))
+        logger.addHandler(ch)
+        logger.propagate = False
+
+    return logger
+
+
+logger = setup_logger(
+    "digitalkin",
+    level=logging.INFO,
+    additional_loggers={
+        "grpc": logging.DEBUG,
+        "asyncio": logging.DEBUG,
+    },
 )
-
-logging.getLogger("grpc").setLevel(logging.DEBUG)
-logging.getLogger("asyncio").setLevel(logging.DEBUG)
-
-
-logger = logging.getLogger("digitalkin")
-
-if not logger.handlers:
-    ch = logging.StreamHandler()
-    ch.setLevel(logging.INFO)
-    ch.setFormatter(ColorJSONFormatter())
-
-    logger.addHandler(ch)
-    logger.propagate = False
digitalkin/mixins/__init__.py
ADDED

@@ -0,0 +1,19 @@
+"""Mixin definitions."""
+
+from digitalkin.mixins.base_mixin import BaseMixin
+from digitalkin.mixins.callback_mixin import UserMessageMixin
+from digitalkin.mixins.chat_history_mixin import ChatHistoryMixin
+from digitalkin.mixins.cost_mixin import CostMixin
+from digitalkin.mixins.filesystem_mixin import FilesystemMixin
+from digitalkin.mixins.logger_mixin import LoggerMixin
+from digitalkin.mixins.storage_mixin import StorageMixin
+
+__all__ = [
+    "BaseMixin",
+    "ChatHistoryMixin",
+    "CostMixin",
+    "FilesystemMixin",
+    "LoggerMixin",
+    "StorageMixin",
+    "UserMessageMixin",
+]
digitalkin/mixins/base_mixin.py
ADDED

@@ -0,0 +1,10 @@
+"""Simple toolkit class with basic and simple API access in the Triggers."""
+
+from digitalkin.mixins.chat_history_mixin import ChatHistoryMixin
+from digitalkin.mixins.cost_mixin import CostMixin
+from digitalkin.mixins.file_history_mixin import FileHistoryMixin
+from digitalkin.mixins.logger_mixin import LoggerMixin
+
+
+class BaseMixin(CostMixin, ChatHistoryMixin, FileHistoryMixin, LoggerMixin):
+    """Base Mixin to access to minimum Module Context functionnalities in the Triggers."""
digitalkin/mixins/callback_mixin.py
ADDED

@@ -0,0 +1,24 @@
+"""User callback to send a message from the Trigger."""
+
+from typing import Generic
+
+from digitalkin.models.module.module_context import ModuleContext
+from digitalkin.models.module.module_types import OutputModelT
+
+
+class UserMessageMixin(Generic[OutputModelT]):
+    """Mixin providing callback operations through the callbacks .
+
+    This mixin wraps callback strategy calls to provide a cleaner API
+    for direct messaging in trigger handlers.
+    """
+
+    @staticmethod
+    async def send_message(context: ModuleContext, output: OutputModelT) -> None:
+        """Send a message using the callbacks strategy.
+
+        Args:
+            context: Module context containing the callbacks strategy.
+            output: Message to send with the Module defined output Type.
+        """
+        await context.callbacks.send_message(output)
digitalkin/mixins/chat_history_mixin.py
ADDED

@@ -0,0 +1,110 @@
+"""Context mixins providing ergonomic access to service strategies.
+
+This module provides mixins that wrap service strategy calls with cleaner APIs,
+following Django/FastAPI patterns where context is passed explicitly to each method.
+"""
+
+from typing import Any, Generic
+
+from digitalkin.mixins.callback_mixin import UserMessageMixin
+from digitalkin.mixins.logger_mixin import LoggerMixin
+from digitalkin.mixins.storage_mixin import StorageMixin
+from digitalkin.models.module.module_context import ModuleContext
+from digitalkin.models.module.module_types import InputModelT, OutputModelT
+from digitalkin.models.services.storage import BaseMessage, ChatHistory, Role
+
+
+class ChatHistoryMixin(UserMessageMixin, StorageMixin, LoggerMixin, Generic[InputModelT, OutputModelT]):
+    """Mixin providing chat history operations through storage strategy.
+
+    This mixin provides a higher-level API for managing chat history,
+    using the storage strategy as the underlying persistence mechanism.
+    """
+
+    CHAT_HISTORY_COLLECTION = "chat_history"
+    CHAT_HISTORY_RECORD_ID = "full_chat_history"
+
+    def _get_history_key(self, context: ModuleContext) -> str:
+        """Get session-specific history key.
+
+        Args:
+            context: Module context containing session information
+
+        Returns:
+            Unique history key for the current session
+        """
+        # TODO: define mission-specific chat history key not dependant on mission_id
+        # or need customization by user
+        mission_id = getattr(context.session, "mission_id", None) or "default"
+        return f"{self.CHAT_HISTORY_RECORD_ID}_{mission_id}"
+
+    def load_chat_history(self, context: ModuleContext) -> ChatHistory:
+        """Load chat history for the current session.
+
+        Args:
+            context: Module context containing storage strategy
+
+        Returns:
+            Chat history object, empty if none exists or loading fails
+        """
+        history_key = self._get_history_key(context)
+
+        if (raw_history := self.read_storage(context, self.CHAT_HISTORY_COLLECTION, history_key)) is not None:
+            return ChatHistory.model_validate(raw_history.data)
+        return ChatHistory(messages=[])
+
+    def append_chat_history_message(
+        self,
+        context: ModuleContext,
+        role: Role,
+        content: Any,  # noqa: ANN401
+    ) -> None:
+        """Append a message to chat history.
+
+        Args:
+            context: Module context containing storage strategy
+            role: Message role (user, assistant, system)
+            content: Message content
+
+        Raises:
+            StorageServiceError: If history update fails
+        """
+        history_key = self._get_history_key(context)
+        chat_history = self.load_chat_history(context)
+
+        chat_history.messages.append(BaseMessage(role=role, content=content))
+        if len(chat_history.messages) == 1:
+            # Create new record
+            self.log_debug(context, f"Creating new chat history for session: {history_key}")
+            self.store_storage(
+                context,
+                self.CHAT_HISTORY_COLLECTION,
+                history_key,
+                chat_history.model_dump(),
+                data_type="OUTPUT",
+            )
+        else:
+            self.log_debug(context, f"Updating chat history for session: {history_key}")
+            self.update_storage(
+                context,
+                self.CHAT_HISTORY_COLLECTION,
+                history_key,
+                chat_history.model_dump(),
+            )
+
+    async def save_send_message(
+        self,
+        context: ModuleContext,
+        output: OutputModelT,
+        role: Role,
+    ) -> None:
+        """Save the output message to the chat history and send a response to the Module request.
+
+        Args:
+            context: Module context containing storage strategy
+            role: Message role (user, assistant, system)
+            output: Message content as Pydantic Class
+
+        """
+        # TO-DO: we should define a default output message type to ease user experience
+        self.append_chat_history_message(context=context, role=role, content=output.root)
+        await self.send_message(context=context, output=output)