digitalkin 0.3.1.dev2__py3-none-any.whl → 0.3.2.dev14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. base_server/server_async_insecure.py +6 -5
  2. base_server/server_async_secure.py +6 -5
  3. base_server/server_sync_insecure.py +5 -4
  4. base_server/server_sync_secure.py +5 -4
  5. digitalkin/__version__.py +1 -1
  6. digitalkin/core/job_manager/base_job_manager.py +1 -1
  7. digitalkin/core/job_manager/single_job_manager.py +28 -9
  8. digitalkin/core/job_manager/taskiq_broker.py +7 -6
  9. digitalkin/core/job_manager/taskiq_job_manager.py +1 -1
  10. digitalkin/core/task_manager/surrealdb_repository.py +7 -7
  11. digitalkin/core/task_manager/task_session.py +60 -98
  12. digitalkin/grpc_servers/module_server.py +109 -168
  13. digitalkin/grpc_servers/module_servicer.py +38 -16
  14. digitalkin/grpc_servers/utils/grpc_client_wrapper.py +24 -8
  15. digitalkin/grpc_servers/utils/utility_schema_extender.py +100 -0
  16. digitalkin/models/__init__.py +1 -1
  17. digitalkin/models/core/job_manager_models.py +0 -8
  18. digitalkin/models/core/task_monitor.py +4 -0
  19. digitalkin/models/grpc_servers/models.py +91 -6
  20. digitalkin/models/module/__init__.py +18 -13
  21. digitalkin/models/module/base_types.py +61 -0
  22. digitalkin/models/module/module_context.py +173 -13
  23. digitalkin/models/module/module_types.py +28 -392
  24. digitalkin/models/module/setup_types.py +490 -0
  25. digitalkin/models/module/tool_cache.py +68 -0
  26. digitalkin/models/module/tool_reference.py +117 -0
  27. digitalkin/models/module/utility.py +167 -0
  28. digitalkin/models/services/registry.py +35 -0
  29. digitalkin/modules/__init__.py +5 -1
  30. digitalkin/modules/_base_module.py +154 -61
  31. digitalkin/modules/archetype_module.py +6 -1
  32. digitalkin/modules/tool_module.py +6 -1
  33. digitalkin/modules/triggers/__init__.py +8 -0
  34. digitalkin/modules/triggers/healthcheck_ping_trigger.py +45 -0
  35. digitalkin/modules/triggers/healthcheck_services_trigger.py +63 -0
  36. digitalkin/modules/triggers/healthcheck_status_trigger.py +52 -0
  37. digitalkin/services/__init__.py +4 -0
  38. digitalkin/services/communication/__init__.py +7 -0
  39. digitalkin/services/communication/communication_strategy.py +76 -0
  40. digitalkin/services/communication/default_communication.py +101 -0
  41. digitalkin/services/communication/grpc_communication.py +234 -0
  42. digitalkin/services/cost/grpc_cost.py +1 -1
  43. digitalkin/services/filesystem/grpc_filesystem.py +1 -1
  44. digitalkin/services/registry/__init__.py +22 -1
  45. digitalkin/services/registry/default_registry.py +135 -4
  46. digitalkin/services/registry/exceptions.py +47 -0
  47. digitalkin/services/registry/grpc_registry.py +306 -0
  48. digitalkin/services/registry/registry_models.py +15 -0
  49. digitalkin/services/registry/registry_strategy.py +88 -4
  50. digitalkin/services/services_config.py +25 -3
  51. digitalkin/services/services_models.py +5 -1
  52. digitalkin/services/setup/default_setup.py +1 -1
  53. digitalkin/services/setup/grpc_setup.py +1 -1
  54. digitalkin/services/storage/grpc_storage.py +1 -1
  55. digitalkin/services/user_profile/__init__.py +11 -0
  56. digitalkin/services/user_profile/grpc_user_profile.py +2 -2
  57. digitalkin/services/user_profile/user_profile_strategy.py +0 -15
  58. digitalkin/utils/schema_splitter.py +207 -0
  59. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/METADATA +5 -5
  60. digitalkin-0.3.2.dev14.dist-info/RECORD +143 -0
  61. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/top_level.txt +1 -0
  62. modules/archetype_with_tools_module.py +244 -0
  63. modules/cpu_intensive_module.py +1 -1
  64. modules/dynamic_setup_module.py +5 -29
  65. modules/minimal_llm_module.py +1 -1
  66. modules/text_transform_module.py +1 -1
  67. monitoring/digitalkin_observability/__init__.py +46 -0
  68. monitoring/digitalkin_observability/http_server.py +150 -0
  69. monitoring/digitalkin_observability/interceptors.py +176 -0
  70. monitoring/digitalkin_observability/metrics.py +201 -0
  71. monitoring/digitalkin_observability/prometheus.py +137 -0
  72. monitoring/tests/test_metrics.py +172 -0
  73. services/filesystem_module.py +7 -5
  74. services/storage_module.py +4 -2
  75. digitalkin/grpc_servers/registry_server.py +0 -65
  76. digitalkin/grpc_servers/registry_servicer.py +0 -456
  77. digitalkin-0.3.1.dev2.dist-info/RECORD +0 -119
  78. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/WHEEL +0 -0
  79. {digitalkin-0.3.1.dev2.dist-info → digitalkin-0.3.2.dev14.dist-info}/licenses/LICENSE +0 -0
base_server/server_async_insecure.py CHANGED
@@ -9,8 +9,9 @@ from pathlib import Path
 # Add parent directory to path to enable imports
 sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
 
-from digitalkin.grpc_servers._base_server import BaseServer
 from digitalkin.grpc_servers.utils.models import SecurityMode, ServerConfig, ServerMode
+
+from digitalkin.grpc_servers._base_server import BaseServer
 from examples.base_server.mock.mock_pb2 import DESCRIPTOR, HelloReply  # type: ignore
 from examples.base_server.mock.mock_pb2_grpc import (
     Greeter,
@@ -30,7 +31,7 @@ class AsyncGreeterImpl(Greeter):
 
     async def SayHello(self, request, context):  # noqa: N802
         """Asynchronous implementation of SayHello method."""
-        logger.info(f"Received request object: {request}")
+        logger.info("Received request object: %s", request)
         logger.info(f"Request attributes: {vars(request)}")
         logger.info(f"Received request with name: {request.name}")
 
@@ -40,7 +41,7 @@ class AsyncGreeterImpl(Greeter):
         name = "unknown"
         # Check context metadata
         for key, value in context.invocation_metadata():
-            logger.info(f"Metadata: {key}={value}")
+            logger.info("Metadata: %s=%s", key, value)
             if key.lower() == "name":
                 name = value
 
@@ -97,7 +98,7 @@ async def main_async() -> int:
         # as the KeyboardInterrupt usually breaks out of asyncio.run()
         logger.info("Server stopping due to keyboard interrupt...")
     except Exception as e:
-        logger.exception(f"Error running server: {e}")
+        logger.exception("Error running server: %s", e)
         return 1
     finally:
         # Clean up resources if server was started
@@ -116,7 +117,7 @@ def main():
         logger.info("Server stopped by keyboard interrupt")
         return 0  # Clean exit
     except Exception as e:
-        logger.exception(f"Fatal error: {e}")
+        logger.exception("Fatal error: %s", e)
         return 1
 
 
base_server/server_async_secure.py CHANGED
@@ -9,13 +9,14 @@ from pathlib import Path
 # Add parent directory to path to enable imports
 sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
 
-from digitalkin.grpc_servers._base_server import BaseServer
 from digitalkin.grpc_servers.utils.models import (
     SecurityMode,
     ServerConfig,
     ServerCredentials,
     ServerMode,
 )
+
+from digitalkin.grpc_servers._base_server import BaseServer
 from examples.base_server.mock.mock_pb2 import DESCRIPTOR, HelloReply  # type: ignore
 from examples.base_server.mock.mock_pb2_grpc import (
     Greeter,
@@ -35,7 +36,7 @@ class AsyncGreeterImpl(Greeter):
 
     async def SayHello(self, request, context):  # noqa: N802
         """Asynchronous implementation of SayHello method."""
-        logger.info(f"Received request object: {request}")
+        logger.info("Received request object: %s", request)
         logger.info(f"Request attributes: {vars(request)}")
         logger.info(f"Received request with name: {request.name}")
 
@@ -45,7 +46,7 @@ class AsyncGreeterImpl(Greeter):
         name = "unknown"
         # Check context metadata
         for key, value in context.invocation_metadata():
-            logger.info(f"Metadata: {key}={value}")
+            logger.info("Metadata: %s=%s", key, value)
             if key.lower() == "name":
                 name = value
 
@@ -115,7 +116,7 @@ async def main_async() -> int:
         # as the KeyboardInterrupt usually breaks out of asyncio.run()
         logger.info("Server stopping due to keyboard interrupt...")
     except Exception as e:
-        logger.exception(f"Error running server: {e}")
+        logger.exception("Error running server: %s", e)
         return 1
     finally:
         # Clean up resources if server was started
@@ -134,7 +135,7 @@ def main():
         logger.info("Server stopped by keyboard interrupt")
         return 0  # Clean exit
     except Exception as e:
-        logger.exception(f"Fatal error: {e}")
+        logger.exception("Fatal error: %s", e)
         return 1
 
 
base_server/server_sync_insecure.py CHANGED
@@ -8,8 +8,9 @@ from pathlib import Path
 # Add parent directory to path to enable imports
 sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
 
-from digitalkin.grpc_servers._base_server import BaseServer
 from digitalkin.grpc_servers.utils.models import SecurityMode, ServerConfig, ServerMode
+
+from digitalkin.grpc_servers._base_server import BaseServer
 from examples.base_server.mock.mock_pb2 import DESCRIPTOR, HelloReply  # type: ignore
 from examples.base_server.mock.mock_pb2_grpc import (
     Greeter,
@@ -29,7 +30,7 @@ class SyncGreeterServicer(Greeter):
 
     def SayHello(self, request, context):  # noqa: N802
         """Implementation of SayHello method."""
-        logger.info(f"Received request object: {request}")
+        logger.info("Received request object: %s", request)
         logger.info(f"Request attributes: {vars(request)}")
         logger.info(f"Received request with name: {request.name}")
 
@@ -39,7 +40,7 @@ class SyncGreeterServicer(Greeter):
         name = "unknown"
         # Check context metadata
         for key, value in context.invocation_metadata():
-            logger.info(f"Metadata: {key}={value}")
+            logger.info("Metadata: %s=%s", key, value)
             if key.lower() == "name":
                 name = value
 
@@ -92,7 +93,7 @@ def main() -> int:
         server.stop()
 
     except Exception as e:
-        logger.exception(f"Error running server: {e}")
+        logger.exception("Error running server: %s", e)
         return 1
 
     return 0
base_server/server_sync_secure.py CHANGED
@@ -8,13 +8,14 @@ from pathlib import Path
 # Add parent directory to path to enable imports
 sys.path.insert(0, str(Path(__file__).parent.parent.parent.parent))
 
-from digitalkin.grpc_servers._base_server import BaseServer
 from digitalkin.grpc_servers.utils.models import (
     SecurityMode,
     ServerConfig,
     ServerCredentials,
     ServerMode,
 )
+
+from digitalkin.grpc_servers._base_server import BaseServer
 from examples.base_server.mock.mock_pb2 import DESCRIPTOR, HelloReply  # type: ignore
 from examples.base_server.mock.mock_pb2_grpc import (
     Greeter,
@@ -34,7 +35,7 @@ class SyncGreeterServicer(Greeter):
 
     def SayHello(self, request, context):  # noqa: N802
         """Implementation of SayHello method."""
-        logger.info(f"Received request object: {request}")
+        logger.info("Received request object: %s", request)
         logger.info(f"Request attributes: {vars(request)}")
         logger.info(f"Received request with name: {request.name}")
 
@@ -44,7 +45,7 @@ class SyncGreeterServicer(Greeter):
         name = "unknown"
         # Check context metadata
         for key, value in context.invocation_metadata():
-            logger.info(f"Metadata: {key}={value}")
+            logger.info("Metadata: %s=%s", key, value)
             if key.lower() == "name":
                 name = value
 
@@ -111,7 +112,7 @@ def main() -> int:
         server.stop()
 
    except Exception as e:
-        logger.exception(f"Error running server: {e}")
+        logger.exception("Error running server: %s", e)
         return 1
 
     return 0
digitalkin/__version__.py CHANGED
@@ -5,4 +5,4 @@ from importlib.metadata import PackageNotFoundError, version
 try:
     __version__ = version("digitalkin")
 except PackageNotFoundError:
-    __version__ = "0.3.1.dev2"
+    __version__ = "0.3.2.dev14"
digitalkin/core/job_manager/base_job_manager.py CHANGED
@@ -8,8 +8,8 @@ from typing import Any, Generic
 from digitalkin.core.task_manager.base_task_manager import BaseTaskManager
 from digitalkin.core.task_manager.task_session import TaskSession
 from digitalkin.models.core.task_monitor import TaskStatus
-from digitalkin.models.module import InputModelT, OutputModelT, SetupModelT
 from digitalkin.models.module.module import ModuleCodeModel
+from digitalkin.models.module.module_types import InputModelT, OutputModelT, SetupModelT
 from digitalkin.modules._base_module import BaseModule
 from digitalkin.services.services_config import ServicesConfig
 from digitalkin.services.services_models import ServicesMode
digitalkin/core/job_manager/single_job_manager.py CHANGED
@@ -15,7 +15,7 @@ from digitalkin.core.task_manager.local_task_manager import LocalTaskManager
 from digitalkin.core.task_manager.task_session import TaskSession
 from digitalkin.logger import logger
 from digitalkin.models.core.task_monitor import TaskStatus
-from digitalkin.models.module import InputModelT, OutputModelT, SetupModelT
+from digitalkin.models.module.base_types import InputModelT, OutputModelT, SetupModelT
 from digitalkin.models.module.module import ModuleCodeModel
 from digitalkin.modules._base_module import BaseModule
 from digitalkin.services.services_models import ServicesMode
@@ -86,7 +86,10 @@ class SingleJobManager(BaseJobManager[InputModelT, OutputModelT, SetupModelT]):
                 message=f"Module {job_id} did not respond within 30 seconds",
             )
         finally:
-            logger.info(f"{job_id=}: {session.queue.empty()}")
+            logger.debug(
+                "Config setup response retrieved",
+                extra={"job_id": job_id, "queue_empty": session.queue.empty()},
+            )
 
     async def create_config_setup_instance_job(
         self,
@@ -126,7 +129,7 @@ class SingleJobManager(BaseJobManager[InputModelT, OutputModelT, SetupModelT]):
         except Exception:
             # Remove the module from the manager in case of an error.
             del self.tasks_sessions[job_id]
-            logger.exception("Failed to start module %s: %s", job_id)
+            logger.exception("Failed to start module", extra={"job_id": job_id})
             raise
         else:
             return job_id
@@ -140,7 +143,8 @@ class SingleJobManager(BaseJobManager[InputModelT, OutputModelT, SetupModelT]):
             job_id: The unique identifier of the job.
             output_data: The output data produced by the job.
         """
-        await self.tasks_sessions[job_id].queue.put(output_data.model_dump())
+        session = self.tasks_sessions[job_id]
+        await session.queue.put(output_data.model_dump())
 
     @asynccontextmanager  # type: ignore
     async def generate_stream_consumer(self, job_id: str) -> AsyncIterator[AsyncGenerator[dict[str, Any], None]]:  # type: ignore
@@ -259,6 +263,18 @@ class SingleJobManager(BaseJobManager[InputModelT, OutputModelT, SetupModelT]):
         logger.info("Managed task started: '%s'", job_id, extra={"task_id": job_id})
         return job_id
 
+    async def clean_session(self, task_id: str, mission_id: str) -> bool:
+        """Clean a task's session.
+
+        Args:
+            task_id: Unique identifier for the task.
+            mission_id: Mission identifier.
+
+        Returns:
+            bool: True if the task was successfully cleaned, False otherwise.
+        """
+        return await self._task_manager.clean_session(task_id, mission_id)
+
     async def stop_module(self, job_id: str) -> bool:
         """Stop a running module job.
 
@@ -271,20 +287,23 @@ class SingleJobManager(BaseJobManager[InputModelT, OutputModelT, SetupModelT]):
         Raises:
             Exception: If an error occurs while stopping the module.
         """
-        logger.info(f"STOP required for {job_id=}")
+        logger.info("Stop module requested", extra={"job_id": job_id})
 
         async with self._lock:
             session = self.tasks_sessions.get(job_id)
 
             if not session:
-                logger.warning(f"session with id: {job_id} not found")
+                logger.warning("Session not found", extra={"job_id": job_id})
                 return False
             try:
                 await session.module.stop()
                 await self.cancel_task(job_id, session.mission_id)
-                logger.debug(f"session {job_id} ({session.module.name}) stopped successfully")
-            except Exception as e:
-                logger.error(f"Error while stopping module {job_id}: {e}")
+                logger.debug(
+                    "Module stopped successfully",
+                    extra={"job_id": job_id, "mission_id": session.mission_id},
+                )
+            except Exception:
+                logger.exception("Error stopping module", extra={"job_id": job_id})
                 raise
             else:
                 return True
digitalkin/core/job_manager/taskiq_broker.py CHANGED
@@ -21,8 +21,9 @@ from digitalkin.core.job_manager.base_job_manager import BaseJobManager
 from digitalkin.core.task_manager.task_executor import TaskExecutor
 from digitalkin.core.task_manager.task_session import TaskSession
 from digitalkin.logger import logger
-from digitalkin.models.core.job_manager_models import StreamCodeModel
-from digitalkin.models.module.module_types import OutputModelT
+from digitalkin.models.module.module import ModuleCodeModel
+from digitalkin.models.module.module_types import DataModel, OutputModelT
+from digitalkin.models.module.utility import EndOfStreamOutput
 from digitalkin.modules._base_module import BaseModule
 from digitalkin.services.services_config import ServicesConfig
 from digitalkin.services.services_models import ServicesMode
@@ -141,7 +142,7 @@ async def cleanup_global_resources() -> None:
         logger.warning("Failed to shutdown Taskiq broker: %s", e)
 
 
-async def send_message_to_stream(job_id: str, output_data: OutputModelT) -> None:  # type: ignore
+async def send_message_to_stream(job_id: str, output_data: OutputModelT | ModuleCodeModel) -> None:  # type: ignore[type-var]
     """Callback define to add a message frame to the Rstream.
 
     Args:
@@ -186,7 +187,7 @@ async def run_start_module(
     module_class.discover()
 
     job_id = context.message.task_id
-    callback = await BaseJobManager.job_specific_callback(send_message_to_stream, job_id)
+    callback = await BaseJobManager.job_specific_callback(send_message_to_stream, job_id)  # type: ignore[type-var]
     module = ModuleFactory.create_module_instance(module_class, job_id, mission_id, setup_id, setup_version_id)
 
     channel = None
@@ -201,7 +202,7 @@ async def run_start_module(
     # Create a proper done callback that handles errors
     async def send_end_of_stream(_: Any) -> None:  # noqa: ANN401
         try:
-            await callback(StreamCodeModel(code="__END_OF_STREAM__"))
+            await callback(DataModel(root=EndOfStreamOutput()))
         except Exception as e:
             logger.error("Error sending end of stream: %s", e, exc_info=True)
 
@@ -272,7 +273,7 @@ async def run_config_module(
     logger.debug("Services config: %s | Module config: %s", services_config, module_class.services_config)
 
     job_id = context.message.task_id
-    callback = await BaseJobManager.job_specific_callback(send_message_to_stream, job_id)
+    callback = await BaseJobManager.job_specific_callback(send_message_to_stream, job_id)  # type: ignore[type-var]
     module = ModuleFactory.create_module_instance(module_class, job_id, mission_id, setup_id, setup_version_id)
 
     # Override environment variables temporarily to use manager's SurrealDB
digitalkin/core/job_manager/taskiq_job_manager.py CHANGED
@@ -24,7 +24,7 @@ from digitalkin.core.job_manager.taskiq_broker import STREAM, STREAM_RETENTION,
 from digitalkin.core.task_manager.remote_task_manager import RemoteTaskManager
 from digitalkin.logger import logger
 from digitalkin.models.core.task_monitor import TaskStatus
-from digitalkin.models.module import InputModelT, OutputModelT, SetupModelT
+from digitalkin.models.module.module_types import InputModelT, OutputModelT, SetupModelT
 from digitalkin.modules._base_module import BaseModule
 from digitalkin.services.services_models import ServicesMode
 
digitalkin/core/task_manager/surrealdb_repository.py CHANGED
@@ -4,7 +4,7 @@ import asyncio
 import datetime
 import os
 from collections.abc import AsyncGenerator
-from typing import Any, Generic, TypeVar
+from typing import Any, Generic, TypeVar, cast
 from uuid import UUID
 
 from surrealdb import AsyncHttpSurrealConnection, AsyncSurreal, AsyncWsSurrealConnection, RecordID
@@ -91,7 +91,7 @@ class SurrealDBConnection(Generic[TSurreal]):
         logger.debug("Connecting to SurrealDB at %s", self.url)
         self.db = AsyncSurreal(self.url)  # type: ignore
         await self.db.signin({"username": self.username, "password": self.password})
-        await self.db.use(self.namespace, self.database)
+        await self.db.use(self.namespace, self.database)  # type: ignore[arg-type]
         logger.debug("Successfully connected to SurrealDB")
 
     async def close(self) -> None:
@@ -112,7 +112,7 @@ class SurrealDBConnection(Generic[TSurreal]):
         # Process results and track failures
         failed_queries = []
         for live_id, result in zip(live_query_ids, results):
-            if isinstance(result, (ConnectionError, TimeoutError, Exception)):
+            if isinstance(result, ConnectionError | TimeoutError | Exception):
                 failed_queries.append((live_id, str(result)))
             else:
                 self._live_queries.discard(live_id)
@@ -146,7 +146,7 @@ class SurrealDBConnection(Generic[TSurreal]):
         logger.debug("Creating record in %s with data: %s", table_name, data)
         result = await self.db.create(table_name, data)
         logger.debug("create result: %s", result)
-        return result
+        return cast("list[dict[str, Any]] | dict[str, Any]", result)
 
     async def merge(
         self,
@@ -170,7 +170,7 @@ class SurrealDBConnection(Generic[TSurreal]):
         logger.debug("Updating record in %s with data: %s", record_id, data)
         result = await self.db.merge(record_id, data)
         logger.debug("update result: %s", result)
-        return result
+        return cast("list[dict[str, Any]] | dict[str, Any]", result)
 
     async def update(
         self,
@@ -194,7 +194,7 @@ class SurrealDBConnection(Generic[TSurreal]):
         logger.debug("Updating record in %s with data: %s", record_id, data)
         result = await self.db.update(record_id, data)
         logger.debug("update result: %s", result)
-        return result
+        return cast("list[dict[str, Any]] | dict[str, Any]", result)
 
     async def execute_query(self, query: str, params: dict[str, Any] | None = None) -> list[dict[str, Any]]:
         """Execute a custom SurrealQL query.
@@ -209,7 +209,7 @@ class SurrealDBConnection(Generic[TSurreal]):
         logger.debug("execute_query: %s with params: %s", query, params)
         result = await self.db.query(query, params or {})
         logger.debug("execute_query result: %s", result)
-        return [result] if isinstance(result, dict) else result
+        return cast("list[dict[str, Any]]", [result] if isinstance(result, dict) else result)
 
     async def select_by_task_id(self, table: str, value: str) -> dict[str, Any]:
         """Fetch a record from a table by a unique field.