digitalkin 0.3.0rc0__py3-none-any.whl → 0.3.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. digitalkin/__version__.py +1 -1
  2. digitalkin/core/__init__.py +1 -0
  3. digitalkin/core/job_manager/__init__.py +1 -0
  4. digitalkin/{modules → core}/job_manager/base_job_manager.py +5 -3
  5. digitalkin/{modules → core}/job_manager/single_job_manager.py +9 -10
  6. digitalkin/{modules → core}/job_manager/taskiq_broker.py +2 -3
  7. digitalkin/{modules → core}/job_manager/taskiq_job_manager.py +5 -6
  8. digitalkin/core/task_manager/__init__.py +1 -0
  9. digitalkin/{modules/job_manager → core/task_manager}/surrealdb_repository.py +0 -1
  10. digitalkin/{modules/job_manager → core/task_manager}/task_manager.py +102 -49
  11. digitalkin/{modules/job_manager → core/task_manager}/task_session.py +71 -18
  12. digitalkin/grpc_servers/__init__.py +1 -19
  13. digitalkin/grpc_servers/_base_server.py +2 -2
  14. digitalkin/grpc_servers/module_server.py +2 -2
  15. digitalkin/grpc_servers/module_servicer.py +3 -3
  16. digitalkin/grpc_servers/registry_server.py +1 -1
  17. digitalkin/grpc_servers/utils/__init__.py +1 -0
  18. digitalkin/grpc_servers/utils/exceptions.py +0 -8
  19. digitalkin/grpc_servers/utils/grpc_client_wrapper.py +1 -1
  20. digitalkin/mixins/chat_history_mixin.py +2 -0
  21. digitalkin/mixins/file_history_mixin.py +14 -20
  22. digitalkin/mixins/filesystem_mixin.py +1 -2
  23. digitalkin/mixins/logger_mixin.py +4 -12
  24. digitalkin/models/core/__init__.py +1 -0
  25. digitalkin/{modules/job_manager → models/core}/job_manager_models.py +3 -3
  26. digitalkin/models/{module → core}/task_monitor.py +7 -5
  27. digitalkin/models/grpc_servers/__init__.py +1 -0
  28. digitalkin/{grpc_servers/utils → models/grpc_servers}/models.py +4 -4
  29. digitalkin/models/module/module_context.py +33 -1
  30. digitalkin/models/module/module_types.py +5 -1
  31. digitalkin/models/services/cost.py +1 -0
  32. digitalkin/modules/_base_module.py +16 -80
  33. digitalkin/services/cost/grpc_cost.py +1 -1
  34. digitalkin/services/filesystem/grpc_filesystem.py +1 -1
  35. digitalkin/services/setup/grpc_setup.py +1 -1
  36. digitalkin/services/storage/grpc_storage.py +1 -1
  37. digitalkin/utils/arg_parser.py +1 -1
  38. digitalkin/utils/development_mode_action.py +2 -2
  39. digitalkin/utils/package_discover.py +1 -2
  40. {digitalkin-0.3.0rc0.dist-info → digitalkin-0.3.0rc2.dist-info}/METADATA +5 -25
  41. {digitalkin-0.3.0rc0.dist-info → digitalkin-0.3.0rc2.dist-info}/RECORD +45 -40
  42. digitalkin/grpc_servers/utils/factory.py +0 -180
  43. digitalkin/{grpc_servers/utils → models/grpc_servers}/types.py +0 -0
  44. {digitalkin-0.3.0rc0.dist-info → digitalkin-0.3.0rc2.dist-info}/WHEEL +0 -0
  45. {digitalkin-0.3.0rc0.dist-info → digitalkin-0.3.0rc2.dist-info}/licenses/LICENSE +0 -0
  46. {digitalkin-0.3.0rc0.dist-info → digitalkin-0.3.0rc2.dist-info}/top_level.txt +0 -0

digitalkin/{modules/job_manager → core/task_manager}/task_session.py
@@ -1,13 +1,13 @@
- """."""
+ """Task session easing task lifecycle management."""

  import asyncio
  import datetime
  from collections.abc import AsyncGenerator

+ from digitalkin.core.task_manager.surrealdb_repository import SurrealDBConnection
  from digitalkin.logger import logger
- from digitalkin.models.module.task_monitor import HeartbeatMessage, SignalMessage, SignalType, TaskStatus
+ from digitalkin.models.core.task_monitor import HeartbeatMessage, SignalMessage, SignalType, TaskStatus
  from digitalkin.modules._base_module import BaseModule
- from digitalkin.modules.job_manager.surrealdb_repository import SurrealDBConnection


  class TaskSession:
@@ -23,6 +23,7 @@ class TaskSession:
  signal_queue: AsyncGenerator | None

  task_id: str
+ mission_id: str
  signal_record_id: str | None
  heartbeat_record_id: str | None

@@ -37,18 +38,31 @@ class TaskSession:
  def __init__(
  self,
  task_id: str,
+ mission_id: str,
  db: SurrealDBConnection,
  module: BaseModule,
  heartbeat_interval: datetime.timedelta = datetime.timedelta(seconds=2),
+ queue_maxsize: int = 1000,
  ) -> None:
- """."""
+ """Initialize Task Session.
+
+ Args:
+ task_id: Unique task identifier
+ mission_id: Mission identifier
+ db: SurrealDB connection
+ module: Module instance
+ heartbeat_interval: Interval between heartbeats
+ queue_maxsize: Maximum size for the queue (0 = unlimited)
+ """
  self.db = db
  self.module = module

  self.status = TaskStatus.PENDING
- self.queue: asyncio.Queue = asyncio.Queue()
+ self.queue: asyncio.Queue = asyncio.Queue(maxsize=queue_maxsize)

  self.task_id = task_id
+ self.mission_id = mission_id
+
  self.heartbeat = None
  self.started_at = None
  self.completed_at = None
@@ -63,17 +77,17 @@ class TaskSession:
  logger.info(
  "TaskContext initialized for task: '%s'",
  task_id,
- extra={"task_id": task_id, "heartbeat_interval": heartbeat_interval},
+ extra={"task_id": task_id, "mission_id": mission_id, "heartbeat_interval": heartbeat_interval},
  )

  @property
  def cancelled(self) -> bool:
- """."""
+ """Task cancellation status."""
  return self.is_cancelled.is_set()

  @property
  def paused(self) -> bool:
- """."""
+ """Task paused status."""
  return self._paused.is_set()

  async def send_heartbeat(self) -> bool:
@@ -84,6 +98,7 @@ class TaskSession:
  """
  heartbeat = HeartbeatMessage(
  task_id=self.task_id,
+ mission_id=self.mission_id,
  timestamp=datetime.datetime.now(datetime.timezone.utc),
  )

@@ -142,7 +157,11 @@ class TaskSession:
  logger.debug(f"Heartbeat tick for task: '{self.task_id}' | {self.cancelled=}")
  success = await self.send_heartbeat()
  if not success:
- logger.error("Heartbeat failed, cancelling task: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.error(
+ "Heartbeat failed, cancelling task: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )
  await self._handle_cancel()
  break
  await asyncio.sleep(self._heartbeat_interval.total_seconds())
@@ -150,7 +169,11 @@ class TaskSession:
  async def wait_if_paused(self) -> None:
  """Block execution if task is paused."""
  if self._paused.is_set():
- logger.info("Task paused, waiting for resume: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.info(
+ "Task paused, waiting for resume: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )
  await self._paused.wait()

  async def listen_signals(self) -> None: # noqa: C901
@@ -159,7 +182,11 @@ class TaskSession:
  Raises:
  CancelledError: Asyncio when task cancelling
  """
- logger.info("Signal listener started for task: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.info(
+ "Signal listener started for task: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )
  if self.signal_record_id is None:
  self.signal_record_id = (await self.db.select_by_task_id("tasks", self.task_id)).get("id")

@@ -183,7 +210,11 @@ class TaskSession:
  await self._handle_status_request()

  except asyncio.CancelledError:
- logger.debug("Signal listener cancelled for task: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.debug(
+ "Signal listener cancelled for task: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )
  raise
  except Exception as e:
  logger.error(
@@ -194,18 +225,28 @@ class TaskSession:
  )
  finally:
  await self.db.stop_live(live_id)
- logger.info("Signal listener stopped for task: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.info(
+ "Signal listener stopped for task: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )

  async def _handle_cancel(self) -> None:
  """Idempotent cancellation with acknowledgment."""
  logger.debug("Handle cancel called")
  if self.is_cancelled.is_set():
  logger.debug(
- "Cancel signal ignored - task already cancelled: '%s'", self.task_id, extra={"task_id": self.task_id}
+ "Cancel signal ignored - task already cancelled: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
  )
  return

- logger.info("Cancelling task: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.info(
+ "Cancelling task: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )

  self.status = TaskStatus.CANCELLED
  self.is_cancelled.set()
@@ -219,6 +260,7 @@ class TaskSession:
  self.signal_record_id, # type: ignore
  SignalMessage(
  task_id=self.task_id,
+ mission_id=self.mission_id,
  action=SignalType.ACK_CANCEL,
  status=self.status,
  ).model_dump(),
@@ -227,7 +269,11 @@ class TaskSession:
  async def _handle_pause(self) -> None:
  """Pause task execution."""
  if not self._paused.is_set():
- logger.info("Pausing task: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.info(
+ "Pausing task: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )
  self._paused.set()

  await self.db.update(
@@ -235,6 +281,7 @@ class TaskSession:
  self.signal_record_id, # type: ignore
  SignalMessage(
  task_id=self.task_id,
+ mission_id=self.mission_id,
  action=SignalType.ACK_PAUSE,
  status=self.status,
  ).model_dump(),
@@ -243,7 +290,11 @@ class TaskSession:
  async def _handle_resume(self) -> None:
  """Resume paused task."""
  if self._paused.is_set():
- logger.info("Resuming task: '%s'", self.task_id, extra={"task_id": self.task_id})
+ logger.info(
+ "Resuming task: '%s'",
+ self.task_id,
+ extra={"task_id": self.task_id},
+ )
  self._paused.clear()

  await self.db.update(
@@ -251,6 +302,7 @@ class TaskSession:
  self.signal_record_id, # type: ignore
  SignalMessage(
  task_id=self.task_id,
+ mission_id=self.mission_id,
  action=SignalType.ACK_RESUME,
  status=self.status,
  ).model_dump(),
@@ -262,9 +314,10 @@
  "tasks",
  self.signal_record_id, # type: ignore
  SignalMessage(
- action=SignalType.ACK_STATUS,
+ mission_id=self.mission_id,
  task_id=self.task_id,
  status=self.status,
+ action=SignalType.ACK_STATUS,
  ).model_dump(),
  )

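The constructor change above makes `mission_id` a required argument and bounds the internal queue. A minimal usage sketch, assuming the surrounding task-manager wiring (the ids, the `db` handle, and the `module` instance are placeholders):

```python
import datetime

from digitalkin.core.task_manager.surrealdb_repository import SurrealDBConnection
from digitalkin.core.task_manager.task_session import TaskSession
from digitalkin.modules._base_module import BaseModule


async def build_session(db: SurrealDBConnection, module: BaseModule) -> TaskSession:
    # mission_id is now threaded into heartbeats and signal acknowledgments.
    return TaskSession(
        task_id="task_123",
        mission_id="mission_456",
        db=db,
        module=module,
        heartbeat_interval=datetime.timedelta(seconds=2),
        queue_maxsize=1000,  # 0 would make the internal asyncio.Queue unbounded
    )
```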

digitalkin/grpc_servers/__init__.py
@@ -1,19 +1 @@
- """This package contains the gRPC server and client implementations.
-
- ```shell
- digitalkin/grpc/
- ├── __init__.py
- ├── base_server.py # Base server implementation with common functionality
- ├── module_server.py # Module-specific server implementation
- ├── registry_server.py # Registry-specific server implementation
- ├── module_servicer.py # gRPC servicer for Module service
- ├── registry_servicer.py # gRPC servicer for Registry service
- ├── client/ # Client libraries for connecting to servers
- │ ├── __init__.py
- │ ├── module_client.py
- │ └── registry_client.py
- └── utils/ # Utility functions
- ├── __init__.py
- └── server_utils.py # Common server utilities
- ```
- """
+ """This package contains the gRPC server and client implementations."""

digitalkin/grpc_servers/_base_server.py
@@ -17,9 +17,9 @@ from digitalkin.grpc_servers.utils.exceptions import (
  ServerStateError,
  ServicerError,
  )
- from digitalkin.grpc_servers.utils.models import SecurityMode, ServerConfig, ServerMode
- from digitalkin.grpc_servers.utils.types import GrpcServer, ServiceDescriptor, T
  from digitalkin.logger import logger
+ from digitalkin.models.grpc_servers.models import SecurityMode, ServerConfig, ServerMode
+ from digitalkin.models.grpc_servers.types import GrpcServer, ServiceDescriptor, T


  class BaseServer(abc.ABC):

digitalkin/grpc_servers/module_server.py
@@ -17,12 +17,12 @@ from digitalkin_proto.digitalkin.module_registry.v2 import (
  from digitalkin.grpc_servers._base_server import BaseServer
  from digitalkin.grpc_servers.module_servicer import ModuleServicer
  from digitalkin.grpc_servers.utils.exceptions import ServerError
- from digitalkin.grpc_servers.utils.models import (
+ from digitalkin.logger import logger
+ from digitalkin.models.grpc_servers.models import (
  ClientConfig,
  ModuleServerConfig,
  SecurityMode,
  )
- from digitalkin.logger import logger
  from digitalkin.modules._base_module import BaseModule


digitalkin/grpc_servers/module_servicer.py
@@ -13,12 +13,12 @@ from digitalkin_proto.digitalkin.module.v2 import (
  )
  from google.protobuf import json_format, struct_pb2

+ from digitalkin.core.job_manager.base_job_manager import BaseJobManager
  from digitalkin.grpc_servers.utils.exceptions import ServicerError
  from digitalkin.logger import logger
+ from digitalkin.models.core.job_manager_models import JobManagerMode
  from digitalkin.models.module.module import ModuleStatus
  from digitalkin.modules._base_module import BaseModule
- from digitalkin.modules.job_manager.base_job_manager import BaseJobManager
- from digitalkin.modules.job_manager.job_manager_models import JobManagerMode
  from digitalkin.services.services_models import ServicesMode
  from digitalkin.services.setup.default_setup import DefaultSetup
  from digitalkin.services.setup.grpc_setup import GrpcSetup
@@ -226,7 +226,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
  yield lifecycle_pb2.StartModuleResponse(success=True, output=proto, job_id=job_id)
  finally:
  await self.job_manager.tasks[job_id]
- await self.job_manager.clean_session(job_id)
+ await self.job_manager.clean_session(job_id, mission_id=request.mission_id)

  logger.info("Job %s finished", job_id)


digitalkin/grpc_servers/registry_server.py
@@ -7,8 +7,8 @@ from digitalkin_proto.digitalkin.module_registry.v2 import (

  from digitalkin.grpc_servers._base_server import BaseServer
  from digitalkin.grpc_servers.registry_servicer import RegistryModule, RegistryServicer
- from digitalkin.grpc_servers.utils.models import RegistryServerConfig
  from digitalkin.logger import logger
+ from digitalkin.models.grpc_servers.models import RegistryServerConfig


  class RegistryServer(BaseServer):

digitalkin/grpc_servers/utils/__init__.py
@@ -0,0 +1 @@
+ """gRPC servers utilities package."""

digitalkin/grpc_servers/utils/exceptions.py
@@ -27,11 +27,3 @@ class ServerStateError(ServerError):

  class ReflectionError(ServerError):
  """Error related to gRPC reflection service."""
-
-
- class HealthCheckError(ServerError):
- """Error related to gRPC health check service."""
-
-
- class OptionalFeatureNotImplementedError(NotImplementedError):
- """Raised when an optional feature is not implemented, but was requested."""

digitalkin/grpc_servers/utils/grpc_client_wrapper.py
@@ -6,8 +6,8 @@ from typing import Any
  import grpc

  from digitalkin.grpc_servers.utils.exceptions import ServerError
- from digitalkin.grpc_servers.utils.models import ClientConfig, SecurityMode
  from digitalkin.logger import logger
+ from digitalkin.models.grpc_servers.models import ClientConfig, SecurityMode


  class GrpcClientWrapper:

digitalkin/mixins/chat_history_mixin.py
@@ -33,6 +33,8 @@ class ChatHistoryMixin(UserMessageMixin, StorageMixin, LoggerMixin, Generic[Inpu
  Returns:
  Unique history key for the current session
  """
+ # TODO: define mission-specific chat history key not dependant on mission_id
+ # or need customization by user
  mission_id = getattr(context.session, "mission_id", None) or "default"
  return f"{self.CHAT_HISTORY_RECORD_ID}_{mission_id}"


digitalkin/mixins/file_history_mixin.py
@@ -30,6 +30,8 @@ class FileHistoryMixin(StorageMixin, LoggerMixin):
  Returns:
  Unique history key for the current session
  """
+ # TODO: define mission-specific chat history key not dependant on mission_id
+ # or need customization by user
  mission_id = getattr(context.session, "mission_id", None) or "default"
  return f"{self.FILE_HISTORY_RECORD_ID}_{mission_id}"

@@ -62,38 +64,30 @@ class FileHistoryMixin(StorageMixin, LoggerMixin):

  Args:
  context: Module context containing storage strategy
- role: Message role (user, assistant, system)
  files: list of files model

  Raises:
  StorageServiceError: If history update fails
  """
  history_key = self._get_history_key(context)
+ file_history = self.load_file_history(context)

- try:
- if not self.file_history_front.files:
- self.file_history_front = self.load_file_history(context)
- self.file_history_front.files.extend(files)
- except Exception as e:
- self.log_error(context, f"Failed to append message to File history: {e}")
-
- try:
- self.log_debug(context, f"Updating File history for session: {history_key}")
- self.update_storage(
+ file_history.files.extend(files)
+ if len(file_history.files) == len(files):
+ # Create new record
+ self.log_debug(context, f"Creating new file history for session: {history_key}")
+ self.store_storage(
  context,
  self.FILE_HISTORY_COLLECTION,
  history_key,
- self.file_history_front.model_dump(),
+ file_history.model_dump(),
+ data_type="OUTPUT",
  )
- except Exception as e:
- self.log_error(context, f"Updating File history for session: {history_key} with error: {e}")
-
- # Create new record
- self.log_debug(context, f"Creating new File history for session: {history_key}")
- self.store_storage(
+ else:
+ self.log_debug(context, f"Updating file history for session: {history_key}")
+ self.update_storage(
  context,
  self.FILE_HISTORY_COLLECTION,
  history_key,
- self.file_history_front.model_dump(),
- data_type="OUTPUT",
+ file_history.model_dump(),
  )

digitalkin/mixins/filesystem_mixin.py
@@ -30,13 +30,12 @@ class FilesystemMixin:
  return context.filesystem.upload_files(files)

  @staticmethod
- def get_file(context: ModuleContext, file_id: str) -> tuple[FilesystemRecord, bytes | None]:
+ def get_file(context: ModuleContext, file_id: str) -> FilesystemRecord:
  """Retrieve a file by ID with the content.

  Args:
  context: Module context containing the filesystem strategy
  file_id: Unique identifier for the file
- include_content: Whether to include file content in response

  Returns:
  File object with metadata and optionally content

digitalkin/mixins/logger_mixin.py
@@ -17,10 +17,8 @@ class LoggerMixin:
  Args:
  context: Module context containing the callbacks strategy
  message: Debug message to log
- *args: Positional arguments for message formatting
- **kwargs: Keyword arguments for logger
  """
- return context.callbacks.logger.debug(message)
+ return context.callbacks.logger.debug(message, extra=context.session.current_ids())

  @staticmethod
  def log_info(context: ModuleContext, message: str) -> None:
@@ -29,10 +27,8 @@ class LoggerMixin:
  Args:
  context: Module context containing the callbacks strategy
  message: Info message to log
- *args: Positional arguments for message formatting
- **kwargs: Keyword arguments for logger
  """
- return context.callbacks.logger.info(message)
+ return context.callbacks.logger.info(message, extra=context.session.current_ids())

  @staticmethod
  def log_warning(context: ModuleContext, message: str) -> None:
@@ -41,10 +37,8 @@ class LoggerMixin:
  Args:
  context: Module context containing the callbacks strategy
  message: Warning message to log
- *args: Positional arguments for message formatting
- **kwargs: Keyword arguments for logger
  """
- return context.callbacks.logger.warning(message)
+ return context.callbacks.logger.warning(message, extra=context.session.current_ids())

  @staticmethod
  def log_error(context: ModuleContext, message: str) -> None:
@@ -53,7 +47,5 @@ class LoggerMixin:
  Args:
  context: Module context containing the callbacks strategy
  message: Error message to log
- *args: Positional arguments for message formatting
- **kwargs: Keyword arguments for logger
  """
- return context.callbacks.logger.error(message)
+ return context.callbacks.logger.error(message, extra=context.session.current_ids())
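The logging mixin now forwards the session identifiers through the standard `logging` `extra` mapping, so they become attributes on each `LogRecord`. A hedged sketch of what that enables downstream; the formatter below is illustrative and not part of the package:

```python
import logging

# Keys passed via extra=... (here, what Session.current_ids() returns)
# become attributes of the LogRecord and can be referenced by a formatter.
handler = logging.StreamHandler()
handler.setFormatter(
    logging.Formatter("%(asctime)s %(levelname)s [job=%(job_id)s mission=%(mission_id)s] %(message)s")
)

log = logging.getLogger("digitalkin.example")
log.addHandler(handler)
log.setLevel(logging.INFO)

# Roughly what LoggerMixin.log_info now does with context.session.current_ids():
log.info(
    "module step finished",
    extra={"job_id": "job_1", "mission_id": "m_1", "setup_id": "s_1", "setup_version_id": "sv_1"},
)
```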

digitalkin/models/core/__init__.py
@@ -0,0 +1 @@
+ """Core models."""

digitalkin/{modules/job_manager → models/core}/job_manager_models.py
@@ -4,7 +4,7 @@ from enum import Enum

  from pydantic import BaseModel

- from digitalkin.modules.job_manager.base_job_manager import BaseJobManager
+ from digitalkin.core.job_manager.base_job_manager import BaseJobManager


  class StreamCodeModel(BaseModel):
@@ -35,10 +35,10 @@ class JobManagerMode(Enum):
  """
  match self:
  case JobManagerMode.SINGLE:
- from digitalkin.modules.job_manager.single_job_manager import SingleJobManager # noqa: PLC0415
+ from digitalkin.core.job_manager.single_job_manager import SingleJobManager # noqa: PLC0415

  return SingleJobManager
  case JobManagerMode.TASKIQ:
- from digitalkin.modules.job_manager.taskiq_job_manager import TaskiqJobManager # noqa: PLC0415
+ from digitalkin.core.job_manager.taskiq_job_manager import TaskiqJobManager # noqa: PLC0415

  return TaskiqJobManager

digitalkin/models/{module → core}/task_monitor.py
@@ -1,4 +1,4 @@
- """."""
+ """Task monitoring models for signaling and heartbeat messages."""

  from datetime import datetime, timezone
  from enum import Enum
@@ -8,7 +8,7 @@ from pydantic import BaseModel, Field


  class TaskStatus(Enum):
- """."""
+ """Task status enumeration."""

  PENDING = "pending"
  RUNNING = "running"
@@ -18,7 +18,7 @@ class TaskStatus(Enum):


  class SignalType(Enum):
- """."""
+ """Signal type enumeration."""

  START = "start"
  STOP = "stop"
@@ -34,9 +34,10 @@ class SignalType(Enum):


  class SignalMessage(BaseModel):
- """."""
+ """Signal message model for task monitoring."""

  task_id: str = Field(..., description="Unique identifier for the task")
+ mission_id: str = Field(..., description="Identifier for the mission")
  status: TaskStatus = Field(..., description="Current status of the task")
  action: SignalType = Field(..., description="Type of signal action")
  timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
@@ -45,7 +46,8 @@ class SignalMessage(BaseModel):


  class HeartbeatMessage(BaseModel):
- """."""
+ """Heartbeat message model for task monitoring."""

  task_id: str = Field(..., description="Unique identifier for the task")
+ mission_id: str = Field(..., description="Identifier for the mission")
  timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
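Because `mission_id` is declared with `Field(...)`, it is now required on both models; omitting it raises a pydantic `ValidationError`. A construction sketch with placeholder identifiers:

```python
from digitalkin.models.core.task_monitor import (
    HeartbeatMessage,
    SignalMessage,
    SignalType,
    TaskStatus,
)

signal = SignalMessage(
    task_id="task_123",
    mission_id="mission_456",  # required as of this release
    status=TaskStatus.RUNNING,
    action=SignalType.ACK_STATUS,
)

heartbeat = HeartbeatMessage(
    task_id="task_123",
    mission_id="mission_456",
    # timestamp defaults to datetime.now(timezone.utc) via default_factory
)
```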

digitalkin/models/grpc_servers/__init__.py
@@ -0,0 +1 @@
+ """Base gRPC server and client models."""

digitalkin/{grpc_servers/utils → models/grpc_servers}/models.py
@@ -175,8 +175,8 @@ class ClientConfig(ChannelConfig):
  credentials: ClientCredentials | None = Field(None, description="Client credentials for secure mode")
  channel_options: list[tuple[str, Any]] = Field(
  default_factory=lambda: [
- ("grpc.max_receive_message_length", 50 * 1024 * 1024), # 50MB
- ("grpc.max_send_message_length", 50 * 1024 * 1024), # 50MB
+ ("grpc.max_receive_message_length", 100 * 1024 * 1024), # 100MB
+ ("grpc.max_send_message_length", 100 * 1024 * 1024), # 100MB
  ],
  description="Additional channel options",
  )
@@ -223,8 +223,8 @@ class ServerConfig(ChannelConfig):
  credentials: ServerCredentials | None = Field(None, description="Server credentials for secure mode")
  server_options: list[tuple[str, Any]] = Field(
  default_factory=lambda: [
- ("grpc.max_receive_message_length", 50 * 1024 * 1024), # 50MB
- ("grpc.max_send_message_length", 50 * 1024 * 1024), # 50MB
+ ("grpc.max_receive_message_length", 100 * 1024 * 1024), # 100MB
+ ("grpc.max_send_message_length", 100 * 1024 * 1024), # 100MB
  ],
  description="Additional server options",
  )
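The raised defaults are plain gRPC channel options. As a rough equivalence, creating a channel directly with `grpcio` and the same limits would look like this (independent of the package's config classes):

```python
import grpc

# Same options the new ClientConfig/ServerConfig defaults carry (100MB each way).
options = [
    ("grpc.max_receive_message_length", 100 * 1024 * 1024),
    ("grpc.max_send_message_length", 100 * 1024 * 1024),
]
channel = grpc.insecure_channel("localhost:50051", options=options)
```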

digitalkin/models/module/module_context.py
@@ -15,32 +15,64 @@ from digitalkin.services.storage.storage_strategy import StorageStrategy
  class Session(SimpleNamespace):
  """Session data container with mandatory setup_id and mission_id."""

+ job_id: str
  mission_id: str
+ setup_id: str
  setup_version_id: str

- def __init__(self, mission_id: str, setup_version_id: str, **kwargs: dict[str, Any]) -> None:
+ def __init__(
+ self,
+ job_id: str,
+ mission_id: str,
+ setup_id: str,
+ setup_version_id: str,
+ **kwargs: dict[str, Any],
+ ) -> None:
  """Init Module Session.

  Args:
+ job_id: current job_id.
  mission_id: current mission_id.
+ setup_id: used setup config.
  setup_version_id: used setup config.
  kwargs: user defined session variables.

  Raises:
  ValueError: If mandatory args are missing
  """
+ if not setup_id:
+ msg = "setup_id is mandatory and cannot be empty"
+ raise ValueError(msg)
  if not setup_version_id:
  msg = "setup_version_id is mandatory and cannot be empty"
  raise ValueError(msg)
  if not mission_id:
  msg = "mission_id is mandatory and cannot be empty"
  raise ValueError(msg)
+ if not job_id:
+ msg = "job_id is mandatory and cannot be empty"
+ raise ValueError(msg)

+ self.job_id = job_id
  self.mission_id = mission_id
+ self.setup_id = setup_id
  self.setup_version_id = setup_version_id

  super().__init__(**kwargs)

+ def current_ids(self) -> dict[str, str]:
+ """Return current session ids as a dictionary.
+
+ Returns:
+ A dictionary containing the current session ids.
+ """
+ return {
+ "job_id": self.job_id,
+ "mission_id": self.mission_id,
+ "setup_id": self.setup_id,
+ "setup_version_id": self.setup_version_id,
+ }
+

  class ModuleContext:
  """ModuleContext provides a container for strategies and resources used by a module.

digitalkin/models/module/module_types.py
@@ -24,7 +24,11 @@ class DataTrigger(BaseModel):
  """

  protocol: ClassVar[str]
- created_at: str = datetime.now(tz=timezone.utc).isoformat()
+ created_at: str = Field(
+ default_factory=lambda: datetime.now(tz=timezone.utc).isoformat(),
+ title="Created At",
+ description="Timestamp when the payload was created.",
+ )


  DataTriggerT = TypeVar("DataTriggerT", bound=DataTrigger)
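The switch to `default_factory` matters because the previous default was evaluated once at import time, so every payload created in the same process carried the module-load timestamp. A small standalone illustration of the difference, using toy models rather than the package's classes:

```python
from datetime import datetime, timezone

from pydantic import BaseModel, Field


class ImportTimeDefault(BaseModel):
    # Evaluated once when the class body runs: shared by all instances.
    created_at: str = datetime.now(tz=timezone.utc).isoformat()


class PerInstanceDefault(BaseModel):
    # Re-evaluated for every new instance.
    created_at: str = Field(default_factory=lambda: datetime.now(tz=timezone.utc).isoformat())


a, b = ImportTimeDefault(), ImportTimeDefault()
assert a.created_at == b.created_at  # both frozen at class-definition time

c = PerInstanceDefault()
d = PerInstanceDefault()
# c.created_at and d.created_at each reflect their own creation time.
```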

digitalkin/models/services/cost.py
@@ -38,6 +38,7 @@ class CostConfig(BaseModel):

  class CostEvent(BaseModel):
  """Pydantic model that represents a cost event registered during service execution.

+ # DEPRECATED
  :param cost_name: Identifier for the cost configuration.
  :param cost_type: The type of cost.
  :param usage: The amount or units consumed.