digitalkin 0.2.13__py3-none-any.whl → 0.2.15__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
digitalkin/__version__.py CHANGED
@@ -5,4 +5,4 @@ from importlib.metadata import PackageNotFoundError, version
 try:
     __version__ = version("digitalkin")
 except PackageNotFoundError:
-    __version__ = "0.2.13"
+    __version__ = "0.2.15"
@@ -81,6 +81,62 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         )
         self.setup = GrpcSetup() if self.args.services_mode == ServicesMode.REMOTE else DefaultSetup()

+    async def ConfigSetupModule(  # noqa: N802
+        self,
+        request: lifecycle_pb2.ConfigSetupModuleRequest,
+        context: grpc.aio.ServicerContext,
+    ) -> lifecycle_pb2.ConfigSetupModuleResponse:
+        """Configure the module setup.
+
+        Args:
+            request: The configuration request.
+            context: The gRPC context.
+
+        Returns:
+            A response indicating success or failure.
+
+        Raises:
+            ServicerError: if the setup data is not returned or job creation fails.
+        """
+        logger.info("ConfigSetupVersion called for module: '%s'", self.module_class.__name__)
+        # Process the module input
+        # TODO: Secret should be used here as well
+        setup_version = request.setup_version
+        config_setup_data = self.module_class.create_config_setup_model(json_format.MessageToDict(request.content))
+        setup_version_data = self.module_class.create_setup_model(
+            json_format.MessageToDict(request.setup_version.content)
+        )
+
+        if not setup_version_data:
+            msg = "No setup data returned."
+            raise ServicerError(msg)
+
+        if not config_setup_data:
+            msg = "No config setup data returned."
+            raise ServicerError(msg)
+
+        # create a task to run the module in background
+        job_id = await self.job_manager.create_config_setup_instance_job(
+            config_setup_data,
+            setup_version_data,
+            request.mission_id,
+            setup_version.id,
+        )
+
+        if job_id is None:
+            context.set_code(grpc.StatusCode.NOT_FOUND)
+            context.set_details("Failed to create module instance")
+            return lifecycle_pb2.ConfigSetupModuleResponse(success=False)
+
+        updated_setup_data = await self.job_manager.generate_config_setup_module_response(job_id)
+        logger.warning(f"Updated setup data: {updated_setup_data=}")
+        setup_version.content = json_format.ParseDict(
+            updated_setup_data,
+            struct_pb2.Struct(),
+            ignore_unknown_fields=True,
+        )
+        return lifecycle_pb2.ConfigSetupModuleResponse(success=True, setup_version=setup_version)
+
     async def StartModule(  # noqa: N802
         self,
         request: lifecycle_pb2.StartModuleRequest,
@@ -116,7 +172,7 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
         setup_data = self.module_class.create_setup_model(setup_data_class.current_setup_version.content)

         # create a task to run the module in background
-        job_id = await self.job_manager.create_job(
+        job_id = await self.job_manager.create_module_instance_job(
             input_data,
             setup_data,
             mission_id=request.mission_id,
@@ -391,3 +447,39 @@ class ModuleServicer(module_service_pb2_grpc.ModuleServiceServicer, ArgParser):
             success=True,
             secret_schema=secret_format_struct,
         )
+
+    async def GetConfigSetupModule(  # noqa: N802
+        self,
+        request: information_pb2.GetConfigSetupModuleRequest,
+        context: grpc.ServicerContext,
+    ) -> information_pb2.GetConfigSetupModuleResponse:
+        """Get information about the module's setup and configuration.
+
+        Args:
+            request: The get module setup request.
+            context: The gRPC context.
+
+        Returns:
+            A response with the module's setup information.
+        """
+        logger.debug("GetConfigSetupModule called for module: '%s'", self.module_class.__name__)
+
+        # Get setup schema if available
+        try:
+            # Convert schema to proto format
+            config_setup_schema_proto = self.module_class.get_config_setup_format(llm_format=request.llm_format)
+            config_setup_format_struct = json_format.Parse(
+                text=config_setup_schema_proto,
+                message=struct_pb2.Struct(),  # pylint: disable=no-member
+                ignore_unknown_fields=True,
+            )
+        except NotImplementedError as e:
+            logger.warning(e)
+            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+            context.set_details(e)
+            return information_pb2.GetConfigSetupModuleResponse()
+
+        return information_pb2.GetConfigSetupModuleResponse(
+            success=True,
+            config_setup_schema=config_setup_format_struct,
+        )
@@ -31,3 +31,7 @@ class ReflectionError(ServerError):
 

 class HealthCheckError(ServerError):
     """Error related to gRPC health check service."""
+
+
+class OptionalFeatureNotImplementedError(NotImplementedError):
+    """Raised when an optional feature is not implemented, but was requested."""
@@ -2,10 +2,11 @@
 
 from digitalkin.models.module.module import Module, ModuleStatus
 from digitalkin.models.module.module_types import (
+    ConfigSetupModelT,
     InputModelT,
     OutputModelT,
     SecretModelT,
     SetupModelT,
 )
 
-__all__ = ["InputModelT", "Module", "ModuleStatus", "OutputModelT", "SecretModelT", "SetupModelT"]
+__all__ = ["ConfigSetupModelT", "InputModelT", "Module", "ModuleStatus", "OutputModelT", "SecretModelT", "SetupModelT"]
@@ -4,6 +4,7 @@ from typing import TypeVar
 
 from pydantic import BaseModel
 
+ConfigSetupModelT = TypeVar("ConfigSetupModelT", bound=BaseModel | None)
 InputModelT = TypeVar("InputModelT", bound=BaseModel)
 OutputModelT = TypeVar("OutputModelT", bound=BaseModel)
 SetupModelT = TypeVar("SetupModelT", bound=BaseModel)
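ConfigSetupModelT is bound to `BaseModel | None`, so a module can either declare a pydantic model describing its config-setup inputs or opt out of the feature entirely. A hedged sketch with an illustrative model (the class and field names are hypothetical):

    # Hypothetical config setup model; field names are illustrative only.
    from pydantic import BaseModel


    class MyConfigSetup(BaseModel):
        """Inputs a user provides when configuring the module's setup."""

        knowledge_base_name: str
        refresh_interval_s: int = 3600


    # A module with no config setup step can use `None` for this type slot,
    # which is why the TypeVar is bound to `BaseModel | None`.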
@@ -9,8 +9,16 @@ from typing import Any, ClassVar, Generic
 
 from pydantic import BaseModel
 
+from digitalkin.grpc_servers.utils.exceptions import OptionalFeatureNotImplementedError
 from digitalkin.logger import logger
-from digitalkin.models.module import InputModelT, ModuleStatus, OutputModelT, SecretModelT, SetupModelT
+from digitalkin.models.module import (
+    ConfigSetupModelT,
+    InputModelT,
+    ModuleStatus,
+    OutputModelT,
+    SecretModelT,
+    SetupModelT,
+)
 from digitalkin.services.agent.agent_strategy import AgentStrategy
 from digitalkin.services.cost.cost_strategy import CostStrategy
 from digitalkin.services.filesystem.filesystem_strategy import FilesystemStrategy
@@ -30,11 +38,22 @@ class ModuleErrorModel(BaseModel):
     short_description: str


-class BaseModule(ABC, Generic[InputModelT, OutputModelT, SetupModelT, SecretModelT]):
+class BaseModule(
+    ABC,
+    Generic[
+        InputModelT,
+        OutputModelT,
+        SetupModelT,
+        SecretModelT,
+        ConfigSetupModelT,
+    ],
+):
     """BaseModule is the abstract base for all modules in the DigitalKin SDK."""

     name: str
     description: str
+
+    config_setup_format: type[ConfigSetupModelT]
     input_format: type[InputModelT]
     output_format: type[OutputModelT]
     setup_format: type[SetupModelT]
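BaseModule now takes a fifth type parameter, declared last, and exposes the corresponding model through the `config_setup_format` class attribute. A hypothetical subclass sketch (the concrete models are illustrative, not part of the SDK; other class attributes and the abstract lifecycle methods are elided):

    # Hypothetical subclass showing the new five-parameter Generic signature.
    from pydantic import BaseModel

    from digitalkin.modules._base_module import BaseModule


    class Input(BaseModel):
        query: str


    class Output(BaseModel):
        answer: str


    class Setup(BaseModel):
        index_name: str


    class Secret(BaseModel):
        api_key: str


    class ConfigSetup(BaseModel):
        knowledge_base_name: str


    class MyModule(BaseModule[Input, Output, Setup, Secret, ConfigSetup]):
        name = "my-module"
        description = "Example module"

        config_setup_format = ConfigSetup
        input_format = Input
        output_format = Output
        setup_format = Setup
        # secret_format and the abstract methods (initialize, run_config_setup, ...)
        # still need to be provided before the class can be instantiated.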
@@ -136,6 +155,23 @@ class BaseModule(ABC, Generic[InputModelT, OutputModelT, SetupModelT, SecretMode
         msg = "'%s' class does not define an 'output_format'."
         raise NotImplementedError(msg)

+    @classmethod
+    def get_config_setup_format(cls, *, llm_format: bool) -> str:
+        """Gets the JSON schema of the config setup format model.
+
+        Raises:
+            OptionalFeatureNotImplementedError: If the `config_setup_format` is not defined.
+
+        Returns:
+            The JSON schema of the config setup format as a string.
+        """
+        if cls.config_setup_format is not None:
+            if llm_format:
+                return json.dumps(llm_ready_schema(cls.config_setup_format), indent=2)
+            return json.dumps(cls.config_setup_format.model_json_schema(), indent=2)
+        msg = "'%s' class does not define an 'config_setup_format'."
+        raise OptionalFeatureNotImplementedError(msg)
+
     @classmethod
     def get_setup_format(cls, *, llm_format: bool) -> str:
         """Gets the JSON schema of the setup format model.
@@ -153,6 +189,18 @@ class BaseModule(ABC, Generic[InputModelT, OutputModelT, SetupModelT, SecretMode
         msg = "'%s' class does not define an 'setup_format'."
         raise NotImplementedError(msg)

+    @classmethod
+    def create_config_setup_model(cls, config_setup_data: dict[str, Any]) -> ConfigSetupModelT:
+        """Create the setup model from the setup data.
+
+        Args:
+            config_setup_data: The setup data to create the model from.
+
+        Returns:
+            The setup model.
+        """
+        return cls.config_setup_format(**config_setup_data)
+
     @classmethod
     def create_input_model(cls, input_data: dict[str, Any]) -> InputModelT:
         """Create the input model from the input data.
@@ -201,6 +249,21 @@ class BaseModule(ABC, Generic[InputModelT, OutputModelT, SetupModelT, SecretMode
         """
         return cls.output_format(**output_data)

+    @abstractmethod
+    async def run_config_setup(
+        self,
+        config_setup_data: ConfigSetupModelT,
+        setup_data: SetupModelT,
+        callback: Callable,
+    ) -> None:
+        """Run config setup the module.
+
+        Raises:
+            OptionalFeatureNotImplementedError: If the config setup feature is not implemented.
+        """
+        msg = f"'{self}' class does not define an optional 'run_config_setup' attribute."
+        raise OptionalFeatureNotImplementedError(msg)
+
     @abstractmethod
     async def initialize(self, setup_data: SetupModelT) -> None:
         """Initialize the module."""
@@ -302,3 +365,18 @@ class BaseModule(ABC, Generic[InputModelT, OutputModelT, SetupModelT, SecretMode
         except Exception:
             self._status = ModuleStatus.FAILED
             logger.exception("Error stopping module")
+
+    async def start_config_setup(
+        self,
+        config_setup_data: ConfigSetupModelT,
+        setup_data: SetupModelT,
+        callback: Callable[[OutputModelT | ModuleErrorModel], Coroutine[Any, Any, None]],
+    ) -> None:
+        """Start the module."""
+        try:
+            logger.info("Run Config Setup lifecycle")
+            self._status = ModuleStatus.RUNNING
+            await self.run_config_setup(config_setup_data, setup_data, callback)
+        except Exception:
+            self._status = ModuleStatus.FAILED
+            logger.exception("Error during module lifecyle")
@@ -3,8 +3,18 @@
 from abc import ABC

 from digitalkin.models.module import InputModelT, OutputModelT, SecretModelT, SetupModelT
+from digitalkin.models.module.module_types import ConfigSetupModelT
 from digitalkin.modules._base_module import BaseModule


-class ArchetypeModule(BaseModule[InputModelT, OutputModelT, SetupModelT, SecretModelT], ABC):
+class ArchetypeModule(
+    BaseModule[
+        InputModelT,
+        OutputModelT,
+        SetupModelT,
+        SecretModelT,
+        ConfigSetupModelT,
+    ],
+    ABC,
+):
     """ArchetypeModule extends BaseModule to implement specific module types."""
@@ -7,12 +7,13 @@ from typing import Any, Generic
 
 from digitalkin.models import ModuleStatus
 from digitalkin.models.module import InputModelT, OutputModelT, SetupModelT
+from digitalkin.models.module.module_types import ConfigSetupModelT
 from digitalkin.modules._base_module import BaseModule
 from digitalkin.services.services_config import ServicesConfig
 from digitalkin.services.services_models import ServicesMode


-class BaseJobManager(abc.ABC, Generic[InputModelT, SetupModelT]):
+class BaseJobManager(abc.ABC, Generic[InputModelT, SetupModelT, ConfigSetupModelT]):
     """Abstract base class for managing background module jobs."""

     async def _start(self) -> None:
@@ -82,14 +83,14 @@ class BaseJobManager(abc.ABC, Generic[InputModelT, SetupModelT]):
         """

     @abc.abstractmethod
-    async def create_job(
+    async def create_module_instance_job(
         self,
         input_data: InputModelT,
         setup_data: SetupModelT,
         mission_id: str,
         setup_version_id: str,
     ) -> str:
-        """Create and start a new job for the module.
+        """Create and start a new job for the module's instance.

         Args:
             input_data: The input data required to start the job.
@@ -101,6 +102,47 @@ class BaseJobManager(abc.ABC, Generic[InputModelT, SetupModelT]):
             str: The unique identifier (job ID) of the created job.
         """

+    @abc.abstractmethod
+    async def generate_config_setup_module_response(self, job_id: str) -> SetupModelT:
+        """Generate a stream consumer for a module's output data.
+
+        This method creates an asynchronous generator that streams output data
+        from a specific module job. If the module does not exist, it generates
+        an error message.
+
+        Args:
+            job_id: The unique identifier of the job.
+
+        Returns:
+            SetupModelT: the SetupModelT object fully processed.
+        """
+
+    @abc.abstractmethod
+    async def create_config_setup_instance_job(
+        self,
+        config_setup_data: ConfigSetupModelT,
+        setup_data: SetupModelT,
+        mission_id: str,
+        setup_version_id: str,
+    ) -> str:
+        """Create and start a new module job.
+
+        This method initializes a new module job, assigns it a unique job ID,
+        and starts it in the background.
+
+        Args:
+            config_setup_data: The input data required to start the job.
+            setup_data: The setup configuration for the module.
+            mission_id: The mission ID associated with the job.
+            setup_version_id: The setup ID.
+
+        Returns:
+            str: The unique identifier (job ID) of the created job.
+
+        Raises:
+            Exception: If the module fails to start.
+        """
+
     @abc.abstractmethod
     async def stop_module(self, job_id: str) -> bool:
         """Stop a running module job.
@@ -10,13 +10,13 @@ import grpc
 
 from digitalkin.logger import logger
 from digitalkin.models import ModuleStatus
-from digitalkin.models.module import InputModelT, OutputModelT, SetupModelT
+from digitalkin.models.module import ConfigSetupModelT, InputModelT, OutputModelT, SetupModelT
 from digitalkin.modules._base_module import BaseModule
 from digitalkin.modules.job_manager.base_job_manager import BaseJobManager
 from digitalkin.services.services_models import ServicesMode


-class SingleJobManager(BaseJobManager, Generic[InputModelT, SetupModelT]):
+class SingleJobManager(BaseJobManager, Generic[InputModelT, SetupModelT, ConfigSetupModelT]):
     """Manages a single instance of a module job.

     This class ensures that only one instance of a module job is active at a time.
@@ -44,6 +44,73 @@ class SingleJobManager(BaseJobManager, Generic[InputModelT, SetupModelT]):
         self.modules: dict[str, BaseModule] = {}
         self.queues: dict[str, asyncio.Queue] = {}

+    async def generate_config_setup_module_response(self, job_id: str) -> SetupModelT:
+        """Generate a stream consumer for a module's output data.
+
+        This method creates an asynchronous generator that streams output data
+        from a specific module job. If the module does not exist, it generates
+        an error message.
+
+        Args:
+            job_id: The unique identifier of the job.
+
+        Returns:
+            SetupModelT: the SetupModelT object fully processed.
+        """
+        module = self.modules.get(job_id, None)
+        logger.debug("Module %s found: %s", job_id, module)
+
+        try:
+            return await self.queues[job_id].get()
+        finally:
+            logger.info(f"{job_id=}: {self.queues[job_id].empty()}")
+            del self.queues[job_id]
+
+    async def create_config_setup_instance_job(
+        self,
+        config_setup_data: ConfigSetupModelT,
+        setup_data: SetupModelT,
+        mission_id: str,
+        setup_version_id: str,
+    ) -> str:
+        """Create and start a new module setup configuration job.
+
+        This method initializes a new module job, assigns it a unique job ID,
+        and starts the config setup it in the background.
+
+        Args:
+            config_setup_data: The input data required to start the job.
+            setup_data: The setup configuration for the module.
+            mission_id: The mission ID associated with the job.
+            setup_version_id: The setup ID.
+
+        Returns:
+            str: The unique identifier (job ID) of the created job.
+
+        Raises:
+            Exception: If the module fails to start.
+        """
+        job_id = str(uuid.uuid4())
+        # TODO: Ensure the job_id is unique.
+        module = self.module_class(job_id, mission_id=mission_id, setup_version_id=setup_version_id)
+        self.modules[job_id] = module
+        self.queues[job_id] = asyncio.Queue()
+
+        try:
+            await module.start_config_setup(
+                config_setup_data,
+                setup_data,
+                await self.job_specific_callback(self.add_to_queue, job_id),
+            )
+            logger.debug("Module %s (%s) started successfully", job_id, module.name)
+        except Exception:
+            # Remove the module from the manager in case of an error.
+            del self.modules[job_id]
+            logger.exception("Failed to start module %s: %s", job_id)
+            raise
+        else:
+            return job_id
+
     async def add_to_queue(self, job_id: str, output_data: OutputModelT) -> None:  # type: ignore
         """Add output data to the queue for a specific job.

@@ -106,7 +173,7 @@ class SingleJobManager(BaseJobManager, Generic[InputModelT, SetupModelT]):
 
         yield _stream()

-    async def create_job(
+    async def create_module_instance_job(
         self,
         input_data: InputModelT,
         setup_data: SetupModelT,
@@ -130,7 +130,7 @@ async def send_message_to_stream(job_id: str, output_data: OutputModelT) -> None
 
 
 @TASKIQ_BROKER.task
-async def run_task(
+async def run_start_module(
     mission_id: str,
     setup_version_id: str,
     module_class: type[BaseModule],
@@ -171,3 +171,44 @@ async def run_task(
         # TODO: should define a BaseModel for stream code / error
         done_callback=lambda _: asyncio.create_task(callback(StreamCodeModel(code="__END_OF_STREAM__"))),
     )
+
+
+@TASKIQ_BROKER.task
+async def run_config_module(
+    mission_id: str,
+    setup_version_id: str,
+    module_class: type[BaseModule],
+    services_mode: ServicesMode,
+    config_setup_data: dict,
+    setup_data: dict,
+    context: Context = TaskiqDepends(),
+) -> None:
+    """TaskIQ task allowing a module to compute in the background asynchronously.
+
+    Args:
+        mission_id: str,
+        setup_version_id: The setup ID associated with the module.
+        module_class: type[BaseModule],
+        services_mode: ServicesMode,
+        config_setup_data: dict,
+        setup_data: dict,
+        context: Allow TaskIQ context access
+    """
+    logger.warning("%s", services_mode)
+    services_config = ServicesConfig(
+        services_config_strategies=module_class.services_config_strategies,
+        services_config_params=module_class.services_config_params,
+        mode=services_mode,
+    )
+    setattr(module_class, "services_config", services_config)
+    logger.warning("%s | %s", services_config, module_class.services_config)
+
+    job_id = context.message.task_id
+    callback = await BaseJobManager.job_specific_callback(send_message_to_stream, job_id)
+    module = module_class(job_id, mission_id=mission_id, setup_version_id=setup_version_id)
+
+    await module.start_config_setup(
+        module_class.create_config_setup_model(config_setup_data),
+        module_class.create_setup_model(setup_data),
+        callback,
+    )
@@ -18,7 +18,7 @@ from typing import TYPE_CHECKING, Any, Generic
 from rstream import Consumer, ConsumerOffsetSpecification, MessageContext, OffsetType

 from digitalkin.logger import logger
-from digitalkin.models.module import InputModelT, SetupModelT
+from digitalkin.models.module import ConfigSetupModelT, InputModelT, SetupModelT
 from digitalkin.models.module.module import ModuleStatus
 from digitalkin.modules._base_module import BaseModule
 from digitalkin.modules.job_manager.base_job_manager import BaseJobManager
@@ -29,7 +29,7 @@ if TYPE_CHECKING:
     from taskiq.task import AsyncTaskiqTask


-class TaskiqJobManager(BaseJobManager, Generic[InputModelT, SetupModelT]):
+class TaskiqJobManager(BaseJobManager, Generic[InputModelT, SetupModelT, ConfigSetupModelT]):
     """Taskiq job manager for running modules in Taskiq tasks."""

     services_mode: ServicesMode
@@ -95,6 +95,92 @@ class TaskiqJobManager(BaseJobManager, Generic[InputModelT, SetupModelT]):
         with contextlib.suppress(asyncio.CancelledError):
             await self.stream_consumer_task

+    def __init__(
+        self,
+        module_class: type[BaseModule],
+        services_mode: ServicesMode,
+    ) -> None:
+        """Initialize the Taskiq job manager."""
+        super().__init__(module_class, services_mode)
+
+        logger.warning("TaskiqJobManager initialized with app: %s", TASKIQ_BROKER)
+        self.services_mode = services_mode
+        self.job_queues: dict[str, asyncio.Queue] = {}
+        self.max_queue_size = 1000
+
+    async def generate_config_setup_module_response(self, job_id: str) -> SetupModelT:
+        """Generate a stream consumer for a module's output data.
+
+        This method creates an asynchronous generator that streams output data
+        from a specific module job. If the module does not exist, it generates
+        an error message.
+
+        Args:
+            job_id: The unique identifier of the job.
+
+        Returns:
+            SetupModelT: the SetupModelT object fully processed.
+        """
+        queue: asyncio.Queue = asyncio.Queue(maxsize=self.max_queue_size)
+        self.job_queues[job_id] = queue
+
+        try:
+            item = await queue.get()
+            queue.task_done()
+            return item
+        finally:
+            logger.info(f"generate_config_setup_module_response: {job_id=}: {self.job_queues[job_id].empty()}")
+            self.job_queues.pop(job_id, None)
+
+    async def create_config_setup_instance_job(
+        self,
+        config_setup_data: ConfigSetupModelT,
+        setup_data: SetupModelT,
+        mission_id: str,
+        setup_version_id: str,
+    ) -> str:
+        """Create and start a new module setup configuration job.
+
+        This method initializes a new module job, assigns it a unique job ID,
+        and starts the config setup it in the background.
+
+        Args:
+            config_setup_data: The input data required to start the job.
+            setup_data: The setup configuration for the module.
+            mission_id: The mission ID associated with the job.
+            setup_version_id: The setup ID.
+
+        Returns:
+            str: The unique identifier (job ID) of the created job.
+
+        Raises:
+            TypeError: If the function is called with bad data type.
+            ValueError: If the module fails to start.
+        """
+        task = TASKIQ_BROKER.find_task("digitalkin.modules.job_manager.taskiq_broker:run_config_module")
+
+        if task is None:
+            msg = "Task not found"
+            raise ValueError(msg)
+
+        if config_setup_data is None:
+            msg = "config_setup_data must be a valid model with model_dump method"
+            raise TypeError(msg)
+
+        running_task: AsyncTaskiqTask[Any] = await task.kiq(
+            mission_id,
+            setup_version_id,
+            self.module_class,
+            self.services_mode,
+            config_setup_data.model_dump(),  # type: ignore
+            setup_data.model_dump(),
+        )
+
+        job_id = running_task.task_id
+        result = await running_task.wait_result(timeout=10)
+        logger.info("Job %s with data %s", job_id, result)
+        return job_id
+
     @asynccontextmanager  # type: ignore
     async def generate_stream_consumer(self, job_id: str) -> AsyncIterator[AsyncGenerator[dict[str, Any], None]]:  # type: ignore
         """Generate a stream consumer for the RStream stream.
@@ -132,20 +218,7 @@ class TaskiqJobManager(BaseJobManager, Generic[InputModelT, SetupModelT]):
         finally:
             self.job_queues.pop(job_id, None)

-    def __init__(
-        self,
-        module_class: type[BaseModule],
-        services_mode: ServicesMode,
-    ) -> None:
-        """Initialize the Taskiq job manager."""
-        super().__init__(module_class, services_mode)
-
-        logger.warning("TaskiqJobManager initialized with app: %s", TASKIQ_BROKER)
-        self.services_mode = services_mode
-        self.job_queues: dict[str, asyncio.Queue] = {}
-        self.max_queue_size = 1000
-
-    async def create_job(
+    async def create_module_instance_job(
         self,
         input_data: InputModelT,
         setup_data: SetupModelT,
@@ -166,7 +239,7 @@ class TaskiqJobManager(BaseJobManager, Generic[InputModelT, SetupModelT]):
         Raises:
             ValueError: If the task is not found.
         """
-        task = TASKIQ_BROKER.find_task("digitalkin.modules.job_manager.taskiq_broker:run_task")
+        task = TASKIQ_BROKER.find_task("digitalkin.modules.job_manager.taskiq_broker:run_start_module")

         if task is None:
             msg = "Task not found"
@@ -3,8 +3,9 @@
 from abc import ABC

 from digitalkin.models.module import InputModelT, OutputModelT, SecretModelT, SetupModelT
+from digitalkin.models.module.module_types import ConfigSetupModelT
 from digitalkin.modules._base_module import BaseModule  # type: ignore


-class ToolModule(BaseModule[InputModelT, OutputModelT, SetupModelT, SecretModelT], ABC):
+class ToolModule(BaseModule[InputModelT, OutputModelT, SetupModelT, SecretModelT, ConfigSetupModelT], ABC):
     """ToolModule extends BaseModule to implement specific module types."""