rasa-pro 3.13.0a1.dev5__py3-none-any.whl → 3.13.0a1.dev7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic. Click here for more details.

Files changed (49) hide show
  1. rasa/builder/README.md +120 -0
  2. rasa/builder/config.py +69 -0
  3. rasa/builder/create_openai_vector_store.py +204 -45
  4. rasa/builder/exceptions.py +49 -0
  5. rasa/builder/llm_helper_prompt.jinja2 +245 -0
  6. rasa/builder/llm_service.py +327 -0
  7. rasa/builder/logging_utils.py +51 -0
  8. rasa/builder/main.py +61 -0
  9. rasa/builder/models.py +174 -0
  10. rasa/builder/project_generator.py +264 -0
  11. rasa/builder/service.py +447 -0
  12. rasa/builder/{skill_to_bot_prompt.jinja → skill_to_bot_prompt.jinja2} +10 -4
  13. rasa/builder/training_service.py +123 -0
  14. rasa/builder/validation_service.py +79 -0
  15. rasa/cli/project_templates/finance/config.yml +17 -0
  16. rasa/cli/project_templates/finance/credentials.yml +33 -0
  17. rasa/cli/project_templates/finance/data/flows/transfer_money.yml +5 -0
  18. rasa/cli/project_templates/finance/data/patterns/pattern_session_start.yml +7 -0
  19. rasa/cli/project_templates/finance/domain.yml +7 -0
  20. rasa/cli/project_templates/finance/endpoints.yml +58 -0
  21. rasa/cli/project_templates/plain/config.yml +17 -0
  22. rasa/cli/project_templates/plain/credentials.yml +33 -0
  23. rasa/cli/project_templates/plain/data/patterns/pattern_session_start.yml +7 -0
  24. rasa/cli/project_templates/plain/domain.yml +5 -0
  25. rasa/cli/project_templates/plain/endpoints.yml +58 -0
  26. rasa/cli/project_templates/telecom/config.yml +17 -0
  27. rasa/cli/project_templates/telecom/credentials.yml +33 -0
  28. rasa/cli/project_templates/telecom/data/flows/upgrade_contract.yml +5 -0
  29. rasa/cli/project_templates/telecom/data/patterns/pattern_session_start.yml +7 -0
  30. rasa/cli/project_templates/telecom/domain.yml +7 -0
  31. rasa/cli/project_templates/telecom/endpoints.yml +58 -0
  32. rasa/cli/scaffold.py +19 -3
  33. rasa/core/actions/action.py +5 -3
  34. rasa/core/channels/studio_chat.py +17 -3
  35. rasa/model_manager/model_api.py +1 -1
  36. rasa/model_manager/runner_service.py +1 -1
  37. rasa/model_manager/trainer_service.py +1 -1
  38. rasa/model_manager/utils.py +1 -29
  39. rasa/shared/core/domain.py +62 -15
  40. rasa/shared/core/flows/yaml_flows_io.py +16 -8
  41. rasa/telemetry.py +2 -1
  42. rasa/utils/io.py +27 -9
  43. rasa/version.py +1 -1
  44. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/METADATA +1 -1
  45. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/RECORD +48 -20
  46. rasa/builder/prompt_to_bot.py +0 -696
  47. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/NOTICE +0 -0
  48. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/WHEEL +0 -0
  49. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,447 @@
1
+ """Main service for the prompt-to-bot functionality."""
2
+
3
+ from typing import Optional
4
+
5
+ import structlog
6
+ from sanic import Sanic, response
7
+ from sanic.request import Request
8
+
9
+ from rasa.builder import config
10
+ from rasa.builder.exceptions import (
11
+ LLMGenerationError,
12
+ ProjectGenerationError,
13
+ TrainingError,
14
+ ValidationError,
15
+ )
16
+ from rasa.builder.llm_service import llm_service
17
+ from rasa.builder.logging_utils import get_recent_logs
18
+ from rasa.builder.models import (
19
+ ApiErrorResponse,
20
+ ApiResponse,
21
+ LLMBuilderContext,
22
+ LLMBuilderRequest,
23
+ PromptRequest,
24
+ ServerSentEvent,
25
+ TemplateRequest,
26
+ )
27
+ from rasa.builder.project_generator import ProjectGenerator
28
+ from rasa.builder.training_service import train_and_load_agent
29
+ from rasa.builder.validation_service import validate_project
30
+ from rasa.cli.scaffold import ProjectTemplateName
31
+ from rasa.core.channels.studio_chat import StudioChatInput
32
+ from rasa.server import configure_cors
33
+ from rasa.shared.core.trackers import DialogueStateTracker
34
+
35
+ structlogger = structlog.get_logger()
36
+
37
+
38
class PromptToBotService:
    """Main service for prompt-to-bot functionality.

    Wires together project generation, validation, training and a Sanic
    HTTP API used by the builder UI, plus a Studio chat input channel for
    talking to the trained bot.
    """

    def __init__(self, project_folder: Optional[str] = None):
        """Initialize the service with a project folder for file persistence.

        Args:
            project_folder: Path to the folder where project files will be stored.
                If None, defaults to a temporary directory.
        """
        if project_folder is None:
            import tempfile

            project_folder = tempfile.mkdtemp(prefix="rasa_builder_")

        structlogger.info(
            "prompt_to_bot.service_initialized", project_folder=project_folder
        )

        self.project_generator = ProjectGenerator(project_folder)

        self.app = Sanic("PromptToBotService")
        self.app.config.REQUEST_TIMEOUT = 60  # 1 minute timeout
        # The agent is created lazily, once a bot has been generated and trained.
        self.app.ctx.agent = None
        self.input_channel = self.setup_input_channel()
        self.setup_routes()
        self.setup_middleware()

        configure_cors(self.app, cors_origins=config.CORS_ORIGINS)

    def setup_input_channel(self) -> StudioChatInput:
        """Setup the input channel for chat interactions."""
        studio_chat_credentials = config.get_default_credentials().get(
            StudioChatInput.name()
        )
        return StudioChatInput.from_credentials(credentials=studio_chat_credentials)

    def setup_routes(self):
        """Setup all API routes."""
        # Core endpoints
        self.app.add_route(
            self.handle_prompt_to_bot, "/api/prompt-to-bot", methods=["POST"]
        )
        self.app.add_route(
            self.handle_template_to_bot, "/api/template-to-bot", methods=["POST"]
        )
        self.app.add_route(self.get_bot_data, "/api/bot-data", methods=["GET"])
        self.app.add_route(self.update_bot_data, "/api/bot-data", methods=["PUT"])
        self.app.add_route(self.llm_builder, "/api/llm-builder", methods=["POST"])

        # Health check
        self.app.add_route(self.health, "/", methods=["GET"])

        # Register input channel webhooks
        from rasa.core import channels

        channels.channel.register([self.input_channel], self.app, route="/webhooks/")

    def setup_middleware(self):
        """Setup middleware for request/response processing."""

        @self.app.middleware("request")
        async def log_request(request):
            structlogger.info(
                "request.received",
                method=request.method,
                path=request.path,
                remote_addr=request.remote_addr or "unknown",
            )

        @self.app.middleware("response")
        async def log_response(request, response):
            structlogger.info(
                "request.completed",
                method=request.method,
                path=request.path,
                status=response.status,
            )

    async def health(self, request: Request):
        """Health check endpoint."""
        return response.json({"status": "ok", "service": "prompt-to-bot"})

    @staticmethod
    def _error_response(log_prefix: str, error: Exception):
        """Map a known builder exception to the matching JSON error response.

        Shared by the generation endpoints so both report identical statuses
        and structured log events (``<log_prefix>.<error_kind>``).

        Args:
            log_prefix: Event-name prefix, e.g. ``"prompt_to_bot"``.
            error: The exception caught by the endpoint handler.

        Returns:
            A Sanic JSON response with the appropriate HTTP status code.
        """
        if isinstance(error, ValidationError):
            structlogger.error(f"{log_prefix}.validation_error", error=str(error))
            return response.json(
                ApiErrorResponse(
                    error="Validation failed",
                    details={"validation_error": str(error)},
                ).model_dump(),
                status=400,
            )

        if isinstance(error, ProjectGenerationError):
            structlogger.error(f"{log_prefix}.generation_error", error=str(error))
            return response.json(
                ApiErrorResponse(
                    error="Project generation failed",
                    details={"attempts": error.attempts, "error": str(error)},
                ).model_dump(),
                status=500,
            )

        if isinstance(error, TrainingError):
            structlogger.error(f"{log_prefix}.training_error", error=str(error))
            return response.json(
                ApiErrorResponse(
                    error="Model training failed",
                    details={"training_error": str(error)},
                ).model_dump(),
                status=500,
            )

        if isinstance(error, LLMGenerationError):
            structlogger.error(f"{log_prefix}.llm_error", error=str(error))
            return response.json(
                ApiErrorResponse(
                    error="LLM generation failed", details={"llm_error": str(error)}
                ).model_dump(),
                status=502,
            )

        structlogger.error(f"{log_prefix}.unexpected_error", error=str(error))
        return response.json(
            ApiErrorResponse(error="Unexpected error occurred").model_dump(),
            status=500,
        )

    async def handle_prompt_to_bot(self, request: Request):
        """Handle prompt-to-bot generation requests."""
        try:
            # Validate request
            prompt_data = PromptRequest(**request.json)

            # Generate project with retries
            bot_files = await self.project_generator.generate_project_with_retries(
                prompt_data.prompt,
                template=ProjectTemplateName.PLAIN,
            )

            # Train and load agent
            importer = self.project_generator._create_importer()
            self.app.ctx.agent = await train_and_load_agent(importer)

            # Update input channel with new agent
            self.input_channel.agent = self.app.ctx.agent

            structlogger.info(
                "prompt_to_bot.success",
                client_id=prompt_data.client_id,
                files_generated=list(bot_files.keys()),
            )

            return response.json(
                ApiResponse(
                    status="success",
                    message="Bot generated successfully",
                    data={"bot_data": bot_files},
                ).model_dump()
            )

        except Exception as e:
            return self._error_response("prompt_to_bot", e)

    async def handle_template_to_bot(self, request: Request):
        """Handle template-to-bot generation requests."""
        try:
            # Validate request
            template_data = TemplateRequest(**request.json)

            # Initialize the project from the requested template
            self.project_generator.init_from_template(
                template_data.template_name,
            )
            bot_files = self.project_generator.get_bot_files()

            # Train and load agent
            importer = self.project_generator._create_importer()
            self.app.ctx.agent = await train_and_load_agent(importer)

            # Update input channel with new agent
            self.input_channel.agent = self.app.ctx.agent

            structlogger.info(
                "template_to_bot.success",
                client_id=template_data.client_id,
                files_generated=list(bot_files.keys()),
            )

            return response.json(
                ApiResponse(
                    status="success",
                    message="Bot generated successfully",
                    data={"bot_data": bot_files},
                ).model_dump()
            )

        except Exception as e:
            return self._error_response("template_to_bot", e)

    async def get_bot_data(self, request: Request):
        """Get current bot data."""
        bot_files = self.project_generator.get_bot_files()
        return response.json(
            ApiResponse(status="success", data={"bot_data": bot_files}).model_dump()
        )

    async def update_bot_data(self, request: Request):
        """Update bot data with server-sent events for progress tracking.

        Emits the event sequence ``received`` → ``validating`` →
        ``validation_success``/``validation_error`` → ``training`` →
        ``train_success``/``train_error`` → ``done``. The SSE stream is
        closed exactly once, in the ``finally`` block.
        """
        sse_response = await request.respond(content_type="text/event-stream")

        try:
            # 1. Received
            await self._send_sse_event(
                sse_response,
                ServerSentEvent(event="received", data={"status": "received"}),
            )

            # Update bot files
            bot_data = request.json
            self.project_generator.update_bot_files(bot_data)

            # 2. Validating
            await self._send_sse_event(
                sse_response,
                ServerSentEvent(event="validating", data={"status": "validating"}),
            )

            try:
                importer = self.project_generator._create_importer()
                validation_error = await validate_project(importer)

                if validation_error:
                    raise ValidationError(validation_error)

                await self._send_sse_event(
                    sse_response,
                    ServerSentEvent(
                        event="validation_success",
                        data={"status": "validation_success"},
                    ),
                )

            except ValidationError as e:
                await self._send_sse_event(
                    sse_response,
                    ServerSentEvent(
                        event="validation_error",
                        data={"status": "validation_error", "error": str(e)},
                    ),
                )
                # The stream is closed by the finally block; calling eof()
                # here as well would close it twice.
                return

            # 3. Training
            await self._send_sse_event(
                sse_response,
                ServerSentEvent(event="training", data={"status": "training"}),
            )

            try:
                self.app.ctx.agent = await train_and_load_agent(importer)
                self.input_channel.agent = self.app.ctx.agent

                await self._send_sse_event(
                    sse_response,
                    ServerSentEvent(
                        event="train_success", data={"status": "train_success"}
                    ),
                )

            except TrainingError as e:
                await self._send_sse_event(
                    sse_response,
                    ServerSentEvent(
                        event="train_error",
                        data={"status": "train_error", "error": str(e)},
                    ),
                )
                return

            # 4. Done
            await self._send_sse_event(
                sse_response,
                ServerSentEvent(
                    event="done",
                    data={
                        "status": "done",
                        "bot_data": self.project_generator.get_bot_files(),
                    },
                ),
            )

        except Exception as e:
            await self._send_sse_event(
                sse_response,
                ServerSentEvent(
                    event="error", data={"status": "error", "error": str(e)}
                ),
            )
        finally:
            await sse_response.eof()

    async def llm_builder(self, request: Request):
        """Handle LLM builder requests."""
        try:
            # Validate request
            builder_request = LLMBuilderRequest(**request.json)

            # Get current conversation context
            current_tracker = await self.current_tracker_from_input_channel()
            bot_logs = get_recent_logs()
            chat_bot_files = self.project_generator.get_bot_files()

            # create LLM builder context
            llm_builder_context = LLMBuilderContext(
                tracker=current_tracker,
                bot_logs=bot_logs,
                chat_bot_files=chat_bot_files,
                chat_history=builder_request.messages,
            )

            # Generate response
            messages = await llm_service.create_helper_messages(llm_builder_context)
            llm_response = await llm_service.generate_helper_response(messages)

            return response.json(llm_response)

        except LLMGenerationError as e:
            structlogger.error("llm_builder.generation_error", error=str(e))
            return response.json(
                ApiErrorResponse(
                    error="LLM helper generation failed", details={"llm_error": str(e)}
                ).model_dump(),
                status=502,
            )

        except Exception as e:
            structlogger.error("llm_builder.unexpected_error", error=str(e))
            return response.json(
                ApiErrorResponse(error="Unexpected error in LLM builder").model_dump(),
                status=500,
            )

    async def current_tracker_from_input_channel(
        self,
    ) -> Optional[DialogueStateTracker]:
        """Return the tracker of the latest chat session, if any.

        Returns None when no agent is loaded or no chat session exists yet.
        """
        if self.app.ctx.agent and self.input_channel.latest_tracker_session_id:
            return await self.app.ctx.agent.tracker_store.retrieve(
                self.input_channel.latest_tracker_session_id
            )
        else:
            return None

    @staticmethod
    async def _send_sse_event(sse_response, event: ServerSentEvent):
        """Send a server-sent event."""
        await sse_response.send(event.format())

    def run(self):
        """Run the service."""
        structlogger.info(
            "service.starting", host=config.SERVER_HOST, port=config.SERVER_PORT
        )

        self.app.run(
            host=config.SERVER_HOST,
            port=config.SERVER_PORT,
            legacy=True,
            motd=False,
        )
@@ -127,11 +127,11 @@ CALM BOT JSON:
127
127
  - Use `steps` to outline the conversation flow
128
128
  - Implement `collect` steps for gathering information
129
129
  - `collect` steps should reference an existing slot
130
- - For a collect step referencing a slot `A`, there should be a corresponding `utter_ask_A`
130
+ - For a collect step referencing a slot `A`, there should be a corresponding `utter_ask_A`
131
131
  utterance that is used in the collect step to ask for the information to be stored in `A`
132
132
  - Use `action` steps for custom actions
133
133
  - Implement conditional logic with `if`, `then`, and `else` where appropriate
134
- - Use `next` to define the flow between steps. If the flow should end after a step, add next: END.
134
+ - Use `next` to define the flow between steps. If the flow should end after a step, add next: END.
135
135
  - The content after `then` or `else` can be: the id of another step defined in the flow, a list of steps, or an END
136
136
  - End the flow with an appropriate action or message
137
137
 
@@ -153,6 +153,12 @@ CALM BOT JSON:
153
153
  - Consider error handling and alternative conversation paths
154
154
  - Aim for a balance between simplicity and functionality
155
155
 
156
- Now, please generate Rasa CALM flow and domain JSON configuration for the following skill description:
156
+ Now, please generate Rasa CALM flow and domain JSON based on the initial bot data.
157
+ Modify the initial bot data where it makes sense based on the user's description
158
+ of the skill (e.g. the initial bot greeting utter_greet).
157
159
 
158
- ${skillDescription}
160
+ INITIAL BOT DATA:
161
+ {{project_data|tojson}}
162
+
163
+ USER_SKILL_DESCRIPTION:
164
+ {{skill_description}}
@@ -0,0 +1,123 @@
1
+ """Functions for training and loading Rasa models."""
2
+
3
+ import tempfile
4
+
5
+ import structlog
6
+
7
+ from rasa.builder import config
8
+ from rasa.builder.exceptions import AgentLoadError, TrainingError
9
+ from rasa.core import agent
10
+ from rasa.core.utils import AvailableEndpoints, read_endpoints_from_path
11
+ from rasa.model_training import train
12
+ from rasa.shared.importers.importer import TrainingDataImporter
13
+ from rasa.shared.utils.yaml import dump_obj_as_yaml_to_string
14
+
15
+ structlogger = structlog.get_logger()
16
+
17
+
18
async def train_and_load_agent(importer: TrainingDataImporter) -> agent.Agent:
    """Train a model and load an agent.

    Args:
        importer: Training data importer with domain, flows, and config

    Returns:
        Loaded and ready agent

    Raises:
        TrainingError: If training fails
        AgentLoadError: If agent loading fails
    """
    try:
        # Setup endpoints for training validation
        await _setup_endpoints()

        # Train the model
        training_result = await _train_model(importer)

        # Load the agent
        agent_instance = await _load_agent(training_result.model)

        # Verify agent is ready
        if not agent_instance.is_ready():
            raise AgentLoadError("Agent failed to load properly - model is not ready")

        structlogger.info("training.agent_ready", model_path=training_result.model)

        return agent_instance

    except (TrainingError, AgentLoadError):
        # Already domain-specific errors - propagate unchanged.
        raise
    except Exception as e:
        # Chain the cause so the original traceback is preserved.
        raise TrainingError(f"Unexpected error during training: {e}") from e
53
+
54
+
55
async def _setup_endpoints():
    """Setup endpoints configuration for training.

    Writes the default endpoints to a temporary YAML file, points the
    ``AvailableEndpoints`` singleton at it, and removes the file afterwards.

    Raises:
        TrainingError: If the endpoints cannot be written or loaded.
    """
    import os

    temp_path = None
    try:
        with tempfile.NamedTemporaryFile(
            mode="w", suffix=".yml", delete=False
        ) as temp_file:
            temp_path = temp_file.name
            endpoints_yaml = dump_obj_as_yaml_to_string(config.get_default_endpoints())
            temp_file.write(endpoints_yaml)
            temp_file.flush()

        # Reset and load endpoints
        AvailableEndpoints.reset_instance()
        read_endpoints_from_path(temp_path)

        structlogger.debug("training.endpoints_setup", temp_file=temp_path)

    except Exception as e:
        raise TrainingError(f"Failed to setup endpoints: {e}") from e
    finally:
        # delete=False means the file is never removed automatically;
        # clean it up so repeated trainings don't leak temp files.
        if temp_path is not None:
            try:
                os.unlink(temp_path)
            except OSError:
                pass
73
+
74
+
75
async def _train_model(importer: TrainingDataImporter):
    """Train the Rasa model.

    Args:
        importer: Training data importer with domain, flows, and config.

    Returns:
        The training result, guaranteed to contain a model path.

    Raises:
        TrainingError: If training fails or produces no model.
    """
    try:
        structlogger.info("training.started")

        training_result = await train(
            domain="",
            config="",
            training_files=None,
            file_importer=importer,
        )

        if not training_result or not training_result.model:
            raise TrainingError("Training completed but no model was produced")

        structlogger.info("training.completed", model_path=training_result.model)

        return training_result

    except TrainingError:
        # Don't re-wrap our own error: the generic handler below would turn
        # "Training completed but no model was produced" into
        # "Model training failed: Training completed but ...".
        raise
    except Exception as e:
        raise TrainingError(f"Model training failed: {e}") from e
96
+
97
+
98
async def _load_agent(model_path: str) -> agent.Agent:
    """Load the trained agent.

    Args:
        model_path: Path of the model produced by training.

    Returns:
        The loaded agent instance.

    Raises:
        AgentLoadError: If endpoints are missing or loading fails.
    """
    try:
        structlogger.info("training.loading_agent", model_path=model_path)

        available_endpoints = AvailableEndpoints.get_instance()
        if available_endpoints is None:
            raise AgentLoadError("No endpoints available for agent loading")

        agent_instance = await agent.load_agent(
            model_path=model_path,
            remote_storage=None,
            endpoints=available_endpoints,
        )

        if agent_instance is None:
            raise AgentLoadError("Agent loading returned None")

        structlogger.info("training.agent_loaded", model_path=model_path)

        return agent_instance

    except AgentLoadError:
        raise
    except Exception as e:
        # Chain the cause so the original traceback is preserved.
        raise AgentLoadError(f"Failed to load agent: {e}") from e
@@ -0,0 +1,79 @@
1
+ """Functions for validating Rasa projects."""
2
+
3
+ import sys
4
+ from contextlib import contextmanager
5
+ from typing import Optional
6
+
7
+ import structlog
8
+ from structlog.testing import capture_logs
9
+
10
+ from rasa.builder import config
11
+ from rasa.builder.exceptions import ValidationError
12
+ from rasa.cli.utils import validate_files
13
+ from rasa.shared.importers.importer import TrainingDataImporter
14
+
15
+ structlogger = structlog.get_logger()
16
+
17
+
18
+ @contextmanager
19
+ def _mock_sys_exit():
20
+ """Context manager to prevent sys.exit from being called during validation."""
21
+ was_sys_exit_called = {"value": False}
22
+
23
+ def sys_exit_mock(code: int = 0):
24
+ was_sys_exit_called["value"] = True
25
+
26
+ original_exit = sys.exit
27
+ sys.exit = sys_exit_mock
28
+
29
+ try:
30
+ yield was_sys_exit_called
31
+ finally:
32
+ sys.exit = original_exit
33
+
34
+
35
async def validate_project(importer: TrainingDataImporter) -> Optional[str]:
    """Validate a Rasa project.

    Args:
        importer: Training data importer with domain, flows, and config

    Returns:
        None if validation passes. (The ``Optional[str]`` return type is kept
        for interface compatibility; failures are reported by raising.)

    Raises:
        ValidationError: If validation fails, either via an attempted
            ``sys.exit`` inside ``validate_files`` or via any other exception.
    """
    try:
        # validate_files signals failure by calling sys.exit; intercept it
        # and capture its log output instead of terminating the process.
        with _mock_sys_exit() as exit_tracker:
            with capture_logs() as cap_logs:
                validate_files(
                    fail_on_warnings=config.VALIDATION_FAIL_ON_WARNINGS,
                    max_history=config.VALIDATION_MAX_HISTORY,
                    importer=importer,
                )

        if exit_tracker["value"]:
            # Surface everything above debug level as the failure detail.
            error_logs = [
                log for log in cap_logs if log.get("log_level") != "debug"
            ]
            structlogger.error(
                "validation.failed.sys_exit",
                error_logs=error_logs,
            )
            raise ValidationError(
                "Validation failed with sys.exit", validation_logs=error_logs
            )

        structlogger.info("validation.success")
        return None

    except ValidationError:
        raise
    except Exception as e:
        error_msg = f"Validation failed with exception: {e}"
        structlogger.error(
            "validation.failed.exception",
            error=str(e),
        )
        raise ValidationError(error_msg) from e
@@ -0,0 +1,17 @@
1
# The config recipe.
recipe: default.v1

# The assistant project unique identifier
# This default value must be replaced with a unique assistant name within your deployment
assistant_id: placeholder_default

# Language of the assistant's training data and conversations.
language: en
# NLU pipeline: a single LLM-based command generator.
pipeline:
  - name: CompactLLMCommandGenerator
    llm:
      model_group: openai-gpt-4o

# Configuration for Rasa Core.
policies:
  - name: FlowPolicy
  - name: IntentlessPolicy