rasa-pro 3.13.1a14-py3-none-any.whl → 3.13.1a16-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rasa-pro might be problematic.

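The diff below replaces the class-based `BotBuilderService` (which owned its own Sanic app, CORS setup, middleware, and `run()` loop) with a module-level Sanic `Blueprint` whose routes carry sanic_openapi annotations. Handlers now resolve their dependencies through `request.app.ctx`, so a hosting app must attach a `ProjectGenerator` and a `StudioChatInput` before serving requests. That wiring is not part of this release's diff; the following is a minimal sketch of what it might look like, reusing `bp` and `setup_project_generator` from the new module (the empty credentials dict is a placeholder assumption, since the old credential lookup via `config.get_default_credentials()` was removed):

```python
# Hypothetical host-app wiring -- the entry point is not shown in this diff.
from sanic import Sanic

from rasa.builder.service import bp, setup_project_generator
from rasa.core.channels.studio_chat import StudioChatInput

app = Sanic("BotBuilderService")
app.blueprint(bp)  # mounts the routes below under the /api prefix


@app.before_server_start
async def attach_builder_context(app: Sanic, _loop) -> None:
    # get_project_generator()/get_input_channel() in the blueprint read
    # these attributes from request.app.ctx.
    app.ctx.project_generator = setup_project_generator()
    app.ctx.input_channel = StudioChatInput.from_credentials(
        credentials={}  # assumption: real Studio Chat credentials go here
    )
    app.ctx.agent = None  # populated after the first successful training
```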
rasa/builder/service.py CHANGED
@@ -1,13 +1,13 @@
  """Main service for the prompt-to-bot functionality."""

  import os
- from typing import Optional
+ from typing import Any, Optional

  import structlog
- from sanic import HTTPResponse, Sanic, response
+ from sanic import Blueprint, HTTPResponse, response
  from sanic.request import Request
+ from sanic_openapi import openapi

- from rasa.builder import config
  from rasa.builder.exceptions import (
      LLMGenerationError,
      ProjectGenerationError,
@@ -18,9 +18,9 @@ from rasa.builder.llm_service import llm_service
  from rasa.builder.logging_utils import get_recent_logs
  from rasa.builder.models import (
      ApiErrorResponse,
-     ApiResponse,
      LLMBuilderContext,
      LLMBuilderRequest,
+     LLMHelperResponse,
      PromptRequest,
      ServerSentEvent,
      TemplateRequest,
@@ -30,523 +30,713 @@ from rasa.builder.training_service import train_and_load_agent
  from rasa.builder.validation_service import validate_project
  from rasa.cli.scaffold import ProjectTemplateName
  from rasa.core.channels.studio_chat import StudioChatInput
- from rasa.server import configure_cors
  from rasa.shared.core.trackers import DialogueStateTracker
+ from rasa.studio.upload import CALMUserData, extract_calm_import_parts_from_importer
+ from rasa.utils.openapi import model_to_schema

  structlogger = structlog.get_logger()

-
- class BotBuilderService:
-     """Main service for bot building functionality."""
-
-     def __init__(self, project_folder: Optional[str] = None):
-         """Initialize the service with a project folder for file persistence.
-
-         Args:
-             project_folder: Path to the folder where project files will be stored.
-                 If None, defaults to a temporary directory.
-         """
-         if project_folder is None:
-             import tempfile
-
-             project_folder = tempfile.mkdtemp(prefix="rasa_builder_")
-
-         # working directory needs to be the project folder, e.g.
-         # for relative paths (./docs) in a projects config to work
-         os.chdir(project_folder)
-
-         structlogger.info(
-             "bot_builder_service.service_initialized", project_folder=project_folder
+ # Create the blueprint
+ bp = Blueprint("bot_builder", url_prefix="/api")
+
+
+ def setup_project_generator(project_folder: Optional[str] = None) -> ProjectGenerator:
+     """Initialize and return a ProjectGenerator instance."""
+     if project_folder is None:
+         import tempfile
+
+         project_folder = tempfile.mkdtemp(prefix="rasa_builder_")
+
+     # working directory needs to be the project folder, e.g.
+     # for relative paths (./docs) in a projects config to work
+     os.chdir(project_folder)
+
+     structlogger.info(
+         "bot_builder_service.service_initialized", project_folder=project_folder
+     )
+
+     return ProjectGenerator(project_folder)
+
+
+ def get_project_generator(request: Request) -> ProjectGenerator:
+     """Get the project generator from app context."""
+     return request.app.ctx.project_generator
+
+
+ def get_input_channel(request: Request) -> StudioChatInput:
+     """Get the input channel from app context."""
+     return request.app.ctx.input_channel
+
+
+ def extract_calm_import_parts_from_project_generator(
+     project_generator: ProjectGenerator,
+ ) -> CALMUserData:
+     """Extract CALMUserData from a ProjectGenerator.
+
+     Args:
+         project_generator: The project generator to extract data from
+
+     Returns:
+         CALMUserData containing flows, domain, config, endpoints, and nlu data
+     """
+     # Get the training data importer
+     importer = project_generator._create_importer()
+
+     # Extract endpoints (if exists)
+     endpoints_path = project_generator.project_folder / "endpoints.yml"
+     if endpoints_path.exists():
+         from rasa.shared.utils.yaml import read_yaml_file
+
+         endpoints = read_yaml_file(endpoints_path, expand_env_vars=False)
+     else:
+         endpoints = {}
+
+     # Use the shared function with the importer and project data paths
+     return extract_calm_import_parts_from_importer(
+         importer=importer,
+         config=None,  # Let the shared function get config from importer
+         endpoints=endpoints,
+     )
+
+
+ # Health check endpoint
+ @bp.route("/", methods=["GET"])
+ @openapi.summary("Health check endpoint")
+ @openapi.description("Returns the health status of the Bot Builder service")
+ @openapi.tag("health")
+ @openapi.response(200, {"application/json": {"status": str, "service": str}})
+ async def health(request: Request) -> HTTPResponse:
+     """Health check endpoint."""
+     return response.json({"status": "ok", "service": "bot-builder"})
+
+
+ @bp.route("/prompt-to-bot", methods=["POST"])
+ @openapi.summary("Generate bot from natural language prompt")
+ @openapi.description(
+     "Creates a complete conversational AI bot from a natural language prompt "
+     "using LLM generation. Returns server-sent events (SSE) for real-time "
+     "progress tracking through the entire bot creation process.\n\n"
+     "**SSE Event Flow:**\n"
+     "1. `received` - Request received by server\n"
+     "2. `generating` - Generating bot project files\n"
+     "3. `generation_success` - Bot generation completed successfully\n"
+     "4. `training` - Training the bot model\n"
+     "5. `train_success` - Model training completed\n"
+     "6. `done` - Bot creation completed\n\n"
+     "**Error Events (can occur at any time):**\n"
+     "- `generation_error` - Failed to generate bot from prompt\n"
+     "- `train_error` - Bot generated but training failed\n"
+     "- `validation_error` - Generated bot configuration is invalid\n"
+     "- `error` - Unexpected error occurred\n\n"
+     "**Usage:** Send POST request with Content-Type: application/json and "
+     "Accept: text/event-stream"
+ )
+ @openapi.tag("bot-generation")
+ @openapi.body(
+     {"application/json": model_to_schema(PromptRequest)},
+     description="Prompt request with natural language description and client ID "
+     "for tracking",
+     required=True,
+ )
+ @openapi.response(
+     200,
+     {"text/event-stream": str},
+     description="Server-sent events stream with real-time progress updates",
+ )
+ @openapi.response(
+     400,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Validation error in request payload",
+ )
+ @openapi.response(
+     500,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Internal server error",
+ )
+ async def handle_prompt_to_bot(request: Request) -> None:
+     """Handle prompt-to-bot generation requests."""
+     sse_response = await request.respond(content_type="text/event-stream")
+     project_generator = get_project_generator(request)
+     input_channel = get_input_channel(request)
+
+     try:
+         # 1. Received
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="received", data={"status": "received"}),
          )

-         self.project_generator = ProjectGenerator(project_folder)
-         self.app = Sanic("BotBuilderService")
-         self.app.config.REQUEST_TIMEOUT = 60  # 1 minute timeout
-         self.app.ctx.agent = None
-         self.input_channel = self.setup_input_channel()
-         self.setup_routes()
-         self.setup_middleware()
-
-         configure_cors(self.app, cors_origins=config.CORS_ORIGINS)
-
-     def setup_input_channel(self) -> StudioChatInput:
-         """Setup the input channel for chat interactions."""
-         studio_chat_credentials = config.get_default_credentials().get(
-             StudioChatInput.name()
-         )
-         return StudioChatInput.from_credentials(credentials=studio_chat_credentials)
+         # Validate request
+         prompt_data = PromptRequest(**request.json)

-     def setup_routes(self) -> None:
-         """Setup all API routes."""
-         # Core endpoints
-         self.app.add_route(
-             self.handle_prompt_to_bot, "/api/prompt-to-bot", methods=["POST"]
+         # 2. Generating
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="generating", data={"status": "generating"}),
          )
-         self.app.add_route(
-             self.handle_template_to_bot, "/api/template-to-bot", methods=["POST"]
-         )
-         self.app.add_route(self.get_bot_data, "/api/bot-data", methods=["GET"])
-         self.app.add_route(self.update_bot_data, "/api/bot-data", methods=["PUT"])
-         self.app.add_route(self.llm_builder, "/api/llm-builder", methods=["POST"])
-
-         # Health check
-         self.app.add_route(self.health, "/", methods=["GET"])
-
-         # Register input channel webhooks
-         from rasa.core import channels
-
-         channels.channel.register([self.input_channel], self.app, route="/webhooks/")
-
-     def setup_middleware(self) -> None:
-         """Setup middleware for request/response processing."""
-
-         @self.app.middleware("request")  # type: ignore[no-untyped-call]
-         async def log_request(request: Request) -> None:
-             structlogger.info(
-                 "request.received",
-                 method=request.method,
-                 path=request.path,
-                 remote_addr=request.remote_addr or "unknown",
-             )
-
-         @self.app.middleware("response")  # type: ignore[no-untyped-call]
-         async def log_response(request: Request, response: HTTPResponse) -> None:
-             structlogger.info(
-                 "request.completed",
-                 method=request.method,
-                 path=request.path,
-                 status=response.status,
-             )
-
-     async def health(self, request: Request) -> HTTPResponse:
-         """Health check endpoint."""
-         return response.json({"status": "ok", "service": "bot-builder"})
-
-     async def handle_prompt_to_bot(self, request: Request) -> None:
-         """Handle prompt-to-bot generation requests."""
-         sse_response = await request.respond(content_type="text/event-stream")

          try:
-             # 1. Received
-             await self._send_sse_event(
-                 sse_response,
-                 ServerSentEvent(event="received", data={"status": "received"}),
-             )
-
-             # Validate request
-             prompt_data = PromptRequest(**request.json)
-
-             # 2. Generating
-             await self._send_sse_event(
-                 sse_response,
-                 ServerSentEvent(event="generating", data={"status": "generating"}),
+             # Generate project with retries
+             bot_files = await project_generator.generate_project_with_retries(
+                 prompt_data.prompt,
+                 template=ProjectTemplateName.PLAIN,
              )

-             try:
-                 # Generate project with retries
-                 bot_files = await self.project_generator.generate_project_with_retries(
-                     prompt_data.prompt,
-                     template=ProjectTemplateName.PLAIN,
-                 )
-
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="generation_success",
-                         data={"status": "generation_success"},
-                     ),
-                 )
-
-             except (ProjectGenerationError, LLMGenerationError) as e:
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="generation_error",
-                         data={"status": "generation_error", "error": str(e)},
-                     ),
-                 )
-                 await sse_response.eof()
-                 return
-
-             # 3. Training
-             await self._send_sse_event(
+             await _send_sse_event(
                  sse_response,
-                 ServerSentEvent(event="training", data={"status": "training"}),
+                 ServerSentEvent(
+                     event="generation_success",
+                     data={"status": "generation_success"},
+                 ),
              )

-             try:
-                 # Train and load agent
-                 importer = self.project_generator._create_importer()
-                 self.app.ctx.agent = await train_and_load_agent(importer)
-
-                 # Update input channel with new agent
-                 self.input_channel.agent = self.app.ctx.agent
-
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="train_success", data={"status": "train_success"}
-                     ),
-                 )
-
-             except TrainingError as e:
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="train_error",
-                         data={"status": "train_error", "error": str(e)},
-                     ),
-                 )
-                 await sse_response.eof()
-                 return
-
-             # 4. Done
-             await self._send_sse_event(
+         except (ProjectGenerationError, LLMGenerationError) as e:
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="done",
-                     data={
-                         "status": "done",
-                     },
+                     event="generation_error",
+                     data={"status": "generation_error", "error": str(e)},
                  ),
              )
+             await sse_response.eof()
+             return

-             structlogger.info(
-                 "bot_builder_service.prompt_to_bot.success",
-                 client_id=prompt_data.client_id,
-                 files_generated=list(bot_files.keys()),
-             )
+         # 3. Training
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="training", data={"status": "training"}),
+         )

-         except ValidationError as e:
-             structlogger.error(
-                 "bot_builder_service.prompt_to_bot.validation_error", error=str(e)
-             )
-             await self._send_sse_event(
+         try:
+             # Train and load agent
+             importer = project_generator._create_importer()
+             request.app.ctx.agent = await train_and_load_agent(importer)
+
+             # Update input channel with new agent
+             input_channel.agent = request.app.ctx.agent
+
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="validation_error",
-                     data={"status": "validation_error", "error": str(e)},
+                     event="train_success", data={"status": "train_success"}
                  ),
              )

-         except Exception as e:
-             structlogger.error(
-                 "bot_builder_service.prompt_to_bot.unexpected_error", error=str(e)
-             )
-             await self._send_sse_event(
+         except TrainingError as e:
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="error", data={"status": "error", "error": str(e)}
+                     event="train_error",
+                     data={"status": "train_error", "error": str(e)},
                  ),
              )
-         finally:
              await sse_response.eof()
+             return
+
+         # 4. Done
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(
+                 event="done",
+                 data={
+                     "status": "done",
+                 },
+             ),
+         )
+
+         structlogger.info(
+             "bot_builder_service.prompt_to_bot.success",
+             client_id=prompt_data.client_id,
+             files_generated=list(bot_files.keys()),
+         )

-     async def handle_template_to_bot(self, request: Request) -> None:
-         """Handle template-to-bot generation requests."""
-         sse_response = await request.respond(content_type="text/event-stream")
+     except ValidationError as e:
+         structlogger.error(
+             "bot_builder_service.prompt_to_bot.validation_error", error=str(e)
+         )
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(
+                 event="validation_error",
+                 data={"status": "validation_error", "error": str(e)},
+             ),
+         )

-         try:
-             # 1. Received
-             await self._send_sse_event(
-                 sse_response,
-                 ServerSentEvent(event="received", data={"status": "received"}),
-             )
+     except Exception as e:
+         structlogger.error(
+             "bot_builder_service.prompt_to_bot.unexpected_error", error=str(e)
+         )
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="error", data={"status": "error", "error": str(e)}),
+         )
+     finally:
+         await sse_response.eof()
+
+
+ @bp.route("/template-to-bot", methods=["POST"])
+ @openapi.summary("Generate bot from predefined template")
+ @openapi.description(
+     "Creates a complete conversational AI bot from a predefined template with "
+     "immediate setup. Returns server-sent events (SSE) for real-time progress "
+     "tracking through the entire bot creation process.\n\n"
+     "**SSE Event Flow:**\n"
+     "1. `received` - Request received by server\n"
+     "2. `generating` - Initializing bot from template\n"
+     "3. `generation_success` - Template initialization completed successfully\n"
+     "4. `training` - Training the bot model\n"
+     "5. `train_success` - Model training completed\n"
+     "6. `done` - Bot creation completed\n\n"
+     "**Error Events (can occur at any time):**\n"
+     "- `generation_error` - Failed to initialize bot from template\n"
+     "- `train_error` - Template loaded but training failed\n"
+     "- `validation_error` - Template configuration is invalid\n"
+     "- `error` - Unexpected error occurred\n\n"
+     "**Usage:** Send POST request with Content-Type: application/json and "
+     "Accept: text/event-stream\n"
+     "**Templates Available:** Check available templates through the API or "
+     "documentation"
+ )
+ @openapi.tag("bot-generation")
+ @openapi.body(
+     {"application/json": model_to_schema(TemplateRequest)},
+     description="Template request with template name and client ID for " "tracking",
+     required=True,
+ )
+ @openapi.response(
+     200,
+     {"text/event-stream": model_to_schema(ServerSentEvent)},
+     description="Server-sent events stream with real-time progress updates",
+     example=ServerSentEvent(
+         event="generation_success",
+         data={"status": "generation_success"},
+     ).model_dump(),
+ )
+ @openapi.response(
+     400,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Validation error in request payload or invalid template name",
+ )
+ @openapi.response(
+     500,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Internal server error",
+ )
+ async def handle_template_to_bot(request: Request) -> None:
+     """Handle template-to-bot generation requests."""
+     sse_response = await request.respond(content_type="text/event-stream")
+     project_generator = get_project_generator(request)
+     input_channel = get_input_channel(request)
+
+     try:
+         # 1. Received
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="received", data={"status": "received"}),
+         )

-             # Validate request
-             template_data = TemplateRequest(**request.json)
+         # Validate request
+         template_data = TemplateRequest(**request.json)

-             # 2. Generating
-             await self._send_sse_event(
-                 sse_response,
-                 ServerSentEvent(event="generating", data={"status": "generating"}),
+         # 2. Generating
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="generating", data={"status": "generating"}),
+         )
+
+         try:
+             # Generate project with retries
+             project_generator.init_from_template(
+                 template_data.template_name,
              )
+             bot_files = project_generator.get_bot_files()

-             try:
-                 # Generate project with retries
-                 self.project_generator.init_from_template(
-                     template_data.template_name,
-                 )
-                 bot_files = self.project_generator.get_bot_files()
-
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="generation_success",
-                         data={"status": "generation_success"},
-                     ),
-                 )
-
-             except ProjectGenerationError as e:
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="generation_error",
-                         data={"status": "generation_error", "error": str(e)},
-                     ),
-                 )
-                 await sse_response.eof()
-                 return
-
-             # 3. Training
-             await self._send_sse_event(
+             await _send_sse_event(
                  sse_response,
-                 ServerSentEvent(event="training", data={"status": "training"}),
+                 ServerSentEvent(
+                     event="generation_success",
+                     data={"status": "generation_success"},
+                 ),
              )

-             try:
-                 # Train and load agent
-                 importer = self.project_generator._create_importer()
-                 self.app.ctx.agent = await train_and_load_agent(importer)
-
-                 # Update input channel with new agent
-                 self.input_channel.agent = self.app.ctx.agent
-
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="train_success", data={"status": "train_success"}
-                     ),
-                 )
-
-             except TrainingError as e:
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="train_error",
-                         data={"status": "train_error", "error": str(e)},
-                     ),
-                 )
-                 await sse_response.eof()
-                 return
-
-             # 4. Done
-             await self._send_sse_event(
+         except ProjectGenerationError as e:
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="done",
-                     data={
-                         "status": "done",
-                     },
+                     event="generation_error",
+                     data={"status": "generation_error", "error": str(e)},
                  ),
              )
+             await sse_response.eof()
+             return

-             structlogger.info(
-                 "bot_builder_service.template_to_bot.success",
-                 client_id=template_data.client_id,
-                 files_generated=list(bot_files.keys()),
-             )
+         # 3. Training
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="training", data={"status": "training"}),
+         )

-         except ValidationError as e:
-             structlogger.error(
-                 "bot_builder_service.template_to_bot.validation_error", error=str(e)
-             )
-             await self._send_sse_event(
+         try:
+             # Train and load agent
+             importer = project_generator._create_importer()
+             request.app.ctx.agent = await train_and_load_agent(importer)
+
+             # Update input channel with new agent
+             input_channel.agent = request.app.ctx.agent
+
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="validation_error",
-                     data={"status": "validation_error", "error": str(e)},
+                     event="train_success", data={"status": "train_success"}
                  ),
              )

-         except Exception as e:
-             structlogger.error(
-                 "bot_builder_service.template_to_bot.unexpected_error", error=str(e)
-             )
-             await self._send_sse_event(
+         except TrainingError as e:
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="error", data={"status": "error", "error": str(e)}
+                     event="train_error",
+                     data={"status": "train_error", "error": str(e)},
                  ),
              )
-         finally:
              await sse_response.eof()
+             return
+
+         # 4. Done
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(
+                 event="done",
+                 data={
+                     "status": "done",
+                 },
+             ),
+         )

-     async def get_bot_data(self, request: Request) -> HTTPResponse:
-         """Get current bot data."""
-         bot_files = self.project_generator.get_bot_files()
-         return response.json(
-             ApiResponse(
-                 status="success",
-                 message="Bot data fetched successfully",
-                 data={"bot_data": bot_files},
-             ).model_dump()
+         structlogger.info(
+             "bot_builder_service.template_to_bot.success",
+             client_id=template_data.client_id,
+             files_generated=list(bot_files.keys()),
+         )
+
+     except ValidationError as e:
+         structlogger.error(
+             "bot_builder_service.template_to_bot.validation_error", error=str(e)
+         )
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(
+                 event="validation_error",
+                 data={"status": "validation_error", "error": str(e)},
+             ),
+         )
+
+     except Exception as e:
+         structlogger.error(
+             "bot_builder_service.template_to_bot.unexpected_error", error=str(e)
+         )
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="error", data={"status": "error", "error": str(e)}),
+         )
+     finally:
+         await sse_response.eof()
+
+
+ @bp.route("/files", methods=["GET"])
+ @openapi.summary("Get bot files")
+ @openapi.description("Retrieves the current bot configuration files and data")
+ @openapi.tag("bot-files")
+ @openapi.response(
+     200,
+     {"application/json": {str: Optional[str]}},
+     description="Bot files retrieved successfully",
+ )
+ @openapi.response(
+     500,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Internal server error",
+ )
+ async def get_bot_files(request: Request) -> HTTPResponse:
+     """Get current bot files."""
+     project_generator = get_project_generator(request)
+     bot_files = project_generator.get_bot_files()
+     return response.json(bot_files)
+
+
+ @bp.route("/files", methods=["PUT"])
+ @openapi.summary("Update bot files")
+ @openapi.description(
+     "Updates the bot configuration files and retrains the model. "
+     "Returns server-sent events (SSE) for real-time progress tracking "
+     "through the entire update process.\n\n"
+     "**SSE Event Flow:**\n"
+     "1. `received` - Request received by server\n"
+     "2. `validating` - Validating bot configuration files\n"
+     "3. `validation_success` - File validation completed successfully\n"
+     "4. `training` - Training the bot model with updated files\n"
+     "5. `train_success` - Model training completed\n"
+     "6. `done` - Bot files update completed\n\n"
+     "**Error Events (can occur at any time):**\n"
+     "- `validation_error` - Bot configuration files are invalid\n"
+     "- `train_error` - Files updated but training failed\n"
+     "- `error` - Unexpected error occurred\n\n"
+     "**Usage:** Send PUT request with Content-Type: application/json and "
+     "Accept: text/event-stream"
+ )
+ @openapi.tag("bot-files")
+ @openapi.body(
+     {"application/json": {"file_name": str}},
+     description="A dictionary mapping file names to their updated content. "
+     "The file name should be the name of the file in the project folder. "
+     "Files that are not in the request will not be updated.",
+     required=True,
+ )
+ @openapi.response(
+     200,
+     {"text/event-stream": str},
+     description="Server-sent events stream with update progress",
+ )
+ @openapi.response(
+     400,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Validation error in bot files",
+ )
+ @openapi.response(
+     500,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Internal server error",
+ )
+ async def update_bot_files(request: Request) -> None:
+     """Update bot files with server-sent events for progress tracking."""
+     sse_response = await request.respond(content_type="text/event-stream")
+     project_generator = get_project_generator(request)
+     input_channel = get_input_channel(request)
+
+     try:
+         # 1. Received
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="received", data={"status": "received"}),
          )

-     async def update_bot_data(self, request: Request) -> None:
-         """Update bot data with server-sent events for progress tracking."""
-         sse_response = await request.respond(content_type="text/event-stream")
+         # Update bot files
+         bot_files = request.json
+         project_generator.update_bot_files(bot_files)
+
+         # 2. Validating
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="validating", data={"status": "validating"}),
+         )

          try:
-             # 1. Received
-             await self._send_sse_event(
-                 sse_response,
-                 ServerSentEvent(event="received", data={"status": "received"}),
-             )
+             importer = project_generator._create_importer()
+             validation_error = await validate_project(importer)

-             # Update bot files
-             bot_data = request.json
-             self.project_generator.update_bot_files(bot_data)
+             if validation_error:
+                 raise ValidationError(validation_error)

-             # 2. Validating
-             await self._send_sse_event(
+             await _send_sse_event(
                  sse_response,
-                 ServerSentEvent(event="validating", data={"status": "validating"}),
+                 ServerSentEvent(
+                     event="validation_success",
+                     data={"status": "validation_success"},
+                 ),
              )

-             try:
-                 importer = self.project_generator._create_importer()
-                 validation_error = await validate_project(importer)
-
-                 if validation_error:
-                     raise ValidationError(validation_error)
-
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="validation_success",
-                         data={"status": "validation_success"},
-                     ),
-                 )
-
-             except ValidationError as e:
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="validation_error",
-                         data={"status": "validation_error", "error": str(e)},
-                     ),
-                 )
-                 await sse_response.eof()
-                 return
-
-             # 3. Training
-             await self._send_sse_event(
+         except ValidationError as e:
+             await _send_sse_event(
                  sse_response,
-                 ServerSentEvent(event="training", data={"status": "training"}),
+                 ServerSentEvent(
+                     event="validation_error",
+                     data={"status": "validation_error", "error": str(e)},
+                 ),
              )
+             await sse_response.eof()
+             return
+
+         # 3. Training
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="training", data={"status": "training"}),
+         )

-             try:
-                 self.app.ctx.agent = await train_and_load_agent(importer)
-                 self.input_channel.agent = self.app.ctx.agent
-
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="train_success", data={"status": "train_success"}
-                     ),
-                 )
-
-             except TrainingError as e:
-                 await self._send_sse_event(
-                     sse_response,
-                     ServerSentEvent(
-                         event="train_error",
-                         data={"status": "train_error", "error": str(e)},
-                     ),
-                 )
-                 await sse_response.eof()
-                 return
-
-             # 4. Done
-             await self._send_sse_event(
+         try:
+             request.app.ctx.agent = await train_and_load_agent(importer)
+             input_channel.agent = request.app.ctx.agent
+
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="done",
-                     data={
-                         "status": "done",
-                         "bot_data": self.project_generator.get_bot_files(),
-                     },
+                     event="train_success", data={"status": "train_success"}
                  ),
              )

-         except Exception as e:
-             await self._send_sse_event(
+         except TrainingError as e:
+             await _send_sse_event(
                  sse_response,
                  ServerSentEvent(
-                     event="error", data={"status": "error", "error": str(e)}
+                     event="train_error",
+                     data={"status": "train_error", "error": str(e)},
                  ),
              )
-         finally:
              await sse_response.eof()
+             return
+
+         # 4. Done
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(
+                 event="done",
+                 data={
+                     "status": "done",
+                 },
+             ),
+         )

-     async def llm_builder(self, request: Request) -> HTTPResponse:
-         """Handle LLM builder requests."""
-         try:
-             # Validate request
-             builder_request = LLMBuilderRequest(**request.json)
-
-             # Get current conversation context
-             current_tracker = await self.current_tracker_from_input_channel()
-             bot_logs = get_recent_logs()
-             chat_bot_files = self.project_generator.get_bot_files()
-
-             # create LLM builder context
-             llm_builder_context = LLMBuilderContext(
-                 tracker=current_tracker,
-                 bot_logs=bot_logs,
-                 chat_bot_files=chat_bot_files,
-                 chat_history=builder_request.messages,
-             )
+     except Exception as e:
+         await _send_sse_event(
+             sse_response,
+             ServerSentEvent(event="error", data={"status": "error", "error": str(e)}),
+         )
+     finally:
+         await sse_response.eof()

-             # Generate response
-             messages = await llm_service.create_helper_messages(llm_builder_context)
-             llm_response = await llm_service.generate_helper_response(messages)

-             return response.json(llm_response)
+ @bp.route("/data", methods=["GET"])
+ @openapi.summary("Get bot data")
+ @openapi.description(
+     "Retrieves the current bot data in CALM import format with flows, domain, "
+     "config, endpoints, and NLU data"
+ )
+ @openapi.tag("bot-data")
+ @openapi.response(
+     200,
+     {"application/json": model_to_schema(CALMUserData)},
+     description="Bot data retrieved successfully",
+ )
+ @openapi.response(
+     500,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Internal server error",
+ )
+ async def get_bot_data(request: Request) -> HTTPResponse:
+     """Get current bot data in CALM import format."""
+     try:
+         project_generator = get_project_generator(request)
+         calm_parts = extract_calm_import_parts_from_project_generator(project_generator)
+
+         return response.json(calm_parts.model_dump())
+     except Exception as e:
+         structlogger.error("bot_builder_service.get_bot_data.error", error=str(e))
+         return response.json(
+             ApiErrorResponse(
+                 error="Failed to retrieve bot data",
+                 details={"error": str(e)},
+             ).model_dump(),
+             status=500,
+         )

-         except LLMGenerationError as e:
-             structlogger.error(
-                 "bot_builder_service.llm_builder.generation_error", error=str(e)
-             )
-             return response.json(
-                 ApiErrorResponse(
-                     error="LLM helper generation failed", details={"llm_error": str(e)}
-                 ).model_dump(),
-                 status=502,
-             )

-         except Exception as e:
-             structlogger.error(
-                 "bot_builder_service.llm_builder.unexpected_error", error=str(e)
-             )
-             return response.json(
-                 ApiErrorResponse(
-                     error="Unexpected error in LLM builder",
-                     details=None,
-                 ).model_dump(),
-                 status=500,
-             )
+ @bp.route("/llm-builder", methods=["POST"])
+ @openapi.summary("LLM assistant for bot building")
+ @openapi.description(
+     "Provides LLM-powered assistance for bot building tasks, including "
+     "debugging, suggestions, and explanations"
+ )
+ @openapi.tag("llm-assistant")
+ @openapi.body(
+     {"application/json": model_to_schema(LLMBuilderRequest)},
+     description="LLM builder request containing chat messages and context",
+     required=True,
+ )
+ @openapi.response(
+     200,
+     {"application/json": model_to_schema(LLMHelperResponse)},
+     description="LLM response with assistance and suggestions",
+ )
+ @openapi.response(
+     400,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Validation error in request",
+ )
+ @openapi.response(
+     502,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="LLM generation failed",
+ )
+ @openapi.response(
+     500,
+     {"application/json": model_to_schema(ApiErrorResponse)},
+     description="Internal server error",
+ )
+ async def llm_builder(request: Request) -> HTTPResponse:
+     """Handle LLM builder requests."""
+     project_generator = get_project_generator(request)
+     input_channel = get_input_channel(request)
+
+     try:
+         # Validate request
+         builder_request = LLMBuilderRequest(**request.json)
+
+         # Get current conversation context
+         current_tracker = await current_tracker_from_input_channel(
+             request.app, input_channel
+         )
+         bot_logs = get_recent_logs()
+         chat_bot_files = project_generator.get_bot_files()
+
+         # create LLM builder context
+         llm_builder_context = LLMBuilderContext(
+             tracker=current_tracker,
+             bot_logs=bot_logs,
+             chat_bot_files=chat_bot_files,
+             chat_history=builder_request.messages,
+         )

-     async def current_tracker_from_input_channel(
-         self,
-     ) -> Optional[DialogueStateTracker]:
-         """Generate chat bot context from current conversation."""
-         if self.app.ctx.agent and self.input_channel.latest_tracker_session_id:
-             return await self.app.ctx.agent.tracker_store.retrieve(
-                 self.input_channel.latest_tracker_session_id
-             )
-         else:
-             return None
-
-     @staticmethod
-     async def _send_sse_event(
-         sse_response: HTTPResponse, event: ServerSentEvent
-     ) -> None:
-         """Send a server-sent event."""
-         await sse_response.send(event.format())
-
-     def run(self) -> None:
-         """Run the service."""
-         structlogger.info(
-             "service.starting",
-             host=config.BUILDER_SERVER_HOST,
-             port=config.BUILDER_SERVER_PORT,
+         # Generate response
+         messages = await llm_service.create_helper_messages(llm_builder_context)
+         llm_response = await llm_service.generate_helper_response(messages)
+
+         return response.json(llm_response)
+
+     except LLMGenerationError as e:
+         structlogger.error(
+             "bot_builder_service.llm_builder.generation_error", error=str(e)
+         )
+         return response.json(
+             ApiErrorResponse(
+                 error="LLM helper generation failed", details={"llm_error": str(e)}
+             ).model_dump(),
+             status=502,
          )

-         self.app.run(
-             host=config.BUILDER_SERVER_HOST,
-             port=config.BUILDER_SERVER_PORT,
-             legacy=True,
-             motd=False,
+     except Exception as e:
+         structlogger.error(
+             "bot_builder_service.llm_builder.unexpected_error", error=str(e)
          )
+         return response.json(
+             ApiErrorResponse(
+                 error="Unexpected error in LLM builder",
+                 details=None,
+             ).model_dump(),
+             status=500,
+         )
+
+
+ async def current_tracker_from_input_channel(
+     app: Any, input_channel: StudioChatInput
+ ) -> Optional[DialogueStateTracker]:
+     """Generate chat bot context from current conversation."""
+     if app.ctx.agent and input_channel.latest_tracker_session_id:
+         return await app.ctx.agent.tracker_store.retrieve(
+             input_channel.latest_tracker_session_id
+         )
+     else:
+         return None
+
+
+ async def _send_sse_event(sse_response: HTTPResponse, event: ServerSentEvent) -> None:
+     """Send a server-sent event."""
+     await sse_response.send(event.format())
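All generation and update endpoints in this version stream server-sent events rather than returning a single JSON body, so clients must read the response incrementally. Below is a minimal sketch of consuming `/api/prompt-to-bot`, assuming the service listens on localhost:8080 (the host and port are not specified anywhere in this diff); the `prompt` and `client_id` fields mirror how the handler reads `PromptRequest`:

```python
# Hypothetical client for the SSE endpoints -- host and port are assumptions.
import requests

with requests.post(
    "http://localhost:8080/api/prompt-to-bot",
    json={"prompt": "a bot that books train tickets", "client_id": "demo"},
    headers={"Accept": "text/event-stream"},
    stream=True,
) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line:
            # Lines alternate between "event: <name>" and "data: <json>",
            # e.g. event: training / data: {"status": "training"}
            print(line)
```

The server closes the stream with `eof()` after the terminal `done` event or after any of the error events (`generation_error`, `train_error`, `validation_error`, `error`).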