rasa-pro 3.14.1__py3-none-any.whl → 3.15.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic. See the package registry page for more details.

Files changed (69)
  1. rasa/builder/config.py +4 -0
  2. rasa/builder/constants.py +5 -0
  3. rasa/builder/copilot/copilot.py +28 -9
  4. rasa/builder/copilot/models.py +251 -32
  5. rasa/builder/document_retrieval/inkeep_document_retrieval.py +2 -0
  6. rasa/builder/download.py +111 -1
  7. rasa/builder/evaluator/__init__.py +0 -0
  8. rasa/builder/evaluator/constants.py +15 -0
  9. rasa/builder/evaluator/copilot_executor.py +89 -0
  10. rasa/builder/evaluator/dataset/models.py +173 -0
  11. rasa/builder/evaluator/exceptions.py +4 -0
  12. rasa/builder/evaluator/response_classification/__init__.py +0 -0
  13. rasa/builder/evaluator/response_classification/constants.py +66 -0
  14. rasa/builder/evaluator/response_classification/evaluator.py +346 -0
  15. rasa/builder/evaluator/response_classification/langfuse_runner.py +463 -0
  16. rasa/builder/evaluator/response_classification/models.py +61 -0
  17. rasa/builder/evaluator/scripts/__init__.py +0 -0
  18. rasa/builder/evaluator/scripts/run_response_classification_evaluator.py +152 -0
  19. rasa/builder/jobs.py +208 -1
  20. rasa/builder/logging_utils.py +25 -24
  21. rasa/builder/main.py +6 -1
  22. rasa/builder/models.py +23 -0
  23. rasa/builder/project_generator.py +29 -10
  24. rasa/builder/service.py +205 -46
  25. rasa/builder/telemetry/__init__.py +0 -0
  26. rasa/builder/telemetry/copilot_langfuse_telemetry.py +384 -0
  27. rasa/builder/{copilot/telemetry.py → telemetry/copilot_segment_telemetry.py} +21 -3
  28. rasa/builder/training_service.py +13 -1
  29. rasa/builder/validation_service.py +2 -1
  30. rasa/constants.py +1 -0
  31. rasa/core/actions/action_clean_stack.py +32 -0
  32. rasa/core/actions/constants.py +4 -0
  33. rasa/core/actions/custom_action_executor.py +70 -12
  34. rasa/core/actions/grpc_custom_action_executor.py +41 -2
  35. rasa/core/actions/http_custom_action_executor.py +49 -25
  36. rasa/core/channels/voice_stream/voice_channel.py +14 -2
  37. rasa/core/policies/flows/flow_executor.py +20 -6
  38. rasa/core/run.py +15 -4
  39. rasa/dialogue_understanding/generator/llm_based_command_generator.py +6 -3
  40. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +15 -7
  41. rasa/dialogue_understanding/generator/single_step/search_ready_llm_command_generator.py +15 -8
  42. rasa/dialogue_understanding/processor/command_processor.py +49 -7
  43. rasa/e2e_test/e2e_config.py +4 -3
  44. rasa/engine/recipes/default_components.py +16 -6
  45. rasa/graph_components/validators/default_recipe_validator.py +10 -4
  46. rasa/nlu/classifiers/diet_classifier.py +2 -0
  47. rasa/shared/core/slots.py +55 -24
  48. rasa/shared/providers/_configs/azure_openai_client_config.py +4 -5
  49. rasa/shared/providers/_configs/default_litellm_client_config.py +4 -4
  50. rasa/shared/providers/_configs/litellm_router_client_config.py +3 -2
  51. rasa/shared/providers/_configs/openai_client_config.py +5 -7
  52. rasa/shared/providers/_configs/rasa_llm_client_config.py +4 -4
  53. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +4 -4
  54. rasa/shared/providers/llm/_base_litellm_client.py +42 -14
  55. rasa/shared/providers/llm/litellm_router_llm_client.py +38 -15
  56. rasa/shared/providers/llm/self_hosted_llm_client.py +34 -32
  57. rasa/shared/utils/common.py +9 -1
  58. rasa/shared/utils/configs.py +5 -8
  59. rasa/utils/common.py +9 -0
  60. rasa/utils/endpoints.py +8 -0
  61. rasa/utils/installation_utils.py +111 -0
  62. rasa/utils/tensorflow/callback.py +2 -0
  63. rasa/utils/train_utils.py +2 -0
  64. rasa/version.py +1 -1
  65. {rasa_pro-3.14.1.dist-info → rasa_pro-3.15.0a3.dist-info}/METADATA +15 -13
  66. {rasa_pro-3.14.1.dist-info → rasa_pro-3.15.0a3.dist-info}/RECORD +69 -53
  67. {rasa_pro-3.14.1.dist-info → rasa_pro-3.15.0a3.dist-info}/NOTICE +0 -0
  68. {rasa_pro-3.14.1.dist-info → rasa_pro-3.15.0a3.dist-info}/WHEEL +0 -0
  69. {rasa_pro-3.14.1.dist-info → rasa_pro-3.15.0a3.dist-info}/entry_points.txt +0 -0
rasa/builder/service.py CHANGED
@@ -5,6 +5,7 @@ import time
5
5
  from http import HTTPStatus
6
6
  from typing import Any, Optional
7
7
 
8
+ import langfuse
8
9
  import structlog
9
10
  from sanic import Blueprint, HTTPResponse, response
10
11
  from sanic.request import Request
@@ -41,7 +42,6 @@ from rasa.builder.copilot.signing import (
41
42
  create_signature_envelope_for_text,
42
43
  verify_signature,
43
44
  )
44
- from rasa.builder.copilot.telemetry import CopilotTelemetry
45
45
  from rasa.builder.download import create_bot_project_archive
46
46
  from rasa.builder.guardrails.constants import (
47
47
  BLOCK_SCOPE_PROJECT,
@@ -51,6 +51,7 @@ from rasa.builder.guardrails.constants import (
51
51
  from rasa.builder.guardrails.store import guardrails_store
52
52
  from rasa.builder.job_manager import job_manager
53
53
  from rasa.builder.jobs import (
54
+ run_backup_to_bot_job,
54
55
  run_prompt_to_bot_job,
55
56
  run_replace_all_files_job,
56
57
  run_template_to_bot_job,
@@ -65,15 +66,19 @@ from rasa.builder.models import (
65
66
  ApiErrorResponse,
66
67
  AssistantInfo,
67
68
  BotData,
69
+ BotFiles,
68
70
  JobCreateResponse,
69
71
  JobStatus,
70
72
  JobStatusEvent,
71
73
  PromptRequest,
74
+ RestoreFromBackupRequest,
72
75
  ServerSentEvent,
73
76
  TemplateRequest,
74
77
  )
75
78
  from rasa.builder.project_generator import ProjectGenerator
76
79
  from rasa.builder.shared.tracker_context import TrackerContext
80
+ from rasa.builder.telemetry.copilot_langfuse_telemetry import CopilotLangfuseTelemetry
81
+ from rasa.builder.telemetry.copilot_segment_telemetry import CopilotSegmentTelemetry
77
82
  from rasa.core.agent import Agent
78
83
  from rasa.core.channels.studio_chat import StudioChatInput
79
84
  from rasa.core.exceptions import AgentNotReady
@@ -463,6 +468,95 @@ async def handle_template_to_bot(request: Request) -> HTTPResponse:
463
468
  )
464
469
 
465
470
 
471
+ @bp.route("/backup-to-bot", methods=["POST"])
472
+ @openapi.summary("Generate bot from backup archive")
473
+ @openapi.description(
474
+ "Creates a complete conversational AI bot from a backup tar.gz archive. "
475
+ "Returns immediately with a job ID. Connect to `/job-events/<job_id>` to "
476
+ "receive server-sent events (SSE) for real-time progress tracking "
477
+ "throughout the bot restoration process.\n\n"
478
+ "**SSE Event Flow** (via `/job-events/<job_id>`):\n"
479
+ "1. `received` - Request received by server\n"
480
+ "2. `generating` - Extracting and restoring bot from backup\n"
481
+ "3. `generation_success` - Backup restoration completed successfully\n"
482
+ "4. `training` - Training the bot model (if no existing model found)\n"
483
+ "5. `train_success` - Model training completed (if training was needed)\n"
484
+ "6. `done` - Bot restoration completed\n\n"
485
+ "**Error Events:**\n"
486
+ "- `generation_error` - Failed to restore bot from backup\n"
487
+ "- `train_error` - Backup restored but training failed\n"
488
+ "- `validation_error` - Restored bot configuration is invalid\n"
489
+ "- `error` - Unexpected error occurred\n\n"
490
+ "**Usage:**\n"
491
+ "1. Send POST request with Content-Type: application/json\n"
492
+ "2. The response will be a JSON object `{job_id: ...}`\n"
493
+ "3. Connect to `/job-events/<job_id>` for a server-sent event stream of progress."
494
+ )
495
+ @openapi.tag("bot-generation")
496
+ @openapi.body(
497
+ {"application/json": model_to_schema(RestoreFromBackupRequest)},
498
+ description="Backup request with presigned URL to tar.gz archive.",
499
+ required=True,
500
+ example={"presigned_url": "https://s3.amazonaws.com/bucket/path?signature=..."},
501
+ )
502
+ @openapi.response(
503
+ 200,
504
+ {"application/json": model_to_schema(JobCreateResponse)},
505
+ description="Job created. Poll or subscribe to /job-events/<job_id> for progress.",
506
+ )
507
+ @openapi.response(
508
+ 400,
509
+ {"application/json": model_to_schema(ApiErrorResponse)},
510
+ description="Validation error in request payload or invalid presigned URL",
511
+ )
512
+ @openapi.response(
513
+ 500,
514
+ {"application/json": model_to_schema(ApiErrorResponse)},
515
+ description="Internal server error",
516
+ )
517
+ @openapi.parameter(
518
+ HEADER_USER_ID,
519
+ description=(
520
+ "Optional user id to associate requests (e.g., for telemetry/guardrails)."
521
+ ),
522
+ _in="header",
523
+ required=False,
524
+ schema=str,
525
+ )
526
+ async def handle_backup_to_bot(request: Request) -> HTTPResponse:
527
+ """Handle backup-to-bot restoration requests."""
528
+ try:
529
+ payload = RestoreFromBackupRequest(**request.json)
530
+ except Exception as exc:
531
+ return response.json(
532
+ ApiErrorResponse(
533
+ error="Invalid request", details={"error": str(exc)}
534
+ ).model_dump(),
535
+ status=400,
536
+ )
537
+
538
+ try:
539
+ # Allocate job and schedule background task
540
+ job = job_manager.create_job()
541
+ request.app.add_task(
542
+ run_backup_to_bot_job(request.app, job, payload.presigned_url)
543
+ )
544
+ return response.json(JobCreateResponse(job_id=job.id).model_dump(), status=200)
545
+ except Exception as exc:
546
+ capture_exception_with_context(
547
+ exc,
548
+ "bot_builder_service.backup_to_bot.unexpected_error",
549
+ tags={"endpoint": "/api/backup-to-bot"},
550
+ )
551
+ return response.json(
552
+ ApiErrorResponse(
553
+ error="Failed to create backup-to-bot job",
554
+ details={"error": str(exc)},
555
+ ).model_dump(),
556
+ status=HTTPStatus.INTERNAL_SERVER_ERROR,
557
+ )
558
+
559
+
466
560
  @bp.route("/files", methods=["GET"])
467
561
  @openapi.summary("Get bot files")
468
562
  @openapi.description(
@@ -798,16 +892,14 @@ async def get_bot_info(request: Request) -> HTTPResponse:
798
892
  @openapi.summary("Download bot project as tar.gz")
799
893
  @openapi.description(
800
894
  "Downloads the current bot project files as a compressed tar.gz archive. "
801
- "Includes all configuration files and a .env file with RASA_PRO_LICENSE. "
802
- "Requires valid JWT token in Authorization header."
895
+ "Includes all configuration files and a .env file with RASA_PRO_LICENSE."
803
896
  )
804
897
  @openapi.tag("bot-files")
805
898
  @openapi.parameter(
806
- "Authorization",
807
- description=("Bearer token for authentication. Always required for this endpoint."),
808
- _in="header",
809
- required=True,
810
- schema=str,
899
+ "exclude_models_directory",
900
+ bool,
901
+ location="query",
902
+ description="Whether to exclude the models directory",
811
903
  )
812
904
  @openapi.parameter(
813
905
  HEADER_USER_ID,
@@ -830,32 +922,25 @@ async def get_bot_info(request: Request) -> HTTPResponse:
830
922
  {"application/gzip": bytes},
831
923
  description="Bot project downloaded successfully as tar.gz",
832
924
  )
833
- @openapi.response(
834
- 401,
835
- {"application/json": model_to_schema(ApiErrorResponse)},
836
- description=(
837
- "Authentication failed - Authorization header missing or invalid. "
838
- "Authentication is always required for this endpoint."
839
- ),
840
- )
841
925
  @openapi.response(
842
926
  500,
843
927
  {"application/json": model_to_schema(ApiErrorResponse)},
844
928
  description="Internal server error",
845
929
  )
846
- @protected(always_required=True)
847
930
  async def download_bot_project(request: Request) -> HTTPResponse:
848
931
  """Download bot project as tar.gz archive."""
849
932
  try:
850
- # Token verification is enforced by the
851
- # protected(always_required=True) decorator.
933
+ # Get query parameters
934
+ exclude_models_directory = (
935
+ request.args.get("exclude_models_directory", "true").lower() == "true"
936
+ )
937
+ project_name = request.args.get("project_name", "bot-project")
852
938
 
853
939
  # Get bot files
854
940
  project_generator = get_project_generator(request)
855
- bot_files = project_generator.get_bot_files()
856
-
857
- # Get project name from query parameters, default to "bot-project"
858
- project_name = request.args.get("project_name", "bot-project")
941
+ bot_files = project_generator.get_bot_files(
942
+ exclude_models_directory=exclude_models_directory
943
+ )
859
944
 
860
945
  # Create tar.gz archive
861
946
  tar_data = create_bot_project_archive(bot_files, project_name)
@@ -1020,6 +1105,9 @@ async def download_bot_project(request: Request) -> HTTPResponse:
1020
1105
  schema=str,
1021
1106
  )
1022
1107
  @protected()
1108
+ # Disable automatic input/output capture for langfuse tracing
1109
+ # This allows manual control over what data is sent to langfuse
1110
+ @langfuse.observe(capture_input=False, capture_output=False)
1023
1111
  async def copilot(request: Request) -> None:
1024
1112
  """Handle copilot requests with streaming markdown responses."""
1025
1113
  sse = await request.respond(content_type="text/event-stream")
@@ -1046,9 +1134,12 @@ async def copilot(request: Request) -> None:
1046
1134
  )
1047
1135
  return
1048
1136
 
1049
- telemetry = CopilotTelemetry(project_id=HELLO_RASA_PROJECT_ID, user_id=user_id)
1137
+ telemetry = CopilotSegmentTelemetry(
1138
+ project_id=HELLO_RASA_PROJECT_ID, user_id=user_id
1139
+ )
1050
1140
  structlogger.debug("builder.copilot.telemetry.request.init")
1051
1141
 
1142
+ # TODO: This can be removed once Langfuse is completed.
1052
1143
  if req.last_message and req.last_message.role == ROLE_USER:
1053
1144
  structlogger.debug("builder.copilot.telemetry.request.user_turn")
1054
1145
  # Offload telemetry logging to a background task
@@ -1088,26 +1179,9 @@ async def copilot(request: Request) -> None:
1088
1179
  return
1089
1180
 
1090
1181
  # 4. Get the necessary context for the copilot
1091
- tracker = await current_tracker_from_input_channel(request.app, req.session_id)
1092
- tracker_context = TrackerContext.from_tracker(
1093
- tracker, max_turns=COPILOT_ASSISTANT_TRACKER_MAX_TURNS
1094
- )
1095
- if (
1096
- tracker_context is not None
1097
- and llm_service.guardrails_policy_checker is not None
1098
- ):
1099
- tracker_context = await llm_service.guardrails_policy_checker.check_assistant_chat_for_policy_violations( # noqa: E501
1100
- tracker_context=tracker_context,
1101
- hello_rasa_user_id=user_id,
1102
- hello_rasa_project_id=HELLO_RASA_PROJECT_ID,
1103
- lakera_project_id=LAKERA_ASSISTANT_HISTORY_GUARDRAIL_PROJECT_ID,
1104
- )
1105
-
1106
- # Copilot doesn't need to know about the docs and any file that is not a core
1107
- # assistant file
1108
- relevant_assistant_files = project_generator.get_bot_files(
1109
- exclude_docs_directory=True,
1110
- allowed_file_extensions=["yaml", "yml", "py", "jinja", "jinja2"],
1182
+ tracker_context = await get_tracker_context_for_copilot(request, req, user_id)
1183
+ relevant_assistant_files = get_relevant_assistant_files_for_copilot(
1184
+ project_generator,
1111
1185
  )
1112
1186
  context = CopilotContext(
1113
1187
  tracker_context=tracker_context,
@@ -1162,7 +1236,7 @@ async def copilot(request: Request) -> None:
1162
1236
  async for token in intercepted_stream:
1163
1237
  await sse.send(token.to_sse_event().format())
1164
1238
 
1165
- # 8. Offload telemetry logging to a background task
1239
+ # 8a. Offload metabase telemetry logging to a background task
1166
1240
  request.app.add_task(
1167
1241
  asyncio.to_thread(
1168
1242
  telemetry.log_copilot_from_handler,
@@ -1177,9 +1251,27 @@ async def copilot(request: Request) -> None:
1177
1251
  else None
1178
1252
  ),
1179
1253
  tracker_event_attachments=generation_context.tracker_event_attachments,
1180
- **copilot_client.usage_statistics.model_dump(),
1254
+ model=copilot_client.usage_statistics.model or "N/A",
1255
+ prompt_tokens=copilot_client.usage_statistics.prompt_tokens or 0,
1256
+ cached_prompt_tokens=(
1257
+ copilot_client.usage_statistics.cached_prompt_tokens or 0
1258
+ ),
1259
+ completion_tokens=(
1260
+ copilot_client.usage_statistics.completion_tokens or 0
1261
+ ),
1262
+ total_tokens=copilot_client.usage_statistics.total_tokens or 0,
1181
1263
  )
1182
1264
  )
1265
+ # 8b. Setup output trace attributes for Langfuse
1266
+ CopilotLangfuseTelemetry.setup_copilot_endpoint_call_trace_attributes(
1267
+ hello_rasa_project_id=HELLO_RASA_PROJECT_ID or "N/A",
1268
+ chat_id=req.session_id or "N/A",
1269
+ user_id=user_id,
1270
+ request=req,
1271
+ handler=copilot_response_handler,
1272
+ relevant_documents=generation_context.relevant_documents,
1273
+ copilot_context=context,
1274
+ )
1183
1275
 
1184
1276
  # 9. Once the stream is over, extract and send references
1185
1277
  # if any documents were used
@@ -1365,3 +1457,70 @@ async def _handle_guardrail_violation_and_maybe_block(
1365
1457
 
1366
1458
  await sse.send(message.to_sse_event().format())
1367
1459
  return message
1460
+
1461
+
1462
+ @langfuse.observe(capture_input=False, capture_output=False)
1463
+ async def get_tracker_context_for_copilot(
1464
+ request: Request,
1465
+ req: CopilotRequest,
1466
+ user_id: str,
1467
+ ) -> Optional[TrackerContext]:
1468
+ """Check the assistant chat for guardrail policy violations.
1469
+
1470
+ Args:
1471
+ request: The request object.
1472
+ req: The CopilotRequest object.
1473
+ user_id: The user ID.
1474
+
1475
+ Returns:
1476
+ The tracker context if the tracker is available.
1477
+ """
1478
+ tracker = await current_tracker_from_input_channel(request.app, req.session_id)
1479
+ tracker_context = TrackerContext.from_tracker(
1480
+ tracker, max_turns=COPILOT_ASSISTANT_TRACKER_MAX_TURNS
1481
+ )
1482
+ if (
1483
+ tracker_context is not None
1484
+ and llm_service.guardrails_policy_checker is not None
1485
+ ):
1486
+ tracker_context = await llm_service.guardrails_policy_checker.check_assistant_chat_for_policy_violations( # noqa: E501
1487
+ tracker_context=tracker_context,
1488
+ hello_rasa_user_id=user_id,
1489
+ hello_rasa_project_id=HELLO_RASA_PROJECT_ID,
1490
+ lakera_project_id=LAKERA_ASSISTANT_HISTORY_GUARDRAIL_PROJECT_ID,
1491
+ )
1492
+
1493
+ # Track the retrieved tracker context
1494
+ CopilotLangfuseTelemetry.trace_copilot_tracker_context(
1495
+ tracker_context=tracker_context,
1496
+ max_conversation_turns=COPILOT_ASSISTANT_TRACKER_MAX_TURNS,
1497
+ session_id=req.session_id,
1498
+ )
1499
+
1500
+ return tracker_context
1501
+
1502
+
1503
+ @langfuse.observe(capture_input=False, capture_output=False)
1504
+ def get_relevant_assistant_files_for_copilot(
1505
+ project_generator: ProjectGenerator,
1506
+ ) -> BotFiles:
1507
+ """Get the relevant assistant files for the copilot.
1508
+
1509
+ Args:
1510
+ project_generator: The project generator.
1511
+
1512
+ Returns:
1513
+ The relevant assistant files.
1514
+ """
1515
+ # Copilot doesn't need to know about the docs and any file that is not a core
1516
+ # assistant file
1517
+ files = project_generator.get_bot_files(
1518
+ exclude_docs_directory=True,
1519
+ allowed_file_extensions=["yaml", "yml", "py", "jinja", "jinja2"],
1520
+ )
1521
+
1522
+ # Track the retrieved assistant files
1523
+ CopilotLangfuseTelemetry.trace_copilot_relevant_assistant_files(
1524
+ relevant_assistant_files=files,
1525
+ )
1526
+ return files
File without changes