rasa-pro 3.13.0a1.dev5__py3-none-any.whl → 3.13.0a1.dev7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic; see the registry's advisory page for more details.

Files changed (49)
  1. rasa/builder/README.md +120 -0
  2. rasa/builder/config.py +69 -0
  3. rasa/builder/create_openai_vector_store.py +204 -45
  4. rasa/builder/exceptions.py +49 -0
  5. rasa/builder/llm_helper_prompt.jinja2 +245 -0
  6. rasa/builder/llm_service.py +327 -0
  7. rasa/builder/logging_utils.py +51 -0
  8. rasa/builder/main.py +61 -0
  9. rasa/builder/models.py +174 -0
  10. rasa/builder/project_generator.py +264 -0
  11. rasa/builder/service.py +447 -0
  12. rasa/builder/{skill_to_bot_prompt.jinja → skill_to_bot_prompt.jinja2} +10 -4
  13. rasa/builder/training_service.py +123 -0
  14. rasa/builder/validation_service.py +79 -0
  15. rasa/cli/project_templates/finance/config.yml +17 -0
  16. rasa/cli/project_templates/finance/credentials.yml +33 -0
  17. rasa/cli/project_templates/finance/data/flows/transfer_money.yml +5 -0
  18. rasa/cli/project_templates/finance/data/patterns/pattern_session_start.yml +7 -0
  19. rasa/cli/project_templates/finance/domain.yml +7 -0
  20. rasa/cli/project_templates/finance/endpoints.yml +58 -0
  21. rasa/cli/project_templates/plain/config.yml +17 -0
  22. rasa/cli/project_templates/plain/credentials.yml +33 -0
  23. rasa/cli/project_templates/plain/data/patterns/pattern_session_start.yml +7 -0
  24. rasa/cli/project_templates/plain/domain.yml +5 -0
  25. rasa/cli/project_templates/plain/endpoints.yml +58 -0
  26. rasa/cli/project_templates/telecom/config.yml +17 -0
  27. rasa/cli/project_templates/telecom/credentials.yml +33 -0
  28. rasa/cli/project_templates/telecom/data/flows/upgrade_contract.yml +5 -0
  29. rasa/cli/project_templates/telecom/data/patterns/pattern_session_start.yml +7 -0
  30. rasa/cli/project_templates/telecom/domain.yml +7 -0
  31. rasa/cli/project_templates/telecom/endpoints.yml +58 -0
  32. rasa/cli/scaffold.py +19 -3
  33. rasa/core/actions/action.py +5 -3
  34. rasa/core/channels/studio_chat.py +17 -3
  35. rasa/model_manager/model_api.py +1 -1
  36. rasa/model_manager/runner_service.py +1 -1
  37. rasa/model_manager/trainer_service.py +1 -1
  38. rasa/model_manager/utils.py +1 -29
  39. rasa/shared/core/domain.py +62 -15
  40. rasa/shared/core/flows/yaml_flows_io.py +16 -8
  41. rasa/telemetry.py +2 -1
  42. rasa/utils/io.py +27 -9
  43. rasa/version.py +1 -1
  44. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/METADATA +1 -1
  45. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/RECORD +48 -20
  46. rasa/builder/prompt_to_bot.py +0 -696
  47. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/NOTICE +0 -0
  48. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/WHEEL +0 -0
  49. {rasa_pro-3.13.0a1.dev5.dist-info → rasa_pro-3.13.0a1.dev7.dist-info}/entry_points.txt +0 -0
rasa/builder/README.md ADDED
@@ -0,0 +1,120 @@
1
+ # Rasa Prompt-to-Bot Service
2
+
3
+ A production-ready service that generates Rasa chatbots from natural language descriptions using LLMs.
4
+
5
+ ## Architecture
6
+
7
+ The service follows functional programming principles with minimal use of classes:
8
+
9
+ ### Core Modules
10
+
11
+ - **`config.py`** - Configuration management using module-level constants
12
+ - **`exceptions.py`** - Custom exception hierarchy for error handling
13
+ - **`models.py`** - Pydantic models for request/response validation
14
+ - **`llm_service.py`** - LLM interactions (minimal class for state management)
15
+ - **`validation_service.py`** - Project validation functions
16
+ - **`training_service.py`** - Model training functions
17
+ - **`project_generator.py`** - Project generation (class for bot files state)
18
+ - **`service.py`** - Main orchestrating service (class for app state)
19
+ - **`logging_utils.py`** - Thread-safe logging utilities
20
+ - **`llm_context.py`** - Conversation context formatting
21
+ - **`main.py`** - Application entry point
22
+
23
+ ### Utility Scripts
24
+
25
+ - **`scrape_rasa_docs.py`** - Documentation scraping
26
+ - **`create_openai_vector_store.py`** - Documentation indexing
27
+
28
+ ## Key Design Principles
29
+
30
+ 1. **Functional First**: Use functions for stateless operations
31
+ 2. **Minimal Classes**: Classes only when state management is needed
32
+ 3. **Configuration**: Environment variables with sensible defaults
33
+ 4. **Error Handling**: Structured exception hierarchy
34
+ 5. **Type Safety**: Full type annotations and Pydantic validation
35
+ 6. **Thread Safety**: Safe concurrent operations
36
+ 7. **Resource Management**: Proper cleanup and lifecycle handling
37
+
38
+ ## Usage
39
+
40
+ ### Running the Service
41
+
42
+ ```bash
43
+ python rasa/builder/main.py
44
+ ```
45
+
46
+ ### Environment Configuration
47
+
48
+ ```bash
49
+ # OpenAI Settings
50
+ export OPENAI_MODEL="gpt-4.1-2025-04-14"
51
+ export OPENAI_TEMPERATURE="0.7"
52
+ export OPENAI_VECTOR_STORE_ID="vs_xxxxx"
53
+ export OPENAI_TIMEOUT="30"
54
+
55
+ # Server Settings
56
+ export SERVER_HOST="0.0.0.0"
57
+ export SERVER_PORT="5005"
58
+ export MAX_RETRIES="5"
59
+ export CORS_ORIGINS="http://localhost:3000,https://example.com"
60
+
61
+ # Validation Settings
62
+ export VALIDATION_FAIL_ON_WARNINGS="false"
63
+ ```
64
+
65
+ ### API Endpoints
66
+
67
+ - `POST /api/prompt-to-bot` - Generate bot from description
68
+ - `GET /api/bot-data` - Get current bot configuration
69
+ - `PUT /api/bot-data` - Update bot configuration (SSE)
70
+ - `POST /api/llm-builder` - LLM helper for bot development
71
+ - `GET /` - Health check
72
+
73
+ ### Documentation Setup
74
+
75
+ ```bash
76
+ # 1. Scrape Rasa documentation
77
+ python rasa/builder/scrape_rasa_docs.py
78
+
79
+ # 2. Create OpenAI vector store
80
+ python rasa/builder/create_openai_vector_store.py
81
+ ```
82
+
83
+ ## Benefits of Functional Approach
84
+
85
+ - **Simpler**: Easy to understand and reason about
86
+ - **Testable**: Functions are easier to unit test
87
+ - **Reusable**: Pure functions can be composed
88
+ - **Maintainable**: Clear separation of concerns
89
+ - **Performant**: No unnecessary object overhead
90
+ - **Debuggable**: Clear call stacks and data flow
91
+
92
+ ## Error Handling
93
+
94
+ The service uses a structured exception hierarchy:
95
+
96
+ - `PromptToBotError` - Base exception
97
+ - `ValidationError` - Project validation failures
98
+ - `TrainingError` - Model training issues
99
+ - `LLMGenerationError` - LLM API problems
100
+ - `ProjectGenerationError` - Generation retry exhaustion
101
+ - `AgentLoadError` - Agent loading failures
102
+
103
+ ## Logging
104
+
105
+ Structured logging with context using `structlog`:
106
+
107
+ ```python
108
+ structlogger.info("operation.success", key="value")
109
+ structlogger.error("operation.failed", error=str(e))
110
+ ```
111
+
112
+ ## State Management
113
+
114
+ Only classes that truly need state:
115
+
116
+ 1. **`LLMService`** - Caches schemas and manages OpenAI client
117
+ 2. **`ProjectGenerator`** - Maintains current bot files
118
+ 3. **`PromptToBotService`** - Manages Sanic app and agent state
119
+
120
+ Everything else uses pure functions for maximum simplicity and testability.
rasa/builder/config.py ADDED
@@ -0,0 +1,69 @@
1
+ """Configuration module for the prompt-to-bot service."""
2
+
3
+ import os
4
+ from typing import Any, Dict
5
+
6
+ import importlib_resources
7
+
8
+ from rasa.constants import PACKAGE_NAME
9
+ from rasa.shared.utils.yaml import read_yaml, read_yaml_file
10
+
11
+ # OpenAI Configuration
12
+ OPENAI_MODEL = os.getenv("OPENAI_MODEL", "gpt-4.1-2025-04-14")
13
+ OPENAI_TEMPERATURE = float(os.getenv("OPENAI_TEMPERATURE", "0.7"))
14
+ OPENAI_VECTOR_STORE_ID = os.getenv(
15
+ "OPENAI_VECTOR_STORE_ID", "vs_685123376e288191a005b6b144d3026f"
16
+ )
17
+ OPENAI_MAX_VECTOR_RESULTS = int(os.getenv("OPENAI_MAX_VECTOR_RESULTS", "10"))
18
+ OPENAI_TIMEOUT = int(os.getenv("OPENAI_TIMEOUT", "30"))
19
+
20
+ # Server Configuration
21
+ SERVER_HOST = os.getenv("SERVER_HOST", "0.0.0.0")
22
+ SERVER_PORT = int(os.getenv("SERVER_PORT", "5005"))
23
+ MAX_RETRIES = int(os.getenv("MAX_RETRIES", "5"))
24
+ MAX_LOG_ENTRIES = int(os.getenv("MAX_LOG_ENTRIES", "30"))
25
+
26
+ # CORS Configuration
27
+ _cors_origins_env = os.getenv("CORS_ORIGINS", "*")
28
+ CORS_ORIGINS = _cors_origins_env.split(",") if _cors_origins_env != "*" else ["*"]
29
+
30
+ # Validation Configuration
31
+ VALIDATION_FAIL_ON_WARNINGS = (
32
+ os.getenv("VALIDATION_FAIL_ON_WARNINGS", "false").lower() == "true"
33
+ )
34
+ VALIDATION_MAX_HISTORY = None # Could be configured if needed
35
+
36
+
37
+ def get_default_config(assistant_id: str) -> Dict[str, Any]:
38
+ """Get default Rasa configuration."""
39
+ base_config = read_yaml_file(
40
+ str(
41
+ importlib_resources.files(PACKAGE_NAME).joinpath(
42
+ "cli/project_templates/default/config.yml"
43
+ )
44
+ )
45
+ )
46
+ base_config["assistant_id"] = assistant_id
47
+ return base_config
48
+
49
+
50
+ def get_default_endpoints() -> Dict[str, Any]:
51
+ """Get default endpoints configuration."""
52
+ return read_yaml_file(
53
+ str(
54
+ importlib_resources.files(PACKAGE_NAME).joinpath(
55
+ "cli/project_templates/default/endpoints.yml"
56
+ )
57
+ )
58
+ )
59
+
60
+
61
+ def get_default_credentials() -> Dict[str, Any]:
62
+ """Get default credentials configuration."""
63
+ default_credentials_yaml = """
64
+ studio_chat:
65
+ user_message_evt: "user_message"
66
+ bot_message_evt: "bot_message"
67
+ session_persistence: true
68
+ """
69
+ return read_yaml(default_credentials_yaml)
@@ -1,69 +1,228 @@
1
+ #!/usr/bin/env python3
2
+ """Script to create and populate OpenAI vector store with Rasa documentation."""
3
+
1
4
  import json
2
- from dataclasses import dataclass
5
+ import sys
3
6
  from pathlib import Path
7
+ from typing import Dict, List
4
8
 
5
9
  import openai
10
+ import structlog
11
+
12
+ structlogger = structlog.get_logger()
13
+
14
+ # Configuration
15
+ DOCS_DIR = "rasa_docs_md"
16
+ FILE_IDS_FILE = "file_ids.json"
17
+ MARKDOWN_TO_URL_FILE = "markdown_to_url.json"
18
+ ASSISTANT_NAME = "Rasa Docs Assistant"
19
+
20
+
21
+ def load_url_mapping() -> Dict[str, str]:
22
+ """Load the markdown filename to URL mapping."""
23
+ markdown_to_url_file = Path(MARKDOWN_TO_URL_FILE)
24
+
25
+ if not markdown_to_url_file.exists():
26
+ raise FileNotFoundError(
27
+ f"URL mapping file {markdown_to_url_file} not found. "
28
+ "Please run scrape_rasa_docs.py first."
29
+ )
30
+
31
+ with open(markdown_to_url_file, "r") as f:
32
+ return json.load(f)
33
+
34
+
35
+ def get_markdown_files(docs_dir: str = DOCS_DIR) -> List[Path]:
36
+ """Get all markdown files in the docs directory."""
37
+ docs_path = Path(docs_dir)
38
+
39
+ if not docs_path.exists():
40
+ raise FileNotFoundError(f"Documentation directory {docs_dir} not found")
6
41
 
7
- DOCS_DIR = "rasa_docs_md" # Folder with scraped Markdown files
8
- assistant_name = "Rasa Docs Assistant"
42
+ md_files = list(docs_path.glob("*.md"))
43
+ if not md_files:
44
+ raise FileNotFoundError(f"No markdown files found in {docs_dir}")
9
45
 
46
+ structlogger.info("vector_store.found_files", count=len(md_files))
47
+ return md_files
10
48
 
11
- @dataclass
12
- class FileWithAttributes:
13
- file_id: str
14
- file_name: str
15
- attributes: dict
16
49
 
50
+ def load_or_upload_files(client: openai.OpenAI) -> List[Dict[str, str]]:
51
+ """Load existing file IDs or upload files to OpenAI."""
52
+ file_ids_file = Path(FILE_IDS_FILE)
53
+
54
+ if file_ids_file.exists():
55
+ structlogger.info("vector_store.loading_existing_files")
56
+ with open(file_ids_file, "r") as f:
57
+ return json.load(f)
58
+
59
+ return upload_files(client)
60
+
61
+
62
+ def upload_files(client: openai.OpenAI) -> List[Dict[str, str]]:
63
+ """Upload markdown files to OpenAI."""
64
+ structlogger.info("vector_store.uploading_files")
65
+
66
+ md_files = get_markdown_files()
67
+ uploaded_files = []
68
+
69
+ for md_file in md_files:
70
+ try:
71
+ with open(md_file, "rb") as f:
72
+ uploaded = client.files.create(file=f, purpose="assistants")
73
+
74
+ file_info = {"file_id": uploaded.id, "file_name": md_file.name}
75
+ uploaded_files.append(file_info)
76
+
77
+ structlogger.info(
78
+ "vector_store.file_uploaded",
79
+ file_name=md_file.name,
80
+ file_id=uploaded.id,
81
+ )
82
+
83
+ except Exception as e:
84
+ structlogger.error(
85
+ "vector_store.upload_failed", file_name=md_file.name, error=str(e)
86
+ )
87
+ raise
17
88
 
18
- def get_files_with_metadata(files) -> list[FileWithAttributes]:
19
- with open("markdown_to_url.json", "r") as f:
20
- markdown_to_url = json.load(f)
89
+ # Save file IDs for future use
90
+ with open(FILE_IDS_FILE, "w") as f:
91
+ json.dump(uploaded_files, f, indent=2)
92
+
93
+ structlogger.info("vector_store.upload_complete", count=len(uploaded_files))
94
+ return uploaded_files
95
+
96
+
97
+ def prepare_files_with_metadata(files: List[Dict[str, str]]) -> List[Dict[str, str]]:
98
+ """Prepare files with URL metadata."""
99
+ url_mapping = load_url_mapping()
21
100
 
22
101
  files_with_metadata = []
23
- for file in files:
102
+ for file_info in files:
103
+ file_name = file_info["file_name"]
104
+ url = url_mapping.get(file_name, "")
105
+
106
+ if not url:
107
+ structlogger.warning("vector_store.missing_url", file_name=file_name)
108
+
24
109
  files_with_metadata.append(
25
- FileWithAttributes(
26
- file_id=file["file_id"],
27
- file_name=file["file_name"],
28
- attributes={"url": markdown_to_url[file["file_name"]]},
29
- )
110
+ {"file_id": file_info["file_id"], "file_name": file_name, "url": url}
30
111
  )
31
112
 
32
113
  return files_with_metadata
33
114
 
34
115
 
35
- # Step 1: Upload files
36
- print("📤 Uploading files to OpenAI...")
116
+ def create_vector_store(
117
+ client: openai.OpenAI, files_with_metadata: List[Dict[str, str]]
118
+ ) -> str:
119
+ """Create vector store and add files."""
120
+ try:
121
+ # Create vector store
122
+ structlogger.info("vector_store.creating")
123
+ vector_store = client.vector_stores.create(name=ASSISTANT_NAME)
124
+
125
+ # Add files to vector store
126
+ for file_meta in files_with_metadata:
127
+ try:
128
+ client.vector_stores.files.create(
129
+ vector_store_id=vector_store.id,
130
+ file_id=file_meta["file_id"],
131
+ attributes={"url": file_meta["url"]},
132
+ )
133
+
134
+ structlogger.info(
135
+ "vector_store.file_added",
136
+ file_name=file_meta["file_name"],
137
+ url=file_meta["url"],
138
+ )
139
+
140
+ except Exception as e:
141
+ structlogger.error(
142
+ "vector_store.file_add_failed",
143
+ file_name=file_meta["file_name"],
144
+ error=str(e),
145
+ )
146
+ # Continue with other files
147
+
148
+ structlogger.info(
149
+ "vector_store.created",
150
+ vector_store_id=vector_store.id,
151
+ files_count=len(files_with_metadata),
152
+ )
153
+
154
+ return vector_store.id
155
+
156
+ except Exception as e:
157
+ structlogger.error("vector_store.creation_failed", error=str(e))
158
+ raise
159
+
160
+
161
+ def run_vector_store_creation() -> str:
162
+ """Run the complete vector store creation process."""
163
+ client = openai.OpenAI()
164
+
165
+ try:
166
+ # Load or upload files
167
+ files = load_or_upload_files(client)
168
+
169
+ # Prepare files with metadata
170
+ files_with_metadata = prepare_files_with_metadata(files)
171
+
172
+ # Create vector store
173
+ vector_store_id = create_vector_store(client, files_with_metadata)
174
+
175
+ print("\n🎉 Vector store created successfully!")
176
+ print(f"Vector store ID: {vector_store_id}")
177
+ print(f"Files processed: {len(files_with_metadata)}")
178
+
179
+ return vector_store_id
180
+
181
+ except Exception as e:
182
+ structlogger.error("vector_store.process_failed", error=str(e))
183
+ print(f"\n❌ Vector store creation failed: {e}")
184
+ raise
185
+
186
+
187
+ def setup_logging():
188
+ """Setup basic logging."""
189
+ import logging.config
190
+
191
+ logging_config = {
192
+ "version": 1,
193
+ "disable_existing_loggers": False,
194
+ "formatters": {
195
+ "standard": {"format": "%(asctime)s [%(levelname)s] %(name)s: %(message)s"},
196
+ },
197
+ "handlers": {
198
+ "default": {
199
+ "level": "INFO",
200
+ "formatter": "standard",
201
+ "class": "logging.StreamHandler",
202
+ },
203
+ },
204
+ "loggers": {"": {"handlers": ["default"], "level": "INFO", "propagate": False}},
205
+ }
206
+
207
+ logging.config.dictConfig(logging_config)
37
208
 
38
- files = []
39
209
 
40
- # try to read the file ids from the json file
41
- if Path("file_ids.json").exists():
42
- with open("file_ids.json", "r") as f:
43
- files = json.load(f)
44
- else:
45
- for md_file in Path(DOCS_DIR).glob("*.md"):
46
- with open(md_file, "rb") as f:
47
- uploaded = openai.files.create(file=f, purpose="assistants")
48
- files.append({"file_id": uploaded.id, "file_name": md_file.name})
49
- print(f"✅ Uploaded {md_file.name} → {uploaded.id}")
210
+ def main():
211
+ """Main entry point for the script."""
212
+ setup_logging()
50
213
 
51
- # persist the file ids in a json file
52
- with open("file_ids.json", "w") as f:
53
- json.dump(files, f, indent=2)
214
+ try:
215
+ run_vector_store_creation()
216
+ return 0
54
217
 
55
- # Step 2: Create Vector Store
56
- print("\n🤖 Creating vector store...")
218
+ except KeyboardInterrupt:
219
+ print("\n⏹️ Process interrupted by user")
220
+ return 1
57
221
 
58
- vector_store = openai.vector_stores.create(name="Rasa Docs")
222
+ except Exception as e:
223
+ print(f"\n💥 Unexpected error: {e}")
224
+ return 1
59
225
 
60
- for file in get_files_with_metadata(files):
61
- openai.vector_stores.files.create(
62
- vector_store_id=vector_store.id,
63
- file_id=file.file_id,
64
- attributes=file.attributes,
65
- )
66
- print(f"✅ Added {file.file_name} to vector store.")
67
226
 
68
- print("\n🎉 Vector store created successfully!")
69
- print(f"Vector store ID: {vector_store.id}")
227
+ if __name__ == "__main__":
228
+ sys.exit(main())
@@ -0,0 +1,49 @@
1
+ """Custom exceptions for the prompt-to-bot service."""
2
+
3
+ from typing import Any, Optional
4
+
5
+
6
+ class PromptToBotError(Exception):
7
+ """Base exception for prompt-to-bot service."""
8
+
9
+ pass
10
+
11
+
12
+ class ValidationError(PromptToBotError):
13
+ """Raised when Rasa project validation fails."""
14
+
15
+ def __init__(self, message: str, validation_logs: Optional[Any] = None):
16
+ super().__init__(message)
17
+ self.validation_logs = validation_logs
18
+
19
+
20
+ class TrainingError(PromptToBotError):
21
+ """Raised when model training fails."""
22
+
23
+ pass
24
+
25
+
26
+ class LLMGenerationError(PromptToBotError):
27
+ """Raised when LLM generation fails."""
28
+
29
+ pass
30
+
31
+
32
+ class SchemaValidationError(PromptToBotError):
33
+ """Raised when schema validation fails."""
34
+
35
+ pass
36
+
37
+
38
+ class AgentLoadError(PromptToBotError):
39
+ """Raised when agent loading fails."""
40
+
41
+ pass
42
+
43
+
44
+ class ProjectGenerationError(PromptToBotError):
45
+ """Raised when project generation fails after retries."""
46
+
47
+ def __init__(self, message: str, attempts: int):
48
+ super().__init__(f"{message} (failed after {attempts} attempts)")
49
+ self.attempts = attempts