rasa-pro 3.13.0rc1__py3-none-any.whl → 3.13.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic; see the registry's advisory page for more details.

Files changed (49)
  1. rasa/cli/studio/link.py +0 -16
  2. rasa/cli/studio/train.py +1 -4
  3. rasa/cli/studio/upload.py +1 -1
  4. rasa/core/agent.py +6 -0
  5. rasa/core/channels/__init__.py +1 -0
  6. rasa/core/channels/voice_ready/jambonz.py +5 -6
  7. rasa/core/channels/voice_ready/twilio_voice.py +13 -12
  8. rasa/core/channels/voice_ready/utils.py +22 -0
  9. rasa/core/channels/voice_stream/audiocodes.py +5 -11
  10. rasa/core/channels/voice_stream/genesys.py +35 -16
  11. rasa/core/channels/voice_stream/jambonz.py +69 -3
  12. rasa/core/channels/voice_stream/twilio_media_streams.py +5 -7
  13. rasa/core/channels/voice_stream/voice_channel.py +39 -10
  14. rasa/core/policies/enterprise_search_policy.py +197 -68
  15. rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2 +4 -1
  16. rasa/core/policies/flows/flow_executor.py +9 -3
  17. rasa/core/processor.py +6 -0
  18. rasa/core/tracker_stores/redis_tracker_store.py +15 -5
  19. rasa/dialogue_understanding/coexistence/llm_based_router.py +11 -0
  20. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +3 -2
  21. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +9 -0
  22. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +5 -2
  23. rasa/dialogue_understanding/processor/command_processor.py +12 -10
  24. rasa/e2e_test/constants.py +1 -1
  25. rasa/llm_fine_tuning/annotation_module.py +43 -11
  26. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +1 -1
  27. rasa/model_manager/runner_service.py +20 -4
  28. rasa/model_manager/trainer_service.py +6 -0
  29. rasa/privacy/privacy_filter.py +57 -4
  30. rasa/privacy/privacy_manager.py +31 -16
  31. rasa/shared/constants.py +2 -0
  32. rasa/shared/core/constants.py +1 -0
  33. rasa/shared/utils/llm.py +86 -2
  34. rasa/studio/data_handler.py +27 -13
  35. rasa/studio/download.py +5 -1
  36. rasa/studio/link.py +12 -1
  37. rasa/studio/prompts.py +5 -7
  38. rasa/studio/pull/domains.py +14 -3
  39. rasa/studio/pull/pull.py +6 -2
  40. rasa/studio/push.py +2 -0
  41. rasa/studio/upload.py +61 -5
  42. rasa/studio/utils.py +33 -0
  43. rasa/tracing/instrumentation/attribute_extractors.py +1 -1
  44. rasa/version.py +1 -1
  45. {rasa_pro-3.13.0rc1.dist-info → rasa_pro-3.13.0rc3.dist-info}/METADATA +1 -1
  46. {rasa_pro-3.13.0rc1.dist-info → rasa_pro-3.13.0rc3.dist-info}/RECORD +49 -48
  47. {rasa_pro-3.13.0rc1.dist-info → rasa_pro-3.13.0rc3.dist-info}/NOTICE +0 -0
  48. {rasa_pro-3.13.0rc1.dist-info → rasa_pro-3.13.0rc3.dist-info}/WHEEL +0 -0
  49. {rasa_pro-3.13.0rc1.dist-info → rasa_pro-3.13.0rc3.dist-info}/entry_points.txt +0 -0
@@ -35,7 +35,7 @@ DEFAULT_REPHRASING_PROMPT_TEMPLATE = importlib.resources.read_text(
35
35
 
36
36
  DEFAULT_LLM_CONFIG = {
37
37
  PROVIDER_CONFIG_KEY: OPENAI_PROVIDER,
38
- MODEL_CONFIG_KEY: "gpt-4o-mini",
38
+ MODEL_CONFIG_KEY: "gpt-4.1-mini-2025-04-14",
39
39
  TIMEOUT_CONFIG_KEY: 7,
40
40
  TEMPERATURE_CONFIG_KEY: 0.0,
41
41
  MAX_COMPLETION_TOKENS_CONFIG_KEY: 4096,
@@ -2,7 +2,8 @@ import os
2
2
  import shutil
3
3
  import subprocess
4
4
  from enum import Enum
5
- from typing import Dict, Optional
5
+ from pathlib import Path
6
+ from typing import Dict, Optional, Union
6
7
 
7
8
  import aiohttp
8
9
  import structlog
@@ -18,6 +19,7 @@ from rasa.model_manager.utils import (
18
19
  write_encoded_data_to_file,
19
20
  )
20
21
  from rasa.model_manager.warm_rasa_process import start_rasa_process
22
+ from rasa.studio.prompts import handle_prompts
21
23
 
22
24
  structlogger = structlog.get_logger()
23
25
 
@@ -121,11 +123,25 @@ def get_open_port() -> int:
121
123
 
122
124
 
123
125
  def write_encoded_config_data_to_files(
124
- encoded_configs: Dict[str, bytes], base_path: str
126
+ encoded_configs: Dict[str, Union[bytes, Dict[str, str]]], base_path: str
125
127
  ) -> None:
126
128
  """Write the encoded config data to files."""
127
- for key, value in encoded_configs.items():
128
- write_encoded_data_to_file(value, subpath(base_path, f"{key}.yml"))
129
+ endpoints_encoded = encoded_configs.get("endpoints")
130
+ if endpoints_encoded:
131
+ write_encoded_data_to_file(
132
+ endpoints_encoded, subpath(base_path, "endpoints.yml")
133
+ )
134
+ config_encoded = encoded_configs.get("config")
135
+ if config_encoded:
136
+ write_encoded_data_to_file(config_encoded, subpath(base_path, "config.yml"))
137
+ credentials_encoded = encoded_configs.get("credentials")
138
+ if credentials_encoded:
139
+ write_encoded_data_to_file(
140
+ credentials_encoded, subpath(base_path, "credentials.yml")
141
+ )
142
+
143
+ if prompts := encoded_configs.get("prompts"):
144
+ handle_prompts(prompts, Path(base_path))
129
145
 
130
146
 
131
147
  def prepare_bot_directory(
@@ -2,6 +2,7 @@ import os
2
2
  import shutil
3
3
  import subprocess
4
4
  from enum import Enum
5
+ from pathlib import Path
5
6
  from typing import Any, Dict, Optional
6
7
 
7
8
  import structlog
@@ -20,6 +21,7 @@ from rasa.model_manager.warm_rasa_process import (
20
21
  start_rasa_process,
21
22
  )
22
23
  from rasa.model_training import generate_random_model_name
24
+ from rasa.studio.prompts import handle_prompts
23
25
 
24
26
  structlogger = structlog.get_logger()
25
27
 
@@ -208,6 +210,7 @@ def write_training_data_to_files(
208
210
  "stories": "base64 encoded stories.yml",
209
211
  "rules": "base64 encoded rules.yml",
210
212
  "nlu": "base64 encoded nlu.yml"
213
+ "prompts": "dictionary with the prompts",
211
214
  }
212
215
  ```
213
216
  """
@@ -230,6 +233,9 @@ def write_training_data_to_files(
230
233
  subpath(training_base_path + "/" + parent_path, file_name),
231
234
  )
232
235
 
236
+ if prompts := encoded_training_data.get("prompts"):
237
+ handle_prompts(prompts, Path(training_base_path))
238
+
233
239
 
234
240
  def prepare_training_directory(
235
241
  training_base_path: str, assistant_id: str, encoded_training_data: Dict[str, Any]
@@ -2,6 +2,7 @@ import copy
2
2
  import datetime
3
3
  import json
4
4
  import os
5
+ from pathlib import Path
5
6
  from typing import Any, Dict, List, Optional
6
7
 
7
8
  import structlog
@@ -60,8 +61,17 @@ class PrivacyFilter:
60
61
  def _load_gliner_model() -> Optional[Any]:
61
62
  """Load the GLiNER model for PII detection."""
62
63
  local_model_path = os.getenv(GLINER_MODEL_PATH_ENV_VAR_NAME)
63
- cache_dir = os.getenv(HUGGINGFACE_CACHE_DIR_ENV_VAR_NAME)
64
- model_path = local_model_path or DEFAULT_PII_MODEL
64
+ cache_dir_env_value = os.getenv(HUGGINGFACE_CACHE_DIR_ENV_VAR_NAME)
65
+ cache_dir = Path(cache_dir_env_value).resolve() if cache_dir_env_value else None
66
+ model_path = (
67
+ Path(local_model_path).resolve() if local_model_path else DEFAULT_PII_MODEL
68
+ )
69
+ local_files_only = isinstance(model_path, Path) and model_path.exists()
70
+
71
+ structlogger.debug(
72
+ "rasa.privacy.privacy_filter.loading_gliner_model",
73
+ local_files_only=local_files_only,
74
+ )
65
75
 
66
76
  try:
67
77
  from gliner import GLiNER
@@ -69,6 +79,7 @@ class PrivacyFilter:
69
79
  return GLiNER.from_pretrained(
70
80
  model_path,
71
81
  cache_dir=cache_dir,
82
+ local_files_only=local_files_only,
72
83
  )
73
84
  except ImportError:
74
85
  structlogger.warning(
@@ -193,7 +204,9 @@ class PrivacyFilter:
193
204
 
194
205
  for key, slot in anonymized_slots.items():
195
206
  original_slot_value = key.split(":", 1)[1]
196
- anonymized_text = user_event.text.replace(original_slot_value, slot.value)
207
+ anonymized_text = self._smart_replace(
208
+ user_event.text, original_slot_value, slot.value
209
+ )
197
210
  user_event.text = anonymized_text
198
211
 
199
212
  anonymized_parse_data[TEXT_KEY] = anonymized_text
@@ -232,7 +245,9 @@ class PrivacyFilter:
232
245
 
233
246
  for key, slot in anonymized_slots.items():
234
247
  original_slot_value = key.split(":", 1)[1]
235
- anonymized_text = bot_event.text.replace(original_slot_value, slot.value)
248
+ anonymized_text = self._smart_replace(
249
+ bot_event.text, original_slot_value, slot.value
250
+ )
236
251
  bot_event.text = anonymized_text
237
252
 
238
253
  bot_event.text = self._anonymize_edge_cases(bot_event.text, anonymized_slots)
@@ -338,3 +353,41 @@ class PrivacyFilter:
338
353
  text = text.replace(entity_value, self._mask(entity[ENTITY_LABEL_KEY]))
339
354
 
340
355
  return text
356
+
357
+ @staticmethod
358
+ def _smart_replace(text: str, original_value: str, replacement: str) -> str:
359
+ """Replace original_value with replacement in text.
360
+
361
+ This method performs a string replacement in the text,
362
+ with special handling for floats.
363
+ If original_value is a float string like "24.0",
364
+ also tries replacing the integer version "24".
365
+
366
+ Args:
367
+ text (str): The text to perform replacements on
368
+ original_value (str): The value to replace
369
+ replacement (str): The replacement value
370
+
371
+ Returns:
372
+ str: The text with replacements applied
373
+ """
374
+ # First try the original replacement
375
+ result = text.replace(original_value, replacement)
376
+ if text != result:
377
+ return result
378
+
379
+ # If replacement didn't happen and it's a float,
380
+ # try replacing the integer version
381
+ if "." in original_value:
382
+ try:
383
+ float_val = float(original_value)
384
+ if float_val.is_integer():
385
+ int_version = str(int(float_val))
386
+ result = result.replace(int_version, replacement)
387
+ except ValueError:
388
+ structlogger.warning(
389
+ "rasa.privacy.privacy_filter.smart_replace_float_error",
390
+ event_info="Unable to anonymize float value.",
391
+ )
392
+
393
+ return result
@@ -12,7 +12,7 @@ import structlog
12
12
  from apscheduler.schedulers.background import BackgroundScheduler
13
13
 
14
14
  import rasa.shared.core.trackers
15
- from rasa.core.tracker_stores.tracker_store import TrackerStore
15
+ from rasa.core.tracker_stores.tracker_store import FailSafeTrackerStore, TrackerStore
16
16
  from rasa.privacy.constants import (
17
17
  TEXT_KEY,
18
18
  USER_CHAT_INACTIVITY_IN_MINUTES_ENV_VAR_NAME,
@@ -63,6 +63,7 @@ class BackgroundPrivacyManager:
63
63
  self,
64
64
  endpoints: Optional["AvailableEndpoints"],
65
65
  event_loop: Optional["AbstractEventLoop"] = None,
66
+ in_memory_tracker_store: Optional[TrackerStore] = None,
66
67
  ):
67
68
  self.config = (
68
69
  PrivacyConfig.from_dict(endpoints.privacy)
@@ -76,15 +77,28 @@ class BackgroundPrivacyManager:
76
77
  os.getenv(USER_CHAT_INACTIVITY_IN_MINUTES_ENV_VAR_NAME, 30)
77
78
  )
78
79
 
79
- # we recreate the tracker store here to ensure
80
- # that this instance has no event brokers
81
- # that could publish events during the tracker store
82
- # background jobs
83
- self.tracker_store = (
84
- TrackerStore.create(endpoints.tracker_store)
85
- if endpoints
86
- else TrackerStore.create(None)
87
- )
80
+ if in_memory_tracker_store is not None:
81
+ # if an in-memory tracker store is provided,
82
+ # we need to keep the reference to it
83
+ # so that the background jobs can access it.
84
+ # We also set the event broker to None
85
+ # to prevent it from publishing events
86
+ # during the tracker store background jobs
87
+ in_memory_tracker_store.event_broker = None
88
+ tracker_store = in_memory_tracker_store
89
+ else:
90
+ # we recreate the tracker store here to ensure
91
+ # that this instance has no event brokers
92
+ # that could publish events during the tracker store
93
+ # background jobs
94
+ tracker_store = (
95
+ TrackerStore.create(endpoints.tracker_store)
96
+ if endpoints
97
+ else TrackerStore.create(None)
98
+ )
99
+
100
+ self.tracker_store = FailSafeTrackerStore(tracker_store)
101
+
88
102
  self.event_brokers: List["EventBroker"] = []
89
103
  self.event_loop = event_loop
90
104
 
@@ -124,9 +138,10 @@ class BackgroundPrivacyManager:
124
138
  cls,
125
139
  endpoints: Optional["AvailableEndpoints"],
126
140
  event_loop: Optional["AbstractEventLoop"] = None,
141
+ in_memory_tracker_store: Optional[TrackerStore] = None,
127
142
  ) -> BackgroundPrivacyManager:
128
143
  """Create an instance of BackgroundPrivacyManager."""
129
- instance = cls(endpoints, event_loop)
144
+ instance = cls(endpoints, event_loop, in_memory_tracker_store)
130
145
  return await instance.initialize(endpoints)
131
146
 
132
147
  def stop(self) -> None:
@@ -337,7 +352,7 @@ class BackgroundPrivacyManager:
337
352
  if not full_tracker:
338
353
  structlogger.debug(
339
354
  "rasa.privacy_manager.no_tracker_found_for_sender_id",
340
- key=key,
355
+ sender_id=key,
341
356
  )
342
357
  continue
343
358
 
@@ -359,7 +374,7 @@ class BackgroundPrivacyManager:
359
374
 
360
375
  structlogger.info(
361
376
  "rasa.privacy_manager.save_tracker_after_deletion",
362
- key=key,
377
+ sender_id=key,
363
378
  event_info="Saved tracker with events not scheduled "
364
379
  "for deletion yet.",
365
380
  )
@@ -508,7 +523,7 @@ class BackgroundPrivacyManager:
508
523
  ):
509
524
  structlogger.info(
510
525
  "rasa.privacy_manager.anonymizing_tracker_session",
511
- key=session.sender_id,
526
+ sender_id=session.sender_id,
512
527
  last_event_timestamp=last_event_timestamp,
513
528
  triggered_by="anonymization_cron_job",
514
529
  )
@@ -524,7 +539,7 @@ class BackgroundPrivacyManager:
524
539
  uneligible_events.extend(events)
525
540
  structlogger.debug(
526
541
  "rasa.privacy_manager.session_not_valid_for_anonymization",
527
- key=session.sender_id,
542
+ sender_id=session.sender_id,
528
543
  session_id=session.sender_id,
529
544
  last_event_timestamp=last_event_timestamp,
530
545
  )
@@ -560,7 +575,7 @@ class BackgroundPrivacyManager:
560
575
 
561
576
  structlogger.info(
562
577
  "rasa.privacy_manager.tracker_session_scheduled_for_deletion",
563
- key=full_tracker.sender_id,
578
+ sender_id=full_tracker.sender_id,
564
579
  last_event_timestamp=last_event_timestamp,
565
580
  triggered_by="deletion_cron_job",
566
581
  )
rasa/shared/constants.py CHANGED
@@ -104,6 +104,8 @@ UTTER_FREE_CHITCHAT_RESPONSE = "utter_free_chitchat_response"
104
104
  ASSISTANT_ID_KEY = "assistant_id"
105
105
  ASSISTANT_ID_DEFAULT_VALUE = "placeholder_default"
106
106
 
107
+ ENDPOINTS_NLG_KEY = "nlg"
108
+
107
109
  CONFIG_MANDATORY_COMMON_KEYS = [ASSISTANT_ID_KEY]
108
110
  CONFIG_NAME_KEY = "name"
109
111
  CONFIG_POLICIES_KEY = "policies"
@@ -182,6 +182,7 @@ class SetSlotExtractor(Enum):
182
182
  # the keys for `State` (USER, PREVIOUS_ACTION, SLOTS, ACTIVE_LOOP)
183
183
  # represent the origin of a `SubState`
184
184
  USER = "user"
185
+ BOT = "bot"
185
186
  SLOTS = "slots"
186
187
 
187
188
  USE_TEXT_FOR_FEATURIZATION = "use_text_for_featurization"
rasa/shared/utils/llm.py CHANGED
@@ -6,6 +6,7 @@ import logging
6
6
  from copy import deepcopy
7
7
  from datetime import datetime
8
8
  from functools import wraps
9
+ from pathlib import Path
9
10
  from typing import (
10
11
  TYPE_CHECKING,
11
12
  Any,
@@ -24,6 +25,9 @@ from typing import (
24
25
  import structlog
25
26
  from pydantic import BaseModel, Field
26
27
 
28
+ import rasa.cli.telemetry
29
+ import rasa.cli.utils
30
+ import rasa.shared.utils.cli
27
31
  import rasa.shared.utils.io
28
32
  from rasa.core.available_endpoints import AvailableEndpoints
29
33
  from rasa.shared.constants import (
@@ -31,6 +35,7 @@ from rasa.shared.constants import (
31
35
  CONFIG_PIPELINE_KEY,
32
36
  CONFIG_POLICIES_KEY,
33
37
  DEFAULT_PROMPT_PACKAGE_NAME,
38
+ ENDPOINTS_NLG_KEY,
34
39
  LLM_CONFIG_KEY,
35
40
  MODEL_CONFIG_KEY,
36
41
  MODEL_GROUP_CONFIG_KEY,
@@ -578,7 +583,7 @@ def embedder_factory(
578
583
  ```
579
584
  {
580
585
  "provider": "openai",
581
- "model": "text-embedding-3-small",
586
+ "model": "text-embedding-3-large",
582
587
  "timeout": 10,
583
588
  "num_retries": 3,
584
589
  }
@@ -591,7 +596,7 @@ def embedder_factory(
591
596
  "models": [
592
597
  {
593
598
  "provider": "openai",
594
- "model": "test-embedding-3-small",
599
+ "model": "test-embedding-3-large",
595
600
  "api_key": "test"
596
601
  },
597
602
  ],
@@ -1072,3 +1077,82 @@ def get_system_default_prompts(
1072
1077
  enterprise_search=_get_enterprise_search_prompt(config),
1073
1078
  contextual_response_rephraser=DEFAULT_RESPONSE_VARIATION_PROMPT_TEMPLATE,
1074
1079
  )
1080
+
1081
+
1082
+ def collect_custom_prompts(
1083
+ config: Dict[Text, Any],
1084
+ endpoints: Dict[Text, Any],
1085
+ project_root: Optional[Path] = None,
1086
+ ) -> Dict[Text, Text]:
1087
+ """Collects custom prompts from the project configuration and endpoints.
1088
+
1089
+ Args:
1090
+ config: The configuration dictionary of the project.
1091
+ endpoints: The endpoints configuration dictionary.
1092
+ project_root: The root directory of the project.
1093
+
1094
+ Returns:
1095
+ A dictionary containing custom prompts.
1096
+ The keys are:
1097
+ - 'contextual_response_rephraser'
1098
+ - 'command_generator'
1099
+ - 'enterprise_search'
1100
+ """
1101
+ from rasa.core.policies.enterprise_search_policy import EnterpriseSearchPolicy
1102
+ from rasa.dialogue_understanding.generator.llm_based_command_generator import (
1103
+ LLMBasedCommandGenerator,
1104
+ )
1105
+ from rasa.studio.prompts import (
1106
+ COMMAND_GENERATOR_NAME,
1107
+ CONTEXTUAL_RESPONSE_REPHRASER_NAME,
1108
+ ENTERPRISE_SEARCH_NAME,
1109
+ )
1110
+
1111
+ prompts: Dict[Text, Text] = {}
1112
+ project_root = project_root or Path(".").resolve()
1113
+
1114
+ def _read_prompt(root: Path, path_in_yaml: Text) -> Optional[Text]:
1115
+ if not path_in_yaml:
1116
+ return None
1117
+
1118
+ prompt_path = (
1119
+ (root / path_in_yaml).resolve()
1120
+ if not Path(path_in_yaml).is_absolute()
1121
+ else Path(path_in_yaml)
1122
+ )
1123
+ if prompt_path.exists():
1124
+ return prompt_path.read_text(encoding="utf-8")
1125
+
1126
+ structlogger.warning(
1127
+ "utils.llm.collect_custom_prompts.prompt_not_found",
1128
+ event_info=(f"Prompt file '{prompt_path}' not found. "),
1129
+ prompt_path=prompt_path,
1130
+ project_root=root,
1131
+ )
1132
+ return None
1133
+
1134
+ # contextual_response_rephraser
1135
+ nlg_conf = endpoints.get(ENDPOINTS_NLG_KEY) or {}
1136
+ if prompt_text := _read_prompt(project_root, nlg_conf.get(PROMPT_CONFIG_KEY)):
1137
+ prompts[CONTEXTUAL_RESPONSE_REPHRASER_NAME] = prompt_text
1138
+
1139
+ # command_generator
1140
+ command_generator_classes = {
1141
+ cls.__name__ for cls in all_subclasses(LLMBasedCommandGenerator)
1142
+ }
1143
+ for component in config.get(CONFIG_PIPELINE_KEY, []):
1144
+ if component.get(CONFIG_NAME_KEY) in command_generator_classes:
1145
+ if prompt_text := _read_prompt(
1146
+ project_root, component.get(PROMPT_TEMPLATE_CONFIG_KEY)
1147
+ ):
1148
+ prompts[COMMAND_GENERATOR_NAME] = prompt_text
1149
+ break
1150
+
1151
+ # enterprise_search
1152
+ for policy in config.get(CONFIG_POLICIES_KEY, []):
1153
+ if policy.get(CONFIG_NAME_KEY) == EnterpriseSearchPolicy.__name__:
1154
+ if prompt_text := _read_prompt(project_root, policy.get(PROMPT_CONFIG_KEY)):
1155
+ prompts[ENTERPRISE_SEARCH_NAME] = prompt_text
1156
+ break
1157
+
1158
+ return prompts
@@ -1,5 +1,4 @@
1
1
  import base64
2
- import json
3
2
  import logging
4
3
  from pathlib import Path
5
4
  from typing import Any, Dict, List, Optional, Tuple
@@ -46,15 +45,33 @@ class StudioDataHandler:
46
45
  intent_names: Optional[List[str]] = None,
47
46
  entity_names: Optional[List[str]] = None,
48
47
  ) -> dict:
48
+ from rasa.studio.prompts import (
49
+ COMMAND_GENERATOR_NAME,
50
+ CONTEXTUAL_RESPONSE_REPHRASER_NAME,
51
+ ENTERPRISE_SEARCH_NAME,
52
+ )
53
+
49
54
  request = {
50
- "query": (
51
- "query ExportAsEncodedYaml($input: ExportAsEncodedYamlInput!) "
52
- "{ exportAsEncodedYaml(input: $input) "
53
- "{ ... on ExportModernAsEncodedYamlOutput "
54
- "{ nlu flows domain endpoints config prompts } "
55
- "... on ExportClassicAsEncodedYamlOutput "
56
- "{ nlu domain }}}"
57
- ),
55
+ "query": "query ExportAsEncodedYaml($input: ExportAsEncodedYamlInput!) {\n"
56
+ " exportAsEncodedYaml(input: $input) {\n"
57
+ " ... on ExportModernAsEncodedYamlOutput {\n"
58
+ " nlu\n"
59
+ " flows\n"
60
+ " domain\n"
61
+ " endpoints\n"
62
+ " config\n"
63
+ " prompts {\n"
64
+ f" {COMMAND_GENERATOR_NAME}\n"
65
+ f" {CONTEXTUAL_RESPONSE_REPHRASER_NAME}\n"
66
+ f" {ENTERPRISE_SEARCH_NAME}\n"
67
+ " }\n"
68
+ " }\n"
69
+ " ... on ExportClassicAsEncodedYamlOutput {\n"
70
+ " nlu\n"
71
+ " domain\n"
72
+ " }\n"
73
+ " }\n"
74
+ "}\n",
58
75
  "variables": {"input": {"assistantName": self.assistant_name}},
59
76
  }
60
77
  if intent_names or entity_names:
@@ -98,7 +115,6 @@ class StudioDataHandler:
98
115
  },
99
116
  verify=verify,
100
117
  )
101
-
102
118
  if res.status_code != 200:
103
119
  raise RasaException(
104
120
  f"Download from Studio with URL: "
@@ -203,9 +219,7 @@ class StudioDataHandler:
203
219
  self.flows = self._decode_response(return_data.get("flows"))
204
220
  self.config = self._decode_response(return_data.get("config"))
205
221
  self.endpoints = self._decode_response(return_data.get("endpoints"))
206
-
207
- prompts_string = self._decode_response(return_data.get("prompts"))
208
- self.prompts = json.loads(prompts_string) if prompts_string else None
222
+ self.prompts = return_data.get("prompts")
209
223
 
210
224
  if not self.has_nlu() and not self.has_flows():
211
225
  raise RasaException("No nlu or flows data in Studio response.")
rasa/studio/download.py CHANGED
@@ -25,6 +25,7 @@ from rasa.studio.constants import DOMAIN_FILENAME
25
25
  from rasa.studio.data_handler import StudioDataHandler
26
26
  from rasa.studio.prompts import handle_prompts
27
27
  from rasa.studio.pull.data import _dump_flows_as_separate_files
28
+ from rasa.studio.utils import validate_argument_paths
28
29
 
29
30
  structlogger = structlog.get_logger()
30
31
 
@@ -35,6 +36,7 @@ def handle_download(args: argparse.Namespace) -> None:
35
36
  Args:
36
37
  args: The command line arguments.
37
38
  """
39
+ validate_argument_paths(args)
38
40
  assistant_name = args.assistant_name
39
41
  target_root = _prepare_target_directory(assistant_name)
40
42
 
@@ -47,7 +49,9 @@ def handle_download(args: argparse.Namespace) -> None:
47
49
  _handle_endpoints(handler, target_root)
48
50
  _handle_domain(handler, target_root)
49
51
  _handle_data(handler, target_root)
50
- handle_prompts(handler, target_root)
52
+
53
+ if prompts := handler.get_prompts():
54
+ handle_prompts(prompts, target_root)
51
55
 
52
56
  structlogger.info(
53
57
  "studio.download.success",
rasa/studio/link.py CHANGED
@@ -179,8 +179,19 @@ def handle_link(args: argparse.Namespace) -> None:
179
179
  link_file = _link_file(project_root)
180
180
 
181
181
  if link_file.exists():
182
+ linked_assistant_name = read_assistant_name(project_root)
183
+ if linked_assistant_name == assistant_name:
184
+ rasa.shared.utils.cli.print_info(
185
+ f"Project is already linked to assistant '{assistant_name}'."
186
+ )
187
+ sys.exit(0)
188
+
182
189
  overwrite = questionary.confirm(
183
- f"This project is already linked " f"(link file '{link_file}').\nOverwrite?"
190
+ f"Project is currently linked to the following Rasa Studio assistant:\n\n"
191
+ f" Assistant name: {linked_assistant_name}\n"
192
+ f" Studio URL: {studio_cfg.studio_url}\n"
193
+ f" Keycloak Auth URL: {studio_cfg.authentication_server_url}\n\n"
194
+ f"Do you want to overwrite it with the new assistant '{assistant_name}'?"
184
195
  ).ask()
185
196
  if not overwrite:
186
197
  rasa.shared.utils.cli.print_info(
rasa/studio/prompts.py CHANGED
@@ -20,7 +20,6 @@ from rasa.shared.constants import (
20
20
  from rasa.shared.utils.common import all_subclasses
21
21
  from rasa.shared.utils.llm import get_system_default_prompts
22
22
  from rasa.shared.utils.yaml import read_yaml, write_yaml
23
- from rasa.studio.data_handler import StudioDataHandler
24
23
 
25
24
  structlogger = structlog.get_logger()
26
25
 
@@ -29,14 +28,13 @@ COMMAND_GENERATOR_NAME = "command_generator"
29
28
  ENTERPRISE_SEARCH_NAME = "enterprise_search"
30
29
 
31
30
 
32
- def handle_prompts(handler: StudioDataHandler, root: Path) -> None:
31
+ def handle_prompts(prompts: Dict[Text, Text], root: Path) -> None:
33
32
  """Handle prompts for the assistant.
34
33
 
35
34
  Args:
36
- handler: The data handler to retrieve prompts from.
35
+ prompts: A dict containing prompt names as keys and their content as values.
37
36
  root: The root directory where the prompts should be saved.
38
37
  """
39
- prompts = handler.get_prompts()
40
38
  if not prompts:
41
39
  return
42
40
 
@@ -85,7 +83,7 @@ def _handle_contextual_response_rephraser(
85
83
  return
86
84
 
87
85
  prompt_path = _save_prompt_file(
88
- root, f"{CONTEXTUAL_RESPONSE_REPHRASER_NAME}.jinja", prompt_content
86
+ root, f"{CONTEXTUAL_RESPONSE_REPHRASER_NAME}.jinja2", prompt_content
89
87
  )
90
88
 
91
89
  endpoints["nlg"] = endpoints.get("nlg") or {}
@@ -113,7 +111,7 @@ def _handle_command_generator(
113
111
  return
114
112
 
115
113
  prompt_path = _save_prompt_file(
116
- root, f"{COMMAND_GENERATOR_NAME}.jinja", prompt_content
114
+ root, f"{COMMAND_GENERATOR_NAME}.jinja2", prompt_content
117
115
  )
118
116
 
119
117
  command_generator_names: List[str] = [
@@ -149,7 +147,7 @@ def _handle_enterprise_search(
149
147
  return
150
148
 
151
149
  prompt_path = _save_prompt_file(
152
- root, f"{ENTERPRISE_SEARCH_NAME}.jinja", prompt_content
150
+ root, f"{ENTERPRISE_SEARCH_NAME}.jinja2", prompt_content
153
151
  )
154
152
 
155
153
  _add_prompt_to_config(
@@ -15,8 +15,7 @@ def merge_domain(
15
15
  data_local: TrainingDataImporter,
16
16
  domain_path: Path,
17
17
  ) -> None:
18
- """
19
- Merges the domain from Rasa Studio with the local domain.
18
+ """Merges the domain from Rasa Studio with the local domain.
20
19
 
21
20
  Args:
22
21
  data_from_studio: The training data importer for the Rasa Studio domain.
@@ -29,10 +28,22 @@ def merge_domain(
29
28
  else:
30
29
  all_local_domain_files = data_local.get_domain_files([str(domain_path)])
31
30
 
31
+ studio_domain_file_path = domain_path / STUDIO_DOMAIN_FILENAME
32
+
32
33
  # leftover_domain represents the items in the studio
33
34
  # domain that are not in the local domain
34
35
  leftover_domain = data_from_studio.get_user_domain()
35
36
  for file_path in all_local_domain_files:
37
+ if file_path == str(studio_domain_file_path):
38
+ # we need to exclude the studio domain file from the merge,
39
+ # since we want to dump ALL the remaining items to this path
40
+ # after the merge. if we include it here, we will remove the existing
41
+ # items from the leftover domain and after this loop we will
42
+ # overwrite the studio domain file with the remaining items in
43
+ # the leftover domain - this means we lose the items that were
44
+ # in the studio domain file before we started the merge.
45
+ continue
46
+
36
47
  # For each local domain file, we do a partial merge
37
48
  local_domain = Domain.from_file(str(file_path))
38
49
  updated_local_domain = local_domain.partial_merge(leftover_domain)
@@ -46,4 +57,4 @@ def merge_domain(
46
57
 
47
58
  # If there are still items in leftover_domain, persist them
48
59
  if not leftover_domain.is_empty():
49
- leftover_domain.persist(domain_path / STUDIO_DOMAIN_FILENAME)
60
+ leftover_domain.persist(studio_domain_file_path)