rasa-pro 3.13.0.dev9__py3-none-any.whl → 3.13.0.dev11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of rasa-pro has been flagged as potentially problematic.
Files changed (45)
  1. rasa/cli/export.py +2 -0
  2. rasa/cli/studio/download.py +3 -9
  3. rasa/cli/studio/link.py +1 -2
  4. rasa/cli/studio/pull.py +3 -2
  5. rasa/cli/studio/push.py +1 -1
  6. rasa/cli/studio/train.py +0 -1
  7. rasa/core/exporter.py +36 -0
  8. rasa/core/policies/enterprise_search_policy.py +151 -240
  9. rasa/core/policies/enterprise_search_policy_config.py +242 -0
  10. rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2 +6 -5
  11. rasa/core/utils.py +11 -2
  12. rasa/dialogue_understanding/commands/__init__.py +4 -0
  13. rasa/dialogue_understanding/generator/command_generator.py +11 -1
  14. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v3_claude_3_5_sonnet_20240620_template.jinja2 +78 -0
  15. rasa/dialogue_understanding/generator/single_step/search_ready_llm_command_generator.py +2 -2
  16. rasa/dialogue_understanding/processor/command_processor.py +5 -5
  17. rasa/shared/core/flows/validation.py +9 -2
  18. rasa/shared/providers/_configs/azure_openai_client_config.py +2 -2
  19. rasa/shared/providers/_configs/default_litellm_client_config.py +1 -1
  20. rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py +1 -1
  21. rasa/shared/providers/_configs/openai_client_config.py +1 -1
  22. rasa/shared/providers/_configs/rasa_llm_client_config.py +1 -1
  23. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +1 -1
  24. rasa/shared/providers/_configs/utils.py +0 -99
  25. rasa/shared/utils/configs.py +110 -0
  26. rasa/shared/utils/constants.py +0 -3
  27. rasa/shared/utils/pykwalify_extensions.py +0 -9
  28. rasa/studio/constants.py +1 -0
  29. rasa/studio/download.py +164 -0
  30. rasa/studio/link.py +1 -1
  31. rasa/studio/{download/flows.py → pull/data.py} +2 -131
  32. rasa/studio/{download → pull}/domains.py +1 -1
  33. rasa/studio/pull/pull.py +235 -0
  34. rasa/studio/push.py +5 -0
  35. rasa/studio/train.py +1 -1
  36. rasa/tracing/instrumentation/attribute_extractors.py +10 -5
  37. rasa/version.py +1 -1
  38. {rasa_pro-3.13.0.dev9.dist-info → rasa_pro-3.13.0.dev11.dist-info}/METADATA +1 -1
  39. {rasa_pro-3.13.0.dev9.dist-info → rasa_pro-3.13.0.dev11.dist-info}/RECORD +43 -40
  40. rasa/studio/download/download.py +0 -416
  41. rasa/studio/pull.py +0 -94
  42. /rasa/studio/{download → pull}/__init__.py +0 -0
  43. {rasa_pro-3.13.0.dev9.dist-info → rasa_pro-3.13.0.dev11.dist-info}/NOTICE +0 -0
  44. {rasa_pro-3.13.0.dev9.dist-info → rasa_pro-3.13.0.dev11.dist-info}/WHEEL +0 -0
  45. {rasa_pro-3.13.0.dev9.dist-info → rasa_pro-3.13.0.dev11.dist-info}/entry_points.txt +0 -0
rasa/shared/utils/configs.py ADDED
@@ -0,0 +1,110 @@
+from typing import Optional
+
+import structlog
+
+from rasa.shared.utils.io import raise_deprecation_warning
+
+structlogger = structlog.get_logger()
+
+
+def resolve_aliases(config: dict, deprecated_alias_mapping: dict) -> dict:
+    """
+    Resolve aliases in the configuration to standard keys.
+
+    Args:
+        config: Dictionary containing the configuration.
+        deprecated_alias_mapping: Dictionary mapping aliases to
+            their standard keys.
+
+    Returns:
+        New dictionary containing the processed configuration.
+    """
+    config = config.copy()
+
+    for alias, standard_key in deprecated_alias_mapping.items():
+        # We check for the alias instead of the standard key because our goal is to
+        # update the standard key when the alias is found. Since the standard key is
+        # always included in the default component configurations, we overwrite it
+        # with the alias value if the alias exists.
+        if alias in config:
+            config[standard_key] = config.pop(alias)
+
+    return config
+
+
+def raise_deprecation_warnings(
+    config: dict,
+    deprecated_alias_mapping: dict,
+    source: Optional[str] = None,
+) -> None:
+    """
+    Raises warnings for deprecated keys in the configuration.
+
+    Args:
+        config: Dictionary containing the configuration.
+        deprecated_alias_mapping: Dictionary mapping deprecated keys to
+            their standard keys.
+
+    Raises:
+        DeprecationWarning: If any deprecated key is found in the config.
+    """
+    for alias, standard_key in deprecated_alias_mapping.items():
+        if alias in config:
+            source = f"{source}: " or ""
+            raise_deprecation_warning(
+                message=(
+                    f"{source}"
+                    f"'{alias}' is deprecated and will be removed in "
+                    f"4.0.0. Use '{standard_key}' instead."
+                )
+            )
+
+
+def validate_required_keys(config: dict, required_keys: list) -> None:
+    """
+    Validates that the passed config contains all the required keys.
+
+    Args:
+        config: Dictionary containing the configuration.
+        required_keys: List of keys that must be present in the config.
+
+    Raises:
+        ValueError: If any required key is missing.
+    """
+    missing_keys = [key for key in required_keys if key not in config]
+    if missing_keys:
+        message = f"Missing required keys '{missing_keys}' for configuration."
+        structlogger.error(
+            "validate_required_keys",
+            message=message,
+            missing_keys=missing_keys,
+            config=config,
+        )
+        raise ValueError(message)
+
+
+def validate_forbidden_keys(config: dict, forbidden_keys: list) -> None:
+    """
+    Validates that the passed config doesn't contain any forbidden keys.
+
+    Args:
+        config: Dictionary containing the configuration.
+        forbidden_keys: List of keys that are forbidden in the config.
+
+    Raises:
+        ValueError: If any forbidden key is present.
+    """
+    forbidden_keys_in_config = set(config.keys()).intersection(set(forbidden_keys))
+
+    if forbidden_keys_in_config:
+        message = (
+            f"Forbidden keys '{forbidden_keys_in_config}' present "
+            f"in the configuration."
+        )
+        structlogger.error(
+            "validate_forbidden_keys",
+            message=message,
+            forbidden_keys=forbidden_keys_in_config,
+            config=config,
+        )
+        raise ValueError(message)
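
Taken together, these new helpers cover the alias-migration lifecycle for a component config: warn, rewrite, then validate. A minimal sketch of chaining them, with a hypothetical alias mapping and config values:

    from rasa.shared.utils.configs import (
        raise_deprecation_warnings,
        resolve_aliases,
        validate_required_keys,
    )

    # Hypothetical mapping: the old key "model_name" is superseded by "model".
    DEPRECATED_ALIASES = {"model_name": "model"}
    raw_config = {"model_name": "gpt-4", "timeout": 10}

    # Emit a DeprecationWarning for each alias still in use, then rewrite the
    # aliases to their standard keys (resolve_aliases copies, so raw_config is
    # left untouched).
    raise_deprecation_warnings(raw_config, DEPRECATED_ALIASES, source="llm config")
    config = resolve_aliases(raw_config, DEPRECATED_ALIASES)

    validate_required_keys(config, ["model"])
    assert config == {"model": "gpt-4", "timeout": 10}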
rasa/shared/utils/constants.py CHANGED
@@ -2,9 +2,6 @@ DEFAULT_ENCODING = "utf-8"
 
 READ_YAML_FILE_CACHE_MAXSIZE_ENV_VAR = "READ_YAML_FILE_CACHE_MAXSIZE"
 DEFAULT_READ_YAML_FILE_CACHE_MAXSIZE = 256
-RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME = (
-    "RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS"
-)
 
 LOG_COMPONENT_SOURCE_METHOD_INIT = "init"
 LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON = "fingerprint_addon"
rasa/shared/utils/pykwalify_extensions.py CHANGED
@@ -8,11 +8,6 @@ from typing import Any, Dict, List, Text, Union
 
 from pykwalify.errors import SchemaError
 
-from rasa.shared.utils.constants import (
-    RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME,
-)
-from rasa.utils.beta import ensure_beta_feature_is_enabled
-
 
 def require_response_keys(
     responses: List[Dict[Text, Any]], _: Dict, __: Text
@@ -31,10 +26,6 @@ def require_response_keys(
 
         conditions = response.get("condition", [])
        if isinstance(conditions, str):
-            ensure_beta_feature_is_enabled(
-                "predicates in response conditions",
-                RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME,
-            )
            continue
 
        for condition in conditions:
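
With the beta gate gone, string-valued (predicate) response conditions now pass schema validation unconditionally; previously the RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS environment variable had to be set. A sketch of the branch this hunk changes, using a hypothetical predicate condition:

    # A response condition given as a plain string is treated as a predicate
    # and skipped by require_response_keys without any beta flag check.
    response = {"condition": "slots.account_type == 'premium'"}  # hypothetical
    conditions = response.get("condition", [])
    assert isinstance(conditions, str)  # taken by the `continue` branch above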
rasa/studio/constants.py CHANGED
@@ -14,6 +14,7 @@ RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV = "RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"
 
 STUDIO_NLU_FILENAME = "studio_nlu.yml"
 STUDIO_DOMAIN_FILENAME = "studio_domain.yml"
+DOMAIN_FILENAME = "domain.yml"
 STUDIO_FLOWS_FILENAME = "studio_flows.yml"
 STUDIO_CONFIG_FILENAME = "studio_config.yml"
 STUDIO_ENDPOINTS_FILENAME = "studio_endpoints.yml"
rasa/studio/download.py ADDED
@@ -0,0 +1,164 @@
+import argparse
+import shutil
+from pathlib import Path
+from typing import Dict
+
+import questionary
+import structlog
+from ruamel import yaml
+from ruamel.yaml.scalarstring import LiteralScalarString
+
+import rasa.cli.utils
+import rasa.shared.utils.cli
+from rasa.shared.constants import (
+    DEFAULT_CONFIG_PATH,
+    DEFAULT_DATA_PATH,
+    DEFAULT_ENDPOINTS_PATH,
+)
+from rasa.shared.core.flows.yaml_flows_io import FlowsList
+from rasa.shared.nlu.training_data.training_data import (
+    DEFAULT_TRAINING_DATA_OUTPUT_PATH,
+)
+from rasa.shared.utils.yaml import read_yaml, write_yaml
+from rasa.studio.config import StudioConfig
+from rasa.studio.constants import DOMAIN_FILENAME
+from rasa.studio.data_handler import StudioDataHandler
+from rasa.studio.pull.data import _dump_flows_as_separate_files
+
+structlogger = structlog.get_logger()
+
+
+def handle_download(args: argparse.Namespace) -> None:
+    """Download an assistant from Studio and store it in `<assistant_name>/`.
+
+    Args:
+        args: The command line arguments.
+    """
+    assistant_name = args.assistant_name
+    target_root = _prepare_target_directory(assistant_name)
+
+    handler = StudioDataHandler(
+        studio_config=StudioConfig.read_config(), assistant_name=assistant_name
+    )
+    handler.request_all_data()
+
+    _handle_config(handler, target_root)
+    _handle_endpoints(handler, target_root)
+    _handle_domain(handler, target_root)
+    _handle_data(handler, target_root)
+    structlogger.info(
+        "studio.download.success",
+        event_info=f"Downloaded assistant '{assistant_name}' from Studio.",
+        assistant_name=assistant_name,
+    )
+    rasa.shared.utils.cli.print_success(
+        f"Downloaded assistant '{assistant_name}' from Studio."
+    )
+
+
+def _prepare_target_directory(assistant_name: str) -> Path:
+    """Create (or overwrite) the directory where everything is stored.
+
+    Args:
+        assistant_name: The name of the assistant to download.
+
+    Returns:
+        The path to the target directory where the assistant will be stored.
+    """
+    target_root = Path(assistant_name)
+
+    if target_root.exists():
+        overwrite = questionary.confirm(
+            f"Directory '{assistant_name}' already exists. Overwrite it?"
+        ).ask()
+        if not overwrite:
+            rasa.shared.utils.cli.print_error_and_exit("Download cancelled.")
+
+        shutil.rmtree(target_root)
+
+    target_root.mkdir(parents=True, exist_ok=True)
+    return target_root
+
+
+def _handle_config(handler: StudioDataHandler, root: Path) -> None:
+    """Download and persist the assistant’s config file.
+
+    Args:
+        handler: The data handler to retrieve the config from.
+        root: The root directory where the config file will be stored.
+    """
+    config_data = handler.get_config()
+    if not config_data:
+        rasa.shared.utils.cli.print_error_and_exit("No config data found.")
+
+    config_path = root / DEFAULT_CONFIG_PATH
+    config_path.write_text(config_data, encoding="utf-8")
+
+
+def _handle_endpoints(handler: StudioDataHandler, root: Path) -> None:
+    """Download and persist the assistant’s endpoints file.
+
+    Args:
+        handler: The data handler to retrieve the endpoints from.
+        root: The root directory where the endpoints file will be stored.
+    """
+    endpoints_data = handler.get_endpoints()
+    if not endpoints_data:
+        rasa.shared.utils.cli.print_error_and_exit("No endpoints data found.")
+
+    endpoints_path = root / DEFAULT_ENDPOINTS_PATH
+    endpoints_path.write_text(endpoints_data, encoding="utf-8")
+
+
+def _handle_domain(handler: StudioDataHandler, root: Path) -> None:
+    """Persist the assistant’s domain file.
+
+    Args:
+        handler: The data handler to retrieve the domain from.
+        root: The root directory where the domain file will be stored.
+    """
+    domain_yaml = handler.domain
+    data = read_yaml(domain_yaml)
+    target = root / DOMAIN_FILENAME
+    write_yaml(
+        data=data,
+        target=target,
+        should_preserve_key_order=True,
+    )
+
+
+def _handle_data(handler: StudioDataHandler, root: Path) -> None:
+    """Persist NLU data and flows.
+
+    Args:
+        handler: The data handler to retrieve the NLU data and flows from.
+        root: The root directory where the NLU data and flows will be stored.
+    """
+    data_path = root / DEFAULT_DATA_PATH
+    data_path.mkdir(parents=True, exist_ok=True)
+
+    if handler.has_nlu():
+        nlu_yaml = handler.nlu
+        nlu_data = read_yaml(nlu_yaml)
+        if nlu_data.get("nlu"):
+            pretty_write_nlu_yaml(
+                nlu_data, data_path / DEFAULT_TRAINING_DATA_OUTPUT_PATH
+            )
+
+    if handler.has_flows():
+        flows_yaml = handler.flows
+        data = read_yaml(flows_yaml)
+        flows_data = data.get("flows", {})
+        flows_list = FlowsList.from_json(flows_data)
+        _dump_flows_as_separate_files(flows_list.underlying_flows, data_path)
+
+
+def pretty_write_nlu_yaml(data: Dict, file: Path) -> None:
+    """Writes the NLU YAML in a pretty way."""
+    dumper = yaml.YAML()
+    if nlu_data := data.get("nlu"):
+        for item in nlu_data:
+            if item.get("examples"):
+                item["examples"] = LiteralScalarString(item["examples"])
+    with file.open("w", encoding="utf-8") as outfile:
+        dumper.dump(data, outfile)
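
The module's public entry point is handle_download, presumably wired up by the reworked rasa/cli/studio/download.py. A minimal sketch of invoking it directly; the namespace below is hypothetical and omits whatever extra flags the real parser supplies:

    import argparse

    from rasa.studio.download import handle_download

    # Hypothetical namespace mirroring `rasa studio download my_assistant`.
    args = argparse.Namespace(assistant_name="my_assistant")

    # Writes config.yml, endpoints.yml, domain.yml, and data/ under ./my_assistant/.
    handle_download(args)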
rasa/studio/link.py CHANGED
@@ -167,7 +167,7 @@ def handle_link(args: argparse.Namespace) -> None:
     Args:
         args: The command line arguments.
     """
-    assistant_name: Text = args.assistant_name[0]
+    assistant_name: Text = args.assistant_name
     studio_cfg = get_studio_config()
     assistant_exists = _ensure_assistant_exists(assistant_name, studio_cfg, args)
     if not assistant_exists:
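
The dropped [0] indexing suggests the assistant_name positional is no longer declared with nargs=1, which wraps the parsed value in a one-element list. A small sketch of the difference, with a hypothetical parser:

    import argparse

    parser = argparse.ArgumentParser()
    # With nargs=1 the value arrives as ["my_assistant"], hence the old
    # `args.assistant_name[0]`. A plain positional (assumed to match the new
    # code) yields the bare string directly.
    parser.add_argument("assistant_name")

    args = parser.parse_args(["my_assistant"])
    assert args.assistant_name == "my_assistant"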
rasa/studio/{download/flows.py → pull/data.py} RENAMED
@@ -3,101 +3,16 @@ from pathlib import Path
 from typing import Any, Dict, List, Set, Text
 
 from rasa.shared.core.flows import Flow
-from rasa.shared.core.flows.flow_step_links import StaticFlowStepLink
 from rasa.shared.core.flows.flows_list import FlowsList
 from rasa.shared.core.flows.yaml_flows_io import YAMLFlowsReader, YamlFlowsWriter
 from rasa.shared.importers.importer import TrainingDataImporter
 from rasa.shared.utils.yaml import read_yaml
 from rasa.studio.constants import STUDIO_NLU_FILENAME
-from rasa.studio.data_handler import StudioDataHandler
 from rasa.utils.mapper import RasaPrimitiveStorageMapper
 
 logger = logging.getLogger(__name__)
 
-STUDIO_FLOWS_DIR_NAME = "studio_flows"
-
-
-def merge_flows_with_overwrite(
-    data_path: Path,
-    handler: Any,
-    data_from_studio: TrainingDataImporter,
-    data_local: TrainingDataImporter,
-    mapper: RasaPrimitiveStorageMapper,
-) -> None:
-    """
-    Merges flows data from a file or directory when overwrite is enabled.
-
-    Args:
-        data_path: List of paths to the training data.
-        handler: The StudioDataHandler instance.
-        data_from_studio: The TrainingDataImporter instance for Studio data.
-        data_local: The TrainingDataImporter instance for local data.
-        mapper: The RasaPrimitiveStorageMapper instance for mapping.
-    """
-    if data_path.is_file():
-        merge_training_data_file(handler, data_from_studio, data_local, data_path)
-    elif data_path.is_dir():
-        merge_training_data_dir(
-            handler, data_from_studio, data_local, data_path, mapper
-        )
-    else:
-        raise ValueError("Provided data path is neither a file nor a directory.")
-
-
-def merge_training_data_file(
-    handler: StudioDataHandler,
-    data_from_studio: TrainingDataImporter,
-    data_local: TrainingDataImporter,
-    file_path: Path,
-) -> None:
-    """
-    Merges NLU and flows data when training data is stored in a single file.
-
-    Args:
-        handler: The StudioDataHandler instance.
-        data_from_studio: The TrainingDataImporter instance for Studio data.
-        data_local: The TrainingDataImporter instance for local data.
-        file_path: The path to the training data file.
-    """
-    if handler.has_nlu():
-        nlu_data_merged = data_from_studio.get_nlu_data().merge(
-            data_local.get_nlu_data()
-        )
-        nlu_data_merged.persist_nlu(file_path)
-
-    if handler.has_flows():
-        flows_data_merged = data_from_studio.get_user_flows().merge(
-            data_local.get_user_flows()
-        )
-        YamlFlowsWriter.dump(
-            flows=flows_data_merged.underlying_flows,
-            filename=file_path,
-            should_clean_json=True,
-        )
-
-
-def merge_training_data_dir(
-    handler: StudioDataHandler,
-    data_from_studio: TrainingDataImporter,
-    data_local: TrainingDataImporter,
-    data_path: Path,
-    mapper: RasaPrimitiveStorageMapper,
-) -> None:
-    """
-    Merges NLU and flows data when training data is stored in a directory.
-
-    Args:
-        handler: The StudioDataHandler instance.
-        data_from_studio: The TrainingDataImporter instance for Studio data.
-        data_local: The TrainingDataImporter instance for local data.
-        data_path: The path to the training data directory.
-        mapper: The RasaPrimitiveStorageMapper instance for mapping.
-    """
-    if handler.has_nlu():
-        merge_nlu_in_directory(data_from_studio, data_local, data_path, mapper)
-
-    if handler.has_flows():
-        merge_flows_in_directory(data_from_studio, data_path, mapper)
+STUDIO_FLOWS_DIR_NAME = "flows"
 
 
 def merge_nlu_in_directory(
@@ -116,7 +31,7 @@ def merge_nlu_in_directory(
         data_path: The path to the training data directory.
         mapper: The RasaPrimitiveStorageMapper instance for mapping.
     """
-    from rasa.studio.download.download import pretty_write_nlu_yaml
+    from rasa.studio.download import pretty_write_nlu_yaml
 
     nlu_data = data_from_studio.get_nlu_data()
     nlu_file_path = get_nlu_path(data_path, data_local, mapper)
@@ -154,29 +69,6 @@ def get_nlu_path(
     return _select_path(nlu_paths, "nlu", base_path, STUDIO_NLU_FILENAME)
 
 
-def get_flows_path(
-    base_path: Path,
-    data_local: TrainingDataImporter,
-    mapper: RasaPrimitiveStorageMapper,
-) -> Path:
-    """Determines where flows data should be stored.
-
-    Args:
-        base_path: The base path for the training data.
-        data_local: The TrainingDataImporter instance for local data.
-        mapper: The RasaPrimitiveStorageMapper instance for mapping.
-
-    Returns:
-        The path where flows data should be stored.
-    """
-    flow_paths = set()
-    for flow in data_local.get_user_flows().underlying_flows:
-        for p in mapper.get_file(flow.id, "flows").get("training", []):
-            flow_paths.add(p)
-
-    return _select_path(flow_paths, "flows", base_path, "flows.yml")
-
-
 def merge_flows_in_directory(
     data_from_studio: TrainingDataImporter,
     data_path: Path,
@@ -298,27 +190,6 @@ def _dump_flows_as_separate_files(flows: List[Any], data_path: Path) -> None:
     )
 
 
-def strip_default_next_references(flows: FlowsList) -> FlowsList:
-    """Strips default next references from flows.
-
-    Args:
-        flows: The FlowsList instance containing the flows.
-
-    Returns:
-        An updated FlowsList instance with default next references removed.
-    """
-    default_step_ids = [step.default_id for flow in flows for step in flow.steps]
-    for flow in flows:
-        for step in flow.steps:
-            if (
-                step.next.links
-                and isinstance(step.next.links[0], StaticFlowStepLink)
-                and step.next.links[0].target in default_step_ids
-            ):
-                step.next.links = []
-    return flows
-
-
 def _select_path(
     paths: Set[Path], primitive_type: str, default_path: Path, default: str
 ) -> Path:
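
Note the constant rename folded into the first hunk: STUDIO_FLOWS_DIR_NAME shrinks from "studio_flows" to "flows". Assuming the constant still names the sub-directory that merged flows are written into, pulled flows now land under a plainer path:

    from pathlib import Path

    from rasa.studio.pull.data import STUDIO_FLOWS_DIR_NAME

    # dev9 would have produced data/studio_flows/; dev11 produces data/flows/
    # (the directory layout here is inferred from the renamed constant).
    flows_dir = Path("data") / STUDIO_FLOWS_DIR_NAME
    assert flows_dir == Path("data/flows")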
rasa/studio/{download → pull}/domains.py RENAMED
@@ -10,7 +10,7 @@ from rasa.studio.constants import STUDIO_DOMAIN_FILENAME
 logger = logging.getLogger(__name__)
 
 
-def merge_domain_with_overwrite(
+def merge_domain(
     data_from_studio: TrainingDataImporter,
     data_local: TrainingDataImporter,
     domain_path: Path,
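
The rename drops the _with_overwrite suffix without touching the signature, so callers only need to update the import and name. A sketch of calling the renamed helper; building the importers via TrainingDataImporter.load_from_config is an assumption about how callers obtain them:

    from pathlib import Path

    from rasa.shared.importers.importer import TrainingDataImporter
    from rasa.studio.pull.domains import merge_domain

    # Hypothetical importers for Studio-downloaded and local training data.
    studio_data = TrainingDataImporter.load_from_config(
        config_path="config.yml", domain_path="studio_domain.yml"
    )
    local_data = TrainingDataImporter.load_from_config(
        config_path="config.yml", domain_path="domain.yml"
    )

    merge_domain(studio_data, local_data, Path("domain.yml"))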