rasa-pro 3.13.0.dev10__py3-none-any.whl → 3.13.0.dev11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rasa/cli/studio/download.py +3 -9
- rasa/cli/studio/link.py +1 -2
- rasa/cli/studio/pull.py +3 -2
- rasa/cli/studio/push.py +1 -1
- rasa/cli/studio/train.py +0 -1
- rasa/core/policies/enterprise_search_policy.py +1 -4
- rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2 +6 -5
- rasa/core/utils.py +11 -2
- rasa/dialogue_understanding/commands/__init__.py +4 -0
- rasa/dialogue_understanding/generator/command_generator.py +11 -1
- rasa/dialogue_understanding/processor/command_processor.py +5 -5
- rasa/shared/utils/constants.py +0 -3
- rasa/shared/utils/pykwalify_extensions.py +0 -9
- rasa/studio/constants.py +1 -0
- rasa/studio/download.py +164 -0
- rasa/studio/link.py +1 -1
- rasa/studio/{download/flows.py → pull/data.py} +2 -131
- rasa/studio/{download → pull}/domains.py +1 -1
- rasa/studio/pull/pull.py +235 -0
- rasa/studio/push.py +5 -0
- rasa/studio/train.py +1 -1
- rasa/version.py +1 -1
- {rasa_pro-3.13.0.dev10.dist-info → rasa_pro-3.13.0.dev11.dist-info}/METADATA +1 -1
- {rasa_pro-3.13.0.dev10.dist-info → rasa_pro-3.13.0.dev11.dist-info}/RECORD +28 -28
- rasa/studio/download/download.py +0 -416
- rasa/studio/pull.py +0 -94
- /rasa/studio/{download → pull}/__init__.py +0 -0
- {rasa_pro-3.13.0.dev10.dist-info → rasa_pro-3.13.0.dev11.dist-info}/NOTICE +0 -0
- {rasa_pro-3.13.0.dev10.dist-info → rasa_pro-3.13.0.dev11.dist-info}/WHEEL +0 -0
- {rasa_pro-3.13.0.dev10.dist-info → rasa_pro-3.13.0.dev11.dist-info}/entry_points.txt +0 -0
rasa/cli/studio/download.py
CHANGED
@@ -4,11 +4,12 @@ from typing import List
 from rasa.cli import SubParsersAction
 from rasa.cli.arguments.default_arguments import (
     add_config_param,
+    add_data_param,
+    add_domain_param,
     add_endpoint_param,
 )
-from rasa.cli.arguments.train import add_data_param, add_domain_param
 from rasa.shared.constants import DEFAULT_CONFIG_PATH, DEFAULT_ENDPOINTS_PATH
-from rasa.studio.download
+from rasa.studio.download import handle_download


 def add_subparser(
@@ -49,13 +50,6 @@ def set_studio_download_arguments(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         "assistant_name",
         default=None,
-        nargs=1,
         type=str,
         help="Name of the assistant on Rasa Studio",
     )
-
-    parser.add_argument(
-        "--overwrite",
-        action="store_true",
-        help="Overwrite local data with data from Rasa Studio",
-    )
rasa/cli/studio/link.py
CHANGED
@@ -2,7 +2,7 @@ import argparse
 from typing import List, Text

 from rasa.cli import SubParsersAction
-from rasa.cli.
+from rasa.cli.arguments.default_arguments import (
     add_config_param,
     add_data_param,
     add_domain_param,
@@ -37,7 +37,6 @@ def add_subparser(

     link_parser.add_argument(
         "assistant_name",
-        nargs=1,
         type=str,
         help="Name of the assistant in Rasa Studio.",
     )
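Note: with nargs=1 removed from both parsers above, argparse stores the positional assistant_name as a plain string rather than a one-element list, so downstream handlers receive the name directly. A minimal illustrative sketch (the parser below is a stand-in, not the actual Rasa CLI wiring):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("assistant_name", default=None, type=str)

args = parser.parse_args(["my_assistant"])
# Without nargs=1 the value is a plain string ...
assert args.assistant_name == "my_assistant"
# ... whereas with nargs=1 it would have been ["my_assistant"],
# forcing callers to index into the list.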
rasa/cli/studio/pull.py
CHANGED
@@ -4,15 +4,16 @@ from typing import List, Text
 from rasa.cli import SubParsersAction
 from rasa.cli.arguments.default_arguments import (
     add_config_param,
+    add_data_param,
+    add_domain_param,
     add_endpoint_param,
 )
-from rasa.cli.studio.upload import add_data_param, add_domain_param
 from rasa.shared.constants import (
     DEFAULT_CONFIG_PATH,
     DEFAULT_DOMAIN_PATH,
     DEFAULT_ENDPOINTS_PATH,
 )
-from rasa.studio.pull import (
+from rasa.studio.pull.pull import (
     handle_pull,
     handle_pull_config,
     handle_pull_endpoints,
rasa/cli/studio/push.py
CHANGED
rasa/cli/studio/train.py
CHANGED
rasa/core/policies/enterprise_search_policy.py
CHANGED
@@ -135,10 +135,7 @@ DEFAULT_ENTERPRISE_SEARCH_PROMPT_WITH_RELEVANCY_CHECK_AND_CITATION_TEMPLATE = (
     )
 )

-
-_ENTERPRISE_SEARCH_ANSWER_NOT_RELEVANT_PATTERN = re.compile(
-    r"\[NO_RELEVANT_ANSWER_FOUND\]"
-)
+_ENTERPRISE_SEARCH_ANSWER_NOT_RELEVANT_PATTERN = re.compile(r"\[NO_RAG_ANSWER\]")


 class VectorStoreConnectionError(RasaException):
rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2
CHANGED
@@ -1,5 +1,9 @@
-
-
+Based on the provided documents and the recent conversation context, answer the following question.
+Before responding, ensure the answer is directly supported by the documents or context.
+Do not make assumptions or infer beyond the given information.
+Only answer if you are more than 80% confident that the response is fully supported.
+If the answer cannot be determined, respond with: [NO_RAG_ANSWER]
+
 ### Relevant Documents
 Use the following documents to answer the question:
 {% for doc in docs %}
@@ -57,7 +61,4 @@ Avoid speculating or making assumptions beyond the given information and keep yo
 If you are unable to find an answer in the given relevant documents, do not cite sources from elsewhere in the conversation context.
 {% endif %}

-{% if check_relevancy %}
-If answer is not relevant output: "[NO_RELEVANT_ANSWER_FOUND]"
-{% endif %}
 Your answer:
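The policy change and the template change above are two halves of the same switch: the prompt now instructs the model to emit [NO_RAG_ANSWER] when the documents do not support an answer, and the policy regex is updated to match that marker instead of [NO_RELEVANT_ANSWER_FOUND]. A minimal sketch of how the marker check behaves (the sample strings are hypothetical model outputs, not taken from this diff):

import re

_ENTERPRISE_SEARCH_ANSWER_NOT_RELEVANT_PATTERN = re.compile(r"\[NO_RAG_ANSWER\]")

# an answer containing the marker is treated as "no relevant answer found"
assert _ENTERPRISE_SEARCH_ANSWER_NOT_RELEVANT_PATTERN.search("[NO_RAG_ANSWER]")
# an ordinary grounded answer does not match and is returned to the user
assert not _ENTERPRISE_SEARCH_ANSWER_NOT_RELEVANT_PATTERN.search("The policy covers X.")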
rasa/core/utils.py
CHANGED
@@ -318,16 +318,25 @@ def should_force_slot_filling(
         and the name of the slot if applicable.
     """
     from rasa.dialogue_understanding.processor.command_processor import (
+        find_updated_flows,
         get_current_collect_step,
     )

     if tracker is None:
-        structlogger.
-            "slot.force_slot_filling.
+        structlogger.debug(
+            "slot.force_slot_filling.no_found_tracker",
             event_info="Tracker is None. Cannot force slot filling.",
         )
         return False, None

+    updated_flows = find_updated_flows(tracker, flows)
+    if updated_flows:
+        structlogger.debug(
+            "slot.force_slot_filling.running_flows_were_updated",
+            updated_flow_ids=updated_flows,
+        )
+        return False, None
+
     stack = tracker.stack
     step = get_current_collect_step(stack, flows)
     if step is None or not step.force_slot_filling:
rasa/dialogue_understanding/commands/__init__.py
CHANGED
@@ -16,6 +16,9 @@ from rasa.dialogue_understanding.commands.error_command import ErrorCommand
 from rasa.dialogue_understanding.commands.free_form_answer_command import (
     FreeFormAnswerCommand,
 )
+from rasa.dialogue_understanding.commands.handle_code_change_command import (
+    HandleCodeChangeCommand,
+)
 from rasa.dialogue_understanding.commands.human_handoff_command import (
     HumanHandoffCommand,
 )
@@ -49,6 +52,7 @@ __all__ = [
     "SetSlotCommand",
     "StartFlowCommand",
     "HumanHandoffCommand",
+    "HandleCodeChangeCommand",
     "CorrectSlotsCommand",
     "CorrectedSlot",
     "ErrorCommand",
rasa/dialogue_understanding/generator/command_generator.py
CHANGED
@@ -8,6 +8,7 @@ from rasa.dialogue_understanding.commands import (
     Command,
     CorrectSlotsCommand,
     ErrorCommand,
+    HandleCodeChangeCommand,
     SetSlotCommand,
     StartFlowCommand,
 )
@@ -398,15 +399,24 @@ class CommandGenerator:
             The filtered commands.
         """
         from rasa.dialogue_understanding.processor.command_processor import (
+            find_updated_flows,
             get_current_collect_step,
         )

         if tracker is None:
-            structlogger.
+            structlogger.debug(
                 "command_generator.filter_commands_during_force_slot_filling.tracker_not_found",
             )
             return commands

+        updated_flows = find_updated_flows(tracker, available_flows)
+        if updated_flows:
+            structlogger.debug(
+                "command_generator.filter_commands_during_force_slot_filling.running_flows_were_updated",
+                updated_flow_ids=updated_flows,
+            )
+            return [HandleCodeChangeCommand()]
+
         stack = tracker.stack
         step = get_current_collect_step(stack, available_flows)

rasa/dialogue_understanding/processor/command_processor.py
CHANGED
@@ -214,18 +214,18 @@ def execute_commands(
     commands: List[Command] = get_commands_from_tracker(tracker)
     original_tracker = tracker.copy()

-    commands = clean_up_commands(
-        commands, tracker, all_flows, execution_context, story_graph, domain
-    )
-
     updated_flows = find_updated_flows(tracker, all_flows)
     if updated_flows:
-        #
+        # if there are updated flows, we need to handle the code change
         structlogger.debug(
             "command_processor.execute_commands.running_flows_were_updated",
             updated_flow_ids=updated_flows,
         )
         commands = [HandleCodeChangeCommand()]
+    else:
+        commands = clean_up_commands(
+            commands, tracker, all_flows, execution_context, story_graph, domain
+        )

     # store current flow hashes if they changed
     new_hashes = calculate_flow_fingerprints(all_flows)
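The three changes above (the re-export in commands/__init__.py, the generator filter, and execute_commands) hinge on the same guard: when find_updated_flows reports that a running flow was edited, normal command handling is replaced by a single HandleCodeChangeCommand, and clean_up_commands only runs otherwise. With the __all__ addition, the command is importable from the package root. A condensed sketch of the guard (argument lists shortened from the diff):

from rasa.dialogue_understanding.commands import HandleCodeChangeCommand

# condensed from execute_commands above; the real calls take more arguments
updated_flows = find_updated_flows(tracker, all_flows)
if updated_flows:
    # a running flow definition changed mid-conversation
    commands = [HandleCodeChangeCommand()]
else:
    commands = clean_up_commands(commands, tracker, all_flows)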
rasa/shared/utils/constants.py
CHANGED
@@ -2,9 +2,6 @@ DEFAULT_ENCODING = "utf-8"

 READ_YAML_FILE_CACHE_MAXSIZE_ENV_VAR = "READ_YAML_FILE_CACHE_MAXSIZE"
 DEFAULT_READ_YAML_FILE_CACHE_MAXSIZE = 256
-RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME = (
-    "RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS"
-)

 LOG_COMPONENT_SOURCE_METHOD_INIT = "init"
 LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON = "fingerprint_addon"
rasa/shared/utils/pykwalify_extensions.py
CHANGED
@@ -8,11 +8,6 @@ from typing import Any, Dict, List, Text, Union

 from pykwalify.errors import SchemaError

-from rasa.shared.utils.constants import (
-    RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME,
-)
-from rasa.utils.beta import ensure_beta_feature_is_enabled
-

 def require_response_keys(
     responses: List[Dict[Text, Any]], _: Dict, __: Text
@@ -31,10 +26,6 @@ def require_response_keys(

         conditions = response.get("condition", [])
         if isinstance(conditions, str):
-            ensure_beta_feature_is_enabled(
-                "predicates in response conditions",
-                RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME,
-            )
             continue

         for condition in conditions:
rasa/studio/constants.py
CHANGED
@@ -14,6 +14,7 @@ RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV = "RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"

 STUDIO_NLU_FILENAME = "studio_nlu.yml"
 STUDIO_DOMAIN_FILENAME = "studio_domain.yml"
+DOMAIN_FILENAME = "domain.yml"
 STUDIO_FLOWS_FILENAME = "studio_flows.yml"
 STUDIO_CONFIG_FILENAME = "studio_config.yml"
 STUDIO_ENDPOINTS_FILENAME = "studio_endpoints.yml"
rasa/studio/download.py
ADDED
@@ -0,0 +1,164 @@
+import argparse
+import shutil
+from pathlib import Path
+from typing import Dict
+
+import questionary
+import structlog
+from ruamel import yaml
+from ruamel.yaml.scalarstring import LiteralScalarString
+
+import rasa.cli.utils
+import rasa.shared.utils.cli
+from rasa.shared.constants import (
+    DEFAULT_CONFIG_PATH,
+    DEFAULT_DATA_PATH,
+    DEFAULT_ENDPOINTS_PATH,
+)
+from rasa.shared.core.flows.yaml_flows_io import FlowsList
+from rasa.shared.nlu.training_data.training_data import (
+    DEFAULT_TRAINING_DATA_OUTPUT_PATH,
+)
+from rasa.shared.utils.yaml import read_yaml, write_yaml
+from rasa.studio.config import StudioConfig
+from rasa.studio.constants import DOMAIN_FILENAME
+from rasa.studio.data_handler import StudioDataHandler
+from rasa.studio.pull.data import _dump_flows_as_separate_files
+
+structlogger = structlog.get_logger()
+
+
+def handle_download(args: argparse.Namespace) -> None:
+    """Download an assistant from Studio and store it in `<assistant_name>/`.
+
+    Args:
+        args: The command line arguments.
+    """
+    assistant_name = args.assistant_name
+    target_root = _prepare_target_directory(assistant_name)
+
+    handler = StudioDataHandler(
+        studio_config=StudioConfig.read_config(), assistant_name=assistant_name
+    )
+    handler.request_all_data()
+
+    _handle_config(handler, target_root)
+    _handle_endpoints(handler, target_root)
+    _handle_domain(handler, target_root)
+    _handle_data(handler, target_root)
+    structlogger.info(
+        "studio.download.success",
+        event_info=f"Downloaded assistant '{assistant_name}' from Studio.",
+        assistant_name=assistant_name,
+    )
+    rasa.shared.utils.cli.print_success(
+        f"Downloaded assistant '{assistant_name}' from Studio."
+    )
+
+
+def _prepare_target_directory(assistant_name: str) -> Path:
+    """Create (or overwrite) the directory where everything is stored.
+
+    Args:
+        assistant_name: The name of the assistant to download.
+
+    Returns:
+        The path to the target directory where the assistant will be stored.
+    """
+    target_root = Path(assistant_name)
+
+    if target_root.exists():
+        overwrite = questionary.confirm(
+            f"Directory '{assistant_name}' already exists. Overwrite it?"
+        ).ask()
+        if not overwrite:
+            rasa.shared.utils.cli.print_error_and_exit("Download cancelled.")
+
+        shutil.rmtree(target_root)
+
+    target_root.mkdir(parents=True, exist_ok=True)
+    return target_root
+
+
+def _handle_config(handler: StudioDataHandler, root: Path) -> None:
+    """Download and persist the assistant’s config file.
+
+    Args:
+        handler: The data handler to retrieve the config from.
+        root: The root directory where the config file will be stored.
+    """
+    config_data = handler.get_config()
+    if not config_data:
+        rasa.shared.utils.cli.print_error_and_exit("No config data found.")
+
+    config_path = root / DEFAULT_CONFIG_PATH
+    config_path.write_text(config_data, encoding="utf-8")
+
+
+def _handle_endpoints(handler: StudioDataHandler, root: Path) -> None:
+    """Download and persist the assistant’s endpoints file.
+
+    Args:
+        handler: The data handler to retrieve the endpoints from.
+        root: The root directory where the endpoints file will be stored.
+    """
+    endpoints_data = handler.get_endpoints()
+    if not endpoints_data:
+        rasa.shared.utils.cli.print_error_and_exit("No endpoints data found.")
+
+    endpoints_path = root / DEFAULT_ENDPOINTS_PATH
+    endpoints_path.write_text(endpoints_data, encoding="utf-8")
+
+
+def _handle_domain(handler: StudioDataHandler, root: Path) -> None:
+    """Persist the assistant’s domain file.
+
+    Args:
+        handler: The data handler to retrieve the domain from.
+        root: The root directory where the domain file will be stored.
+    """
+    domain_yaml = handler.domain
+    data = read_yaml(domain_yaml)
+    target = root / DOMAIN_FILENAME
+    write_yaml(
+        data=data,
+        target=target,
+        should_preserve_key_order=True,
+    )
+
+
+def _handle_data(handler: StudioDataHandler, root: Path) -> None:
+    """Persist NLU data and flows.
+
+    Args:
+        handler: The data handler to retrieve the NLU data and flows from.
+        root: The root directory where the NLU data and flows will be stored.
+    """
+    data_path = root / DEFAULT_DATA_PATH
+    data_path.mkdir(parents=True, exist_ok=True)
+
+    if handler.has_nlu():
+        nlu_yaml = handler.nlu
+        nlu_data = read_yaml(nlu_yaml)
+        if nlu_data.get("nlu"):
+            pretty_write_nlu_yaml(
+                nlu_data, data_path / DEFAULT_TRAINING_DATA_OUTPUT_PATH
+            )
+
+    if handler.has_flows():
+        flows_yaml = handler.flows
+        data = read_yaml(flows_yaml)
+        flows_data = data.get("flows", {})
+        flows_list = FlowsList.from_json(flows_data)
+        _dump_flows_as_separate_files(flows_list.underlying_flows, data_path)
+
+
+def pretty_write_nlu_yaml(data: Dict, file: Path) -> None:
+    """Writes the NLU YAML in a pretty way."""
+    dumper = yaml.YAML()
+    if nlu_data := data.get("nlu"):
+        for item in nlu_data:
+            if item.get("examples"):
+                item["examples"] = LiteralScalarString(item["examples"])
+    with file.open("w", encoding="utf-8") as outfile:
+        dumper.dump(data, outfile)
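This new module is what the slimmed-down rasa/cli/studio/download.py delegates to via handle_download. A hedged sketch of calling it directly with a hand-built namespace (in practice the Rasa CLI constructs args from its studio download subparser; the assistant name below is made up):

import argparse

from rasa.studio.download import handle_download

args = argparse.Namespace(assistant_name="my_assistant")  # illustrative value
handle_download(args)
# Writes ./my_assistant/ containing config.yml, endpoints.yml, domain.yml and data/,
# asking for confirmation before overwriting an existing directory.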
rasa/studio/link.py
CHANGED
@@ -167,7 +167,7 @@ def handle_link(args: argparse.Namespace) -> None:
     Args:
         args: The command line arguments.
     """
-    assistant_name: Text = args.assistant_name
+    assistant_name: Text = args.assistant_name
     studio_cfg = get_studio_config()
     assistant_exists = _ensure_assistant_exists(assistant_name, studio_cfg, args)
     if not assistant_exists:
rasa/studio/{download/flows.py → pull/data.py}
RENAMED
@@ -3,101 +3,16 @@ from pathlib import Path
 from typing import Any, Dict, List, Set, Text

 from rasa.shared.core.flows import Flow
-from rasa.shared.core.flows.flow_step_links import StaticFlowStepLink
 from rasa.shared.core.flows.flows_list import FlowsList
 from rasa.shared.core.flows.yaml_flows_io import YAMLFlowsReader, YamlFlowsWriter
 from rasa.shared.importers.importer import TrainingDataImporter
 from rasa.shared.utils.yaml import read_yaml
 from rasa.studio.constants import STUDIO_NLU_FILENAME
-from rasa.studio.data_handler import StudioDataHandler
 from rasa.utils.mapper import RasaPrimitiveStorageMapper

 logger = logging.getLogger(__name__)

-STUDIO_FLOWS_DIR_NAME = "
-
-
-def merge_flows_with_overwrite(
-    data_path: Path,
-    handler: Any,
-    data_from_studio: TrainingDataImporter,
-    data_local: TrainingDataImporter,
-    mapper: RasaPrimitiveStorageMapper,
-) -> None:
-    """
-    Merges flows data from a file or directory when overwrite is enabled.
-
-    Args:
-        data_path: List of paths to the training data.
-        handler: The StudioDataHandler instance.
-        data_from_studio: The TrainingDataImporter instance for Studio data.
-        data_local: The TrainingDataImporter instance for local data.
-        mapper: The RasaPrimitiveStorageMapper instance for mapping.
-    """
-    if data_path.is_file():
-        merge_training_data_file(handler, data_from_studio, data_local, data_path)
-    elif data_path.is_dir():
-        merge_training_data_dir(
-            handler, data_from_studio, data_local, data_path, mapper
-        )
-    else:
-        raise ValueError("Provided data path is neither a file nor a directory.")
-
-
-def merge_training_data_file(
-    handler: StudioDataHandler,
-    data_from_studio: TrainingDataImporter,
-    data_local: TrainingDataImporter,
-    file_path: Path,
-) -> None:
-    """
-    Merges NLU and flows data when training data is stored in a single file.
-
-    Args:
-        handler: The StudioDataHandler instance.
-        data_from_studio: The TrainingDataImporter instance for Studio data.
-        data_local: The TrainingDataImporter instance for local data.
-        file_path: The path to the training data file.
-    """
-    if handler.has_nlu():
-        nlu_data_merged = data_from_studio.get_nlu_data().merge(
-            data_local.get_nlu_data()
-        )
-        nlu_data_merged.persist_nlu(file_path)
-
-    if handler.has_flows():
-        flows_data_merged = data_from_studio.get_user_flows().merge(
-            data_local.get_user_flows()
-        )
-        YamlFlowsWriter.dump(
-            flows=flows_data_merged.underlying_flows,
-            filename=file_path,
-            should_clean_json=True,
-        )
-
-
-def merge_training_data_dir(
-    handler: StudioDataHandler,
-    data_from_studio: TrainingDataImporter,
-    data_local: TrainingDataImporter,
-    data_path: Path,
-    mapper: RasaPrimitiveStorageMapper,
-) -> None:
-    """
-    Merges NLU and flows data when training data is stored in a directory.
-
-    Args:
-        handler: The StudioDataHandler instance.
-        data_from_studio: The TrainingDataImporter instance for Studio data.
-        data_local: The TrainingDataImporter instance for local data.
-        data_path: The path to the training data directory.
-        mapper: The RasaPrimitiveStorageMapper instance for mapping.
-    """
-    if handler.has_nlu():
-        merge_nlu_in_directory(data_from_studio, data_local, data_path, mapper)
-
-    if handler.has_flows():
-        merge_flows_in_directory(data_from_studio, data_path, mapper)
+STUDIO_FLOWS_DIR_NAME = "flows"


 def merge_nlu_in_directory(
@@ -116,7 +31,7 @@ def merge_nlu_in_directory(
         data_path: The path to the training data directory.
         mapper: The RasaPrimitiveStorageMapper instance for mapping.
    """
-    from rasa.studio.download
+    from rasa.studio.download import pretty_write_nlu_yaml

    nlu_data = data_from_studio.get_nlu_data()
    nlu_file_path = get_nlu_path(data_path, data_local, mapper)
@@ -154,29 +69,6 @@ def get_nlu_path(
    return _select_path(nlu_paths, "nlu", base_path, STUDIO_NLU_FILENAME)


-def get_flows_path(
-    base_path: Path,
-    data_local: TrainingDataImporter,
-    mapper: RasaPrimitiveStorageMapper,
-) -> Path:
-    """Determines where flows data should be stored.
-
-    Args:
-        base_path: The base path for the training data.
-        data_local: The TrainingDataImporter instance for local data.
-        mapper: The RasaPrimitiveStorageMapper instance for mapping.
-
-    Returns:
-        The path where flows data should be stored.
-    """
-    flow_paths = set()
-    for flow in data_local.get_user_flows().underlying_flows:
-        for p in mapper.get_file(flow.id, "flows").get("training", []):
-            flow_paths.add(p)
-
-    return _select_path(flow_paths, "flows", base_path, "flows.yml")
-
-
 def merge_flows_in_directory(
     data_from_studio: TrainingDataImporter,
     data_path: Path,
@@ -298,27 +190,6 @@ def _dump_flows_as_separate_files(flows: List[Any], data_path: Path) -> None:
     )


-def strip_default_next_references(flows: FlowsList) -> FlowsList:
-    """Strips default next references from flows.
-
-    Args:
-        flows: The FlowsList instance containing the flows.
-
-    Returns:
-        An updated FlowsList instance with default next references removed.
-    """
-    default_step_ids = [step.default_id for flow in flows for step in flow.steps]
-    for flow in flows:
-        for step in flow.steps:
-            if (
-                step.next.links
-                and isinstance(step.next.links[0], StaticFlowStepLink)
-                and step.next.links[0].target in default_step_ids
-            ):
-                step.next.links = []
-    return flows
-
-
 def _select_path(
     paths: Set[Path], primitive_type: str, default_path: Path, default: str
 ) -> Path:
rasa/studio/{download → pull}/domains.py
RENAMED
@@ -10,7 +10,7 @@ from rasa.studio.constants import STUDIO_DOMAIN_FILENAME
 logger = logging.getLogger(__name__)


-def
+def merge_domain(
     data_from_studio: TrainingDataImporter,
     data_local: TrainingDataImporter,
     domain_path: Path,