rasa-pro 3.12.19__py3-none-any.whl → 3.12.21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rasa/core/channels/voice_stream/asr/azure.py +9 -0
- rasa/core/channels/voice_stream/twilio_media_streams.py +7 -0
- rasa/core/channels/voice_stream/voice_channel.py +40 -9
- rasa/core/policies/enterprise_search_policy.py +196 -72
- rasa/dialogue_understanding/processor/command_processor.py +12 -10
- rasa/e2e_test/e2e_test_coverage_report.py +1 -1
- rasa/llm_fine_tuning/annotation_module.py +43 -11
- rasa/shared/core/constants.py +1 -0
- rasa/shared/core/flows/constants.py +2 -0
- rasa/shared/core/flows/flow.py +129 -13
- rasa/shared/core/flows/flows_list.py +18 -1
- rasa/shared/core/flows/steps/link.py +7 -2
- rasa/version.py +1 -1
- {rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/METADATA +1 -1
- {rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/RECORD +18 -18
- {rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/NOTICE +0 -0
- {rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/WHEEL +0 -0
- {rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/entry_points.txt +0 -0
rasa/core/channels/voice_stream/asr/azure.py CHANGED

@@ -3,6 +3,8 @@ import os
 from dataclasses import dataclass
 from typing import Any, AsyncIterator, Dict, Optional
 
+import structlog
+
 from rasa.core.channels.voice_stream.asr.asr_engine import ASREngine, ASREngineConfig
 from rasa.core.channels.voice_stream.asr.asr_event import (
     ASREvent,
@@ -13,6 +15,8 @@ from rasa.core.channels.voice_stream.audio_bytes import HERTZ, RasaAudioBytes
 from rasa.shared.constants import AZURE_SPEECH_API_KEY_ENV_VAR
 from rasa.shared.exceptions import ConnectionException
 
+logger = structlog.get_logger(__name__)
+
 
 @dataclass
 class AzureASRConfig(ASREngineConfig):
@@ -61,6 +65,11 @@ class AzureASR(ASREngine[AzureASRConfig]):
             and self.config.speech_endpoint is None
         ):
             self.config.speech_region = "eastus"
+            logger.warning(
+                "voice_channel.asr.azure.no_region",
+                message="No speech region configured, using 'eastus' as default",
+                region="eastus",
+            )
         speech_config = speechsdk.SpeechConfig(
             subscription=os.environ[AZURE_SPEECH_API_KEY_ENV_VAR],
             region=self.config.speech_region,
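
The added warning uses structlog's keyword-based event style, an event name plus structured key-value pairs, matching the rest of the voice channel code. A minimal standalone sketch of emitting such a warning, assuming structlog's default configuration rather than Rasa's logging setup:

import structlog

logger = structlog.get_logger(__name__)

# With the default configuration, the key-value pairs are rendered next to the
# event name, roughly:
#   [warning  ] voice_channel.asr.azure.no_region message='No speech region ...' region=eastus
logger.warning(
    "voice_channel.asr.azure.no_region",
    message="No speech region configured, using 'eastus' as default",
    region="eastus",
)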

rasa/core/channels/voice_stream/twilio_media_streams.py CHANGED

@@ -135,6 +135,13 @@ class TwilioMediaStreamsInputChannel(VoiceInputChannel):
     def name(cls) -> str:
         return "twilio_media_streams"
 
+    def get_sender_id(self, call_parameters: CallParameters) -> str:
+        """Get the sender ID for the channel.
+
+        Twilio Media Streams uses the Stream ID as Sender ID because
+        it is required in OutputChannel.send_text_message to send messages."""
+        return call_parameters.stream_id  # type: ignore[return-value]
+
     def channel_bytes_to_rasa_audio_bytes(self, input_bytes: bytes) -> RasaAudioBytes:
         return RasaAudioBytes(base64.b64decode(input_bytes))
 
rasa/core/channels/voice_stream/voice_channel.py CHANGED

@@ -288,6 +288,17 @@ class VoiceInputChannel(InputChannel):
         self.monitor_silence = monitor_silence
         self.tts_cache = TTSCache(tts_config.get("cache_size", 1000))
 
+        logger.info(
+            "voice_channel.initialized",
+            server_url=self.server_url,
+            asr_config=self.asr_config,
+            tts_config=self.tts_config,
+        )
+
+    def get_sender_id(self, call_parameters: CallParameters) -> str:
+        """Get the sender ID for the channel."""
+        return call_parameters.call_id
+
     async def monitor_silence_timeout(self, asr_event_queue: asyncio.Queue) -> None:
         timeout = call_state.silence_timeout
         if not timeout:
@@ -334,9 +345,9 @@ class VoiceInputChannel(InputChannel):
     ) -> None:
         output_channel = self.create_output_channel(channel_websocket, tts_engine)
         message = UserMessage(
-            "/session_start",
-            output_channel,
-            call_parameters
+            text="/session_start",
+            output_channel=output_channel,
+            sender_id=self.get_sender_id(call_parameters),
             input_channel=self.name(),
             metadata=asdict(call_parameters),
         )
@@ -393,6 +404,9 @@ class VoiceInputChannel(InputChannel):
                 await asr_engine.send_audio_chunks(channel_action.audio_bytes)
             elif isinstance(channel_action, EndConversationAction):
                 # end stream event came from the other side
+                await self.handle_disconnect(
+                    channel_websocket, on_new_message, tts_engine, call_parameters
+                )
                 break
 
         async def receive_asr_events() -> None:
@@ -449,9 +463,9 @@ class VoiceInputChannel(InputChannel):
                 call_state.is_user_speaking = False  # type: ignore[attr-defined]
                 output_channel = self.create_output_channel(voice_websocket, tts_engine)
                 message = UserMessage(
-                    e.text,
-                    output_channel,
-                    call_parameters
+                    text=e.text,
+                    output_channel=output_channel,
+                    sender_id=self.get_sender_id(call_parameters),
                     input_channel=self.name(),
                     metadata=asdict(call_parameters),
                 )
@@ -462,10 +476,27 @@ class VoiceInputChannel(InputChannel):
             elif isinstance(e, UserSilence):
                 output_channel = self.create_output_channel(voice_websocket, tts_engine)
                 message = UserMessage(
-                    "/silence_timeout",
-                    output_channel,
-                    call_parameters
+                    text="/silence_timeout",
+                    output_channel=output_channel,
+                    sender_id=self.get_sender_id(call_parameters),
                     input_channel=self.name(),
                     metadata=asdict(call_parameters),
                 )
                 await on_new_message(message)
+
+    async def handle_disconnect(
+        self,
+        channel_websocket: Websocket,
+        on_new_message: Callable[[UserMessage], Awaitable[Any]],
+        tts_engine: TTSEngine,
+        call_parameters: CallParameters,
+    ) -> None:
+        """Handle disconnection from the channel."""
+        output_channel = self.create_output_channel(channel_websocket, tts_engine)
+        message = UserMessage(
+            text="/session_end",
+            output_channel=output_channel,
+            sender_id=self.get_sender_id(call_parameters),
+            input_channel=self.name(),
+        )
+        await on_new_message(message)
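
The functional change in this file is that the sender ID of a voice conversation is now derived per channel: the base VoiceInputChannel uses the call ID, and Twilio Media Streams (previous file) overrides it with the stream ID. A minimal sketch of that override pattern with hypothetical stand-in classes, not Rasa's actual channel classes:

from dataclasses import dataclass
from typing import Optional


@dataclass
class CallParameters:  # hypothetical stand-in for the channel's call metadata
    call_id: str
    stream_id: Optional[str] = None


class VoiceChannel:
    def get_sender_id(self, call_parameters: CallParameters) -> str:
        # default: identify the conversation by the call id
        return call_parameters.call_id


class TwilioMediaStreamsChannel(VoiceChannel):
    def get_sender_id(self, call_parameters: CallParameters) -> str:
        # Twilio Media Streams needs the stream id to address outgoing messages
        return call_parameters.stream_id or call_parameters.call_id


params = CallParameters(call_id="CA123", stream_id="MZ456")
assert VoiceChannel().get_sender_id(params) == "CA123"
assert TwilioMediaStreamsChannel().get_sender_id(params) == "MZ456"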

rasa/core/policies/enterprise_search_policy.py CHANGED

@@ -1,7 +1,9 @@
+import glob
 import importlib.resources
 import json
+import os.path
 import re
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Text
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Text, Tuple
 
 import dotenv
 import structlog
@@ -162,6 +164,8 @@ DEFAULT_ENTERPRISE_SEARCH_PROMPT_WITH_CITATION_TEMPLATE = importlib.resources.re
     "rasa.core.policies", "enterprise_search_prompt_with_citation_template.jinja2"
 )
 
+_ENTERPRISE_SEARCH_CITATION_PATTERN = re.compile(r"\[([^\]]+)\]")
+
 
 class VectorStoreConnectionError(RasaException):
     """Exception raised for errors in connecting to the vector store."""
@@ -378,9 +382,11 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
 
         if store_type == DEFAULT_VECTOR_STORE_TYPE:
             logger.info("enterprise_search_policy.train.faiss")
+            docs_folder = self.vector_store_config.get(SOURCE_PROPERTY)
+            self._validate_documents_folder(docs_folder)
             with self._model_storage.write_to(self._resource) as path:
                 self.vector_store = FAISS_Store(
-                    docs_folder=
+                    docs_folder=docs_folder,
                     embeddings=embeddings,
                     index_path=path,
                     create_index=True,
@@ -760,6 +766,33 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
             result[domain.index_for_action(action_name)] = score  # type: ignore[assignment]
         return result
 
+    @classmethod
+    def _validate_documents_folder(cls, docs_folder: str) -> None:
+        if not os.path.exists(docs_folder) or not os.path.isdir(docs_folder):
+            error_message = (
+                f"Document source directory does not exist or is not a "
+                f"directory: '{docs_folder}'. "
+                "Please specify a valid path to the documents source directory in the "
+                "vector_store configuration."
+            )
+            logger.error(
+                "enterprise_search_policy.train.faiss.invalid_source_directory",
+                message=error_message,
+            )
+            print_error_and_exit(error_message)
+
+        docs = glob.glob(os.path.join(docs_folder, "*.txt"), recursive=True)
+        if not docs or len(docs) < 1:
+            error_message = (
+                f"Document source directory is empty: '{docs_folder}'. "
+                "Please add documents to this directory or specify a different one."
+            )
+            logger.error(
+                "enterprise_search_policy.train.faiss.source_directory_empty",
+                message=error_message,
+            )
+            print_error_and_exit(error_message)
+
     @classmethod
     def load(
         cls,
@@ -833,7 +866,7 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
             return None
 
         source = merged_config.get(VECTOR_STORE_PROPERTY, {}).get(SOURCE_PROPERTY)
-        if not source:
+        if not source or not os.path.exists(source) or not os.path.isdir(source):
             return None
 
         docs = FAISS_Store.load_documents(source)
@@ -870,10 +903,18 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
 
     @staticmethod
     def post_process_citations(llm_answer: str) -> str:
-        """Post-
-
-
-
+        """Post-processes the LLM answer to correctly number and sort citations and
+        sources.
+
+        - Handles both single `[1]` and grouped `[1, 3]` citations.
+        - Rewrites the numbers in square brackets in the answer text to start from 1
+          and be sorted within each group.
+        - Reorders the sources according to the order of their first appearance
+          in the text.
+        - Removes citations from the text that point to sources missing from
+          the source list.
+        - Keeps sources that are not cited in the text, placing them at the end
+          of the list.
 
         Args:
             llm_answer: The LLM answer.
@@ -887,77 +928,160 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
 
         # Split llm_answer into answer and citations
         try:
-
+            answer_part, sources_part = llm_answer.rsplit("Sources:", 1)
         except ValueError:
-            # if there is no "Sources:"
-            return llm_answer
-
-        #
-
-
-
-
-
+            # if there is no "Sources:" separator, return the original llm_answer
+            return llm_answer.strip()
+
+        # Parse the sources block to extract valid sources and other lines
+        valid_sources, other_source_lines = EnterpriseSearchPolicy._parse_sources_block(
+            sources_part
+        )
+
+        # Find all unique, valid citations in the answer text in their order
+        # of appearance
+        cited_order = EnterpriseSearchPolicy._get_cited_order(
+            answer_part, valid_sources
+        )
+
+        # Create a mapping from the old source numbers to the new, sequential numbers.
+        # For example, if the citation order in the text was [3, 1, 2], this map
+        # becomes {3: 1, 1: 2, 2: 3}. This allows for a quick lookup when rewriting
+        # the citations
+        renumbering_map = {
+            old_num: new_num + 1 for new_num, old_num in enumerate(cited_order)
+        }
+
+        # Rewrite the citations in the answer text based on the renumbering map
+        processed_answer = EnterpriseSearchPolicy._rewrite_answer_citations(
+            answer_part, renumbering_map
+        )
+
+        # Build the new list of sources
+        new_sources_list = EnterpriseSearchPolicy._build_final_sources_list(
+            cited_order,
+            renumbering_map,
+            valid_sources,
+            other_source_lines,
+        )
+
+        if len(new_sources_list) > 0:
+            processed_answer += "\nSources:\n" + "\n".join(new_sources_list)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    continue
-
-                    word = word.replace(
-                        match, f"{', '.join(map(str, new_indices))}"
-                    )
-                else:
-                    old_index = int(match.strip("[].,:;?!"))
-                    new_index = renumber_mapping.get(old_index)
-                    if not new_index:
-                        continue
-
-                    word = word.replace(str(old_index), str(new_index))
-            new_answer.append(word)
-
-        # join the words
-        joined_answer = " ".join(new_answer)
-        joined_answer += "\nSources:\n"
-
-        new_sources: List[str] = []
-
-        for line in citations.split("\n"):
-            pattern = r"(?<=\[)\d+"
-            match = re.search(pattern, line)
+        return processed_answer
+
+    @staticmethod
+    def _parse_sources_block(sources_part: str) -> Tuple[Dict[int, str], List[str]]:
+        """Parses the sources block from the LLM response.
+        Returns a tuple containing:
+        - A dictionary of valid sources matching the "[1] ..." format,
+          where the key is the source number
+        - A list of other source lines that do not match the specified format
+        """
+        valid_sources: Dict[int, str] = {}
+        other_source_lines: List[str] = []
+        source_line_pattern = re.compile(r"^\s*\[(\d+)\](.*)")
+
+        source_lines = sources_part.strip().split("\n")
+
+        for line in source_lines:
+            line = line.strip()
+            if not line:
+                continue
+
+            match = source_line_pattern.match(line)
             if match:
-
-
-
-
+                num = int(match.group(1))
+                valid_sources[num] = line
+            else:
+                other_source_lines.append(line)
+
+        return valid_sources, other_source_lines
+
+    @staticmethod
+    def _get_cited_order(
+        answer_part: str, available_sources: Dict[int, str]
+    ) -> List[int]:
+        """Find all unique, valid citations in the answer text in their order
+        # of appearance
+        """
+        cited_order: List[int] = []
+        seen_indices = set()
+
+        for match in _ENTERPRISE_SEARCH_CITATION_PATTERN.finditer(answer_part):
+            content = match.group(1)
+            indices_str = [s.strip() for s in content.split(",")]
+            for index_str in indices_str:
+                if index_str.isdigit():
+                    index = int(index_str)
+                    if index in available_sources and index not in seen_indices:
+                        cited_order.append(index)
+                        seen_indices.add(index)
+
+        return cited_order
+
+    @staticmethod
+    def _rewrite_answer_citations(
+        answer_part: str, renumber_map: Dict[int, int]
+    ) -> str:
+        """Rewrites the citations in the answer text based on the renumbering map."""
+
+        def replacer(match: re.Match) -> str:
+            content = match.group(1)
+            old_indices_str = [s.strip() for s in content.split(",")]
+            new_indices = [
+                renumber_map[int(s)]
+                for s in old_indices_str
+                if s.isdigit() and int(s) in renumber_map
+            ]
+            if not new_indices:
+                return ""
+
+            return f"[{', '.join(map(str, sorted(list(set(new_indices)))))}]"
+
+        processed_answer = _ENTERPRISE_SEARCH_CITATION_PATTERN.sub(
+            replacer, answer_part
+        )
+
+        # Clean up formatting after replacements
+        processed_answer = re.sub(r"\s+([,.?])", r"\1", processed_answer)
+        processed_answer = processed_answer.replace("[]", " ")
+        processed_answer = re.sub(r"\s+", " ", processed_answer)
+        processed_answer = processed_answer.strip()
+
+        return processed_answer
+
+    @staticmethod
+    def _build_final_sources_list(
+        cited_order: List[int],
+        renumbering_map: Dict[int, int],
+        valid_sources: Dict[int, str],
+        other_source_lines: List[str],
+    ) -> List[str]:
+        """Builds the final list of sources based on the cited order and
+        renumbering map.
+        """
+        new_sources_list: List[str] = []
+
+        # First, add the sorted, used sources
+        for old_num in cited_order:
+            new_num = renumbering_map[old_num]
+            source_line = valid_sources[old_num]
+            new_sources_list.append(
+                source_line.replace(f"[{old_num}]", f"[{new_num}]", 1)
+            )
 
-
-
-
-
+        # Then, add the unused but validly numbered sources
+        used_source_nums = set(cited_order)
+        # Sort by number to ensure a consistent order for uncited sources
+        for num, line in sorted(valid_sources.items()):
+            if num not in used_source_nums:
+                new_sources_list.append(line)
 
-
+        # Finally, add any other source lines
+        new_sources_list.extend(other_source_lines)
 
-        return
+        return new_sources_list
 
     @classmethod
     def _perform_health_checks(
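
The net effect of the reworked post_process_citations helpers is easier to see on a small input. The following self-contained sketch reimplements the same renumber-and-reorder idea in simplified form; it is an illustration, not the policy's actual code path, and the sample text and source names are made up:

import re

CITATION = re.compile(r"\[([^\]]+)\]")  # matches [1] as well as [1, 3]


def renumber_citations(answer: str, sources: dict) -> str:
    # collect cited source numbers in order of first appearance
    order = []
    for match in CITATION.finditer(answer):
        for part in match.group(1).split(","):
            num = int(part) if part.strip().isdigit() else None
            if num in sources and num not in order:
                order.append(num)
    mapping = {old: new for new, old in enumerate(order, start=1)}

    def replace(match: re.Match) -> str:
        nums = sorted(
            {mapping[int(p)] for p in match.group(1).split(",")
             if p.strip().isdigit() and int(p) in mapping}
        )
        return f"[{', '.join(map(str, nums))}]" if nums else ""

    renumbered = CITATION.sub(replace, answer).strip()
    source_lines = "\n".join(f"[{mapping[old]}] {sources[old]}" for old in order)
    return f"{renumbered}\nSources:\n{source_lines}"


answer = "Taxes are due in April [3]. Extensions are possible [1, 3]."
sources = {1: "extensions.txt", 3: "deadlines.txt"}
print(renumber_citations(answer, sources))
# Taxes are due in April [1]. Extensions are possible [1, 2].
# Sources:
# [1] deadlines.txt
# [2] extensions.txt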

rasa/dialogue_understanding/processor/command_processor.py CHANGED

@@ -64,12 +64,6 @@ from rasa.shared.nlu.constants import COMMANDS
 
 structlogger = structlog.get_logger()
 
-CANNOT_HANDLE_REASON = (
-    "A command generator attempted to set a slot "
-    "with a value extracted by an extractor "
-    "that is incompatible with the slot mapping type."
-)
-
 
 def contains_command(commands: List[Command], typ: Type[Command]) -> bool:
     """Check if a list of commands contains a command of a given type.
@@ -588,6 +582,11 @@ def clean_up_slot_command(
             "command_processor.clean_up_slot_command.skip_command_slot_not_in_domain",
             command=command,
         )
+        resulting_commands.append(
+            CannotHandleCommand(
+                reason="The slot predicted by the LLM is not defined in the domain."
+            )
+        )
         return resulting_commands
 
     if not should_slot_be_set(slot, command, resulting_commands):
@@ -606,7 +605,10 @@ def clean_up_slot_command(
         for command in resulting_commands
     )
 
-    cannot_handle = CannotHandleCommand(
+    cannot_handle = CannotHandleCommand(
+        reason="A command generator attempted to set a slot with a value extracted "
+        "by an extractor that is incompatible with the slot mapping type."
+    )
    if not slot_command_exists_already and cannot_handle not in resulting_commands:
         resulting_commands.append(cannot_handle)
 
@@ -640,9 +642,9 @@ def clean_up_slot_command(
         resulting_commands.append(command)
         return resulting_commands
 
-    if (slot := tracker.slots.get(command.name)) is not None and
-
-    ):
+    if (slot := tracker.slots.get(command.name)) is not None and str(
+        slot.value
+    ) == str(command.value):
         # the slot is already set, we don't need to set it again
         structlogger.debug(
             "command_processor.clean_up_slot_command.skip_command_slot_already_set",
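
The rewritten skip check compares the slot's current value and the command's value as strings, so an already-set slot is recognised even when the two values differ only in type. A toy illustration of why the str() comparison matters:

slot_value = 5          # value already stored on the slot
command_value = "5"     # value carried by the LLM-issued SetSlot command

# a plain equality check would treat these as different values
assert slot_value != command_value

# comparing the string representations treats them as the same value,
# so the duplicate SetSlot command can be skipped
assert str(slot_value) == str(command_value)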

rasa/e2e_test/e2e_test_coverage_report.py CHANGED

@@ -21,7 +21,7 @@ from rasa.shared.core.flows.flow_path import FlowPath, FlowPathsList, PathNode
 FLOW_NAME_COL_NAME = "Flow Name"
 NUM_STEPS_COL_NAME = "Num Steps"
 MISSING_STEPS_COL_NAME = "Missing Steps"
-LINE_NUMBERS_COL_NAME = "Line Numbers"
+LINE_NUMBERS_COL_NAME = "Line Numbers for Missing Steps"
 COVERAGE_COL_NAME = "Coverage"
 
 FLOWS_KEY = "flows"

rasa/llm_fine_tuning/annotation_module.py CHANGED

@@ -9,8 +9,8 @@ from rasa.e2e_test.e2e_test_case import ActualStepOutput, TestCase, TestStep, Te
 from rasa.e2e_test.e2e_test_runner import TEST_TURNS_TYPE, E2ETestRunner
 from rasa.llm_fine_tuning.conversations import Conversation, ConversationStep
 from rasa.llm_fine_tuning.storage import StorageContext
-from rasa.shared.core.constants import USER
-from rasa.shared.core.events import UserUttered
+from rasa.shared.core.constants import BOT, USER
+from rasa.shared.core.events import BotUttered, UserUttered
 from rasa.shared.core.trackers import DialogueStateTracker
 from rasa.shared.exceptions import FinetuningDataPreparationException
 from rasa.shared.nlu.constants import LLM_COMMANDS, LLM_PROMPT
@@ -83,16 +83,18 @@ def generate_conversation(
         Conversation.
     """
     steps = []
-    tracker_event_indices = [
-        i for i, event in enumerate(tracker.events) if isinstance(event, UserUttered)
-    ]
-
-    if len(test_case.steps) != len(tracker_event_indices):
-        raise FinetuningDataPreparationException(
-            "Number of test case steps and tracker events do not match."
-        )
 
     if assertions_used:
+        tracker_event_indices = [
+            i
+            for i, event in enumerate(tracker.events)
+            if isinstance(event, UserUttered)
+        ]
+        if len(test_case.steps) != len(tracker_event_indices):
+            raise FinetuningDataPreparationException(
+                "Number of test case steps and tracker events do not match."
+            )
+
         # we only have user steps, extract the bot response from the bot uttered
         # events of the test turn
         for i, (original_step, tracker_event_index) in enumerate(
@@ -110,8 +112,30 @@ def generate_conversation(
             )
             steps.extend(_create_bot_test_steps(test_turns[i]))
     else:
+        tracker_event_indices = [
+            i
+            for i, event in enumerate(tracker.events)
+            if isinstance(event, UserUttered) or isinstance(event, BotUttered)
+        ]
+
+        # Generally, we expect one or more bot response(s) for each user utterance
+        # in the test case, so that we can evaluate the actual bot response.
+        # If the test case ends with one or more user utterance(s) instead,
+        # we should thus trim those from the test case steps.
+        # This only applies to test cases that have at least one bot utterance;
+        # otherwise, all test case steps would be removed.
+        has_bot_utterance = any(step.actor == BOT for step in test_case.steps)
+        i = len(test_case.steps)
+        if has_bot_utterance:
+            while i > 0 and test_case.steps[i - 1].actor == USER:
+                i -= 1
+        test_case_steps = test_case.steps[:i]
+
+        # If the number of test case steps and tracker events differ,
+        # using zip ensures we only process pairs that exist in both lists.
+        # Prevents index errors and ensures we don't process unmatched steps or events.
         for i, (original_step, tracker_event_index) in enumerate(
-            zip(
+            zip(test_case_steps, tracker_event_indices)
         ):
             if original_step.actor == USER:
                 previous_turn = _get_previous_actual_step_output(test_turns, i)
@@ -127,6 +151,14 @@ def generate_conversation(
             else:
                 steps.append(original_step)
 
+        # the tracker should only include events up to the last bot utterance
+        # so that the resulting transcript ends with the last bot utterance too
+        # only applies to test cases that have at least one bot utterance
+        if has_bot_utterance and test_case.steps and test_case.steps[-1].actor == USER:
+            event_to_go_to = tracker_event_indices[len(test_case_steps)] - 1
+            timestamp = tracker.events[event_to_go_to].timestamp
+            tracker = tracker.travel_back_in_time(timestamp)
+
     # Some messages in an e2e test case could be mapped to commands via
     # 'NLUCommandAdapter', e.g. the message will not be annotated with a prompt and
     # commands pair. Only convert steps that have a prompt and commands present into a
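
The trailing-user-step trimming above is the core of the non-assertion branch: a transcript should end on a bot utterance whenever the test case contains one. A small stand-alone illustration of that backwards walk, using plain tuples instead of Rasa's TestStep objects:

USER, BOT = "user", "bot"

steps = [(USER, "hi"), (BOT, "hello"), (USER, "thanks"), (USER, "bye")]

has_bot_utterance = any(actor == BOT for actor, _ in steps)
i = len(steps)
if has_bot_utterance:
    # walk backwards past the trailing user steps
    while i > 0 and steps[i - 1][0] == USER:
        i -= 1
trimmed = steps[:i]

assert trimmed == [(USER, "hi"), (BOT, "hello")]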

rasa/shared/core/constants.py CHANGED

@@ -181,6 +181,7 @@ class SetSlotExtractor(Enum):
 # the keys for `State` (USER, PREVIOUS_ACTION, SLOTS, ACTIVE_LOOP)
 # represent the origin of a `SubState`
 USER = "user"
+BOT = "bot"
 SLOTS = "slots"
 
 USE_TEXT_FOR_FEATURIZATION = "use_text_for_featurization"
rasa/shared/core/flows/flow.py CHANGED

@@ -4,7 +4,7 @@ import copy
 from dataclasses import dataclass, field
 from functools import cached_property
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Set, Text, Union
+from typing import Any, Dict, List, Optional, Set, Text, Tuple, Union
 
 import structlog
 from pydantic import BaseModel
@@ -15,10 +15,12 @@ from rasa.engine.language import Language
 from rasa.shared.constants import RASA_DEFAULT_FLOW_PATTERN_PREFIX
 from rasa.shared.core.flows.constants import (
     KEY_ALWAYS_INCLUDE_IN_PROMPT,
+    KEY_CALLED_FLOW,
     KEY_DESCRIPTION,
     KEY_FILE_PATH,
     KEY_ID,
     KEY_IF,
+    KEY_LINKED_FLOW,
     KEY_NAME,
     KEY_NLU_TRIGGER,
     KEY_PERSISTED_SLOTS,
@@ -41,6 +43,7 @@ from rasa.shared.core.flows.steps import (
     CallFlowStep,
     CollectInformationFlowStep,
     EndFlowStep,
+    LinkFlowStep,
     StartFlowStep,
 )
 from rasa.shared.core.flows.steps.constants import (
@@ -61,6 +64,8 @@ class FlowLanguageTranslation(BaseModel):
     """The human-readable name of the flow."""
 
     class Config:
+        """Configuration for the FlowLanguageTranslation model."""
+
         extra = "ignore"
 
 
@@ -232,9 +237,9 @@ class Flow:
         return translation.name if translation else None
 
     def readable_name(self, language: Optional[Language] = None) -> str:
-        """
-
-        falls back to the flow's name, and finally the flow's ID.
+        """Returns the flow's name in the specified language if available.
+
+        Otherwise, falls back to the flow's name, and finally the flow's ID.
 
         Args:
             language: Preferred language code.
@@ -488,6 +493,9 @@ class Flow:
         current_path: FlowPath,
         all_paths: FlowPathsList,
         visited_step_ids: Set[str],
+        call_stack: Optional[
+            List[Tuple[Optional[FlowStep], Optional[Flow], str]]
+        ] = None,
     ) -> None:
         """Processes the flow steps recursively.
 
@@ -496,19 +504,25 @@ class Flow:
             current_path: The current path being constructed.
             all_paths: The list where completed paths are added.
             visited_step_ids: A set of steps that have been visited to avoid cycles.
+            call_stack: Tuple list of (flow, path, flow_type) to track path when \
+                calling flows through call and link steps.
 
         Returns:
             None: This function modifies all_paths in place by appending new paths
            as they are found.
         """
+        if call_stack is None:
+            call_stack = []
+
         # Check if the step is relevant for testable_paths extraction.
-        # We only create new path nodes for
-        #
-        #
+        # We only create new path nodes for CollectInformationFlowStep,
+        # ActionFlowStep, CallFlowStep and LinkFlowStep,
+        # because these are externally visible changes
+        # in the assistant's behaviour (trackable in the e2e tests).
         # For other flow steps, we only follow their links.
-        # We decided to ignore calls to other flows in our coverage analysis.
         should_add_node = isinstance(
-            current_step,
+            current_step,
+            (CollectInformationFlowStep, ActionFlowStep, CallFlowStep, LinkFlowStep),
         )
         if should_add_node:
             # Add current step to the current path that is being constructed.
@@ -520,10 +534,45 @@ class Flow:
                 )
             )
 
+        # Check if the current step has already been visited or
+        # if the end of the path has been reached.
+        # If so, and we're not within a called flow, we terminate the current path.
+        # This also applies for when we're inside a linked flow and reach its end.
+        # If we're inside a called flow and reach its end,
+        # continue with the next steps in its parent flow.
         if current_step.id in visited_step_ids or self.is_end_of_path(current_step):
-            #
-
-            #
+            # Shallow copy is sufficient, since we only pop from the list and
+            # don't mutate the objects inside the tuples.
+            # The state of FlowStep and Flow does not change during the traversal.
+            call_stack_copy = call_stack.copy()
+            # parent_flow_type could be any of: None, i.e. main flow,
+            # KEY_CALLED_FLOW(=called_flow) or KEY_LINKED_FLOW(=linked_flow)
+            parent_step, parent_flow, parent_flow_type = (
+                call_stack_copy.pop() if call_stack_copy else (None, None, None)
+            )
+
+            # Check if within a called flow.
+            # If within linked flow, stop the traversal as this takes precedence.
+            if parent_step and parent_flow_type == KEY_CALLED_FLOW:
+                # As we have reached the END step of a called flow, we need to
+                # continue with the next links of the parent step.
+                if parent_flow is not None:
+                    for link in parent_step.next.links:
+                        parent_flow._handle_link(
+                            current_path,
+                            all_paths,
+                            visited_step_ids,
+                            link,
+                            call_stack_copy,
+                        )
+
+            else:
+                # Found a cycle, or reached an end step, do not proceed further.
+                all_paths.paths.append(copy.deepcopy(current_path))
+
+            # Backtrack: remove the last node after reaching a terminal step.
+            # Ensures the path is correctly backtracked, after a path ends or
+            # a cycle is detected.
             if should_add_node:
                 current_path.nodes.pop()
             return
@@ -531,6 +580,62 @@ class Flow:
         # Mark current step as visited in this path.
         visited_step_ids.add(current_step.id)
 
+        # If the current step is a call step, we need to resolve the call
+        # and continue with the steps of the called flow.
+        if isinstance(current_step, CallFlowStep):
+            # Get the steps of the called flow and continue with them.
+            called_flow = current_step.called_flow_reference
+            if called_flow and (
+                start_step_in_called_flow := called_flow.first_step_in_flow()
+            ):
+                call_stack.append((current_step, self, KEY_CALLED_FLOW))
+                called_flow._go_over_steps(
+                    start_step_in_called_flow,
+                    current_path,
+                    all_paths,
+                    visited_step_ids,
+                    call_stack,
+                )
+
+                # After processing the steps of the called (child) flow,
+                # remove them from the visited steps
+                # to allow the calling (parent) flow to revisit them later.
+                visited_step_ids.remove(current_step.id)
+                call_stack.pop()
+
+            # Backtrack: remove the last node
+            # after returning from a called (child) flow.
+            # Ensures the parent flow can continue exploring other branches.
+            if should_add_node:
+                current_path.nodes.pop()
+            return
+
+        # If the current step is a LinkFlowStep, step into the linked flow,
+        # process its links, and do not return from that flow anymore.
+        if isinstance(current_step, LinkFlowStep):
+            # Get the steps of the linked flow and continue with them.
+            linked_flow = current_step.linked_flow_reference
+            if linked_flow and (
+                start_step_in_linked_flow := linked_flow.first_step_in_flow()
+            ):
+                call_stack.append((current_step, self, KEY_LINKED_FLOW))
+                linked_flow._go_over_steps(
+                    start_step_in_linked_flow,
+                    current_path,
+                    all_paths,
+                    visited_step_ids,
+                    call_stack,
+                )
+                visited_step_ids.remove(current_step.id)
+                call_stack.pop()
+
+            # Backtrack: remove the last node
+            # after returning from a linked (child) flow.
+            # Ensures the parent can continue after the linked flow is processed.
+            if should_add_node:
+                current_path.nodes.pop()
+            return
+
         # Iterate over all links of the current step.
         for link in current_step.next.links:
             self._handle_link(
@@ -538,12 +643,15 @@ class Flow:
                 all_paths,
                 visited_step_ids,
                 link,
+                call_stack,
             )
 
         # Backtrack the current step and remove it from the path.
         visited_step_ids.remove(current_step.id)
 
-        #
+        # Backtrack: remove the last node
+        # after processing all links of the current step.
+        # Ensures the next recursion can start once all links are explored.
         if should_add_node:
             current_path.nodes.pop()
 
@@ -553,6 +661,9 @@ class Flow:
         all_paths: FlowPathsList,
         visited_step_ids: Set[str],
         link: FlowStepLink,
+        call_stack: Optional[
+            List[Tuple[Optional[FlowStep], Optional[Flow], str]]
+        ] = None,
     ) -> None:
         """Handles the next step in a flow.
 
@@ -561,6 +672,8 @@ class Flow:
             all_paths: The list where completed paths are added.
             visited_step_ids: A set of steps that have been visited to avoid cycles.
             link: The link to be followed.
+            call_stack: Tuple list of (flow, path, flow_type) to track path when \
+                calling flows through call and link steps..
 
         Returns:
             None: This function modifies all_paths in place by appending new paths
@@ -575,6 +688,7 @@ class Flow:
                 current_path,
                 all_paths,
                 visited_step_ids,
+                call_stack,
             )
             return
         # IfFlowStepLink and ElseFlowStepLink are conditional links.
@@ -588,6 +702,7 @@ class Flow:
                 current_path,
                 all_paths,
                 visited_step_ids,
+                call_stack,
             )
             return
         else:
@@ -598,6 +713,7 @@ class Flow:
                 current_path,
                 all_paths,
                 visited_step_ids,
+                call_stack,
             )
             return
 
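
The intent of the call-stack change, at a high level: a call step descends into the called flow and resumes in the caller afterwards, while a link step hands control over for good, so steps behind the link are unreachable. A toy sketch of just that call/link semantics (the real method additionally threads an explicit call stack through a recursive DFS and handles cycles and branching links; none of the names below are Rasa's):

# Toy flows: each flow is a list of step names. "call:<flow>" visits the target
# flow and then returns to the caller; "link:<flow>" hands control over for good.
flows = {
    "main": ["greet", "call:auth", "recommend", "link:faq", "goodbye"],
    "auth": ["ask_password", "check_password"],
    "faq": ["answer"],
}


def walk(flow_name: str) -> list:
    """Flatten the steps reachable from flow_name, honouring call/link semantics."""
    visited = []
    for step in flows[flow_name]:
        if step.startswith("call:"):
            # descend into the called flow, then continue in this flow
            visited += walk(step.split(":", 1)[1])
        elif step.startswith("link:"):
            # hand over to the linked flow; steps after the link are unreachable
            return visited + walk(step.split(":", 1)[1])
        else:
            visited.append(step)
    return visited


# "goodbye" never appears because it sits behind the link step
assert walk("main") == [
    "greet", "ask_password", "check_password", "recommend", "answer"
]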

rasa/shared/core/flows/flows_list.py CHANGED

@@ -36,6 +36,7 @@ class FlowsList:
     def __post_init__(self) -> None:
         """Initializes the FlowsList object."""
         self._resolve_called_flows()
+        self._resolve_linked_flows()
 
     def __iter__(self) -> Generator[Flow, None, None]:
         """Iterates over the flows."""
@@ -103,7 +104,10 @@ class FlowsList:
         )
 
     def _resolve_called_flows(self) -> None:
-        """Resolves the called flows.
+        """Resolves the called flows.
+
+        `Resolving` here means connecting the step to the actual `Flow` object.
+        """
         from rasa.shared.core.flows.steps import CallFlowStep
 
         for flow in self.underlying_flows:
@@ -112,6 +116,19 @@ class FlowsList:
                 # only resolve the reference, if it isn't already resolved
                 step.called_flow_reference = self.flow_by_id(step.call)
 
+    def _resolve_linked_flows(self) -> None:
+        """Resolves the linked flows.
+
+        `Resolving` here means connecting the step to the actual `Flow` object.
+        """
+        from rasa.shared.core.flows.steps import LinkFlowStep
+
+        for flow in self.underlying_flows:
+            for step in flow.steps:
+                if isinstance(step, LinkFlowStep) and not step.linked_flow_reference:
+                    # only resolve the reference, if it isn't already resolved
+                    step.linked_flow_reference = self.flow_by_id(step.link)
+
     def as_json_list(self) -> List[Dict[Text, Any]]:
         """Serialize the FlowsList object to list format and not to the original dict.
 
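
_resolve_linked_flows mirrors _resolve_called_flows: a second pass over all loaded flows that turns the string id carried by each link step into a reference to the Flow object it names. A minimal sketch of that two-pass wiring with hypothetical stand-in classes, not Rasa's:

from dataclasses import dataclass, field
from typing import Dict, List, Optional


@dataclass
class LinkStep:
    link: str                                    # id of the target flow
    linked_flow_reference: Optional["Flow"] = None


@dataclass
class Flow:
    id: str
    steps: List[LinkStep] = field(default_factory=list)


def resolve_linked_flows(flows: Dict[str, Flow]) -> None:
    # second pass: connect each link step to the Flow object it names
    for flow in flows.values():
        for step in flow.steps:
            if step.linked_flow_reference is None:
                step.linked_flow_reference = flows.get(step.link)


flows = {
    "order": Flow("order", steps=[LinkStep(link="faq")]),
    "faq": Flow("faq"),
}
resolve_linked_flows(flows)
assert flows["order"].steps[0].linked_flow_reference is flows["faq"]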

rasa/shared/core/flows/steps/link.py CHANGED

@@ -1,9 +1,12 @@
 from __future__ import annotations
 
 from dataclasses import dataclass
-from typing import Any, Dict, Text
+from typing import TYPE_CHECKING, Any, Dict, Text
 
-from rasa.shared.core.flows.flow_step import FlowStep
+from rasa.shared.core.flows.flow_step import FlowStep, Optional
+
+if TYPE_CHECKING:
+    from rasa.shared.core.flows.flow import Flow
 
 
 @dataclass
@@ -12,6 +15,8 @@ class LinkFlowStep(FlowStep):
 
     link: Text
     """The id of the flow that should be started subsequently."""
+    linked_flow_reference: Optional["Flow"] = None
+    """The flow that is linked to by this step."""
 
     def does_allow_for_next_step(self) -> bool:
         """Returns whether this step allows for following steps.

{rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: rasa-pro
-Version: 3.12.
+Version: 3.12.21
 Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
 Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
 Author: Rasa Technologies GmbH

{rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/RECORD CHANGED

@@ -277,7 +277,7 @@ rasa/core/channels/voice_stream/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
 rasa/core/channels/voice_stream/asr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/core/channels/voice_stream/asr/asr_engine.py,sha256=DpWEhkCHJPM1WDsBI5R3czqwvFiyaRMlgCubBNXO4U4,3237
 rasa/core/channels/voice_stream/asr/asr_event.py,sha256=skPwrkRrcsptmeWXu9q68i4B-ZbvambCFFLtQ0TIgMo,297
-rasa/core/channels/voice_stream/asr/azure.py,sha256=
+rasa/core/channels/voice_stream/asr/azure.py,sha256=dUFxtNVVwGM2D1VyqQ5FWeSpKwUQekMXUxWZv6tPJ7w,6114
 rasa/core/channels/voice_stream/asr/deepgram.py,sha256=9cIqRuv9gWzOfEKxeDbhijGoT8EPUV7Oo493WXaHlBo,5682
 rasa/core/channels/voice_stream/audio_bytes.py,sha256=3V0QQplPD-kVfebaaeVcKgV7pwIJyjnTenujVD3y3sY,340
 rasa/core/channels/voice_stream/audiocodes.py,sha256=WVAd5ksO97y7a6Wvv6PqQKQVgS1_IdRXeDIjnl6IAkY,12498
@@ -289,9 +289,9 @@ rasa/core/channels/voice_stream/tts/azure.py,sha256=RIS8wBpnX8yWM17UxUo5ko4QrxEx
 rasa/core/channels/voice_stream/tts/cartesia.py,sha256=cH2eHicZ_NCWtDH-cn9Chq8SSm-1agJRy-ieDJCVlD4,5407
 rasa/core/channels/voice_stream/tts/tts_cache.py,sha256=K4S2d8zWX2h2ylYALp7IdqFSkuTIqLvho--Yt0litb4,850
 rasa/core/channels/voice_stream/tts/tts_engine.py,sha256=JMCWGHxT8QiqKoBeI6F4RX_-Q9EEqG3vUtkgOUnlt-w,1812
-rasa/core/channels/voice_stream/twilio_media_streams.py,sha256=
+rasa/core/channels/voice_stream/twilio_media_streams.py,sha256=cM09rwGpbyFD9lCfmWBjHE1XS-F4ufpSbvwJACHpVmI,9094
 rasa/core/channels/voice_stream/util.py,sha256=d0Tl0tGAnVj3SgGovsUMHx-QL44nrPI29OTYKYleH0U,1987
-rasa/core/channels/voice_stream/voice_channel.py,sha256=
+rasa/core/channels/voice_stream/voice_channel.py,sha256=5XQjDkkWCOaXV3GKmzDBPIIwYVIS0StzzApxXrBKLd4,19611
 rasa/core/channels/webexteams.py,sha256=z_o_jnc6B7hsHpd6XorImFkF43wB4yx_kiTPKAjPSuo,4805
 rasa/core/concurrent_lock_store.py,sha256=ycd-aeJJWXIokMRimCdQFHdwuMfl512hZSUHE8oSd2c,7722
 rasa/core/constants.py,sha256=dEokmEf6XkOFA_xpuwjqwNtlZv-a5Tz5dLMRc7Vu4CU,4070
@@ -327,7 +327,7 @@ rasa/core/nlg/translate.py,sha256=ZXRvysqXGdtHBJ7x3YkW6zfmnb9DuEGHCMTL41v-M8M,21
 rasa/core/persistor.py,sha256=7LCZHAwCM-xrUI38aaJ5dkxJvLdJXWI1TEUKsBo4_EE,21295
 rasa/core/policies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/core/policies/ensemble.py,sha256=XoHxU0jcb_io_LBOpjJffylzqtGEB7CH9ivhRyO8pDc,12960
-rasa/core/policies/enterprise_search_policy.py,sha256=
+rasa/core/policies/enterprise_search_policy.py,sha256=QKN8mLEjDqVgLb78z3w_IJWanvaSacj4_-BxEUBNBKw,41961
 rasa/core/policies/enterprise_search_prompt_template.jinja2,sha256=dCS_seyBGxMQoMsOjjvPp0dd31OSzZCJSZeev1FJK5Q,1187
 rasa/core/policies/enterprise_search_prompt_with_citation_template.jinja2,sha256=va9rpP97dN3PKoJZOVfyuISt3cPBlb10Pqyz25RwO_Q,3294
 rasa/core/policies/flow_policy.py,sha256=597G62hrLF_CAMCvu-TPRldFnjMP2XEIkhcIaPWcQAc,7489
@@ -436,7 +436,7 @@ rasa/dialogue_understanding/patterns/skip_question.py,sha256=fJ1MC0WEEtS-BpnGJEf
 rasa/dialogue_understanding/patterns/user_silence.py,sha256=xP-QMnd-MsybH5z4g01hBv4OLOHcw6m3rc26LQfe2zo,1140
 rasa/dialogue_understanding/patterns/validate_slot.py,sha256=hqd5AEGT3M3HLNhMwuI9W9kZNCvgU6GyI-2xc2b4kz8,2085
 rasa/dialogue_understanding/processor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-rasa/dialogue_understanding/processor/command_processor.py,sha256=
+rasa/dialogue_understanding/processor/command_processor.py,sha256=dSakuFOQuYwatYafiFCbr3P_g59BnjPAC1iBb2wfSKk,30169
 rasa/dialogue_understanding/processor/command_processor_component.py,sha256=rkErI_Uo7s3LsEojUSGSRbWGyGaX7GtGOYSJn0V-TI4,1650
 rasa/dialogue_understanding/stack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/dialogue_understanding/stack/dialogue_stack.py,sha256=cYV6aQeh0EuOJHODDqK3biqXozYTX8baPgLwHhPxFqs,5244
@@ -473,7 +473,7 @@ rasa/e2e_test/e2e_config_schema.yml,sha256=zQectcNvmNChdPMqO4O-CufqAF90AMBbP-Dmg
 rasa/e2e_test/e2e_test_case.py,sha256=3fKan0GJOMKm-FKHjQaY9AVhI4ortQYuEsPh9GHwbio,20817
 rasa/e2e_test/e2e_test_converter.py,sha256=bcSg-hWKPGvZBip6PKPvYAcgvSUCU5uXmC9D7UTmJYY,12570
 rasa/e2e_test/e2e_test_converter_prompt.jinja2,sha256=EMy-aCd7jLARHmwAuZUGT5ABnNHjR872_pexRIMGA7c,2791
-rasa/e2e_test/e2e_test_coverage_report.py,sha256=
+rasa/e2e_test/e2e_test_coverage_report.py,sha256=UGQ3np2p_gtnhl17K5y886STiX9xBn95GVuN9LGIpGY,11344
 rasa/e2e_test/e2e_test_result.py,sha256=qVurjFC4cAWIY7rOsc-A-4nIdcnnw98TaK86-bDwI7Y,1649
 rasa/e2e_test/e2e_test_runner.py,sha256=eXV5DJ0rAVY7FAXYI9aKvYqZXdfsE92y6deEUqUvrTY,47965
 rasa/e2e_test/e2e_test_schema.yml,sha256=0WG0I3baTRc76lff3UjQ8vGRzMUoV6qcE8r9adOAlCU,5638
@@ -534,7 +534,7 @@ rasa/hooks.py,sha256=5ZMrqNz323w56MMY6E8jeZ_YXgRqq8p-yi18S2XOmbo,4061
 rasa/jupyter.py,sha256=TCYVD4QPQIMmfA6ZwDUBOBTAECwCwbU2XOkosodLO9k,1782
 rasa/keys,sha256=2Stg1fstgJ203cOoW1B2gGMY29fhEnjIfTVxKv_fqPo,101
 rasa/llm_fine_tuning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-rasa/llm_fine_tuning/annotation_module.py,sha256=
+rasa/llm_fine_tuning/annotation_module.py,sha256=7vKwesRLvtKQAt9etHIT51HN8D21dSR3smNY7aIbGx4,11267
 rasa/llm_fine_tuning/conversations.py,sha256=qzoTFQiwADmzL9mocqML4a-nAgEu6hlOSE3K87LvhM0,4272
 rasa/llm_fine_tuning/llm_data_preparation_module.py,sha256=Vh6HHDvH1ueaNgBWnzIA7ymcTwHpqVvKxIPAnMKZtyY,7153
 rasa/llm_fine_tuning/paraphrasing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -628,18 +628,18 @@ rasa/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/shared/constants.py,sha256=u9GnSSQYRjYN_mjd7XHMGgoVc6ipoiZQuLt3bFOF0O0,12264
 rasa/shared/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/shared/core/command_payload_reader.py,sha256=puHYsp9xbX0YQm2L1NDBItOFmdzI7AzmfGefgcHiCc0,3871
-rasa/shared/core/constants.py,sha256=
+rasa/shared/core/constants.py,sha256=kirMr9_Ls18szCCo3U80fx6AAkLu73tu2OGaLlu4U9s,6349
 rasa/shared/core/conversation.py,sha256=0nUhcbQkPDnO3_Rig7oiinrWmPy5fsVQs_U6Fx1hG5c,1384
 rasa/shared/core/domain.py,sha256=piJu4Kr2exC9ehC3e2oNaxPxXkeIhOYoQJQQOuzMw18,81638
 rasa/shared/core/events.py,sha256=kTUWSpDepj3kpjjXveYXz3h2XcIQV3Sq8h7MTbx5fMw,86489
 rasa/shared/core/flows/__init__.py,sha256=Z4pBY0qcEbHeOwgmKsyg2Nz4dX9CF67fFCwj2KXSMpg,180
-rasa/shared/core/flows/constants.py,sha256=
-rasa/shared/core/flows/flow.py,sha256=
+rasa/shared/core/flows/constants.py,sha256=uno5qtsWl8lxELsDe04_5tJH1tBgj6uRRr_g83s10xA,404
+rasa/shared/core/flows/flow.py,sha256=7EY_Jlqo21dB7yYNxB0zw1uRUImy0_zZ2bYrwKR8kHk,28382
 rasa/shared/core/flows/flow_path.py,sha256=xstwahZBU5cfMY46mREA4NoOGlKLBRAqeP_mJ3UZqOI,2283
 rasa/shared/core/flows/flow_step.py,sha256=ZvjXz1Fs5FR1_BlGBitOEYRnLhzk-bBYv1CC2Oi6iWQ,4537
 rasa/shared/core/flows/flow_step_links.py,sha256=U9c4MFASieJGp_-XMhR0hrxFQISCJAF4TQ0wEy4IjB0,10530
 rasa/shared/core/flows/flow_step_sequence.py,sha256=Rcw82OccjJsNc2wKXi6IePOIAPFRBTylSVphCRJCuc4,2362
-rasa/shared/core/flows/flows_list.py,sha256=
+rasa/shared/core/flows/flows_list.py,sha256=9KKvkLeNSe1oTZUpXAX-EvqYOKcXe5OQdZ-anut5bQc,9415
 rasa/shared/core/flows/flows_yaml_schema.json,sha256=lILHEH3sp9rm61uV4HcN-3V3Dxg1xakcVvYFRAs3gu8,12399
 rasa/shared/core/flows/nlu_trigger.py,sha256=GG6m5h6Z0jaukA5rPAHscnULgZGDjYMXfufX9nYQtzA,3907
 rasa/shared/core/flows/steps/__init__.py,sha256=jvJp02o9_Wx-rZeQ3SYiLVMpO6ulS1yKuiiKg0ld_nE,655
@@ -650,7 +650,7 @@ rasa/shared/core/flows/steps/constants.py,sha256=DCxrEUGbJciBknHm-_t4tmcnH19IZKP
 rasa/shared/core/flows/steps/continuation.py,sha256=5Rzayr80FsgS4bAajuRObVvVcLqPEh9nxGbT2te85xY,1498
 rasa/shared/core/flows/steps/end.py,sha256=0XrPlQMVBnQKVeZs0or8P9IrVqG7i6RoSNDsVrvAeDk,749
 rasa/shared/core/flows/steps/internal.py,sha256=5Peu4ANnxe4NEMIeDd_SfK4i1JdgjncalEgD4fNgskc,1449
-rasa/shared/core/flows/steps/link.py,sha256=
+rasa/shared/core/flows/steps/link.py,sha256=nOY1a92OEDjf4rqoZlbGAW2lXMeGOyNZwgLIK6e7if4,1702
 rasa/shared/core/flows/steps/no_operation.py,sha256=SKqNQ4usLZ4c-faSZOX41lpHBD8YtVe2eGDSt50H05s,1399
 rasa/shared/core/flows/steps/set_slots.py,sha256=DudtHKXaVSNmQL_vXLvkK23_JqgTBU9RJrdQeBpC4s0,1492
 rasa/shared/core/flows/steps/start.py,sha256=AJpKIm0S3GZYLEs3ybXW0Zrq03Pu9lvirNahiUy2I6k,1010
@@ -822,9 +822,9 @@ rasa/utils/train_utils.py,sha256=ClJx-6x3-h3Vt6mskacgkcCUJTMXjFPe3zAcy_DfmaU,212
 rasa/utils/url_tools.py,sha256=dZ1HGkVdWTJB7zYEdwoDIrEuyX9HE5WsxKKFVsXBLE0,1218
 rasa/utils/yaml.py,sha256=KjbZq5C94ZP7Jdsw8bYYF7HASI6K4-C_kdHfrnPLpSI,2000
 rasa/validator.py,sha256=524VlFTYK0B3iXYveVD6BDC3K0j1QfpzJ9O-TAWczmc,83166
-rasa/version.py,sha256=
-rasa_pro-3.12.
-rasa_pro-3.12.
-rasa_pro-3.12.
-rasa_pro-3.12.
-rasa_pro-3.12.
+rasa/version.py,sha256=qwNQwF1ZW-TW5WmAk9DusZovcswu5IOz6HUAl37Qbfk,118
+rasa_pro-3.12.21.dist-info/METADATA,sha256=CPpfxe6f-18tMn3193QUqhGe6v_u9QFSW58VENd40hU,10609
+rasa_pro-3.12.21.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
+rasa_pro-3.12.21.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+rasa_pro-3.12.21.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
+rasa_pro-3.12.21.dist-info/RECORD,,

{rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/NOTICE: File without changes
{rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/WHEEL: File without changes
{rasa_pro-3.12.19.dist-info → rasa_pro-3.12.21.dist-info}/entry_points.txt: File without changes