rasa-pro 3.13.0.dev8__py3-none-any.whl → 3.13.0.dev9__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of rasa-pro might be problematic; see the registry listing for details.
Files changed (85)
  1. rasa/core/channels/development_inspector.py +1 -1
  2. rasa/core/channels/facebook.py +1 -4
  3. rasa/core/channels/inspector/README.md +3 -3
  4. rasa/core/channels/inspector/dist/assets/{arc-c4b064fc.js → arc-02053cc1.js} +1 -1
  5. rasa/core/channels/inspector/dist/assets/{blockDiagram-38ab4fdb-215b5026.js → blockDiagram-38ab4fdb-008b6289.js} +1 -1
  6. rasa/core/channels/inspector/dist/assets/{c4Diagram-3d4e48cf-2b54a0a3.js → c4Diagram-3d4e48cf-fb2597be.js} +1 -1
  7. rasa/core/channels/inspector/dist/assets/channel-078dada8.js +1 -0
  8. rasa/core/channels/inspector/dist/assets/{classDiagram-70f12bd4-daacea5f.js → classDiagram-70f12bd4-7f847e00.js} +1 -1
  9. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-f2320105-930d4dc2.js → classDiagram-v2-f2320105-ba1d689b.js} +1 -1
  10. rasa/core/channels/inspector/dist/assets/clone-5b4516de.js +1 -0
  11. rasa/core/channels/inspector/dist/assets/{createText-2e5e7dd3-83c206ba.js → createText-2e5e7dd3-dd8e67c4.js} +1 -1
  12. rasa/core/channels/inspector/dist/assets/{edges-e0da2a9e-b0eb01d0.js → edges-e0da2a9e-10784939.js} +1 -1
  13. rasa/core/channels/inspector/dist/assets/{erDiagram-9861fffd-17586500.js → erDiagram-9861fffd-24947ae6.js} +1 -1
  14. rasa/core/channels/inspector/dist/assets/{flowDb-956e92f1-be2a1776.js → flowDb-956e92f1-a9ced505.js} +1 -1
  15. rasa/core/channels/inspector/dist/assets/{flowDiagram-66a62f08-c2120ebd.js → flowDiagram-66a62f08-afda9c7c.js} +1 -1
  16. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-f9613071.js +1 -0
  17. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-4a651766-a6ab5c48.js → flowchart-elk-definition-4a651766-6ef530b8.js} +1 -1
  18. rasa/core/channels/inspector/dist/assets/{ganttDiagram-c361ad54-ef613457.js → ganttDiagram-c361ad54-0c7dd39a.js} +1 -1
  19. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-72cf32ee-d59185b3.js → gitGraphDiagram-72cf32ee-b57239d6.js} +1 -1
  20. rasa/core/channels/inspector/dist/assets/{graph-0f155405.js → graph-9ed57cec.js} +1 -1
  21. rasa/core/channels/inspector/dist/assets/{index-3862675e-d5f1d1b7.js → index-3862675e-233090de.js} +1 -1
  22. rasa/core/channels/inspector/dist/assets/{index-47737d3a.js → index-72184470.js} +3 -3
  23. rasa/core/channels/inspector/dist/assets/{infoDiagram-f8f76790-b07d141f.js → infoDiagram-f8f76790-aa116649.js} +1 -1
  24. rasa/core/channels/inspector/dist/assets/{journeyDiagram-49397b02-1936d429.js → journeyDiagram-49397b02-e51877cc.js} +1 -1
  25. rasa/core/channels/inspector/dist/assets/{layout-dde8d0f3.js → layout-3ca3798c.js} +1 -1
  26. rasa/core/channels/inspector/dist/assets/{line-0c2c7ee0.js → line-26ee10d3.js} +1 -1
  27. rasa/core/channels/inspector/dist/assets/{linear-35dd89a4.js → linear-aedded32.js} +1 -1
  28. rasa/core/channels/inspector/dist/assets/{mindmap-definition-fc14e90a-56192851.js → mindmap-definition-fc14e90a-d8957261.js} +1 -1
  29. rasa/core/channels/inspector/dist/assets/{pieDiagram-8a3498a8-fc21ed78.js → pieDiagram-8a3498a8-d771f885.js} +1 -1
  30. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-120e2f19-25e98518.js → quadrantDiagram-120e2f19-09fdf50c.js} +1 -1
  31. rasa/core/channels/inspector/dist/assets/{requirementDiagram-deff3bca-546ff1f5.js → requirementDiagram-deff3bca-9f0af02e.js} +1 -1
  32. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-04a897e0-02d8b82d.js → sankeyDiagram-04a897e0-84415b37.js} +1 -1
  33. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-704730f1-3ca5a92e.js → sequenceDiagram-704730f1-8dec4055.js} +1 -1
  34. rasa/core/channels/inspector/dist/assets/{stateDiagram-587899a1-128ea07c.js → stateDiagram-587899a1-c5431d07.js} +1 -1
  35. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-d93cdb3a-95f290af.js → stateDiagram-v2-d93cdb3a-274e77d9.js} +1 -1
  36. rasa/core/channels/inspector/dist/assets/{styles-6aaf32cf-4984898a.js → styles-6aaf32cf-e364a1d7.js} +1 -1
  37. rasa/core/channels/inspector/dist/assets/{styles-9a916d00-1bf266ba.js → styles-9a916d00-0dae36f6.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{styles-c10674c1-60521c63.js → styles-c10674c1-c4641675.js} +1 -1
  39. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-08f97a94-a25b6e12.js → svgDrawCommon-08f97a94-831fe9a1.js} +1 -1
  40. rasa/core/channels/inspector/dist/assets/{timeline-definition-85554ec2-0fc086bf.js → timeline-definition-85554ec2-c3304b3a.js} +1 -1
  41. rasa/core/channels/inspector/dist/assets/{xychartDiagram-e933f94c-44ee592e.js → xychartDiagram-e933f94c-da799369.js} +1 -1
  42. rasa/core/channels/inspector/dist/index.html +1 -1
  43. rasa/core/channels/inspector/src/components/RecruitmentPanel.tsx +1 -1
  44. rasa/core/channels/socketio.py +56 -41
  45. rasa/core/channels/studio_chat.py +311 -8
  46. rasa/core/channels/voice_ready/audiocodes.py +1 -1
  47. rasa/core/channels/voice_stream/audiocodes.py +1 -1
  48. rasa/core/channels/voice_stream/browser_audio.py +1 -1
  49. rasa/core/channels/voice_stream/tts/__init__.py +8 -0
  50. rasa/core/channels/voice_stream/voice_channel.py +6 -1
  51. rasa/core/information_retrieval/faiss.py +18 -11
  52. rasa/core/information_retrieval/ingestion/__init__.py +0 -0
  53. rasa/core/information_retrieval/ingestion/faq_parser.py +158 -0
  54. rasa/core/nlg/contextual_response_rephraser.py +10 -1
  55. rasa/core/policies/enterprise_search_policy.py +8 -29
  56. rasa/core/policies/intentless_policy.py +47 -10
  57. rasa/dialogue_understanding/coexistence/llm_based_router.py +9 -6
  58. rasa/dialogue_understanding/commands/cancel_flow_command.py +3 -1
  59. rasa/dialogue_understanding/commands/correct_slots_command.py +1 -3
  60. rasa/dialogue_understanding/generator/nlu_command_adapter.py +2 -2
  61. rasa/dialogue_understanding/generator/single_step/single_step_based_llm_command_generator.py +2 -2
  62. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +8 -11
  63. rasa/dialogue_understanding/patterns/cancel.py +1 -2
  64. rasa/dialogue_understanding/patterns/clarify.py +1 -1
  65. rasa/dialogue_understanding/patterns/correction.py +2 -2
  66. rasa/dialogue_understanding/processor/command_processor.py +3 -4
  67. rasa/dialogue_understanding/stack/utils.py +3 -1
  68. rasa/engine/graph.py +2 -2
  69. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +1 -5
  70. rasa/shared/constants.py +11 -0
  71. rasa/shared/core/command_payload_reader.py +1 -5
  72. rasa/shared/core/events.py +1 -3
  73. rasa/shared/core/flows/validation.py +16 -3
  74. rasa/shared/core/training_data/story_reader/yaml_story_reader.py +1 -4
  75. rasa/shared/utils/common.py +1 -1
  76. rasa/shared/utils/llm.py +30 -0
  77. rasa/version.py +1 -1
  78. {rasa_pro-3.13.0.dev8.dist-info → rasa_pro-3.13.0.dev9.dist-info}/METADATA +3 -3
  79. {rasa_pro-3.13.0.dev8.dist-info → rasa_pro-3.13.0.dev9.dist-info}/RECORD +82 -80
  80. rasa/core/channels/inspector/dist/assets/channel-3730f5fd.js +0 -1
  81. rasa/core/channels/inspector/dist/assets/clone-e847561e.js +0 -1
  82. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-efbbfe00.js +0 -1
  83. {rasa_pro-3.13.0.dev8.dist-info → rasa_pro-3.13.0.dev9.dist-info}/NOTICE +0 -0
  84. {rasa_pro-3.13.0.dev8.dist-info → rasa_pro-3.13.0.dev9.dist-info}/WHEEL +0 -0
  85. {rasa_pro-3.13.0.dev8.dist-info → rasa_pro-3.13.0.dev9.dist-info}/entry_points.txt +0 -0
rasa/core/information_retrieval/ingestion/faq_parser.py ADDED
@@ -0,0 +1,158 @@
+"""Utilities for parsing FAQ-style documents (Q/A pairs) used in extractive search."""
+
+import re
+from collections import defaultdict
+from typing import TYPE_CHECKING, List
+
+import structlog
+
+from rasa.shared.constants import (
+    DOCUMENT_TYPE_FAQ,
+    FAQ_DOCUMENT_ENTRY_SEPARATOR,
+    FAQ_DOCUMENT_LINE_SEPARATOR,
+    FAQ_DOCUMENT_METADATA_ANSWER,
+    FAQ_DOCUMENT_METADATA_TITLE,
+    FAQ_DOCUMENT_METADATA_TYPE,
+    FAQ_INPUT_DATA_ANSWER_LINE_PREFIX,
+    FAQ_INPUT_DATA_QUESTION_LINE_PREFIX,
+)
+
+if TYPE_CHECKING:
+    from langchain.schema import Document
+
+_FAQ_PAIR_PATTERN = re.compile(
+    rf"{re.escape(FAQ_INPUT_DATA_QUESTION_LINE_PREFIX)}\s*"
+    rf"(?P<question>.*?)\s*{FAQ_DOCUMENT_LINE_SEPARATOR}\s*"
+    rf"{re.escape(FAQ_INPUT_DATA_ANSWER_LINE_PREFIX)}\s*"
+    rf"(?P<answer>.*)",
+    re.DOTALL,
+)
+
+
+structlogger = structlog.get_logger()
+
+
+def _format_faq_documents(documents: List["Document"]) -> List["Document"]:
+    """Splits each loaded file into individual FAQs.
+
+    Args:
+        documents: Documents representing whole files containing FAQs.
+
+    Returns:
+        List of Document objects, each containing a separate FAQ.
+
+    Examples:
+        An example of a file containing FAQs:
+
+        Q: Who is Finley?
+        A: Finley is your smart assistant for the FinX App. You can add him to your
+        favorite messenger and tell him what you need help with.
+
+        Q: How does Finley work?
+        A: Finley is powered by the latest chatbot technology leveraging a unique
+        interplay of large language models and secure logic.
+
+    More details in documentation: https://rasa.com/docs/reference/config/policies/extractive-search/
+    """
+    structured_faqs = []
+    from langchain.schema import Document
+
+    for document in documents:
+        chunks = document.page_content.strip().split(FAQ_DOCUMENT_ENTRY_SEPARATOR)
+
+        for chunk in chunks:
+            match = _FAQ_PAIR_PATTERN.match(chunk.strip())
+
+            if not match:
+                structlogger.warning(
+                    "faq_parser.format_faq_documents.invalid_chunk_skipped",
+                    event_info=(
+                        "Chunk does not match expected QA format. "
+                        "Please refer to the documentation: "
+                        "https://rasa.com/docs/reference/config/"
+                        "policies/extractive-search/"
+                    ),
+                    chunk_preview=chunk[:100],
+                )
+                continue
+
+            question = match.group("question").strip()
+            answer = match.group("answer").strip()
+            title = _sanitize_title(question)
+
+            formatted_document = Document(
+                page_content=question,
+                metadata={
+                    FAQ_DOCUMENT_METADATA_TITLE: title,
+                    FAQ_DOCUMENT_METADATA_TYPE: DOCUMENT_TYPE_FAQ,
+                    FAQ_DOCUMENT_METADATA_ANSWER: answer,
+                },
+            )
+
+            structured_faqs.append(formatted_document)
+
+            structlogger.debug(
+                "faq_parser.format_faq_documents.parsed_chunk",
+                event_info="Parsed chunk.",
+                title=title,
+                question=question,
+                answer=answer,
+                parsed_chunk_preview=chunk[:100],
+            )
+
+    structlogger.debug(
+        "faq_parser.format_faq_documents.parsed_chunks",
+        event_info=(
+            f"Retrieved {len(structured_faqs)} FAQ pair(s)"
+            f"from {len(documents)} document(s)."
+        ),
+        num_structured_faqs=len(structured_faqs),
+        num_documents=len(documents),
+    )
+    _check_and_parsed_faq_documents_for_duplicates(structured_faqs)
+    return structured_faqs
+
+
+def _sanitize_title(title: str) -> str:
+    title = title.lower()
+    # Remove all whitespaces with "_"
+    title = re.sub(r"\s+", "_", title)
+    # Remove all non alpha-numeric characters
+    title = re.sub(r"[^\w]", "", title)
+    # Collapse multiple "_"
+    title = re.sub(r"_+", "_", title)
+    # Clean up edges
+    return title.strip("_")
+
+
+def _check_and_parsed_faq_documents_for_duplicates(documents: List["Document"]) -> None:
+    seen_qa_pairs = set()
+    seen_questions: defaultdict = defaultdict(list)
+
+    for doc in documents:
+        question = doc.page_content.strip()
+        answer = doc.metadata.get(FAQ_DOCUMENT_METADATA_ANSWER, "").strip()
+
+        if not question or not answer:
+            continue
+
+        if (question, answer) in seen_qa_pairs:
+            structlogger.warning(
+                "faq_parser.duplicate_qa_pair_found",
+                event_info="Duplicate QA pair found.",
+                question=question,
+                answer_preview=answer,
+            )
+            continue
+
+        if question in seen_questions and seen_questions[question] != answer:
+            structlogger.warning(
+                "faq_parser.inconsistent_answer",
+                event_info="Duplicate question with different answer found.",
+                question=question,
+                previous_answers=seen_questions[question],
+                new_answer=answer,
+            )
+
+        seen_qa_pairs.add((question, answer))
+        seen_questions[question].append(answer)
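
For illustration (not part of the diff), here is a minimal usage sketch of the new parser, assuming rasa-pro 3.13.0.dev9 and langchain are installed. The sample FAQ text is hypothetical; the import path, function name, and metadata keys come from the file added above.

from langchain.schema import Document

from rasa.core.information_retrieval.ingestion.faq_parser import _format_faq_documents

raw_file = Document(
    page_content=(
        "Q: Who is Finley?\n"
        "A: Finley is your smart assistant for the FinX App.\n"
        "\n"
        "Q: How does Finley work?\n"
        "A: Finley is powered by the latest chatbot technology."
    )
)

for faq in _format_faq_documents([raw_file]):
    # page_content holds the question; the answer, a sanitized title, and the
    # document type ("faq") are stored in the metadata.
    print(faq.page_content, "->", faq.metadata["answer"], faq.metadata["title"])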
rasa/core/nlg/contextual_response_rephraser.py CHANGED
@@ -17,6 +17,7 @@ from rasa.shared.constants import (
     MODEL_NAME_CONFIG_KEY,
     OPENAI_PROVIDER,
     PROMPT_CONFIG_KEY,
+    PROMPT_TEMPLATE_CONFIG_KEY,
     PROVIDER_CONFIG_KEY,
     TEMPERATURE_CONFIG_KEY,
     TIMEOUT_CONFIG_KEY,
@@ -38,6 +39,7 @@ from rasa.shared.utils.llm import (
     DEFAULT_OPENAI_GENERATE_MODEL_NAME,
     DEFAULT_OPENAI_MAX_GENERATED_TOKENS,
     USER,
+    check_prompt_config_keys_and_warn_if_deprecated,
     combine_custom_and_default_config,
     get_prompt_template,
     llm_factory,
@@ -110,8 +112,15 @@ class ContextualResponseRephraser(
         super().__init__(domain.responses)

         self.nlg_endpoint = endpoint_config
+
+        # Warn if the prompt config key is used to set the prompt template
+        check_prompt_config_keys_and_warn_if_deprecated(
+            self.nlg_endpoint.kwargs, "contextual_response_rephraser"
+        )
+
         self.prompt_template = get_prompt_template(
-            self.nlg_endpoint.kwargs.get(PROMPT_CONFIG_KEY),
+            self.nlg_endpoint.kwargs.get(PROMPT_TEMPLATE_CONFIG_KEY)
+            or self.nlg_endpoint.kwargs.get(PROMPT_CONFIG_KEY),
             DEFAULT_RESPONSE_VARIATION_PROMPT_TEMPLATE,
             log_source_component=ContextualResponseRephraser.__name__,
             log_source_method=LOG_COMPONENT_SOURCE_METHOD_INIT,
rasa/core/policies/enterprise_search_policy.py CHANGED
@@ -95,6 +95,7 @@ from rasa.shared.utils.io import deep_container_fingerprint
 from rasa.shared.utils.llm import (
     DEFAULT_OPENAI_CHAT_MODEL_NAME,
     DEFAULT_OPENAI_EMBEDDING_MODEL_NAME,
+    check_prompt_config_keys_and_warn_if_deprecated,
     embedder_factory,
     get_prompt_template,
     llm_factory,
@@ -246,7 +247,9 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
         super().__init__(config, model_storage, resource, execution_context, featurizer)

         # Check for deprecated keys and issue a warning if those are used
-        self._check_config_keys_and_warn_if_deprecated()
+        check_prompt_config_keys_and_warn_if_deprecated(
+            config, "enterprise_search_policy"
+        )
         # Check for mutual exclusivity of extractive and generative search
         self._check_and_warn_mutual_exclusivity_of_extractive_and_generative_search()

@@ -307,34 +310,6 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
             self.config, LOG_COMPONENT_SOURCE_METHOD_INIT
         )

-    def _check_config_keys_and_warn_if_deprecated(self) -> None:
-        """Checks and warns about deprecated config parameters."""
-        if (
-            PROMPT_CONFIG_KEY in self.config
-            and PROMPT_TEMPLATE_CONFIG_KEY in self.config
-        ):
-            structlogger.warning(
-                "enterprise_search_policy.init"
-                ".both_deprecated_and_non_deprecated_config_keys_used_at_the_same_time",
-                event_info=(
-                    f"Both '{PROMPT_CONFIG_KEY}' and '{PROMPT_TEMPLATE_CONFIG_KEY}' "
-                    f"are present in the config. '{PROMPT_CONFIG_KEY}' will be ignored "
-                    f"in favor of {PROMPT_TEMPLATE_CONFIG_KEY}."
-                ),
-            )
-
-        # 'prompt' config key is deprecated in favor of 'prompt_template'
-        if PROMPT_CONFIG_KEY in self.config:
-            structlogger.warning(
-                "enterprise_search_policy.init.deprecated_config_key",
-                event_info=(
-                    f"The config parameter '{PROMPT_CONFIG_KEY}' is deprecated "
-                    "and will be removed in Rasa 4.0.0. "
-                    f"Please use the config parameter '{PROMPT_TEMPLATE_CONFIG_KEY}'"
-                    f"instead. "
-                ),
-            )
-
     def _check_and_warn_mutual_exclusivity_of_extractive_and_generative_search(
         self,
     ) -> None:
@@ -464,6 +439,7 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
                 embeddings=embeddings,
                 index_path=path,
                 create_index=True,
+                parse_as_faq_pairs=not self.use_llm,
             )
         else:
             structlogger.info(
@@ -917,6 +893,9 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
                 index_path=path,
                 docs_folder=None,
                 create_index=False,
+                parse_as_faq_pairs=not config.get(
+                    USE_LLM_PROPERTY, DEFAULT_USE_LLM_PROPERTY
+                ),
             )
         else:
             vector_store = create_from_endpoint_config(
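
Several components in this release delegate their deprecation warning to the new shared helper check_prompt_config_keys_and_warn_if_deprecated, added to rasa/shared/utils/llm.py (+30 lines, body not shown in this diff). Below is a plausible sketch of its behavior, reconstructed from the per-policy method it replaces; the parameter name and log event names are assumptions, and the actual implementation may differ.

from typing import Any, Dict

import structlog

from rasa.shared.constants import PROMPT_CONFIG_KEY, PROMPT_TEMPLATE_CONFIG_KEY

structlogger = structlog.get_logger()


def check_prompt_config_keys_and_warn_if_deprecated(
    config: Dict[str, Any], log_source_component: str
) -> None:
    """Warns if the deprecated 'prompt' key is used instead of 'prompt_template'."""
    if PROMPT_CONFIG_KEY in config and PROMPT_TEMPLATE_CONFIG_KEY in config:
        structlogger.warning(
            # Event name is an assumption; the real helper may log differently.
            f"{log_source_component}.init.both_prompt_config_keys_used",
            event_info=(
                f"Both '{PROMPT_CONFIG_KEY}' and '{PROMPT_TEMPLATE_CONFIG_KEY}' are "
                f"present in the config. '{PROMPT_CONFIG_KEY}' will be ignored in "
                f"favor of '{PROMPT_TEMPLATE_CONFIG_KEY}'."
            ),
        )
    if PROMPT_CONFIG_KEY in config:
        structlogger.warning(
            f"{log_source_component}.init.deprecated_config_key",
            event_info=(
                f"The config parameter '{PROMPT_CONFIG_KEY}' is deprecated and will "
                f"be removed in Rasa 4.0.0. Please use the config parameter "
                f"'{PROMPT_TEMPLATE_CONFIG_KEY}' instead."
            ),
        )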
rasa/core/policies/intentless_policy.py CHANGED
@@ -38,6 +38,7 @@ from rasa.shared.constants import (
     MODEL_NAME_CONFIG_KEY,
     OPENAI_PROVIDER,
     PROMPT_CONFIG_KEY,
+    PROMPT_TEMPLATE_CONFIG_KEY,
     PROVIDER_CONFIG_KEY,
     TEMPERATURE_CONFIG_KEY,
     TIMEOUT_CONFIG_KEY,
@@ -56,7 +57,10 @@ from rasa.shared.providers.embedding._langchain_embedding_client_adapter import
     _LangchainEmbeddingClientAdapter,
 )
 from rasa.shared.providers.llm.llm_client import LLMClient
-from rasa.shared.utils.constants import LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON
+from rasa.shared.utils.constants import (
+    LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
+    LOG_COMPONENT_SOURCE_METHOD_INIT,
+)
 from rasa.shared.utils.health_check.embeddings_health_check_mixin import (
     EmbeddingsHealthCheckMixin,
 )
@@ -68,6 +72,7 @@ from rasa.shared.utils.llm import (
     DEFAULT_OPENAI_EMBEDDING_MODEL_NAME,
     DEFAULT_OPENAI_MAX_GENERATED_TOKENS,
     USER,
+    check_prompt_config_keys_and_warn_if_deprecated,
     combine_custom_and_default_config,
     embedder_factory,
     get_prompt_template,
@@ -119,9 +124,12 @@ DEFAULT_EMBEDDINGS_CONFIG = {
     MODEL_CONFIG_KEY: DEFAULT_OPENAI_EMBEDDING_MODEL_NAME,
 }

-DEFAULT_INTENTLESS_PROMPT_TEMPLATE = importlib.resources.open_text(
+DEFAULT_INTENTLESS_PROMPT_TEMPLATE_FILE_NAME = importlib.resources.open_text(
     "rasa.core.policies", "intentless_prompt_template.jinja2"
 ).name
+DEFAULT_INTENTLESS_PROMPT_TEMPLATE = importlib.resources.read_text(
+    "rasa.core.policies", "intentless_prompt_template.jinja2"
+)

 INTENTLESS_PROMPT_TEMPLATE_FILE_NAME = "intentless_policy_prompt.jinja2"
 INTENTLESS_CONFIG_FILE_NAME = "config.json"
@@ -345,7 +353,7 @@ class IntentlessPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Policy):
             # ensures that the policy will not override a deterministic policy
             # which utilizes the nlu predictions confidence (e.g. Memoization).
             NLU_ABSTENTION_THRESHOLD: 0.9,
-            PROMPT_CONFIG_KEY: DEFAULT_INTENTLESS_PROMPT_TEMPLATE,
+            PROMPT_TEMPLATE_CONFIG_KEY: None,  # TODO: remove in Rasa 4.0.0
         }

     @staticmethod
@@ -402,11 +410,43 @@ class IntentlessPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Policy):
         self.response_index = responses_docsearch
         self.conversation_samples_index = samples_docsearch
         self.embedder = self._create_plain_embedder(config)
-        self.prompt_template = prompt_template or rasa.shared.utils.io.read_file(
-            self.config[PROMPT_CONFIG_KEY]
+
+        # Warn if the prompt config key is used to set the prompt template
+        check_prompt_config_keys_and_warn_if_deprecated(config, "intentless_policy")
+
+        self.prompt_template = prompt_template or self._resolve_prompt_template(
+            config, LOG_COMPONENT_SOURCE_METHOD_INIT
         )
         self.trace_prompt_tokens = self.config.get("trace_prompt_tokens", False)

+    @classmethod
+    def _resolve_prompt_template(
+        cls: Any,
+        config: dict,
+        log_source_method: str,
+    ) -> str:
+        """Resolves the prompt template from the config.
+
+        Args:
+            config: The config to resolve the prompt template from.
+            log_source_method: The method from which the prompt template is resolved.
+
+        Returns:
+            The resolved prompt template.
+        """
+        # Prefer prompt template over prompt config key.
+        prompt_template_file = (
+            config.get(PROMPT_TEMPLATE_CONFIG_KEY)
+            or config.get(PROMPT_CONFIG_KEY)
+            or DEFAULT_INTENTLESS_PROMPT_TEMPLATE_FILE_NAME
+        )
+        return get_prompt_template(
+            prompt_template_file,
+            DEFAULT_INTENTLESS_PROMPT_TEMPLATE,
+            log_source_component=IntentlessPolicy.__name__,
+            log_source_method=log_source_method,
+        )
+
     @classmethod
     def _create_plain_embedder(cls, config: Dict[Text, Any]) -> Embeddings:
         """Creates an embedder that uses the OpenAI API.
@@ -945,11 +985,8 @@ class IntentlessPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Policy):
     @classmethod
     def fingerprint_addon(cls, config: Dict[str, Any]) -> Optional[str]:
         """Add a fingerprint of intentless policy for the graph."""
-        prompt_template = get_prompt_template(
-            config.get(PROMPT_CONFIG_KEY),
-            DEFAULT_INTENTLESS_PROMPT_TEMPLATE,
-            log_source_component=IntentlessPolicy.__name__,
-            log_source_method=LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
+        prompt_template = cls._resolve_prompt_template(
+            config, LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON
         )

         llm_config = resolve_model_client_config(
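
The intentless policy's default prompt constant is now split in two: open_text(...).name yields a filesystem path to the packaged template, which _resolve_prompt_template uses as the template source when neither 'prompt_template' nor 'prompt' is configured, while read_text(...) yields the template contents, passed to get_prompt_template (presumably as the fallback default). A small illustrative sketch of that distinction, assuming rasa-pro is installed:

import importlib.resources

# Path to the packaged template file (what open_text(...).name yields); the policy
# falls back to this path when the config sets neither prompt key.
template_path = importlib.resources.open_text(
    "rasa.core.policies", "intentless_prompt_template.jinja2"
).name

# The template contents (what read_text(...) yields); presumably used by
# get_prompt_template as the default if the configured template file cannot be read.
template_text = importlib.resources.read_text(
    "rasa.core.policies", "intentless_prompt_template.jinja2"
)

print(template_path)
print(template_text[:80])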
rasa/dialogue_understanding/coexistence/llm_based_router.py CHANGED
@@ -15,9 +15,7 @@ from rasa.dialogue_understanding.coexistence.constants import (
 )
 from rasa.dialogue_understanding.commands import Command, SetSlotCommand
 from rasa.dialogue_understanding.commands.noop_command import NoopCommand
-from rasa.dialogue_understanding.generator.constants import (
-    LLM_CONFIG_KEY,
-)
+from rasa.dialogue_understanding.generator.constants import LLM_CONFIG_KEY
 from rasa.engine.graph import ExecutionContext, GraphComponent
 from rasa.engine.recipes.default_recipe import DefaultV1Recipe
 from rasa.engine.storage.resource import Resource
@@ -28,6 +26,7 @@ from rasa.shared.constants import (
     MODEL_CONFIG_KEY,
     OPENAI_PROVIDER,
     PROMPT_CONFIG_KEY,
+    PROMPT_TEMPLATE_CONFIG_KEY,
     PROVIDER_CONFIG_KEY,
     ROUTE_TO_CALM_SLOT,
     TEMPERATURE_CONFIG_KEY,
@@ -46,6 +45,7 @@ from rasa.shared.utils.health_check.llm_health_check_mixin import LLMHealthCheck
 from rasa.shared.utils.io import deep_container_fingerprint
 from rasa.shared.utils.llm import (
     DEFAULT_OPENAI_CHAT_MODEL_NAME,
+    check_prompt_config_keys_and_warn_if_deprecated,
     get_prompt_template,
     llm_factory,
     resolve_model_client_config,
@@ -90,7 +90,7 @@ class LLMBasedRouter(LLMHealthCheckMixin, GraphComponent):
     def get_default_config() -> Dict[str, Any]:
         """The component's default config (see parent class for full docstring)."""
         return {
-            PROMPT_CONFIG_KEY: None,
+            PROMPT_TEMPLATE_CONFIG_KEY: None,  # TODO: remove in Rasa 4.0.0
             CALM_ENTRY: {STICKY: None},
             NLU_ENTRY: {
                 NON_STICKY: "handles chitchat",
@@ -111,10 +111,13 @@ class LLMBasedRouter(LLMHealthCheckMixin, GraphComponent):
             self.config.get(LLM_CONFIG_KEY), LLMBasedRouter.__name__
         )

+        # Warn if the prompt config key is used to set the prompt template
+        check_prompt_config_keys_and_warn_if_deprecated(config, "llm_based_router")
+
         self.prompt_template = (
             prompt_template
             or get_prompt_template(
-                config.get(PROMPT_CONFIG_KEY),
+                config.get(PROMPT_TEMPLATE_CONFIG_KEY) or config.get(PROMPT_CONFIG_KEY),
                 DEFAULT_COMMAND_PROMPT_TEMPLATE,
                 log_source_component=LLMBasedRouter.__name__,
                 log_source_method=LOG_COMPONENT_SOURCE_METHOD_INIT,
@@ -327,7 +330,7 @@ class LLMBasedRouter(LLMHealthCheckMixin, GraphComponent):
     def fingerprint_addon(cls, config: Dict[str, Any]) -> Optional[str]:
         """Add a fingerprint of llm based router for the graph."""
         prompt_template = get_prompt_template(
-            config.get(PROMPT_CONFIG_KEY),
+            config.get(PROMPT_TEMPLATE_CONFIG_KEY) or config.get(PROMPT_CONFIG_KEY),
             DEFAULT_COMMAND_PROMPT_TEMPLATE,
             log_source_component=LLMBasedRouter.__name__,
             log_source_method=LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
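
Across the router and the command generators in this release, the lookup is now config.get('prompt_template') or config.get('prompt'), so the new key takes precedence whenever both are set. A tiny sketch of that precedence with hypothetical values; the helper name below is just for illustration:

from typing import Any, Dict, Optional


def resolve_prompt_source(config: Dict[str, Any]) -> Optional[str]:
    # Mirrors the expression used in the diff: 'prompt_template' takes precedence;
    # 'prompt' remains only as a deprecated fallback until Rasa 4.0.0.
    return config.get("prompt_template") or config.get("prompt")


assert resolve_prompt_source({"prompt": "legacy.jinja2"}) == "legacy.jinja2"
assert (
    resolve_prompt_source({"prompt": "legacy.jinja2", "prompt_template": "new.jinja2"})
    == "new.jinja2"
)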
rasa/dialogue_understanding/commands/cancel_flow_command.py CHANGED
@@ -95,7 +95,9 @@ class CancelFlowCommand(Command):
         original_stack = original_tracker.stack

         applied_events: List[Event] = []
-        user_frame = top_user_flow_frame(original_stack)
+        user_frame = top_user_flow_frame(
+            original_stack, ignore_call_and_link_frames=False
+        )
         current_flow = user_frame.flow(all_flows) if user_frame else None

         if not current_flow:
rasa/dialogue_understanding/commands/correct_slots_command.py CHANGED
@@ -274,9 +274,7 @@ class CorrectSlotsCommand(Command):
             # we shouldn't end up here as a correction shouldn't be triggered
             # if we are not in any flow. but just in case we do, we
             # just skip the command.
-            structlogger.warning(
-                "command_executor.correct_slots.no_active_flow", command=self
-            )
+            structlogger.warning("command_executor.correct_slots.no_active_flow")
            return []

         structlogger.debug("command_executor.correct_slots", command=self)
rasa/dialogue_understanding/generator/nlu_command_adapter.py CHANGED
@@ -190,9 +190,9 @@ class NLUCommandAdapter(GraphComponent, CommandGenerator):
         if len(commands) > 1:
             structlogger.warning(
                 "nlu_command_adapter.predict_commands",
-                messag=f"Two many flows found that are triggered by the "
+                message=f"Too many flows found that are triggered by the "
                 f"intent '{message.get(INTENT)['name']}'. Take the first one.",
-                commands=commands,
+                commands=[command.__class__.__name__ for command in commands],
             )
             commands = [commands[0]]

rasa/dialogue_understanding/generator/single_step/single_step_based_llm_command_generator.py CHANGED
@@ -113,7 +113,7 @@ class SingleStepBasedLLMCommandGenerator(LLMBasedCommandGenerator, ABC):
     def get_default_config() -> Dict[str, Any]:
         """The component's default config (see parent class for full docstring)."""
         return {
-            PROMPT_TEMPLATE_CONFIG_KEY: None,
+            PROMPT_TEMPLATE_CONFIG_KEY: None,  # TODO: remove in Rasa 4.0.0
             USER_INPUT_CONFIG_KEY: None,
             LLM_CONFIG_KEY: None,
             FLOW_RETRIEVAL_KEY: FlowRetrieval.get_default_config(),
@@ -212,7 +212,7 @@ class SingleStepBasedLLMCommandGenerator(LLMBasedCommandGenerator, ABC):
                 event_info=(
                     "ProviderClientAPIException occurred while predicting commands."
                 ),
-                commands=commands,
+                commands=commands,  # no PII
             )

         if not commands and not prior_commands:
rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py CHANGED
@@ -16,7 +16,10 @@ from rasa.shared.constants import (
     PROMPT_CONFIG_KEY,
     PROMPT_TEMPLATE_CONFIG_KEY,
 )
-from rasa.shared.utils.llm import get_prompt_template
+from rasa.shared.utils.llm import (
+    check_prompt_config_keys_and_warn_if_deprecated,
+    get_prompt_template,
+)

 DEFAULT_COMMAND_PROMPT_TEMPLATE = importlib.resources.read_text(
     "rasa.dialogue_understanding.generator.prompt_templates",
@@ -52,16 +55,10 @@ class SingleStepLLMCommandGenerator(SingleStepBasedLLMCommandGenerator):
             **kwargs,
         )

-        # Set the prompt template
-        if config.get(PROMPT_CONFIG_KEY):
-            structlogger.warning(
-                "single_step_llm_command_generator.init",
-                event_info=(
-                    "The config parameter 'prompt' is deprecated "
-                    "and will be removed in Rasa 4.0.0. "
-                    "Please use the config parameter 'prompt_template' instead. "
-                ),
-            )
+        # Warn if the prompt config key is used to set the prompt template
+        check_prompt_config_keys_and_warn_if_deprecated(
+            config, "single_step_llm_command_generator"
+        )

     @staticmethod
     def get_component_command_syntax_version() -> CommandSyntaxVersion:
rasa/dialogue_understanding/patterns/cancel.py CHANGED
@@ -90,7 +90,7 @@ class ActionCancelFlow(action.Action):
             return []

         if not isinstance(top, CancelPatternFlowStackFrame):
-            structlogger.warning("action.cancel_flow.no_cancel_frame", top=top)
+            structlogger.warning("action.cancel_flow.no_cancel_frame")
             return []

         for canceled_frame_id in top.canceled_frames:
@@ -105,7 +105,6 @@ class ActionCancelFlow(action.Action):
             else:
                 structlogger.warning(
                     "action.cancel_flow.frame_not_found",
-                    dialogue_stack=stack,
                     frame_id=canceled_frame_id,
                 )

rasa/dialogue_understanding/patterns/clarify.py CHANGED
@@ -89,7 +89,7 @@ class ActionClarifyFlows(action.Action):
             return []

         if not isinstance(top, ClarifyPatternFlowStackFrame):
-            structlogger.warning("action.clarify_flows.no_clarification_frame", top=top)
+            structlogger.warning("action.clarify_flows.no_clarification_frame")
             return []

         options_string = self.assemble_options_string(top.names)
rasa/dialogue_understanding/patterns/correction.py CHANGED
@@ -114,7 +114,8 @@ class ActionCorrectFlowSlot(action.Action):

         if not isinstance(top, CorrectionPatternFlowStackFrame):
             structlogger.warning(
-                "action.correct_flow_slot.no_correction_frame", top=top
+                "action.correct_flow_slot.no_correction_frame",
+                top=top,  # no PII
             )
             return []

@@ -246,7 +247,6 @@ def reset_stack_on_tracker_to_prior_state(
             "action.correct_flow_slot.no_target_frame_found",
             reset_step_id=reset_step_id,
             reset_flow_id=reset_flow_id,
-            stack_to_reset_to=stack_to_reset_to,
         )
         return tracker.stack

rasa/dialogue_understanding/processor/command_processor.py CHANGED
@@ -348,7 +348,6 @@ def get_current_collect_step(
         # but no flow that triggered it. this should never happen.
         structlogger.warning(
             "command_processor.get_current_collect_step.no_flow_on_stack",
-            stack=dialogue_stack,
         )
         return None

@@ -358,7 +357,7 @@ def get_current_collect_step(
         # step from it
         structlogger.warning(
             "command_processor.get_current_collect_step.no_step_for_frame",
-            frame=frame_that_triggered_collect_infos,
+            frame=frame_that_triggered_collect_infos.frame_id,
         )
         return None

@@ -724,7 +723,7 @@ def clean_up_chitchat_command(
         )
         structlogger.warn(
             "command_processor.clean_up_chitchat_command.pattern_chitchat_not_found",
-            command=resulting_commands[0],
+            command=resulting_commands[0],  # no PII
         )
         return resulting_commands

@@ -742,7 +741,7 @@ def clean_up_chitchat_command(
         )
         structlogger.warn(
             "command_processor.clean_up_chitchat_command.replace_chitchat_answer_with_cannot_handle",
-            command=resulting_commands[0],
+            command=resulting_commands[0],  # no PII
             pattern_chitchat_uses_action_trigger_chitchat=has_action_trigger_chitchat,
             defined_intentless_policy_in_config=defines_intentless_policy,
         )
rasa/dialogue_understanding/stack/utils.py CHANGED
@@ -209,7 +209,9 @@ def get_collect_steps_excluding_ask_before_filling_for_active_flow(
         All collect steps that are part of the current active flow,
         excluding the collect steps that have to be asked before filling.
     """
-    active_frame = top_user_flow_frame(dialogue_stack)
+    active_frame = top_user_flow_frame(
+        dialogue_stack, ignore_call_and_link_frames=False
+    )
     if active_frame is None:
         return set()
     active_flow = active_frame.flow(all_flows)
rasa/engine/graph.py CHANGED
@@ -500,9 +500,9 @@ class GraphNode:
                 structlogger.warning(
                     "graph.node.input_not_resolved",
                     node_name=self._node_name,
-                    input_name=i,
+                    input_name=i,  # no PII
                     event_info=(
-                        "Node input was not resolved, there is no putput. "
+                        "Node input was not resolved, there is no output. "
                         "Another component should have provided this as an output."
                     ),
                 )
rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py CHANGED
@@ -22,11 +22,7 @@ from rasa.shared.constants import (
 from rasa.shared.exceptions import ProviderClientAPIException
 from rasa.shared.providers.mappings import OPENAI_PROVIDER
 from rasa.shared.utils.constants import LOG_COMPONENT_SOURCE_METHOD_INIT
-from rasa.shared.utils.llm import (
-    USER,
-    get_prompt_template,
-    llm_factory,
-)
+from rasa.shared.utils.llm import USER, get_prompt_template, llm_factory

 SEPARATOR = "\n\n"
 BACKUP_SEPARATOR = "\nUSER:"
rasa/shared/constants.py CHANGED
@@ -295,6 +295,7 @@ CONTEXT = "context"

 RASA_PATTERN_INTERNAL_ERROR = "pattern_internal_error"
 RASA_PATTERN_HUMAN_HANDOFF = "pattern_human_handoff"
+RASA_PATTERN_CHITCHAT = "pattern_chitchat"

 RASA_INTERNAL_ERROR_PREFIX = "rasa_internal_error_"
 RASA_PATTERN_INTERNAL_ERROR_DEFAULT = RASA_INTERNAL_ERROR_PREFIX + "default"
@@ -345,3 +346,13 @@ ROLE_SYSTEM = "system"
 # Used for key values in ValidateSlotPatternFlowStackFrame
 REFILL_UTTER = "refill_utter"
 REJECTIONS = "rejections"
+
+# Constants for extractive search FAQ parsing (QA pairs from input documents)
+FAQ_DOCUMENT_METADATA_TITLE = "title"
+FAQ_DOCUMENT_METADATA_ANSWER = "answer"
+FAQ_DOCUMENT_METADATA_TYPE = "type"
+DOCUMENT_TYPE_FAQ = "faq"
+FAQ_INPUT_DATA_QUESTION_LINE_PREFIX = "Q:"
+FAQ_INPUT_DATA_ANSWER_LINE_PREFIX = "A:"
+FAQ_DOCUMENT_ENTRY_SEPARATOR = "\n\n"
+FAQ_DOCUMENT_LINE_SEPARATOR = "\n"