rasa-pro 3.9.17__py3-none-any.whl → 3.10.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


Files changed (187)
  1. README.md +5 -37
  2. rasa/__init__.py +1 -2
  3. rasa/__main__.py +5 -0
  4. rasa/anonymization/anonymization_rule_executor.py +2 -2
  5. rasa/api.py +26 -22
  6. rasa/cli/arguments/data.py +27 -2
  7. rasa/cli/arguments/default_arguments.py +25 -3
  8. rasa/cli/arguments/run.py +9 -9
  9. rasa/cli/arguments/train.py +2 -0
  10. rasa/cli/data.py +70 -8
  11. rasa/cli/e2e_test.py +108 -433
  12. rasa/cli/interactive.py +1 -0
  13. rasa/cli/llm_fine_tuning.py +395 -0
  14. rasa/cli/project_templates/calm/endpoints.yml +1 -1
  15. rasa/cli/project_templates/tutorial/endpoints.yml +1 -1
  16. rasa/cli/run.py +14 -13
  17. rasa/cli/scaffold.py +10 -8
  18. rasa/cli/train.py +8 -7
  19. rasa/cli/utils.py +15 -0
  20. rasa/constants.py +7 -1
  21. rasa/core/actions/action.py +98 -49
  22. rasa/core/actions/action_run_slot_rejections.py +4 -1
  23. rasa/core/actions/custom_action_executor.py +9 -6
  24. rasa/core/actions/direct_custom_actions_executor.py +80 -0
  25. rasa/core/actions/e2e_stub_custom_action_executor.py +68 -0
  26. rasa/core/actions/grpc_custom_action_executor.py +2 -2
  27. rasa/core/actions/http_custom_action_executor.py +6 -5
  28. rasa/core/agent.py +21 -17
  29. rasa/core/channels/__init__.py +2 -0
  30. rasa/core/channels/audiocodes.py +1 -16
  31. rasa/core/channels/voice_aware/__init__.py +0 -0
  32. rasa/core/channels/voice_aware/jambonz.py +103 -0
  33. rasa/core/channels/voice_aware/jambonz_protocol.py +344 -0
  34. rasa/core/channels/voice_aware/utils.py +20 -0
  35. rasa/core/channels/voice_native/__init__.py +0 -0
  36. rasa/core/constants.py +6 -1
  37. rasa/core/featurizers/single_state_featurizer.py +1 -22
  38. rasa/core/featurizers/tracker_featurizers.py +18 -115
  39. rasa/core/information_retrieval/faiss.py +7 -4
  40. rasa/core/information_retrieval/information_retrieval.py +8 -0
  41. rasa/core/information_retrieval/milvus.py +9 -2
  42. rasa/core/information_retrieval/qdrant.py +1 -1
  43. rasa/core/nlg/contextual_response_rephraser.py +32 -10
  44. rasa/core/nlg/summarize.py +4 -3
  45. rasa/core/policies/enterprise_search_policy.py +100 -44
  46. rasa/core/policies/flows/flow_executor.py +155 -98
  47. rasa/core/policies/intentless_policy.py +52 -28
  48. rasa/core/policies/ted_policy.py +33 -58
  49. rasa/core/policies/unexpected_intent_policy.py +7 -15
  50. rasa/core/processor.py +15 -46
  51. rasa/core/run.py +5 -4
  52. rasa/core/tracker_store.py +8 -4
  53. rasa/core/utils.py +45 -56
  54. rasa/dialogue_understanding/coexistence/llm_based_router.py +45 -12
  55. rasa/dialogue_understanding/commands/__init__.py +4 -0
  56. rasa/dialogue_understanding/commands/change_flow_command.py +0 -6
  57. rasa/dialogue_understanding/commands/session_start_command.py +59 -0
  58. rasa/dialogue_understanding/commands/set_slot_command.py +1 -5
  59. rasa/dialogue_understanding/commands/utils.py +38 -0
  60. rasa/dialogue_understanding/generator/constants.py +10 -3
  61. rasa/dialogue_understanding/generator/flow_retrieval.py +14 -5
  62. rasa/dialogue_understanding/generator/llm_based_command_generator.py +12 -2
  63. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +106 -87
  64. rasa/dialogue_understanding/generator/nlu_command_adapter.py +28 -6
  65. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +90 -37
  66. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +15 -15
  67. rasa/dialogue_understanding/patterns/session_start.py +37 -0
  68. rasa/dialogue_understanding/processor/command_processor.py +13 -14
  69. rasa/e2e_test/aggregate_test_stats_calculator.py +124 -0
  70. rasa/e2e_test/assertions.py +1181 -0
  71. rasa/e2e_test/assertions_schema.yml +106 -0
  72. rasa/e2e_test/constants.py +20 -0
  73. rasa/e2e_test/e2e_config.py +220 -0
  74. rasa/e2e_test/e2e_config_schema.yml +26 -0
  75. rasa/e2e_test/e2e_test_case.py +131 -8
  76. rasa/e2e_test/e2e_test_converter.py +363 -0
  77. rasa/e2e_test/e2e_test_converter_prompt.jinja2 +70 -0
  78. rasa/e2e_test/e2e_test_coverage_report.py +364 -0
  79. rasa/e2e_test/e2e_test_result.py +26 -6
  80. rasa/e2e_test/e2e_test_runner.py +498 -73
  81. rasa/e2e_test/e2e_test_schema.yml +96 -0
  82. rasa/e2e_test/pykwalify_extensions.py +39 -0
  83. rasa/e2e_test/stub_custom_action.py +70 -0
  84. rasa/e2e_test/utils/__init__.py +0 -0
  85. rasa/e2e_test/utils/e2e_yaml_utils.py +55 -0
  86. rasa/e2e_test/utils/io.py +596 -0
  87. rasa/e2e_test/utils/validation.py +80 -0
  88. rasa/engine/recipes/default_components.py +0 -2
  89. rasa/engine/storage/local_model_storage.py +0 -1
  90. rasa/env.py +9 -0
  91. rasa/llm_fine_tuning/__init__.py +0 -0
  92. rasa/llm_fine_tuning/annotation_module.py +241 -0
  93. rasa/llm_fine_tuning/conversations.py +144 -0
  94. rasa/llm_fine_tuning/llm_data_preparation_module.py +178 -0
  95. rasa/llm_fine_tuning/notebooks/unsloth_finetuning.ipynb +407 -0
  96. rasa/llm_fine_tuning/paraphrasing/__init__.py +0 -0
  97. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +281 -0
  98. rasa/llm_fine_tuning/paraphrasing/default_rephrase_prompt_template.jina2 +44 -0
  99. rasa/llm_fine_tuning/paraphrasing/rephrase_validator.py +121 -0
  100. rasa/llm_fine_tuning/paraphrasing/rephrased_user_message.py +10 -0
  101. rasa/llm_fine_tuning/paraphrasing_module.py +128 -0
  102. rasa/llm_fine_tuning/storage.py +174 -0
  103. rasa/llm_fine_tuning/train_test_split_module.py +441 -0
  104. rasa/model_training.py +48 -16
  105. rasa/nlu/classifiers/diet_classifier.py +25 -38
  106. rasa/nlu/classifiers/logistic_regression_classifier.py +9 -44
  107. rasa/nlu/classifiers/sklearn_intent_classifier.py +16 -37
  108. rasa/nlu/extractors/crf_entity_extractor.py +50 -93
  109. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +45 -78
  110. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +17 -52
  111. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +3 -5
  112. rasa/nlu/persistor.py +129 -32
  113. rasa/server.py +45 -10
  114. rasa/shared/constants.py +63 -15
  115. rasa/shared/core/domain.py +15 -12
  116. rasa/shared/core/events.py +28 -2
  117. rasa/shared/core/flows/flow.py +208 -13
  118. rasa/shared/core/flows/flow_path.py +84 -0
  119. rasa/shared/core/flows/flows_list.py +28 -10
  120. rasa/shared/core/flows/flows_yaml_schema.json +269 -193
  121. rasa/shared/core/flows/validation.py +112 -25
  122. rasa/shared/core/flows/yaml_flows_io.py +149 -10
  123. rasa/shared/core/trackers.py +6 -0
  124. rasa/shared/core/training_data/visualization.html +2 -2
  125. rasa/shared/exceptions.py +4 -0
  126. rasa/shared/importers/importer.py +60 -11
  127. rasa/shared/importers/remote_importer.py +196 -0
  128. rasa/shared/nlu/constants.py +2 -0
  129. rasa/shared/nlu/training_data/features.py +2 -120
  130. rasa/shared/providers/_configs/__init__.py +0 -0
  131. rasa/shared/providers/_configs/azure_openai_client_config.py +181 -0
  132. rasa/shared/providers/_configs/client_config.py +57 -0
  133. rasa/shared/providers/_configs/default_litellm_client_config.py +130 -0
  134. rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py +234 -0
  135. rasa/shared/providers/_configs/openai_client_config.py +175 -0
  136. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +171 -0
  137. rasa/shared/providers/_configs/utils.py +101 -0
  138. rasa/shared/providers/_ssl_verification_utils.py +124 -0
  139. rasa/shared/providers/embedding/__init__.py +0 -0
  140. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +254 -0
  141. rasa/shared/providers/embedding/_langchain_embedding_client_adapter.py +74 -0
  142. rasa/shared/providers/embedding/azure_openai_embedding_client.py +277 -0
  143. rasa/shared/providers/embedding/default_litellm_embedding_client.py +102 -0
  144. rasa/shared/providers/embedding/embedding_client.py +90 -0
  145. rasa/shared/providers/embedding/embedding_response.py +41 -0
  146. rasa/shared/providers/embedding/huggingface_local_embedding_client.py +191 -0
  147. rasa/shared/providers/embedding/openai_embedding_client.py +172 -0
  148. rasa/shared/providers/llm/__init__.py +0 -0
  149. rasa/shared/providers/llm/_base_litellm_client.py +227 -0
  150. rasa/shared/providers/llm/azure_openai_llm_client.py +338 -0
  151. rasa/shared/providers/llm/default_litellm_llm_client.py +84 -0
  152. rasa/shared/providers/llm/llm_client.py +76 -0
  153. rasa/shared/providers/llm/llm_response.py +50 -0
  154. rasa/shared/providers/llm/openai_llm_client.py +155 -0
  155. rasa/shared/providers/llm/self_hosted_llm_client.py +169 -0
  156. rasa/shared/providers/mappings.py +75 -0
  157. rasa/shared/utils/cli.py +30 -0
  158. rasa/shared/utils/io.py +65 -3
  159. rasa/shared/utils/llm.py +223 -200
  160. rasa/shared/utils/yaml.py +122 -7
  161. rasa/studio/download.py +19 -13
  162. rasa/studio/train.py +2 -3
  163. rasa/studio/upload.py +2 -3
  164. rasa/telemetry.py +113 -58
  165. rasa/tracing/config.py +2 -3
  166. rasa/tracing/instrumentation/attribute_extractors.py +29 -17
  167. rasa/tracing/instrumentation/instrumentation.py +4 -47
  168. rasa/utils/common.py +18 -19
  169. rasa/utils/endpoints.py +7 -4
  170. rasa/utils/io.py +66 -0
  171. rasa/utils/json_utils.py +60 -0
  172. rasa/utils/licensing.py +9 -1
  173. rasa/utils/ml_utils.py +4 -2
  174. rasa/utils/tensorflow/model_data.py +193 -2
  175. rasa/validator.py +195 -1
  176. rasa/version.py +1 -1
  177. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/METADATA +25 -51
  178. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/RECORD +183 -119
  179. rasa/nlu/classifiers/llm_intent_classifier.py +0 -519
  180. rasa/shared/providers/openai/clients.py +0 -43
  181. rasa/shared/providers/openai/session_handler.py +0 -110
  182. rasa/utils/tensorflow/feature_array.py +0 -366
  183. /rasa/{shared/providers/openai → cli/project_templates/tutorial/actions}/__init__.py +0 -0
  184. /rasa/cli/project_templates/tutorial/{actions.py → actions/actions.py} +0 -0
  185. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/NOTICE +0 -0
  186. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/WHEEL +0 -0
  187. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/entry_points.txt +0 -0
rasa/core/featurizers/tracker_featurizers.py

@@ -1,9 +1,11 @@
 from __future__ import annotations
-
-import logging
-from abc import abstractmethod
-from collections import defaultdict
 from pathlib import Path
+from collections import defaultdict
+from abc import abstractmethod
+import jsonpickle
+import logging
+
+from tqdm import tqdm
 from typing import (
     Tuple,
     List,
@@ -16,30 +18,25 @@ from typing import (
     Set,
     DefaultDict,
     cast,
-    Type,
-    Callable,
-    ClassVar,
 )
-
 import numpy as np
-from tqdm import tqdm
 
+from rasa.core.featurizers.single_state_featurizer import SingleStateFeaturizer
+from rasa.core.featurizers.precomputation import MessageContainerForCoreFeaturization
+from rasa.core.exceptions import InvalidTrackerFeaturizerUsageError
 import rasa.shared.core.trackers
 import rasa.shared.utils.io
-from rasa.core.exceptions import InvalidTrackerFeaturizerUsageError
-from rasa.core.featurizers.precomputation import MessageContainerForCoreFeaturization
-from rasa.core.featurizers.single_state_featurizer import SingleStateFeaturizer
+from rasa.shared.nlu.constants import TEXT, INTENT, ENTITIES, ACTION_NAME
+from rasa.shared.nlu.training_data.features import Features
+from rasa.shared.core.trackers import DialogueStateTracker
+from rasa.shared.core.domain import State, Domain
+from rasa.shared.core.events import Event, ActionExecuted, UserUttered
 from rasa.shared.core.constants import (
     USER,
     ACTION_UNLIKELY_INTENT_NAME,
     PREVIOUS_ACTION,
 )
-from rasa.shared.core.domain import State, Domain
-from rasa.shared.core.events import Event, ActionExecuted, UserUttered
-from rasa.shared.core.trackers import DialogueStateTracker
 from rasa.shared.exceptions import RasaException
-from rasa.shared.nlu.constants import TEXT, INTENT, ENTITIES, ACTION_NAME
-from rasa.shared.nlu.training_data.features import Features
 from rasa.utils.tensorflow.constants import LABEL_PAD_ID
 from rasa.utils.tensorflow.model_data import ragged_array_to_ndarray
 
@@ -67,10 +64,6 @@ class InvalidStory(RasaException):
 class TrackerFeaturizer:
     """Base class for actual tracker featurizers."""
 
-    # Class registry to store all subclasses
-    _registry: ClassVar[Dict[str, Type["TrackerFeaturizer"]]] = {}
-    _featurizer_type: str = "TrackerFeaturizer"
-
     def __init__(
         self, state_featurizer: Optional[SingleStateFeaturizer] = None
     ) -> None:
@@ -81,36 +74,6 @@ class TrackerFeaturizer:
         """
         self.state_featurizer = state_featurizer
 
-    @classmethod
-    def register(cls, featurizer_type: str) -> Callable:
-        """Decorator to register featurizer subclasses."""
-
-        def wrapper(subclass: Type["TrackerFeaturizer"]) -> Type["TrackerFeaturizer"]:
-            cls._registry[featurizer_type] = subclass
-            # Store the type identifier in the class for serialization
-            subclass._featurizer_type = featurizer_type
-            return subclass
-
-        return wrapper
-
-    @classmethod
-    def from_dict(cls, data: Dict[str, Any]) -> "TrackerFeaturizer":
-        """Create featurizer instance from dictionary."""
-        featurizer_type = data.pop("type")
-
-        if featurizer_type not in cls._registry:
-            raise ValueError(f"Unknown featurizer type: {featurizer_type}")
-
-        # Get the correct subclass and instantiate it
-        subclass = cls._registry[featurizer_type]
-        return subclass.create_from_dict(data)
-
-    @classmethod
-    @abstractmethod
-    def create_from_dict(cls, data: Dict[str, Any]) -> "TrackerFeaturizer":
-        """Each subclass must implement its own creation from dict method."""
-        pass
-
     @staticmethod
     def _create_states(
         tracker: DialogueStateTracker,
@@ -502,7 +465,9 @@ class TrackerFeaturizer:
             self.state_featurizer.entity_tag_specs = []
 
         # noinspection PyTypeChecker
-        rasa.shared.utils.io.dump_obj_as_json_to_file(featurizer_file, self.to_dict())
+        rasa.shared.utils.io.write_text_file(
+            str(jsonpickle.encode(self)), featurizer_file
+        )
 
     @staticmethod
     def load(path: Union[Text, Path]) -> Optional[TrackerFeaturizer]:
@@ -516,17 +481,7 @@ class TrackerFeaturizer:
        """
        featurizer_file = Path(path) / FEATURIZER_FILE
        if featurizer_file.is_file():
-            data = rasa.shared.utils.io.read_json_file(featurizer_file)
-
-            if "type" not in data:
-                logger.error(
-                    f"Couldn't load featurizer for policy. "
-                    f"File '{featurizer_file}' does not contain all "
-                    f"necessary information. 'type' is missing."
-                )
-                return None
-
-            return TrackerFeaturizer.from_dict(data)
+            return jsonpickle.decode(rasa.shared.utils.io.read_file(featurizer_file))
 
        logger.error(
            f"Couldn't load featurizer for policy. "
@@ -553,16 +508,7 @@ class TrackerFeaturizer:
             )
         ]
 
-    def to_dict(self) -> Dict[str, Any]:
-        return {
-            "type": self.__class__._featurizer_type,
-            "state_featurizer": (
-                self.state_featurizer.to_dict() if self.state_featurizer else None
-            ),
-        }
-
 
-@TrackerFeaturizer.register("FullDialogueTrackerFeaturizer")
 class FullDialogueTrackerFeaturizer(TrackerFeaturizer):
     """Creates full dialogue training data for time distributed architectures.
 
@@ -700,20 +646,7 @@ class FullDialogueTrackerFeaturizer(TrackerFeaturizer):
 
         return trackers_as_states
 
-    def to_dict(self) -> Dict[str, Any]:
-        return super().to_dict()
 
-    @classmethod
-    def create_from_dict(cls, data: Dict[str, Any]) -> "FullDialogueTrackerFeaturizer":
-        state_featurizer = SingleStateFeaturizer.create_from_dict(
-            data["state_featurizer"]
-        )
-        return cls(
-            state_featurizer,
-        )
-
-
-@TrackerFeaturizer.register("MaxHistoryTrackerFeaturizer")
 class MaxHistoryTrackerFeaturizer(TrackerFeaturizer):
     """Truncates the tracker history into `max_history` long sequences.
 
@@ -951,25 +884,7 @@ class MaxHistoryTrackerFeaturizer(TrackerFeaturizer):
 
         return trackers_as_states
 
-    def to_dict(self) -> Dict[str, Any]:
-        data = super().to_dict()
-        data.update(
-            {
-                "remove_duplicates": self.remove_duplicates,
-                "max_history": self.max_history,
-            }
-        )
-        return data
-
-    @classmethod
-    def create_from_dict(cls, data: Dict[str, Any]) -> "MaxHistoryTrackerFeaturizer":
-        state_featurizer = SingleStateFeaturizer.create_from_dict(
-            data["state_featurizer"]
-        )
-        return cls(state_featurizer, data["max_history"], data["remove_duplicates"])
 
-
-@TrackerFeaturizer.register("IntentMaxHistoryTrackerFeaturizer")
 class IntentMaxHistoryTrackerFeaturizer(MaxHistoryTrackerFeaturizer):
     """Truncates the tracker history into `max_history` long sequences.
 
@@ -1244,18 +1159,6 @@ class IntentMaxHistoryTrackerFeaturizer(MaxHistoryTrackerFeaturizer):
 
         return trackers_as_states
 
-    def to_dict(self) -> Dict[str, Any]:
-        return super().to_dict()
-
-    @classmethod
-    def create_from_dict(
-        cls, data: Dict[str, Any]
-    ) -> "IntentMaxHistoryTrackerFeaturizer":
-        state_featurizer = SingleStateFeaturizer.create_from_dict(
-            data["state_featurizer"]
-        )
-        return cls(state_featurizer, data["max_history"], data["remove_duplicates"])
-
 
 def _is_prev_action_unlikely_intent_in_state(state: State) -> bool:
     prev_action_name = state.get(PREVIOUS_ACTION, {}).get(ACTION_NAME)
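The hunks above drop the hand-rolled type registry and the per-class to_dict/create_from_dict plumbing in favour of jsonpickle, which records the concrete class in the serialized payload and re-instantiates it on load. A minimal sketch of that round trip (a toy class used purely for illustration, not Rasa's featurizer API):

import jsonpickle


class ToyFeaturizer:
    """Stand-in class; jsonpickle stores its import path in the payload."""

    def __init__(self, max_history: int = 5, remove_duplicates: bool = True) -> None:
        self.max_history = max_history
        self.remove_duplicates = remove_duplicates


original = ToyFeaturizer(max_history=10)
payload = jsonpickle.encode(original)   # JSON string with a "py/object" marker for the class
restored = jsonpickle.decode(payload)   # rebuilds a ToyFeaturizer with the same attributes
assert isinstance(restored, ToyFeaturizer) and restored.max_history == 10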
rasa/core/information_retrieval/faiss.py

@@ -2,9 +2,10 @@ from pathlib import Path
 from typing import TYPE_CHECKING, List, Optional, Text, Any, Dict
 
 import structlog
-from langchain.document_loaders import DirectoryLoader, TextLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain.vectorstores.faiss import FAISS
+from langchain_community.document_loaders.text import TextLoader
+from langchain_community.document_loaders.directory import DirectoryLoader
+from langchain_community.vectorstores.faiss import FAISS
 from rasa.utils.endpoints import EndpointConfig
 
 from rasa.core.information_retrieval import (
@@ -46,7 +47,9 @@ class FAISS_Store(InformationRetrieval):
         logger.info(
             "information_retrieval.faiss_store.load_index", path=path.absolute()
         )
-        self.index = FAISS.load_local(str(path), embeddings)
+        self.index = FAISS.load_local(
+            str(path), embeddings, allow_dangerous_deserialization=True
+        )
 
     @staticmethod
     def load_documents(docs_folder: str) -> List["Document"]:
@@ -114,7 +117,7 @@ class FAISS_Store(InformationRetrieval):
     ) -> SearchResultList:
         logger.debug("information_retrieval.faiss_store.search", query=query)
         try:
-            documents = await self.index.as_retriever().aget_relevant_documents(query)
+            documents = await self.index.as_retriever().ainvoke(query)
         except Exception as exc:
             raise InformationRetrievalException from exc
 
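These hunks move the vector-store imports to langchain_community, pass allow_dangerous_deserialization=True when loading a pickled FAISS index, and switch the retriever to the Runnable-style ainvoke call. A minimal sketch of the new call pattern, assuming an existing index directory and a compatible embeddings object (both placeholders):

import asyncio

from langchain_community.vectorstores.faiss import FAISS


async def query_index(path: str, embeddings, query: str):
    # The flag is required in recent langchain versions because FAISS indexes are pickled on disk.
    index = FAISS.load_local(path, embeddings, allow_dangerous_deserialization=True)
    # `ainvoke` replaces the deprecated `aget_relevant_documents`.
    return await index.as_retriever().ainvoke(query)


# documents = asyncio.run(query_index("docs_index", my_embeddings, "reset my password"))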
rasa/core/information_retrieval/information_retrieval.py

@@ -19,6 +19,14 @@ logger = structlog.get_logger()
 
 @dataclass
 class SearchResult:
+    """A search result object.
+
+    Attributes:
+        text: The text content of the retrieved document result.
+        metadata: The metadata associated with the document result.
+        score: The score of the search result.
+    """
+
     text: str
     metadata: dict
     score: Optional[float] = None
rasa/core/information_retrieval/milvus.py

@@ -1,7 +1,7 @@
 from typing import Text, Any, Dict
 
 import structlog
-from langchain.vectorstores.milvus import Milvus
+from langchain_community.vectorstores.milvus import Milvus
 from rasa.utils.endpoints import EndpointConfig
 
 from rasa.core.information_retrieval import (
@@ -48,5 +48,12 @@ class Milvus_Store(InformationRetrieval):
         except Exception as exc:
             raise InformationRetrievalException from exc
 
-        filtered_hits = [doc for doc, score in hits if score >= threshold]
+        scores = [score for _, score in hits]
+        logger.debug(
+            "information_retrieval.milvus_store.search_results_before_threshold",
+            scores=scores,
+        )
+        # Milvus uses Euclidean distance metric by default
+        # so the lower the score, the better the match.
+        filtered_hits = [doc for doc, score in hits if score <= threshold]
         return SearchResultList.from_document_list(filtered_hits)
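The flipped comparison follows from the distance metric: with Milvus' default L2 (Euclidean) scores, a smaller value means a closer match, so hits must be kept when the score is at or below the threshold rather than above it. A toy illustration of the difference (plain tuples, no Milvus dependency):

hits = [("doc_a", 0.2), ("doc_b", 0.9), ("doc_c", 1.7)]  # (document, L2 distance)
threshold = 1.0

# old, similarity-style filter would have kept the *worst* matches:
kept_old = [doc for doc, score in hits if score >= threshold]   # ['doc_c']

# new, distance-style filter keeps the closest matches:
kept_new = [doc for doc, score in hits if score <= threshold]   # ['doc_a', 'doc_b']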
rasa/core/information_retrieval/qdrant.py

@@ -1,7 +1,7 @@
 from typing import Text, Any, Dict
 
 import structlog
-from langchain.vectorstores.qdrant import Qdrant
+from langchain_community.vectorstores.qdrant import Qdrant
 from pydantic import ValidationError
 from qdrant_client import QdrantClient
 from rasa.utils.endpoints import EndpointConfig
rasa/core/nlg/contextual_response_rephraser.py

@@ -5,6 +5,15 @@ from jinja2 import Template
 
 from rasa import telemetry
 from rasa.core.nlg.response import TemplatedNaturalLanguageGenerator
+from rasa.shared.constants import (
+    LLM_CONFIG_KEY,
+    MODEL_CONFIG_KEY,
+    MODEL_NAME_CONFIG_KEY,
+    PROMPT_CONFIG_KEY,
+    PROVIDER_CONFIG_KEY,
+    OPENAI_PROVIDER,
+    TIMEOUT_CONFIG_KEY,
+)
 from rasa.shared.core.domain import KEY_RESPONSES_TEXT, Domain
 from rasa.shared.core.events import BotUttered, UserUttered
 from rasa.shared.core.trackers import DialogueStateTracker
@@ -15,6 +24,7 @@ from rasa.shared.utils.llm import (
     combine_custom_and_default_config,
     get_prompt_template,
     llm_factory,
+    try_instantiate_llm_client,
 )
 from rasa.utils.endpoints import EndpointConfig
 
@@ -31,11 +41,11 @@ RESPONSE_REPHRASING_TEMPLATE_KEY = "rephrase_prompt"
 DEFAULT_REPHRASE_ALL = False
 
 DEFAULT_LLM_CONFIG = {
-    "_type": "openai",
-    "request_timeout": 5,
+    PROVIDER_CONFIG_KEY: OPENAI_PROVIDER,
+    MODEL_CONFIG_KEY: DEFAULT_OPENAI_GENERATE_MODEL_NAME,
     "temperature": 0.3,
-    "model_name": DEFAULT_OPENAI_GENERATE_MODEL_NAME,
     "max_tokens": DEFAULT_OPENAI_MAX_GENERATED_TOKENS,
+    TIMEOUT_CONFIG_KEY: 5,
 }
 
 DEFAULT_RESPONSE_VARIATION_PROMPT_TEMPLATE = """The following is a conversation with
@@ -78,7 +88,7 @@ class ContextualResponseRephraser(TemplatedNaturalLanguageGenerator):
 
         self.nlg_endpoint = endpoint_config
         self.prompt_template = get_prompt_template(
-            self.nlg_endpoint.kwargs.get("prompt"),
+            self.nlg_endpoint.kwargs.get(PROMPT_CONFIG_KEY),
             DEFAULT_RESPONSE_VARIATION_PROMPT_TEMPLATE,
         )
         self.rephrase_all = self.nlg_endpoint.kwargs.get(
@@ -87,6 +97,12 @@ class ContextualResponseRephraser(TemplatedNaturalLanguageGenerator):
         self.trace_prompt_tokens = self.nlg_endpoint.kwargs.get(
             "trace_prompt_tokens", False
         )
+        try_instantiate_llm_client(
+            self.nlg_endpoint.kwargs.get(LLM_CONFIG_KEY),
+            DEFAULT_LLM_CONFIG,
+            "contextual_response_rephraser.init",
+            "ContextualResponseRephraser",
+        )
 
     def _last_message_if_human(self, tracker: DialogueStateTracker) -> Optional[str]:
         """Returns the latest message from the tracker.
@@ -115,10 +131,13 @@ class ContextualResponseRephraser(TemplatedNaturalLanguageGenerator):
         Returns:
             generated text
         """
-        llm = llm_factory(self.nlg_endpoint.kwargs.get("llm"), DEFAULT_LLM_CONFIG)
+        llm = llm_factory(
+            self.nlg_endpoint.kwargs.get(LLM_CONFIG_KEY), DEFAULT_LLM_CONFIG
+        )
 
         try:
-            return await llm.apredict(prompt)
+            llm_response = await llm.acompletion(prompt)
+            return llm_response.choices[0]
         except Exception as e:
             # unfortunately, langchain does not wrap LLM exceptions which means
             # we have to catch all exceptions here
@@ -128,7 +147,7 @@ class ContextualResponseRephraser(TemplatedNaturalLanguageGenerator):
     def llm_property(self, prop: str) -> Optional[str]:
         """Returns a property of the LLM provider."""
         return combine_custom_and_default_config(
-            self.nlg_endpoint.kwargs.get("llm"), DEFAULT_LLM_CONFIG
+            self.nlg_endpoint.kwargs.get(LLM_CONFIG_KEY), DEFAULT_LLM_CONFIG
         ).get(prop)
 
     def custom_prompt_template(self, prompt_template: str) -> Optional[str]:
@@ -161,7 +180,9 @@ class ContextualResponseRephraser(TemplatedNaturalLanguageGenerator):
         Returns:
             The history for the prompt.
         """
-        llm = llm_factory(self.nlg_endpoint.kwargs.get("llm"), DEFAULT_LLM_CONFIG)
+        llm = llm_factory(
+            self.nlg_endpoint.kwargs.get(LLM_CONFIG_KEY), DEFAULT_LLM_CONFIG
+        )
         return await summarize_conversation(tracker, llm, max_turns=5)
 
     async def rephrase(
@@ -202,8 +223,9 @@ class ContextualResponseRephraser(TemplatedNaturalLanguageGenerator):
         telemetry.track_response_rephrase(
             rephrase_all=self.rephrase_all,
             custom_prompt_template=self.custom_prompt_template(prompt_template_text),
-            llm_type=self.llm_property("_type"),
-            llm_model=self.llm_property("model") or self.llm_property("model_name"),
+            llm_type=self.llm_property(PROVIDER_CONFIG_KEY),
+            llm_model=self.llm_property(MODEL_CONFIG_KEY)
+            or self.llm_property(MODEL_NAME_CONFIG_KEY),
         )
         if not (updated_text := await self._generate_llm_response(prompt)):
             # If the LLM fails to generate a response, we
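The rephraser now builds its default config from shared constants and calls the provider-agnostic client's acompletion, reading the first entry of choices instead of langchain's apredict return value. A hedged sketch of the new call shape; the literal key strings "provider", "model" and "timeout" are assumed values of PROVIDER_CONFIG_KEY, MODEL_CONFIG_KEY and TIMEOUT_CONFIG_KEY, and the model name is illustrative:

import asyncio

from rasa.shared.utils.llm import llm_factory

# Assumed literal values of the config-key constants used in the diff above.
custom_config = {"provider": "openai", "model": "gpt-3.5-turbo", "temperature": 0.3, "timeout": 5}


async def rephrase(prompt: str) -> str:
    llm = llm_factory(custom_config, {"provider": "openai", "model": "gpt-3.5-turbo"})
    response = await llm.acompletion(prompt)  # response object exposing generations via `choices`
    return response.choices[0]                # first completion as plain text


# text = asyncio.run(rephrase("Rephrase: Your booking is confirmed."))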
rasa/core/nlg/summarize.py

@@ -2,8 +2,8 @@ from typing import Optional
 
 import structlog
 from jinja2 import Template
-from langchain.llms.base import BaseLLM
 from rasa.core.tracker_store import DialogueStateTracker
+from rasa.shared.providers.llm.llm_client import LLMClient
 from rasa.shared.utils.llm import (
     tracker_as_readable_transcript,
 )
@@ -43,7 +43,7 @@ def _create_summarization_prompt(
 
 async def summarize_conversation(
     tracker: DialogueStateTracker,
-    llm: BaseLLM,
+    llm: LLMClient,
     max_turns: Optional[int] = MAX_TURNS_DEFAULT,
 ) -> str:
     """Summarizes the dialogue using the LLM.
@@ -58,7 +58,8 @@
     """
     prompt = _create_summarization_prompt(tracker, max_turns)
     try:
-        summarization = (await llm.apredict(prompt)).strip()
+        llm_response = await llm.acompletion(prompt)
+        summarization = llm_response.choices[0].strip()
         structlogger.debug(
             "summarization.success", summarization=summarization, prompt=prompt
         )