rasa-pro 3.10.16__py3-none-any.whl → 3.11.0a1__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of rasa-pro might be problematic.

Files changed (185)
  1. README.md +396 -17
  2. rasa/api.py +9 -3
  3. rasa/cli/arguments/default_arguments.py +23 -2
  4. rasa/cli/arguments/run.py +15 -0
  5. rasa/cli/arguments/train.py +3 -9
  6. rasa/cli/e2e_test.py +1 -1
  7. rasa/cli/evaluate.py +1 -1
  8. rasa/cli/inspect.py +8 -4
  9. rasa/cli/llm_fine_tuning.py +12 -15
  10. rasa/cli/run.py +8 -1
  11. rasa/cli/studio/studio.py +8 -18
  12. rasa/cli/train.py +11 -53
  13. rasa/cli/utils.py +8 -10
  14. rasa/cli/x.py +1 -1
  15. rasa/constants.py +1 -1
  16. rasa/core/actions/action.py +2 -0
  17. rasa/core/actions/action_hangup.py +29 -0
  18. rasa/core/agent.py +2 -2
  19. rasa/core/brokers/kafka.py +3 -1
  20. rasa/core/brokers/pika.py +3 -1
  21. rasa/core/channels/__init__.py +8 -6
  22. rasa/core/channels/channel.py +21 -4
  23. rasa/core/channels/development_inspector.py +143 -46
  24. rasa/core/channels/inspector/README.md +1 -1
  25. rasa/core/channels/inspector/dist/assets/{arc-b6e548fe.js → arc-86942a71.js} +1 -1
  26. rasa/core/channels/inspector/dist/assets/{c4Diagram-d0fbc5ce-fa03ac9e.js → c4Diagram-d0fbc5ce-b0290676.js} +1 -1
  27. rasa/core/channels/inspector/dist/assets/{classDiagram-936ed81e-ee67392a.js → classDiagram-936ed81e-f6405f6e.js} +1 -1
  28. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-c3cb15f1-9b283fae.js → classDiagram-v2-c3cb15f1-ef61ac77.js} +1 -1
  29. rasa/core/channels/inspector/dist/assets/{createText-62fc7601-8b6fcc2a.js → createText-62fc7601-f0411e58.js} +1 -1
  30. rasa/core/channels/inspector/dist/assets/{edges-f2ad444c-22e77f4f.js → edges-f2ad444c-7dcc4f3b.js} +1 -1
  31. rasa/core/channels/inspector/dist/assets/{erDiagram-9d236eb7-60ffc87f.js → erDiagram-9d236eb7-e0c092d7.js} +1 -1
  32. rasa/core/channels/inspector/dist/assets/{flowDb-1972c806-9dd802e4.js → flowDb-1972c806-fba2e3ce.js} +1 -1
  33. rasa/core/channels/inspector/dist/assets/{flowDiagram-7ea5b25a-5fa1912f.js → flowDiagram-7ea5b25a-7a70b71a.js} +1 -1
  34. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-24a5f41a.js +1 -0
  35. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-abe16c3d-622a1fd2.js → flowchart-elk-definition-abe16c3d-00a59b68.js} +1 -1
  36. rasa/core/channels/inspector/dist/assets/{ganttDiagram-9b5ea136-e285a63a.js → ganttDiagram-9b5ea136-293c91fa.js} +1 -1
  37. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-99d0ae7c-f237bdca.js → gitGraphDiagram-99d0ae7c-07b2d68c.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{index-2c4b9a3b-4b03d70e.js → index-2c4b9a3b-bc959fbd.js} +1 -1
  39. rasa/core/channels/inspector/dist/assets/index-3a8a5a28.js +1317 -0
  40. rasa/core/channels/inspector/dist/assets/{infoDiagram-736b4530-72a0fa5f.js → infoDiagram-736b4530-4a350f72.js} +1 -1
  41. rasa/core/channels/inspector/dist/assets/{journeyDiagram-df861f2b-82218c41.js → journeyDiagram-df861f2b-af464fb7.js} +1 -1
  42. rasa/core/channels/inspector/dist/assets/{layout-78cff630.js → layout-0071f036.js} +1 -1
  43. rasa/core/channels/inspector/dist/assets/{line-5038b469.js → line-2f73cc83.js} +1 -1
  44. rasa/core/channels/inspector/dist/assets/{linear-c4fc4098.js → linear-f014b4cc.js} +1 -1
  45. rasa/core/channels/inspector/dist/assets/{mindmap-definition-beec6740-c33c8ea6.js → mindmap-definition-beec6740-d2426fb6.js} +1 -1
  46. rasa/core/channels/inspector/dist/assets/{pieDiagram-dbbf0591-a8d03059.js → pieDiagram-dbbf0591-776f01a2.js} +1 -1
  47. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-4d7f4fd6-6a0e56b2.js → quadrantDiagram-4d7f4fd6-82e00b57.js} +1 -1
  48. rasa/core/channels/inspector/dist/assets/{requirementDiagram-6fc4c22a-2dc7c7bd.js → requirementDiagram-6fc4c22a-ea13c6bb.js} +1 -1
  49. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-8f13d901-2360fe39.js → sankeyDiagram-8f13d901-1feca7e9.js} +1 -1
  50. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-b655622a-41b9f9ad.js → sequenceDiagram-b655622a-070c61d2.js} +1 -1
  51. rasa/core/channels/inspector/dist/assets/{stateDiagram-59f0c015-0aad326f.js → stateDiagram-59f0c015-24f46263.js} +1 -1
  52. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-2b26beab-9847d984.js → stateDiagram-v2-2b26beab-c9056051.js} +1 -1
  53. rasa/core/channels/inspector/dist/assets/{styles-080da4f6-564d890e.js → styles-080da4f6-08abc34a.js} +1 -1
  54. rasa/core/channels/inspector/dist/assets/{styles-3dcbcfbf-38957613.js → styles-3dcbcfbf-bc74c25a.js} +1 -1
  55. rasa/core/channels/inspector/dist/assets/{styles-9c745c82-f0fc6921.js → styles-9c745c82-4e5d66de.js} +1 -1
  56. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-4835440b-ef3c5a77.js → svgDrawCommon-4835440b-849c4517.js} +1 -1
  57. rasa/core/channels/inspector/dist/assets/{timeline-definition-5b62e21b-bf3e91c1.js → timeline-definition-5b62e21b-d0fb1598.js} +1 -1
  58. rasa/core/channels/inspector/dist/assets/{xychartDiagram-2b33534f-4d4026c0.js → xychartDiagram-2b33534f-04d115e2.js} +1 -1
  59. rasa/core/channels/inspector/dist/index.html +18 -17
  60. rasa/core/channels/inspector/index.html +17 -16
  61. rasa/core/channels/inspector/package.json +5 -1
  62. rasa/core/channels/inspector/src/App.tsx +117 -67
  63. rasa/core/channels/inspector/src/components/Chat.tsx +95 -0
  64. rasa/core/channels/inspector/src/components/DiagramFlow.tsx +11 -10
  65. rasa/core/channels/inspector/src/components/DialogueStack.tsx +10 -25
  66. rasa/core/channels/inspector/src/components/LoadingSpinner.tsx +1 -1
  67. rasa/core/channels/inspector/src/helpers/formatters.test.ts +10 -0
  68. rasa/core/channels/inspector/src/helpers/formatters.ts +107 -41
  69. rasa/core/channels/inspector/src/helpers/utils.ts +92 -7
  70. rasa/core/channels/inspector/src/types.ts +21 -1
  71. rasa/core/channels/inspector/yarn.lock +94 -1
  72. rasa/core/channels/rest.py +51 -46
  73. rasa/core/channels/socketio.py +22 -0
  74. rasa/core/channels/{audiocodes.py → voice_ready/audiocodes.py} +110 -68
  75. rasa/core/channels/{voice_aware → voice_ready}/jambonz.py +11 -4
  76. rasa/core/channels/{voice_aware → voice_ready}/jambonz_protocol.py +57 -5
  77. rasa/core/channels/{twilio_voice.py → voice_ready/twilio_voice.py} +58 -7
  78. rasa/core/channels/{voice_aware → voice_ready}/utils.py +16 -0
  79. rasa/core/channels/voice_stream/asr/__init__.py +0 -0
  80. rasa/core/channels/voice_stream/asr/asr_engine.py +71 -0
  81. rasa/core/channels/voice_stream/asr/asr_event.py +13 -0
  82. rasa/core/channels/voice_stream/asr/deepgram.py +77 -0
  83. rasa/core/channels/voice_stream/audio_bytes.py +7 -0
  84. rasa/core/channels/voice_stream/tts/__init__.py +0 -0
  85. rasa/core/channels/voice_stream/tts/azure.py +100 -0
  86. rasa/core/channels/voice_stream/tts/cartesia.py +114 -0
  87. rasa/core/channels/voice_stream/tts/tts_cache.py +27 -0
  88. rasa/core/channels/voice_stream/tts/tts_engine.py +48 -0
  89. rasa/core/channels/voice_stream/twilio_media_streams.py +164 -0
  90. rasa/core/channels/voice_stream/util.py +57 -0
  91. rasa/core/channels/voice_stream/voice_channel.py +247 -0
  92. rasa/core/featurizers/single_state_featurizer.py +1 -22
  93. rasa/core/featurizers/tracker_featurizers.py +18 -115
  94. rasa/core/nlg/contextual_response_rephraser.py +11 -2
  95. rasa/{nlu → core}/persistor.py +16 -38
  96. rasa/core/policies/enterprise_search_policy.py +12 -15
  97. rasa/core/policies/flows/flow_executor.py +8 -18
  98. rasa/core/policies/intentless_policy.py +10 -15
  99. rasa/core/policies/ted_policy.py +33 -58
  100. rasa/core/policies/unexpected_intent_policy.py +7 -15
  101. rasa/core/processor.py +13 -64
  102. rasa/core/run.py +11 -1
  103. rasa/core/secrets_manager/constants.py +4 -0
  104. rasa/core/secrets_manager/factory.py +8 -0
  105. rasa/core/secrets_manager/vault.py +11 -1
  106. rasa/core/training/interactive.py +1 -1
  107. rasa/core/utils.py +1 -11
  108. rasa/dialogue_understanding/coexistence/llm_based_router.py +10 -10
  109. rasa/dialogue_understanding/commands/__init__.py +2 -0
  110. rasa/dialogue_understanding/commands/change_flow_command.py +0 -6
  111. rasa/dialogue_understanding/commands/session_end_command.py +61 -0
  112. rasa/dialogue_understanding/generator/flow_retrieval.py +0 -7
  113. rasa/dialogue_understanding/generator/llm_based_command_generator.py +12 -3
  114. rasa/dialogue_understanding/generator/llm_command_generator.py +1 -1
  115. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +3 -28
  116. rasa/dialogue_understanding/generator/nlu_command_adapter.py +1 -19
  117. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +4 -37
  118. rasa/e2e_test/aggregate_test_stats_calculator.py +1 -11
  119. rasa/e2e_test/assertions.py +6 -48
  120. rasa/e2e_test/e2e_test_runner.py +6 -9
  121. rasa/e2e_test/utils/e2e_yaml_utils.py +1 -1
  122. rasa/e2e_test/utils/io.py +1 -3
  123. rasa/engine/graph.py +3 -10
  124. rasa/engine/recipes/config_files/default_config.yml +0 -3
  125. rasa/engine/recipes/default_recipe.py +0 -1
  126. rasa/engine/recipes/graph_recipe.py +0 -1
  127. rasa/engine/runner/dask.py +2 -2
  128. rasa/engine/storage/local_model_storage.py +12 -42
  129. rasa/engine/storage/storage.py +1 -5
  130. rasa/engine/validation.py +1 -78
  131. rasa/keys +1 -0
  132. rasa/model_training.py +13 -16
  133. rasa/nlu/classifiers/diet_classifier.py +25 -38
  134. rasa/nlu/classifiers/logistic_regression_classifier.py +9 -22
  135. rasa/nlu/classifiers/sklearn_intent_classifier.py +16 -37
  136. rasa/nlu/extractors/crf_entity_extractor.py +50 -93
  137. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +16 -45
  138. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +17 -52
  139. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +3 -5
  140. rasa/server.py +1 -1
  141. rasa/shared/constants.py +3 -12
  142. rasa/shared/core/constants.py +4 -0
  143. rasa/shared/core/domain.py +101 -47
  144. rasa/shared/core/events.py +29 -0
  145. rasa/shared/core/flows/flows_list.py +20 -11
  146. rasa/shared/core/flows/validation.py +25 -0
  147. rasa/shared/core/flows/yaml_flows_io.py +3 -24
  148. rasa/shared/importers/importer.py +40 -39
  149. rasa/shared/importers/multi_project.py +23 -11
  150. rasa/shared/importers/rasa.py +7 -2
  151. rasa/shared/importers/remote_importer.py +196 -0
  152. rasa/shared/importers/utils.py +3 -1
  153. rasa/shared/nlu/training_data/features.py +2 -120
  154. rasa/shared/nlu/training_data/training_data.py +18 -19
  155. rasa/shared/providers/_configs/azure_openai_client_config.py +3 -5
  156. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +1 -6
  157. rasa/shared/providers/llm/_base_litellm_client.py +11 -31
  158. rasa/shared/providers/llm/self_hosted_llm_client.py +3 -15
  159. rasa/shared/utils/common.py +3 -22
  160. rasa/shared/utils/io.py +0 -1
  161. rasa/shared/utils/llm.py +30 -27
  162. rasa/shared/utils/schemas/events.py +2 -0
  163. rasa/shared/utils/schemas/model_config.yml +0 -10
  164. rasa/shared/utils/yaml.py +44 -0
  165. rasa/studio/auth.py +5 -3
  166. rasa/studio/config.py +4 -13
  167. rasa/studio/constants.py +0 -1
  168. rasa/studio/data_handler.py +3 -10
  169. rasa/studio/upload.py +8 -17
  170. rasa/tracing/instrumentation/attribute_extractors.py +1 -1
  171. rasa/utils/io.py +66 -0
  172. rasa/utils/tensorflow/model_data.py +193 -2
  173. rasa/validator.py +0 -12
  174. rasa/version.py +1 -1
  175. rasa_pro-3.11.0a1.dist-info/METADATA +576 -0
  176. {rasa_pro-3.10.16.dist-info → rasa_pro-3.11.0a1.dist-info}/RECORD +181 -164
  177. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-1844e5a5.js +0 -1
  178. rasa/core/channels/inspector/dist/assets/index-a5d3e69d.js +0 -1040
  179. rasa/utils/tensorflow/feature_array.py +0 -366
  180. rasa_pro-3.10.16.dist-info/METADATA +0 -196
  181. /rasa/core/channels/{voice_aware → voice_ready}/__init__.py +0 -0
  182. /rasa/core/channels/{voice_native → voice_stream}/__init__.py +0 -0
  183. {rasa_pro-3.10.16.dist-info → rasa_pro-3.11.0a1.dist-info}/NOTICE +0 -0
  184. {rasa_pro-3.10.16.dist-info → rasa_pro-3.11.0a1.dist-info}/WHEEL +0 -0
  185. {rasa_pro-3.10.16.dist-info → rasa_pro-3.11.0a1.dist-info}/entry_points.txt +0 -0
rasa/shared/nlu/training_data/features.py CHANGED
@@ -1,133 +1,15 @@
 from __future__ import annotations
-
-import itertools
-from dataclasses import dataclass
 from typing import Iterable, Union, Text, Optional, List, Any, Tuple, Dict, Set
+import itertools
 
 import numpy as np
 import scipy.sparse
-from safetensors.numpy import save_file, load_file
 
-import rasa.shared.nlu.training_data.util
 import rasa.shared.utils.io
+import rasa.shared.nlu.training_data.util
 from rasa.shared.nlu.constants import FEATURE_TYPE_SEQUENCE, FEATURE_TYPE_SENTENCE
 
 
-@dataclass
-class FeatureMetadata:
-    data_type: str
-    attribute: str
-    origin: Union[str, List[str]]
-    is_sparse: bool
-    shape: tuple
-    safetensors_key: str
-
-
-def save_features(
-    features_dict: Dict[Text, List[Features]], file_name: str
-) -> Dict[str, Any]:
-    """Save a dictionary of Features lists to disk using safetensors.
-
-    Args:
-        features_dict: Dictionary mapping strings to lists of Features objects
-        file_name: File to save the features to
-
-    Returns:
-        The metadata to reconstruct the features.
-    """
-    # All tensors are stored in a single safetensors file
-    tensors_to_save = {}
-    # Metadata will be stored separately
-    metadata = {}
-
-    for key, features_list in features_dict.items():
-        feature_metadata_list = []
-
-        for idx, feature in enumerate(features_list):
-            # Create a unique key for this tensor in the safetensors file
-            safetensors_key = f"{key}_{idx}"
-
-            # Convert sparse matrices to dense if needed
-            if feature.is_sparse():
-                # For sparse matrices, use the COO format
-                coo = feature.features.tocoo()  # type:ignore[union-attr]
-                # Save data, row indices and col indices separately
-                tensors_to_save[f"{safetensors_key}_data"] = coo.data
-                tensors_to_save[f"{safetensors_key}_row"] = coo.row
-                tensors_to_save[f"{safetensors_key}_col"] = coo.col
-            else:
-                tensors_to_save[safetensors_key] = feature.features
-
-            # Store metadata
-            metadata_item = FeatureMetadata(
-                data_type=feature.type,
-                attribute=feature.attribute,
-                origin=feature.origin,
-                is_sparse=feature.is_sparse(),
-                shape=feature.features.shape,
-                safetensors_key=safetensors_key,
-            )
-            feature_metadata_list.append(vars(metadata_item))
-
-        metadata[key] = feature_metadata_list
-
-    # Save tensors
-    save_file(tensors_to_save, file_name)
-
-    return metadata
-
-
-def load_features(
-    filename: str, metadata: Dict[str, Any]
-) -> Dict[Text, List[Features]]:
-    """Load Features dictionary from disk.
-
-    Args:
-        filename: File name of the safetensors file.
-        metadata: Metadata to reconstruct the features.
-
-    Returns:
-        Dictionary mapping strings to lists of Features objects
-    """
-    # Load tensors
-    tensors = load_file(filename)
-
-    # Reconstruct the features dictionary
-    features_dict: Dict[Text, List[Features]] = {}
-
-    for key, feature_metadata_list in metadata.items():
-        features_list = []
-
-        for meta in feature_metadata_list:
-            safetensors_key = meta["safetensors_key"]
-
-            if meta["is_sparse"]:
-                # Reconstruct sparse matrix from COO format
-                data = tensors[f"{safetensors_key}_data"]
-                row = tensors[f"{safetensors_key}_row"]
-                col = tensors[f"{safetensors_key}_col"]
-
-                features_matrix = scipy.sparse.coo_matrix(
-                    (data, (row, col)), shape=tuple(meta["shape"])
-                ).tocsr()  # Convert back to CSR format
-            else:
-                features_matrix = tensors[safetensors_key]
-
-            # Reconstruct Features object
-            features = Features(
-                features=features_matrix,
-                feature_type=meta["data_type"],
-                attribute=meta["attribute"],
-                origin=meta["origin"],
-            )
-
-            features_list.append(features)
-
-        features_dict[key] = features_list
-
-    return features_dict
-
-
 class Features:
     """Stores the features produced by any featurizer."""
 
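The removed `save_features`/`load_features` helpers wrote each `Features` list to a single safetensors file, splitting sparse matrices into COO data/row/col tensors and rebuilding them as CSR matrices on load. A minimal standalone sketch of that round trip (file name and tensor keys are illustrative, not the Rasa API):

```python
import numpy as np
import scipy.sparse
from safetensors.numpy import load_file, save_file

# Build a sparse matrix and store its COO components under illustrative keys.
matrix = scipy.sparse.random(5, 10, density=0.3, format="csr")
coo = matrix.tocoo()
save_file(
    {"feat_0_data": coo.data, "feat_0_row": coo.row, "feat_0_col": coo.col},
    "features.safetensors",
)

# Load the tensors back and reconstruct the matrix in CSR format.
tensors = load_file("features.safetensors")
restored = scipy.sparse.coo_matrix(
    (tensors["feat_0_data"], (tensors["feat_0_row"], tensors["feat_0_col"])),
    shape=matrix.shape,
).tocsr()
assert np.allclose(matrix.toarray(), restored.toarray())
```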
rasa/shared/nlu/training_data/training_data.py CHANGED
@@ -1,5 +1,6 @@
 import logging
 import os
+from functools import cached_property
 from pathlib import Path
 import random
 from collections import Counter, OrderedDict
@@ -9,7 +10,6 @@ from typing import Any, Dict, List, Optional, Set, Text, Tuple, Callable
 import operator
 
 import rasa.shared.data
-from rasa.shared.utils.common import lazy_property
 import rasa.shared.utils.io
 from rasa.shared.nlu.constants import (
     RESPONSE,
@@ -202,7 +202,7 @@ class TrainingData:
 
         return list(OrderedDict.fromkeys(examples))
 
-    @lazy_property
+    @cached_property
     def nlu_examples(self) -> List[Message]:
         """Return examples which have come from NLU training data.
 
@@ -215,32 +215,32 @@ class TrainingData:
             ex for ex in self.training_examples if not ex.is_core_or_domain_message()
         ]
 
-    @lazy_property
+    @cached_property
     def intent_examples(self) -> List[Message]:
         """Returns the list of examples that have intent."""
         return [ex for ex in self.nlu_examples if ex.get(INTENT)]
 
-    @lazy_property
+    @cached_property
     def response_examples(self) -> List[Message]:
         """Returns the list of examples that have response."""
         return [ex for ex in self.nlu_examples if ex.get(INTENT_RESPONSE_KEY)]
 
-    @lazy_property
+    @cached_property
     def entity_examples(self) -> List[Message]:
         """Returns the list of examples that have entities."""
         return [ex for ex in self.nlu_examples if ex.get(ENTITIES)]
 
-    @lazy_property
+    @cached_property
     def intents(self) -> Set[Text]:
         """Returns the set of intents in the training data."""
         return {ex.get(INTENT) for ex in self.training_examples} - {None}
 
-    @lazy_property
+    @cached_property
     def action_names(self) -> Set[Text]:
         """Returns the set of action names in the training data."""
         return {ex.get(ACTION_NAME) for ex in self.training_examples} - {None}
 
-    @lazy_property
+    @cached_property
     def retrieval_intents(self) -> Set[Text]:
         """Returns the total number of response types in the training data."""
         return {
@@ -249,13 +249,13 @@ class TrainingData:
             if ex.get(INTENT_RESPONSE_KEY)
         }
 
-    @lazy_property
+    @cached_property
     def number_of_examples_per_intent(self) -> Dict[Text, int]:
         """Calculates the number of examples per intent."""
         intents = [ex.get(INTENT) for ex in self.nlu_examples]
         return dict(Counter(intents))
 
-    @lazy_property
+    @cached_property
     def number_of_examples_per_response(self) -> Dict[Text, int]:
         """Calculates the number of examples per response."""
         responses = [
@@ -265,12 +265,12 @@ class TrainingData:
         ]
         return dict(Counter(responses))
 
-    @lazy_property
+    @cached_property
     def entities(self) -> Set[Text]:
         """Returns the set of entity types in the training data."""
         return {e.get(ENTITY_ATTRIBUTE_TYPE) for e in self.sorted_entities()}
 
-    @lazy_property
+    @cached_property
     def entity_roles(self) -> Set[Text]:
         """Returns the set of entity roles in the training data."""
         entity_types = {
@@ -280,7 +280,7 @@ class TrainingData:
         }
         return entity_types - {NO_ENTITY_TAG}
 
-    @lazy_property
+    @cached_property
     def entity_groups(self) -> Set[Text]:
         """Returns the set of entity groups in the training data."""
         entity_types = {
@@ -299,7 +299,7 @@ class TrainingData:
 
         return entity_groups_used or entity_roles_used
 
-    @lazy_property
+    @cached_property
     def number_of_examples_per_entity(self) -> Dict[Text, int]:
         """Calculates the number of examples per entity."""
         entities = []
@@ -426,8 +426,9 @@ class TrainingData:
     def persist(
         self, dir_name: Text, filename: Text = DEFAULT_TRAINING_DATA_OUTPUT_PATH
    ) -> Dict[Text, Any]:
-        """Persists this training data to disk and returns necessary
-        information to load it again.
+        """Persists this training data to disk.
+
+        Returns: necessary information to load it again.
         """
         if not os.path.exists(dir_name):
             os.makedirs(dir_name)
@@ -498,9 +499,7 @@ class TrainingData:
     def train_test_split(
         self, train_frac: float = 0.8, random_seed: Optional[int] = None
     ) -> Tuple["TrainingData", "TrainingData"]:
-        """Split into a training and test dataset,
-        preserving the fraction of examples per intent.
-        """
+        """Split into a training and test dataset, preserving the fraction of examples per intent."""  # noqa: E501
         # collect all nlu data
         test, train = self.split_nlu_examples(train_frac, random_seed)
 
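The switch from the custom `lazy_property` decorator (removed from `rasa.shared.utils.common`, see the common.py hunk below) to the standard library's `functools.cached_property` preserves the compute-once-per-instance behaviour. A small illustration with a made-up class and data:

```python
from functools import cached_property


class Dataset:
    def __init__(self, examples):
        self.examples = examples

    @cached_property
    def intents(self):
        # Computed on first access, then stored on the instance,
        # matching the behaviour of the removed lazy_property decorator.
        print("computing intents")
        return {ex["intent"] for ex in self.examples}


data = Dataset([{"intent": "greet"}, {"intent": "goodbye"}])
data.intents  # prints "computing intents"
data.intents  # second access is served from the cache
```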
rasa/shared/providers/_configs/azure_openai_client_config.py CHANGED
@@ -107,7 +107,8 @@ class AzureOpenAIClientConfig:
 
     @classmethod
     def from_dict(cls, config: dict) -> "AzureOpenAIClientConfig":
-        """Initializes a dataclass from the passed config.
+        """
+        Initializes a dataclass from the passed config.
 
         Args:
             config: (dict) The config from which to initialize.
@@ -174,10 +175,7 @@ def is_azure_openai_config(config: dict) -> bool:
 
     # Case: Configuration contains `deployment` key
     # (specific to Azure OpenAI configuration)
-    if (
-        config.get(DEPLOYMENT_CONFIG_KEY) is not None
-        and config.get(PROVIDER_CONFIG_KEY) is None
-    ):
+    if config.get(DEPLOYMENT_CONFIG_KEY) is not None:
         return True
 
     return False
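The relaxed check means any config carrying a `deployment` key is now treated as Azure OpenAI, even when a `provider` key is also present. A simplified sketch of just this branch (the constant value is assumed, and the real helper checks further keys and provider values):

```python
DEPLOYMENT_CONFIG_KEY = "deployment"  # assumed value of the Rasa constant


def looks_like_azure_openai(config: dict) -> bool:
    # Simplified stand-in for the relaxed branch shown in the diff above.
    return config.get(DEPLOYMENT_CONFIG_KEY) is not None


assert looks_like_azure_openai({"deployment": "my-gpt-4", "provider": "azure"})
assert looks_like_azure_openai({"deployment": "my-gpt-4"})
assert not looks_like_azure_openai({"model": "gpt-4"})
```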
rasa/shared/providers/embedding/_base_litellm_embedding_client.py CHANGED
@@ -5,8 +5,6 @@ import litellm
 import logging
 import structlog
 from litellm import aembedding, embedding, validate_environment
-
-from rasa.shared.constants import API_BASE_CONFIG_KEY
 from rasa.shared.exceptions import (
     ProviderClientAPIException,
     ProviderClientValidationError,
@@ -87,10 +85,7 @@ class _BaseLiteLLMEmbeddingClient:
 
     def _validate_environment_variables(self) -> None:
         """Validate that the required environment variables are set."""
-        validation_info = validate_environment(
-            self._litellm_model_name,
-            api_base=self._litellm_extra_parameters.get(API_BASE_CONFIG_KEY),
-        )
+        validation_info = validate_environment(self._litellm_model_name)
         if missing_environment_variables := validation_info.get(
             _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY
         ):
rasa/shared/providers/llm/_base_litellm_client.py CHANGED
@@ -1,7 +1,7 @@
 from abc import abstractmethod
 from typing import Dict, List, Any, Union
-import logging
 
+import logging
 import structlog
 from litellm import (
     completion,
@@ -9,7 +9,6 @@ from litellm import (
     validate_environment,
 )
 
-from rasa.shared.constants import API_BASE_CONFIG_KEY
 from rasa.shared.exceptions import (
     ProviderClientAPIException,
     ProviderClientValidationError,
@@ -30,7 +29,8 @@ logging.getLogger("LiteLLM").setLevel(logging.WARNING)
 
 
 class _BaseLiteLLMClient:
-    """An abstract base class for LiteLLM clients.
+    """
+    An abstract base class for LiteLLM clients.
 
     This class defines the interface and common functionality for all clients
     based on LiteLLM.
@@ -103,10 +103,7 @@ class _BaseLiteLLMClient:
 
     def _validate_environment_variables(self) -> None:
         """Validate that the required environment variables are set."""
-        validation_info = validate_environment(
-            self._litellm_model_name,
-            api_base=self._litellm_extra_parameters.get(API_BASE_CONFIG_KEY),
-        )
+        validation_info = validate_environment(self._litellm_model_name)
         if missing_environment_variables := validation_info.get(
             _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY
         ):
@@ -135,15 +132,14 @@ class _BaseLiteLLMClient:
 
     @suppress_logs(log_level=logging.WARNING)
     def completion(self, messages: Union[List[str], str]) -> LLMResponse:
-        """Synchronously generate completions for given list of messages.
+        """
+        Synchronously generate completions for given list of messages.
 
         Args:
             messages: List of messages or a single message to generate the
                 completion for.
-
         Returns:
             List of message completions.
-
         Raises:
             ProviderClientAPIException: If the API request fails.
         """
@@ -158,15 +154,14 @@ class _BaseLiteLLMClient:
 
     @suppress_logs(log_level=logging.WARNING)
     async def acompletion(self, messages: Union[List[str], str]) -> LLMResponse:
-        """Asynchronously generate completions for given list of messages.
+        """
+        Asynchronously generate completions for given list of messages.
 
         Args:
             messages: List of messages or a single message to generate the
                 completion for.
-
         Returns:
             List of message completions.
-
         Raises:
             ProviderClientAPIException: If the API request fails.
         """
@@ -177,23 +172,7 @@ class _BaseLiteLLMClient:
             )
             return self._format_response(response)
         except Exception as e:
-            message = ""
-            from rasa.shared.providers.llm.self_hosted_llm_client import (
-                SelfHostedLLMClient,
-            )
-
-            if isinstance(self, SelfHostedLLMClient):
-                message = (
-                    "If you are using 'provider=self-hosted' to call a hosted vllm "
-                    "server make sure your config is correctly setup. You should have "
-                    "the following mandatory keys in your config: "
-                    "provider=self-hosted; "
-                    "model='<your-vllm-model-name>'; "
-                    "api_base='your-hosted-vllm-serv'."
-                    "In case you are getting OpenAI connection errors, such as missing "
-                    "API key, your configuration is incorrect."
-                )
-            raise ProviderClientAPIException(e, message)
+            raise ProviderClientAPIException(e)
 
     def _format_messages(self, messages: Union[List[str], str]) -> List[Dict[str, str]]:
         """Formats messages (or a single message) to OpenAI format."""
@@ -237,7 +216,8 @@ class _BaseLiteLLMClient:
 
     @staticmethod
     def _ensure_certificates() -> None:
-        """Configures SSL certificates for LiteLLM. This method is invoked during
+        """
+        Configures SSL certificates for LiteLLM. This method is invoked during
         client initialization.
 
         LiteLLM may utilize `openai` clients or other providers that require
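Both base clients now call LiteLLM's `validate_environment` with the model name only; the `api_base` override is no longer forwarded. A rough sketch of this style of check (the `missing_keys` field is what LiteLLM reports; the generic error type here is a stand-in for Rasa's validation error):

```python
from litellm import validate_environment


def check_required_env_vars(litellm_model_name: str) -> None:
    # Ask LiteLLM which provider credentials the model needs and whether they are set.
    validation_info = validate_environment(litellm_model_name)
    if missing := validation_info.get("missing_keys"):
        raise RuntimeError(
            f"Missing environment variables for '{litellm_model_name}': {missing}"
        )


check_required_env_vars("gpt-4")  # raises unless OPENAI_API_KEY is set
```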
rasa/shared/providers/llm/self_hosted_llm_client.py CHANGED
@@ -4,13 +4,9 @@ from litellm import (
     atext_completion,
 )
 import logging
-import os
 import structlog
 
-from rasa.shared.constants import (
-    SELF_HOSTED_VLLM_PREFIX,
-    SELF_HOSTED_VLLM_API_KEY_ENV_VAR,
-)
+from rasa.shared.constants import OPENAI_PROVIDER
 from rasa.shared.providers._configs.self_hosted_llm_client_config import (
     SelfHostedLLMClientConfig,
 )
@@ -61,7 +57,6 @@ class SelfHostedLLMClient(_BaseLiteLLMClient):
         self._api_version = api_version
         self._use_chat_completions_endpoint = use_chat_completions_endpoint
         self._extra_parameters = kwargs or {}
-        self._apply_dummy_api_key_if_missing()
 
     @classmethod
     def from_config(cls, config: Dict[str, Any]) -> "SelfHostedLLMClient":
@@ -162,8 +157,8 @@ class SelfHostedLLMClient(_BaseLiteLLMClient):
 
             <openai>/<model or deployment name>
         """
-        if self.model and f"{SELF_HOSTED_VLLM_PREFIX}/" not in self.model:
-            return f"{SELF_HOSTED_VLLM_PREFIX}/{self.model}"
+        if self.model and f"{OPENAI_PROVIDER}/" not in self.model:
+            return f"{OPENAI_PROVIDER}/{self.model}"
         return self.model
 
     @property
@@ -284,10 +279,3 @@ class SelfHostedLLMClient(_BaseLiteLLMClient):
             formatted_response=formatted_response.to_dict(),
         )
         return formatted_response
-
-    @staticmethod
-    def _apply_dummy_api_key_if_missing() -> None:
-        if not os.getenv(SELF_HOSTED_VLLM_API_KEY_ENV_VAR):
-            os.environ[SELF_HOSTED_VLLM_API_KEY_ENV_VAR] = (
-                "dummy_self_hosted_llm_api_key"
-            )
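With this change the self-hosted client routes requests through LiteLLM's generic OpenAI-compatible prefix instead of a dedicated vLLM prefix, and no longer injects a dummy API key. A sketch of the prefixing rule, assuming the imported `OPENAI_PROVIDER` constant is the string `"openai"`:

```python
OPENAI_PROVIDER = "openai"  # assumed value of rasa.shared.constants.OPENAI_PROVIDER


def litellm_model_name(model: str) -> str:
    # Prefix the configured model with the provider unless it is already prefixed.
    if model and f"{OPENAI_PROVIDER}/" not in model:
        return f"{OPENAI_PROVIDER}/{model}"
    return model


assert litellm_model_name("my-hosted-model") == "openai/my-hosted-model"
assert litellm_model_name("openai/my-hosted-model") == "openai/my-hosted-model"
```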
rasa/shared/utils/common.py CHANGED
@@ -86,31 +86,11 @@ def sort_list_of_dicts_by_first_key(dicts: List[Dict]) -> List[Dict]:
     return sorted(dicts, key=lambda d: next(iter(d.keys())))
 
 
-def lazy_property(function: Callable) -> Any:
-    """Allows to avoid recomputing a property over and over.
-
-    The result gets stored in a local var. Computation of the property
-    will happen once, on the first call of the property. All
-    succeeding calls will use the value stored in the private property.
-    """
-    attr_name = "_lazy_" + function.__name__
-
-    def _lazyprop(self: Any) -> Any:
-        if not hasattr(self, attr_name):
-            setattr(self, attr_name, function(self))
-        return getattr(self, attr_name)
-
-    return property(_lazyprop)
-
-
 def cached_method(f: Callable[..., Any]) -> Callable[..., Any]:
     """Caches method calls based on the call's `args` and `kwargs`.
-
     Works for `async` and `sync` methods. Don't apply this to functions.
-
     Args:
         f: The decorated method whose return value should be cached.
-
     Returns:
         The return value which the method gives for the first call with the given
         arguments.
@@ -176,8 +156,9 @@ def transform_collection_to_sentence(collection: Collection[Text]) -> Text:
 def minimal_kwargs(
     kwargs: Dict[Text, Any], func: Callable, excluded_keys: Optional[List] = None
 ) -> Dict[Text, Any]:
-    """Returns only the kwargs which are required by a function. Keys, contained in
-    the exception list, are not included.
+    """Returns only the kwargs which are required by a function.
+
+    Keys, contained in the exception list, are not included.
 
     Args:
         kwargs: All available kwargs.
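`minimal_kwargs`, whose docstring is reflowed above, filters a kwargs dict down to the parameters a callable actually accepts. A standalone sketch of the idea, not the Rasa implementation (which also honours an `excluded_keys` list):

```python
import inspect
from typing import Any, Callable, Dict


def keep_accepted_kwargs(kwargs: Dict[str, Any], func: Callable) -> Dict[str, Any]:
    # Keep only the keys that correspond to named parameters of `func`.
    accepted = set(inspect.signature(func).parameters)
    return {key: value for key, value in kwargs.items() if key in accepted}


def train(epochs: int, learning_rate: float = 0.01) -> None:
    ...


assert keep_accepted_kwargs({"epochs": 5, "batch_size": 32}, train) == {"epochs": 5}
```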
rasa/shared/utils/io.py CHANGED
@@ -13,7 +13,6 @@ from typing import Any, cast, Callable, Dict, List, Optional, Text, Type, TypeVa
 import warnings
 import random
 import string
-
 import portalocker
 
 from rasa.shared.constants import (
rasa/shared/utils/llm.py CHANGED
@@ -1,4 +1,3 @@
-import sys
 from functools import wraps
 from typing import (
     Any,
@@ -13,7 +12,6 @@ from typing import (
     cast,
 )
 import json
-
 import structlog
 
 import rasa.shared.utils.io
@@ -53,6 +51,7 @@ from rasa.shared.providers.mappings import (
     HUGGINGFACE_LOCAL_EMBEDDING_PROVIDER,
     get_client_config_class_from_provider,
 )
+from rasa.shared.utils.cli import print_error_and_exit
 
 if TYPE_CHECKING:
     from rasa.shared.core.trackers import DialogueStateTracker
@@ -67,7 +66,7 @@ DEFAULT_OPENAI_GENERATE_MODEL_NAME = "gpt-3.5-turbo"
 
 DEFAULT_OPENAI_CHAT_MODEL_NAME = "gpt-3.5-turbo"
 
-DEFAULT_OPENAI_CHAT_MODEL_NAME_ADVANCED = "gpt-4-0613"
+DEFAULT_OPENAI_CHAT_MODEL_NAME_ADVANCED = "gpt-4"
 
 DEFAULT_OPENAI_EMBEDDING_MODEL_NAME = "text-embedding-ada-002"
 
@@ -411,40 +410,44 @@ def try_instantiate_llm_client(
     default_llm_config: Optional[Dict],
     log_source_function: str,
     log_source_component: str,
-) -> None:
+) -> LLMClient:
     """Validate llm configuration."""
     try:
-        llm_factory(custom_llm_config, default_llm_config)
+        return llm_factory(custom_llm_config, default_llm_config)
     except (ProviderClientValidationError, ValueError) as e:
         structlogger.error(
             f"{log_source_function}.llm_instantiation_failed",
-            event_info=(
-                f"Unable to create the LLM client for component - "
-                f"{log_source_component}. Please make sure you specified the required "
-                f"environment variables and configuration keys."
-            ),
+            message="Unable to instantiate LLM client.",
             error=e,
         )
-        sys.exit(1)
+        print_error_and_exit(
+            f"Unable to create the LLM client for component - {log_source_component}. "
+            f"Please make sure you specified the required environment variables. "
+            f"Error: {e}"
+        )
 
 
-def try_instantiate_embedder(
-    custom_embeddings_config: Optional[Dict],
-    default_embeddings_config: Optional[Dict],
-    log_source_function: str,
-    log_source_component: str,
-) -> EmbeddingClient:
-    """Validate embeddings configuration."""
+def llm_api_health_check(
+    llm_client: LLMClient, log_source_function: str, log_source_component: str
+) -> None:
+    """Perform a health check on the LLM API."""
+    structlogger.info(
+        f"{log_source_function}.llm_api_call",
+        event_info=(
+            f"Performing a health check on the LLM API for the component - "
+            f"{log_source_component}."
+        ),
+        config=llm_client.config,
+    )
     try:
-        return embedder_factory(custom_embeddings_config, default_embeddings_config)
-    except (ProviderClientValidationError, ValueError) as e:
+        llm_client.completion("hello")
+    except Exception as e:
         structlogger.error(
-            f"{log_source_function}.embedder_instantiation_failed",
-            event_info=(
-                f"Unable to create the Embedding client for component - "
-                f"{log_source_component}. Please make sure you specified the required "
-                f"environment variables and configuration keys."
-            ),
+            f"{log_source_function}.llm_api_call_failed",
+            event_info="call to the LLM API failed.",
             error=e,
         )
-        sys.exit(1)
+        print_error_and_exit(
+            f"Call to the LLM API failed for component - {log_source_component}. "
+            f"Error: {e}"
+        )
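Since `try_instantiate_llm_client` now returns the client instead of discarding it, callers can feed it straight into the new `llm_api_health_check`, which issues a single test completion and exits with a CLI error on failure. A hypothetical caller; the component name and config values are placeholders:

```python
from rasa.shared.utils.llm import llm_api_health_check, try_instantiate_llm_client

llm_client = try_instantiate_llm_client(
    custom_llm_config={"provider": "openai", "model": "gpt-4"},  # placeholder config
    default_llm_config=None,
    log_source_function="train",
    log_source_component="SingleStepLLMCommandGenerator",
)
llm_api_health_check(
    llm_client,
    log_source_function="train",
    log_source_component="SingleStepLLMCommandGenerator",
)
```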
rasa/shared/utils/schemas/events.py CHANGED
@@ -127,6 +127,7 @@ ACTION_REVERTED = {"properties": {"event": {"const": "undo"}}}
 USER_UTTERANCE_REVERTED = {"properties": {"event": {"const": "rewind"}}}
 BOT_UTTERED = {"properties": {"event": {"const": "bot"}}}
 SESSION_STARTED = {"properties": {"event": {"const": "session_started"}}}
+SESSION_ENDED = {"properties": {"event": {"const": "session_ended"}}}
 AGENT_UTTERED = {"properties": {"event": {"const": "agent"}}}
 FLOW_STARTED = {
     "properties": {"event": {"const": "flow_started"}, "flow_id": {"type": "string"}}
@@ -206,6 +207,7 @@ EVENT_SCHEMA = {
         FLOW_CANCELLED,
         DIALOGUE_STACK_UPDATED,
         ROUTING_SESSION_ENDED,
+        SESSION_ENDED,
     ],
 }
 
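The new `SESSION_ENDED` branch accepts events whose `event` field equals the constant `session_ended`. A minimal payload that satisfies the added schema entry; the extra field is illustrative only:

```python
# A dict matching the added SESSION_ENDED schema entry: "event" must be the
# constant "session_ended"; the timestamp shown here is illustrative.
session_ended_event = {
    "event": "session_ended",
    "timestamp": 1730000000.0,
}
```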
rasa/shared/utils/schemas/model_config.yml CHANGED
@@ -34,13 +34,3 @@ mapping:
       name:
         type: str
         required: True
-      spaces:
-        type: "seq"
-        required: False
-        sequence:
-          - type: "map"
-            allowempty: True
-            mapping:
-              name:
-                type: str
-                required: True