rasa-pro 3.11.0a4.dev3__py3-none-any.whl → 3.11.0rc1__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.

Potentially problematic release: this version of rasa-pro might be problematic.

Files changed (163)
  1. rasa/__main__.py +22 -12
  2. rasa/api.py +1 -1
  3. rasa/cli/arguments/default_arguments.py +1 -2
  4. rasa/cli/arguments/shell.py +5 -1
  5. rasa/cli/e2e_test.py +1 -1
  6. rasa/cli/evaluate.py +8 -8
  7. rasa/cli/inspect.py +4 -4
  8. rasa/cli/llm_fine_tuning.py +1 -1
  9. rasa/cli/project_templates/calm/config.yml +5 -7
  10. rasa/cli/project_templates/calm/endpoints.yml +8 -0
  11. rasa/cli/project_templates/tutorial/config.yml +8 -5
  12. rasa/cli/project_templates/tutorial/data/flows.yml +1 -1
  13. rasa/cli/project_templates/tutorial/data/patterns.yml +5 -0
  14. rasa/cli/project_templates/tutorial/domain.yml +14 -0
  15. rasa/cli/project_templates/tutorial/endpoints.yml +7 -7
  16. rasa/cli/run.py +1 -1
  17. rasa/cli/scaffold.py +4 -2
  18. rasa/cli/utils.py +5 -0
  19. rasa/cli/x.py +8 -8
  20. rasa/constants.py +1 -1
  21. rasa/core/channels/channel.py +3 -0
  22. rasa/core/channels/inspector/dist/assets/{arc-6852c607.js → arc-bc141fb2.js} +1 -1
  23. rasa/core/channels/inspector/dist/assets/{c4Diagram-d0fbc5ce-acc952b2.js → c4Diagram-d0fbc5ce-be2db283.js} +1 -1
  24. rasa/core/channels/inspector/dist/assets/{classDiagram-936ed81e-848a7597.js → classDiagram-936ed81e-55366915.js} +1 -1
  25. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-c3cb15f1-a73d3e68.js → classDiagram-v2-c3cb15f1-bb529518.js} +1 -1
  26. rasa/core/channels/inspector/dist/assets/{createText-62fc7601-e5ee049d.js → createText-62fc7601-b0ec81d6.js} +1 -1
  27. rasa/core/channels/inspector/dist/assets/{edges-f2ad444c-771e517e.js → edges-f2ad444c-6166330c.js} +1 -1
  28. rasa/core/channels/inspector/dist/assets/{erDiagram-9d236eb7-aa347178.js → erDiagram-9d236eb7-5ccc6a8e.js} +1 -1
  29. rasa/core/channels/inspector/dist/assets/{flowDb-1972c806-651fc57d.js → flowDb-1972c806-fca3bfe4.js} +1 -1
  30. rasa/core/channels/inspector/dist/assets/{flowDiagram-7ea5b25a-ca67804f.js → flowDiagram-7ea5b25a-4739080f.js} +1 -1
  31. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-736177bf.js +1 -0
  32. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-abe16c3d-2dbc568d.js → flowchart-elk-definition-abe16c3d-7c1b0e0f.js} +1 -1
  33. rasa/core/channels/inspector/dist/assets/{ganttDiagram-9b5ea136-25a65bd8.js → ganttDiagram-9b5ea136-772fd050.js} +1 -1
  34. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-99d0ae7c-fdc7378d.js → gitGraphDiagram-99d0ae7c-8eae1dc9.js} +1 -1
  35. rasa/core/channels/inspector/dist/assets/{index-2c4b9a3b-6f1fd606.js → index-2c4b9a3b-f55afcdf.js} +1 -1
  36. rasa/core/channels/inspector/dist/assets/{index-efdd30c1.js → index-e7cef9de.js} +68 -68
  37. rasa/core/channels/inspector/dist/assets/{infoDiagram-736b4530-cb1a041a.js → infoDiagram-736b4530-124d4a14.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{journeyDiagram-df861f2b-14609879.js → journeyDiagram-df861f2b-7c4fae44.js} +1 -1
  39. rasa/core/channels/inspector/dist/assets/{layout-2490f52b.js → layout-b9885fb6.js} +1 -1
  40. rasa/core/channels/inspector/dist/assets/{line-40186f1f.js → line-7c59abb6.js} +1 -1
  41. rasa/core/channels/inspector/dist/assets/{linear-08814e93.js → linear-4776f780.js} +1 -1
  42. rasa/core/channels/inspector/dist/assets/{mindmap-definition-beec6740-1a534584.js → mindmap-definition-beec6740-2332c46c.js} +1 -1
  43. rasa/core/channels/inspector/dist/assets/{pieDiagram-dbbf0591-72397b61.js → pieDiagram-dbbf0591-8fb39303.js} +1 -1
  44. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-4d7f4fd6-3bb0b6a3.js → quadrantDiagram-4d7f4fd6-3c7180a2.js} +1 -1
  45. rasa/core/channels/inspector/dist/assets/{requirementDiagram-6fc4c22a-57334f61.js → requirementDiagram-6fc4c22a-e910bcb8.js} +1 -1
  46. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-8f13d901-111e1297.js → sankeyDiagram-8f13d901-ead16c89.js} +1 -1
  47. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-b655622a-10bcfe62.js → sequenceDiagram-b655622a-29a02a19.js} +1 -1
  48. rasa/core/channels/inspector/dist/assets/{stateDiagram-59f0c015-acaf7513.js → stateDiagram-59f0c015-042b3137.js} +1 -1
  49. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-2b26beab-3ec2a235.js → stateDiagram-v2-2b26beab-2178c0f3.js} +1 -1
  50. rasa/core/channels/inspector/dist/assets/{styles-080da4f6-62730289.js → styles-080da4f6-23ffa4fc.js} +1 -1
  51. rasa/core/channels/inspector/dist/assets/{styles-3dcbcfbf-5284ee76.js → styles-3dcbcfbf-94f59763.js} +1 -1
  52. rasa/core/channels/inspector/dist/assets/{styles-9c745c82-642435e3.js → styles-9c745c82-78a6bebc.js} +1 -1
  53. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-4835440b-b250a350.js → svgDrawCommon-4835440b-eae2a6f6.js} +1 -1
  54. rasa/core/channels/inspector/dist/assets/{timeline-definition-5b62e21b-c2b147ed.js → timeline-definition-5b62e21b-5c968d92.js} +1 -1
  55. rasa/core/channels/inspector/dist/assets/{xychartDiagram-2b33534f-f92cfea9.js → xychartDiagram-2b33534f-fd3db0d5.js} +1 -1
  56. rasa/core/channels/inspector/dist/index.html +1 -1
  57. rasa/core/channels/inspector/src/App.tsx +1 -1
  58. rasa/core/channels/inspector/src/helpers/audiostream.ts +77 -16
  59. rasa/core/channels/socketio.py +2 -1
  60. rasa/core/channels/telegram.py +1 -1
  61. rasa/core/channels/twilio.py +1 -1
  62. rasa/core/channels/voice_ready/jambonz.py +2 -2
  63. rasa/core/channels/voice_stream/asr/asr_event.py +5 -0
  64. rasa/core/channels/voice_stream/asr/azure.py +122 -0
  65. rasa/core/channels/voice_stream/asr/deepgram.py +16 -6
  66. rasa/core/channels/voice_stream/audio_bytes.py +1 -0
  67. rasa/core/channels/voice_stream/browser_audio.py +31 -8
  68. rasa/core/channels/voice_stream/call_state.py +23 -0
  69. rasa/core/channels/voice_stream/tts/azure.py +6 -2
  70. rasa/core/channels/voice_stream/tts/cartesia.py +10 -6
  71. rasa/core/channels/voice_stream/tts/tts_engine.py +1 -0
  72. rasa/core/channels/voice_stream/twilio_media_streams.py +27 -18
  73. rasa/core/channels/voice_stream/util.py +4 -4
  74. rasa/core/channels/voice_stream/voice_channel.py +177 -39
  75. rasa/core/featurizers/single_state_featurizer.py +22 -1
  76. rasa/core/featurizers/tracker_featurizers.py +115 -18
  77. rasa/core/nlg/contextual_response_rephraser.py +16 -22
  78. rasa/core/persistor.py +86 -39
  79. rasa/core/policies/enterprise_search_policy.py +159 -60
  80. rasa/core/policies/flows/flow_executor.py +7 -4
  81. rasa/core/policies/intentless_policy.py +120 -22
  82. rasa/core/policies/ted_policy.py +58 -33
  83. rasa/core/policies/unexpected_intent_policy.py +15 -7
  84. rasa/core/processor.py +25 -0
  85. rasa/core/training/interactive.py +34 -35
  86. rasa/core/utils.py +8 -3
  87. rasa/dialogue_understanding/coexistence/llm_based_router.py +58 -16
  88. rasa/dialogue_understanding/commands/change_flow_command.py +6 -0
  89. rasa/dialogue_understanding/commands/user_silence_command.py +59 -0
  90. rasa/dialogue_understanding/commands/utils.py +5 -0
  91. rasa/dialogue_understanding/generator/constants.py +4 -0
  92. rasa/dialogue_understanding/generator/flow_retrieval.py +65 -3
  93. rasa/dialogue_understanding/generator/llm_based_command_generator.py +68 -26
  94. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +57 -8
  95. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +64 -7
  96. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +39 -0
  97. rasa/dialogue_understanding/patterns/user_silence.py +37 -0
  98. rasa/e2e_test/e2e_test_runner.py +4 -2
  99. rasa/e2e_test/utils/io.py +1 -1
  100. rasa/engine/validation.py +297 -7
  101. rasa/model_manager/config.py +15 -3
  102. rasa/model_manager/model_api.py +15 -7
  103. rasa/model_manager/runner_service.py +8 -6
  104. rasa/model_manager/socket_bridge.py +6 -3
  105. rasa/model_manager/trainer_service.py +7 -5
  106. rasa/model_manager/utils.py +28 -7
  107. rasa/model_service.py +6 -2
  108. rasa/model_training.py +2 -0
  109. rasa/nlu/classifiers/diet_classifier.py +38 -25
  110. rasa/nlu/classifiers/logistic_regression_classifier.py +22 -9
  111. rasa/nlu/classifiers/sklearn_intent_classifier.py +37 -16
  112. rasa/nlu/extractors/crf_entity_extractor.py +93 -50
  113. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +45 -16
  114. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +52 -17
  115. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +5 -3
  116. rasa/shared/constants.py +36 -3
  117. rasa/shared/core/constants.py +7 -0
  118. rasa/shared/core/domain.py +26 -0
  119. rasa/shared/core/flows/flow.py +5 -0
  120. rasa/shared/core/flows/flows_yaml_schema.json +10 -0
  121. rasa/shared/core/flows/utils.py +39 -0
  122. rasa/shared/core/flows/validation.py +96 -0
  123. rasa/shared/core/slots.py +5 -0
  124. rasa/shared/nlu/training_data/features.py +120 -2
  125. rasa/shared/providers/_configs/azure_openai_client_config.py +5 -3
  126. rasa/shared/providers/_configs/litellm_router_client_config.py +200 -0
  127. rasa/shared/providers/_configs/model_group_config.py +167 -0
  128. rasa/shared/providers/_configs/openai_client_config.py +1 -1
  129. rasa/shared/providers/_configs/rasa_llm_client_config.py +73 -0
  130. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +1 -0
  131. rasa/shared/providers/_configs/utils.py +16 -0
  132. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +12 -15
  133. rasa/shared/providers/embedding/azure_openai_embedding_client.py +54 -21
  134. rasa/shared/providers/embedding/litellm_router_embedding_client.py +135 -0
  135. rasa/shared/providers/llm/_base_litellm_client.py +31 -30
  136. rasa/shared/providers/llm/azure_openai_llm_client.py +50 -29
  137. rasa/shared/providers/llm/litellm_router_llm_client.py +127 -0
  138. rasa/shared/providers/llm/rasa_llm_client.py +112 -0
  139. rasa/shared/providers/llm/self_hosted_llm_client.py +1 -1
  140. rasa/shared/providers/mappings.py +19 -0
  141. rasa/shared/providers/router/__init__.py +0 -0
  142. rasa/shared/providers/router/_base_litellm_router_client.py +149 -0
  143. rasa/shared/providers/router/router_client.py +73 -0
  144. rasa/shared/utils/common.py +8 -0
  145. rasa/shared/utils/health_check.py +533 -0
  146. rasa/shared/utils/io.py +28 -6
  147. rasa/shared/utils/llm.py +350 -46
  148. rasa/shared/utils/yaml.py +11 -13
  149. rasa/studio/upload.py +64 -20
  150. rasa/telemetry.py +80 -17
  151. rasa/tracing/instrumentation/attribute_extractors.py +74 -17
  152. rasa/utils/io.py +0 -66
  153. rasa/utils/log_utils.py +9 -2
  154. rasa/utils/tensorflow/feature_array.py +366 -0
  155. rasa/utils/tensorflow/model_data.py +2 -193
  156. rasa/validator.py +70 -0
  157. rasa/version.py +1 -1
  158. {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc1.dist-info}/METADATA +10 -10
  159. {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc1.dist-info}/RECORD +162 -146
  160. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-587d82d8.js +0 -1
  161. {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc1.dist-info}/NOTICE +0 -0
  162. {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc1.dist-info}/WHEEL +0 -0
  163. {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc1.dist-info}/entry_points.txt +0 -0
rasa/utils/tensorflow/feature_array.py ADDED
@@ -0,0 +1,366 @@
+ from typing import Dict, Any, List, Tuple, Optional, Union
+
+ from safetensors.numpy import save_file
+ import numpy as np
+ from safetensors.numpy import load_file
+ import scipy.sparse
+
+ import rasa.shared.utils.io
+
+
+ def _recursive_serialize(
+     array: Any, prefix: str, data_dict: Dict[str, Any], metadata: List[Dict[str, Any]]
+ ) -> None:
+     """Recursively serialize arrays and matrices for high dimensional data."""
+     if isinstance(array, np.ndarray) and array.ndim <= 2:
+         data_key = f"{prefix}_array"
+         data_dict[data_key] = array
+         metadata.append({"type": "dense", "key": data_key, "shape": array.shape})
+
+     elif isinstance(array, list) and all([isinstance(v, float) for v in array]):
+         data_key = f"{prefix}_list"
+         data_dict[data_key] = np.array(array, dtype=np.float32)
+         metadata.append({"type": "list", "key": data_key})
+
+     elif isinstance(array, list) and all([isinstance(v, int) for v in array]):
+         data_key = f"{prefix}_list"
+         data_dict[data_key] = np.array(array, dtype=np.int64)
+         metadata.append({"type": "list", "key": data_key})
+
+     elif isinstance(array, scipy.sparse.spmatrix):
+         data_key_data = f"{prefix}_data"
+         data_key_row = f"{prefix}_row"
+         data_key_col = f"{prefix}_col"
+         array = array.tocoo()
+         data_dict.update(
+             {
+                 data_key_data: array.data,
+                 data_key_row: array.row,
+                 data_key_col: array.col,
+             }
+         )
+         metadata.append({"type": "sparse", "key": prefix, "shape": array.shape})
+
+     elif isinstance(array, list) or isinstance(array, np.ndarray):
+         group_metadata = {"type": "group", "subcomponents": []}
+         for idx, item in enumerate(array):
+             new_prefix = f"{prefix}_{idx}"
+             _recursive_serialize(
+                 item, new_prefix, data_dict, group_metadata["subcomponents"]
+             )
+         metadata.append(group_metadata)
+
+
+ def _serialize_nested_data(
+     nested_data: Dict[str, Dict[str, List["FeatureArray"]]],
+     prefix: str,
+     data_dict: Dict[str, np.ndarray],
+     metadata: List[Dict[str, Union[str, List]]],
+ ) -> None:
+     """Handle serialization across dictionary and list levels."""
+     for outer_key, inner_dict in nested_data.items():
+         inner_metadata = {"key": outer_key, "components": []}
+
+         for inner_key, feature_arrays in inner_dict.items():
+             array_metadata = {
+                 "key": inner_key,
+                 "number_of_dimensions": feature_arrays[0].number_of_dimensions,
+                 "features": [],
+             }
+
+             for idx, feature_array in enumerate(feature_arrays):
+                 feature_prefix = f"{prefix}_{outer_key}_{inner_key}_{idx}"
+                 _recursive_serialize(
+                     feature_array.tolist(),
+                     feature_prefix,
+                     data_dict,
+                     array_metadata["features"],
+                 )
+
+             inner_metadata["components"].append(array_metadata) # type:ignore[attr-defined]
+
+         metadata.append(inner_metadata)
+
+
+ def serialize_nested_feature_arrays(
+     nested_feature_array: Dict[str, Dict[str, List["FeatureArray"]]],
+     data_filename: str,
+     metadata_filename: str,
+ ) -> None:
+     data_dict: Dict[str, np.ndarray] = {}
+     metadata: List[Dict[str, Union[str, List]]] = []
+
+     _serialize_nested_data(nested_feature_array, "component", data_dict, metadata)
+
+     # Save serialized data and metadata
+     save_file(data_dict, data_filename)
+     rasa.shared.utils.io.dump_obj_as_json_to_file(metadata_filename, metadata)
+
+
+ def _recursive_deserialize(
+     metadata: List[Dict[str, Any]], data: Dict[str, Any]
+ ) -> List[Any]:
+     """Recursively deserialize arrays and matrices for high dimensional data."""
+     result = []
+
+     for item in metadata:
+         if item["type"] == "dense":
+             key = item["key"]
+             array = np.asarray(data[key]).reshape(item["shape"])
+             result.append(array)
+
+         elif item["type"] == "list":
+             key = item["key"]
+             result.append(list(data[key]))
+
+         elif item["type"] == "sparse":
+             data_vals = data[f"{item['key']}_data"]
+             row_vals = data[f"{item['key']}_row"]
+             col_vals = data[f"{item['key']}_col"]
+             sparse_matrix = scipy.sparse.coo_matrix(
+                 (data_vals, (row_vals, col_vals)), shape=item["shape"]
+             )
+             result.append(sparse_matrix)
+         elif item["type"] == "group":
+             sublist = _recursive_deserialize(item["subcomponents"], data)
+             result.append(sublist)
+
+     return result
+
+
+ def _deserialize_nested_data(
+     metadata: List[Dict[str, Any]], data_dict: Dict[str, Any]
+ ) -> Dict[str, Dict[str, List["FeatureArray"]]]:
+     """Handle deserialization across all dictionary and list levels."""
+     result: Dict[str, Dict[str, List["FeatureArray"]]] = {}
+
+     for outer_item in metadata:
+         outer_key = outer_item["key"]
+         result[outer_key] = {}
+
+         for inner_item in outer_item["components"]:
+             inner_key = inner_item["key"]
+             feature_arrays = []
+
+             # Reconstruct the list of FeatureArrays
+             for feature_item in inner_item["features"]:
+                 # Reconstruct the list of FeatureArrays
+                 feature_array_data = _recursive_deserialize([feature_item], data_dict)
+                 # Prepare the input for the FeatureArray;
+                 # ensure it is np.ndarray compatible
+                 input_array = np.array(feature_array_data[0], dtype=object)
+                 feature_array = FeatureArray(
+                     input_array, inner_item["number_of_dimensions"]
+                 )
+                 feature_arrays.append(feature_array)
+
+             result[outer_key][inner_key] = feature_arrays
+
+     return result
+
+
+ def deserialize_nested_feature_arrays(
+     data_filename: str, metadata_filename: str
+ ) -> Dict[str, Dict[str, List["FeatureArray"]]]:
+     metadata = rasa.shared.utils.io.read_json_file(metadata_filename)
+     data_dict = load_file(data_filename)
+
+     return _deserialize_nested_data(metadata, data_dict)
+
+
+ class FeatureArray(np.ndarray):
+     """Stores any kind of features ready to be used by a RasaModel.
+
+     Next to the input numpy array of features, it also received the number of
+     dimensions of the features.
+     As our features can have 1 to 4 dimensions we might have different number of numpy
+     arrays stacked. The number of dimensions helps us to figure out how to handle this
+     particular feature array. Also, it is automatically determined whether the feature
+     array is sparse or not and the number of units is determined as well.
+
+     Subclassing np.array: https://numpy.org/doc/stable/user/basics.subclassing.html
+     """
+
+     def __new__(
+         cls, input_array: np.ndarray, number_of_dimensions: int
+     ) -> "FeatureArray":
+         """Create and return a new object. See help(type) for accurate signature."""
+         FeatureArray._validate_number_of_dimensions(number_of_dimensions, input_array)
+
+         feature_array = np.asarray(input_array).view(cls)
+
+         if number_of_dimensions <= 2:
+             feature_array.units = input_array.shape[-1]
+             feature_array.is_sparse = isinstance(input_array[0], scipy.sparse.spmatrix)
+         elif number_of_dimensions == 3:
+             feature_array.units = input_array[0].shape[-1]
+             feature_array.is_sparse = isinstance(input_array[0], scipy.sparse.spmatrix)
+         elif number_of_dimensions == 4:
+             feature_array.units = input_array[0][0].shape[-1]
+             feature_array.is_sparse = isinstance(
+                 input_array[0][0], scipy.sparse.spmatrix
+             )
+         else:
+             raise ValueError(
+                 f"Number of dimensions '{number_of_dimensions}' currently not "
+                 f"supported."
+             )
+
+         feature_array.number_of_dimensions = number_of_dimensions
+
+         return feature_array
+
+     def __init__(
+         self, input_array: Any, number_of_dimensions: int, **kwargs: Any
+     ) -> None:
+         """Initialize. FeatureArray.
+
+         Needed in order to avoid 'Invalid keyword argument number_of_dimensions
+         to function FeatureArray.__init__ '
+         Args:
+             input_array: the array that contains features
+             number_of_dimensions: number of dimensions in input_array
+         """
+         super().__init__(**kwargs)
+         self.number_of_dimensions = number_of_dimensions
+
+     def __array_finalize__(self, obj: Optional[np.ndarray]) -> None:
+         """This method is called when the system allocates a new array from obj.
+
+         Args:
+             obj: A subclass (subtype) of ndarray.
+         """
+         if obj is None:
+             return
+
+         self.units = getattr(obj, "units", None)
+         self.number_of_dimensions = getattr(obj, "number_of_dimensions", None) # type: ignore[assignment]
+         self.is_sparse = getattr(obj, "is_sparse", None)
+
+         default_attributes = {
+             "units": self.units,
+             "number_of_dimensions": self.number_of_dimensions,
+             "is_spare": self.is_sparse,
+         }
+         self.__dict__.update(default_attributes)
+
+     # pytype: disable=attribute-error
+     def __array_ufunc__(
+         self, ufunc: Any, method: str, *inputs: Any, **kwargs: Any
+     ) -> Any:
+         """Overwrite this method as we are subclassing numpy array.
+
+         Args:
+             ufunc: The ufunc object that was called.
+             method: A string indicating which Ufunc method was called
+                 (one of "__call__", "reduce", "reduceat", "accumulate", "outer",
+                 "inner").
+             *inputs: A tuple of the input arguments to the ufunc.
+             **kwargs: Any additional arguments
+
+         Returns:
+             The result of the operation.
+         """
+         f = {
+             "reduce": ufunc.reduce,
+             "accumulate": ufunc.accumulate,
+             "reduceat": ufunc.reduceat,
+             "outer": ufunc.outer,
+             "at": ufunc.at,
+             "__call__": ufunc,
+         }
+         # convert the inputs to np.ndarray to prevent recursion, call the function,
+         # then cast it back as FeatureArray
+         output = FeatureArray(
+             f[method](*(i.view(np.ndarray) for i in inputs), **kwargs),
+             number_of_dimensions=kwargs["number_of_dimensions"],
+         )
+         output.__dict__ = self.__dict__ # carry forward attributes
+         return output
+
+     def __reduce__(self) -> Tuple[Any, Any, Any]:
+         """Needed in order to pickle this object.
+
+         Returns:
+             A tuple.
+         """
+         pickled_state = super(FeatureArray, self).__reduce__()
+         if isinstance(pickled_state, str):
+             raise TypeError("np array __reduce__ returned string instead of tuple.")
+         new_state = pickled_state[2] + (
+             self.number_of_dimensions,
+             self.is_sparse,
+             self.units,
+         )
+         return pickled_state[0], pickled_state[1], new_state
+
+     def __setstate__(self, state: Any, **kwargs: Any) -> None:
+         """Sets the state.
+
+         Args:
+             state: The state argument must be a sequence that contains the following
+                 elements version, shape, dtype, isFortan, rawdata.
+             **kwargs: Any additional parameter
+         """
+         # Needed in order to load the object
+         self.number_of_dimensions = state[-3]
+         self.is_sparse = state[-2]
+         self.units = state[-1]
+         super(FeatureArray, self).__setstate__(state[0:-3], **kwargs)
+
+     # pytype: enable=attribute-error
+
+     @staticmethod
+     def _validate_number_of_dimensions(
+         number_of_dimensions: int, input_array: np.ndarray
+     ) -> None:
+         """Validates if the input array has given number of dimensions.
+
+         Args:
+             number_of_dimensions: number of dimensions
+             input_array: input array
+
+         Raises: ValueError in case the dimensions do not match
+         """
+         # when loading the feature arrays from disk, the shape represents
+         # the correct number of dimensions
+         if len(input_array.shape) == number_of_dimensions:
+             return
+
+         _sub_array = input_array
+         dim = 0
+         # Go number_of_dimensions into the given input_array
+         for i in range(1, number_of_dimensions + 1):
+             _sub_array = _sub_array[0]
+             if isinstance(_sub_array, scipy.sparse.spmatrix):
+                 dim = i
+                 break
+             if isinstance(_sub_array, np.ndarray) and _sub_array.shape[0] == 0:
+                 # sequence dimension is 0, we are dealing with "fake" features
+                 dim = i
+                 break
+
+         # If the resulting sub_array is sparse, the remaining number of dimensions
+         # should be at least 2
+         if isinstance(_sub_array, scipy.sparse.spmatrix):
+             if dim > 2:
+                 raise ValueError(
+                     f"Given number of dimensions '{number_of_dimensions}' does not "
+                     f"match dimensions of given input array: {input_array}."
+                 )
+         elif isinstance(_sub_array, np.ndarray) and _sub_array.shape[0] == 0:
+             # sequence dimension is 0, we are dealing with "fake" features,
+             # but they should be of dim 2
+             if dim > 2:
+                 raise ValueError(
+                     f"Given number of dimensions '{number_of_dimensions}' does not "
+                     f"match dimensions of given input array: {input_array}."
+                 )
+         # If the resulting sub_array is dense, the sub_array should be a single number
+         elif not np.issubdtype(type(_sub_array), np.integer) and not isinstance(
+             _sub_array, (np.float32, np.float64)
+         ):
+             raise ValueError(
+                 f"Given number of dimensions '{number_of_dimensions}' does not match "
+                 f"dimensions of given input array: {input_array}."
+             )
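
The new feature_array.py module stores nested FeatureArray structures as a safetensors file plus a JSON metadata file. As a rough orientation only, a round trip through the two public helpers might look like the sketch below; the file names, the toy dense matrix, and the "text"/"sentence" keys are illustrative assumptions, not values taken from the package.

# Hypothetical usage sketch; assumes this rasa-pro build is installed.
import numpy as np

from rasa.utils.tensorflow.feature_array import (
    FeatureArray,
    deserialize_nested_feature_arrays,
    serialize_nested_feature_arrays,
)

# A 5x10 dense feature matrix wrapped as a two-dimensional FeatureArray.
features = FeatureArray(
    np.random.rand(5, 10).astype(np.float32), number_of_dimensions=2
)
nested = {"text": {"sentence": [features]}}

# Arrays go into the safetensors file, structure and dimensions into the JSON file.
serialize_nested_feature_arrays(nested, "features.safetensors", "features.json")

# Rebuild the same nested structure of FeatureArrays from the two files.
restored = deserialize_nested_feature_arrays("features.safetensors", "features.json")
assert restored["text"]["sentence"][0].number_of_dimensions == 2
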
rasa/utils/tensorflow/model_data.py CHANGED
@@ -20,6 +20,8 @@ import numpy as np
  import scipy.sparse
  from sklearn.model_selection import train_test_split

+ from rasa.utils.tensorflow.feature_array import FeatureArray
+
  logger = logging.getLogger(__name__)


@@ -37,199 +39,6 @@ def ragged_array_to_ndarray(ragged_array: Iterable[np.ndarray]) -> np.ndarray:
      return np.array(ragged_array, dtype=object)


- class FeatureArray(np.ndarray):
-     """Stores any kind of features ready to be used by a RasaModel.
-
-     Next to the input numpy array of features, it also received the number of
-     dimensions of the features.
-     As our features can have 1 to 4 dimensions we might have different number of numpy
-     arrays stacked. The number of dimensions helps us to figure out how to handle this
-     particular feature array. Also, it is automatically determined whether the feature
-     array is sparse or not and the number of units is determined as well.
-
-     Subclassing np.array: https://numpy.org/doc/stable/user/basics.subclassing.html
-     """
-
-     def __new__(
-         cls, input_array: np.ndarray, number_of_dimensions: int
-     ) -> "FeatureArray":
-         """Create and return a new object. See help(type) for accurate signature."""
-         FeatureArray._validate_number_of_dimensions(number_of_dimensions, input_array)
-
-         feature_array = np.asarray(input_array).view(cls)
-
-         if number_of_dimensions <= 2:
-             feature_array.units = input_array.shape[-1]
-             feature_array.is_sparse = isinstance(input_array[0], scipy.sparse.spmatrix)
-         elif number_of_dimensions == 3:
-             feature_array.units = input_array[0].shape[-1]
-             feature_array.is_sparse = isinstance(input_array[0], scipy.sparse.spmatrix)
-         elif number_of_dimensions == 4:
-             feature_array.units = input_array[0][0].shape[-1]
-             feature_array.is_sparse = isinstance(
-                 input_array[0][0], scipy.sparse.spmatrix
-             )
-         else:
-             raise ValueError(
-                 f"Number of dimensions '{number_of_dimensions}' currently not "
-                 f"supported."
-             )
-
-         feature_array.number_of_dimensions = number_of_dimensions
-
-         return feature_array
-
-     def __init__(
-         self, input_array: Any, number_of_dimensions: int, **kwargs: Any
-     ) -> None:
-         """Initialize. FeatureArray.
-
-         Needed in order to avoid 'Invalid keyword argument number_of_dimensions
-         to function FeatureArray.__init__ '
-         Args:
-             input_array: the array that contains features
-             number_of_dimensions: number of dimensions in input_array
-         """
-         super().__init__(**kwargs)
-         self.number_of_dimensions = number_of_dimensions
-
-     def __array_finalize__(self, obj: Optional[np.ndarray]) -> None:
-         """This method is called when the system allocates a new array from obj.
-
-         Args:
-             obj: A subclass (subtype) of ndarray.
-         """
-         if obj is None:
-             return
-
-         self.units = getattr(obj, "units", None)
-         self.number_of_dimensions = getattr(obj, "number_of_dimensions", None) # type: ignore[assignment]
-         self.is_sparse = getattr(obj, "is_sparse", None)
-
-         default_attributes = {
-             "units": self.units,
-             "number_of_dimensions": self.number_of_dimensions,
-             "is_spare": self.is_sparse,
-         }
-         self.__dict__.update(default_attributes)
-
-     # pytype: disable=attribute-error
-     def __array_ufunc__(
-         self, ufunc: Any, method: Text, *inputs: Any, **kwargs: Any
-     ) -> Any:
-         """Overwrite this method as we are subclassing numpy array.
-
-         Args:
-             ufunc: The ufunc object that was called.
-             method: A string indicating which Ufunc method was called
-                 (one of "__call__", "reduce", "reduceat", "accumulate", "outer",
-                 "inner").
-             *inputs: A tuple of the input arguments to the ufunc.
-             **kwargs: Any additional arguments
-
-         Returns:
-             The result of the operation.
-         """
-         f = {
-             "reduce": ufunc.reduce,
-             "accumulate": ufunc.accumulate,
-             "reduceat": ufunc.reduceat,
-             "outer": ufunc.outer,
-             "at": ufunc.at,
-             "__call__": ufunc,
-         }
-         # convert the inputs to np.ndarray to prevent recursion, call the function,
-         # then cast it back as FeatureArray
-         output = FeatureArray(
-             f[method](*(i.view(np.ndarray) for i in inputs), **kwargs),
-             number_of_dimensions=kwargs["number_of_dimensions"],
-         )
-         output.__dict__ = self.__dict__ # carry forward attributes
-         return output
-
-     def __reduce__(self) -> Tuple[Any, Any, Any]:
-         """Needed in order to pickle this object.
-
-         Returns:
-             A tuple.
-         """
-         pickled_state = super(FeatureArray, self).__reduce__()
-         if isinstance(pickled_state, str):
-             raise TypeError("np array __reduce__ returned string instead of tuple.")
-         new_state = pickled_state[2] + (
-             self.number_of_dimensions,
-             self.is_sparse,
-             self.units,
-         )
-         return pickled_state[0], pickled_state[1], new_state
-
-     def __setstate__(self, state: Any, **kwargs: Any) -> None:
-         """Sets the state.
-
-         Args:
-             state: The state argument must be a sequence that contains the following
-                 elements version, shape, dtype, isFortan, rawdata.
-             **kwargs: Any additional parameter
-         """
-         # Needed in order to load the object
-         self.number_of_dimensions = state[-3]
-         self.is_sparse = state[-2]
-         self.units = state[-1]
-         super(FeatureArray, self).__setstate__(state[0:-3], **kwargs)
-
-     # pytype: enable=attribute-error
-
-     @staticmethod
-     def _validate_number_of_dimensions(
-         number_of_dimensions: int, input_array: np.ndarray
-     ) -> None:
-         """Validates if the the input array has given number of dimensions.
-
-         Args:
-             number_of_dimensions: number of dimensions
-             input_array: input array
-
-         Raises: ValueError in case the dimensions do not match
-         """
-         _sub_array = input_array
-         dim = 0
-         # Go number_of_dimensions into the given input_array
-         for i in range(1, number_of_dimensions + 1):
-             _sub_array = _sub_array[0]
-             if isinstance(_sub_array, scipy.sparse.spmatrix):
-                 dim = i
-                 break
-             if isinstance(_sub_array, np.ndarray) and _sub_array.shape[0] == 0:
-                 # sequence dimension is 0, we are dealing with "fake" features
-                 dim = i
-                 break
-
-         # If the resulting sub_array is sparse, the remaining number of dimensions
-         # should be at least 2
-         if isinstance(_sub_array, scipy.sparse.spmatrix):
-             if dim > 2:
-                 raise ValueError(
-                     f"Given number of dimensions '{number_of_dimensions}' does not "
-                     f"match dimensions of given input array: {input_array}."
-                 )
-         elif isinstance(_sub_array, np.ndarray) and _sub_array.shape[0] == 0:
-             # sequence dimension is 0, we are dealing with "fake" features,
-             # but they should be of dim 2
-             if dim > 2:
-                 raise ValueError(
-                     f"Given number of dimensions '{number_of_dimensions}' does not "
-                     f"match dimensions of given input array: {input_array}."
-                 )
-         # If the resulting sub_array is dense, the sub_array should be a single number
-         elif not np.issubdtype(type(_sub_array), np.integer) and not isinstance(
-             _sub_array, (np.float32, np.float64)
-         ):
-             raise ValueError(
-                 f"Given number of dimensions '{number_of_dimensions}' does not match "
-                 f"dimensions of given input array: {input_array}."
-             )
-
-
  class FeatureSignature(NamedTuple):
      """Signature of feature arrays.

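The hunks above only relocate FeatureArray: the class definition moves to rasa/utils/tensorflow/feature_array.py, and model_data.py imports it at module level, so, as far as this diff shows, code that imported the class from model_data should still resolve to the same object. A minimal check of that inference:

import rasa.utils.tensorflow.feature_array as feature_array
import rasa.utils.tensorflow.model_data as model_data

# Both modules expose the class defined in feature_array.py.
assert model_data.FeatureArray is feature_array.FeatureArray
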
rasa/validator.py CHANGED
@@ -22,6 +22,11 @@ from rasa.shared.core.flows.steps.collect import CollectInformationFlowStep
  from rasa.shared.core.flows.steps.action import ActionFlowStep
  from rasa.shared.core.flows.steps.link import LinkFlowStep
  from rasa.shared.core.flows import FlowsList
+ from rasa.shared.core.flows.utils import (
+     warn_deprecated_collect_step_config,
+     get_duplicate_slot_persistence_config_error_message,
+     get_invalid_slot_persistence_config_error_message,
+ )
  import rasa.shared.nlu.constants
  from rasa.shared.constants import (
      ASSISTANT_ID_DEFAULT_VALUE,
@@ -55,6 +60,7 @@ from rasa.shared.importers.importer import TrainingDataImporter
  from rasa.shared.nlu.constants import COMMANDS
  from rasa.shared.nlu.training_data.message import Message
  from rasa.shared.nlu.training_data.training_data import TrainingData
+
  import rasa.shared.utils.cli
  import rasa.shared.utils.io

@@ -1237,6 +1243,7 @@ class Validator:
              self.verify_flows_steps_against_domain(),
              self.verify_unique_flows(),
              self.verify_predicates(),
+             self.verify_slot_persistence_configuration(),
          ]

          all_good = all(flow_validation_conditions)
@@ -1533,3 +1540,66 @@ class Validator:
                  ),
              )
              return False
+
+     def verify_slot_persistence_configuration(self) -> bool:
+         """Verifies the validity of slot persistence after flow ends configuration.
+
+         Only slots used in either a collect step or a set_slot step can be persisted
+         and the configuration can either be set at the flow level or the collect step
+         level, but not both.
+
+         Returns:
+             bool: True if all slot persistence configuration is valid, False otherwise.
+
+         Raises:
+             DeprecationWarning: If reset_after_flow_ends is used in collect steps.
+         """
+         all_good = True
+
+         for flow in self.flows.underlying_flows:
+             flow_id = flow.id
+             persist_slots = flow.persisted_slots
+             has_flow_level_persistence = True if persist_slots else False
+             flow_slots = set()
+
+             for step in flow.steps_with_calls_resolved:
+                 if isinstance(step, SetSlotsFlowStep):
+                     flow_slots.update([slot["key"] for slot in step.slots])
+
+                 elif isinstance(step, CollectInformationFlowStep):
+                     collect_step = step.collect
+                     flow_slots.add(collect_step)
+                     if not step.reset_after_flow_ends:
+                         warn_deprecated_collect_step_config(flow_id, collect_step)
+
+                         if has_flow_level_persistence:
+                             structlogger.error(
+                                 "validator.verify_slot_persistence_configuration.duplicate_config",
+                                 flow=flow_id,
+                                 collect_step=collect_step,
+                                 event_info=get_duplicate_slot_persistence_config_error_message(
+                                     flow_id, collect_step
+                                 ),
+                             )
+                             all_good = False
+
+             if has_flow_level_persistence:
+                 if not self._is_persist_slots_valid(persist_slots, flow_slots, flow_id):
+                     all_good = False
+         return all_good
+
+     def _is_persist_slots_valid(
+         self, persist_slots: List[str], flow_slots: Set[str], flow_id: str
+     ) -> bool:
+         invalid_slots = set(persist_slots) - flow_slots
+         is_valid = False if invalid_slots else True
+
+         if invalid_slots:
+             structlogger.error(
+                 "validator.verify_slot_persistence_configuration.invalid_persist_slot",
+                 flow=flow_id,
+                 event_info=get_invalid_slot_persistence_config_error_message(
+                     flow_id, invalid_slots
+                 ),
+             )
+         return is_valid
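
At its core, the new validation is a set-difference check: every slot named in a flow's persisted_slots setting must also appear in one of that flow's collect or set_slots steps. The standalone sketch below restates that rule outside the Validator class; the slot names are made up for illustration.

from typing import List, Set


def persist_slots_are_valid(persist_slots: List[str], flow_slots: Set[str]) -> bool:
    """Return True when every persisted slot is referenced by the flow's steps."""
    invalid_slots = set(persist_slots) - flow_slots
    return not invalid_slots


# Slots referenced by the collect / set_slots steps of a hypothetical flow.
flow_slots = {"destination", "travel_date"}
assert persist_slots_are_valid(["destination"], flow_slots)
assert not persist_slots_are_valid(["loyalty_number"], flow_slots)
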
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
  # this file will automatically be changed,
  # do not add anything but the version number here!
- __version__ = "3.11.0a4.dev3"
+ __version__ = "3.11.0rc1"