rasa-pro 3.10.9.dev1__py3-none-any.whl → 3.10.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rasa-pro might be problematic.

Files changed (35)
  1. rasa/cli/arguments/train.py +9 -3
  2. rasa/cli/train.py +40 -2
  3. rasa/cli/utils.py +7 -5
  4. rasa/constants.py +1 -1
  5. rasa/core/featurizers/single_state_featurizer.py +1 -22
  6. rasa/core/featurizers/tracker_featurizers.py +18 -115
  7. rasa/core/policies/ted_policy.py +33 -58
  8. rasa/core/policies/unexpected_intent_policy.py +7 -15
  9. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +20 -3
  10. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +29 -4
  11. rasa/e2e_test/e2e_test_runner.py +4 -2
  12. rasa/engine/storage/local_model_storage.py +41 -12
  13. rasa/model_training.py +10 -3
  14. rasa/nlu/classifiers/diet_classifier.py +25 -38
  15. rasa/nlu/classifiers/logistic_regression_classifier.py +9 -22
  16. rasa/nlu/classifiers/sklearn_intent_classifier.py +16 -37
  17. rasa/nlu/extractors/crf_entity_extractor.py +50 -93
  18. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +16 -45
  19. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +17 -52
  20. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +3 -5
  21. rasa/nlu/persistor.py +37 -15
  22. rasa/shared/constants.py +4 -1
  23. rasa/shared/importers/importer.py +7 -8
  24. rasa/shared/nlu/training_data/features.py +2 -120
  25. rasa/shared/utils/io.py +0 -1
  26. rasa/utils/io.py +66 -0
  27. rasa/utils/tensorflow/model_data.py +193 -2
  28. rasa/version.py +1 -1
  29. {rasa_pro-3.10.9.dev1.dist-info → rasa_pro-3.10.11.dist-info}/METADATA +6 -6
  30. {rasa_pro-3.10.9.dev1.dist-info → rasa_pro-3.10.11.dist-info}/RECORD +33 -35
  31. {rasa_pro-3.10.9.dev1.dist-info → rasa_pro-3.10.11.dist-info}/WHEEL +1 -1
  32. rasa/shared/importers/remote_importer.py +0 -196
  33. rasa/utils/tensorflow/feature_array.py +0 -366
  34. {rasa_pro-3.10.9.dev1.dist-info → rasa_pro-3.10.11.dist-info}/NOTICE +0 -0
  35. {rasa_pro-3.10.9.dev1.dist-info → rasa_pro-3.10.11.dist-info}/entry_points.txt +0 -0
rasa/shared/constants.py CHANGED
@@ -111,7 +111,10 @@ CONFIG_KEYS_NLU = ["language", "pipeline"] + CONFIG_MANDATORY_COMMON_KEYS
  CONFIG_KEYS = CONFIG_KEYS_CORE + CONFIG_KEYS_NLU
  CONFIG_MANDATORY_KEYS_CORE: List[Text] = [] + CONFIG_MANDATORY_COMMON_KEYS
  CONFIG_MANDATORY_KEYS_NLU = ["language"] + CONFIG_MANDATORY_COMMON_KEYS
- CONFIG_MANDATORY_KEYS = CONFIG_MANDATORY_KEYS_CORE + CONFIG_MANDATORY_KEYS_NLU
+ # we need the list to contain unique values
+ CONFIG_MANDATORY_KEYS = list(
+     set(CONFIG_MANDATORY_KEYS_CORE + CONFIG_MANDATORY_KEYS_NLU)
+ )

  # Keys related to Forms (in the Domain)
  REQUIRED_SLOTS_KEY = "required_slots"
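
Both mandatory-key lists embed `CONFIG_MANDATORY_COMMON_KEYS`, so the old concatenation listed those shared keys twice; the new `list(set(...))` keeps each key exactly once, at the cost of a stable ordering. A minimal sketch of the behavior, with illustrative values rather than the real constants:

    # Both lists share common keys, so plain "+" duplicates them.
    mandatory_core = ["recipe"]              # illustrative, not the real values
    mandatory_nlu = ["language", "recipe"]

    combined = mandatory_core + mandatory_nlu
    assert combined.count("recipe") == 2

    # list(set(...)) deduplicates but yields an arbitrary order, so callers
    # must not rely on the ordering of CONFIG_MANDATORY_KEYS.
    unique = list(set(combined))
    assert sorted(unique) == ["language", "recipe"]
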
rasa/shared/importers/importer.py CHANGED
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import logging
  from abc import ABC, abstractmethod
  from functools import reduce
@@ -114,7 +116,7 @@ class TrainingDataImporter(ABC):
          domain_path: Optional[Text] = None,
          training_data_paths: Optional[List[Text]] = None,
          args: Optional[Dict[Text, Any]] = {},
-     ) -> "TrainingDataImporter":
+     ) -> TrainingDataImporter:
          """Loads a `TrainingDataImporter` instance from a configuration file."""
          config = read_config_file(config_path)
          return TrainingDataImporter.load_from_dict(
@@ -127,7 +129,7 @@ class TrainingDataImporter(ABC):
          domain_path: Optional[Text] = None,
          training_data_paths: Optional[List[Text]] = None,
          args: Optional[Dict[Text, Any]] = {},
-     ) -> "TrainingDataImporter":
+     ) -> TrainingDataImporter:
          """Loads core `TrainingDataImporter` instance.

          Instance loaded from configuration file will only read Core training data.
@@ -143,7 +145,7 @@ class TrainingDataImporter(ABC):
          domain_path: Optional[Text] = None,
          training_data_paths: Optional[List[Text]] = None,
          args: Optional[Dict[Text, Any]] = {},
-     ) -> "TrainingDataImporter":
+     ) -> TrainingDataImporter:
          """Loads nlu `TrainingDataImporter` instance.

          Instance loaded from configuration file will only read NLU training data.
@@ -166,7 +168,7 @@ class TrainingDataImporter(ABC):
          domain_path: Optional[Text] = None,
          training_data_paths: Optional[List[Text]] = None,
          args: Optional[Dict[Text, Any]] = None,
-     ) -> "TrainingDataImporter":
+     ) -> TrainingDataImporter:
          """Loads a `TrainingDataImporter` instance from a dictionary."""
          from rasa.shared.importers.rasa import RasaFileImporter

@@ -195,18 +197,15 @@
          domain_path: Optional[Text] = None,
          training_data_paths: Optional[List[Text]] = None,
          args: Optional[Dict[Text, Any]] = None,
-     ) -> Optional["TrainingDataImporter"]:
+     ) -> Optional[TrainingDataImporter]:
          from rasa.shared.importers.multi_project import MultiProjectImporter
          from rasa.shared.importers.rasa import RasaFileImporter
-         from rasa.shared.importers.remote_importer import RemoteTrainingDataImporter

          module_path = importer_config.pop("name", None)
          if module_path == RasaFileImporter.__name__:
              importer_class: Type[TrainingDataImporter] = RasaFileImporter
          elif module_path == MultiProjectImporter.__name__:
              importer_class = MultiProjectImporter
-         elif module_path == RemoteTrainingDataImporter.__name__:
-             importer_class = RemoteTrainingDataImporter
          else:
              try:
                  importer_class = rasa.shared.utils.common.class_from_module_path(
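
The quoted return types could be unquoted because `from __future__ import annotations` (PEP 563) stores every annotation as a string and defers its evaluation, letting a class name itself in its own method signatures. A standalone sketch with a hypothetical `Importer` class:

    from __future__ import annotations


    class Importer:
        @classmethod
        def load(cls) -> Importer:  # no quotes needed; evaluation is deferred
            return cls()


    # The annotation stays a plain string until something resolves it.
    assert Importer.load.__annotations__["return"] == "Importer"
    assert isinstance(Importer.load(), Importer)
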
rasa/shared/nlu/training_data/features.py CHANGED
@@ -1,133 +1,15 @@
  from __future__ import annotations
-
- import itertools
- from dataclasses import dataclass
  from typing import Iterable, Union, Text, Optional, List, Any, Tuple, Dict, Set
+ import itertools

  import numpy as np
  import scipy.sparse
- from safetensors.numpy import save_file, load_file

- import rasa.shared.nlu.training_data.util
  import rasa.shared.utils.io
+ import rasa.shared.nlu.training_data.util
  from rasa.shared.nlu.constants import FEATURE_TYPE_SEQUENCE, FEATURE_TYPE_SENTENCE


- @dataclass
- class FeatureMetadata:
-     data_type: str
-     attribute: str
-     origin: Union[str, List[str]]
-     is_sparse: bool
-     shape: tuple
-     safetensors_key: str
-
-
- def save_features(
-     features_dict: Dict[Text, List[Features]], file_name: str
- ) -> Dict[str, Any]:
-     """Save a dictionary of Features lists to disk using safetensors.
-
-     Args:
-         features_dict: Dictionary mapping strings to lists of Features objects
-         file_name: File to save the features to
-
-     Returns:
-         The metadata to reconstruct the features.
-     """
-     # All tensors are stored in a single safetensors file
-     tensors_to_save = {}
-     # Metadata will be stored separately
-     metadata = {}
-
-     for key, features_list in features_dict.items():
-         feature_metadata_list = []
-
-         for idx, feature in enumerate(features_list):
-             # Create a unique key for this tensor in the safetensors file
-             safetensors_key = f"{key}_{idx}"
-
-             # Convert sparse matrices to dense if needed
-             if feature.is_sparse():
-                 # For sparse matrices, use the COO format
-                 coo = feature.features.tocoo()  # type:ignore[union-attr]
-                 # Save data, row indices and col indices separately
-                 tensors_to_save[f"{safetensors_key}_data"] = coo.data
-                 tensors_to_save[f"{safetensors_key}_row"] = coo.row
-                 tensors_to_save[f"{safetensors_key}_col"] = coo.col
-             else:
-                 tensors_to_save[safetensors_key] = feature.features
-
-             # Store metadata
-             metadata_item = FeatureMetadata(
-                 data_type=feature.type,
-                 attribute=feature.attribute,
-                 origin=feature.origin,
-                 is_sparse=feature.is_sparse(),
-                 shape=feature.features.shape,
-                 safetensors_key=safetensors_key,
-             )
-             feature_metadata_list.append(vars(metadata_item))
-
-         metadata[key] = feature_metadata_list
-
-     # Save tensors
-     save_file(tensors_to_save, file_name)
-
-     return metadata
-
-
- def load_features(
-     filename: str, metadata: Dict[str, Any]
- ) -> Dict[Text, List[Features]]:
-     """Load Features dictionary from disk.
-
-     Args:
-         filename: File name of the safetensors file.
-         metadata: Metadata to reconstruct the features.
-
-     Returns:
-         Dictionary mapping strings to lists of Features objects
-     """
-     # Load tensors
-     tensors = load_file(filename)
-
-     # Reconstruct the features dictionary
-     features_dict: Dict[Text, List[Features]] = {}
-
-     for key, feature_metadata_list in metadata.items():
-         features_list = []
-
-         for meta in feature_metadata_list:
-             safetensors_key = meta["safetensors_key"]
-
-             if meta["is_sparse"]:
-                 # Reconstruct sparse matrix from COO format
-                 data = tensors[f"{safetensors_key}_data"]
-                 row = tensors[f"{safetensors_key}_row"]
-                 col = tensors[f"{safetensors_key}_col"]
-
-                 features_matrix = scipy.sparse.coo_matrix(
-                     (data, (row, col)), shape=tuple(meta["shape"])
-                 ).tocsr()  # Convert back to CSR format
-             else:
-                 features_matrix = tensors[safetensors_key]
-
-             # Reconstruct Features object
-             features = Features(
-                 features=features_matrix,
-                 feature_type=meta["data_type"],
-                 attribute=meta["attribute"],
-                 origin=meta["origin"],
-             )
-
-             features_list.append(features)
-
-         features_dict[key] = features_list
-
-     return features_dict
-
-
  class Features:
      """Stores the features produced by any featurizer."""
 
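For reference, the deleted `save_features`/`load_features` helpers persisted each sparse matrix as its COO triplets (`data`, `row`, `col`) plus a shape, rebuilding a CSR matrix on load; only the storage backend (safetensors) was specific to this module. A minimal sketch of that round-trip in plain scipy, with made-up data:

    import numpy as np
    import scipy.sparse

    # Stand-in for a sparse Features.features payload.
    original = scipy.sparse.csr_matrix(np.array([[0.0, 1.0], [2.0, 0.0]]))

    # "Save": decompose into COO data/row/col arrays, as save_features did.
    coo = original.tocoo()
    stored = {"data": coo.data, "row": coo.row, "col": coo.col, "shape": coo.shape}

    # "Load": rebuild a CSR matrix from the stored triplets, as load_features did.
    restored = scipy.sparse.coo_matrix(
        (stored["data"], (stored["row"], stored["col"])), shape=stored["shape"]
    ).tocsr()

    assert (restored != original).nnz == 0  # same values and sparsity pattern
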
rasa/shared/utils/io.py CHANGED
@@ -13,7 +13,6 @@ from typing import Any, cast, Callable, Dict, List, Optional, Text, Type, TypeVa
  import warnings
  import random
  import string
-
  import portalocker

  from rasa.shared.constants import (
rasa/utils/io.py CHANGED
@@ -2,6 +2,7 @@ import asyncio
  import filecmp
  import logging
  import os
+ import pickle
  import tempfile
  import warnings
  import re
@@ -97,6 +98,29 @@ def enable_async_loop_debugging(
      return event_loop


+ def pickle_dump(filename: Union[Text, Path], obj: Any) -> None:
+     """Saves object to file.
+
+     Args:
+         filename: the filename to save the object to
+         obj: the object to store
+     """
+     with open(filename, "wb") as f:
+         pickle.dump(obj, f)
+
+
+ def pickle_load(filename: Union[Text, Path]) -> Any:
+     """Loads an object from a file.
+
+     Args:
+         filename: the filename to load the object from
+
+     Returns: the loaded object
+     """
+     with open(filename, "rb") as f:
+         return pickle.load(f)
+
+
  def create_temporary_file(data: Any, suffix: Text = "", mode: Text = "w+") -> Text:
      """Creates a tempfile.NamedTemporaryFile object for data."""
      encoding = None if "b" in mode else rasa.shared.utils.io.DEFAULT_ENCODING
@@ -167,6 +191,48 @@ def create_validator(
      return FunctionValidator


+ def json_unpickle(
+     file_name: Union[Text, Path], encode_non_string_keys: bool = False
+ ) -> Any:
+     """Unpickle an object from file using json.
+
+     Args:
+         file_name: the file to load the object from
+         encode_non_string_keys: If set to `True` then jsonpickle will encode non-string
+             dictionary keys instead of coercing them into strings via `repr()`.
+
+     Returns: the object
+     """
+     import jsonpickle.ext.numpy as jsonpickle_numpy
+     import jsonpickle
+
+     jsonpickle_numpy.register_handlers()
+
+     file_content = rasa.shared.utils.io.read_file(file_name)
+     return jsonpickle.loads(file_content, keys=encode_non_string_keys)
+
+
+ def json_pickle(
+     file_name: Union[Text, Path], obj: Any, encode_non_string_keys: bool = False
+ ) -> None:
+     """Pickle an object to a file using json.
+
+     Args:
+         file_name: the file to store the object to
+         obj: the object to store
+         encode_non_string_keys: If set to `True` then jsonpickle will encode non-string
+             dictionary keys instead of coercing them into strings via `repr()`.
+     """
+     import jsonpickle.ext.numpy as jsonpickle_numpy
+     import jsonpickle
+
+     jsonpickle_numpy.register_handlers()
+
+     rasa.shared.utils.io.write_text_file(
+         jsonpickle.dumps(obj, keys=encode_non_string_keys), file_name
+     )
+
+
  def get_emoji_regex() -> Pattern:
      """Returns regex to identify emojis."""
      return re.compile(
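
Together these additions give `rasa.utils.io` a binary pickle round-trip and a human-readable jsonpickle round-trip. A usage sketch, assuming a rasa-pro install where the helpers above are importable; note that `pickle.load` executes arbitrary code, so `pickle_load` should only be pointed at trusted artifacts:

    import tempfile
    from pathlib import Path

    from rasa.utils.io import json_pickle, json_unpickle, pickle_dump, pickle_load

    with tempfile.TemporaryDirectory() as tmp:
        obj = {"intents": ["greet", "goodbye"], "threshold": 0.7}

        pkl_path = Path(tmp) / "obj.pkl"  # binary pickle round-trip
        pickle_dump(pkl_path, obj)
        assert pickle_load(pkl_path) == obj

        json_path = Path(tmp) / "obj.json"  # human-readable jsonpickle round-trip
        json_pickle(json_path, obj)
        assert json_unpickle(json_path) == obj
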
rasa/utils/tensorflow/model_data.py CHANGED
@@ -20,8 +20,6 @@ import numpy as np
  import scipy.sparse
  from sklearn.model_selection import train_test_split

- from rasa.utils.tensorflow.feature_array import FeatureArray
-
  logger = logging.getLogger(__name__)


@@ -39,6 +37,199 @@ def ragged_array_to_ndarray(ragged_array: Iterable[np.ndarray]) -> np.ndarray:
      return np.array(ragged_array, dtype=object)


+ class FeatureArray(np.ndarray):
+     """Stores any kind of features ready to be used by a RasaModel.
+
+     Next to the input numpy array of features, it also receives the number of
+     dimensions of the features.
+     As our features can have 1 to 4 dimensions we might have a different number of
+     numpy arrays stacked. The number of dimensions helps us to figure out how to
+     handle this particular feature array. Also, it is automatically determined
+     whether the feature array is sparse or not and the number of units is
+     determined as well.
+
+     Subclassing np.array: https://numpy.org/doc/stable/user/basics.subclassing.html
+     """
+
+     def __new__(
+         cls, input_array: np.ndarray, number_of_dimensions: int
+     ) -> "FeatureArray":
+         """Create and return a new object. See help(type) for accurate signature."""
+         FeatureArray._validate_number_of_dimensions(number_of_dimensions, input_array)
+
+         feature_array = np.asarray(input_array).view(cls)
+
+         if number_of_dimensions <= 2:
+             feature_array.units = input_array.shape[-1]
+             feature_array.is_sparse = isinstance(input_array[0], scipy.sparse.spmatrix)
+         elif number_of_dimensions == 3:
+             feature_array.units = input_array[0].shape[-1]
+             feature_array.is_sparse = isinstance(input_array[0], scipy.sparse.spmatrix)
+         elif number_of_dimensions == 4:
+             feature_array.units = input_array[0][0].shape[-1]
+             feature_array.is_sparse = isinstance(
+                 input_array[0][0], scipy.sparse.spmatrix
+             )
+         else:
+             raise ValueError(
+                 f"Number of dimensions '{number_of_dimensions}' currently not "
+                 f"supported."
+             )
+
+         feature_array.number_of_dimensions = number_of_dimensions
+
+         return feature_array
+
+     def __init__(
+         self, input_array: Any, number_of_dimensions: int, **kwargs: Any
+     ) -> None:
+         """Initialize FeatureArray.
+
+         Needed in order to avoid 'Invalid keyword argument number_of_dimensions
+         to function FeatureArray.__init__'.
+
+         Args:
+             input_array: the array that contains features
+             number_of_dimensions: number of dimensions in input_array
+         """
+         super().__init__(**kwargs)
+         self.number_of_dimensions = number_of_dimensions
+
+     def __array_finalize__(self, obj: Optional[np.ndarray]) -> None:
+         """This method is called when the system allocates a new array from obj.
+
+         Args:
+             obj: A subclass (subtype) of ndarray.
+         """
+         if obj is None:
+             return
+
+         self.units = getattr(obj, "units", None)
+         self.number_of_dimensions = getattr(obj, "number_of_dimensions", None)  # type: ignore[assignment]
+         self.is_sparse = getattr(obj, "is_sparse", None)
+
+         default_attributes = {
+             "units": self.units,
+             "number_of_dimensions": self.number_of_dimensions,
+             "is_sparse": self.is_sparse,
+         }
+         self.__dict__.update(default_attributes)
+
+     # pytype: disable=attribute-error
+     def __array_ufunc__(
+         self, ufunc: Any, method: Text, *inputs: Any, **kwargs: Any
+     ) -> Any:
+         """Overwrite this method as we are subclassing numpy array.
+
+         Args:
+             ufunc: The ufunc object that was called.
+             method: A string indicating which Ufunc method was called
+                 (one of "__call__", "reduce", "reduceat", "accumulate", "outer",
+                 "inner").
+             *inputs: A tuple of the input arguments to the ufunc.
+             **kwargs: Any additional arguments
+
+         Returns:
+             The result of the operation.
+         """
+         f = {
+             "reduce": ufunc.reduce,
+             "accumulate": ufunc.accumulate,
+             "reduceat": ufunc.reduceat,
+             "outer": ufunc.outer,
+             "at": ufunc.at,
+             "__call__": ufunc,
+         }
+         # convert the inputs to np.ndarray to prevent recursion, call the function,
+         # then cast it back as FeatureArray
+         output = FeatureArray(
+             f[method](*(i.view(np.ndarray) for i in inputs), **kwargs),
+             number_of_dimensions=kwargs["number_of_dimensions"],
+         )
+         output.__dict__ = self.__dict__  # carry forward attributes
+         return output
+
+     def __reduce__(self) -> Tuple[Any, Any, Any]:
+         """Needed in order to pickle this object.
+
+         Returns:
+             A tuple.
+         """
+         pickled_state = super(FeatureArray, self).__reduce__()
+         if isinstance(pickled_state, str):
+             raise TypeError("np array __reduce__ returned string instead of tuple.")
+         new_state = pickled_state[2] + (
+             self.number_of_dimensions,
+             self.is_sparse,
+             self.units,
+         )
+         return pickled_state[0], pickled_state[1], new_state
+
+     def __setstate__(self, state: Any, **kwargs: Any) -> None:
+         """Sets the state.
+
+         Args:
+             state: The state argument must be a sequence that contains the following
+                 elements: version, shape, dtype, isFortran, rawdata.
+             **kwargs: Any additional parameter
+         """
+         # Needed in order to load the object
+         self.number_of_dimensions = state[-3]
+         self.is_sparse = state[-2]
+         self.units = state[-1]
+         super(FeatureArray, self).__setstate__(state[0:-3], **kwargs)
+
+     # pytype: enable=attribute-error
+
+     @staticmethod
+     def _validate_number_of_dimensions(
+         number_of_dimensions: int, input_array: np.ndarray
+     ) -> None:
+         """Validates if the input array has the given number of dimensions.
+
+         Args:
+             number_of_dimensions: number of dimensions
+             input_array: input array
+
+         Raises: ValueError in case the dimensions do not match
+         """
+         _sub_array = input_array
+         dim = 0
+         # Go number_of_dimensions into the given input_array
+         for i in range(1, number_of_dimensions + 1):
+             _sub_array = _sub_array[0]
+             if isinstance(_sub_array, scipy.sparse.spmatrix):
+                 dim = i
+                 break
+             if isinstance(_sub_array, np.ndarray) and _sub_array.shape[0] == 0:
+                 # sequence dimension is 0, we are dealing with "fake" features
+                 dim = i
+                 break
+
+         # If the resulting sub_array is sparse, the remaining number of dimensions
+         # should be at least 2
+         if isinstance(_sub_array, scipy.sparse.spmatrix):
+             if dim > 2:
+                 raise ValueError(
+                     f"Given number of dimensions '{number_of_dimensions}' does not "
+                     f"match dimensions of given input array: {input_array}."
+                 )
+         elif isinstance(_sub_array, np.ndarray) and _sub_array.shape[0] == 0:
+             # sequence dimension is 0, we are dealing with "fake" features,
+             # but they should be of dim 2
+             if dim > 2:
+                 raise ValueError(
+                     f"Given number of dimensions '{number_of_dimensions}' does not "
+                     f"match dimensions of given input array: {input_array}."
+                 )
+         # If the resulting sub_array is dense, the sub_array should be a single number
+         elif not np.issubdtype(type(_sub_array), np.integer) and not isinstance(
+             _sub_array, (np.float32, np.float64)
+         ):
+             raise ValueError(
+                 f"Given number of dimensions '{number_of_dimensions}' does not match "
+                 f"dimensions of given input array: {input_array}."
+             )
+
+
  class FeatureSignature(NamedTuple):
      """Signature of feature arrays.
 
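`FeatureArray`, evidently re-homed from the deleted `rasa/utils/tensorflow/feature_array.py` (file 33 above), is an `np.ndarray` subclass that tags the raw array with `units`, `is_sparse`, and `number_of_dimensions`; its `__reduce__`/`__setstate__` pair carries those attributes through pickling, which a plain ndarray subclass would lose. A small usage sketch with an illustrative dense 2-D input:

    import pickle

    import numpy as np

    from rasa.utils.tensorflow.model_data import FeatureArray

    features = FeatureArray(
        np.array([[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]), number_of_dimensions=2
    )
    assert features.units == 3           # size of the last axis
    assert features.is_sparse is False   # dense numpy input

    # __reduce__/__setstate__ keep the extra attributes across pickling.
    restored = pickle.loads(pickle.dumps(features))
    assert restored.number_of_dimensions == 2 and restored.units == 3
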
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
  # this file will automatically be changed,
  # do not add anything but the version number here!
- __version__ = "3.10.9.dev1"
+ __version__ = "3.10.11"
{rasa_pro-3.10.9.dev1.dist-info → rasa_pro-3.10.11.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: rasa-pro
- Version: 3.10.9.dev1
+ Version: 3.10.11
  Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
  Home-page: https://rasa.com
  Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
@@ -34,6 +34,7 @@ Requires-Dist: attrs (>=23.1,<23.2)
  Requires-Dist: azure-storage-blob (>=12.16.0,<12.17.0)
  Requires-Dist: boto3 (>=1.35.5,<1.36.0)
  Requires-Dist: certifi (>=2024.07.04)
+ Requires-Dist: cloudpickle (>=2.2.1,<3.1)
  Requires-Dist: colorama (>=0.4.6,<0.5.0) ; sys_platform == "win32"
  Requires-Dist: colorclass (>=2.2,<2.3)
  Requires-Dist: coloredlogs (>=15,<16)
@@ -56,19 +57,20 @@ Requires-Dist: importlib-metadata (>=8.5.0,<8.6.0)
  Requires-Dist: importlib-resources (==6.1.3)
  Requires-Dist: jieba (>=0.42.1,<0.43) ; extra == "jieba" or extra == "full"
  Requires-Dist: jinja2 (>=3.1.4,<4.0.0)
+ Requires-Dist: joblib (>=1.2.0,<1.3.0)
  Requires-Dist: jsonpatch (>=1.33,<2.0)
  Requires-Dist: jsonpickle (>=3.0,<3.1)
  Requires-Dist: jsonschema (>=4.22)
  Requires-Dist: keras (==2.14.0)
  Requires-Dist: langchain (>=0.2.0,<0.3.0)
  Requires-Dist: langchain-community (>=0.2.0,<0.3.0)
- Requires-Dist: litellm (>=1.50.0,<1.51.0)
+ Requires-Dist: litellm (>=1.52.6,<1.53.0)
  Requires-Dist: matplotlib (>=3.7,<3.8)
  Requires-Dist: mattermostwrapper (>=2.2,<2.3)
  Requires-Dist: mlflow (>=2.15.1,<3.0.0) ; extra == "mlflow"
  Requires-Dist: networkx (>=3.1,<3.2)
  Requires-Dist: numpy (>=1.23.5,<1.25.0) ; python_version >= "3.9" and python_version < "3.11"
- Requires-Dist: openai (>=1.52.0,<1.53.0)
+ Requires-Dist: openai (>=1.54.0,<1.55.0)
  Requires-Dist: openpyxl (>=3.1.5,<4.0.0)
  Requires-Dist: opentelemetry-api (>=1.16.0,<1.17.0)
  Requires-Dist: opentelemetry-exporter-jaeger (>=1.16.0,<1.17.0)
@@ -101,14 +103,13 @@ Requires-Dist: pyyaml (>=6.0)
  Requires-Dist: qdrant-client (>=1.9.0,<2.0.0)
  Requires-Dist: questionary (>=1.10.0,<2.1.0)
  Requires-Dist: randomname (>=0.2.1,<0.3.0)
- Requires-Dist: rasa-sdk (==3.10.0)
+ Requires-Dist: rasa-sdk (>=3.10.0,<3.11.0)
  Requires-Dist: redis (>=4.6.0,<6.0)
  Requires-Dist: regex (>=2022.10.31,<2022.11)
  Requires-Dist: requests (>=2.31.0,<2.32.0)
  Requires-Dist: rich (>=13.4.2,<14.0.0)
  Requires-Dist: rocketchat_API (>=1.30.0,<1.31.0)
  Requires-Dist: ruamel.yaml (>=0.17.21,<0.17.22)
- Requires-Dist: safetensors (>=0.4.5,<0.5.0)
  Requires-Dist: sanic (>=22.12,<22.13)
  Requires-Dist: sanic-cors (>=2.2.0,<2.3.0)
  Requires-Dist: sanic-jwt (>=1.8.0,<2.0.0)
@@ -119,7 +120,6 @@ Requires-Dist: sentencepiece[sentencepiece] (>=0.1.99,<0.2.0) ; extra == "transf
  Requires-Dist: sentry-sdk (>=1.14.0,<1.15.0)
  Requires-Dist: setuptools (>=70.0.0,<70.1.0)
  Requires-Dist: sklearn-crfsuite (>=0.3.6,<0.4.0)
- Requires-Dist: skops (>=0.10.0,<0.11.0)
  Requires-Dist: slack-sdk (>=3.27.1,<4.0.0)
  Requires-Dist: spacy (>=3.5.4,<4.0.0) ; extra == "spacy" or extra == "full"
  Requires-Dist: structlog (>=23.1.0,<23.2.0)