rasa-pro 3.10.4-py3-none-any.whl → 3.10.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -80,6 +80,8 @@ from rasa.utils.ml_utils import (
  persist_faiss_vector_store,
  response_for_template,
  )
+ from rasa.dialogue_understanding.patterns.chitchat import FLOW_PATTERN_CHITCHAT
+ from rasa.shared.core.constants import ACTION_TRIGGER_CHITCHAT
  from rasa.utils.log_utils import log_llm

  if TYPE_CHECKING:
@@ -174,6 +176,21 @@ def filter_responses(responses: Responses, forms: Forms, flows: FlowsList) -> Re
  for name, variants in responses.data.items()
  if name not in combined_responses
  }
+
+ pattern_chitchat = flows.flow_by_id(FLOW_PATTERN_CHITCHAT)
+
+ # The following condition is highly unlikely, but mypy requires the case
+ # of pattern_chitchat == None to be addressed
+ if not pattern_chitchat:
+ return Responses(data=filtered_responses)
+
+ # if action_trigger_chitchat, filter out "utter_free_chitchat_response"
+ has_action_trigger_chitchat = pattern_chitchat.has_action_step(
+ ACTION_TRIGGER_CHITCHAT
+ )
+ if has_action_trigger_chitchat:
+ filtered_responses.pop("utter_free_chitchat_response", None)
+
  return Responses(data=filtered_responses)


@@ -709,6 +726,7 @@ class IntentlessPolicy(Policy):
  number_of_samples=NUMBER_OF_CONVERSATION_SAMPLES,
  max_number_of_tokens=MAX_NUMBER_OF_TOKENS_FOR_SAMPLES,
  )
+
  extra_ai_responses = self.extract_ai_responses(conversation_samples)

  # put conversation responses in front of sampled examples,
@@ -1,6 +1,7 @@
  from typing import List, Optional, Type, Set, Dict

  import structlog
+ from rasa.shared.core.training_data.structures import StoryGraph
  from rasa.dialogue_understanding.commands import (
  CancelFlowCommand,
  ClarifyCommand,
@@ -179,6 +180,7 @@ def execute_commands(
  tracker: DialogueStateTracker,
  all_flows: FlowsList,
  execution_context: ExecutionContext,
+ story_graph: Optional[StoryGraph] = None,
  ) -> List[Event]:
  """Executes a list of commands.

@@ -187,6 +189,7 @@ def execute_commands(
  tracker: The tracker to execute the commands on.
  all_flows: All flows.
  execution_context: Information about the single graph run.
+ story_graph: StoryGraph object with stories available for training.

  Returns:
  A list of the events that were created.
@@ -194,7 +197,9 @@ def execute_commands(
  commands: List[Command] = get_commands_from_tracker(tracker)
  original_tracker = tracker.copy()

- commands = clean_up_commands(commands, tracker, all_flows, execution_context)
+ commands = clean_up_commands(
+ commands, tracker, all_flows, execution_context, story_graph
+ )

  updated_flows = find_updated_flows(tracker, all_flows)
  if updated_flows:
@@ -326,6 +331,7 @@ def clean_up_commands(
  tracker: DialogueStateTracker,
  all_flows: FlowsList,
  execution_context: ExecutionContext,
+ story_graph: Optional[StoryGraph] = None,
  ) -> List[Command]:
  """Clean up a list of commands.

@@ -340,6 +346,7 @@ def clean_up_commands(
  tracker: The tracker to clean up the commands for.
  all_flows: All flows.
  execution_context: Information about a single graph run.
+ story_graph: StoryGraph object with stories available for training.

  Returns:
  The cleaned up commands.
@@ -386,7 +393,7 @@ def clean_up_commands(
  # handle chitchat command differently from other free-form answer commands
  elif isinstance(command, ChitChatAnswerCommand):
  clean_commands = clean_up_chitchat_command(
- clean_commands, command, all_flows, execution_context
+ clean_commands, command, all_flows, execution_context, story_graph
  )

  elif isinstance(command, FreeFormAnswerCommand):
@@ -487,22 +494,23 @@ def clean_up_slot_command(
  stack = tracker.stack

  resulting_commands = commands_so_far[:]
- if command.name in slots_so_far and command.name != ROUTE_TO_CALM_SLOT:
- slot = tracker.slots.get(command.name)
- if slot is None:
- structlogger.debug(
- "command_processor.clean_up_slot_command.skip_command_slot_not_in_domain",
- command=command,
- )
- return resulting_commands

- if not should_slot_be_set(slot, command):
- cannot_handle = CannotHandleCommand(reason=CANNOT_HANDLE_REASON)
- if cannot_handle not in resulting_commands:
- resulting_commands.append(cannot_handle)
+ slot = tracker.slots.get(command.name)
+ if slot is None:
+ structlogger.debug(
+ "command_processor.clean_up_slot_command.skip_command_slot_not_in_domain",
+ command=command,
+ )
+ return resulting_commands
+
+ if not should_slot_be_set(slot, command):
+ cannot_handle = CannotHandleCommand(reason=CANNOT_HANDLE_REASON)
+ if cannot_handle not in resulting_commands:
+ resulting_commands.append(cannot_handle)

- return resulting_commands
+ return resulting_commands

+ if command.name in slots_so_far and command.name != ROUTE_TO_CALM_SLOT:
  current_collect_info = get_current_collect_step(stack, all_flows)

  if current_collect_info and current_collect_info.collect == command.name:
@@ -562,6 +570,7 @@ def clean_up_chitchat_command(
  command: ChitChatAnswerCommand,
  flows: FlowsList,
  execution_context: ExecutionContext,
+ story_graph: Optional[StoryGraph] = None,
  ) -> List[Command]:
  """Clean up a chitchat answer command.

@@ -573,6 +582,7 @@ def clean_up_chitchat_command(
  command: The command to clean up.
  flows: All flows.
  execution_context: Information about a single graph run.
+ story_graph: StoryGraph object with stories available for training.
  Returns:
  The cleaned up commands.
  """
@@ -598,7 +608,11 @@ def clean_up_chitchat_command(
  )
  defines_intentless_policy = execution_context.has_node(IntentlessPolicy)

- if has_action_trigger_chitchat and not defines_intentless_policy:
+ has_e2e_stories = True if (story_graph and story_graph.has_e2e_stories()) else False
+
+ if (has_action_trigger_chitchat and not defines_intentless_policy) or (
+ defines_intentless_policy and not has_e2e_stories
+ ):
  resulting_commands.insert(
  0, CannotHandleCommand(RASA_PATTERN_CANNOT_HANDLE_CHITCHAT)
  )
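The rewritten condition above can be read as a small predicate: chitchat is downgraded to pattern_cannot_handle either when pattern_chitchat relies on action_trigger_chitchat without the IntentlessPolicy, or when the IntentlessPolicy is configured but the training data contains no end-to-end stories. A minimal sketch mirroring the diff (hypothetical helper name):

    def should_reject_chitchat(
        has_action_trigger_chitchat: bool,
        defines_intentless_policy: bool,
        has_e2e_stories: bool,
    ) -> bool:
        # Same boolean logic as the new condition in clean_up_chitchat_command.
        return (has_action_trigger_chitchat and not defines_intentless_policy) or (
            defines_intentless_policy and not has_e2e_stories
        )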
@@ -9,6 +9,7 @@ from rasa.engine.storage.storage import ModelStorage
  from rasa.shared.core.events import Event
  from rasa.shared.core.flows import FlowsList
  from rasa.shared.core.trackers import DialogueStateTracker
+ from rasa.shared.core.training_data.structures import StoryGraph


  class CommandProcessorComponent(GraphComponent):
@@ -31,9 +32,12 @@ class CommandProcessorComponent(GraphComponent):
  return cls(execution_context)

  def execute_commands(
- self, tracker: DialogueStateTracker, flows: FlowsList
+ self,
+ tracker: DialogueStateTracker,
+ flows: FlowsList,
+ story_graph: StoryGraph,
  ) -> List[Event]:
  """Execute commands to update tracker state."""
  return rasa.dialogue_understanding.processor.command_processor.execute_commands(
- tracker, flows, self._execution_context
+ tracker, flows, self._execution_context, story_graph
  )
@@ -405,7 +405,7 @@ class DefaultV1Recipe(Recipe):
  return {}

  def resolver_name_from_parameter(parameter: str) -> str:
- # we got a couple special cases to handle wher the parameter name
+ # we got a couple special cases to handle where the parameter name
  # doesn't match the provider name
  if "training_trackers" == parameter:
  return "training_tracker_provider"
@@ -597,7 +597,7 @@ class DefaultV1Recipe(Recipe):
  needs={"importer": "finetuning_validator"},
  uses=StoryGraphProvider,
  constructor_name="create",
- fn="provide",
+ fn="provide_train",
  config={"exclusion_percentage": cli_parameters.get("exclusion_percentage")},
  is_input=True,
  )
@@ -882,6 +882,14 @@ class DefaultV1Recipe(Recipe):
  config={},
  resource=Resource("domain_provider"),
  )
+ predict_nodes["story_graph_provider"] = SchemaNode(
+ **DEFAULT_PREDICT_KWARGS,
+ needs={},
+ uses=StoryGraphProvider,
+ fn="provide_inference",
+ config={},
+ resource=Resource("story_graph_provider"),
+ )
  predict_nodes["flows_provider"] = SchemaNode(
  **DEFAULT_PREDICT_KWARGS,
  needs={},
@@ -1,19 +1,37 @@
  from __future__ import annotations
- from typing import Dict, Text, Any
+ from typing import Dict, Text, Any, List

  from rasa.engine.graph import GraphComponent, ExecutionContext
  from rasa.engine.storage.resource import Resource
  from rasa.engine.storage.storage import ModelStorage
- from rasa.shared.core.training_data.structures import StoryGraph
+ from rasa.shared.core.training_data.structures import StoryGraph, StoryStep
  from rasa.shared.importers.importer import TrainingDataImporter
+ from rasa.shared.core.training_data.story_writer.yaml_story_writer import (
+ YAMLStoryWriter,
+ )
+ from rasa.shared.core.training_data.story_reader.yaml_story_reader import (
+ YAMLStoryReader,
+ )
+
+
+ STORIES_PERSISTENCE_FILE_NAME = "stories.yml"


  class StoryGraphProvider(GraphComponent):
  """Provides the training data from stories."""

- def __init__(self, config: Dict[Text, Any]) -> None:
+ def __init__(
+ self,
+ config: Dict[Text, Any],
+ model_storage: ModelStorage,
+ resource: Resource,
+ stories: StoryGraph = None,
+ ) -> None:
  """Creates provider from config."""
  self._config = config
+ self._model_storage = model_storage
+ self._resource = resource
+ self._stories = stories

  @staticmethod
  def get_default_config() -> Dict[Text, Any]:
@@ -29,9 +47,36 @@ class StoryGraphProvider(GraphComponent):
  execution_context: ExecutionContext,
  ) -> StoryGraphProvider:
  """Creates component (see parent class for full docstring)."""
- return cls(config)
+ return cls(config, model_storage, resource)

- def provide(self, importer: TrainingDataImporter) -> StoryGraph:
+ @classmethod
+ def load(
+ cls,
+ config: Dict[Text, Any],
+ model_storage: ModelStorage,
+ resource: Resource,
+ execution_context: ExecutionContext,
+ **kwargs: Any,
+ ) -> StoryGraphProvider:
+ """Creates provider using a persisted version of itself."""
+ with model_storage.read_from(resource) as resource_directory:
+ reader = YAMLStoryReader()
+ story_steps = reader.read_from_file(
+ resource_directory / STORIES_PERSISTENCE_FILE_NAME
+ )
+ stories = StoryGraph(story_steps)
+ return cls(config, model_storage, resource, stories)
+
+ def _persist(self, story_steps: List[StoryStep]) -> None:
+ """Persists flows to model storage."""
+ with self._model_storage.write_to(self._resource) as resource_directory:
+ writer = YAMLStoryWriter()
+ writer.dump(
+ resource_directory / STORIES_PERSISTENCE_FILE_NAME,
+ story_steps,
+ )
+
+ def provide_train(self, importer: TrainingDataImporter) -> StoryGraph:
  """Provides the story graph from the training data.

  Args:
@@ -40,4 +85,12 @@
  Returns:
  The story graph containing stories and rules used for training.
  """
- return importer.get_stories(**self._config)
+ stories = importer.get_stories(**self._config)
+ self._persist(stories.story_steps)
+ return stories
+
+ def provide_inference(self) -> StoryGraph:
+ """Provides the stories configuration during inference."""
+ if self._stories is None:
+ self._stories = StoryGraph([])
+ return self._stories
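Taken together, StoryGraphProvider now persists stories at training time and serves them back at prediction time. A minimal usage sketch, assuming the standard GraphComponent create/load calling convention and that importer, model_storage, resource, and execution_context are set up elsewhere:

    # Training: read stories via the importer and persist them as stories.yml in model storage.
    provider = StoryGraphProvider.create({}, model_storage, resource, execution_context)
    story_graph = provider.provide_train(importer)

    # Inference: reload the persisted stories; provide_inference falls back to an empty StoryGraph.
    loaded = StoryGraphProvider.load({}, model_storage, resource, execution_context)
    runtime_story_graph = loaded.provide_inference()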
@@ -3,7 +3,9 @@ from typing import Any, Text, Dict, List, Type, Tuple
  import joblib
  import structlog
  from scipy.sparse import hstack, vstack, csr_matrix
+ from sklearn.exceptions import NotFittedError
  from sklearn.linear_model import LogisticRegression
+ from sklearn.utils.validation import check_is_fitted

  from rasa.engine.storage.resource import Resource
  from rasa.engine.storage.storage import ModelStorage
@@ -154,6 +156,17 @@ class LogisticRegressionClassifier(IntentClassifier, GraphComponent):

  def process(self, messages: List[Message]) -> List[Message]:
  """Return the most likely intent and its probability for a message."""
+ # Check if the classifier is trained
+ if not self.is_trained():
+ structlogger.warning(
+ "logistic_regression_classifier.not_trained.skip_intent_prediction",
+ event_info=(
+ f"The '{self.__class__.__name__}' is not trained. "
+ f"Skipping intent prediction."
+ ),
+ )
+ return messages
+
  X = self._create_X(messages)
  probas = self.clf.predict_proba(X)
  for idx, message in enumerate(messages):
@@ -216,3 +229,12 @@ class LogisticRegressionClassifier(IntentClassifier, GraphComponent):
  def validate_config(cls, config: Dict[Text, Any]) -> None:
  """Validates that the component is configured properly."""
  pass
+
+ def is_trained(self) -> bool:
+ """Checks if the model has been trained."""
+ try:
+ # This will raise a NotFittedError if the classifier isn't fitted
+ check_is_fitted(self.clf)
+ return True
+ except NotFittedError:
+ return False
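The new guard relies on scikit-learn's fitted-state check. A standalone sketch of the same pattern outside Rasa:

    from sklearn.exceptions import NotFittedError
    from sklearn.linear_model import LogisticRegression
    from sklearn.utils.validation import check_is_fitted

    clf = LogisticRegression()
    try:
        # check_is_fitted raises NotFittedError until clf.fit(X, y) has been called.
        check_is_fitted(clf)
        print("trained")
    except NotFittedError:
        print("not trained - intent prediction would be skipped")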
@@ -14,6 +14,7 @@ from rasa.nlu.featurizers.sparse_featurizer.sparse_featurizer import SparseFeatu
  from rasa.nlu.utils.spacy_utils import SpacyModel
  from rasa.shared.constants import DOCS_URL_COMPONENTS
  import rasa.utils.io as io_utils
+ from sklearn.exceptions import NotFittedError
  from sklearn.feature_extraction.text import CountVectorizer
  from rasa.shared.nlu.training_data.training_data import TrainingData
  from rasa.shared.nlu.training_data.message import Message
@@ -551,36 +552,39 @@ class CountVectorsFeaturizer(SparseFeaturizer, GraphComponent):
  sequence_features: List[Optional[scipy.sparse.spmatrix]] = []
  sentence_features: List[Optional[scipy.sparse.spmatrix]] = []

- for i, tokens in enumerate(all_tokens):
- if not tokens:
- # nothing to featurize
- sequence_features.append(None)
- sentence_features.append(None)
- continue
-
- # vectorizer.transform returns a sparse matrix of size
- # [n_samples, n_features]
- # set input to list of tokens if sequence should be returned
- # otherwise join all tokens to a single string and pass that as a list
- if not tokens:
- # attribute is not set (e.g. response not present)
- sequence_features.append(None)
- sentence_features.append(None)
- continue
-
- seq_vec = self.vectorizers[attribute].transform(tokens)
- seq_vec.sort_indices()
-
- sequence_features.append(seq_vec.tocoo())
-
- if attribute in DENSE_FEATURIZABLE_ATTRIBUTES:
- tokens_text = [" ".join(tokens)]
- sentence_vec = self.vectorizers[attribute].transform(tokens_text)
- sentence_vec.sort_indices()
-
- sentence_features.append(sentence_vec.tocoo())
- else:
- sentence_features.append(None)
+ try:
+ for i, tokens in enumerate(all_tokens):
+ # vectorizer.transform returns a sparse matrix of size
+ # [n_samples, n_features]
+ # set input to list of tokens if sequence should be returned
+ # otherwise join all tokens to a single string and pass that as a list
+ if not tokens:
+ # attribute is not set (e.g. response not present)
+ sequence_features.append(None)
+ sentence_features.append(None)
+ continue
+
+ seq_vec = self.vectorizers[attribute].transform(tokens)
+ seq_vec.sort_indices()
+
+ sequence_features.append(seq_vec.tocoo())
+
+ if attribute in DENSE_FEATURIZABLE_ATTRIBUTES:
+ tokens_text = [" ".join(tokens)]
+ sentence_vec = self.vectorizers[attribute].transform(tokens_text)
+ sentence_vec.sort_indices()
+
+ sentence_features.append(sentence_vec.tocoo())
+ else:
+ sentence_features.append(None)
+ except NotFittedError:
+ logger.warning(
+ f"Unable to train CountVectorizer for message "
+ f"attribute - {attribute}, since the call to sklearn's "
+ f"`.fit()` method failed. Leaving an untrained "
+ f"CountVectorizer for it."
+ )
+ return [None], [None]

  return sequence_features, sentence_features

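The new try/except covers the case where a vectorizer was never fitted for an attribute. A standalone sketch of the failure mode being caught:

    from sklearn.exceptions import NotFittedError
    from sklearn.feature_extraction.text import CountVectorizer

    vectorizer = CountVectorizer()
    try:
        # transform() on an unfitted CountVectorizer raises NotFittedError.
        vectorizer.transform(["hello world"])
    except NotFittedError:
        # The featurizer now logs a warning and returns ([None], [None]) instead of crashing.
        pass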
rasa/nlu/persistor.py CHANGED
@@ -64,14 +64,14 @@ def parse_remote_storage(value: str) -> StorageType:

  if isinstance(value, str):
  if value == "":
- raise ValueError(
+ raise RasaException(
  f"The value can't be an empty string."
  f" {supported_storages_help_text}"
  )

  return value

- raise ValueError(
+ raise RasaException(
  f"Invalid storage type '{value}'. {supported_storages_help_text}"
  )

@@ -82,6 +82,8 @@ def get_persistor(storage: StorageType) -> Optional[Persistor]:
  Currently, `aws`, `gcs`, `azure` and providing module paths are supported remote
  storages.
  """
+ storage = storage.value if isinstance(storage, RemoteStorageType) else storage
+
  if storage == RemoteStorageType.AWS.value:
  return AWSPersistor(
  os.environ.get(BUCKET_NAME_ENV), os.environ.get(AWS_ENDPOINT_URL_ENV)
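The added normalization lets get_persistor accept either a RemoteStorageType member or its plain string value. A minimal sketch, assuming the enum members carry the lowercase storage names:

    # Both spellings end up as the same string before the comparisons below.
    for storage in (RemoteStorageType.AWS, "aws"):
        storage = storage.value if isinstance(storage, RemoteStorageType) else storage
        assert storage == RemoteStorageType.AWS.value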
rasa/shared/constants.py CHANGED
@@ -181,6 +181,7 @@ PROMPT_TEMPLATE_CONFIG_KEY = "prompt_template"

  STREAM_CONFIG_KEY = "stream"
  N_REPHRASES_CONFIG_KEY = "n"
+ USE_CHAT_COMPLETIONS_ENDPOINT_CONFIG_KEY = "use_chat_completions_endpoint"

  AZURE_API_KEY_ENV_VAR = "AZURE_API_KEY"
  AZURE_AD_TOKEN_ENV_VAR = "AZURE_AD_TOKEN"
@@ -790,13 +790,18 @@
  }
  else:
  intent_name = next(iter(intent.keys()))
-
- return (
- intent_name,
- cls._transform_intent_properties_for_internal_use(
- intent, entity_properties
- ),
- )
+ try:
+ return (
+ intent_name,
+ cls._transform_intent_properties_for_internal_use(
+ intent, entity_properties
+ ),
+ )
+ except AttributeError:
+ raise InvalidDomain(
+ f"Detected invalid intent definition: {intent}. "
+ f"Please make sure all intent definitions are valid."
+ )

  @classmethod
  def _add_default_intents(
@@ -804,6 +804,26 @@ class StoryGraph:
  """Returns text representation of object."""
  return f"{self.__class__.__name__}: {len(self.story_steps)} story steps"

+ def has_e2e_stories(self) -> bool:
+ """
+ Checks if there are end-to-end (E2E) stories present in the story steps.
+
+ An E2E story is determined by checking if any `UserUttered` event has
+ associated text within the story steps.
+
+ Returns:
+ bool: True if any E2E story (i.e., a `UserUttered` event with text)
+ is found, False otherwise.
+ """
+ if not self.story_steps:
+ return False
+ for story_step in self.story_steps:
+ for event in story_step.events:
+ if isinstance(event, UserUttered):
+ if event.text:
+ return True
+ return False
+

  def generate_id(prefix: Text = "", max_chars: Optional[int] = None) -> Text:
  """Generate a random UUID.
@@ -21,6 +21,7 @@ from rasa.shared.constants import (
  PROVIDER_CONFIG_KEY,
  OPENAI_PROVIDER,
  SELF_HOSTED_PROVIDER,
+ USE_CHAT_COMPLETIONS_ENDPOINT_CONFIG_KEY,
  )
  from rasa.shared.providers._configs.utils import (
  raise_deprecation_warnings,
@@ -71,6 +72,7 @@ class SelfHostedLLMClientConfig:
  api_base: str
  api_version: Optional[str] = None
  api_type: Optional[str] = OPENAI_PROVIDER
+ use_chat_completions_endpoint: Optional[bool] = True
  extra_parameters: dict = field(default_factory=dict)

  def __post_init__(self) -> None:
@@ -140,6 +142,9 @@
  # Optional parameters
  api_type=config.pop(API_TYPE_CONFIG_KEY, OPENAI_PROVIDER),
  api_version=config.pop(API_VERSION_CONFIG_KEY, None),
+ use_chat_completions_endpoint=config.pop(
+ USE_CHAT_COMPLETIONS_ENDPOINT_CONFIG_KEY, True
+ ),
  # The rest of parameters (e.g. model parameters) are considered
  # as extra parameters
  extra_parameters=config,
@@ -3,7 +3,11 @@ from typing import Dict, List, Any, Union

  import logging
  import structlog
- from litellm import completion, acompletion, validate_environment
+ from litellm import (
+ completion,
+ acompletion,
+ validate_environment,
+ )

  from rasa.shared.exceptions import (
  ProviderClientAPIException,
@@ -1,11 +1,19 @@
- from typing import Any, Dict, Optional
+ from typing import Any, Dict, List, Optional, Union
+ from litellm import (
+ text_completion,
+ atext_completion,
+ )
+ import logging
  import structlog

  from rasa.shared.constants import OPENAI_PROVIDER
  from rasa.shared.providers._configs.self_hosted_llm_client_config import (
  SelfHostedLLMClientConfig,
  )
+ from rasa.shared.exceptions import ProviderClientAPIException
  from rasa.shared.providers.llm._base_litellm_client import _BaseLiteLLMClient
+ from rasa.shared.providers.llm.llm_response import LLMResponse, LLMUsage
+ from rasa.shared.utils.io import suppress_logs

  structlogger = structlog.get_logger()

@@ -19,6 +27,8 @@ class SelfHostedLLMClient(_BaseLiteLLMClient):
  api_base (str): The base URL of the API endpoint.
  api_type (Optional[str]): The type of the API endpoint.
  api_version (Optional[str]): The version of the API endpoint.
+ use_chat_completions_endpoint (Optional[bool]): Whether to use the chat
+ completions endpoint for completions. Defaults to True.
  kwargs: Any: Additional configuration parameters that can include, but
  are not limited to model parameters and lite-llm specific
  parameters. These parameters will be passed to the
@@ -36,6 +46,7 @@
  api_base: str,
  api_type: Optional[str] = None,
  api_version: Optional[str] = None,
+ use_chat_completions_endpoint: Optional[bool] = True,
  **kwargs: Any,
  ):
  super().__init__()  # type: ignore
@@ -44,6 +55,7 @@
  self._api_base = api_base
  self._api_type = api_type
  self._api_version = api_version
+ self._use_chat_completions_endpoint = use_chat_completions_endpoint
  self._extra_parameters = kwargs or {}

  @classmethod
@@ -66,6 +78,7 @@
  api_base=client_config.api_base,
  api_type=client_config.api_type,
  api_version=client_config.api_version,
+ use_chat_completions_endpoint=client_config.use_chat_completions_endpoint,
  **client_config.extra_parameters,
  )

@@ -132,6 +145,7 @@
  api_base=self._api_base,
  api_type=self._api_type,
  api_version=self._api_version,
+ use_chat_completions_endpoint=self._use_chat_completions_endpoint,
  extra_parameters=self._extra_parameters,
  )
  return config.to_dict()
@@ -167,3 +181,101 @@
  }
  )
  return fn_args
+
+ @suppress_logs(log_level=logging.WARNING)
+ def _text_completion(self, prompt: Union[List[str], str]) -> LLMResponse:
+ """
+ Synchronously generate completions for given prompt.
+
+ Args:
+ prompt: Prompt to generate the completion for.
+ Returns:
+ List of message completions.
+ Raises:
+ ProviderClientAPIException: If the API request fails.
+ """
+ try:
+ response = text_completion(prompt=prompt, **self._completion_fn_args)
+ return self._format_text_completion_response(response)
+ except Exception as e:
+ raise ProviderClientAPIException(e)
+
+ @suppress_logs(log_level=logging.WARNING)
+ async def _atext_completion(self, prompt: Union[List[str], str]) -> LLMResponse:
+ """
+ Asynchronously generate completions for given prompt.
+
+ Args:
+ prompt: Prompt to generate the completion for.
+ Returns:
+ List of message completions.
+ Raises:
+ ProviderClientAPIException: If the API request fails.
+ """
+ try:
+ response = await atext_completion(prompt=prompt, **self._completion_fn_args)
+ return self._format_text_completion_response(response)
+ except Exception as e:
+ raise ProviderClientAPIException(e)
+
+ async def acompletion(self, messages: Union[List[str], str]) -> LLMResponse:
+ """Asynchronous completion of the model with the given messages.
+
+ Method overrides the base class method to call the appropriate
+ completion method based on the configuration. If the chat completions
+ endpoint is enabled, the acompletion method is called. Otherwise, the
+ atext_completion method is called.
+
+ Args:
+ messages: The messages to be used for completion.
+
+ Returns:
+ The completion response.
+ """
+ if self._use_chat_completions_endpoint:
+ return await super().acompletion(messages)
+ return await self._atext_completion(messages)
+
+ def completion(self, messages: Union[List[str], str]) -> LLMResponse:
+ """Completion of the model with the given messages.
+
+ Method overrides the base class method to call the appropriate
+ completion method based on the configuration. If the chat completions
+ endpoint is enabled, the completion method is called. Otherwise, the
+ text_completion method is called.
+
+ Args:
+ messages: The messages to be used for completion.
+
+ Returns:
+ The completion response.
+ """
+ if self._use_chat_completions_endpoint:
+ return super().completion(messages)
+ return self._text_completion(messages)
+
+ def _format_text_completion_response(self, response: Any) -> LLMResponse:
+ """Parses the LiteLLM text completion response to Rasa format."""
+ formatted_response = LLMResponse(
+ id=response.id,
+ created=response.created,
+ choices=[choice.text for choice in response.choices],
+ model=response.model,
+ )
+ if (usage := response.usage) is not None:
+ prompt_tokens = (
+ num_tokens
+ if isinstance(num_tokens := usage.prompt_tokens, (int, float))
+ else 0
+ )
+ completion_tokens = (
+ num_tokens
+ if isinstance(num_tokens := usage.completion_tokens, (int, float))
+ else 0
+ )
+ formatted_response.usage = LLMUsage(prompt_tokens, completion_tokens)
+ structlogger.debug(
+ "base_litellm_client.formatted_response",
+ formatted_response=formatted_response.to_dict(),
+ )
+ return formatted_response
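With the new flag, completion() and acompletion() keep using the chat completions endpoint by default and only fall back to LiteLLM's text completion API when the flag is disabled. A minimal sketch; only api_base and use_chat_completions_endpoint come from the diff, the other argument names and values are assumptions:

    client = SelfHostedLLMClient(
        model="llama-3.1-8b-instruct",        # hypothetical model name
        provider="self-hosted",               # hypothetical provider value
        api_base="http://localhost:8000/v1",
        use_chat_completions_endpoint=False,
    )
    # With the flag disabled, this call goes through litellm.text_completion
    # instead of the chat completions endpoint used by the base client.
    response = client.completion("Write a one-line greeting.")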
rasa/shared/utils/yaml.py CHANGED
@@ -416,6 +416,47 @@ def validate_raw_yaml_using_schema_file_with_responses(
  )


+ def process_content(content: str) -> str:
+ """
+ Process the content to handle both Windows paths and emojis.
+ Windows paths are processed by escaping backslashes but emojis are left untouched.
+
+ Args:
+ content: yaml content to be processed
+ """
+ # Detect common Windows path patterns: e.g., C:\ or \\
+ UNESCAPED_WINDOWS_PATH_PATTERN = re.compile(
+ r"(?<!\w)[a-zA-Z]:(\\[a-zA-Z0-9_ -]+)*(\\)?(?!\\n)"
+ )
+ ESCAPED_WINDOWS_PATH_PATTERN = re.compile(
+ r"(?<!\w)[a-zA-Z]:(\\\\[a-zA-Z0-9_ -]+)+\\\\?(?!\\n)"
+ )
+
+ # Function to escape backslashes in Windows paths but leave other content as is
+ def escape_windows_paths(match: re.Match) -> str:
+ path = str(match.group(0))
+ return path.replace("\\", "\\\\")  # Escape backslashes only in Windows paths
+
+ def unescape_windows_paths(match: re.Match) -> str:
+ path = str(match.group(0))
+ return path.replace("\\\\", "\\")
+
+ # First, process Windows paths by escaping backslashes
+ content = re.sub(UNESCAPED_WINDOWS_PATH_PATTERN, escape_windows_paths, content)
+
+ # Ensure proper handling of emojis by decoding Unicode sequences
+ content = (
+ content.encode("utf-8")
+ .decode("raw_unicode_escape")
+ .encode("utf-16", "surrogatepass")
+ .decode("utf-16")
+ )
+
+ content = re.sub(ESCAPED_WINDOWS_PATH_PATTERN, unescape_windows_paths, content)
+
+ return content
+
+

  def read_yaml(
  content: str,
@@ -432,13 +473,7 @@
  ruamel.yaml.parser.ParserError: If there was an error when parsing the YAML.
  """
  if _is_ascii(content):
- # Required to make sure emojis are correctly parsed
- content = (
- content.encode("utf-8")
- .decode("raw_unicode_escape")
- .encode("utf-16", "surrogatepass")
- .decode("utf-16")
- )
+ content = process_content(content)

  custom_constructor = kwargs.get("custom_constructor", None)

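The helper aims to keep Windows paths intact while the unicode-escape round trip that repairs emoji sequences runs. A hedged example of the intended behavior (exact output depends on the input):

    yaml_text = 'model_path: C:\\Users\\bot\\models\nicon: "\\U0001F600"'
    cleaned = process_content(yaml_text)
    # The backslashes in the Windows path are escaped before the
    # raw_unicode_escape/utf-16 round trip and unescaped again afterwards,
    # so the path is preserved while the \U0001F600 sequence becomes an emoji.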
rasa/tracing/config.py CHANGED
@@ -20,6 +20,7 @@ from rasa.core.actions.custom_action_executor import (
  CustomActionExecutor,
  RetryCustomActionExecutor,
  )
+ from rasa.core.actions.grpc_custom_action_executor import GRPCCustomActionExecutor
  from rasa.core.agent import Agent
  from rasa.core.processor import MessageProcessor
  from rasa.core.tracker_store import TrackerStore
@@ -44,11 +45,10 @@ from rasa.tracing.constants import (
  ENDPOINTS_ROOT_CERTIFICATES_KEY,
  ENDPOINTS_METRICS_KEY,
  )
+ from rasa.tracing.instrumentation import instrumentation
  from rasa.tracing.metric_instrument_provider import MetricInstrumentProvider
  from rasa.utils.endpoints import EndpointConfig, read_endpoint_config

- from rasa.tracing.instrumentation import instrumentation
-
  TRACING_SERVICE_NAME = os.environ.get("TRACING_SERVICE_NAME", "rasa")

  logger = logging.getLogger(__name__)
@@ -107,6 +107,7 @@ def configure_tracing(tracer_provider: Optional[TracerProvider]) -> None:
  vector_store_subclasses=vector_store_subclasses,
  nlu_command_adapter_class=NLUCommandAdapter,
  endpoint_config_class=EndpointConfig,
+ grpc_custom_action_executor_class=GRPCCustomActionExecutor,
  single_step_llm_command_generator_class=SingleStepLLMCommandGenerator,
  multi_step_llm_command_generator_class=MultiStepLLMCommandGenerator,
  custom_action_executor_subclasses=custom_action_executor_subclasses,
@@ -42,6 +42,7 @@ from rasa.tracing.constants import (
  PROMPT_TOKEN_LENGTH_ATTRIBUTE_NAME,
  REQUEST_BODY_SIZE_IN_BYTES_ATTRIBUTE_NAME,
  )
+ from rasa.shared.core.training_data.structures import StoryGraph

  if TYPE_CHECKING:
  from langchain.llms.base import BaseLLM
@@ -374,6 +375,7 @@ def extract_attrs_for_execute_commands(
  tracker: DialogueStateTracker,
  all_flows: FlowsList,
  execution_context: ExecutionContext,
+ story_graph: Optional[StoryGraph] = None,
  ) -> Dict[str, Any]:
  return {
  "number_of_events": len(tracker.events),
@@ -416,6 +418,7 @@ def extract_attrs_for_clean_up_commands(
  tracker: DialogueStateTracker,
  all_flows: FlowsList,
  execution_context: ExecutionContext,
+ story_graph: Optional[StoryGraph] = None,
  ) -> Dict[str, Any]:
  commands_list = []

@@ -20,10 +20,10 @@ from typing import (

  from multidict import MultiDict
  from opentelemetry.context import Context
-
  from opentelemetry.sdk.trace import TracerProvider
  from opentelemetry.trace import SpanKind, Tracer
  from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
+
  from rasa.core.actions.action import Action, RemoteAction, CustomActionExecutor
  from rasa.core.actions.custom_action_executor import RetryCustomActionExecutor
  from rasa.core.actions.grpc_custom_action_executor import GRPCCustomActionExecutor
@@ -48,11 +48,13 @@ from rasa.dialogue_understanding.generator import (
  from rasa.dialogue_understanding.generator.nlu_command_adapter import NLUCommandAdapter
  from rasa.engine.graph import GraphNode
  from rasa.engine.training.graph_trainer import GraphTrainer
+ from rasa.shared.core.domain import Domain
  from rasa.shared.core.flows import FlowsList
  from rasa.shared.core.trackers import DialogueStateTracker
  from rasa.shared.nlu.constants import SET_SLOT_COMMAND
  from rasa.shared.nlu.training_data.message import Message
  from rasa.tracing.constants import REQUEST_BODY_SIZE_IN_BYTES_ATTRIBUTE_NAME
+ from rasa.tracing.instrumentation import attribute_extractors
  from rasa.tracing.instrumentation.intentless_policy_instrumentation import (
  _instrument_extract_ai_responses,
  _instrument_generate_answer,
@@ -68,9 +70,6 @@ from rasa.tracing.instrumentation.metrics import (
  )
  from rasa.utils.endpoints import concat_url, EndpointConfig

- from rasa.tracing.instrumentation import attribute_extractors
-
-
  # The `TypeVar` representing the return type for a function to be wrapped.
  S = TypeVar("S")
  # The `TypeVar` representing the type of the argument passed to the function to be
@@ -308,6 +307,7 @@ def instrument(
  vector_store_subclasses: Optional[List[Type[InformationRetrievalType]]] = None,
  nlu_command_adapter_class: Optional[Type[NLUCommandAdapterType]] = None,
  endpoint_config_class: Optional[Type[EndpointConfigType]] = None,
+ grpc_custom_action_executor_class: Optional[Type[GRPCCustomActionExecutor]] = None,
  single_step_llm_command_generator_class: Optional[
  Type[SingleStepLLMCommandGeneratorType]
  ] = None,
@@ -354,6 +354,9 @@
  `None` is given, no `NLUCommandAdapter` will be instrumented.
  :param endpoint_config_class: The `EndpointConfig` to be instrumented. If
  `None` is given, no `EndpointConfig` will be instrumented.
+ :param grpc_custom_action_executor_class: The `GRPCCustomActionExecution` to be
+ instrumented. If `None` is given, no `GRPCCustomActionExecution`
+ will be instrumented.
  :param single_step_llm_command_generator_class: The `SingleStepLLMCommandGenerator`
  to be instrumented. If `None` is given, no `SingleStepLLMCommandGenerator` will
  be instrumented.
@@ -581,6 +584,14 @@
  endpoint_config_class,
  )

+ if grpc_custom_action_executor_class is not None and not class_is_instrumented(
+ grpc_custom_action_executor_class
+ ):
+ _instrument_grpc_custom_action_executor(
+ tracer_provider.get_tracer(grpc_custom_action_executor_class.__module__),
+ grpc_custom_action_executor_class,
+ )
+
  if custom_action_executor_subclasses:
  for custom_action_executor_subclass in custom_action_executor_subclasses:
  if (
@@ -1096,6 +1107,38 @@ def _instrument_endpoint_config(
  logger.debug(f"Instrumented '{endpoint_config_class.__name__}.request'.")


+ def _instrument_grpc_custom_action_executor(
+ tracer: Tracer, grpc_custom_action_executor_class: Type[GRPCCustomActionExecutor]
+ ) -> None:
+ """Instrument the `run` method of the `GRPCCustomActionExecutor` class.
+
+ Args:
+ tracer: The `Tracer` that shall be used for tracing.
+ grpc_custom_action_executor_class: The `GRPCCustomActionExecutor` to
+ be instrumented.
+ """
+
+ def tracing_grpc_custom_action_executor_wrapper(fn: Callable) -> Callable:
+ @functools.wraps(fn)
+ async def wrapper(
+ self: Type[GRPCCustomActionExecutor],
+ tracker: Type[DialogueStateTracker],
+ domain: Type[Domain],
+ include_domain: bool = False,
+ ) -> bool:
+ TraceContextTextMapPropagator().inject(self.action_endpoint.headers)
+ result = await fn(self, tracker, domain, include_domain)
+ return result
+
+ return wrapper
+
+ grpc_custom_action_executor_class.run = tracing_grpc_custom_action_executor_wrapper(  # type: ignore[assignment]
+ grpc_custom_action_executor_class.run
+ )
+
+ logger.debug(f"Instrumented '{grpc_custom_action_executor_class.__name__}.run.")
+
+
  def _mangled_instrumented_boolean_attribute_name(instrumented_class: Type) -> Text:
  # see https://peps.python.org/pep-0008/#method-names-and-instance-variables
  # and https://stackoverflow.com/a/50401073
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
  # this file will automatically be changed,
  # do not add anything but the version number here!
- __version__ = "3.10.4"
+ __version__ = "3.10.6"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: rasa-pro
- Version: 3.10.4
+ Version: 3.10.6
  Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
  Home-page: https://rasa.com
  Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
@@ -33,7 +33,7 @@ Requires-Dist: apscheduler (>=3.10,<3.11)
  Requires-Dist: attrs (>=23.1,<23.2)
  Requires-Dist: azure-storage-blob (>=12.16.0,<12.17.0)
  Requires-Dist: boto3 (>=1.35.5,<1.36.0)
- Requires-Dist: certifi (>=2024.2.2)
+ Requires-Dist: certifi (>=2024.07.04)
  Requires-Dist: cloudpickle (>=2.2.1,<3.1)
  Requires-Dist: colorama (>=0.4.6,<0.5.0) ; sys_platform == "win32"
  Requires-Dist: colorclass (>=2.2,<2.3)
@@ -49,6 +49,7 @@ Requires-Dist: faiss-cpu (>=1.7.4,<2.0.0)
  Requires-Dist: faker (>=26.0.0,<27.0.0)
  Requires-Dist: fbmessenger (>=6.0.0,<6.1.0)
  Requires-Dist: github3.py (>=3.2.0,<3.3.0) ; extra == "gh-release-notes"
+ Requires-Dist: gitpython (>=3.1.41,<3.2.0) ; extra == "full"
  Requires-Dist: google-auth (>=2.23.4,<3)
  Requires-Dist: google-cloud-storage (>=2.14.0,<3.0.0)
  Requires-Dist: hvac (>=1.2.1,<2.0.0)
@@ -63,13 +64,13 @@ Requires-Dist: jsonschema (>=4.22)
  Requires-Dist: keras (==2.14.0)
  Requires-Dist: langchain (>=0.2.0,<0.3.0)
  Requires-Dist: langchain-community (>=0.2.0,<0.3.0)
- Requires-Dist: litellm (>=1.44.7,<1.45.0)
+ Requires-Dist: litellm (>=1.45.0,<1.46.0)
  Requires-Dist: matplotlib (>=3.7,<3.8)
  Requires-Dist: mattermostwrapper (>=2.2,<2.3)
  Requires-Dist: mlflow (>=2.15.1,<3.0.0) ; extra == "mlflow"
  Requires-Dist: networkx (>=3.1,<3.2)
  Requires-Dist: numpy (>=1.23.5,<1.25.0) ; python_version >= "3.9" and python_version < "3.11"
- Requires-Dist: openai (>=1.40.0,<1.41.0)
+ Requires-Dist: openai (>=1.47.0,<1.48.0)
  Requires-Dist: openpyxl (>=3.1.5,<4.0.0)
  Requires-Dist: opentelemetry-api (>=1.16.0,<1.17.0)
  Requires-Dist: opentelemetry-exporter-jaeger (>=1.16.0,<1.17.0)
@@ -302,7 +302,7 @@ rasa/core/policies/flows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
  rasa/core/policies/flows/flow_exceptions.py,sha256=_FQuN-cerQDM1pivce9bz4zylh5UYkljvYS1gjDukHI,1527
  rasa/core/policies/flows/flow_executor.py,sha256=okOxDzBekd9sMOLuxxVs6KLQj_xVqKgxO0sxKAA7gfg,25482
  rasa/core/policies/flows/flow_step_result.py,sha256=agjPrD6lahGSe2ViO5peBeoMdI9ngVGRSgtytgxmJmg,1360
- rasa/core/policies/intentless_policy.py,sha256=5NtOu4pepTfjTTQptqGcvLKAcmVNpj0ymQovlbSmYf4,34417
+ rasa/core/policies/intentless_policy.py,sha256=M2pUHtveJ7Oh6cDRX2NiFCnfRUP6HkW4PmOv_T-9Jv0,35121
  rasa/core/policies/intentless_prompt_template.jinja2,sha256=KhIL3cruMmkxhrs5oVbqgSvK6ZiN_6TQ_jXrgtEB-ZY,677
  rasa/core/policies/memoization.py,sha256=XoRxUdYUGRfO47tAEyc5k5pUgt38a4fipO336EU5Vdc,19466
  rasa/core/policies/policy.py,sha256=HeVtIaV0dA1QcAG3vjdn-4g7-oUEJPL4u01ETJt78YA,27464
@@ -385,8 +385,8 @@ rasa/dialogue_understanding/patterns/search.py,sha256=X7HaMyFwS8gPprKFSOJvCoC6Ng
  rasa/dialogue_understanding/patterns/session_start.py,sha256=yglhIEkkquRf0YppZ4Cuv2eHlA5qscGoVXr0d-2bV-E,1153
  rasa/dialogue_understanding/patterns/skip_question.py,sha256=rvZuVUxulikwUhP01MAIgkcHZ4Si7mzxNedH6QBPdX4,1214
  rasa/dialogue_understanding/processor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/dialogue_understanding/processor/command_processor.py,sha256=tzwSiQfVwJqTf5VINpU6pPUAbef1nd9Um_FnRzzk1Mg,24473
- rasa/dialogue_understanding/processor/command_processor_component.py,sha256=KVDW0H2K-CGXljVww1vn61g-TqCWGzbZ-wwvQ9THMV0,1398
+ rasa/dialogue_understanding/processor/command_processor.py,sha256=_kLtp3jV3MDKvakyyVp5O7ypp8btesPTfCxlTQ_QD-M,25058
+ rasa/dialogue_understanding/processor/command_processor_component.py,sha256=oyz6lq64CRmnugTrIJJ_Q7RLGYD_xbZJPVJ47xNbyvw,1526
  rasa/dialogue_understanding/stack/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/dialogue_understanding/stack/dialogue_stack.py,sha256=j8MnLCyv6cAZVpKRaUVM-Z5HqgWP-scrnaiQXzLNBwY,5243
  rasa/dialogue_understanding/stack/frames/__init__.py,sha256=CXLs8I_eeJ-d2tQmS19V26OM6CHy3VN5whH5uHBodj4,656
@@ -425,7 +425,7 @@ rasa/engine/loader.py,sha256=ommFdjddJWkb-KHpemxRhWYlMYRAUs8rcmtJx_DzqnM,1384
  rasa/engine/recipes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/engine/recipes/config_files/default_config.yml,sha256=M1ehmJScNbVCQD_wkBktmf0YqXn-O4UzJVbB7tnAtqs,1219
  rasa/engine/recipes/default_components.py,sha256=XRg51DZ5CKmDjghXCUq-rTv8rkSOYUTkLNulLHnwWHc,3950
- rasa/engine/recipes/default_recipe.py,sha256=LSdhag8xjMtEIk1-lIuWgATKLGv8XsuVi9INO7xoulU,45630
+ rasa/engine/recipes/default_recipe.py,sha256=mxNyU8vxcsspGEBv_ne_Ibw_Wcj1m09eYGOcPvYWrdQ,45918
  rasa/engine/recipes/graph_recipe.py,sha256=Kd2vKIRCENzWe9W24HwNedk_b6lIXL4aYqVIZsCJ4ts,3301
  rasa/engine/recipes/recipe.py,sha256=v-t5jwntbXcLWVzfKhDuluz__ouEgl_Q5VFTZieqYgs,3346
  rasa/engine/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -454,7 +454,7 @@ rasa/graph_components/providers/forms_provider.py,sha256=BU6BDzIFjt2SZOmZ8xY8m_s
  rasa/graph_components/providers/nlu_training_data_provider.py,sha256=ZoCRcVzAuoqIxfynWkwRf5AG3q7XUk6HMthd8LR1Uh0,2119
  rasa/graph_components/providers/responses_provider.py,sha256=yTtVABjLN1uTtVjAeqPjiEnm0egnA21CwWCjMWekqJY,1354
  rasa/graph_components/providers/rule_only_provider.py,sha256=mTzf5_PulQB7Y_voLnljm2Ob7shYPAAx_2cKN-MRovg,1540
- rasa/graph_components/providers/story_graph_provider.py,sha256=7kLBteTxWGJRFjkdwgpsEemsKSCkffsDKk3pdX0L6Vg,1466
+ rasa/graph_components/providers/story_graph_provider.py,sha256=tvTCq5dtAHa3VFOYQqcd15gA0cFJWqgPF3K0vQXCm_4,3300
  rasa/graph_components/providers/training_tracker_provider.py,sha256=nCHyLsiC8q3B0CIgVCbhUBCAil24ja43UKasoVFJ3DM,1965
  rasa/graph_components/validators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/graph_components/validators/default_recipe_validator.py,sha256=BHrF6NTfJz42xG1LfVjkP5CdQef4NTcmiiC8xtMemaI,24457
@@ -489,7 +489,7 @@ rasa/nlu/classifiers/classifier.py,sha256=9fm1mORuFf1vowYIXmqE9yLRKdSC4nGQW7UqNZ
  rasa/nlu/classifiers/diet_classifier.py,sha256=C2mKZ2GP7Uptpag240fFkAEZf6P1NuU_2TrnSsR3IA0,71936
  rasa/nlu/classifiers/fallback_classifier.py,sha256=FYOgM7bLG3HlasVWRozanz-MmDozygTlTIFcPHJWJoo,7150
  rasa/nlu/classifiers/keyword_intent_classifier.py,sha256=dxDzCK7YzYKslZiXYkBD1Al1y_yZWdZYkBBl7FLyPm8,7581
- rasa/nlu/classifiers/logistic_regression_classifier.py,sha256=tK8esDssBf74IyGOqHSefIQd0-vGu9Yn6MvlrbJ4cX8,8317
+ rasa/nlu/classifiers/logistic_regression_classifier.py,sha256=Qga66-PjW4I4D2uIMoX2aW8ywdufq9ISmt12rP3rj9g,9124
  rasa/nlu/classifiers/mitie_intent_classifier.py,sha256=_hf0aKWjcjZ8NdH61gbutgY5vAjMmpYDhCpO3dwIrDk,5559
  rasa/nlu/classifiers/regex_message_handler.py,sha256=r6Z-uFJvqFZjpI1rUeaZZnAOUL9lxuBxGK7W6WZIPOw,1989
  rasa/nlu/classifiers/sklearn_intent_classifier.py,sha256=zPLr1GNCEAG8xW5SEPLgc2lsenXavTG9KDby8JUDX3o,11923
@@ -518,12 +518,12 @@ rasa/nlu/featurizers/dense_featurizer/mitie_featurizer.py,sha256=xE-dOmdBqCJ4NEm
  rasa/nlu/featurizers/dense_featurizer/spacy_featurizer.py,sha256=tJzDeX8wkOO1iUNmx13FSIeMHNC0U0RB5ZF9pPo8nqQ,4888
  rasa/nlu/featurizers/featurizer.py,sha256=cV2v4f1V2DWDqJY1-oGAZsytv0L827nsCtUY6KjEChg,3348
  rasa/nlu/featurizers/sparse_featurizer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py,sha256=twFecYgWF4uyfV_VxHIDBCykllypSSndfszpAnniaY8,33295
+ rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py,sha256=275NcC7W9_n7V0AyVXm8jtYd9fcVHXZRQMgr5MVZAvA,33600
  rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py,sha256=awydhZZTRmff35L1838bbghNbutEf5xty301OyRIgvI,21848
  rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py,sha256=PhzJ17lNv3I5h8WrCvjzjjcUvbu_MJBxY6k3pQTDCac,10289
  rasa/nlu/featurizers/sparse_featurizer/sparse_featurizer.py,sha256=m6qpixorfTDFWSfGVmLImTOHM6zKdgydPaP_wVxCQ-w,220
  rasa/nlu/model.py,sha256=r6StZb4Dmum_3dRoocxZWo2M5KVNV20_yKNvYZNvpOc,557
- rasa/nlu/persistor.py,sha256=SXTmuNBM9o2uReePRdohVkQQusnZ9YDBJHw3okkJc5U,13698
+ rasa/nlu/persistor.py,sha256=Sc0NH2VSK9efOYSD0INYd3za3esQvgNHa4FwClJVH-c,13788
  rasa/nlu/run.py,sha256=WumXqNn2PEyab463geNnOu3IPwgaCtBai-x685BYCNw,803
  rasa/nlu/selectors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/nlu/selectors/response_selector.py,sha256=gwffu9zLniMseOzX-SuqaZ2CQiGi4JUbDcwUe-BsThI,39021
@@ -545,12 +545,12 @@ rasa/nlu/utils/spacy_utils.py,sha256=pBvsCVKVuZ3b2Pjn-XuOVZ6lzZu9Voc2R4N1VczwtCM
  rasa/plugin.py,sha256=H_OZcHy_U3eAK-JHr43TSxcPqS0JEGcZkFvmumeeJEs,2670
  rasa/server.py,sha256=pUdhi5dkhzEj4bngj2bTUKJohepjpY-aZ4MGKHYZRH0,57775
  rasa/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/shared/constants.py,sha256=MDCiIeygQL5ZwPcRVgynW1WFWR4uQLbPSp422tuwCc4,9093
+ rasa/shared/constants.py,sha256=FL_lmM6w87X3tFUqDkXwIisoOODkomLorLPyG-TZpqQ,9168
  rasa/shared/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/shared/core/command_payload_reader.py,sha256=Vhiop9LWFawaEruRifBBrVmoEJ-fj1Tli1wBvsYu2_I,3563
  rasa/shared/core/constants.py,sha256=U1q0-oGMQ7aDsP5p6H4CRaBi-H-KZpm2IXpTzIbaEv8,5169
  rasa/shared/core/conversation.py,sha256=tw1fD2XB3gOdQjDI8hHo5TAAmE2JYNogQGWe3rE929w,1385
- rasa/shared/core/domain.py,sha256=L5QpwKckMUifGttbp-MPJiKUNyg9gCJJPspTRVRC5gg,79804
+ rasa/shared/core/domain.py,sha256=zHnwLneOmRRQQp7ZGAt0RSzH_jyhzRgEPRXCa_b70wo,80054
  rasa/shared/core/events.py,sha256=HXBy-DfulOhcHxrfVtP-uUBVnCx40wkRwqDx80mOFX4,82791
  rasa/shared/core/flows/__init__.py,sha256=HszhIvEARpmyxABFc1MKYvj8oy04WiZW1xmCdToakbs,181
  rasa/shared/core/flows/flow.py,sha256=cxf3AVbweOezV8SuCQSog2eYI6nKw7R0SQt8XJiG7zU,21208
@@ -588,7 +588,7 @@ rasa/shared/core/training_data/story_reader/yaml_story_reader.py,sha256=Yn4dpLCn
  rasa/shared/core/training_data/story_writer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/shared/core/training_data/story_writer/story_writer.py,sha256=cR4VVhhS2UzFYhhqL9iojR88Fknc63psCJ4YN7RPeKo,2544
  rasa/shared/core/training_data/story_writer/yaml_story_writer.py,sha256=0proT_BgtzCK3LO3nRA8blVabz3LElORQ-izF_7DdjE,15792
- rasa/shared/core/training_data/structures.py,sha256=GTGOdYo3M5u8yS5pPTN2mL0gWyCwtPU14FGdk7-IZFg,29341
+ rasa/shared/core/training_data/structures.py,sha256=diNYa5aCFz0TA9jM1H4-Q3I1vdsXtExk98FCuC0pDYg,30039
  rasa/shared/core/training_data/visualization.html,sha256=viHR5xPh9FxWGXH9b_4ZXzj-ZoG_S3k_fDKe2ZACVAg,3499
  rasa/shared/core/training_data/visualization.py,sha256=th3lzvbmJJh8OHdI7Bs-bHoisQRuhj8fy7xKVmBzgsI,20413
  rasa/shared/data.py,sha256=gIcyPU_ZG4nk3owSml_WkivxIASXlTzV5x3YhfnyZn8,6873
@@ -631,7 +631,7 @@ rasa/shared/providers/_configs/client_config.py,sha256=Tvnv9CCpzoHB053lvuqth_Jwj
  rasa/shared/providers/_configs/default_litellm_client_config.py,sha256=ywD0EYWCKqNpx0fPyOavIwwS2BTd3q9QtNeAyIvOd8E,4318
  rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py,sha256=laahM8xQJ-r0nAL8X2SnBM5kZAOiPNo-w0hv7BudAdk,8166
  rasa/shared/providers/_configs/openai_client_config.py,sha256=bkSQyL0o3MYVSrKWe76QPo7tXtsZPhg3tfA7QQ2EmLo,5805
- rasa/shared/providers/_configs/self_hosted_llm_client_config.py,sha256=HKSYPuD5uGKTjKsOZwPhECplcX53ijIAl_-aZmAmohs,5694
+ rasa/shared/providers/_configs/self_hosted_llm_client_config.py,sha256=fUEvqCrhEIX-y2WLP2UlG8zMjNTWBZb1LUIEAUy6bYQ,5929
  rasa/shared/providers/_configs/utils.py,sha256=vxtWmrnpnH2ryHWiP7hpLbSBAq3x-3hsGxGWSEgp4Lk,3334
  rasa/shared/providers/_ssl_verification_utils.py,sha256=4tujCOjg0KKX2_DzOb7lZTdsUXtzRB4UkfhkC3W0jO0,4166
  rasa/shared/providers/embedding/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -644,13 +644,13 @@ rasa/shared/providers/embedding/embedding_response.py,sha256=H55mSAL3LfVvDlBklaC
  rasa/shared/providers/embedding/huggingface_local_embedding_client.py,sha256=Zo3gyj49h4LxXV7bx39TXpIPKlernG-5xzqXczTCbig,6913
  rasa/shared/providers/embedding/openai_embedding_client.py,sha256=XNRGE7apo2v3kWRrtgxE-Gq4rvNko3IiXtvgC4krDYE,5429
  rasa/shared/providers/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/shared/providers/llm/_base_litellm_client.py,sha256=qf_jsQqTmANKwYY316t1TrgNnFxOrMOGoV8-D4xkMD8,8375
+ rasa/shared/providers/llm/_base_litellm_client.py,sha256=q24u62HNVPKdEvr5fHy_5AWzi3EwexuuKOMqn5nP_U0,8392
  rasa/shared/providers/llm/azure_openai_llm_client.py,sha256=jwEntKsBHKOXUhNEQM-kTYIfRCJ6qEka58ZPnKsSsb8,11836
  rasa/shared/providers/llm/default_litellm_llm_client.py,sha256=yvqd4ARoGSi9iqfE2uFvVEYRU6rICePBnEEKTduCc9w,2777
  rasa/shared/providers/llm/llm_client.py,sha256=6-gMsEJqquhUPGXzNiq_ybM_McLWxAJ_QhbmWcLnb_Q,2358
  rasa/shared/providers/llm/llm_response.py,sha256=Ltmc8yk9cAqtK8QgwfZZywudM5ZQsT4y_AKAQ3q05hA,1490
  rasa/shared/providers/llm/openai_llm_client.py,sha256=uDdcugBcO3sfxbduc00eqaZdrJP0VFX5dkBd2Dem47M,4844
- rasa/shared/providers/llm/self_hosted_llm_client.py,sha256=oSegJImwrdT7tlYf-ehF8ceCwmhsglxgwzkEneLgOV8,5229
+ rasa/shared/providers/llm/self_hosted_llm_client.py,sha256=BeFndaz5gknobEFvNHM4GLFs_Kx2LnaXaPG_GGTqEpk,9765
  rasa/shared/providers/mappings.py,sha256=xg-KpfiqotGScmLywg7Ywu10orAPhOTQ4FZPv_JqhEo,2988
  rasa/shared/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/shared/utils/cli.py,sha256=bJpkf0VzzmtpmBnDnIl7SgvrntnBuaJQMHBXHm2WxcA,2916
@@ -665,7 +665,7 @@ rasa/shared/utils/schemas/domain.yml,sha256=b2k4ZYSV-QL3hGjDaRg8rfoqaTh4hbhDc_hB
  rasa/shared/utils/schemas/events.py,sha256=nm7WotE1UCZpkMMLNW2vs-vKslwngfG7VvjFp0T8ADM,6819
  rasa/shared/utils/schemas/model_config.yml,sha256=GU1lL_apXjJ3Xbd9Fj5jKm2h1HeB6V6TNqrhK5hOrGY,998
  rasa/shared/utils/schemas/stories.yml,sha256=DV3wAFnv1leD7kV-FH-GQihF1QX5oKHc8Eb24mxjizc,4737
- rasa/shared/utils/yaml.py,sha256=r5s7jjsGWU7a5X0WuOvB3jKx37xySvOHe_ydL5vqroY,31362
+ rasa/shared/utils/yaml.py,sha256=XJoKLe_vOmCcTI_Vg5o-iJzu2uumen7j1rrO6T95QXM,32592
  rasa/studio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/studio/auth.py,sha256=yPbDHJQAttWyDcjurJywgFlcSfBs6j0FfA4mWO7vKEA,9535
  rasa/studio/config.py,sha256=wqjvg_hARKG-6cV3JrhnP_ptZIq0VSFbdv-aWrH0u_Q,4091
@@ -677,11 +677,11 @@ rasa/studio/train.py,sha256=gfPtirITzBDo9gV4hqDNSwPYtVp_22cq8OWI6YIBgyk,4243
  rasa/studio/upload.py,sha256=3XqYyIZE1L3ohJtRcpf039IRc73A_rZ4KVWC32_dBo4,14027
  rasa/telemetry.py,sha256=Q6MQnDhOY6cKRVs3PayiM6WYWb8QJ_Hb3_eOm12n0tI,61093
  rasa/tracing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/tracing/config.py,sha256=2UvJFmeH2fP-Te3hIP4WHNN9piJJRMBltpmfsBqbBCM,12586
+ rasa/tracing/config.py,sha256=O4iHkE4kF6r4uVAt3JUb--TniK7pWD4j3d08Vf_GuYY,12736
  rasa/tracing/constants.py,sha256=N_MJLStE3IkmPKQCQv42epd3jdBMJ4Ith1dVO65N5ho,2425
  rasa/tracing/instrumentation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/tracing/instrumentation/attribute_extractors.py,sha256=mKeI-aliz3U9MfwveUqyQWdDEzXZynlDcGqkEJX7VXw,23079
- rasa/tracing/instrumentation/instrumentation.py,sha256=wYSY9MftTX06fmMwphm8iL-ULIlVr1gpNb-zFmcJ_zg,43987
+ rasa/tracing/instrumentation/attribute_extractors.py,sha256=_AbL_8zhvK8MKh0TVP0Kv9SR66c0acYGmPVLa18-Es0,23236
+ rasa/tracing/instrumentation/instrumentation.py,sha256=buoETr3rUHGirkaSY95SiTRqMZI1FCXjphcGXEH_e1A,45821
  rasa/tracing/instrumentation/intentless_policy_instrumentation.py,sha256=8AdMOy_2mlKnlmt-muV8-eoT8jA52GXDzM0avejfg8A,4821
  rasa/tracing/instrumentation/metrics.py,sha256=ByfKshoxNOqjKZwKTulqL71s5b3WugqLfjha3So0OEU,10534
  rasa/tracing/metric_instrument_provider.py,sha256=VtqT1XGQiDODJxbJDXA89AZP5dZMpVRAJuKkblsI2Kg,9989
@@ -719,9 +719,9 @@ rasa/utils/train_utils.py,sha256=f1NWpp5y6al0dzoQyyio4hc4Nf73DRoRSHDzEK6-C4E,212
  rasa/utils/url_tools.py,sha256=JQcHL2aLqLHu82k7_d9imUoETCm2bmlHaDpOJ-dKqBc,1218
  rasa/utils/yaml.py,sha256=KjbZq5C94ZP7Jdsw8bYYF7HASI6K4-C_kdHfrnPLpSI,2000
  rasa/validator.py,sha256=HM0ZIWjo3JRt2FMIfgNI_s932OABOSXkflm-rFTNkvY,62608
- rasa/version.py,sha256=td7_iTF0LQr0S8id0WK1wCSUkhVbSJTmfFOtzTCXj-8,117
- rasa_pro-3.10.4.dist-info/METADATA,sha256=66kFNQG7Fv8mRsJgIl1HMpLAiOlEyQZ0r3Td-rx0enA,28148
- rasa_pro-3.10.4.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
- rasa_pro-3.10.4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- rasa_pro-3.10.4.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
- rasa_pro-3.10.4.dist-info/RECORD,,
+ rasa/version.py,sha256=GYr3HissJmOn-bRoxLYW3Vm7fq_m-bG_jo6kK71I-Oo,117
+ rasa_pro-3.10.6.dist-info/METADATA,sha256=xvdyCk1zQU8Aoa-H7UdsTOX2ZpJcbVz9aLCT9xz4icg,28211
+ rasa_pro-3.10.6.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
+ rasa_pro-3.10.6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ rasa_pro-3.10.6.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
+ rasa_pro-3.10.6.dist-info/RECORD,,