rasa-pro 3.13.0.dev20250612__py3-none-any.whl → 3.13.0.dev20250613__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. The information in this diff is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.

Potentially problematic release.


This version of rasa-pro might be problematic. Consult the registry's advisory page for this release for more details.

Files changed (156) hide show
  1. rasa/__main__.py +0 -3
  2. rasa/api.py +1 -1
  3. rasa/cli/dialogue_understanding_test.py +1 -1
  4. rasa/cli/e2e_test.py +1 -1
  5. rasa/cli/evaluate.py +1 -1
  6. rasa/cli/export.py +1 -1
  7. rasa/cli/llm_fine_tuning.py +12 -11
  8. rasa/cli/project_templates/defaults.py +133 -0
  9. rasa/cli/run.py +1 -1
  10. rasa/cli/studio/link.py +53 -0
  11. rasa/cli/studio/pull.py +78 -0
  12. rasa/cli/studio/push.py +78 -0
  13. rasa/cli/studio/studio.py +12 -0
  14. rasa/cli/studio/upload.py +8 -0
  15. rasa/cli/train.py +1 -1
  16. rasa/cli/utils.py +1 -1
  17. rasa/cli/x.py +1 -1
  18. rasa/constants.py +2 -0
  19. rasa/core/__init__.py +0 -16
  20. rasa/core/actions/action.py +5 -1
  21. rasa/core/actions/action_repeat_bot_messages.py +18 -22
  22. rasa/core/actions/action_run_slot_rejections.py +0 -1
  23. rasa/core/agent.py +16 -1
  24. rasa/core/available_endpoints.py +146 -0
  25. rasa/core/brokers/pika.py +1 -2
  26. rasa/core/channels/botframework.py +2 -2
  27. rasa/core/channels/channel.py +2 -2
  28. rasa/core/channels/hangouts.py +8 -5
  29. rasa/core/channels/mattermost.py +1 -1
  30. rasa/core/channels/rasa_chat.py +2 -4
  31. rasa/core/channels/rest.py +5 -4
  32. rasa/core/channels/studio_chat.py +3 -2
  33. rasa/core/channels/vier_cvg.py +1 -2
  34. rasa/core/channels/voice_ready/audiocodes.py +1 -8
  35. rasa/core/channels/voice_stream/audiocodes.py +7 -4
  36. rasa/core/channels/voice_stream/genesys.py +2 -2
  37. rasa/core/channels/voice_stream/twilio_media_streams.py +10 -5
  38. rasa/core/channels/voice_stream/voice_channel.py +33 -22
  39. rasa/core/http_interpreter.py +3 -7
  40. rasa/core/jobs.py +2 -1
  41. rasa/core/nlg/contextual_response_rephraser.py +38 -11
  42. rasa/core/nlg/generator.py +0 -1
  43. rasa/core/nlg/interpolator.py +2 -3
  44. rasa/core/nlg/summarize.py +39 -5
  45. rasa/core/policies/enterprise_search_policy.py +290 -66
  46. rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2 +63 -0
  47. rasa/core/policies/flow_policy.py +1 -1
  48. rasa/core/policies/flows/flow_executor.py +96 -17
  49. rasa/core/policies/intentless_policy.py +24 -16
  50. rasa/core/processor.py +104 -51
  51. rasa/core/run.py +33 -11
  52. rasa/core/tracker_stores/tracker_store.py +1 -1
  53. rasa/core/training/interactive.py +1 -1
  54. rasa/core/utils.py +24 -97
  55. rasa/dialogue_understanding/coexistence/intent_based_router.py +2 -1
  56. rasa/dialogue_understanding/coexistence/llm_based_router.py +8 -3
  57. rasa/dialogue_understanding/commands/can_not_handle_command.py +2 -0
  58. rasa/dialogue_understanding/commands/cancel_flow_command.py +2 -0
  59. rasa/dialogue_understanding/commands/chit_chat_answer_command.py +2 -0
  60. rasa/dialogue_understanding/commands/clarify_command.py +5 -1
  61. rasa/dialogue_understanding/commands/command_syntax_manager.py +1 -0
  62. rasa/dialogue_understanding/commands/human_handoff_command.py +2 -0
  63. rasa/dialogue_understanding/commands/knowledge_answer_command.py +2 -0
  64. rasa/dialogue_understanding/commands/repeat_bot_messages_command.py +2 -0
  65. rasa/dialogue_understanding/commands/set_slot_command.py +11 -1
  66. rasa/dialogue_understanding/commands/skip_question_command.py +2 -0
  67. rasa/dialogue_understanding/commands/start_flow_command.py +4 -0
  68. rasa/dialogue_understanding/commands/utils.py +26 -2
  69. rasa/dialogue_understanding/generator/__init__.py +7 -1
  70. rasa/dialogue_understanding/generator/command_generator.py +4 -2
  71. rasa/dialogue_understanding/generator/command_parser.py +2 -2
  72. rasa/dialogue_understanding/generator/command_parser_validator.py +63 -0
  73. rasa/dialogue_understanding/generator/constants.py +2 -2
  74. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v3_gpt_4o_2024_11_20_template.jinja2 +78 -0
  75. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +28 -463
  76. rasa/dialogue_understanding/generator/single_step/search_ready_llm_command_generator.py +147 -0
  77. rasa/dialogue_understanding/generator/single_step/single_step_based_llm_command_generator.py +477 -0
  78. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +8 -58
  79. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +37 -25
  80. rasa/dialogue_understanding/patterns/domain_for_patterns.py +190 -0
  81. rasa/dialogue_understanding/processor/command_processor.py +3 -3
  82. rasa/dialogue_understanding/processor/command_processor_component.py +3 -3
  83. rasa/dialogue_understanding/stack/frames/flow_stack_frame.py +17 -4
  84. rasa/dialogue_understanding/utils.py +68 -12
  85. rasa/dialogue_understanding_test/du_test_case.py +1 -1
  86. rasa/dialogue_understanding_test/du_test_runner.py +4 -22
  87. rasa/dialogue_understanding_test/test_case_simulation/test_case_tracker_simulator.py +2 -6
  88. rasa/e2e_test/e2e_test_runner.py +1 -1
  89. rasa/engine/constants.py +1 -1
  90. rasa/engine/recipes/default_recipe.py +26 -2
  91. rasa/engine/validation.py +3 -2
  92. rasa/hooks.py +0 -28
  93. rasa/llm_fine_tuning/annotation_module.py +39 -9
  94. rasa/llm_fine_tuning/conversations.py +3 -0
  95. rasa/llm_fine_tuning/llm_data_preparation_module.py +66 -49
  96. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +4 -2
  97. rasa/llm_fine_tuning/paraphrasing/rephrase_validator.py +52 -44
  98. rasa/llm_fine_tuning/paraphrasing_module.py +10 -12
  99. rasa/llm_fine_tuning/storage.py +4 -4
  100. rasa/llm_fine_tuning/utils.py +63 -1
  101. rasa/model_manager/model_api.py +88 -0
  102. rasa/model_manager/trainer_service.py +4 -4
  103. rasa/plugin.py +1 -11
  104. rasa/privacy/__init__.py +0 -0
  105. rasa/privacy/constants.py +83 -0
  106. rasa/privacy/event_broker_utils.py +77 -0
  107. rasa/privacy/privacy_config.py +281 -0
  108. rasa/privacy/privacy_config_schema.json +86 -0
  109. rasa/privacy/privacy_filter.py +340 -0
  110. rasa/privacy/privacy_manager.py +576 -0
  111. rasa/server.py +23 -2
  112. rasa/shared/constants.py +6 -0
  113. rasa/shared/core/constants.py +4 -3
  114. rasa/shared/core/domain.py +7 -0
  115. rasa/shared/core/events.py +37 -7
  116. rasa/shared/core/flows/flow.py +1 -2
  117. rasa/shared/core/flows/flows_yaml_schema.json +3 -0
  118. rasa/shared/core/flows/steps/collect.py +46 -2
  119. rasa/shared/core/slots.py +28 -0
  120. rasa/shared/exceptions.py +4 -0
  121. rasa/shared/providers/_configs/azure_openai_client_config.py +4 -0
  122. rasa/shared/providers/_configs/openai_client_config.py +4 -0
  123. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +3 -0
  124. rasa/shared/providers/llm/_base_litellm_client.py +5 -2
  125. rasa/shared/utils/llm.py +161 -6
  126. rasa/shared/utils/yaml.py +32 -0
  127. rasa/studio/data_handler.py +3 -3
  128. rasa/studio/download/download.py +37 -60
  129. rasa/studio/download/flows.py +23 -31
  130. rasa/studio/link.py +200 -0
  131. rasa/studio/pull.py +94 -0
  132. rasa/studio/push.py +131 -0
  133. rasa/studio/upload.py +117 -67
  134. rasa/telemetry.py +82 -25
  135. rasa/tracing/config.py +3 -4
  136. rasa/tracing/constants.py +19 -1
  137. rasa/tracing/instrumentation/attribute_extractors.py +10 -2
  138. rasa/tracing/instrumentation/instrumentation.py +53 -2
  139. rasa/tracing/instrumentation/metrics.py +98 -15
  140. rasa/tracing/metric_instrument_provider.py +75 -3
  141. rasa/utils/common.py +1 -27
  142. rasa/utils/log_utils.py +1 -45
  143. rasa/validator.py +2 -8
  144. rasa/version.py +1 -1
  145. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0.dev20250613.dist-info}/METADATA +5 -6
  146. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0.dev20250613.dist-info}/RECORD +149 -135
  147. rasa/anonymization/__init__.py +0 -2
  148. rasa/anonymization/anonymisation_rule_yaml_reader.py +0 -91
  149. rasa/anonymization/anonymization_pipeline.py +0 -286
  150. rasa/anonymization/anonymization_rule_executor.py +0 -266
  151. rasa/anonymization/anonymization_rule_orchestrator.py +0 -119
  152. rasa/anonymization/schemas/config.yml +0 -47
  153. rasa/anonymization/utils.py +0 -118
  154. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0.dev20250613.dist-info}/NOTICE +0 -0
  155. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0.dev20250613.dist-info}/WHEEL +0 -0
  156. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0.dev20250613.dist-info}/entry_points.txt +0 -0
@@ -1,3 +1,5 @@
1
+ from __future__ import annotations
2
+
1
3
  import abc
2
4
  import copy
3
5
  import json
@@ -6,7 +8,7 @@ import re
6
8
  import time
7
9
  import uuid
8
10
  from abc import ABC
9
- from datetime import datetime
11
+ from datetime import datetime, timezone
10
12
  from typing import (
11
13
  TYPE_CHECKING,
12
14
  Any,
@@ -453,6 +455,7 @@ class UserUttered(Event):
453
455
  message_id: Optional[Text] = None,
454
456
  metadata: Optional[Dict] = None,
455
457
  use_text_for_featurization: Optional[bool] = None,
458
+ anonymized_at: Optional[float] = None,
456
459
  ) -> None:
457
460
  """Creates event for incoming user message.
458
461
 
@@ -467,6 +470,7 @@ class UserUttered(Event):
467
470
  message_id: Unique ID for message.
468
471
  use_text_for_featurization: `True` if the message's text was used to predict
469
472
  next action. `False` if the message's intent was used.
473
+ anonymized_at: When the event was anonymized in the tracker store.
470
474
 
471
475
  """
472
476
  self.text = text
@@ -500,7 +504,11 @@ class UserUttered(Event):
500
504
  if parse_data:
501
505
  self.parse_data.update(**parse_data)
502
506
 
503
- self._anonymized_at: Optional[datetime] = None
507
+ self._anonymized_at: Optional[datetime] = (
508
+ datetime.fromtimestamp(anonymized_at, tz=timezone.utc)
509
+ if anonymized_at is not None
510
+ else None
511
+ )
504
512
 
505
513
  @property
506
514
  def anonymized_at(self) -> Optional[datetime]:
@@ -525,6 +533,7 @@ class UserUttered(Event):
525
533
  input_channel: Optional[Text] = None,
526
534
  message_id: Optional[Text] = None,
527
535
  metadata: Optional[Dict] = None,
536
+ anonymized_at: Optional[float] = None,
528
537
  ) -> "UserUttered":
529
538
  return UserUttered(
530
539
  text,
@@ -535,6 +544,7 @@ class UserUttered(Event):
535
544
  input_channel,
536
545
  message_id,
537
546
  metadata,
547
+ anonymized_at,
538
548
  )
539
549
 
540
550
  def __hash__(self) -> int:
@@ -633,7 +643,9 @@ class UserUttered(Event):
633
643
  "input_channel": getattr(self, "input_channel", None),
634
644
  "message_id": getattr(self, "message_id", None),
635
645
  "metadata": self.metadata,
636
- "anonymized_at": self.anonymized_at,
646
+ "anonymized_at": self.anonymized_at.timestamp()
647
+ if self.anonymized_at
648
+ else None,
637
649
  }
638
650
  )
639
651
  return _dict
@@ -693,6 +705,7 @@ class UserUttered(Event):
693
705
  parameters.get("input_channel"),
694
706
  parameters.get("message_id"),
695
707
  parameters.get("metadata"),
708
+ parameters.get("anonymized_at"),
696
709
  )
697
710
  ]
698
711
  except KeyError as e:
@@ -920,6 +933,7 @@ class BotUttered(SkipEventInMDStoryMixin):
920
933
  data: Optional[Dict] = None,
921
934
  metadata: Optional[Dict[Text, Any]] = None,
922
935
  timestamp: Optional[float] = None,
936
+ anonymized_at: Optional[float] = None,
923
937
  ) -> None:
924
938
  """Creates event for a bot response.
925
939
 
@@ -928,10 +942,15 @@ class BotUttered(SkipEventInMDStoryMixin):
928
942
  data: Additional data for more complex utterances (e.g. buttons).
929
943
  timestamp: When the event was created.
930
944
  metadata: Additional event metadata.
945
+ anonymized_at: When the event was anonymized in the tracker store.
931
946
  """
932
947
  self.text = text
933
948
  self.data = data or {}
934
- self._anonymized_at: Optional[datetime] = None
949
+ self._anonymized_at: Optional[datetime] = (
950
+ datetime.fromtimestamp(anonymized_at, tz=timezone.utc)
951
+ if anonymized_at is not None
952
+ else None
953
+ )
935
954
  super().__init__(timestamp, metadata)
936
955
 
937
956
  @property
@@ -1040,7 +1059,9 @@ class BotUttered(SkipEventInMDStoryMixin):
1040
1059
  "text": self.text,
1041
1060
  "data": self.data,
1042
1061
  "metadata": self.metadata,
1043
- "anonymized_at": self.anonymized_at,
1062
+ "anonymized_at": self.anonymized_at.timestamp()
1063
+ if self.anonymized_at
1064
+ else None,
1044
1065
  }
1045
1066
  )
1046
1067
  return d
@@ -1053,6 +1074,7 @@ class BotUttered(SkipEventInMDStoryMixin):
1053
1074
  parameters.get("data"),
1054
1075
  parameters.get("metadata"),
1055
1076
  parameters.get("timestamp"),
1077
+ parameters.get("anonymized_at"),
1056
1078
  )
1057
1079
  except KeyError as e:
1058
1080
  raise ValueError(f"Failed to parse bot uttered event. {e}")
@@ -1077,6 +1099,7 @@ class SlotSet(Event):
1077
1099
  timestamp: Optional[float] = None,
1078
1100
  metadata: Optional[Dict[Text, Any]] = None,
1079
1101
  filled_by: Optional[str] = None,
1102
+ anonymized_at: Optional[float] = None,
1080
1103
  ) -> None:
1081
1104
  """Creates event to set slot.
1082
1105
 
@@ -1089,7 +1112,11 @@ class SlotSet(Event):
1089
1112
  self.key = key
1090
1113
  self.value = value
1091
1114
  self._filled_by = filled_by
1092
- self._anonymized_at: Optional[datetime] = None
1115
+ self._anonymized_at: Optional[datetime] = (
1116
+ datetime.fromtimestamp(anonymized_at, tz=timezone.utc)
1117
+ if anonymized_at is not None
1118
+ else None
1119
+ )
1093
1120
  super().__init__(timestamp, metadata)
1094
1121
 
1095
1122
  @property
@@ -1156,7 +1183,9 @@ class SlotSet(Event):
1156
1183
  "name": self.key,
1157
1184
  "value": self.value,
1158
1185
  "filled_by": self.filled_by,
1159
- "anonymized_at": self.anonymized_at,
1186
+ "anonymized_at": self.anonymized_at.timestamp()
1187
+ if self.anonymized_at
1188
+ else None,
1160
1189
  }
1161
1190
  )
1162
1191
  return d
@@ -1170,6 +1199,7 @@ class SlotSet(Event):
1170
1199
  parameters.get("timestamp"),
1171
1200
  parameters.get("metadata"),
1172
1201
  filled_by=parameters.get("filled_by"),
1202
+ anonymized_at=parameters.get("anonymized_at"),
1173
1203
  )
1174
1204
  except KeyError as e:
1175
1205
  raise ValueError(f"Failed to parse set slot event. {e}")
@@ -406,8 +406,7 @@ class Flow:
406
406
  structlogger.error(
407
407
  "command_generator.validate_flow_starting_conditions.error",
408
408
  predicate=self.guard_condition,
409
- context=context,
410
- slots=slots,
409
+ flow_id=self.id,
411
410
  error=str(e),
412
411
  )
413
412
  return False
@@ -244,6 +244,9 @@
244
244
  }
245
245
  }
246
246
  }
247
+ },
248
+ "silence_timeout": {
249
+ "type": "number"
247
250
  }
248
251
  }
249
252
  }
@@ -1,16 +1,23 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from dataclasses import dataclass
4
- from typing import Any, Dict, List, Set, Text
4
+ from typing import Any, Dict, List, Optional, Set, Text, Union
5
+
6
+ import structlog
5
7
 
6
8
  from rasa.shared.constants import ACTION_ASK_PREFIX, UTTER_ASK_PREFIX
7
9
  from rasa.shared.core.flows.flow_step import FlowStep
8
10
  from rasa.shared.core.slots import SlotRejection
11
+ from rasa.shared.exceptions import RasaException
9
12
 
10
13
  DEFAULT_ASK_BEFORE_FILLING = False
11
14
  DEFAULT_RESET_AFTER_FLOW_ENDS = True
12
15
  DEFAULT_FORCE_SLOT_FILLING = False
13
16
 
17
+ logger = structlog.get_logger(__name__)
18
+
19
+ SilenceTimeoutInstructionType = Union[int, float, Dict[str, Any]]
20
+
14
21
 
15
22
  @dataclass
16
23
  class CollectInformationFlowStep(FlowStep):
@@ -30,6 +37,8 @@ class CollectInformationFlowStep(FlowStep):
30
37
  """Whether to reset the slot value at the end of the flow."""
31
38
  force_slot_filling: bool = False
32
39
  """Whether to keep only the SetSlot command for the collected slot."""
40
+ silence_timeout: Optional[float] = None
41
+ """The silence timeout for the collect information step."""
33
42
 
34
43
  @classmethod
35
44
  def from_json(
@@ -44,6 +53,11 @@ class CollectInformationFlowStep(FlowStep):
44
53
  Returns:
45
54
  A CollectInformationFlowStep object
46
55
  """
56
+
57
+ silence_timeout = cls._deserialise_silence_timeout(
58
+ data.get("silence_timeout", None)
59
+ )
60
+
47
61
  base = super().from_json(flow_id, data)
48
62
  return CollectInformationFlowStep(
49
63
  collect=data["collect"],
@@ -58,14 +72,40 @@ class CollectInformationFlowStep(FlowStep):
58
72
  for rejection in data.get("rejections", [])
59
73
  ],
60
74
  force_slot_filling=data.get("force_slot_filling", False),
75
+ silence_timeout=silence_timeout,
61
76
  **base.__dict__,
62
77
  )
63
78
 
79
+ @staticmethod
80
+ def _deserialise_silence_timeout(
81
+ silence_timeout_json: Optional[SilenceTimeoutInstructionType],
82
+ ) -> Optional[float]:
83
+ """Deserialize silence timeout from JSON."""
84
+ if not silence_timeout_json:
85
+ return None
86
+
87
+ if not isinstance(silence_timeout_json, (int, float)):
88
+ raise RasaException(
89
+ f"Invalid silence timeout value: {silence_timeout_json}. "
90
+ "If defined at collect step, silence timeout must be a number."
91
+ )
92
+
93
+ silence_timeout = silence_timeout_json
94
+
95
+ if silence_timeout and silence_timeout < 0:
96
+ raise RasaException(
97
+ f"Invalid silence timeout value: {silence_timeout}. "
98
+ "Silence timeout must be a non-negative number."
99
+ )
100
+ return silence_timeout
101
+
64
102
  @staticmethod
65
103
  def _default_utter(collect: str) -> str:
66
104
  return f"{UTTER_ASK_PREFIX}{collect}"
67
105
 
68
- def as_json(self) -> Dict[str, Any]: # type: ignore[override]
106
+ def as_json(
107
+ self, step_properties: Optional[Dict[Text, Any]] = None
108
+ ) -> Dict[str, Any]:
69
109
  """Serialize the CollectInformationFlowStep object.
70
110
 
71
111
  Returns:
@@ -78,6 +118,9 @@ class CollectInformationFlowStep(FlowStep):
78
118
  data["reset_after_flow_ends"] = self.reset_after_flow_ends
79
119
  data["rejections"] = [rejection.as_dict() for rejection in self.rejections]
80
120
  data["force_slot_filling"] = self.force_slot_filling
121
+ if self.silence_timeout:
122
+ data["silence_timeout"] = self.silence_timeout
123
+
81
124
  return super().as_json(step_properties=data)
82
125
 
83
126
  @property
@@ -100,6 +143,7 @@ class CollectInformationFlowStep(FlowStep):
100
143
  and self.ask_before_filling == other.ask_before_filling
101
144
  and self.reset_after_flow_ends == other.reset_after_flow_ends
102
145
  and self.force_slot_filling == other.force_slot_filling
146
+ and self.silence_timeout == other.silence_timeout
103
147
  and super().__eq__(other)
104
148
  )
105
149
  return False
rasa/shared/core/slots.py CHANGED
@@ -311,6 +311,34 @@ class Slot(ABC):
311
311
  """Indicates if the slot requires validation."""
312
312
  return True if self.validation else False
313
313
 
314
+ def to_dict(
315
+ self,
316
+ *,
317
+ include_private: bool = False,
318
+ ) -> Dict[str, Any]:
319
+ """Return a dictionary with attributes of this slot instance.
320
+
321
+ Args:
322
+ include_private: If `True`, private attributes are included.
323
+
324
+ Returns:
325
+ A plain `dict` that can be JSON-serialised.
326
+ """
327
+ result: Dict[str, Any] = {}
328
+
329
+ for attr, value in vars(self).items():
330
+ if not include_private and attr.startswith("_"):
331
+ continue
332
+
333
+ if attr == "mappings":
334
+ result[attr] = [mapping.as_dict() for mapping in value]
335
+ continue
336
+
337
+ result[attr] = value
338
+
339
+ result.setdefault("type", self.type_name)
340
+ return result
341
+
314
342
 
315
343
  class FloatSlot(Slot):
316
344
  """A slot storing a float value."""
rasa/shared/exceptions.py CHANGED
@@ -165,3 +165,7 @@ class ProviderClientAPIException(RasaException):
165
165
 
166
166
  class ProviderClientValidationError(RasaException):
167
167
  """Raised for errors that occur during validation of the API client."""
168
+
169
+
170
+ class FinetuningDataPreparationException(RasaException):
171
+ """Raised when there is an error in data preparation for fine-tuning."""
@@ -23,6 +23,8 @@ from rasa.shared.constants import (
23
23
  DEPLOYMENT_NAME_CONFIG_KEY,
24
24
  ENGINE_CONFIG_KEY,
25
25
  LANGCHAIN_TYPE_CONFIG_KEY,
26
+ MAX_COMPLETION_TOKENS_CONFIG_KEY,
27
+ MAX_TOKENS_CONFIG_KEY,
26
28
  MODEL_CONFIG_KEY,
27
29
  MODEL_NAME_CONFIG_KEY,
28
30
  N_REPHRASES_CONFIG_KEY,
@@ -71,6 +73,8 @@ DEPRECATED_ALIASES_TO_STANDARD_KEY_MAPPING = {
71
73
  MODEL_NAME_CONFIG_KEY: MODEL_CONFIG_KEY,
72
74
  # Timeout aliases
73
75
  REQUEST_TIMEOUT_CONFIG_KEY: TIMEOUT_CONFIG_KEY,
76
+ # Max tokens aliases
77
+ MAX_TOKENS_CONFIG_KEY: MAX_COMPLETION_TOKENS_CONFIG_KEY,
74
78
  }
75
79
 
76
80
  REQUIRED_KEYS = [DEPLOYMENT_CONFIG_KEY]
@@ -10,6 +10,8 @@ from rasa.shared.constants import (
10
10
  API_TYPE_CONFIG_KEY,
11
11
  API_VERSION_CONFIG_KEY,
12
12
  LANGCHAIN_TYPE_CONFIG_KEY,
13
+ MAX_COMPLETION_TOKENS_CONFIG_KEY,
14
+ MAX_TOKENS_CONFIG_KEY,
13
15
  MODEL_CONFIG_KEY,
14
16
  MODEL_NAME_CONFIG_KEY,
15
17
  N_REPHRASES_CONFIG_KEY,
@@ -48,6 +50,8 @@ DEPRECATED_ALIASES_TO_STANDARD_KEY_MAPPING = {
48
50
  OPENAI_API_VERSION_CONFIG_KEY: API_VERSION_CONFIG_KEY,
49
51
  # Timeout aliases
50
52
  REQUEST_TIMEOUT_CONFIG_KEY: TIMEOUT_CONFIG_KEY,
53
+ # Max tokens aliases
54
+ MAX_TOKENS_CONFIG_KEY: MAX_COMPLETION_TOKENS_CONFIG_KEY,
51
55
  }
52
56
 
53
57
  REQUIRED_KEYS = [MODEL_CONFIG_KEY]
@@ -70,7 +70,10 @@ class _BaseLiteLLMEmbeddingClient:
70
70
  def _embedding_fn_args(self) -> Dict[str, Any]:
71
71
  """Returns the arguments to be passed to the embedding function."""
72
72
  return {
73
+ # Parameters set through config, can override drop_params
73
74
  **self._litellm_extra_parameters,
75
+ # Model name is constructed in the LiteLLM format from the provided config
76
+ # Non-overridable to ensure consistency
74
77
  "model": self._litellm_model_name,
75
78
  }
76
79
 
@@ -84,12 +84,15 @@ class _BaseLiteLLMClient:
84
84
  @property
85
85
  def _completion_fn_args(self) -> dict:
86
86
  return {
87
- **self._litellm_extra_parameters,
88
- "model": self._litellm_model_name,
89
87
  # Since all providers covered by LiteLLM use the OpenAI format, but
90
88
  # not all support every OpenAI parameter, raise an exception if
91
89
  # provider/model uses unsupported parameter
92
90
  "drop_params": False,
91
+ # All other parameters set through config, can override drop_params
92
+ **self._litellm_extra_parameters,
93
+ # Model name is constructed in the LiteLLM format from the provided config
94
+ # Non-overridable to ensure consistency
95
+ "model": self._litellm_model_name,
93
96
  }
94
97
 
95
98
  def validate_client_setup(self) -> None:
rasa/shared/utils/llm.py CHANGED
@@ -1,13 +1,17 @@
1
+ from __future__ import annotations
2
+
1
3
  import importlib.resources
2
4
  import json
3
5
  import logging
4
6
  from copy import deepcopy
7
+ from datetime import datetime
5
8
  from functools import wraps
6
9
  from typing import (
7
10
  TYPE_CHECKING,
8
11
  Any,
9
12
  Callable,
10
13
  Dict,
14
+ List,
11
15
  Literal,
12
16
  Optional,
13
17
  Text,
@@ -18,14 +22,20 @@ from typing import (
18
22
  )
19
23
 
20
24
  import structlog
25
+ from pydantic import BaseModel, Field
21
26
 
22
27
  import rasa.shared.utils.io
23
- from rasa.core.utils import AvailableEndpoints
28
+ from rasa.core.available_endpoints import AvailableEndpoints
24
29
  from rasa.shared.constants import (
30
+ CONFIG_NAME_KEY,
31
+ CONFIG_PIPELINE_KEY,
32
+ CONFIG_POLICIES_KEY,
25
33
  DEFAULT_PROMPT_PACKAGE_NAME,
34
+ LLM_CONFIG_KEY,
26
35
  MODEL_CONFIG_KEY,
27
36
  MODEL_GROUP_CONFIG_KEY,
28
37
  MODEL_GROUP_ID_CONFIG_KEY,
38
+ MODEL_GROUPS_CONFIG_KEY,
29
39
  MODELS_CONFIG_KEY,
30
40
  PROVIDER_CONFIG_KEY,
31
41
  RASA_PATTERN_INTERNAL_ERROR_USER_INPUT_EMPTY,
@@ -61,9 +71,11 @@ from rasa.shared.providers.mappings import (
61
71
  get_embedding_client_from_provider,
62
72
  get_llm_client_from_provider,
63
73
  )
74
+ from rasa.shared.utils.common import all_subclasses
64
75
  from rasa.shared.utils.constants import LOG_COMPONENT_SOURCE_METHOD_INIT
65
76
 
66
77
  if TYPE_CHECKING:
78
+ from rasa.core.agent import Agent
67
79
  from rasa.shared.core.trackers import DialogueStateTracker
68
80
 
69
81
 
@@ -107,6 +119,18 @@ _CombineConfigs_F = TypeVar(
107
119
  )
108
120
 
109
121
 
122
+ class SystemPrompts(BaseModel):
123
+ command_generator: str = Field(
124
+ ..., description="Prompt used by the LLM command generator."
125
+ )
126
+ enterprise_search: str = Field(
127
+ ..., description="Prompt for standard enterprise search requests."
128
+ )
129
+ contextual_response_rephraser: str = Field(
130
+ ..., description="Prompt used for re-phrasing assistant responses."
131
+ )
132
+
133
+
110
134
  def _compute_hash_for_cache_from_configs(
111
135
  config_x: Dict[str, Any], config_y: Dict[str, Any]
112
136
  ) -> int:
@@ -191,6 +215,7 @@ def tracker_as_readable_transcript(
191
215
  human_prefix: str = USER,
192
216
  ai_prefix: str = AI,
193
217
  max_turns: Optional[int] = 20,
218
+ turns_wrapper: Optional[Callable[[List[str]], List[str]]] = None,
194
219
  ) -> str:
195
220
  """Creates a readable dialogue from a tracker.
196
221
 
@@ -199,6 +224,7 @@ def tracker_as_readable_transcript(
199
224
  human_prefix: the prefix to use for human utterances
200
225
  ai_prefix: the prefix to use for ai utterances
201
226
  max_turns: the maximum number of turns to include in the transcript
227
+ turns_wrapper: optional function to wrap the turns in a custom way
202
228
 
203
229
  Example:
204
230
  >>> tracker = Tracker(
@@ -235,8 +261,11 @@ def tracker_as_readable_transcript(
235
261
  elif isinstance(event, BotUttered):
236
262
  transcript.append(f"{ai_prefix}: {sanitize_message_for_prompt(event.text)}")
237
263
 
238
- if max_turns:
239
- transcript = transcript[-max_turns:]
264
+ # turns_wrapper to count multiple utterances by bot/user as single turn
265
+ if turns_wrapper:
266
+ transcript = turns_wrapper(transcript)
267
+ # otherwise, just take the last `max_turns` lines of the transcript
268
+ transcript = transcript[-max_turns if max_turns is not None else None :]
240
269
 
241
270
  return "\n".join(transcript)
242
271
 
@@ -678,7 +707,6 @@ def get_prompt_template(
678
707
  Returns:
679
708
  The prompt template.
680
709
  """
681
-
682
710
  try:
683
711
  if jinja_file_path is not None:
684
712
  prompt_template = rasa.shared.utils.io.read_file(jinja_file_path)
@@ -814,7 +842,9 @@ def allowed_values_for_slot(slot: Slot) -> Union[str, None]:
814
842
 
815
843
 
816
844
  def resolve_model_client_config(
817
- model_config: Optional[Dict[str, Any]], component_name: Optional[str] = None
845
+ model_config: Optional[Dict[str, Any]],
846
+ component_name: Optional[str] = None,
847
+ model_groups: Optional[List[Dict[str, Any]]] = None,
818
848
  ) -> Optional[Dict[str, Any]]:
819
849
  """Resolve the model group in the model config.
820
850
 
@@ -828,6 +858,7 @@ def resolve_model_client_config(
828
858
  model_config: The model config to be resolved.
829
859
  component_name: The name of the component.
830
860
  component_name: The method of the component.
861
+ model_groups: Model groups from endpoints.yml.
831
862
 
832
863
  Returns:
833
864
  The resolved llm config.
@@ -854,7 +885,12 @@ def resolve_model_client_config(
854
885
 
855
886
  model_group_id = model_config.get(MODEL_GROUP_CONFIG_KEY)
856
887
 
857
- endpoints = AvailableEndpoints.get_instance()
888
+ # If `model_groups` is provided, use it to initialise `AvailableEndpoints`,
889
+ # since `get_instance()` reads from the local endpoints file instead.
890
+ if model_groups:
891
+ endpoints = AvailableEndpoints(model_groups=model_groups)
892
+ else:
893
+ endpoints = AvailableEndpoints.get_instance()
858
894
  if endpoints.model_groups is None:
859
895
  _raise_invalid_config_exception(
860
896
  reason=(
@@ -886,3 +922,122 @@ def resolve_model_client_config(
886
922
  )
887
923
 
888
924
  return model_group[0]
925
+
926
+
927
+ def generate_sender_id(test_case_name: str) -> str:
928
+ # add timestamp suffix to ensure sender_id is unique
929
+ return f"{test_case_name}_{datetime.now()}"
930
+
931
+
932
+ async def create_tracker_for_user_step(
933
+ step_sender_id: str,
934
+ agent: "Agent",
935
+ test_case_tracker: "DialogueStateTracker",
936
+ index_user_uttered_event: int,
937
+ ) -> None:
938
+ """Creates a tracker for the user step."""
939
+ tracker = test_case_tracker.copy()
940
+ # modify the sender id so that the original tracker is not overwritten
941
+ tracker.sender_id = step_sender_id
942
+
943
+ if tracker.events:
944
+ # get the timestamp of the event just before the user uttered event
945
+ timestamp = tracker.events[index_user_uttered_event - 1].timestamp
946
+ # revert the tracker to the event just before the user uttered event
947
+ tracker = tracker.travel_back_in_time(timestamp)
948
+
949
+ # store the tracker with the unique sender id
950
+ await agent.tracker_store.save(tracker)
951
+
952
+
953
+ def _get_llm_command_generator_config(
954
+ config: Dict[Text, Any],
955
+ ) -> Optional[Dict[Text, Any]]:
956
+ """Get the llm command generator config from config.yml.
957
+
958
+ Args:
959
+ config: The config.yml file data.
960
+
961
+ Returns:
962
+ The llm command generator config.
963
+ """
964
+ from rasa.dialogue_understanding.generator import LLMBasedCommandGenerator
965
+
966
+ # Collect all LLM based Command Generator class names.
967
+ command_generator_subclasses = all_subclasses(LLMBasedCommandGenerator)
968
+ command_generator_class_names = [
969
+ command_generator.__name__ for command_generator in command_generator_subclasses
970
+ ]
971
+
972
+ # Read the LLM config of the Command Generator from the config.yml file.
973
+ pipelines = config.get(CONFIG_PIPELINE_KEY, [])
974
+ for pipeline in pipelines:
975
+ if pipeline.get(CONFIG_NAME_KEY) in command_generator_class_names:
976
+ return pipeline.get(LLM_CONFIG_KEY)
977
+
978
+ return None
979
+
980
+
981
+ def _get_command_generator_prompt(
982
+ config: Dict[Text, Any], endpoints: Dict[Text, Any]
983
+ ) -> Text:
984
+ """Get the command generator prompt based on the config."""
985
+ from rasa.dialogue_understanding.generator.single_step.compact_llm_command_generator import ( # noqa: E501
986
+ DEFAULT_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
987
+ FALLBACK_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
988
+ MODEL_PROMPT_MAPPER,
989
+ )
990
+
991
+ model_config = _get_llm_command_generator_config(config)
992
+ llm_config = resolve_model_client_config(
993
+ model_config=model_config,
994
+ model_groups=endpoints.get(MODEL_GROUPS_CONFIG_KEY),
995
+ )
996
+ return get_default_prompt_template_based_on_model(
997
+ llm_config=llm_config,
998
+ model_prompt_mapping=MODEL_PROMPT_MAPPER,
999
+ default_prompt_path=DEFAULT_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
1000
+ fallback_prompt_path=FALLBACK_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
1001
+ )
1002
+
1003
+
1004
+ def _get_enterprise_search_prompt(config: Dict[Text, Any]) -> Text:
1005
+ """Get the enterprise search prompt based on the config."""
1006
+ from rasa.core.policies.enterprise_search_policy import EnterpriseSearchPolicy
1007
+
1008
+ def get_enterprise_search_config() -> Dict[Text, Any]:
1009
+ policies = config.get(CONFIG_POLICIES_KEY, [])
1010
+ for policy in policies:
1011
+ if policy.get(CONFIG_NAME_KEY) == EnterpriseSearchPolicy.__name__:
1012
+ return policy
1013
+
1014
+ return {}
1015
+
1016
+ enterprise_search_config = get_enterprise_search_config()
1017
+ return EnterpriseSearchPolicy.get_system_default_prompt_based_on_config(
1018
+ enterprise_search_config
1019
+ )
1020
+
1021
+
1022
+ def get_system_default_prompts(
1023
+ config: Dict[Text, Any], endpoints: Dict[Text, Any]
1024
+ ) -> SystemPrompts:
1025
+ """
1026
+ Returns the system default prompts for the component.
1027
+
1028
+ Args:
1029
+ config: The config.yml file data.
1030
+ endpoints: The endpoints.yml file data.
1031
+
1032
+ Returns:
1033
+ SystemPrompts: A Pydantic model containing all default prompts.
1034
+ """
1035
+ from rasa.core.nlg.contextual_response_rephraser import (
1036
+ DEFAULT_RESPONSE_VARIATION_PROMPT_TEMPLATE,
1037
+ )
1038
+
1039
+ return SystemPrompts(
1040
+ command_generator=_get_command_generator_prompt(config, endpoints),
1041
+ enterprise_search=_get_enterprise_search_prompt(config),
1042
+ contextual_response_rephraser=DEFAULT_RESPONSE_VARIATION_PROMPT_TEMPLATE,
1043
+ )