rasa-pro 3.11.3a1.dev5__py3-none-any.whl → 3.11.3a1.dev7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rasa-pro might be problematic.
Files changed (28)
  1. rasa/core/actions/action.py +16 -9
  2. rasa/core/channels/__init__.py +2 -0
  3. rasa/core/channels/development_inspector.py +0 -3
  4. rasa/core/channels/socketio.py +1 -62
  5. rasa/core/channels/studio_chat.py +192 -0
  6. rasa/core/nlg/contextual_response_rephraser.py +13 -19
  7. rasa/core/policies/enterprise_search_policy.py +14 -17
  8. rasa/core/processor.py +11 -8
  9. rasa/dialogue_understanding/constants.py +1 -0
  10. rasa/dialogue_understanding/generator/command_generator.py +34 -48
  11. rasa/dialogue_understanding/generator/llm_based_command_generator.py +1 -3
  12. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +7 -3
  13. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +3 -1
  14. rasa/dialogue_understanding/utils.py +8 -1
  15. rasa/hooks.py +14 -9
  16. rasa/model_manager/runner_service.py +0 -1
  17. rasa/model_manager/socket_bridge.py +0 -1
  18. rasa/model_manager/warm_rasa_process.py +9 -4
  19. rasa/plugin.py +11 -0
  20. rasa/shared/constants.py +9 -0
  21. rasa/shared/nlu/constants.py +2 -0
  22. rasa/shared/providers/llm/llm_response.py +14 -2
  23. rasa/version.py +1 -1
  24. {rasa_pro-3.11.3a1.dev5.dist-info → rasa_pro-3.11.3a1.dev7.dist-info}/METADATA +1 -1
  25. {rasa_pro-3.11.3a1.dev5.dist-info → rasa_pro-3.11.3a1.dev7.dist-info}/RECORD +28 -26
  26. {rasa_pro-3.11.3a1.dev5.dist-info → rasa_pro-3.11.3a1.dev7.dist-info}/NOTICE +0 -0
  27. {rasa_pro-3.11.3a1.dev5.dist-info → rasa_pro-3.11.3a1.dev7.dist-info}/WHEEL +0 -0
  28. {rasa_pro-3.11.3a1.dev5.dist-info → rasa_pro-3.11.3a1.dev7.dist-info}/entry_points.txt +0 -0
rasa/core/actions/action.py CHANGED
@@ -17,6 +17,15 @@ from jsonschema import Draft202012Validator
 
  import rasa.core
  import rasa.shared.utils.io
+ from rasa.shared.constants import (
+     TEXT,
+     ELEMENTS,
+     QUICK_REPLIES,
+     BUTTONS,
+     ATTACHMENT,
+     IMAGE,
+     CUSTOM,
+ )
  from rasa.core.actions.custom_action_executor import (
      CustomActionExecutor,
      NoEndpointCustomActionExecutor,
@@ -93,7 +102,6 @@ from rasa.shared.nlu.constants import (
      INTENT_NAME_KEY,
      INTENT_RANKING_KEY,
  )
- from rasa.shared.nlu.constants import PROMPTS
  from rasa.shared.utils.io import raise_warning
  from rasa.shared.utils.schemas.events import EVENTS_SCHEMA
  from rasa.utils.endpoints import ClientResponseError, EndpointConfig
@@ -256,19 +264,18 @@ def action_for_name_or_text(
  def create_bot_utterance(message: Dict[Text, Any]) -> BotUttered:
      """Create BotUttered event from message."""
      bot_message = BotUttered(
-         text=message.pop("text", None),
+         text=message.pop(TEXT, None),
          data={
-             "elements": message.pop("elements", None),
-             "quick_replies": message.pop("quick_replies", None),
-             "buttons": message.pop("buttons", None),
+             ELEMENTS: message.pop(ELEMENTS, None),
+             QUICK_REPLIES: message.pop(QUICK_REPLIES, None),
+             BUTTONS: message.pop(BUTTONS, None),
              # for legacy / compatibility reasons we need to set the image
              # to be the attachment if there is no other attachment (the
              # `.get` is intentional - no `pop` as we still need the image`
              # property to set it in the following line)
-             "attachment": message.pop("attachment", None) or message.get("image", None),
-             "image": message.pop("image", None),
-             "custom": message.pop("custom", None),
-             PROMPTS: message.pop(PROMPTS, None),
+             ATTACHMENT: message.pop(ATTACHMENT, None) or message.get(IMAGE, None),
+             IMAGE: message.pop(IMAGE, None),
+             CUSTOM: message.pop(CUSTOM, None),
          },
          metadata=message,
      )
rasa/core/channels/__init__.py CHANGED
@@ -31,6 +31,7 @@ from rasa.core.channels.vier_cvg import CVGInput
  from rasa.core.channels.voice_stream.twilio_media_streams import (
      TwilioMediaStreamsInputChannel,
  )
+ from rasa.core.channels.studio_chat import StudioChatInput
 
  input_channel_classes: List[Type[InputChannel]] = [
      CmdlineInput,
@@ -53,6 +54,7 @@ input_channel_classes: List[Type[InputChannel]] = [
      JambonzVoiceReadyInput,
      TwilioMediaStreamsInputChannel,
      BrowserAudioInputChannel,
+     StudioChatInput,
  ]
 
  # Mapping from an input channel name to its class to allow name based lookup.
rasa/core/channels/development_inspector.py CHANGED
@@ -88,9 +88,6 @@ class DevelopmentInspectProxy(InputChannel):
          """Called when a tracker has been updated."""
          if self.tracker_stream:
              tracker_dump = await self.get_tracker_state(sender_id)
-             # check if the underlying channel has an on_new_tracker_dump hook
-             if hasattr(self.underlying, "on_new_tracker_dump"):
-                 await self.underlying.on_new_tracker_dump(sender_id, tracker_dump)
              await self.tracker_stream.broadcast(tracker_dump)
 
      async def on_message_proxy(
rasa/core/channels/socketio.py CHANGED
@@ -1,4 +1,3 @@
- import asyncio
  import inspect
  import logging
  import uuid
@@ -7,7 +6,6 @@ from typing import Any, Awaitable, Callable, Dict, Iterable, List, Optional, Tex
 
  import rasa.core.channels.channel
  from rasa.core.channels.channel import InputChannel, OutputChannel, UserMessage
- from rasa.shared.core.trackers import EventVerbosity
  import rasa.shared.utils.io
  from sanic import Blueprint, response, Sanic
  from sanic.request import Request
@@ -56,31 +54,9 @@ class SocketIOOutput(OutputChannel):
          super().__init__()
          self.sio = sio
          self.bot_message_evt = bot_message_evt
-         self.last_event_timestamp = (
-             -1
-         )  # Initialize with -1 to send all events on first message
-
-     def _get_new_events(self) -> List[Dict[Text, Any]]:
-         """Get events that are newer than the last sent event."""
-         events = self.tracker_state.get("events", []) if self.tracker_state else []
-         new_events = [
-             event for event in events if event["timestamp"] > self.last_event_timestamp
-         ]
-         if new_events:
-             self.last_event_timestamp = new_events[-1]["timestamp"]
-         return new_events
 
      async def _send_message(self, socket_id: Text, response: Any) -> None:
          """Sends a message to the recipient using the bot event."""
-         # send tracker state (contains stack, slots and more)
-         await self.sio.emit("tracker_state", self.tracker_state, room=socket_id)
-
-         # send new events
-         new_events = self._get_new_events()
-         if new_events:
-             await self.sio.emit("rasa_events", new_events, room=socket_id)
-
-         # send bot response
          await self.sio.emit(self.bot_message_evt, response, room=socket_id)
 
      async def send_text_message(
@@ -190,8 +166,6 @@ class SocketIOInput(InputChannel):
          metadata_key: Optional[Text] = "metadata",
      ):
          """Creates a ``SocketIOInput`` object."""
-         from rasa.core.agent import Agent
-
          self.bot_message_evt = bot_message_evt
          self.session_persistence = session_persistence
          self.user_message_evt = user_message_evt
@@ -203,8 +177,6 @@ class SocketIOInput(InputChannel):
          self.jwt_key = jwt_key
          self.jwt_algorithm = jwt_method
 
-         self.agent: Optional[Agent] = None
-
      def get_output_channel(self) -> Optional["OutputChannel"]:
          """Creates socket.io output channel object."""
          if self.sio is None:
@@ -218,13 +190,9 @@ class SocketIOInput(InputChannel):
              return None
          return SocketIOOutput(self.sio, self.bot_message_evt)
 
-     async def on_new_tracker_dump(self, sender_id: str, tracker_dump: str) -> None:
-         if self.sio:
-             await self.sio.emit("tracker", tracker_dump, room=sender_id)
-
      def blueprint(
          self, on_new_message: Callable[[UserMessage], Awaitable[Any]]
-     ) -> Blueprint:
+     ) -> SocketBlueprint:
          """Defines a Sanic blueprint."""
          # Workaround so that socketio works with requests from other origins.
          # https://github.com/miguelgrinberg/python-socketio/issues/205#issuecomment-493769183
@@ -233,11 +201,6 @@ class SocketIOInput(InputChannel):
              sio, self.socketio_path, "socketio_webhook", __name__
          )
 
-         @socketio_webhook.listener("after_server_start")  # type: ignore[misc]
-         async def after_server_start(app: Sanic, _: asyncio.AbstractEventLoop) -> None:
-             """Prints a message after the server has started with inspect URL."""
-             self.agent = app.ctx.agent
-
          # make sio object static to use in get_output_channel
          self.sio = sio
 
@@ -283,30 +246,6 @@ class SocketIOInput(InputChannel):
              await sio.emit("session_confirm", data["session_id"], room=sid)
              logger.debug(f"User {sid} connected to socketIO endpoint.")
 
-         @sio.on("tracker", namespace=self.namespace)
-         async def handle_tracker(sid: Text, data: Dict) -> None:
-             from rasa.shared.core.trackers import DialogueStateTracker
-
-             if self.agent is None:
-                 raise ValueError("Agent is not initialized")
-
-             async with self.agent.lock_store.lock(data["sender_id"]):
-                 tracker = DialogueStateTracker.from_dict(
-                     data["sender_id"], data["events"], self.agent.domain.slots
-                 )
-
-                 # will override an existing tracker with the same id!
-                 await self.agent.tracker_store.save(tracker)
-
-             # TODO: rather figure out how to trigger the on_tracker_updated
-             # of the development inspector channel
-             state = tracker.current_state(EventVerbosity.AFTER_RESTART)
-             await sio.emit(
-                 "tracker",
-                 json.dumps(state),
-                 room=sid,
-             )
-
          @sio.on(self.user_message_evt, namespace=self.namespace)
          async def handle_message(sid: Text, data: Dict) -> None:
              output_channel = SocketIOOutput(sio, self.bot_message_evt)
rasa/core/channels/studio_chat.py ADDED
@@ -0,0 +1,192 @@
+ import asyncio
+ from functools import partial
+ import json
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     Awaitable,
+     Callable,
+     Dict,
+     List,
+     Optional,
+     Text,
+ )
+ import structlog
+
+ from rasa.core.channels.socketio import SocketBlueprint, SocketIOInput
+ from rasa.shared.core.trackers import EventVerbosity
+ from sanic import Sanic
+ from rasa.plugin import plugin_manager
+ from rasa.hooks import hookimpl
+
+ if TYPE_CHECKING:
+     from rasa.core.channels.channel import UserMessage
+     from rasa.shared.core.trackers import DialogueStateTracker
+
+
+ structlogger = structlog.get_logger()
+
+
+ def tracker_as_dump(tracker: "DialogueStateTracker") -> str:
+     """Create a dump of the tracker state."""
+     from rasa.shared.core.trackers import get_trackers_for_conversation_sessions
+
+     multiple_tracker_sessions = get_trackers_for_conversation_sessions(tracker)
+
+     if 0 <= len(multiple_tracker_sessions) <= 1:
+         last_tracker = tracker
+     else:
+         last_tracker = multiple_tracker_sessions[-1]
+
+     state = last_tracker.current_state(EventVerbosity.AFTER_RESTART)
+     return json.dumps(state)
+
+
+ class StudioTrackerUpdatePlugin:
+     """Plugin for publishing tracker updates a socketio channel."""
+
+     def __init__(self, socket_channel: "StudioChatInput") -> None:
+         self.socket_channel = socket_channel
+         self.tasks: List[asyncio.Task] = []
+
+     def _cancel_tasks(self) -> None:
+         """Cancel all remaining tasks."""
+         for task in self.tasks:
+             task.cancel()
+         self.tasks = []
+
+     def _cleanup_tasks(self) -> None:
+         """Remove tasks that have already completed."""
+         self.tasks = [task for task in self.tasks if not task.done()]
+
+     @hookimpl  # type: ignore[misc]
+     def after_new_user_message(self, tracker: "DialogueStateTracker") -> None:
+         """Triggers a tracker update notification after a new user message."""
+         self.handle_tracker_update(tracker)
+
+     @hookimpl  # type: ignore[misc]
+     def after_action_executed(self, tracker: "DialogueStateTracker") -> None:
+         """Triggers a tracker update notification after an action is executed."""
+         self.handle_tracker_update(tracker)
+
+     def handle_tracker_update(self, tracker: "DialogueStateTracker") -> None:
+         """Handles a tracker update when triggered by a hook."""
+         structlogger.info("studio_chat.after_tracker_update", tracker=tracker)
+         # directly create a dump to avoid the tracker getting modified by another
+         # function before it gets published (since the publishing is scheduled
+         # as an async task)
+         tracker_dump = tracker_as_dump(tracker)
+         task = asyncio.create_task(
+             self.socket_channel.publish_tracker_update(tracker.sender_id, tracker_dump)
+         )
+         self.tasks.append(task)
+         self._cleanup_tasks()
+
+     @hookimpl  # type: ignore[misc]
+     def after_server_stop(self) -> None:
+         """Cancels all remaining tasks when the server stops."""
+         self._cancel_tasks()
+
+
+ class StudioChatInput(SocketIOInput):
+     """Input channel for the communication between Rasa Studio and Rasa Pro."""
+
+     @classmethod
+     def name(cls) -> Text:
+         return "studio_chat"
+
+     def __init__(
+         self,
+         *args: Any,
+         **kwargs: Any,
+     ) -> None:
+         """Creates a ``SocketIOInput`` object."""
+         from rasa.core.agent import Agent
+
+         super().__init__(*args, **kwargs)
+         self.agent: Optional[Agent] = None
+
+         self._register_tracker_update_hook()
+
+     def _register_tracker_update_hook(self) -> None:
+         plugin_manager().register(StudioTrackerUpdatePlugin(self))
+
+     async def on_tracker_updated(self, tracker: "DialogueStateTracker") -> None:
+         """Triggers a tracker update notification after a change to the tracker."""
+         await self.publish_tracker_update(tracker.sender_id, tracker_as_dump(tracker))
+
+     async def publish_tracker_update(self, sender_id: str, tracker_dump: Dict) -> None:
+         """Publishes a tracker update notification to the websocket."""
+         if not self.sio:
+             structlogger.error("studio_chat.on_tracker_updated.sio_not_initialized")
+             return
+         await self.sio.emit("tracker", tracker_dump, room=sender_id)
+
+     async def on_message_proxy(
+         self,
+         on_new_message: Callable[["UserMessage"], Awaitable[Any]],
+         message: "UserMessage",
+     ) -> None:
+         """Proxies the on_new_message call to the underlying channel.
+
+         Triggers a tracker update notification after processing the message.
+         """
+         await on_new_message(message)
+
+         if not self.agent:
+             structlogger.error("studio_chat.on_message_proxy.agent_not_initialized")
+             return
+
+         tracker = await self.agent.tracker_store.retrieve(message.sender_id)
+         if tracker is None:
+             structlogger.error("studio_chat.on_message_proxy.tracker_not_found")
+             return
+
+         await self.on_tracker_updated(tracker)
+
+     async def handle_tracker_update(self, sid: str, data: Dict) -> None:
+         from rasa.shared.core.trackers import DialogueStateTracker
+
+         structlogger.debug(
+             "studio_chat.sio.handle_tracker_update",
+             sid=sid,
+             sender_id=data["sender_id"],
+         )
+         if self.agent is None:
+             structlogger.error("studio_chat.sio.agent_not_initialized")
+             return None
+
+         if not (domain := self.agent.domain):
+             structlogger.error("studio_chat.sio.domain_not_initialized")
+             return None
+
+         async with self.agent.lock_store.lock(data["sender_id"]):
+             tracker = DialogueStateTracker.from_dict(
+                 data["sender_id"], data["events"], domain.slots
+             )
+
+             # will override an existing tracker with the same id!
+             await self.agent.tracker_store.save(tracker)
+
+         await self.on_tracker_updated(tracker)
+
+     def blueprint(
+         self, on_new_message: Callable[["UserMessage"], Awaitable[Any]]
+     ) -> SocketBlueprint:
+         socket_blueprint = super().blueprint(
+             partial(self.on_message_proxy, on_new_message)
+         )
+
+         if not self.sio:
+             structlogger.error("studio_chat.blueprint.sio_not_initialized")
+             return socket_blueprint
+
+         @socket_blueprint.listener("after_server_start")  # type: ignore[misc]
+         async def after_server_start(app: Sanic, _: asyncio.AbstractEventLoop) -> None:
+             self.agent = app.ctx.agent
+
+         @self.sio.on("update_tracker", namespace=self.namespace)
+         async def on_update_tracker(sid: Text, data: Dict) -> None:
+             await self.handle_tracker_update(sid, data)
+
+         return socket_blueprint
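
The new studio_chat channel reuses the Socket.IO transport of SocketIOInput, but additionally pushes a full tracker dump over the "tracker" event after every user message and executed action, and accepts edited event lists over "update_tracker". A minimal client sketch follows; the host, port, socketio path and the default namespace are assumptions for a local development setup and are not part of this diff:

    import asyncio
    import socketio  # python-socketio client

    async def main() -> None:
        sio = socketio.AsyncClient()

        @sio.on("tracker")
        async def on_tracker(tracker_dump):
            # emitted by StudioChatInput.publish_tracker_update after each turn
            print("received tracker update")

        await sio.connect("http://localhost:5005", socketio_path="/socket.io")
        # hand an edited event list back to the bot; the channel saves it and
        # re-emits the resulting tracker state on "tracker"
        await sio.emit("update_tracker", {"sender_id": "demo-user", "events": []})
        await asyncio.sleep(5)
        await sio.disconnect()

    asyncio.run(main())
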
rasa/core/nlg/contextual_response_rephraser.py CHANGED
@@ -6,7 +6,6 @@ from jinja2 import Template
  from rasa import telemetry
  from rasa.core.nlg.response import TemplatedNaturalLanguageGenerator
  from rasa.core.nlg.summarize import summarize_conversation
- from rasa.dialogue_understanding.utils import record_commands_and_prompts
  from rasa.shared.constants import (
      LLM_CONFIG_KEY,
      MODEL_CONFIG_KEY,
@@ -24,6 +23,8 @@ from rasa.shared.nlu.constants import (
      PROMPTS,
      KEY_USER_PROMPT,
      KEY_LLM_RESPONSE_METADATA,
+     KEY_PROMPT_NAME,
+     KEY_COMPONENT_NAME,
  )
  from rasa.shared.providers.llm.llm_response import LLMResponse
  from rasa.shared.utils.health_check.llm_health_check_mixin import LLMHealthCheckMixin
@@ -148,26 +149,21 @@ class ContextualResponseRephraser(
              user_prompt: The user prompt that was sent to the LLM.
              llm_response: The response object from the LLM (None if no response).
          """
+         from rasa.dialogue_understanding.utils import record_commands_and_prompts
+
          if not record_commands_and_prompts:
              return response
 
          prompt_data: Dict[Text, Any] = {
+             KEY_COMPONENT_NAME: cls.__name__,
+             KEY_PROMPT_NAME: prompt_name,
              KEY_USER_PROMPT: user_prompt,
+             KEY_LLM_RESPONSE_METADATA: llm_response.to_dict() if llm_response else None,
          }
 
-         if llm_response is not None:
-             prompt_data[KEY_LLM_RESPONSE_METADATA] = llm_response.to_dict()
-
-         prompt_tuple = (prompt_name, prompt_data)
-
-         component_name = cls.__name__
-         existing_prompts = response.get(PROMPTS, {})
-         if component_name in existing_prompts:
-             existing_prompts[component_name].append(prompt_tuple)
-         else:
-             existing_prompts[component_name] = [prompt_tuple]
-
-         response[PROMPTS] = existing_prompts
+         prompts = response.get(PROMPTS, [])
+         prompts.append(prompt_data)
+         response[PROMPTS] = prompts
          return response
 
      def _last_message_if_human(self, tracker: DialogueStateTracker) -> Optional[str]:
@@ -202,10 +198,7 @@ class ContextualResponseRephraser(
          llm = llm_factory(self.llm_config, DEFAULT_LLM_CONFIG)
 
          try:
-             raw_response = await llm.acompletion(prompt)
-             response_dict = raw_response.to_dict()
-             return LLMResponse.from_dict(response_dict)
-
+             return await llm.acompletion(prompt)
          except Exception as e:
              # unfortunately, langchain does not wrap LLM exceptions which means
              # we have to catch all exceptions here
@@ -306,6 +299,7 @@ class ContextualResponseRephraser(
              llm_model_group_id=self.llm_property(MODEL_GROUP_ID_CONFIG_KEY),
          )
          llm_response = await self._generate_llm_response(prompt)
+         llm_response = LLMResponse.ensure_llm_response(llm_response)
 
          response = self._add_prompt_and_llm_metadata_to_response(
              response=response,
@@ -314,7 +308,7 @@ class ContextualResponseRephraser(
              llm_response=llm_response,
          )
 
-         if llm_response is None or not llm_response.choices:
+         if not (llm_response and llm_response.choices and llm_response.choices[0]):
              # If the LLM fails to generate a response, return the original response.
              return response
 
rasa/core/policies/enterprise_search_policy.py CHANGED
@@ -38,7 +38,6 @@ from rasa.dialogue_understanding.stack.frames import (
      SearchStackFrame,
  )
  from rasa.dialogue_understanding.stack.frames import PatternFlowStackFrame
- from rasa.dialogue_understanding.utils import record_commands_and_prompts
  from rasa.engine.graph import ExecutionContext
  from rasa.engine.recipes.default_recipe import DefaultV1Recipe
  from rasa.engine.storage.resource import Resource
@@ -69,6 +68,8 @@ from rasa.shared.nlu.constants import (
      PROMPTS,
      KEY_USER_PROMPT,
      KEY_LLM_RESPONSE_METADATA,
+     KEY_PROMPT_NAME,
+     KEY_COMPONENT_NAME,
  )
  from rasa.shared.nlu.training_data.training_data import TrainingData
  from rasa.shared.providers.embedding._langchain_embedding_client_adapter import (
@@ -281,7 +282,7 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
          return _LangchainEmbeddingClientAdapter(client)
 
      @classmethod
-     def _store_prompt_and_llm_response_in_tracker(
+     def _add_prompt_and_llm_response_to_latest_message(
          cls,
          tracker: DialogueStateTracker,
          prompt_name: str,
@@ -296,6 +297,8 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
              user_prompt: The user prompt that was sent to the LLM.
              llm_response: The response object from the LLM (None if no response).
          """
+         from rasa.dialogue_understanding.utils import record_commands_and_prompts
+
          if not record_commands_and_prompts:
              return
 
@@ -303,21 +306,17 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
              return
 
          parse_data = tracker.latest_message.parse_data
-         if parse_data is not None and PROMPTS not in parse_data:
-             parse_data[PROMPTS] = {}  # type: ignore[literal-required]
-
-         component_name = cls.__name__
-         existing_prompts = parse_data[PROMPTS].get(component_name, [])  # type: ignore[literal-required]
+         if PROMPTS not in parse_data:
+             parse_data[PROMPTS] = []  # type: ignore[literal-required]
 
          prompt_data: Dict[Text, Any] = {
+             KEY_COMPONENT_NAME: cls.__name__,
+             KEY_PROMPT_NAME: prompt_name,
              KEY_USER_PROMPT: user_prompt,
+             KEY_LLM_RESPONSE_METADATA: llm_response.to_dict() if llm_response else None,
          }
-         if llm_response is not None:
-             prompt_data[KEY_LLM_RESPONSE_METADATA] = llm_response.to_dict()
 
-         prompt_tuple = (prompt_name, prompt_data)
-         existing_prompts.append(prompt_tuple)
-         parse_data[PROMPTS][component_name] = existing_prompts  # type: ignore[literal-required]
+         parse_data[PROMPTS].append(prompt_data)  # type: ignore[literal-required]
 
      def train(  # type: ignore[override]
          self,
@@ -546,8 +545,9 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
          if self.use_llm:
              prompt = self._render_prompt(tracker, documents.results)
              llm_response = await self._generate_llm_answer(llm, prompt)
+             llm_response = LLMResponse.ensure_llm_response(llm_response)
 
-             self._store_prompt_and_llm_response_in_tracker(
+             self._add_prompt_and_llm_response_to_latest_message(
                  tracker=tracker,
                  prompt_name="enterprise_search_prompt",
                  user_prompt=prompt,
@@ -651,10 +651,7 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
              An LLMResponse object, or None if the call fails.
          """
          try:
-             raw_response = await llm.acompletion(prompt)
-             response_dict = raw_response.to_dict()
-             return LLMResponse.from_dict(response_dict)
-
+             return await llm.acompletion(prompt)
          except Exception as e:
              # unfortunately, langchain does not wrap LLM exceptions which means
              # we have to catch all exceptions here
rasa/core/processor.py CHANGED
@@ -818,8 +818,9 @@ class MessageProcessor:
          return parse_data
 
      def _sanitize_message(self, message: UserMessage) -> UserMessage:
-         """Sanitize user message by removing prepended slashes before the
-         actual content.
+         """Sanitize user messages.
+
+         Removes prepended slashes before the actual content.
          """
          # Regex pattern to match leading slashes and any whitespace before
          # actual content
@@ -921,9 +922,7 @@ class MessageProcessor:
          return [command.as_dict() for command in commands]
 
      def _contains_undefined_intent(self, message: Message) -> bool:
-         """Checks if the message contains an intent that is undefined
-         in the domain.
-         """
+         """Checks if the message contains an undefined intent."""
          intent_name = message.get(INTENT, {}).get("name")
          return intent_name is not None and intent_name not in self.domain.intents
 
@@ -987,6 +986,8 @@ class MessageProcessor:
          if parse_data["entities"]:
              self._log_slots(tracker)
 
+         plugin_manager().hook.after_new_user_message(tracker=tracker)
+
          logger.debug(
              f"Logged UserUtterance - tracker now has {len(tracker.events)} events."
          )
@@ -1305,7 +1306,7 @@ class MessageProcessor:
              self._log_slots(tracker)
 
          await self.execute_side_effects(events, tracker, output_channel)
-
+         plugin_manager().hook.after_action_executed(tracker=tracker)
          return self.should_predict_another_action(action.name())
 
      def _log_action_on_tracker(
@@ -1441,8 +1442,10 @@ class MessageProcessor:
          return len(filtered_commands) > 0
 
      def _is_calm_assistant(self) -> bool:
-         """Inspects the nodes of the graph schema to determine whether
-         any node is associated with the `FlowPolicy`, which is indicative of a
+         """Inspects the nodes of the graph schema to decide if we are in CALM.
+
+         To determine whether we are in CALM mode, we check if any node is
+         associated with the `FlowPolicy`, which is indicative of a
          CALM assistant setup.
 
          Returns:
rasa/dialogue_understanding/constants.py ADDED
@@ -0,0 +1 @@
+ RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME = "RASA_RECORD_COMMANDS_AND_PROMPTS"
rasa/dialogue_understanding/generator/command_generator.py CHANGED
@@ -27,6 +27,8 @@ from rasa.shared.nlu.constants import (
      KEY_USER_PROMPT,
      KEY_SYSTEM_PROMPT,
      KEY_LLM_RESPONSE_METADATA,
+     KEY_PROMPT_NAME,
+     KEY_COMPONENT_NAME,
  )
  from rasa.shared.nlu.training_data.message import Message
  from rasa.shared.providers.llm.llm_response import LLMResponse
@@ -408,65 +410,49 @@ class CommandGenerator:
          Prompt is only added in case the flag 'record_commands_and_prompts' is set.
          Example of prompts in the message parse data:
              Message(data={
-                 PROMPTS: {
-                     "MultiStepLLMCommandGenerator": [
-                         (
-                             "fill_slots_prompt",
-                             {
-                                 "user_prompt": <prompt content>",
-                                 "system_prompt": <prompt content>",
-                                 "llm_response_metadata": <metadata dict from LLMResponse>
-                             }
-                         ),
-                         (
-                             "handle_flows_prompt",
-                             {
-                                 "user_prompt": <prompt content>",
-                                 "system_prompt": <prompt content>",
-                                 "llm_response_metadata": <metadata dict from LLMResponse>
-                             }
-                         ),
-                     ],
-                     "SingleStepLLMCommandGenerator": [
-                         (
-                             "prompt_template",
-                             {
-                                 "user_prompt": <prompt content>",
-                                 "system_prompt": <prompt content>",
-                                 "llm_response_metadata": <metadata dict from LLMResponse>
-                             }
-                         ),
+                 PROMPTS: [
+                     {
+                         "component_name": "MultiStepLLMCommandGenerator",
+                         "prompt_name": "fill_slots_prompt",
+                         "user_prompt": "...",
+                         "system_prompt": "...",
+                         "llm_response_metadata": { ... }
+                     },
+                     {
+                         "component_name": "MultiStepLLMCommandGenerator",
+                         "prompt_name": "handle_flows_prompt",
+                         "user_prompt": "...",
+                         "system_prompt": "...",
+                         "llm_response_metadata": { ... }
+                     },
+                     {
+                         "component_name": "SingleStepLLMCommandGenerator",
+                         "prompt_name": "prompt_template",
+                         "user_prompt": "...",
+                         "system_prompt": "...",
+                         "llm_response_metadata": { ... }
+                     }
                  ]
-             }
              })
          """
          from rasa.dialogue_understanding.utils import record_commands_and_prompts
 
-         # only set prompt if the flag "record_commands_and_prompts" is set to True
+         # Only set prompt if the flag "record_commands_and_prompts" is set to True.
          if not record_commands_and_prompts:
              return
 
+         # Construct the dictionary with prompt details.
          prompt_data: Dict[Text, Any] = {
+             KEY_COMPONENT_NAME: component_name,
+             KEY_PROMPT_NAME: prompt_name,
              KEY_USER_PROMPT: user_prompt,
+             KEY_LLM_RESPONSE_METADATA: llm_response.to_dict() if llm_response else None,
              **({KEY_SYSTEM_PROMPT: system_prompt} if system_prompt else {}),
          }
 
-         if llm_response is not None:
-             prompt_data[KEY_LLM_RESPONSE_METADATA] = llm_response.to_dict()
-
-         prompt_tuple = (prompt_name, prompt_data)
-
-         if message.get(PROMPTS) is not None:
-             prompts = message.get(PROMPTS)
-             if component_name in prompts:
-                 prompts[component_name].append(prompt_tuple)
-             else:
-                 prompts[component_name] = [prompt_tuple]
-         else:
-             prompts = {component_name: [prompt_tuple]}
+         # Get or create a top-level "prompts" list.
+         prompts = message.get(PROMPTS) or []
+         prompts.append(prompt_data)
 
-         message.set(
-             PROMPTS,
-             prompts,
-             add_to_output=True,
-         )
+         # Update the message with the new prompts list.
+         message.set(PROMPTS, prompts, add_to_output=True)
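
With this change the recorded prompts become a flat list of dictionaries on the message instead of a per-component mapping of tuples. A small, hypothetical inspection snippet, assuming `message` is a processed Message with prompt recording enabled:

    from rasa.shared.nlu.constants import (
        PROMPTS,
        KEY_COMPONENT_NAME,
        KEY_PROMPT_NAME,
        KEY_USER_PROMPT,
    )

    # iterate over the flat prompts list; each entry now carries the component
    # and prompt names as keys instead of nesting entries under the component
    for prompt_data in message.get(PROMPTS) or []:
        print(prompt_data[KEY_COMPONENT_NAME], prompt_data[KEY_PROMPT_NAME])
        print(len(prompt_data[KEY_USER_PROMPT] or ""), "prompt characters")
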
rasa/dialogue_understanding/generator/llm_based_command_generator.py CHANGED
@@ -319,9 +319,7 @@ class LLMBasedCommandGenerator(
          """
          llm = llm_factory(self.config.get(LLM_CONFIG_KEY), DEFAULT_LLM_CONFIG)
          try:
-             raw_response = await llm.acompletion(prompt)
-             response_dict = raw_response.to_dict()
-             return LLMResponse.from_dict(response_dict)
+             return await llm.acompletion(prompt)
          except Exception as e:
              # unfortunately, langchain does not wrap LLM exceptions which means
              # we have to catch all exceptions here
rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py CHANGED
@@ -51,6 +51,7 @@ from rasa.shared.core.trackers import DialogueStateTracker
  from rasa.shared.exceptions import ProviderClientAPIException
  from rasa.shared.nlu.constants import TEXT
  from rasa.shared.nlu.training_data.message import Message
+ from rasa.shared.providers.llm.llm_response import LLMResponse
  from rasa.shared.utils.io import deep_container_fingerprint
  from rasa.shared.utils.llm import (
      get_prompt_template,
@@ -535,7 +536,8 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=prompt,
          )
 
-         llm_response = await self.invoke_llm(prompt)
+         response = await self.invoke_llm(prompt)
+         llm_response = LLMResponse.ensure_llm_response(response)
          actions = None
          if llm_response and llm_response.choices:
              actions = llm_response.choices[0]
@@ -589,7 +591,8 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=prompt,
          )
 
-         llm_response = await self.invoke_llm(prompt)
+         response = await self.invoke_llm(prompt)
+         llm_response = LLMResponse.ensure_llm_response(response)
          actions = None
          if llm_response and llm_response.choices:
              actions = llm_response.choices[0]
@@ -678,7 +681,8 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=prompt,
          )
 
-         llm_response = await self.invoke_llm(prompt)
+         response = await self.invoke_llm(prompt)
+         llm_response = LLMResponse.ensure_llm_response(response)
          actions = None
          if llm_response and llm_response.choices:
              actions = llm_response.choices[0]
rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py CHANGED
@@ -46,6 +46,7 @@ from rasa.shared.core.trackers import DialogueStateTracker
  from rasa.shared.exceptions import ProviderClientAPIException
  from rasa.shared.nlu.constants import TEXT, LLM_COMMANDS, LLM_PROMPT
  from rasa.shared.nlu.training_data.message import Message
+ from rasa.shared.providers.llm.llm_response import LLMResponse
  from rasa.shared.utils.io import deep_container_fingerprint
  from rasa.shared.utils.llm import (
      get_prompt_template,
@@ -264,7 +265,8 @@ class SingleStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=flow_prompt,
          )
 
-         llm_response = await self.invoke_llm(flow_prompt)
+         response = await self.invoke_llm(flow_prompt)
+         llm_response = LLMResponse.ensure_llm_response(response)
          # The check for 'None' maintains compatibility with older versions
          # of LLMCommandGenerator. In previous implementations, 'invoke_llm'
          # might return 'None' to indicate a failure to generate actions.
rasa/dialogue_understanding/utils.py CHANGED
@@ -1,7 +1,14 @@
  from contextlib import contextmanager
  from typing import Generator
 
- record_commands_and_prompts = False
+ from rasa.dialogue_understanding.constants import (
+     RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME,
+ )
+ from rasa.utils.common import get_bool_env_variable
+
+ record_commands_and_prompts = get_bool_env_variable(
+     RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME, False
+ )
 
 
  @contextmanager
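
Because the flag is now initialized from the environment at import time, prompt and command recording can be switched on without code changes. A sketch; the variable name comes from the new constants module, while the ordering requirement is an inference from the module-level read:

    import os

    # must be set before rasa.dialogue_understanding.utils is imported,
    # since the flag is evaluated once at module import
    os.environ["RASA_RECORD_COMMANDS_AND_PROMPTS"] = "true"

    import rasa.dialogue_understanding.utils as du_utils
    print(du_utils.record_commands_and_prompts)  # expected: True

This mirrors what the model manager now does for its warm Rasa processes (see warm_rasa_process.py below), which set the variable to "true" in the child process environment.
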
rasa/hooks.py CHANGED
@@ -3,17 +3,14 @@ import logging
  from typing import Optional, TYPE_CHECKING, List, Text, Union
 
  import pluggy
- from rasa.cli import SubParsersAction
 
- from rasa.cli import x as rasa_x
- from rasa.utils.endpoints import EndpointConfig
-
- from rasa.core.auth_retry_tracker_store import AuthRetryTrackerStore
- from rasa.core.secrets_manager.factory import load_secret_manager
-
- from rasa.tracing import config
+ # IMPORTANT: do not import anything from rasa here - use scoped imports
+ # this avoids circular imports, as the hooks are used in different places
+ # across the codebase.
 
  if TYPE_CHECKING:
+     from rasa.cli import SubParsersAction
+     from rasa.utils.endpoints import EndpointConfig
      from rasa.core.brokers.broker import EventBroker
      from rasa.core.tracker_store import TrackerStore
      from rasa.shared.core.domain import Domain
@@ -25,7 +22,7 @@ logger = logging.getLogger(__name__)
 
  @hookimpl  # type: ignore[misc]
  def refine_cli(
-     subparsers: SubParsersAction,
+     subparsers: "SubParsersAction",
      parent_parsers: List[argparse.ArgumentParser],
  ) -> None:
      from rasa.cli import e2e_test, inspect, markers
@@ -43,6 +40,9 @@
 
  @hookimpl  # type: ignore[misc]
  def configure_commandline(cmdline_arguments: argparse.Namespace) -> Optional[Text]:
+     from rasa.tracing import config
+     from rasa.cli import x as rasa_x
+
      endpoints_file = None
 
      if cmdline_arguments.func.__name__ == "rasa_x":
@@ -69,6 +69,8 @@ def init_telemetry(endpoints_file: Optional[Text]) -> None:
 
  @hookimpl  # type: ignore[misc]
  def init_managers(endpoints_file: Optional[Text]) -> None:
+     from rasa.core.secrets_manager.factory import load_secret_manager
+
      load_secret_manager(endpoints_file)
 
 
@@ -78,6 +80,9 @@ def create_tracker_store(
      domain: "Domain",
      event_broker: Optional["EventBroker"],
  ) -> "TrackerStore":
+     from rasa.utils.endpoints import EndpointConfig
+     from rasa.core.auth_retry_tracker_store import AuthRetryTrackerStore
+
      if isinstance(endpoint_config, EndpointConfig):
          return AuthRetryTrackerStore(
              endpoint_config=endpoint_config, domain=domain, event_broker=event_broker
rasa/model_manager/runner_service.py CHANGED
@@ -207,7 +207,6 @@ def start_bot_process(
          "--credentials",
          f"{bot_base_path}/credentials.yml",
          "--debug",
-         "--inspect",
          f"--port={port}",
          "--model",
          f"{bot_base_path}/models",
rasa/model_manager/socket_bridge.py CHANGED
@@ -132,7 +132,6 @@ async def create_bridge_client(
 
      @client.event  # type: ignore[misc]
      async def tracker(data: Dict[str, Any]) -> None:
-         structlogger.debug("model_runner.tracker_message", deployment_id=deployment_id)
          await sio.emit("tracker", json.loads(data), room=sid)
 
      @client.event  # type: ignore[misc]
rasa/model_manager/warm_rasa_process.py CHANGED
@@ -1,12 +1,16 @@
+ import os
  import shlex
  import subprocess
- from rasa.__main__ import main
- import os
+ import uuid
+ from dataclasses import dataclass
  from typing import List
+
  import structlog
- from dataclasses import dataclass
- import uuid
 
+ from rasa.__main__ import main
+ from rasa.dialogue_understanding.constants import (
+     RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME,
+ )
  from rasa.model_manager import config
  from rasa.model_manager.utils import ensure_base_directory_exists, logs_path
 
@@ -43,6 +47,7 @@ def _create_warm_rasa_process() -> WarmRasaProcess:
 
      envs = os.environ.copy()
      envs["RASA_TELEMETRY_ENABLED"] = "false"
+     envs[RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME] = "true"
 
      log_id = uuid.uuid4().hex
      log_path = logs_path(log_id)
rasa/plugin.py CHANGED
@@ -14,6 +14,7 @@ if TYPE_CHECKING:
      from rasa.core.tracker_store import TrackerStore
      from rasa.shared.core.domain import Domain
      from rasa.utils.endpoints import EndpointConfig
+     from rasa.shared.core.trackers import DialogueStateTracker
 
 
  hookspec = pluggy.HookspecMarker("rasa")
@@ -88,3 +89,13 @@ def after_server_stop() -> None:
      Use this hook to de-initialize any resources that require explicit cleanup like,
      thread shutdown, closing connections, etc.
      """
+
+
+ @hookspec  # type: ignore[misc]
+ def after_new_user_message(tracker: "DialogueStateTracker") -> None:
+     """Hook specification for after a new user message is received."""
+
+
+ @hookspec  # type: ignore[misc]
+ def after_action_executed(tracker: "DialogueStateTracker") -> None:
+     """Hook specification for after an action is executed."""
rasa/shared/constants.py CHANGED
@@ -319,3 +319,12 @@ SENSITIVE_DATA = [
      AWS_SECRET_ACCESS_KEY_CONFIG_KEY,
      AWS_SESSION_TOKEN_CONFIG_KEY,
  ]
+
+ # Used for key values in BotUtterance.data
+ TEXT = "text"
+ ELEMENTS = "elements"
+ QUICK_REPLIES = "quick_replies"
+ BUTTONS = "buttons"
+ ATTACHMENT = "attachment"
+ IMAGE = "image"
+ CUSTOM = "custom"
rasa/shared/nlu/constants.py CHANGED
@@ -7,6 +7,8 @@ PROMPTS = "prompts"
  KEY_USER_PROMPT = "user_prompt"
  KEY_SYSTEM_PROMPT = "system_prompt"
  KEY_LLM_RESPONSE_METADATA = "llm_response_metadata"
+ KEY_PROMPT_NAME = "prompt_name"
+ KEY_COMPONENT_NAME = "component_name"
  LLM_COMMANDS = "llm_commands"  # needed for fine-tuning
  LLM_PROMPT = "llm_prompt"  # needed for fine-tuning
  FLOWS_FROM_SEMANTIC_SEARCH = "flows_from_semantic_search"
rasa/shared/providers/llm/llm_response.py CHANGED
@@ -1,5 +1,8 @@
  from dataclasses import dataclass, field, asdict
- from typing import Dict, List, Optional, Text, Any
+ from typing import Dict, List, Optional, Text, Any, Union
+ import structlog
+
+ structlogger = structlog.get_logger()
 
 
  @dataclass
@@ -59,7 +62,7 @@ class LLMResponse:
          """
          Creates an LLMResponse from a dictionary.
          """
-         usage_data = data.get("usage")
+         usage_data = data.get("usage", {})
          usage_obj = LLMUsage.from_dict(usage_data) if usage_data else None
 
          return cls(
@@ -71,6 +74,15 @@ class LLMResponse:
              additional_info=data.get("additional_info"),
          )
 
+     @classmethod
+     def ensure_llm_response(cls, response: Union[str, "LLMResponse"]) -> "LLMResponse":
+         if isinstance(response, LLMResponse):
+             return response
+
+         structlogger.warn("llm_response.deprecated_response_type", response=response)
+         data = {"id": None, "choices": [response], "created": None}
+         return LLMResponse.from_dict(data)
+
      def to_dict(self) -> dict:
          """Converts the LLMResponse dataclass instance into a dictionary."""
          result = asdict(self)
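
ensure_llm_response is the compatibility shim that lets invoke_llm keep returning raw strings from older implementations while the callers above work with LLMResponse objects. A short sketch of the expected behaviour; the exact field values are an assumption based on from_dict:

    from rasa.shared.providers.llm.llm_response import LLMResponse

    # a plain string (legacy invoke_llm return value) gets wrapped into a
    # single-choice LLMResponse and logs a deprecation-style warning
    wrapped = LLMResponse.ensure_llm_response("Sure, I can help with that.")
    print(wrapped.choices)  # expected: ['Sure, I can help with that.']

    # an existing LLMResponse is passed through unchanged
    assert LLMResponse.ensure_llm_response(wrapped) is wrapped
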
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
  # this file will automatically be changed,
  # do not add anything but the version number here!
- __version__ = "3.11.3a1.dev5"
+ __version__ = "3.11.3a1.dev7"
{rasa_pro-3.11.3a1.dev5.dist-info → rasa_pro-3.11.3a1.dev7.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: rasa-pro
- Version: 3.11.3a1.dev5
+ Version: 3.11.3a1.dev7
  Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
  Home-page: https://rasa.com
  Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
{rasa_pro-3.11.3a1.dev5.dist-info → rasa_pro-3.11.3a1.dev7.dist-info}/RECORD CHANGED
@@ -92,7 +92,7 @@ rasa/cli/x.py,sha256=C7dLtYXAkD-uj7hNj7Pz5YbOupp2yRcMjQbsEVqXUJ8,6825
  rasa/constants.py,sha256=YrrBiJUc0cL5Xrsap6IioNbQ6dKaqDiueqHmMIYkpF0,1348
  rasa/core/__init__.py,sha256=DYHLve7F1yQBVOZTA63efVIwLiULMuihOfdpzw1j0os,457
  rasa/core/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/core/actions/action.py,sha256=H4Mr5WvXHwgZ5ws7ZyAlQ_ESx5ryZvv7cAgNjtyYCn8,45381
+ rasa/core/actions/action.py,sha256=vrd-CiBDtZt9-1B6DSkCvtZVg8iBs-O94QrGW0bzpz4,45391
  rasa/core/actions/action_clean_stack.py,sha256=xUP-2ipPsPAnAiwP17c-ezmHPSrV4JSUZr-eSgPQwIs,2279
  rasa/core/actions/action_exceptions.py,sha256=hghzXYN6VeHC-O_O7WiPesCNV86ZTkHgG90ZnQcbai8,724
  rasa/core/actions/action_hangup.py,sha256=wpXunkGC71krAYZD3BbqzlHLZxNg1mIviwWz0j9Go-c,994
@@ -118,12 +118,12 @@ rasa/core/brokers/file.py,sha256=GpeDEgwJYAUNZwUUqzGFzzMHiVi-N_kX185cm8RF4BM,180
  rasa/core/brokers/kafka.py,sha256=sJl1g92fo__cs-y2SKs4Uof6HJRJ-1fwHkjRuMs-cF4,12216
  rasa/core/brokers/pika.py,sha256=HPJn4Bm1KDAD9-UCK4uBTCrFWEPEkaSO9MJldO94xok,14379
  rasa/core/brokers/sql.py,sha256=4cDqpbwXwjcq5THbrgRptfUq38-UOnckZq7S7d9wU9o,2728
- rasa/core/channels/__init__.py,sha256=WGzKxtcaoG2yvQ7Rjsh69tbZFl3DsnQj_FbXihwsnN8,2178
+ rasa/core/channels/__init__.py,sha256=_BcGTWxW-R2TuD0Y0tVQDlnKW1GFbGvoob_vDviaJ78,2258
  rasa/core/channels/botframework.py,sha256=xyc_n7DJ3uglqvkr0IrQ3xxPWgvaqSOLHWx9BUS0enE,11668
  rasa/core/channels/callback.py,sha256=4LpjtJgQMAAXHwZrcVlVEUdpDTRqTe6n7XtwCusa75U,2750
  rasa/core/channels/channel.py,sha256=0cicx4SZsm0icCSO-F-e-Qk5W08ef11ozZRSrLfFPto,15107
  rasa/core/channels/console.py,sha256=fYhkSY8a_pn09ssjTczsKTALinABogpFJzzWTnL7MP8,8076
- rasa/core/channels/development_inspector.py,sha256=u7bGL_uwydzJEQ4HVU6BcBnSe5SB7KOWBzYnjshgJbk,7189
+ rasa/core/channels/development_inspector.py,sha256=RXBCQ8GchV2RJd4Y3Xe74ZLb1kSOinC_Xmc2l2Qqw5I,6964
  rasa/core/channels/facebook.py,sha256=ub8DCnTPe3_EyYtdYE49mo2Y-UNpURj6Qx9590oadeM,15816
  rasa/core/channels/hangouts.py,sha256=GjTmiVvE_OJ7Ig1-j2Aax95Bp1RFL-TUW80rnNcxxY0,11562
  rasa/core/channels/inspector/.eslintrc.cjs,sha256=MXLV2wxhPZqg3wvFlyi1fM363_7XxtWsB87RqWN4gzY,580
@@ -254,7 +254,8 @@ rasa/core/channels/rasa_chat.py,sha256=XGZ7QLyQHhB-m7EjetDNEBSjAa2mEFqU-e-FuS9z3
  rasa/core/channels/rest.py,sha256=YDBnbdrlvaYL7Efy3cm2LbbSm7cBAFDhmcypojHXbog,7227
  rasa/core/channels/rocketchat.py,sha256=HWOMxXLuwadYEYIMMP-z6RqAJzMGZDLklpgqLOipXF0,5998
  rasa/core/channels/slack.py,sha256=3b8OZQ_gih5XBwhQ1q4BbBUC1SCAPaO9AoJEn2NaoQE,24405
- rasa/core/channels/socketio.py,sha256=qTxwow7BA4XMwzlSKAh2W2amQiBqtL_3WqnUc0rjY_s,13342
+ rasa/core/channels/socketio.py,sha256=uno9MG-sHxjZpYHrSO5VauyhAGshrwmT5FoHh99X2ws,10841
+ rasa/core/channels/studio_chat.py,sha256=V550z0ysvbv0laXxotEtowuYHe-4gMLcNWwkoz8mKwk,6729
  rasa/core/channels/telegram.py,sha256=5BrNECFM3qe9XjNpDb8Q9fbqCT5aKr5L6IH21W8sum8,10651
  rasa/core/channels/twilio.py,sha256=GsdjfplZdBj0fRB60bSggPF1DXFZ_x18V_dlcDy5VFs,5943
  rasa/core/channels/vier_cvg.py,sha256=PfvSluQqgJbP0JzZPFUvum3z7H55JPPeobcD-z5zCkw,13544
@@ -307,7 +308,7 @@ rasa/core/lock_store.py,sha256=fgdufUYXHEiTcD7NCCqgDAQRRtt7jrKafENHqFKOyi0,12504
  rasa/core/migrate.py,sha256=XNeYdiRytBmBNubOQ8KZOT_wR1o9aOpHHfBU9PCB2eg,14626
  rasa/core/nlg/__init__.py,sha256=0eQOZ0fB35b18oVhRFczcH30jJHgO8WXFhnbXGOxJek,240
  rasa/core/nlg/callback.py,sha256=rFkDe7CSAETASRefpERUT6-DHWPs0UXhx8x4tZ1QE0M,5238
- rasa/core/nlg/contextual_response_rephraser.py,sha256=YGBmSyXnaZMXC6AHQNuDEnP19Ak_rP6FzZHTTCdTk9E,13134
+ rasa/core/nlg/contextual_response_rephraser.py,sha256=af2TZ5YbMMt7k74KQ_iUx3OBMKqcf2KlOoDQw_V-Q-M,12966
  rasa/core/nlg/generator.py,sha256=YZ_rh--MeyzA6oXRqr_Ng-jcmPgbCmWMJJrquPmo__8,8436
  rasa/core/nlg/interpolator.py,sha256=Dc-J2Vf6vPPUbwIgZQm3AJDGvMaFTsh9Citd4CYuA9U,5189
  rasa/core/nlg/response.py,sha256=aHpy9BgjO7ub6v-sVPiQqutUA_7-UD1l3DJGVeQyp4k,5888
@@ -315,7 +316,7 @@ rasa/core/nlg/summarize.py,sha256=JO6VCfM_RnU0QX8Us42YkNOxC0ESKV1xcVH_sCW27ZU,21
  rasa/core/persistor.py,sha256=0BZvrA1xObxVtADWLVapj4NOmvqIEen1LKoMOdtZ63s,20337
  rasa/core/policies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/core/policies/ensemble.py,sha256=AjNOEy2Iubbe-LdKaoFUXG8ch6yPrg3bTvcTcAPmeOs,12959
- rasa/core/policies/enterprise_search_policy.py,sha256=-gcmJ8rS0tFMHXs_w_RTlVhhXHss_VqyS44OfkcWhEw,36674
+ rasa/core/policies/enterprise_search_policy.py,sha256=op1VxvxFX78DU9UUIkUWi0LhfkvfsGerpokBSkOppwg,36477
  rasa/core/policies/enterprise_search_prompt_template.jinja2,sha256=dCS_seyBGxMQoMsOjjvPp0dd31OSzZCJSZeev1FJK5Q,1187
  rasa/core/policies/enterprise_search_prompt_with_citation_template.jinja2,sha256=vRQBs3q13UmvRRgqA8-DmRtM7tqZP2ngwMVJ4gy7lE0,3302
  rasa/core/policies/flow_policy.py,sha256=wGb1l_59cGM9ZaexSIK5uXFi618739oNfLOxx2FC0_Y,7490
@@ -330,7 +331,7 @@ rasa/core/policies/policy.py,sha256=HeVtIaV0dA1QcAG3vjdn-4g7-oUEJPL4u01ETJt78YA,
  rasa/core/policies/rule_policy.py,sha256=YNDPZUZkpKFCvZwKe1kSfP6LQnDL9CQ6JU69JRwdmWw,50729
  rasa/core/policies/ted_policy.py,sha256=_DHiDH5Upx1yFNzMXBA3SGdHBRfsitTLlr7howUHPoo,87750
  rasa/core/policies/unexpected_intent_policy.py,sha256=5pGe9EMS-NLHIDDhqY6KCH_Kv7_TGMzSbe_GsjuKH1w,39649
- rasa/core/processor.py,sha256=vB9YpzGzn74lyqPn1Y-lIAxmaJd0xfPGMHxwyqeMHkg,55571
+ rasa/core/processor.py,sha256=OU8VrrfQlGEbUvV8u_YKhhCV9VhVdLR03Rn2wi1g3K4,55741
  rasa/core/run.py,sha256=8HG8GTDGO2RW5NX9Pz7UU9qBwDQE_rbStpoOwNcIUqc,11452
  rasa/core/secrets_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/core/secrets_manager/constants.py,sha256=dTDHenvG1JBVi34QIR6FpdO5RDOXQwAjAxLlgJ2ZNEI,1193
@@ -378,21 +379,22 @@ rasa/dialogue_understanding/commands/skip_question_command.py,sha256=bSrUFOHUz1o
  rasa/dialogue_understanding/commands/start_flow_command.py,sha256=a0Yk8xpBpFgC3Hkh4J8kAudz4s4ZLQWuoDq_a63lQXM,3309
  rasa/dialogue_understanding/commands/user_silence_command.py,sha256=QtqsMU5mrbUp5dla2yGSpxXfIfi_h6Eu72mTDZQ_aTU,1724
  rasa/dialogue_understanding/commands/utils.py,sha256=OiyLFGEsrfFSIJcvBY6lTIIXqDY9OxaikVGtcl4Kokk,1911
+ rasa/dialogue_understanding/constants.py,sha256=YcELaIss69Hnroclvn90Dl4Suk3S6e3t0UoIbUaXG2A,83
  rasa/dialogue_understanding/generator/__init__.py,sha256=Ykeb2wQ1DuiUWAWO0hLIPSTK1_Ktiq9DZXF6D3ugN78,764
- rasa/dialogue_understanding/generator/command_generator.py,sha256=RCrfvsvIGl9TlhJtiicHoondNb5DAjNvlo3zv0qZ_1w,16500
+ rasa/dialogue_understanding/generator/command_generator.py,sha256=b7jXvNn8iCAxKqTKwdY4g0K2kLWEfbkyUqJkS2UbVjQ,16143
  rasa/dialogue_understanding/generator/constants.py,sha256=9Nwjo2Qobioetr9SyyQxsGvEPSbKCVS5ZX1GGJtbA0E,716
  rasa/dialogue_understanding/generator/flow_document_template.jinja2,sha256=f4H6vVd-_nX_RtutMh1xD3ZQE_J2OyuPHAtiltfiAPY,253
  rasa/dialogue_understanding/generator/flow_retrieval.py,sha256=MkwUgQA9xRlAQUdWF2cBEX2tW2PQhBsq2Jsy2vmqWY4,17891
- rasa/dialogue_understanding/generator/llm_based_command_generator.py,sha256=O9sIoDTup2g7l1Uqy6LqMBi-hwZ3OpJk90ZwzhltMtc,17707
+ rasa/dialogue_understanding/generator/llm_based_command_generator.py,sha256=VO3ZrotELyfKY_LEw8FJ4bPGTRjYbUvQy4Q6Z5rcPCI,17592
  rasa/dialogue_understanding/generator/llm_command_generator.py,sha256=QpNXhjB9ugtPV8XAHmKjbJtOiI1yE9rC2osbsI_A4ZY,2529
  rasa/dialogue_understanding/generator/multi_step/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/dialogue_understanding/generator/multi_step/fill_slots_prompt.jinja2,sha256=Y0m673tAML3cFPaLM-urMXDsBYUUcXIw9YUpkAhGUuA,2933
  rasa/dialogue_understanding/generator/multi_step/handle_flows_prompt.jinja2,sha256=8l93_QBKBYnqLICVdiTu5ejZDE8F36BU8-qwba0px44,1927
- rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py,sha256=E80aJUrvDgsWcAF3IphpYVT2x0-DcsI-ISwOY38vOlg,34172
+ rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py,sha256=CrTEgkhky6s5O7NohqZ9gCD8G0YLq4rKM49ujhrNzr4,34418
  rasa/dialogue_understanding/generator/nlu_command_adapter.py,sha256=pzd1q-syU_QuqTRcfd_GsXyOJaxfApqh_LsOKuEN46g,9332
  rasa/dialogue_understanding/generator/single_step/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/dialogue_understanding/generator/single_step/command_prompt_template.jinja2,sha256=nMayu-heJYH1QmcL1cFmXb8SeiJzfdDR_9Oy5IRUXsM,3937
- rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py,sha256=a72P3SzuSaF0Mmm4b3k4jT4zOGE_RFXBDRlHOI7Px0g,18656
+ rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py,sha256=prDAi8i6PrkkljkfI1qh7kL0BXiRzxLvl4XBcMaqqqI,18780
  rasa/dialogue_understanding/patterns/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/dialogue_understanding/patterns/cancel.py,sha256=IQ4GVHNnNCqwKRLlAqBtLsgolcbPPnHsHdb3aOAFhEs,3868
  rasa/dialogue_understanding/patterns/cannot_handle.py,sha256=pg0zJHl-hDBnl6y9IyxZzW57yuMdfD8xI8eiK6EVrG8,1406
@@ -424,7 +426,7 @@ rasa/dialogue_understanding/stack/frames/flow_stack_frame.py,sha256=W4mEmihIN5Bi
  rasa/dialogue_understanding/stack/frames/pattern_frame.py,sha256=EVrYWv5dCP7XTvNV-HqtOOrseP-IkF0jD2_JacAvIYw,235
  rasa/dialogue_understanding/stack/frames/search_frame.py,sha256=rJ9og28k_udUIjP-2Z5xeb_2T5HvCzwDCnxVG9K7lws,728
  rasa/dialogue_understanding/stack/utils.py,sha256=ysH6-IeMwNnKbF1__uMlq6I8zaGXFdMEpw1iYdEz4kA,7650
- rasa/dialogue_understanding/utils.py,sha256=ENXT_1ALY1Ev6Gs8jNz3dm3TC91Y5psp2Np6_L4cHXI,332
+ rasa/dialogue_understanding/utils.py,sha256=tw9O_fhuspk64v99B5_lwNZjBIMlpjIKekpyFzMylJ8,566
  rasa/dialogue_understanding_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/dialogue_understanding_test/constants.py,sha256=rZhBIQV5jFzPTuDtK5WSwS8YKKDLaZ7TMQsaSQwNA2g,486
  rasa/dialogue_understanding_test/du_test_case.py,sha256=Is3brWanixDNXKq_Kr43tcUc4PjoiN-IfJBRwKnL4hU,3656
@@ -492,7 +494,7 @@ rasa/graph_components/providers/training_tracker_provider.py,sha256=nCHyLsiC8q3B
  rasa/graph_components/validators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/graph_components/validators/default_recipe_validator.py,sha256=BHrF6NTfJz42xG1LfVjkP5CdQef4NTcmiiC8xtMemaI,24457
  rasa/graph_components/validators/finetuning_validator.py,sha256=38AcwmV8cF5TIlWhUIzh98wtZf934ix04HcczCJiWkU,12863
- rasa/hooks.py,sha256=3nsfCA142V56mBQ7ktBXhD_RyaSrfj7fY3t7HnsD4Pc,3709
+ rasa/hooks.py,sha256=T_m4uGDRCRP8FS2hnieI8uIoD7u2VW3B9oiWjRcBoOk,3959
  rasa/jupyter.py,sha256=x_GF9PK2zMhltb48GEIV9YZ4pRhCto8nV5SioYSCljI,1782
  rasa/llm_fine_tuning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/llm_fine_tuning/annotation_module.py,sha256=wFmW3d6lI5o49OWmdbYQlgr24rqHDgA0T0hLM1pSb9U,8578
@@ -515,12 +517,12 @@ rasa/model.py,sha256=GH1-N6Po3gL3nwfa9eGoN2bMRNMrn4f3mi17-osW3T0,3491
  rasa/model_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/model_manager/config.py,sha256=OzSx8c0Su8Q8wXTuCldeI7GDmB-wh43RL56Fq1-ESUM,1159
  rasa/model_manager/model_api.py,sha256=MUqC2Tfdu857ALxOR55sgI5Tuow8JeIufjwU5slNhiw,20274
- rasa/model_manager/runner_service.py,sha256=X8lQ5rAeXFkldusNrYk3yVUayxCjyvSsK75kWWw4ZcU,8731
- rasa/model_manager/socket_bridge.py,sha256=klKaFA-PKNfha5ir0xKqba3Al6igYu3cD7BLIC7wIQ8,5028
+ rasa/model_manager/runner_service.py,sha256=JaD_xu-zDfPWI6onvMlWdB9KBtH7-91KKbTf4vsKuTg,8710
+ rasa/model_manager/socket_bridge.py,sha256=vzpO4-Oh6ZvnOOCV-z1b65q8438H1ERTvtkgTSRJ-lE,4940
  rasa/model_manager/studio_jwt_auth.py,sha256=eZ_srnbL2sKIKgx0OZIp29NbIrH2J8PlI8Or0lLg_Xo,2644
  rasa/model_manager/trainer_service.py,sha256=90WYl4fclgPLcLfFgDOtai9VahZx_ikn20PIMg_eSQM,10347
  rasa/model_manager/utils.py,sha256=tgj215CsJreqc4Ym8tAvv-hBieAC94nL0c4caPWIcZM,2643
- rasa/model_manager/warm_rasa_process.py,sha256=xFNP-ANZfUBKs_Sur2deAT2qqatWD3_XZJcUgQy2iiQ,5716
+ rasa/model_manager/warm_rasa_process.py,sha256=L6nYjI1vgEjT5zSc13HkS8t-16t7iOGkKZnXuNRf5sc,5887
  rasa/model_service.py,sha256=nj0wNoByYWg5WVd5GtIc5V-RhpVR_xspi-MeNQxurLE,3753
  rasa/model_testing.py,sha256=h0QUpJu6p_TDse3aHjCfYwI6OGH47b3Iuo5Ot0HQADM,14959
  rasa/model_training.py,sha256=gvmJ6bN6TdX6H6qnO5y14I_aYeqi_h1Dxfpavks3paU,21687
@@ -582,10 +584,10 @@ rasa/nlu/utils/hugging_face/transformers_pre_post_processors.py,sha256=U8HrRWM1p
  rasa/nlu/utils/mitie_utils.py,sha256=eupFltdG1nB8NXT4sh1pGJjDp0NKvlsKfPWYid6miGM,3887
  rasa/nlu/utils/pattern_utils.py,sha256=nSOJmvsp6bF8HCCRb2Iaty71R0GfflJiuM4X_oK5hdQ,5386
  rasa/nlu/utils/spacy_utils.py,sha256=pBvsCVKVuZ3b2Pjn-XuOVZ6lzZu9Voc2R4N1VczwtCM,11794
- rasa/plugin.py,sha256=H_OZcHy_U3eAK-JHr43TSxcPqS0JEGcZkFvmumeeJEs,2670
+ rasa/plugin.py,sha256=OJYvgBHnNTywFmmn-gvHkjtFUmylnWcuDny5YjmrsDc,3071
  rasa/server.py,sha256=eLRWFPoJrdc9_eNu0pUj9p52O8MR28zIm4ZP9_MWiH0,57899
  rasa/shared/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/shared/constants.py,sha256=9s-6WXrypnn86QO3cT3Cw4Llriqh_NwQU7hK0sCsMFA,11540
+ rasa/shared/constants.py,sha256=35e_ujygM9ONU9UObmU6KvrXVdZLlpfARzVfhXdqRV0,11732
  rasa/shared/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/shared/core/command_payload_reader.py,sha256=Vhiop9LWFawaEruRifBBrVmoEJ-fj1Tli1wBvsYu2_I,3563
  rasa/shared/core/constants.py,sha256=WNFzABG-eiVREBL6aDZAmcNDiSmuSbvWuxXIMoX2Iv8,5704
@@ -643,7 +645,7 @@ rasa/shared/importers/rasa.py,sha256=877EU8qPZSMBk5VAVAAUhfsh6vatRJrYOqWz1YGR6p8
  rasa/shared/importers/remote_importer.py,sha256=fKLQskaCVPpD5cCMQ9sR71cZZlSIP-SSv3J3o2kra2w,7696
  rasa/shared/importers/utils.py,sha256=Gi3BM5RUr-9nX_Ujf-g-tt19_bKPizmQIi6eAflDAmo,1289
  rasa/shared/nlu/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- rasa/shared/nlu/constants.py,sha256=oq-eaTMXRvT1mE8pFhxf1Jvc8vlZGIeOSdY7YQlKd2Q,1825
+ rasa/shared/nlu/constants.py,sha256=b3S7j61yL0jrqQ8_EaQerpzIJSAAoi6OzBlYGZrIDus,1895
  rasa/shared/nlu/interpreter.py,sha256=eCNJp61nQYTGVf4aJi8SCWb46jxZY6-C1M1LFxMyQTM,188
  rasa/shared/nlu/training_data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/shared/nlu/training_data/entities_parser.py,sha256=fC-VIso07so6E9b6KrQXOBC-ZUGCQGvnMvzVwiAO1GQ,6729
@@ -695,7 +697,7 @@ rasa/shared/providers/llm/azure_openai_llm_client.py,sha256=A6sg2bvulNczuzu1J0V7
  rasa/shared/providers/llm/default_litellm_llm_client.py,sha256=1oiUIXr_U5ldyBQZ8cnrV3P7Qw9kMw1yvaVg6mjKkHU,3940
  rasa/shared/providers/llm/litellm_router_llm_client.py,sha256=llko2DfOpiLMpHxnW26I1Hb1wTn7VmZ_yu43GRXhqwQ,6815
  rasa/shared/providers/llm/llm_client.py,sha256=6-gMsEJqquhUPGXzNiq_ybM_McLWxAJ_QhbmWcLnb_Q,2358
- rasa/shared/providers/llm/llm_response.py,sha256=HedtASFXW2GFWS4OAmk-wSjn5dRDFWB8dAkAO2Kdd_M,2426
+ rasa/shared/providers/llm/llm_response.py,sha256=rdXMBxbyz4vKnG-0b-NPsyiA1rehrvkU6Tjx1usX2BE,2871
  rasa/shared/providers/llm/openai_llm_client.py,sha256=uDdcugBcO3sfxbduc00eqaZdrJP0VFX5dkBd2Dem47M,4844
  rasa/shared/providers/llm/rasa_llm_client.py,sha256=SpgWn3uHHEezIcyvMfi468zRLw_W8VF6sIs-VIhElPc,3357
  rasa/shared/providers/llm/self_hosted_llm_client.py,sha256=98FaF0-lYnytC46ulhrCAQjUKy9TI0U2QILml__UCzc,9170
@@ -776,9 +778,9 @@ rasa/utils/train_utils.py,sha256=f1NWpp5y6al0dzoQyyio4hc4Nf73DRoRSHDzEK6-C4E,212
  rasa/utils/url_tools.py,sha256=JQcHL2aLqLHu82k7_d9imUoETCm2bmlHaDpOJ-dKqBc,1218
  rasa/utils/yaml.py,sha256=KjbZq5C94ZP7Jdsw8bYYF7HASI6K4-C_kdHfrnPLpSI,2000
  rasa/validator.py,sha256=wl5IKiyDmk6FlDcGO2Js-H-gHPeqVqUJ6hB4fgN0xjI,66796
- rasa/version.py,sha256=30hhHbpQCDfDXVFyr3PTuEccWoj0mUrtnPGHp39doZ0,124
- rasa_pro-3.11.3a1.dev5.dist-info/METADATA,sha256=m_N49daQs1B-kgsaTa3rRAlRqpG2oNRc1LHIA7Oc6JA,10798
- rasa_pro-3.11.3a1.dev5.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
- rasa_pro-3.11.3a1.dev5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- rasa_pro-3.11.3a1.dev5.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
- rasa_pro-3.11.3a1.dev5.dist-info/RECORD,,
+ rasa/version.py,sha256=sC8Ac4F7SMEWBdNHVJqaHXvjVQkFIjvFzczIKz8giGI,124
+ rasa_pro-3.11.3a1.dev7.dist-info/METADATA,sha256=5N7yD6hu3L1w1ZtDJcRYm1-JSXtvbIXkDDOf0wMpGpM,10798
+ rasa_pro-3.11.3a1.dev7.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
+ rasa_pro-3.11.3a1.dev7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ rasa_pro-3.11.3a1.dev7.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
+ rasa_pro-3.11.3a1.dev7.dist-info/RECORD,,