rasa-pro 3.11.3a1.dev5__py3-none-any.whl → 3.11.3a1.dev6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic.

@@ -17,6 +17,15 @@ from jsonschema import Draft202012Validator
 
 import rasa.core
 import rasa.shared.utils.io
+from rasa.core.actions.constants import (
+    TEXT,
+    ELEMENTS,
+    QUICK_REPLIES,
+    BUTTONS,
+    ATTACHMENT,
+    IMAGE,
+    CUSTOM,
+)
 from rasa.core.actions.custom_action_executor import (
     CustomActionExecutor,
     NoEndpointCustomActionExecutor,
@@ -256,18 +265,18 @@ def action_for_name_or_text(
 def create_bot_utterance(message: Dict[Text, Any]) -> BotUttered:
     """Create BotUttered event from message."""
     bot_message = BotUttered(
-        text=message.pop("text", None),
+        text=message.pop(TEXT, None),
         data={
-            "elements": message.pop("elements", None),
-            "quick_replies": message.pop("quick_replies", None),
-            "buttons": message.pop("buttons", None),
+            ELEMENTS: message.pop(ELEMENTS, None),
+            QUICK_REPLIES: message.pop(QUICK_REPLIES, None),
+            BUTTONS: message.pop(BUTTONS, None),
             # for legacy / compatibility reasons we need to set the image
             # to be the attachment if there is no other attachment (the
             # `.get` is intentional - no `pop` as we still need the image`
             # property to set it in the following line)
-            "attachment": message.pop("attachment", None) or message.get("image", None),
-            "image": message.pop("image", None),
-            "custom": message.pop("custom", None),
+            ATTACHMENT: message.pop(ATTACHMENT, None) or message.get(IMAGE, None),
+            IMAGE: message.pop(IMAGE, None),
+            CUSTOM: message.pop(CUSTOM, None),
             PROMPTS: message.pop(PROMPTS, None),
         },
         metadata=message,
@@ -3,3 +3,11 @@ SELECTIVE_DOMAIN = "enable_selective_domain"
 
 SSL_CLIENT_CERT_FIELD = "ssl_client_cert"
 SSL_CLIENT_KEY_FIELD = "ssl_client_key"
+
+TEXT = "text"
+ELEMENTS = "elements"
+QUICK_REPLIES = "quick_replies"
+BUTTONS = "buttons"
+ATTACHMENT = "attachment"
+IMAGE = "image"
+CUSTOM = "custom"
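
The hunks above are a behavior-preserving refactor: the payload keys that create_bot_utterance previously passed as string literals are now the constants added to rasa/core/actions/constants.py. A minimal sketch of the equivalence, using only names this diff itself introduces:

    from rasa.core.actions.constants import TEXT, BUTTONS, IMAGE, CUSTOM

    # The new constants mirror the old literal keys, so the BotUttered
    # payload produced by create_bot_utterance is unchanged.
    assert (TEXT, BUTTONS, IMAGE, CUSTOM) == ("text", "buttons", "image", "custom")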
@@ -6,7 +6,6 @@ from jinja2 import Template
 from rasa import telemetry
 from rasa.core.nlg.response import TemplatedNaturalLanguageGenerator
 from rasa.core.nlg.summarize import summarize_conversation
-from rasa.dialogue_understanding.utils import record_commands_and_prompts
 from rasa.shared.constants import (
     LLM_CONFIG_KEY,
     MODEL_CONFIG_KEY,
@@ -148,6 +147,8 @@ class ContextualResponseRephraser(
             user_prompt: The user prompt that was sent to the LLM.
             llm_response: The response object from the LLM (None if no response).
         """
+        from rasa.dialogue_understanding.utils import record_commands_and_prompts
+
         if not record_commands_and_prompts:
             return response
 
@@ -157,17 +158,19 @@ class ContextualResponseRephraser(
 
         if llm_response is not None:
             prompt_data[KEY_LLM_RESPONSE_METADATA] = llm_response.to_dict()
+        else:
+            prompt_data[KEY_LLM_RESPONSE_METADATA] = None
 
         prompt_tuple = (prompt_name, prompt_data)
 
         component_name = cls.__name__
-        existing_prompts = response.get(PROMPTS, {})
-        if component_name in existing_prompts:
-            existing_prompts[component_name].append(prompt_tuple)
+        component_prompts = response.get(PROMPTS, {})
+        if component_name in component_prompts:
+            component_prompts[component_name].append(prompt_tuple)
         else:
-            existing_prompts[component_name] = [prompt_tuple]
+            component_prompts[component_name] = [prompt_tuple]
 
-        response[PROMPTS] = existing_prompts
+        response[PROMPTS] = component_prompts
         return response
 
     def _last_message_if_human(self, tracker: DialogueStateTracker) -> Optional[str]:
@@ -202,10 +205,7 @@ class ContextualResponseRephraser(
         llm = llm_factory(self.llm_config, DEFAULT_LLM_CONFIG)
 
         try:
-            raw_response = await llm.acompletion(prompt)
-            response_dict = raw_response.to_dict()
-            return LLMResponse.from_dict(response_dict)
-
+            return await llm.acompletion(prompt)
         except Exception as e:
             # unfortunately, langchain does not wrap LLM exceptions which means
             # we have to catch all exceptions here
@@ -306,6 +306,7 @@ class ContextualResponseRephraser(
             llm_model_group_id=self.llm_property(MODEL_GROUP_ID_CONFIG_KEY),
         )
         llm_response = await self._generate_llm_response(prompt)
+        llm_response = LLMResponse.ensure_llm_response(llm_response)
 
         response = self._add_prompt_and_llm_metadata_to_response(
             response=response,
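
One detail in the rephraser hunks above recurs throughout this release: record_commands_and_prompts is now imported inside the method instead of at module level. Since the flag is a module-level variable that can be rebound at runtime (it is now initialised from the environment, see the rasa/dialogue_understanding/utils.py hunk further down), a deferred import reads the current value at call time, whereas a top-level from-import would freeze whatever value was set at import time. A small, self-contained illustration of that Python pitfall (the module object here is a stand-in, not Rasa code):

    import types

    flags = types.ModuleType("flags")           # stand-in for a module holding a bool flag
    flags.record_commands_and_prompts = False

    copied = flags.record_commands_and_prompts  # what `from flags import ...` binds at import time
    flags.record_commands_and_prompts = True    # the flag is rebound later at runtime

    print(copied)                               # False - the copied name is stale
    print(flags.record_commands_and_prompts)    # True - attribute access sees the rebind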
@@ -38,7 +38,6 @@ from rasa.dialogue_understanding.stack.frames import (
     SearchStackFrame,
 )
 from rasa.dialogue_understanding.stack.frames import PatternFlowStackFrame
-from rasa.dialogue_understanding.utils import record_commands_and_prompts
 from rasa.engine.graph import ExecutionContext
 from rasa.engine.recipes.default_recipe import DefaultV1Recipe
 from rasa.engine.storage.resource import Resource
@@ -281,7 +280,7 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
         return _LangchainEmbeddingClientAdapter(client)
 
     @classmethod
-    def _store_prompt_and_llm_response_in_tracker(
+    def _add_prompt_and_llm_response_to_latest_message(
         cls,
         tracker: DialogueStateTracker,
         prompt_name: str,
@@ -296,6 +295,8 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
             user_prompt: The user prompt that was sent to the LLM.
             llm_response: The response object from the LLM (None if no response).
         """
+        from rasa.dialogue_understanding.utils import record_commands_and_prompts
+
         if not record_commands_and_prompts:
             return
 
@@ -307,17 +308,20 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
             parse_data[PROMPTS] = {}  # type: ignore[literal-required]
 
         component_name = cls.__name__
-        existing_prompts = parse_data[PROMPTS].get(component_name, [])  # type: ignore[literal-required]
+        component_prompts = parse_data[PROMPTS].get(component_name, [])  # type: ignore[literal-required]
 
         prompt_data: Dict[Text, Any] = {
             KEY_USER_PROMPT: user_prompt,
         }
+
         if llm_response is not None:
             prompt_data[KEY_LLM_RESPONSE_METADATA] = llm_response.to_dict()
+        else:
+            prompt_data[KEY_LLM_RESPONSE_METADATA] = None
 
         prompt_tuple = (prompt_name, prompt_data)
-        existing_prompts.append(prompt_tuple)
-        parse_data[PROMPTS][component_name] = existing_prompts  # type: ignore[literal-required]
+        component_prompts.append(prompt_tuple)
+        parse_data[PROMPTS][component_name] = component_prompts  # type: ignore[literal-required]
 
     def train(  # type: ignore[override]
         self,
@@ -545,9 +549,10 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
 
         if self.use_llm:
             prompt = self._render_prompt(tracker, documents.results)
-            llm_response = await self._generate_llm_answer(llm, prompt)
+            response = await self._generate_llm_answer(llm, prompt)
+            llm_response = LLMResponse.ensure_llm_response(response)
 
-            self._store_prompt_and_llm_response_in_tracker(
+            self._add_prompt_and_llm_response_to_latest_message(
                 tracker=tracker,
                 prompt_name="enterprise_search_prompt",
                 user_prompt=prompt,
@@ -651,10 +656,7 @@ class EnterpriseSearchPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Po
             An LLMResponse object, or None if the call fails.
         """
         try:
-            raw_response = await llm.acompletion(prompt)
-            response_dict = raw_response.to_dict()
-            return LLMResponse.from_dict(response_dict)
-
+            return await llm.acompletion(prompt)
         except Exception as e:
             # unfortunately, langchain does not wrap LLM exceptions which means
             # we have to catch all exceptions here
@@ -0,0 +1 @@
+RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME = "RASA_RECORD_COMMANDS_AND_PROMPTS"
@@ -453,6 +453,8 @@ class CommandGenerator:
 
         if llm_response is not None:
             prompt_data[KEY_LLM_RESPONSE_METADATA] = llm_response.to_dict()
+        else:
+            prompt_data[KEY_LLM_RESPONSE_METADATA] = None
 
         prompt_tuple = (prompt_name, prompt_data)
 
@@ -319,9 +319,7 @@ class LLMBasedCommandGenerator(
         """
         llm = llm_factory(self.config.get(LLM_CONFIG_KEY), DEFAULT_LLM_CONFIG)
         try:
-            raw_response = await llm.acompletion(prompt)
-            response_dict = raw_response.to_dict()
-            return LLMResponse.from_dict(response_dict)
+            return await llm.acompletion(prompt)
         except Exception as e:
             # unfortunately, langchain does not wrap LLM exceptions which means
             # we have to catch all exceptions here
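
As in the rephraser and the enterprise search policy above, the completion helper changed here (invoke_llm, judging by the call sites in the following hunks) no longer converts the provider response itself: it returns whatever the client's acompletion() yields, and each call site normalises the result with the new LLMResponse.ensure_llm_response added later in this diff. A rough sketch of the resulting call-site pattern; the wrapper name is hypothetical:

    from rasa.shared.providers.llm.llm_response import LLMResponse

    async def _ask_llm(llm, prompt: str) -> LLMResponse:
        # mirrors the pattern in the surrounding hunks: pass the raw result through
        response = await llm.acompletion(prompt)
        # legacy string results get wrapped; an LLMResponse passes through untouched
        return LLMResponse.ensure_llm_response(response)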
@@ -51,6 +51,7 @@ from rasa.shared.core.trackers import DialogueStateTracker
 from rasa.shared.exceptions import ProviderClientAPIException
 from rasa.shared.nlu.constants import TEXT
 from rasa.shared.nlu.training_data.message import Message
+from rasa.shared.providers.llm.llm_response import LLMResponse
 from rasa.shared.utils.io import deep_container_fingerprint
 from rasa.shared.utils.llm import (
     get_prompt_template,
@@ -535,7 +536,8 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
             prompt=prompt,
         )
 
-        llm_response = await self.invoke_llm(prompt)
+        response = await self.invoke_llm(prompt)
+        llm_response = LLMResponse.ensure_llm_response(response)
         actions = None
         if llm_response and llm_response.choices:
             actions = llm_response.choices[0]
@@ -589,7 +591,8 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
             prompt=prompt,
         )
 
-        llm_response = await self.invoke_llm(prompt)
+        response = await self.invoke_llm(prompt)
+        llm_response = LLMResponse.ensure_llm_response(response)
         actions = None
         if llm_response and llm_response.choices:
             actions = llm_response.choices[0]
@@ -678,7 +681,8 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
             prompt=prompt,
         )
 
-        llm_response = await self.invoke_llm(prompt)
+        response = await self.invoke_llm(prompt)
+        llm_response = LLMResponse.ensure_llm_response(response)
         actions = None
         if llm_response and llm_response.choices:
             actions = llm_response.choices[0]
@@ -46,6 +46,7 @@ from rasa.shared.core.trackers import DialogueStateTracker
 from rasa.shared.exceptions import ProviderClientAPIException
 from rasa.shared.nlu.constants import TEXT, LLM_COMMANDS, LLM_PROMPT
 from rasa.shared.nlu.training_data.message import Message
+from rasa.shared.providers.llm.llm_response import LLMResponse
 from rasa.shared.utils.io import deep_container_fingerprint
 from rasa.shared.utils.llm import (
     get_prompt_template,
@@ -264,7 +265,8 @@ class SingleStepLLMCommandGenerator(LLMBasedCommandGenerator):
             prompt=flow_prompt,
         )
 
-        llm_response = await self.invoke_llm(flow_prompt)
+        response = await self.invoke_llm(flow_prompt)
+        llm_response = LLMResponse.ensure_llm_response(response)
         # The check for 'None' maintains compatibility with older versions
         # of LLMCommandGenerator. In previous implementations, 'invoke_llm'
         # might return 'None' to indicate a failure to generate actions.
@@ -1,7 +1,14 @@
 from contextlib import contextmanager
 from typing import Generator
 
-record_commands_and_prompts = False
+from rasa.dialogue_understanding.constants import (
+    RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME,
+)
+from rasa.utils.common import get_bool_env_variable
+
+record_commands_and_prompts = get_bool_env_variable(
+    RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME, False
+)
 
 
 @contextmanager
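
record_commands_and_prompts is no longer hard-coded to False; it is now seeded from the RASA_RECORD_COMMANDS_AND_PROMPTS environment variable (via get_bool_env_variable) when the module is first imported. A minimal sketch of the toggle, assuming the variable is set before that first import:

    import os

    # Must be set before rasa.dialogue_understanding.utils is imported,
    # because the flag is evaluated at module import time.
    os.environ["RASA_RECORD_COMMANDS_AND_PROMPTS"] = "true"

    from rasa.dialogue_understanding.utils import record_commands_and_prompts

    print(record_commands_and_prompts)  # expected: True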
@@ -1,12 +1,16 @@
+import os
 import shlex
 import subprocess
-from rasa.__main__ import main
-import os
+import uuid
+from dataclasses import dataclass
 from typing import List
+
 import structlog
-from dataclasses import dataclass
-import uuid
 
+from rasa.__main__ import main
+from rasa.dialogue_understanding.constants import (
+    RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME,
+)
 from rasa.model_manager import config
 from rasa.model_manager.utils import ensure_base_directory_exists, logs_path
 
@@ -43,6 +47,7 @@ def _create_warm_rasa_process() -> WarmRasaProcess:
 
     envs = os.environ.copy()
     envs["RASA_TELEMETRY_ENABLED"] = "false"
+    envs[RASA_RECORD_COMMANDS_AND_PROMPTS_ENV_VAR_NAME] = "true"
 
     log_id = uuid.uuid4().hex
     log_path = logs_path(log_id)
@@ -1,5 +1,8 @@
 from dataclasses import dataclass, field, asdict
-from typing import Dict, List, Optional, Text, Any
+from typing import Dict, List, Optional, Text, Any, Union
+import structlog
+
+structlogger = structlog.get_logger()
 
 
 @dataclass
@@ -59,7 +62,7 @@ class LLMResponse:
         """
         Creates an LLMResponse from a dictionary.
         """
-        usage_data = data.get("usage")
+        usage_data = data.get("usage", {})
         usage_obj = LLMUsage.from_dict(usage_data) if usage_data else None
 
         return cls(
@@ -71,6 +74,15 @@ class LLMResponse:
             additional_info=data.get("additional_info"),
         )
 
+    @classmethod
+    def ensure_llm_response(cls, response: Union[str, "LLMResponse"]) -> "LLMResponse":
+        if isinstance(response, LLMResponse):
+            return response
+
+        structlogger.warn("llm_response.deprecated_response_type", response=response)
+        data = {"id": None, "choices": [response], "created": None}
+        return LLMResponse.from_dict(data)
+
     def to_dict(self) -> dict:
         """Converts the LLMResponse dataclass instance into a dictionary."""
         result = asdict(self)
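
The new ensure_llm_response classmethod is the shim the call sites above rely on: an existing LLMResponse passes through untouched, while a bare string (the legacy return type of the completion helpers) is wrapped into a minimal LLMResponse and a warning event is logged. A short usage sketch based only on what the hunk shows:

    from rasa.shared.providers.llm.llm_response import LLMResponse

    # A legacy plain-string completion is wrapped into an LLMResponse.
    wrapped = LLMResponse.ensure_llm_response("plain completion text")
    print(wrapped.choices)  # ["plain completion text"]

    # An object that is already an LLMResponse is returned as-is.
    assert LLMResponse.ensure_llm_response(wrapped) is wrapped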
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
 # this file will automatically be changed,
 # do not add anything but the version number here!
-__version__ = "3.11.3a1.dev5"
+__version__ = "3.11.3a1.dev6"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: rasa-pro
-Version: 3.11.3a1.dev5
+Version: 3.11.3a1.dev6
 Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
 Home-page: https://rasa.com
 Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
@@ -92,7 +92,7 @@ rasa/cli/x.py,sha256=C7dLtYXAkD-uj7hNj7Pz5YbOupp2yRcMjQbsEVqXUJ8,6825
 rasa/constants.py,sha256=YrrBiJUc0cL5Xrsap6IioNbQ6dKaqDiueqHmMIYkpF0,1348
 rasa/core/__init__.py,sha256=DYHLve7F1yQBVOZTA63efVIwLiULMuihOfdpzw1j0os,457
 rasa/core/actions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-rasa/core/actions/action.py,sha256=H4Mr5WvXHwgZ5ws7ZyAlQ_ESx5ryZvv7cAgNjtyYCn8,45381
+rasa/core/actions/action.py,sha256=5AY1h4o4m14uKoGZgaNViwkREgabF5_x0Fww49upBIk,45492
 rasa/core/actions/action_clean_stack.py,sha256=xUP-2ipPsPAnAiwP17c-ezmHPSrV4JSUZr-eSgPQwIs,2279
 rasa/core/actions/action_exceptions.py,sha256=hghzXYN6VeHC-O_O7WiPesCNV86ZTkHgG90ZnQcbai8,724
 rasa/core/actions/action_hangup.py,sha256=wpXunkGC71krAYZD3BbqzlHLZxNg1mIviwWz0j9Go-c,994
@@ -101,7 +101,7 @@ rasa/core/actions/action_run_slot_rejections.py,sha256=F16a9aMJAw27Rh9wUJu0KYSAP
 rasa/core/actions/action_trigger_chitchat.py,sha256=cJcLg_RhfZx-JyomcBOJabnliuj8Fs1nLvONwPCIbpI,1084
 rasa/core/actions/action_trigger_flow.py,sha256=7pye_4iR_9xedyTntS9l49uEmTf5UXjE0hEFgOodfyw,3487
 rasa/core/actions/action_trigger_search.py,sha256=xKzSHZIi1bcadgzXJwtP_ZLWKz-ehmHUNmesR1brr0s,1064
-rasa/core/actions/constants.py,sha256=gfgdWmj-OJ5xTcTAS1OcXQ3dgcTiHO98NC-SGyKlTjs,161
+rasa/core/actions/constants.py,sha256=7fba-a21R58JMCc6RCr3FJsVZDZtNds-Jv1RHThPhj0,310
 rasa/core/actions/custom_action_executor.py,sha256=SWsy35tsWZTSTvYyXdSqSV8efz_f3OA-dYOh_I_QXy0,6169
 rasa/core/actions/direct_custom_actions_executor.py,sha256=IzxRnPF92zs3WX-p9DoFq51Vf0QwfE6prB_AlyEEllc,3746
 rasa/core/actions/e2e_stub_custom_action_executor.py,sha256=D-kECC1QjVLv4owNxstW2xJPPsXTGfGepvquMeWB_ec,2282
@@ -307,7 +307,7 @@ rasa/core/lock_store.py,sha256=fgdufUYXHEiTcD7NCCqgDAQRRtt7jrKafENHqFKOyi0,12504
 rasa/core/migrate.py,sha256=XNeYdiRytBmBNubOQ8KZOT_wR1o9aOpHHfBU9PCB2eg,14626
 rasa/core/nlg/__init__.py,sha256=0eQOZ0fB35b18oVhRFczcH30jJHgO8WXFhnbXGOxJek,240
 rasa/core/nlg/callback.py,sha256=rFkDe7CSAETASRefpERUT6-DHWPs0UXhx8x4tZ1QE0M,5238
-rasa/core/nlg/contextual_response_rephraser.py,sha256=YGBmSyXnaZMXC6AHQNuDEnP19Ak_rP6FzZHTTCdTk9E,13134
+rasa/core/nlg/contextual_response_rephraser.py,sha256=O6jGUs-vgy3ldhUYPLofwylZpbAObfFKLmpoQ8-CZqk,13173
 rasa/core/nlg/generator.py,sha256=YZ_rh--MeyzA6oXRqr_Ng-jcmPgbCmWMJJrquPmo__8,8436
 rasa/core/nlg/interpolator.py,sha256=Dc-J2Vf6vPPUbwIgZQm3AJDGvMaFTsh9Citd4CYuA9U,5189
 rasa/core/nlg/response.py,sha256=aHpy9BgjO7ub6v-sVPiQqutUA_7-UD1l3DJGVeQyp4k,5888
@@ -315,7 +315,7 @@ rasa/core/nlg/summarize.py,sha256=JO6VCfM_RnU0QX8Us42YkNOxC0ESKV1xcVH_sCW27ZU,21
 rasa/core/persistor.py,sha256=0BZvrA1xObxVtADWLVapj4NOmvqIEen1LKoMOdtZ63s,20337
 rasa/core/policies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/core/policies/ensemble.py,sha256=AjNOEy2Iubbe-LdKaoFUXG8ch6yPrg3bTvcTcAPmeOs,12959
-rasa/core/policies/enterprise_search_policy.py,sha256=-gcmJ8rS0tFMHXs_w_RTlVhhXHss_VqyS44OfkcWhEw,36674
+rasa/core/policies/enterprise_search_policy.py,sha256=JZgzBI6TB0joUZnhBc08ADrA66PPNBCcSW6ARAUm5ew,36718
 rasa/core/policies/enterprise_search_prompt_template.jinja2,sha256=dCS_seyBGxMQoMsOjjvPp0dd31OSzZCJSZeev1FJK5Q,1187
 rasa/core/policies/enterprise_search_prompt_with_citation_template.jinja2,sha256=vRQBs3q13UmvRRgqA8-DmRtM7tqZP2ngwMVJ4gy7lE0,3302
 rasa/core/policies/flow_policy.py,sha256=wGb1l_59cGM9ZaexSIK5uXFi618739oNfLOxx2FC0_Y,7490
@@ -378,21 +378,22 @@ rasa/dialogue_understanding/commands/skip_question_command.py,sha256=bSrUFOHUz1o
 rasa/dialogue_understanding/commands/start_flow_command.py,sha256=a0Yk8xpBpFgC3Hkh4J8kAudz4s4ZLQWuoDq_a63lQXM,3309
 rasa/dialogue_understanding/commands/user_silence_command.py,sha256=QtqsMU5mrbUp5dla2yGSpxXfIfi_h6Eu72mTDZQ_aTU,1724
 rasa/dialogue_understanding/commands/utils.py,sha256=OiyLFGEsrfFSIJcvBY6lTIIXqDY9OxaikVGtcl4Kokk,1911
+rasa/dialogue_understanding/constants.py,sha256=YcELaIss69Hnroclvn90Dl4Suk3S6e3t0UoIbUaXG2A,83
 rasa/dialogue_understanding/generator/__init__.py,sha256=Ykeb2wQ1DuiUWAWO0hLIPSTK1_Ktiq9DZXF6D3ugN78,764
-rasa/dialogue_understanding/generator/command_generator.py,sha256=RCrfvsvIGl9TlhJtiicHoondNb5DAjNvlo3zv0qZ_1w,16500
+rasa/dialogue_understanding/generator/command_generator.py,sha256=Hc_19NVERoEU3pEREWU8RxJSdTxc-JXzRKVYGy5UYk4,16572
 rasa/dialogue_understanding/generator/constants.py,sha256=9Nwjo2Qobioetr9SyyQxsGvEPSbKCVS5ZX1GGJtbA0E,716
 rasa/dialogue_understanding/generator/flow_document_template.jinja2,sha256=f4H6vVd-_nX_RtutMh1xD3ZQE_J2OyuPHAtiltfiAPY,253
 rasa/dialogue_understanding/generator/flow_retrieval.py,sha256=MkwUgQA9xRlAQUdWF2cBEX2tW2PQhBsq2Jsy2vmqWY4,17891
-rasa/dialogue_understanding/generator/llm_based_command_generator.py,sha256=O9sIoDTup2g7l1Uqy6LqMBi-hwZ3OpJk90ZwzhltMtc,17707
+rasa/dialogue_understanding/generator/llm_based_command_generator.py,sha256=VO3ZrotELyfKY_LEw8FJ4bPGTRjYbUvQy4Q6Z5rcPCI,17592
 rasa/dialogue_understanding/generator/llm_command_generator.py,sha256=QpNXhjB9ugtPV8XAHmKjbJtOiI1yE9rC2osbsI_A4ZY,2529
 rasa/dialogue_understanding/generator/multi_step/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/dialogue_understanding/generator/multi_step/fill_slots_prompt.jinja2,sha256=Y0m673tAML3cFPaLM-urMXDsBYUUcXIw9YUpkAhGUuA,2933
 rasa/dialogue_understanding/generator/multi_step/handle_flows_prompt.jinja2,sha256=8l93_QBKBYnqLICVdiTu5ejZDE8F36BU8-qwba0px44,1927
-rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py,sha256=E80aJUrvDgsWcAF3IphpYVT2x0-DcsI-ISwOY38vOlg,34172
+rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py,sha256=CrTEgkhky6s5O7NohqZ9gCD8G0YLq4rKM49ujhrNzr4,34418
 rasa/dialogue_understanding/generator/nlu_command_adapter.py,sha256=pzd1q-syU_QuqTRcfd_GsXyOJaxfApqh_LsOKuEN46g,9332
 rasa/dialogue_understanding/generator/single_step/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/dialogue_understanding/generator/single_step/command_prompt_template.jinja2,sha256=nMayu-heJYH1QmcL1cFmXb8SeiJzfdDR_9Oy5IRUXsM,3937
-rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py,sha256=a72P3SzuSaF0Mmm4b3k4jT4zOGE_RFXBDRlHOI7Px0g,18656
+rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py,sha256=prDAi8i6PrkkljkfI1qh7kL0BXiRzxLvl4XBcMaqqqI,18780
 rasa/dialogue_understanding/patterns/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/dialogue_understanding/patterns/cancel.py,sha256=IQ4GVHNnNCqwKRLlAqBtLsgolcbPPnHsHdb3aOAFhEs,3868
 rasa/dialogue_understanding/patterns/cannot_handle.py,sha256=pg0zJHl-hDBnl6y9IyxZzW57yuMdfD8xI8eiK6EVrG8,1406
@@ -424,7 +425,7 @@ rasa/dialogue_understanding/stack/frames/flow_stack_frame.py,sha256=W4mEmihIN5Bi
 rasa/dialogue_understanding/stack/frames/pattern_frame.py,sha256=EVrYWv5dCP7XTvNV-HqtOOrseP-IkF0jD2_JacAvIYw,235
 rasa/dialogue_understanding/stack/frames/search_frame.py,sha256=rJ9og28k_udUIjP-2Z5xeb_2T5HvCzwDCnxVG9K7lws,728
 rasa/dialogue_understanding/stack/utils.py,sha256=ysH6-IeMwNnKbF1__uMlq6I8zaGXFdMEpw1iYdEz4kA,7650
-rasa/dialogue_understanding/utils.py,sha256=ENXT_1ALY1Ev6Gs8jNz3dm3TC91Y5psp2Np6_L4cHXI,332
+rasa/dialogue_understanding/utils.py,sha256=tw9O_fhuspk64v99B5_lwNZjBIMlpjIKekpyFzMylJ8,566
 rasa/dialogue_understanding_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 rasa/dialogue_understanding_test/constants.py,sha256=rZhBIQV5jFzPTuDtK5WSwS8YKKDLaZ7TMQsaSQwNA2g,486
 rasa/dialogue_understanding_test/du_test_case.py,sha256=Is3brWanixDNXKq_Kr43tcUc4PjoiN-IfJBRwKnL4hU,3656
@@ -520,7 +521,7 @@ rasa/model_manager/socket_bridge.py,sha256=klKaFA-PKNfha5ir0xKqba3Al6igYu3cD7BLI
 rasa/model_manager/studio_jwt_auth.py,sha256=eZ_srnbL2sKIKgx0OZIp29NbIrH2J8PlI8Or0lLg_Xo,2644
 rasa/model_manager/trainer_service.py,sha256=90WYl4fclgPLcLfFgDOtai9VahZx_ikn20PIMg_eSQM,10347
 rasa/model_manager/utils.py,sha256=tgj215CsJreqc4Ym8tAvv-hBieAC94nL0c4caPWIcZM,2643
-rasa/model_manager/warm_rasa_process.py,sha256=xFNP-ANZfUBKs_Sur2deAT2qqatWD3_XZJcUgQy2iiQ,5716
+rasa/model_manager/warm_rasa_process.py,sha256=L6nYjI1vgEjT5zSc13HkS8t-16t7iOGkKZnXuNRf5sc,5887
 rasa/model_service.py,sha256=nj0wNoByYWg5WVd5GtIc5V-RhpVR_xspi-MeNQxurLE,3753
 rasa/model_testing.py,sha256=h0QUpJu6p_TDse3aHjCfYwI6OGH47b3Iuo5Ot0HQADM,14959
 rasa/model_training.py,sha256=gvmJ6bN6TdX6H6qnO5y14I_aYeqi_h1Dxfpavks3paU,21687
@@ -695,7 +696,7 @@ rasa/shared/providers/llm/azure_openai_llm_client.py,sha256=A6sg2bvulNczuzu1J0V7
 rasa/shared/providers/llm/default_litellm_llm_client.py,sha256=1oiUIXr_U5ldyBQZ8cnrV3P7Qw9kMw1yvaVg6mjKkHU,3940
 rasa/shared/providers/llm/litellm_router_llm_client.py,sha256=llko2DfOpiLMpHxnW26I1Hb1wTn7VmZ_yu43GRXhqwQ,6815
 rasa/shared/providers/llm/llm_client.py,sha256=6-gMsEJqquhUPGXzNiq_ybM_McLWxAJ_QhbmWcLnb_Q,2358
-rasa/shared/providers/llm/llm_response.py,sha256=HedtASFXW2GFWS4OAmk-wSjn5dRDFWB8dAkAO2Kdd_M,2426
+rasa/shared/providers/llm/llm_response.py,sha256=rdXMBxbyz4vKnG-0b-NPsyiA1rehrvkU6Tjx1usX2BE,2871
 rasa/shared/providers/llm/openai_llm_client.py,sha256=uDdcugBcO3sfxbduc00eqaZdrJP0VFX5dkBd2Dem47M,4844
 rasa/shared/providers/llm/rasa_llm_client.py,sha256=SpgWn3uHHEezIcyvMfi468zRLw_W8VF6sIs-VIhElPc,3357
 rasa/shared/providers/llm/self_hosted_llm_client.py,sha256=98FaF0-lYnytC46ulhrCAQjUKy9TI0U2QILml__UCzc,9170
@@ -776,9 +777,9 @@ rasa/utils/train_utils.py,sha256=f1NWpp5y6al0dzoQyyio4hc4Nf73DRoRSHDzEK6-C4E,212
 rasa/utils/url_tools.py,sha256=JQcHL2aLqLHu82k7_d9imUoETCm2bmlHaDpOJ-dKqBc,1218
 rasa/utils/yaml.py,sha256=KjbZq5C94ZP7Jdsw8bYYF7HASI6K4-C_kdHfrnPLpSI,2000
 rasa/validator.py,sha256=wl5IKiyDmk6FlDcGO2Js-H-gHPeqVqUJ6hB4fgN0xjI,66796
-rasa/version.py,sha256=30hhHbpQCDfDXVFyr3PTuEccWoj0mUrtnPGHp39doZ0,124
-rasa_pro-3.11.3a1.dev5.dist-info/METADATA,sha256=m_N49daQs1B-kgsaTa3rRAlRqpG2oNRc1LHIA7Oc6JA,10798
-rasa_pro-3.11.3a1.dev5.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
-rasa_pro-3.11.3a1.dev5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-rasa_pro-3.11.3a1.dev5.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
-rasa_pro-3.11.3a1.dev5.dist-info/RECORD,,
+rasa/version.py,sha256=Bhg94N2gHc9Q25ztAiy105xQbjhAUGrjG2rKzFAvRpg,124
+rasa_pro-3.11.3a1.dev6.dist-info/METADATA,sha256=ab_MAK0yJM6BOlDfR49clYgd1lmlHG-MFTZztDSaIGs,10798
+rasa_pro-3.11.3a1.dev6.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
+rasa_pro-3.11.3a1.dev6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+rasa_pro-3.11.3a1.dev6.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
+rasa_pro-3.11.3a1.dev6.dist-info/RECORD,,