rasa-pro 3.12.6.dev2__py3-none-any.whl → 3.12.7.dev1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in the public registry.

This version of rasa-pro has been flagged as a potentially problematic release.

Files changed (55)
  1. rasa/__init__.py +0 -6
  2. rasa/cli/run.py +10 -6
  3. rasa/cli/utils.py +7 -0
  4. rasa/core/actions/action.py +0 -6
  5. rasa/core/channels/voice_ready/audiocodes.py +46 -17
  6. rasa/core/nlg/contextual_response_rephraser.py +4 -21
  7. rasa/core/nlg/summarize.py +1 -15
  8. rasa/core/policies/enterprise_search_policy.py +3 -16
  9. rasa/core/policies/flows/flow_executor.py +3 -38
  10. rasa/core/policies/intentless_policy.py +4 -17
  11. rasa/core/policies/policy.py +0 -2
  12. rasa/core/processor.py +19 -5
  13. rasa/core/utils.py +53 -0
  14. rasa/dialogue_understanding/coexistence/llm_based_router.py +4 -18
  15. rasa/dialogue_understanding/commands/cancel_flow_command.py +4 -59
  16. rasa/dialogue_understanding/commands/start_flow_command.py +0 -41
  17. rasa/dialogue_understanding/generator/command_generator.py +67 -0
  18. rasa/dialogue_understanding/generator/llm_based_command_generator.py +4 -20
  19. rasa/dialogue_understanding/generator/llm_command_generator.py +1 -3
  20. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +1 -12
  21. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +0 -61
  22. rasa/dialogue_understanding/processor/command_processor.py +7 -65
  23. rasa/dialogue_understanding/stack/utils.py +0 -38
  24. rasa/e2e_test/utils/validation.py +3 -3
  25. rasa/hooks.py +0 -55
  26. rasa/shared/constants.py +0 -5
  27. rasa/shared/core/constants.py +0 -8
  28. rasa/shared/core/domain.py +12 -3
  29. rasa/shared/core/flows/flow.py +0 -17
  30. rasa/shared/core/flows/flows_yaml_schema.json +3 -38
  31. rasa/shared/core/flows/steps/collect.py +5 -18
  32. rasa/shared/core/flows/utils.py +1 -16
  33. rasa/shared/core/slot_mappings.py +11 -5
  34. rasa/shared/nlu/constants.py +0 -1
  35. rasa/shared/providers/constants.py +0 -9
  36. rasa/shared/providers/llm/_base_litellm_client.py +4 -14
  37. rasa/shared/providers/llm/litellm_router_llm_client.py +7 -17
  38. rasa/shared/providers/llm/llm_client.py +15 -24
  39. rasa/shared/providers/llm/self_hosted_llm_client.py +2 -10
  40. rasa/shared/utils/common.py +11 -1
  41. rasa/shared/utils/health_check/health_check.py +1 -7
  42. rasa/tracing/instrumentation/attribute_extractors.py +4 -4
  43. rasa/tracing/instrumentation/intentless_policy_instrumentation.py +1 -2
  44. rasa/utils/licensing.py +0 -15
  45. rasa/validator.py +1 -123
  46. rasa/version.py +1 -1
  47. {rasa_pro-3.12.6.dev2.dist-info → rasa_pro-3.12.7.dev1.dist-info}/METADATA +3 -4
  48. {rasa_pro-3.12.6.dev2.dist-info → rasa_pro-3.12.7.dev1.dist-info}/RECORD +51 -55
  49. rasa/core/actions/action_handle_digressions.py +0 -164
  50. rasa/dialogue_understanding/commands/handle_digressions_command.py +0 -144
  51. rasa/dialogue_understanding/patterns/handle_digressions.py +0 -81
  52. rasa/monkey_patches.py +0 -91
  53. {rasa_pro-3.12.6.dev2.dist-info → rasa_pro-3.12.7.dev1.dist-info}/NOTICE +0 -0
  54. {rasa_pro-3.12.6.dev2.dist-info → rasa_pro-3.12.7.dev1.dist-info}/WHEEL +0 -0
  55. {rasa_pro-3.12.6.dev2.dist-info → rasa_pro-3.12.7.dev1.dist-info}/entry_points.txt +0 -0

rasa/dialogue_understanding/commands/cancel_flow_command.py

@@ -1,6 +1,5 @@
 from __future__ import annotations

-import copy
 import re
 from dataclasses import dataclass
 from typing import Any, Dict, List
@@ -13,10 +12,11 @@ from rasa.dialogue_understanding.commands.command_syntax_manager import (
     CommandSyntaxVersion,
 )
 from rasa.dialogue_understanding.patterns.cancel import CancelPatternFlowStackFrame
-from rasa.dialogue_understanding.patterns.clarify import ClarifyPatternFlowStackFrame
 from rasa.dialogue_understanding.stack.dialogue_stack import DialogueStack
-from rasa.dialogue_understanding.stack.frames import UserFlowStackFrame
-from rasa.dialogue_understanding.stack.frames.flow_stack_frame import FlowStackFrameType
+from rasa.dialogue_understanding.stack.frames.flow_stack_frame import (
+    FlowStackFrameType,
+    UserFlowStackFrame,
+)
 from rasa.dialogue_understanding.stack.utils import top_user_flow_frame
 from rasa.shared.core.events import Event, FlowCancelled
 from rasa.shared.core.flows import FlowsList
@@ -95,8 +95,6 @@ class CancelFlowCommand(Command):
         original_stack = original_tracker.stack

         applied_events: List[Event] = []
-        # capture the top frame before we push new frames onto the stack
-        initial_top_frame = stack.top()
         user_frame = top_user_flow_frame(original_stack)
         current_flow = user_frame.flow(all_flows) if user_frame else None

@@ -123,21 +121,6 @@ class CancelFlowCommand(Command):
         if user_frame:
             applied_events.append(FlowCancelled(user_frame.flow_id, user_frame.step_id))

-        if initial_top_frame and isinstance(
-            initial_top_frame, ClarifyPatternFlowStackFrame
-        ):
-            structlogger.debug(
-                "command_executor.cancel_flow.cancel_clarification_options",
-                clarification_options=initial_top_frame.clarification_options,
-            )
-            applied_events += cancel_all_pending_clarification_options(
-                initial_top_frame,
-                original_stack,
-                canceled_frames,
-                all_flows,
-                stack,
-            )
-
         return applied_events + tracker.create_stack_updated_events(stack)

     def __hash__(self) -> int:
@@ -172,41 +155,3 @@ class CancelFlowCommand(Command):
             CommandSyntaxManager.get_syntax_version(),
             mapper[CommandSyntaxManager.get_default_syntax_version()],
         )
-
-
-def cancel_all_pending_clarification_options(
-    initial_top_frame: ClarifyPatternFlowStackFrame,
-    original_stack: DialogueStack,
-    canceled_frames: List[str],
-    all_flows: FlowsList,
-    stack: DialogueStack,
-) -> List[FlowCancelled]:
-    """Cancel all pending clarification options.
-
-    This is a special case when the assistant asks the user to clarify
-    which pending digression flow to start after the completion of an active flow.
-    If the user chooses to cancel all options, this function takes care of
-    updating the stack by removing all pending flow stack frames
-    listed as clarification options.
-    """
-    clarification_names = set(initial_top_frame.names)
-    to_be_canceled_frames = []
-    applied_events = []
-    for frame in reversed(original_stack.frames):
-        if frame.frame_id in canceled_frames:
-            continue
-
-        to_be_canceled_frames.append(frame.frame_id)
-        if isinstance(frame, UserFlowStackFrame):
-            readable_flow_name = frame.flow(all_flows).readable_name()
-            if readable_flow_name in clarification_names:
-                stack.push(
-                    CancelPatternFlowStackFrame(
-                        canceled_name=readable_flow_name,
-                        canceled_frames=copy.deepcopy(to_be_canceled_frames),
-                    )
-                )
-                applied_events.append(FlowCancelled(frame.flow_id, frame.step_id))
-                to_be_canceled_frames.clear()
-
-    return applied_events

rasa/dialogue_understanding/commands/start_flow_command.py

@@ -11,11 +11,6 @@ from rasa.dialogue_understanding.commands.command_syntax_manager import (
     CommandSyntaxManager,
     CommandSyntaxVersion,
 )
-from rasa.dialogue_understanding.patterns.clarify import FLOW_PATTERN_CLARIFICATION
-from rasa.dialogue_understanding.patterns.continue_interrupted import (
-    ContinueInterruptedPatternFlowStackFrame,
-)
-from rasa.dialogue_understanding.stack.dialogue_stack import DialogueStack
 from rasa.dialogue_understanding.stack.frames.flow_stack_frame import (
     FlowStackFrameType,
     UserFlowStackFrame,
@@ -77,10 +72,6 @@ class StartFlowCommand(Command):
         applied_events: List[Event] = []

         if self.flow in user_flows_on_the_stack(stack):
-            top_frame = stack.top()
-            if top_frame is not None and top_frame.type() == FLOW_PATTERN_CLARIFICATION:
-                return self.change_flow_frame_position_in_the_stack(stack, tracker)
-
             structlogger.debug(
                 "command_executor.skip_command.already_started_flow", command=self
             )
@@ -149,35 +140,3 @@ class StartFlowCommand(Command):
             CommandSyntaxManager.get_syntax_version(),
             mapper[CommandSyntaxManager.get_default_syntax_version()],
         )
-
-    def change_flow_frame_position_in_the_stack(
-        self, stack: DialogueStack, tracker: DialogueStateTracker
-    ) -> List[Event]:
-        """Changes the position of the flow frame in the stack.
-
-        This is a special case when pattern clarification is the active flow and
-        the same flow is selected to start. In this case, the existing flow frame
-        should be moved up in the stack.
-        """
-        frames = stack.frames[:]
-
-        for idx, frame in enumerate(frames):
-            if isinstance(frame, UserFlowStackFrame) and frame.flow_id == self.flow:
-                structlogger.debug(
-                    "command_executor.change_flow_position_during_clarification",
-                    command=self,
-                    index=idx,
-                )
-                # pop the continue interrupted flow frame if it exists
-                next_frame = frames[idx + 1] if idx + 1 < len(frames) else None
-                if (
-                    isinstance(next_frame, ContinueInterruptedPatternFlowStackFrame)
-                    and next_frame.previous_flow_name == self.flow
-                ):
-                    stack.frames.pop(idx + 1)
-                # move up the existing flow from the stack
-                stack.frames.pop(idx)
-                stack.push(frame)
-                return tracker.create_stack_updated_events(stack)
-
-        return []

rasa/dialogue_understanding/generator/command_generator.py

@@ -4,6 +4,7 @@ from typing import Any, Dict, List, Optional, Set, Text, Tuple
 import structlog

 from rasa.dialogue_understanding.commands import (
+    CannotHandleCommand,
     Command,
     CorrectSlotsCommand,
     ErrorCommand,
@@ -107,6 +108,14 @@ class CommandGenerator:
         commands = self._check_commands_against_startable_flows(
             commands, startable_flows
         )
+
+        # During force slot filling, keep only the command that sets the
+        # slot asked by the active collect step.
+        # Or return a CannotHandleCommand if no matching command is found.
+        commands = self._filter_commands_during_force_slot_filling(
+            commands, available_flows, tracker
+        )
+
         commands_dicts = [command.as_dict() for command in commands]
         message.set(COMMANDS, commands_dicts, add_to_output=True)

@@ -370,6 +379,64 @@ class CommandGenerator:
             Command.command_from_json(command) for command in message.get(COMMANDS, [])
         ]

+    @staticmethod
+    def _filter_commands_during_force_slot_filling(
+        commands: List[Command],
+        available_flows: FlowsList,
+        tracker: Optional[DialogueStateTracker] = None,
+    ) -> List[Command]:
+        """Filter commands during a collect step that has set `force_slot_filling`.
+
+        Args:
+            commands: The commands to filter.
+            available_flows: The available flows.
+            tracker: The tracker.
+
+        Returns:
+            The filtered commands.
+        """
+        from rasa.dialogue_understanding.processor.command_processor import (
+            get_current_collect_step,
+        )
+
+        if tracker is None:
+            structlogger.error(
+                "command_generator.filter_commands_during_force_slot_filling.tracker_not_found",
+            )
+            return commands
+
+        stack = tracker.stack
+        step = get_current_collect_step(stack, available_flows)
+
+        if step is None or not step.force_slot_filling:
+            return commands
+
+        # Retain only the command that sets the slot asked by
+        # the active collect step
+        filtered_commands: List[Command] = [
+            command
+            for command in commands
+            if (isinstance(command, SetSlotCommand) and command.name == step.collect)
+        ]
+
+        if not filtered_commands:
+            # If no commands were predicted, we need to return a CannotHandleCommand
+            structlogger.debug(
+                "command_generator.filter_commands_during_force_slot_filling.no_commands",
+                event_info=f"The command generator did not find any SetSlot "
+                f"command at the collect step for the slot '{step.collect}'. "
+                f"Returning a CannotHandleCommand instead.",
+            )
+            return [CannotHandleCommand()]
+
+        structlogger.debug(
+            "command_generator.filter_commands_during_force_slot_filling.filtered_commands",
+            slot_name=step.collect,
+            filtered_commands=filtered_commands,
+        )
+
+        return filtered_commands
+

 def gather_slot_names(commands: List[Command]) -> Set[str]:
     """Gather all slot names from the commands."""

rasa/dialogue_understanding/generator/llm_based_command_generator.py

@@ -12,9 +12,6 @@ from rasa.dialogue_understanding.commands import (
     SetSlotCommand,
     StartFlowCommand,
 )
-from rasa.dialogue_understanding.commands.handle_digressions_command import (
-    HandleDigressionsCommand,
-)
 from rasa.dialogue_understanding.constants import KEY_MINIMIZE_NUM_CALLS
 from rasa.dialogue_understanding.generator import CommandGenerator
 from rasa.dialogue_understanding.generator._jinja_filters import to_json_escaped_string
@@ -49,7 +46,6 @@ from rasa.shared.utils.llm import (
     llm_factory,
     resolve_model_client_config,
 )
-from rasa.utils.licensing import get_human_readable_licence_owner
 from rasa.utils.log_utils import log_llm

 structlogger = structlog.get_logger()
@@ -93,8 +89,6 @@ class LLMBasedCommandGenerator(
         else:
             self.flow_retrieval = None

-        self.user_id = get_human_readable_licence_owner()
-
     ### Abstract methods
     @staticmethod
     @abstractmethod
@@ -334,9 +328,7 @@ class LLMBasedCommandGenerator(

     @measure_llm_latency
     async def invoke_llm(
-        self,
-        prompt: Union[List[dict], List[str], str],
-        metadata: Optional[Dict[str, Any]] = None,
+        self, prompt: Union[List[dict], List[str], str]
     ) -> Optional[LLMResponse]:
         """Use LLM to generate a response.

@@ -349,7 +341,6 @@
                 - a list of messages. Each message is a string and will be formatted
                   as a user message.
                 - a single message as a string which will be formatted as user message.
-            metadata: Optional metadata to be passed to the LLM call.

         Returns:
             An LLMResponse object.
@@ -361,7 +352,7 @@
             self.config.get(LLM_CONFIG_KEY), self.get_default_llm_config()
         )
         try:
-            return await llm.acompletion(prompt, metadata)
+            return await llm.acompletion(prompt)
         except Exception as e:
             # unfortunately, langchain does not wrap LLM exceptions which means
             # we have to catch all exceptions here
@@ -616,16 +607,9 @@
     ) -> bool:
         """Check if the LLM current commands should be merged with the prior commands.

-        This can be done if there are no prior start flow commands and
-        no prior handle digressions commands.
+        This can be done if there are no prior start flow commands.
         """
-        prior_handle_digressions = [
-            command
-            for command in prior_commands
-            if isinstance(command, HandleDigressionsCommand)
-        ]
-
-        return not prior_start_flow_names and not prior_handle_digressions
+        return not prior_start_flow_names

     def _check_start_flow_command_overlap(
         self,

rasa/dialogue_understanding/generator/llm_command_generator.py

@@ -55,9 +55,7 @@ class LLMCommandGenerator(SingleStepLLMCommandGenerator):
         )

     async def invoke_llm(
-        self,
-        prompt: Union[List[dict], List[str], str],
-        metadata: Optional[Dict[str, Any]] = None,
+        self, prompt: Union[List[dict], List[str], str]
     ) -> Optional[LLMResponse]:
         try:
             return await super().invoke_llm(prompt)

rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py

@@ -47,10 +47,6 @@ from rasa.shared.constants import (
     AWS_BEDROCK_PROVIDER,
     AZURE_OPENAI_PROVIDER,
     EMBEDDINGS_CONFIG_KEY,
-    LANGFUSE_CUSTOM_METADATA_DICT,
-    LANGFUSE_METADATA_SESSION_ID,
-    LANGFUSE_METADATA_USER_ID,
-    LANGFUSE_TAGS,
     MAX_TOKENS_CONFIG_KEY,
     PROMPT_TEMPLATE_CONFIG_KEY,
     ROUTE_TO_CALM_SLOT,
@@ -366,14 +362,7 @@ class CompactLLMCommandGenerator(LLMBasedCommandGenerator):
             prompt=flow_prompt,
         )

-        metadata = {
-            LANGFUSE_METADATA_USER_ID: self.user_id,
-            LANGFUSE_METADATA_SESSION_ID: tracker.sender_id if tracker else "",
-            LANGFUSE_CUSTOM_METADATA_DICT: {"component": self.__class__.__name__},
-            LANGFUSE_TAGS: [self.__class__.__name__],
-        }
-
-        response = await self.invoke_llm(flow_prompt, metadata)
+        response = await self.invoke_llm(flow_prompt)
         llm_response = LLMResponse.ensure_llm_response(response)
         # The check for 'None' maintains compatibility with older versions
         # of LLMCommandGenerator. In previous implementations, 'invoke_llm'
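
Across `llm_based_command_generator.py`, `llm_command_generator.py`, and `compact_llm_command_generator.py`, the optional `metadata` argument is dropped from `invoke_llm`, and the Langfuse metadata block in the compact generator goes away with it; callers now pass only the prompt to `llm.acompletion(...)`. A minimal sketch of the updated call shape, using a stub client rather than the real Rasa LLM client:

```python
import asyncio
from typing import List, Optional, Union


class StubLLMClient:
    """Illustrative stand-in for the configured LLM client."""

    async def acompletion(self, prompt: Union[List[dict], List[str], str]) -> str:
        # A real client would call the model provider here; we simply echo the prompt.
        return f"completion for: {prompt!r}"


async def invoke_llm(prompt: Union[List[dict], List[str], str]) -> Optional[str]:
    """New call shape: only the prompt is passed, no per-call metadata argument."""
    llm = StubLLMClient()
    try:
        return await llm.acompletion(prompt)
    except Exception:
        # Provider exceptions are not wrapped, so they are caught broadly, as the diff notes.
        return None


print(asyncio.run(invoke_llm("Which flow should be started for 'send money to Joe'?")))
```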

rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml

@@ -1,17 +1,6 @@
 version: "3.1"
 responses:

-  utter_ask_continue_previous_flow:
-    - text: "Confirm if you would like to continue with the initial topic: {{context.interrupted_flow_id}}?"
-      metadata:
-        rephrase: True
-        template: jinja
-      buttons:
-        - title: Continue with the previous topic.
-          payload: /SetSlots(continue_previous_flow=True)
-        - title: Switch to new topic.
-          payload: /SetSlots(continue_previous_flow=False)
-
   utter_ask_rephrase:
     - text: I’m sorry I am unable to understand you, could you please rephrase?

@@ -20,20 +9,6 @@ responses:
       metadata:
         rephrase: True

-  utter_block_digressions:
-    - text: "We can look into {{ context.interrupting_flow_id }} later. Let's focus on the current topic: {{ context.interrupted_flow_id }}."
-      metadata:
-        rephrase: True
-        template: jinja
-    - text: "Let's continue with the current topic: {{ context.interrupted_flow_id }}."
-      condition:
-        - type: slot
-          name: continue_previous_flow
-          value: True
-      metadata:
-        rephrase: True
-        template: jinja
-
   utter_boolean_slot_rejection:
     - text: "Sorry, the value you provided, `{{value}}`, is not valid. Please respond with a valid value."
       metadata:
@@ -60,12 +35,6 @@ responses:
         rephrase: True
         template: jinja

-  utter_continue_interruption:
-    - text: "Let's continue with the chosen topic instead: {{ context.interrupting_flow_id }}."
-      metadata:
-        rephrase: True
-        template: jinja
-
   utter_corrected_previous_input:
     - text: "Ok, I am updating {{ context.corrected_slots.keys()|join(', ') }} to {{ context.new_slot_values | join(', ') }} respectively."
       metadata:
@@ -150,10 +119,6 @@ slots:
     type: float
     initial_value: 0.0
     max_value: 1000000
-  continue_previous_flow:
-    type: bool
-    mappings:
-      - type: from_llm

 flows:
   pattern_cancel_flow:
@@ -197,7 +162,6 @@ flows:
     steps:
       - action: action_clarify_flows
       - action: utter_clarification_options_rasa
-      - action: action_listen

   pattern_code_change:
     description: Conversation repair flow for cleaning the stack after an assistant update
@@ -247,31 +211,6 @@ flows:
             next: END
           - else: END

-  pattern_handle_digressions:
-    description: Conversation repair flow for handling digressions
-    name: pattern handle digressions
-    steps:
-      - noop: true
-        id: branching
-        next:
-          - if: context.ask_confirm_digressions contains context.interrupting_flow_id
-            then: continue_previous_flow
-          - if: context.block_digressions contains context.interrupting_flow_id
-            then: block_digression
-          - else: continue_digression
-      - id: continue_previous_flow
-        collect: continue_previous_flow
-        next:
-          - if: slots.continue_previous_flow
-            then: block_digression
-          - else: continue_digression
-      - id: block_digression
-        action: action_block_digression
-        next: END
-      - id: continue_digression
-        action: action_continue_digression
-        next: END
-
   pattern_human_handoff:
     description: Conversation repair flow for switching users to a human agent if their request can't be handled
     name: pattern human handoff

rasa/dialogue_understanding/processor/command_processor.py

@@ -18,9 +18,6 @@ from rasa.dialogue_understanding.commands import (
 from rasa.dialogue_understanding.commands.handle_code_change_command import (
     HandleCodeChangeCommand,
 )
-from rasa.dialogue_understanding.commands.handle_digressions_command import (
-    HandleDigressionsCommand,
-)
 from rasa.dialogue_understanding.commands.set_slot_command import SetSlotExtractor
 from rasa.dialogue_understanding.commands.utils import (
     create_validate_frames_from_slot_set_events,
@@ -454,21 +451,7 @@ def clean_up_commands(
                 )
                 continue

-            if should_add_handle_digressions_command(tracker, all_flows, top_flow_id):
-                handle_digression_command = HandleDigressionsCommand(flow=command.flow)
-                if handle_digression_command in clean_commands:
-                    structlogger.debug(
-                        "command_processor.clean_up_commands.skip_handle_digressions.command_already_present",
-                        command=handle_digression_command,
-                    )
-                    continue
-                clean_commands.append(handle_digression_command)
-                structlogger.debug(
-                    "command_processor.clean_up_commands.push_handle_digressions",
-                    command=command,
-                )
-            else:
-                clean_commands.append(command)
+            clean_commands.append(command)

         # handle chitchat command differently from other free-form answer commands
         elif isinstance(command, ChitChatAnswerCommand):
@@ -503,21 +486,9 @@ def clean_up_commands(
     # when coexistence is enabled, by default there will be a SetSlotCommand
     # for the ROUTE_TO_CALM_SLOT slot.
     if tracker.has_coexistence_routing_slot and len(clean_commands) > 2:
-        clean_commands = filter_cannot_handle_command_for_skipped_slots(clean_commands)
+        clean_commands = filter_cannot_handle_command(clean_commands)
     elif not tracker.has_coexistence_routing_slot and len(clean_commands) > 1:
-        clean_commands = filter_cannot_handle_command_for_skipped_slots(clean_commands)
-
-    # remove cancel flow when there is a handle digression command
-    # otherwise the cancel command will cancel the active flow which defined a specific
-    # behavior for the digression
-    if contains_command(clean_commands, HandleDigressionsCommand) and contains_command(
-        clean_commands, CancelFlowCommand
-    ):
-        clean_commands = [
-            command
-            for command in clean_commands
-            if not isinstance(command, CancelFlowCommand)
-        ]
+        clean_commands = filter_cannot_handle_command(clean_commands)

     clean_commands = ensure_max_number_of_command_type(
         clean_commands, RepeatBotMessagesCommand, 1
@@ -857,12 +828,12 @@ def should_slot_be_set(
     return True


-def filter_cannot_handle_command_for_skipped_slots(
+def filter_cannot_handle_command(
     clean_commands: List[Command],
 ) -> List[Command]:
-    """Filter out a 'cannot handle' command for skipped slots.
+    """Filter out a 'cannot handle' command.

-    This is used to filter out a 'cannot handle' command for skipped slots
+    This is used to filter out a 'cannot handle' command
     in case other commands are present.

     Returns:
@@ -871,34 +842,5 @@ def filter_cannot_handle_command_for_skipped_slots(
     return [
         command
         for command in clean_commands
-        if not (
-            isinstance(command, CannotHandleCommand)
-            and command.reason
-            and CANNOT_HANDLE_REASON == command.reason
-        )
+        if not isinstance(command, CannotHandleCommand)
     ]
-
-
-def should_add_handle_digressions_command(
-    tracker: DialogueStateTracker, all_flows: FlowsList, top_flow_id: str
-) -> bool:
-    """Check if a handle digressions command should be added to the commands.
-
-    The command should replace a StartFlow command only if we are at a collect step of
-    a flow and a new flow is predicted by the command generator to start.
-    """
-    current_flow = all_flows.flow_by_id(top_flow_id)
-    current_flow_condition = current_flow and (
-        current_flow.ask_confirm_digressions or current_flow.block_digressions
-    )
-
-    collect_info = get_current_collect_step(tracker.stack, all_flows)
-
-    if collect_info and (
-        collect_info.ask_confirm_digressions
-        or collect_info.block_digressions
-        or current_flow_condition
-    ):
-        return True
-
-    return False
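
`filter_cannot_handle_command_for_skipped_slots` is renamed to `filter_cannot_handle_command` and broadened: when other commands are present, every `CannotHandleCommand` is now dropped, not only those carrying the skipped-slots reason. A short illustrative sketch of the new behaviour (stand-in classes, not the Rasa Pro types):

```python
from dataclasses import dataclass
from typing import List


@dataclass
class CannotHandle:  # illustrative stand-in for CannotHandleCommand
    reason: str = ""


@dataclass
class SetSlot:  # illustrative stand-in for SetSlotCommand
    name: str
    value: str


def filter_cannot_handle(commands: List[object]) -> List[object]:
    """Drop every CannotHandle command, regardless of its reason."""
    return [c for c in commands if not isinstance(c, CannotHandle)]


commands = [CannotHandle(reason="some_other_reason"), SetSlot(name="email", value="jo@example.com")]
print(filter_cannot_handle(commands))  # only the SetSlot remains
```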

rasa/dialogue_understanding/stack/utils.py

@@ -4,9 +4,6 @@ from typing import List, Optional, Set, Tuple
 from rasa.dialogue_understanding.patterns.collect_information import (
     CollectInformationPatternFlowStackFrame,
 )
-from rasa.dialogue_understanding.patterns.continue_interrupted import (
-    ContinueInterruptedPatternFlowStackFrame,
-)
 from rasa.dialogue_understanding.stack.dialogue_stack import DialogueStack
 from rasa.dialogue_understanding.stack.frames import (
     BaseFlowStackFrame,
@@ -221,38 +218,3 @@ def get_collect_steps_excluding_ask_before_filling_for_active_flow(
         for step in active_flow.get_collect_steps()
         if not step.ask_before_filling
     )
-
-
-def remove_digression_from_stack(stack: DialogueStack, flow_id: str) -> DialogueStack:
-    """Remove a specific flow frame from the stack and other frames that reference it.
-
-    The main use-case is to prevent duplicate digressions from being added to the stack.
-
-    Args:
-        stack: The dialogue stack.
-        flow_id: The flow to remove.
-
-    Returns:
-        The updated dialogue stack.
-    """
-    updated_stack = stack.copy()
-    original_frames = updated_stack.frames[:]
-    found_digression_index = -1
-    for index, frame in enumerate(original_frames):
-        if isinstance(frame, BaseFlowStackFrame) and frame.flow_id == flow_id:
-            updated_stack.frames.pop(index)
-            found_digression_index = index
-
-        # we also need to remove the `ContinueInterruptedPatternFlowStackFrame`
-        elif (
-            isinstance(frame, ContinueInterruptedPatternFlowStackFrame)
-            and frame.previous_flow_name == flow_id
-            and found_digression_index + 1 == index
-        ):
-            # we know that this frame is always added after the digressing flow frame
-            # that was blocked previously by action_block_digressions,
-            # so this check would occur after the digressing flow was popped.
-            # Therefore, we need to update the index dynamically before popping.
-            updated_stack.frames.pop(index - 1)
-
-    return updated_stack

rasa/e2e_test/utils/validation.py

@@ -7,6 +7,7 @@ import structlog
 import rasa.shared.utils.io
 from rasa.e2e_test.constants import SCHEMA_FILE_PATH
 from rasa.e2e_test.e2e_test_case import Fixture, Metadata
+from rasa.exceptions import ModelNotFound
 from rasa.shared.utils.yaml import read_schema_file

 if TYPE_CHECKING:
@@ -152,10 +153,9 @@ def validate_model_path(model_path: Optional[str], parameter: str, default: str)
         return model_path

     if model_path and not Path(model_path).exists():
-        rasa.shared.utils.io.raise_warning(
+        raise ModelNotFound(
             f"The provided model path '{model_path}' could not be found. "
-            f"Using default location '{default}' instead.",
-            UserWarning,
+            "Provide an existing model path."
         )

     elif model_path is None:
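
With this change, a model path that does not exist now raises `ModelNotFound` (imported from `rasa.exceptions`) instead of emitting a warning and falling back to the default location. Callers that relied on the silent fallback need to catch the exception; a hedged sketch, assuming `validate_model_path` is imported from `rasa.e2e_test.utils.validation` as the changed file suggests:

```python
import sys

# Import paths assumed from the changed files; adjust to your installed version.
from rasa.e2e_test.utils.validation import validate_model_path
from rasa.exceptions import ModelNotFound


def resolve_model(cli_model_path: str) -> str:
    try:
        # In 3.12.7.dev1 a missing path raises instead of silently using the default.
        return validate_model_path(cli_model_path, parameter="model", default="models/")
    except ModelNotFound as exc:
        print(f"Model could not be located: {exc}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    resolve_model(sys.argv[1] if len(sys.argv) > 1 else "models/latest.tar.gz")
```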