rasa-pro 3.12.0.dev12__py3-none-any.whl → 3.12.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (153)
  1. rasa/anonymization/anonymization_rule_executor.py +16 -10
  2. rasa/cli/data.py +16 -0
  3. rasa/cli/inspect.py +20 -1
  4. rasa/cli/project_templates/calm/config.yml +2 -2
  5. rasa/cli/project_templates/calm/endpoints.yml +2 -2
  6. rasa/cli/shell.py +3 -3
  7. rasa/cli/utils.py +12 -0
  8. rasa/core/actions/action.py +99 -193
  9. rasa/core/actions/action_handle_digressions.py +142 -0
  10. rasa/core/actions/action_run_slot_rejections.py +16 -4
  11. rasa/core/actions/forms.py +10 -5
  12. rasa/core/channels/__init__.py +4 -0
  13. rasa/core/channels/studio_chat.py +19 -0
  14. rasa/core/channels/telegram.py +42 -24
  15. rasa/core/channels/voice_ready/audiocodes.py +42 -23
  16. rasa/core/channels/voice_ready/utils.py +1 -1
  17. rasa/core/channels/voice_stream/asr/asr_engine.py +10 -4
  18. rasa/core/channels/voice_stream/asr/azure.py +14 -1
  19. rasa/core/channels/voice_stream/asr/deepgram.py +20 -4
  20. rasa/core/channels/voice_stream/audiocodes.py +264 -0
  21. rasa/core/channels/voice_stream/browser_audio.py +5 -1
  22. rasa/core/channels/voice_stream/call_state.py +10 -1
  23. rasa/core/channels/voice_stream/genesys.py +335 -0
  24. rasa/core/channels/voice_stream/tts/azure.py +11 -2
  25. rasa/core/channels/voice_stream/tts/cartesia.py +29 -10
  26. rasa/core/channels/voice_stream/twilio_media_streams.py +2 -1
  27. rasa/core/channels/voice_stream/voice_channel.py +25 -3
  28. rasa/core/constants.py +2 -0
  29. rasa/core/migrate.py +2 -2
  30. rasa/core/nlg/contextual_response_rephraser.py +18 -1
  31. rasa/core/nlg/generator.py +83 -15
  32. rasa/core/nlg/response.py +6 -3
  33. rasa/core/nlg/translate.py +55 -0
  34. rasa/core/policies/enterprise_search_prompt_with_citation_template.jinja2 +1 -1
  35. rasa/core/policies/flows/flow_executor.py +47 -46
  36. rasa/core/processor.py +72 -9
  37. rasa/core/run.py +4 -3
  38. rasa/dialogue_understanding/commands/can_not_handle_command.py +20 -2
  39. rasa/dialogue_understanding/commands/cancel_flow_command.py +80 -4
  40. rasa/dialogue_understanding/commands/change_flow_command.py +20 -2
  41. rasa/dialogue_understanding/commands/chit_chat_answer_command.py +20 -2
  42. rasa/dialogue_understanding/commands/clarify_command.py +29 -3
  43. rasa/dialogue_understanding/commands/command.py +1 -16
  44. rasa/dialogue_understanding/commands/command_syntax_manager.py +55 -0
  45. rasa/dialogue_understanding/commands/correct_slots_command.py +11 -2
  46. rasa/dialogue_understanding/commands/handle_digressions_command.py +150 -0
  47. rasa/dialogue_understanding/commands/human_handoff_command.py +20 -2
  48. rasa/dialogue_understanding/commands/knowledge_answer_command.py +20 -2
  49. rasa/dialogue_understanding/commands/prompt_command.py +94 -0
  50. rasa/dialogue_understanding/commands/repeat_bot_messages_command.py +20 -2
  51. rasa/dialogue_understanding/commands/set_slot_command.py +29 -15
  52. rasa/dialogue_understanding/commands/skip_question_command.py +20 -2
  53. rasa/dialogue_understanding/commands/start_flow_command.py +61 -2
  54. rasa/dialogue_understanding/commands/utils.py +98 -4
  55. rasa/dialogue_understanding/constants.py +1 -0
  56. rasa/dialogue_understanding/generator/__init__.py +2 -0
  57. rasa/dialogue_understanding/generator/command_generator.py +110 -73
  58. rasa/dialogue_understanding/generator/command_parser.py +16 -13
  59. rasa/dialogue_understanding/generator/constants.py +3 -0
  60. rasa/dialogue_understanding/generator/llm_based_command_generator.py +170 -5
  61. rasa/dialogue_understanding/generator/llm_command_generator.py +5 -3
  62. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +26 -4
  63. rasa/dialogue_understanding/generator/nlu_command_adapter.py +44 -3
  64. rasa/dialogue_understanding/generator/prompt_templates/__init__.py +0 -0
  65. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_template.jinja2 +60 -0
  66. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_claude_3_5_sonnet_20240620_template.jinja2 +77 -0
  67. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_default.jinja2 +68 -0
  68. rasa/dialogue_understanding/generator/{single_step/command_prompt_template.jinja2 → prompt_templates/command_prompt_v2_gpt_4o_2024_11_20_template.jinja2} +1 -1
  69. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +460 -0
  70. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +12 -318
  71. rasa/dialogue_understanding/generator/utils.py +32 -1
  72. rasa/dialogue_understanding/patterns/collect_information.py +1 -1
  73. rasa/dialogue_understanding/patterns/correction.py +13 -1
  74. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +78 -2
  75. rasa/dialogue_understanding/patterns/handle_digressions.py +81 -0
  76. rasa/dialogue_understanding/patterns/validate_slot.py +65 -0
  77. rasa/dialogue_understanding/processor/command_processor.py +154 -28
  78. rasa/dialogue_understanding/utils.py +31 -0
  79. rasa/dialogue_understanding_test/README.md +50 -0
  80. rasa/dialogue_understanding_test/du_test_case.py +28 -8
  81. rasa/dialogue_understanding_test/du_test_result.py +13 -9
  82. rasa/dialogue_understanding_test/io.py +14 -0
  83. rasa/dialogue_understanding_test/test_case_simulation/test_case_tracker_simulator.py +3 -3
  84. rasa/e2e_test/utils/io.py +0 -37
  85. rasa/engine/graph.py +1 -0
  86. rasa/engine/language.py +140 -0
  87. rasa/engine/recipes/config_files/default_config.yml +4 -0
  88. rasa/engine/recipes/default_recipe.py +2 -0
  89. rasa/engine/recipes/graph_recipe.py +2 -0
  90. rasa/engine/storage/local_model_storage.py +1 -0
  91. rasa/engine/storage/storage.py +4 -1
  92. rasa/model_manager/runner_service.py +7 -4
  93. rasa/model_manager/socket_bridge.py +7 -6
  94. rasa/model_manager/warm_rasa_process.py +0 -1
  95. rasa/model_training.py +24 -27
  96. rasa/shared/constants.py +15 -13
  97. rasa/shared/core/constants.py +30 -3
  98. rasa/shared/core/domain.py +13 -20
  99. rasa/shared/core/events.py +13 -2
  100. rasa/shared/core/flows/constants.py +11 -0
  101. rasa/shared/core/flows/flow.py +100 -19
  102. rasa/shared/core/flows/flows_yaml_schema.json +69 -3
  103. rasa/shared/core/flows/steps/collect.py +19 -37
  104. rasa/shared/core/flows/utils.py +43 -4
  105. rasa/shared/core/flows/validation.py +1 -1
  106. rasa/shared/core/slot_mappings.py +350 -111
  107. rasa/shared/core/slots.py +154 -3
  108. rasa/shared/core/trackers.py +77 -2
  109. rasa/shared/importers/importer.py +50 -2
  110. rasa/shared/nlu/constants.py +1 -0
  111. rasa/shared/nlu/training_data/schemas/responses.yml +19 -12
  112. rasa/shared/providers/_configs/azure_entra_id_config.py +541 -0
  113. rasa/shared/providers/_configs/azure_openai_client_config.py +138 -3
  114. rasa/shared/providers/_configs/client_config.py +3 -1
  115. rasa/shared/providers/_configs/default_litellm_client_config.py +3 -1
  116. rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py +3 -1
  117. rasa/shared/providers/_configs/litellm_router_client_config.py +3 -1
  118. rasa/shared/providers/_configs/model_group_config.py +4 -2
  119. rasa/shared/providers/_configs/oauth_config.py +33 -0
  120. rasa/shared/providers/_configs/openai_client_config.py +3 -1
  121. rasa/shared/providers/_configs/rasa_llm_client_config.py +3 -1
  122. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +3 -1
  123. rasa/shared/providers/constants.py +6 -0
  124. rasa/shared/providers/embedding/azure_openai_embedding_client.py +28 -3
  125. rasa/shared/providers/embedding/litellm_router_embedding_client.py +3 -1
  126. rasa/shared/providers/llm/_base_litellm_client.py +42 -17
  127. rasa/shared/providers/llm/azure_openai_llm_client.py +81 -25
  128. rasa/shared/providers/llm/default_litellm_llm_client.py +3 -1
  129. rasa/shared/providers/llm/litellm_router_llm_client.py +29 -8
  130. rasa/shared/providers/llm/llm_client.py +23 -7
  131. rasa/shared/providers/llm/openai_llm_client.py +9 -3
  132. rasa/shared/providers/llm/rasa_llm_client.py +11 -2
  133. rasa/shared/providers/llm/self_hosted_llm_client.py +30 -11
  134. rasa/shared/providers/router/_base_litellm_router_client.py +3 -1
  135. rasa/shared/providers/router/router_client.py +3 -1
  136. rasa/shared/utils/constants.py +3 -0
  137. rasa/shared/utils/llm.py +31 -8
  138. rasa/shared/utils/pykwalify_extensions.py +24 -0
  139. rasa/shared/utils/schemas/domain.yml +26 -1
  140. rasa/telemetry.py +45 -14
  141. rasa/tracing/config.py +2 -0
  142. rasa/tracing/constants.py +12 -0
  143. rasa/tracing/instrumentation/instrumentation.py +36 -0
  144. rasa/tracing/instrumentation/metrics.py +41 -0
  145. rasa/tracing/metric_instrument_provider.py +40 -0
  146. rasa/utils/common.py +0 -1
  147. rasa/validator.py +561 -89
  148. rasa/version.py +1 -1
  149. {rasa_pro-3.12.0.dev12.dist-info → rasa_pro-3.12.0rc1.dist-info}/METADATA +2 -1
  150. {rasa_pro-3.12.0.dev12.dist-info → rasa_pro-3.12.0rc1.dist-info}/RECORD +153 -134
  151. {rasa_pro-3.12.0.dev12.dist-info → rasa_pro-3.12.0rc1.dist-info}/NOTICE +0 -0
  152. {rasa_pro-3.12.0.dev12.dist-info → rasa_pro-3.12.0rc1.dist-info}/WHEEL +0 -0
  153. {rasa_pro-3.12.0.dev12.dist-info → rasa_pro-3.12.0rc1.dist-info}/entry_points.txt +0 -0
rasa/dialogue_understanding/generator/llm_based_command_generator.py

@@ -1,14 +1,18 @@
  from abc import ABC, abstractmethod
  from functools import lru_cache
- from typing import Any, Dict, List, Optional, Text, Tuple, Union
+ from typing import Any, Dict, List, Optional, Set, Text, Tuple, Union

  import structlog
  from jinja2 import Template

+ import rasa.dialogue_understanding.generator.utils
  import rasa.shared.utils.io
  from rasa.dialogue_understanding.commands import (
      Command,
+     SetSlotCommand,
+     StartFlowCommand,
  )
+ from rasa.dialogue_understanding.constants import KEY_MINIMIZE_NUM_CALLS
  from rasa.dialogue_understanding.generator import CommandGenerator
  from rasa.dialogue_understanding.generator.constants import (
      DEFAULT_LLM_CONFIG,
@@ -18,13 +22,16 @@ from rasa.dialogue_understanding.generator.constants import (
      LLM_CONFIG_KEY,
  )
  from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
+ from rasa.dialogue_understanding.stack.utils import top_flow_frame
  from rasa.engine.graph import ExecutionContext, GraphComponent
  from rasa.engine.recipes.default_recipe import DefaultV1Recipe
  from rasa.engine.storage.resource import Resource
  from rasa.engine.storage.storage import ModelStorage
+ from rasa.shared.core.constants import SetSlotExtractor
  from rasa.shared.core.domain import Domain
  from rasa.shared.core.flows import Flow, FlowsList, FlowStep
  from rasa.shared.core.flows.steps.collect import CollectInformationFlowStep
+ from rasa.shared.core.slot_mappings import SlotFillingManager
  from rasa.shared.core.trackers import DialogueStateTracker
  from rasa.shared.exceptions import FileIOException, ProviderClientAPIException
  from rasa.shared.nlu.constants import FLOWS_IN_PROMPT
@@ -304,11 +311,20 @@ class LLMBasedCommandGenerator(
          return filtered_flows

      @measure_llm_latency
-     async def invoke_llm(self, prompt: Text) -> Optional[LLMResponse]:
+     async def invoke_llm(
+         self, prompt: Union[List[dict], List[str], str]
+     ) -> Optional[LLMResponse]:
          """Use LLM to generate a response.

          Args:
-             prompt: The prompt to send to the LLM.
+             prompt: The prompt can be:
+                 - a list of preformatted messages. Each message should be a dictionary
+                   with the following keys:
+                     - content: The message content.
+                     - role: The role of the message (e.g. user or system).
+                 - a list of messages. Each message is a string and will be formatted
+                   as a user message.
+                 - a single message as a string which will be formatted as a user message.

          Returns:
              An LLMResponse object.
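
A minimal sketch (not part of the diff) of the three prompt shapes the widened `invoke_llm` signature accepts, following the docstring above; variable names and message contents are illustrative:

```python
# Illustrative only: the three prompt shapes described in the docstring above.
single_message = "Your action list:"  # plain string, sent as one user message

message_list = [  # list of strings, each formatted as a user message
    "Here is the conversation so far ...",
    "Your action list:",
]

preformatted_messages = [  # list of dicts with explicit roles
    {"role": "system", "content": "You generate action lists for the flow engine."},
    {"role": "user", "content": "Your action list:"},
]

# e.g. response = await generator.invoke_llm(preformatted_messages)
```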
@@ -357,8 +373,7 @@ class LLMBasedCommandGenerator(
                  "slots": slots_with_info,
              }
          )
-
-         return sorted(result, key=lambda x: x["name"])
+         return result

      @staticmethod
      def is_extractable(
@@ -454,3 +469,153 @@ class LLMBasedCommandGenerator(
              if isinstance(current_step, CollectInformationFlowStep)
              else (None, None)
          )
+
+     @staticmethod
+     def _prior_commands_contain_start_flow(prior_commands: List[Command]) -> bool:
+         return any(isinstance(command, StartFlowCommand) for command in prior_commands)
+
+     @staticmethod
+     def _prior_commands_contain_set_slot_for_active_collect_step(
+         prior_commands: List[Command],
+         flows: FlowsList,
+         tracker: DialogueStateTracker,
+     ) -> bool:
+         latest_user_frame = top_flow_frame(tracker.stack, ignore_call_frames=False)
+
+         if latest_user_frame is None:
+             return False
+
+         active_flow = latest_user_frame.flow(flows)
+         active_step = active_flow.step_by_id(latest_user_frame.step_id)
+
+         if not isinstance(active_step, CollectInformationFlowStep):
+             return False
+
+         return any(
+             command.name == active_step.collect
+             for command in prior_commands
+             if isinstance(command, SetSlotCommand)
+         )
+
+     def _should_skip_llm_call(
+         self,
+         prior_commands: List[Command],
+         flows: FlowsList,
+         tracker: DialogueStateTracker,
+     ) -> bool:
+         """Skip invoking the LLM.
+
+         This returns True if the bot builder sets the property
+         KEY_MINIMIZE_NUM_CALLS to True and the prior commands
+         either contain a StartFlowCommand or a SetSlot command
+         for the current collect step.
+         """
+         return self.config.get(KEY_MINIMIZE_NUM_CALLS, False) and (
+             self._prior_commands_contain_start_flow(prior_commands)
+             or self._prior_commands_contain_set_slot_for_active_collect_step(
+                 prior_commands, flows, tracker
+             )
+         )
+
+     @staticmethod
+     def _check_commands_against_slot_mappings(
+         commands: List[Command],
+         tracker: DialogueStateTracker,
+         domain: Optional[Domain] = None,
+     ) -> List[Command]:
+         """Check if the LLM-issued slot commands are fillable.
+
+         The LLM-issued slot commands are fillable if the slot
+         mappings are satisfied (in particular the mapping conditions).
+         """
+         if not domain:
+             return commands
+
+         llm_fillable_slots = [
+             tracker.slots.get(command.name)
+             for command in commands
+             if isinstance(command, SetSlotCommand)
+             and command.extractor == SetSlotExtractor.LLM.value
+             and tracker.slots.get(command.name) is not None
+         ]
+
+         if not llm_fillable_slots:
+             return commands
+
+         slot_filling_manager = SlotFillingManager(domain, tracker)
+         slots_to_be_removed = []
+
+         structlogger.debug(
+             "command_processor.check_commands_against_slot_mappings.active_flow",
+             active_flow=tracker.active_flow,
+         )
+
+         for slot in llm_fillable_slots:
+             should_fill_slot = False
+             for mapping in slot.mappings:  # type: ignore[union-attr]
+                 should_fill_slot = slot_filling_manager.should_fill_slot(
+                     slot.name,  # type: ignore[union-attr]
+                     mapping,
+                 )
+
+                 if should_fill_slot:
+                     break
+
+             if not should_fill_slot:
+                 structlogger.debug(
+                     "command_processor.check_commands_against_slot_mappings.slot_not_fillable",
+                     slot_name=slot.name,  # type: ignore[union-attr]
+                 )
+                 slots_to_be_removed.append(slot.name)  # type: ignore[union-attr]
+
+         if not slots_to_be_removed:
+             return commands
+
+         filtered_commands = [
+             command
+             for command in commands
+             if not (
+                 isinstance(command, SetSlotCommand)
+                 and command.name in slots_to_be_removed
+             )
+         ]
+
+         return filtered_commands
+
+     def _check_start_flow_command_overlap(
+         self,
+         prior_commands: List[Command],
+         commands: List[Command],
+         prior_start_flow_names: Set[str],
+         current_start_flow_names: Set[str],
+     ) -> List[Command]:
+         """Prioritize the prior commands over the LLM-issued commands."""
+         different_flow_names = current_start_flow_names.difference(
+             prior_start_flow_names
+         )
+
+         if not different_flow_names:
+             return prior_commands + commands
+
+         # discard the flow names that are different to prior start flow commands
+         filtered_commands = [
+             command
+             for command in commands
+             if not isinstance(command, StartFlowCommand)
+             or command.flow not in different_flow_names
+         ]
+         return prior_commands + filtered_commands
+
+     def _filter_slot_commands(
+         self,
+         prior_commands: List[Command],
+         commands: List[Command],
+         overlapping_slot_names: Set[str],
+     ) -> Tuple[List[Command], List[Command]]:
+         """Prioritize prior commands over LLM ones in the case of same slot."""
+         filtered_commands = (
+             rasa.dialogue_understanding.generator.utils.filter_slot_commands(
+                 commands, overlapping_slot_names
+             )
+         )
+         return prior_commands, filtered_commands
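
A hedged illustration (not from the package) of how `_check_start_flow_command_overlap` resolves a conflict between prior commands and LLM-issued commands; the flow and slot names, and the constructor signatures, are assumptions for this sketch:

```python
# Illustrative only: behaviour implied by _check_start_flow_command_overlap above.
# Flow/slot names and constructor signatures are assumed for this example.
prior_commands = [StartFlowCommand("transfer_money")]
llm_commands = [
    StartFlowCommand("transfer_money"),
    StartFlowCommand("add_contact"),   # not among the prior start-flow commands
    SetSlotCommand("amount", "50"),
]

# current minus prior start-flow names == {"add_contact"}, so that StartFlowCommand
# is discarded; non-StartFlow commands are kept, and the prior commands come first:
# result == prior_commands + [StartFlowCommand("transfer_money"),
#                             SetSlotCommand("amount", "50")]
```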
rasa/dialogue_understanding/generator/llm_command_generator.py

@@ -1,4 +1,4 @@
- from typing import Any, Dict, Optional, Text
+ from typing import Any, Dict, List, Optional, Text, Union

  import structlog
  from deprecated import deprecated  # type: ignore[import]
@@ -25,7 +25,7 @@ structlogger = structlog.get_logger()
  @deprecated(
      reason=(
          "The LLMCommandGenerator is deprecated and will be removed in Rasa 4.0.0. "
-         "Please use use SingleStepLLMCommandGenerator instead."
+         "Please use SingleStepLLMCommandGenerator instead."
      )
  )
  class LLMCommandGenerator(SingleStepLLMCommandGenerator):
@@ -54,7 +54,9 @@ class LLMCommandGenerator(SingleStepLLMCommandGenerator):
              **kwargs,
          )

-     async def invoke_llm(self, prompt: Text) -> Optional[LLMResponse]:
+     async def invoke_llm(
+         self, prompt: Union[List[dict], List[str], str]
+     ) -> Optional[LLMResponse]:
          try:
              return await super().invoke_llm(prompt)
          except ProviderClientAPIException:
rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py

@@ -2,6 +2,7 @@ import importlib.resources
  from typing import Any, Dict, List, Optional, Text, Tuple, Union

  import structlog
+ from deprecated import deprecated  # type: ignore[import]
  from jinja2 import Template

  import rasa.shared.utils.io
@@ -51,7 +52,7 @@ from rasa.shared.exceptions import ProviderClientAPIException
  from rasa.shared.nlu.constants import TEXT
  from rasa.shared.nlu.training_data.message import Message
  from rasa.shared.providers.llm.llm_response import LLMResponse
- from rasa.shared.utils.io import deep_container_fingerprint
+ from rasa.shared.utils.io import deep_container_fingerprint, raise_deprecation_warning
  from rasa.shared.utils.llm import (
      allowed_values_for_slot,
      get_prompt_template,
@@ -100,6 +101,12 @@ structlogger = structlog.get_logger()
      ],
      is_trainable=True,
  )
+ @deprecated(
+     reason=(
+         "The MultiStepLLMCommandGenerator is deprecated and will be removed in "
+         "Rasa `4.0.0`."
+     )
+ )
  class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
      """A multi-step command generator using an LLM."""

@@ -111,6 +118,14 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
          prompt_templates: Optional[Dict[Text, Optional[Text]]] = None,
          **kwargs: Any,
      ) -> None:
+         raise_deprecation_warning(
+             message=(
+                 "Support for `MultiStepLLMCommandGenerator` will be removed in Rasa "
+                 "`4.0.0`. Please modify your assistant's configuration to use other "
+                 "LLM command generators like the `SingleStepLLMCommandGenerator`."
+             )
+         )
+
          super().__init__(
              config, model_storage, resource, prompt_templates=prompt_templates, **kwargs
          )
@@ -120,7 +135,6 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              FILL_SLOTS_KEY: None,
          }
          self._init_prompt_templates(prompt_templates)
-
          self.trace_prompt_tokens = self.config.get("trace_prompt_tokens", False)

      ### Implementations of LLMBasedCommandGenerator parent
@@ -190,9 +204,14 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
          Returns:
              The commands generated by the llm.
          """
+         prior_commands = self._get_prior_commands(message)
+
          if tracker is None or flows.is_empty():
              # cannot do anything if there are no flows or no tracker
-             return []
+             return prior_commands
+
+         if self._should_skip_llm_call(prior_commands, flows, tracker):
+             return prior_commands

          try:
              commands = await self._predict_commands_with_multi_step(
@@ -221,7 +240,10 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              commands=commands,
          )

-         return commands
+         domain = kwargs.get("domain")
+         commands = self._check_commands_against_slot_mappings(commands, tracker, domain)
+
+         return self._check_commands_overlap(prior_commands, commands)

      @classmethod
      def parse_commands(
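
To summarize the reworked prediction path shown in these hunks, here is a hedged outline (not code from the package) of the order in which the new checks run; only the method names come from the diff, the surrounding skeleton is illustrative:

```python
# Hedged outline of the reworked predict_commands flow, per the hunks above.
async def predict_commands_outline(self, message, flows, tracker, **kwargs):
    prior_commands = self._get_prior_commands(message)   # commands from earlier generators

    if tracker is None or flows.is_empty():
        return prior_commands                             # nothing to predict against

    if self._should_skip_llm_call(prior_commands, flows, tracker):
        return prior_commands                             # honour KEY_MINIMIZE_NUM_CALLS

    commands = await self._predict_commands_with_multi_step(message, flows, tracker)

    domain = kwargs.get("domain")
    commands = self._check_commands_against_slot_mappings(commands, tracker, domain)

    return self._check_commands_overlap(prior_commands, commands)
```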
rasa/dialogue_understanding/generator/nlu_command_adapter.py

@@ -1,7 +1,8 @@
- from typing import Any, Dict, List, Optional, Text
+ from typing import Any, Dict, List, Optional, Set, Text, Tuple

  import structlog

+ import rasa.dialogue_understanding.generator.utils
  from rasa.dialogue_understanding.commands import (
      Command,
      SetSlotCommand,
@@ -98,9 +99,11 @@ class NLUCommandAdapter(GraphComponent, CommandGenerator):
          Returns:
              The commands triggered by NLU.
          """
+         prior_commands = self._get_prior_commands(message)
+
          if tracker is None or flows.is_empty():
              # cannot do anything if there are no flows or no tracker
-             return []
+             return prior_commands

          domain = kwargs.get("domain", None)
          commands = self.convert_nlu_to_commands(message, tracker, flows, domain)
@@ -146,7 +149,7 @@ class NLUCommandAdapter(GraphComponent, CommandGenerator):
              commands=commands,
          )

-         return commands
+         return self._check_commands_overlap(prior_commands, commands)

      @staticmethod
      def convert_nlu_to_commands(
@@ -208,6 +211,44 @@ class NLUCommandAdapter(GraphComponent, CommandGenerator):
          )
          return commands

+     def _check_start_flow_command_overlap(
+         self,
+         prior_commands: List[Command],
+         commands: List[Command],
+         prior_start_flow_names: Set[str],
+         current_start_flow_names: Set[str],
+     ) -> List[Command]:
+         """Prioritize the current NLU commands over the prior commands."""
+         different_flow_names = prior_start_flow_names.difference(
+             current_start_flow_names
+         )
+
+         if not different_flow_names:
+             return prior_commands + commands
+
+         filtered_commands = [
+             command
+             for command in prior_commands
+             if not isinstance(command, StartFlowCommand)
+             or command.flow not in different_flow_names
+         ]
+
+         return filtered_commands + commands
+
+     def _filter_slot_commands(
+         self,
+         prior_commands: List[Command],
+         commands: List[Command],
+         overlapping_slot_names: Set[str],
+     ) -> Tuple[List[Command], List[Command]]:
+         """Prioritize NLU commands over prior_commands in the case of same slot."""
+         filtered_prior_commands = (
+             rasa.dialogue_understanding.generator.utils.filter_slot_commands(
+                 prior_commands, overlapping_slot_names
+             )
+         )
+         return filtered_prior_commands, commands
+

  def _issue_set_slot_commands(
      message: Message,
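
For contrast with the LLM-based generator above, a brief hedged sketch (not from the package) of the inverted priority in the NLU adapter's `_check_start_flow_command_overlap`: here prior StartFlow commands that the NLU commands do not confirm are dropped; flow names and constructor signatures are assumptions:

```python
# Illustrative only: the mirror image of the LLM-based generator's overlap handling.
prior_commands = [StartFlowCommand("transfer_money"), StartFlowCommand("add_contact")]
nlu_commands = [StartFlowCommand("transfer_money")]

# prior minus current start-flow names == {"add_contact"}, so StartFlow(add_contact)
# is removed from the prior commands and the NLU commands are appended:
# result == [StartFlowCommand("transfer_money")] + nlu_commands
```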
rasa/dialogue_understanding/generator/prompt_templates/command_prompt_template.jinja2 (new file)

@@ -0,0 +1,60 @@
+ Your task is to analyze the current conversation context and generate a list of actions to start new business processes that we call flows, to extract slots, or respond to small talk and knowledge requests.
+
+ These are the flows that can be started, with their description and slots:
+ {% for flow in available_flows %}
+ {{ flow.name }}: {{ flow.description }}
+ {% for slot in flow.slots -%}
+ slot: {{ slot.name }}{% if slot.description %} ({{ slot.description }}){% endif %}{% if slot.allowed_values %}, allowed values: {{ slot.allowed_values }}{% endif %}
+ {% endfor %}
+ {%- endfor %}
+
+ ===
+ Here is what happened previously in the conversation:
+ {{ current_conversation }}
+
+ ===
+ {% if current_flow != None %}
+ You are currently in the flow "{{ current_flow }}".
+ You have just asked the user for the slot "{{ current_slot }}"{% if current_slot_description %} ({{ current_slot_description }}){% endif %}.
+
+ {% if flow_slots|length > 0 %}
+ Here are the slots of the currently active flow:
+ {% for slot in flow_slots -%}
+ - name: {{ slot.name }}, value: {{ slot.value }}, type: {{ slot.type }}, description: {{ slot.description }}{% if slot.allowed_values %}, allowed values: {{ slot.allowed_values }}{% endif %}
+ {% endfor %}
+ {% endif %}
+ {% else %}
+ You are currently not in any flow and so there are no active slots.
+ This means you can only set a slot if you first start a flow that requires that slot.
+ {% endif %}
+ If you start a flow, first start the flow and then optionally fill that flow's slots with information the user provided in their message.
+
+ The user just said """{{ user_message }}""".
+
+ ===
+ Based on this information generate a list of actions you want to take. Your job is to start flows and to fill slots where appropriate. Any logic of what happens afterwards is handled by the flow engine. These are your available actions:
+ * Slot setting, described by "SetSlot(slot_name, slot_value)". An example would be "SetSlot(recipient, Freddy)"
+ * Starting another flow, described by "StartFlow(flow_name)". An example would be "StartFlow(transfer_money)"
+ * Cancelling the current flow, described by "CancelFlow()"
+ * Clarifying which flow should be started. An example would be Clarify(list_contacts, add_contact, remove_contact) if the user just wrote "contacts" and there are multiple potential candidates. It also works with a single flow name to confirm you understood correctly, as in Clarify(transfer_money).
+ * Intercepting and handling user messages with the intent to bypass the current step in the flow, described by "SkipQuestion()". Examples of user skip phrases are: "Go to the next question", "Ask me something else".
+ * Responding to knowledge-oriented user messages, described by "SearchAndReply()"
+ * Responding to a casual, non-task-oriented user message, described by "ChitChat()".
+ * Handing off to a human, in case the user seems frustrated or explicitly asks to speak to one, described by "HumanHandoff()".
+ {% if is_repeat_command_enabled %}
+ * Repeating the last bot messages, described by "RepeatLastBotMessages()". This is useful when the user asks to repeat the last bot messages.
+ {% endif %}
+
+ ===
+ Write out the actions you want to take, one per line, in the order they should take place.
+ Do not fill slots with abstract values or placeholders.
+ Only use information provided by the user.
+ Only start a flow if it's completely clear what the user wants. Imagine you were a person reading this message. If it's not 100% clear, clarify the next step.
+ Don't be overly confident. Take a conservative approach and clarify before proceeding.
+ If the user asks for two things which seem contradictory, clarify before starting a flow.
+ If it's not clear whether the user wants to skip the step or to cancel the flow, cancel the flow.
+ Strictly adhere to the provided action types listed above.
+ Focus on the last message and take it one step at a time.
+ Use the previous conversation steps only to aid understanding.
+
+ Your action list:
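
For orientation, a hypothetical action list of the kind this template asks the model to produce, using the action syntax and example flow and slot names given in the template itself:

```
StartFlow(transfer_money)
SetSlot(recipient, Freddy)
```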
rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_claude_3_5_sonnet_20240620_template.jinja2 (new file)

@@ -0,0 +1,77 @@
+ Your task is to analyze the current conversation context and generate a list of actions to start new business processes that we call flows, to extract slots, or respond to small talk and knowledge requests.
+
+ ## Available Actions:
+ * `start flow flow_name`: Starting a flow. For example, `start flow transfer_money` or `start flow list_contacts`
+ * `set slot slot_name slot_value`: Slot setting. For example, `set slot transfer_money_recipient Freddy`. Can be used to correct and change previously set values
+ * `cancel flow`: Cancelling the current flow
+ * `disambiguate flows flow_name1 flow_name2 ... flow_name_n`: Disambiguate which flow should be started when user input is ambiguous by listing the potential flows as options. For example, `disambiguate flows list_contacts add_contact remove_contact ...` if the user just wrote "contacts".
+ * `provide info`: Responding to the user's questions by supplying relevant information, such as answering FAQs or explaining services
+ * `offtopic reply`: Responding to casual or social user messages that are unrelated to any flows, engaging in friendly conversation and addressing off-topic remarks.
+ * `hand over`: Handing over to a human, in case the user seems frustrated or explicitly asks to speak to one
+
+
+ ## General Tips
+ * Do not fill slots with abstract values or placeholders.
+ * Only use information provided by the user.
+ * Use clarification in ambiguous cases.
+ * Multiple flows can be started. If a user wants to digress into a second flow, you do not need to cancel the current flow.
+ * Strictly adhere to the provided action format.
+ * For categorical slots try to match the user message with potential slot values. Use "other" if you cannot match it.
+ * Focus on the last message and take it one step at a time.
+ * Use the previous conversation steps only to aid understanding.
+
+
+ ## Available Flows:
+ Use the following structured data:
+ ```xml
+ <flows>
+ {% for flow in available_flows %}<flow>
+ <name>{{ flow.name }}</name>
+ <description>{{ flow.description }}</description>
+ <slots>{% for slot in flow.slots %}
+ <slot>
+ <name>{{ slot.name }}</name>
+ <description>{{ slot.description }}</description>
+ <allowed_values>{{ slot.allowed_values }}</allowed_values>
+ </slot>{% endfor %}
+ </slots>
+ </flow>
+ {% endfor %}
+ </flows>
+ ```
+
+ ## Current State
+ {% if current_flow != None %}
+ Use the following structured data:
+ ```xml
+ <current_state>
+ <active_flow>{{ current_flow }}</active_flow>
+ <current_step>
+ <requested_slot>{{ current_slot }}</requested_slot>
+ <requested_slot_description>{{ current_slot_description }}</requested_slot_description>
+ </current_step>
+ <slots>
+ {% for slot in flow_slots %}<slot>
+ <name>{{ slot.name }}</name>
+ <value>{{ slot.value }}</value>
+ <type>{{ slot.type }}</type>
+ <description>{{ slot.description }}</description>{% if slot.allowed_values %}
+ <allowed_values>{{ slot.allowed_values }}</allowed_values>{% endif %}
+ </slot>
+ {% endfor %}
+ </slots>
+ </current_state>
+ ```
+ {% else %}
+ You are currently not inside any flow.
+ {% endif %}
+
+
+ ## Conversation History
+ {{ current_conversation }}
+
+
+ ## Task
+ Create an action list with one action per line in response to the user's last message: """{{ user_message }}""".
+
+ Your action list:
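
As with the previous template, a hypothetical action list in the v2 command syntax this template expects, built from the example flow and slot names it mentions:

```
start flow transfer_money
set slot transfer_money_recipient Freddy
```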
rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_default.jinja2 (new file)

@@ -0,0 +1,68 @@
+ ## Task Description
+ Your task is to analyze the current conversation context and generate a list of actions to start new business processes that we call flows, to extract slots, or respond to small talk and knowledge requests.
+
+ --
+
+ ## Available Actions:
+ * `start flow flow_name`: Starting a flow. For example, `start flow transfer_money` or `start flow list_contacts`
+ * `set slot slot_name slot_value`: Slot setting. For example, `set slot transfer_money_recipient Freddy`. Can be used to correct and change previously set values
+ * `cancel flow`: Cancelling the current flow
+ * `disambiguate flows flow_name1 flow_name2 ... flow_name_n`: Disambiguate which flow should be started when user input is ambiguous by listing the potential flows as options. For example, `disambiguate flows list_contacts add_contact remove_contact ...` if the user just wrote "contacts".
+ * `provide info`: Responding to the user's questions by supplying relevant information, such as answering FAQs or explaining services
+ * `offtopic reply`: Responding to casual or social user messages that are unrelated to any flows, engaging in friendly conversation and addressing off-topic remarks.
+ * `hand over`: Handing over to a human, in case the user seems frustrated or explicitly asks to speak to one
+
+ --
+
+ ## General Tips
+ * Do not fill slots with abstract values or placeholders.
+ * Only use information provided by the user.
+ * Use clarification in ambiguous cases.
+ * Multiple flows can be started. If a user wants to digress into a second flow, you do not need to cancel the current flow.
+ * Strictly adhere to the provided action format.
+ * For categorical slots try to match the user message with potential slot values. Use "other" if you cannot match it.
+ * Focus on the last message and take it one step at a time.
+ * Use the previous conversation steps only to aid understanding.
+
+ --
+
+ ## Available Flows and Slots
+ {% for flow in available_flows %}
+ * `{{ flow.name }}`: {{ flow.description }}
+ {% for slot in flow.slots -%}
+ * `{{ slot.name }}`{% if slot.description %} ({{ slot.description }}){% endif %}{% if slot.allowed_values %}, allowed values: `{{ slot.allowed_values }}`{% endif %}
+ {% endfor %}
+ {%- endfor %}
+
+ --
+
+ ## Current State
+ {% if current_flow != None %}
+ You are currently in the flow `{{ current_flow }}`.
+ You have just asked the user for the slot `{{ current_slot }}`{% if current_slot_description %} ({{ current_slot_description }}){% endif %}.
+
+ {% if flow_slots|length > 0 %}
+ Here are the slots of the flow `{{ current_flow }}`:
+ {% for slot in flow_slots -%}
+ * `{{ slot.name }}`
+ - value: "{{ slot.value }}"
+ - type: {{ slot.type }}
+ - description: {{ slot.description }}
+ {% if slot.allowed_values %} - allowed values: {{ slot.allowed_values }}{% endif %}
+ {% endfor %}
+ {% endif %}
+ {% else %}
+ You are currently not inside any flow.
+ {% endif %}
+
+ ---
+
+ ## Conversation History
+ {{ current_conversation }}
+
+ ---
+
+ ## Task
+ Create an action list with one action per line in response to the user's last message: """{{ user_message }}""".
+
+ Your action list:
rasa/dialogue_understanding/generator/{single_step/command_prompt_template.jinja2 → prompt_templates/command_prompt_v2_gpt_4o_2024_11_20_template.jinja2} (renamed)

@@ -7,7 +7,7 @@ Your task is to analyze the current conversation context and generate a list of
  * `start flow flow_name`: Starting a flow. For example, `start flow transfer_money` or `start flow list_contacts`
  * `set slot slot_name slot_value`: Slot setting. For example, `set slot transfer_money_recipient Freddy`. Can be used to correct and change previously set values
  * `cancel flow`: Cancelling the current flow
- * `disambiguate flows flow_name1 flow_name2 ... flow_name_n`: Disambiguate which flow should be started when user input is ambiguous by listing the potential flows as options. For example, `clarify flows list_contacts add_contact remove_contact ...` if the user just wrote "contacts".
+ * `disambiguate flows flow_name1 flow_name2 ... flow_name_n`: Disambiguate which flow should be started when user input is ambiguous by listing the potential flows as options. For example, `disambiguate flows list_contacts add_contact remove_contact ...` if the user just wrote "contacts".
  * `provide info`: Responding to the user's questions by supplying relevant information, such as answering FAQs or explaining services
  * `offtopic reply`: Responding to casual or social user messages that are unrelated to any flows, engaging in friendly conversation and addressing off-topic remarks.
  * `hand over`: Handing over to a human, in case the user seems frustrated or explicitly asks to speak to one