rasa-pro 3.11.3a1.dev6__py3-none-any.whl → 3.11.4__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only.

Potentially problematic release.


This version of rasa-pro might be problematic.

Files changed (103)
  1. rasa/core/actions/action.py +7 -18
  2. rasa/core/actions/constants.py +0 -8
  3. rasa/core/actions/direct_custom_actions_executor.py +0 -1
  4. rasa/core/channels/development_inspector.py +0 -3
  5. rasa/core/channels/inspector/dist/assets/{arc-861ddd57.js → arc-632a63ec.js} +1 -1
  6. rasa/core/channels/inspector/dist/assets/{c4Diagram-d0fbc5ce-921f02db.js → c4Diagram-d0fbc5ce-081e0df4.js} +1 -1
  7. rasa/core/channels/inspector/dist/assets/{classDiagram-936ed81e-b436c4f8.js → classDiagram-936ed81e-3df0afc2.js} +1 -1
  8. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-c3cb15f1-511a23cb.js → classDiagram-v2-c3cb15f1-8c5ed31e.js} +1 -1
  9. rasa/core/channels/inspector/dist/assets/{createText-62fc7601-ef476ecd.js → createText-62fc7601-89c73b31.js} +1 -1
  10. rasa/core/channels/inspector/dist/assets/{edges-f2ad444c-f1878e0a.js → edges-f2ad444c-4fc48c3e.js} +1 -1
  11. rasa/core/channels/inspector/dist/assets/{erDiagram-9d236eb7-fac75185.js → erDiagram-9d236eb7-907e0440.js} +1 -1
  12. rasa/core/channels/inspector/dist/assets/{flowDb-1972c806-201c5bbc.js → flowDb-1972c806-9ec53a3c.js} +1 -1
  13. rasa/core/channels/inspector/dist/assets/{flowDiagram-7ea5b25a-f904ae41.js → flowDiagram-7ea5b25a-41da787a.js} +1 -1
  14. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-8bea338b.js +1 -0
  15. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-abe16c3d-1813da66.js → flowchart-elk-definition-abe16c3d-ce370633.js} +1 -1
  16. rasa/core/channels/inspector/dist/assets/{ganttDiagram-9b5ea136-872af172.js → ganttDiagram-9b5ea136-90a36523.js} +1 -1
  17. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-99d0ae7c-34a0af5a.js → gitGraphDiagram-99d0ae7c-41e1aa3f.js} +1 -1
  18. rasa/core/channels/inspector/dist/assets/{index-2c4b9a3b-42ba3e3d.js → index-2c4b9a3b-e6f2af62.js} +1 -1
  19. rasa/core/channels/inspector/dist/assets/{index-37817b51.js → index-e793d777.js} +3 -3
  20. rasa/core/channels/inspector/dist/assets/{infoDiagram-736b4530-6b731386.js → infoDiagram-736b4530-8ceba4db.js} +1 -1
  21. rasa/core/channels/inspector/dist/assets/{journeyDiagram-df861f2b-e8579ac6.js → journeyDiagram-df861f2b-960d3809.js} +1 -1
  22. rasa/core/channels/inspector/dist/assets/{layout-89e6403a.js → layout-498807d8.js} +1 -1
  23. rasa/core/channels/inspector/dist/assets/{line-dc73d3fc.js → line-eeccc4e2.js} +1 -1
  24. rasa/core/channels/inspector/dist/assets/{linear-f5b1d2bc.js → linear-8a078617.js} +1 -1
  25. rasa/core/channels/inspector/dist/assets/{mindmap-definition-beec6740-82cb74fa.js → mindmap-definition-beec6740-396d17dd.js} +1 -1
  26. rasa/core/channels/inspector/dist/assets/{pieDiagram-dbbf0591-bdf5f29b.js → pieDiagram-dbbf0591-dc9b5e1b.js} +1 -1
  27. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-4d7f4fd6-c7a0cbe4.js → quadrantDiagram-4d7f4fd6-a08cba6d.js} +1 -1
  28. rasa/core/channels/inspector/dist/assets/{requirementDiagram-6fc4c22a-7ec5410f.js → requirementDiagram-6fc4c22a-87242b9e.js} +1 -1
  29. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-8f13d901-caee5554.js → sankeyDiagram-8f13d901-53f6f391.js} +1 -1
  30. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-b655622a-2935f8db.js → sequenceDiagram-b655622a-715c9c20.js} +1 -1
  31. rasa/core/channels/inspector/dist/assets/{stateDiagram-59f0c015-8f5d9693.js → stateDiagram-59f0c015-2e8fb31f.js} +1 -1
  32. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-2b26beab-d565d1de.js → stateDiagram-v2-2b26beab-7e2d2aa0.js} +1 -1
  33. rasa/core/channels/inspector/dist/assets/{styles-080da4f6-75ad421d.js → styles-080da4f6-4420cea6.js} +1 -1
  34. rasa/core/channels/inspector/dist/assets/{styles-3dcbcfbf-7e764226.js → styles-3dcbcfbf-28676cf4.js} +1 -1
  35. rasa/core/channels/inspector/dist/assets/{styles-9c745c82-7a4e0e61.js → styles-9c745c82-cef936a6.js} +1 -1
  36. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-4835440b-4019d1bf.js → svgDrawCommon-4835440b-151251e9.js} +1 -1
  37. rasa/core/channels/inspector/dist/assets/{timeline-definition-5b62e21b-01ea12df.js → timeline-definition-5b62e21b-0d39bdb2.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{xychartDiagram-2b33534f-89407137.js → xychartDiagram-2b33534f-a03fa445.js} +1 -1
  39. rasa/core/channels/inspector/dist/index.html +3 -1
  40. rasa/core/channels/inspector/index.html +2 -0
  41. rasa/core/channels/inspector/src/App.tsx +1 -4
  42. rasa/core/channels/socketio.py +0 -40
  43. rasa/core/nlg/contextual_response_rephraser.py +9 -69
  44. rasa/core/policies/enterprise_search_policy.py +12 -82
  45. rasa/core/policies/flows/flow_executor.py +2 -26
  46. rasa/dialogue_understanding/generator/command_generator.py +5 -136
  47. rasa/dialogue_understanding/generator/llm_based_command_generator.py +5 -5
  48. rasa/dialogue_understanding/generator/llm_command_generator.py +1 -2
  49. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +4 -50
  50. rasa/dialogue_understanding/generator/nlu_command_adapter.py +0 -3
  51. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +3 -16
  52. rasa/dialogue_understanding/patterns/continue_interrupted.py +9 -0
  53. rasa/dialogue_understanding/stack/utils.py +1 -0
  54. rasa/e2e_test/e2e_test_case.py +1 -2
  55. rasa/e2e_test/utils/e2e_yaml_utils.py +1 -1
  56. rasa/e2e_test/utils/io.py +1 -1
  57. rasa/e2e_test/utils/validation.py +2 -100
  58. rasa/engine/recipes/default_recipe.py +49 -63
  59. rasa/engine/recipes/graph_recipe.py +7 -8
  60. rasa/model_manager/runner_service.py +0 -1
  61. rasa/model_manager/socket_bridge.py +2 -8
  62. rasa/model_manager/warm_rasa_process.py +4 -9
  63. rasa/model_training.py +1 -2
  64. rasa/nlu/classifiers/fallback_classifier.py +0 -3
  65. rasa/server.py +37 -1
  66. rasa/shared/constants.py +6 -17
  67. rasa/shared/core/events.py +8 -8
  68. rasa/shared/core/flows/flow.py +4 -4
  69. rasa/shared/core/flows/flow_step.py +15 -10
  70. rasa/shared/core/flows/flow_step_links.py +20 -12
  71. rasa/shared/core/flows/flow_step_sequence.py +5 -3
  72. rasa/shared/core/flows/steps/action.py +3 -2
  73. rasa/shared/core/flows/steps/call.py +3 -3
  74. rasa/shared/core/flows/steps/collect.py +6 -3
  75. rasa/shared/core/flows/steps/continuation.py +3 -1
  76. rasa/shared/core/flows/steps/end.py +3 -1
  77. rasa/shared/core/flows/steps/internal.py +2 -1
  78. rasa/shared/core/flows/steps/link.py +5 -3
  79. rasa/shared/core/flows/steps/no_operation.py +5 -3
  80. rasa/shared/core/flows/steps/set_slots.py +3 -2
  81. rasa/shared/core/flows/steps/start.py +3 -1
  82. rasa/shared/nlu/constants.py +0 -5
  83. rasa/shared/providers/llm/llm_response.py +1 -42
  84. rasa/shared/utils/llm.py +1 -1
  85. rasa/shared/utils/schemas/events.py +1 -1
  86. rasa/shared/utils/yaml.py +5 -6
  87. rasa/studio/upload.py +5 -19
  88. rasa/telemetry.py +33 -40
  89. rasa/tracing/instrumentation/attribute_extractors.py +9 -12
  90. rasa/validator.py +41 -32
  91. rasa/version.py +1 -1
  92. {rasa_pro-3.11.3a1.dev6.dist-info → rasa_pro-3.11.4.dist-info}/METADATA +7 -7
  93. {rasa_pro-3.11.3a1.dev6.dist-info → rasa_pro-3.11.4.dist-info}/RECORD +96 -102
  94. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-b080d6f2.js +0 -1
  95. rasa/dialogue_understanding/constants.py +0 -1
  96. rasa/dialogue_understanding/utils.py +0 -21
  97. rasa/dialogue_understanding_test/__init__.py +0 -0
  98. rasa/dialogue_understanding_test/constants.py +0 -15
  99. rasa/dialogue_understanding_test/du_test_case.py +0 -118
  100. rasa/dialogue_understanding_test/du_test_result.py +0 -11
  101. {rasa_pro-3.11.3a1.dev6.dist-info → rasa_pro-3.11.4.dist-info}/NOTICE +0 -0
  102. {rasa_pro-3.11.3a1.dev6.dist-info → rasa_pro-3.11.4.dist-info}/WHEEL +0 -0
  103. {rasa_pro-3.11.3a1.dev6.dist-info → rasa_pro-3.11.4.dist-info}/entry_points.txt +0 -0
rasa/dialogue_understanding/generator/command_generator.py CHANGED
@@ -10,27 +10,18 @@ from rasa.dialogue_understanding.commands (
      ErrorCommand,
  )
  from rasa.dialogue_understanding.commands.set_slot_command import SetSlotExtractor
- from rasa.shared.constants import (
-     RASA_PATTERN_INTERNAL_ERROR_USER_INPUT_TOO_LONG,
-     RASA_PATTERN_INTERNAL_ERROR_USER_INPUT_EMPTY,
- )
  from rasa.shared.core.constants import SlotMappingType
  from rasa.shared.core.domain import Domain
  from rasa.shared.core.flows import FlowsList
  from rasa.shared.core.slot_mappings import SlotFillingManager
  from rasa.shared.core.trackers import DialogueStateTracker
- from rasa.shared.nlu.constants import (
-     COMMANDS,
-     TEXT,
-     PREDICTED_COMMANDS,
-     PROMPTS,
-     KEY_USER_PROMPT,
-     KEY_SYSTEM_PROMPT,
-     KEY_LLM_RESPONSE_METADATA,
- )
  from rasa.shared.nlu.training_data.message import Message
- from rasa.shared.providers.llm.llm_response import LLMResponse
+ from rasa.shared.nlu.constants import COMMANDS, TEXT
  from rasa.shared.utils.llm import DEFAULT_MAX_USER_INPUT_CHARACTERS
+ from rasa.shared.constants import (
+     RASA_PATTERN_INTERNAL_ERROR_USER_INPUT_TOO_LONG,
+     RASA_PATTERN_INTERNAL_ERROR_USER_INPUT_EMPTY,
+ )

  structlogger = structlog.get_logger()

@@ -202,7 +193,6 @@ class CommandGenerator:
              flows: The flows to use for command prediction.
              tracker: The tracker containing the conversation history up to now.
              **kwargs: Keyword arguments for forward compatibility.
-
          Returns:
              The predicted commands.
          """
@@ -351,124 +341,3 @@ class CommandGenerator:
          ]

          return filtered_commands
-
-     @staticmethod
-     def _add_commands_to_message_parse_data(
-         message: Message, component_name: str, commands: List[Command]
-     ) -> None:
-         """Add commands to the message parse data.
-
-         Commands are only added in case the flag 'record_commands_and_prompts' is set.
-         Example of predicted commands in the message parse data:
-         Message(data={
-             PREDICTED_COMMANDS: {
-                 "MultiStepLLMCommandGenerator": [
-                     {"command": "set_slot", "name": "slot_name", "value": "slot_value"},
-                 ],
-                 "NLUCommandAdapter": [
-                     {"command": "start_flow", "name": "test_flow"},
-                 ]
-             }
-         })
-         """
-         from rasa.dialogue_understanding.utils import record_commands_and_prompts
-
-         # only set commands if the flag "record_commands_and_prompts" is set to True
-         if not record_commands_and_prompts:
-             return
-
-         commands_as_dict = [command.as_dict() for command in commands]
-
-         if message.get(PREDICTED_COMMANDS) is not None:
-             predicted_commands = message.get(PREDICTED_COMMANDS)
-             if component_name in predicted_commands:
-                 predicted_commands[component_name].extend(commands_as_dict)
-             else:
-                 predicted_commands[component_name] = commands_as_dict
-         else:
-             predicted_commands = {component_name: commands_as_dict}
-
-         message.set(
-             PREDICTED_COMMANDS,
-             predicted_commands,
-             add_to_output=True,
-         )
-
-     @staticmethod
-     def _add_prompt_to_message_parse_data(
-         message: Message,
-         component_name: str,
-         prompt_name: str,
-         user_prompt: str,
-         system_prompt: Optional[str] = None,
-         llm_response: Optional[LLMResponse] = None,
-     ) -> None:
-         """Add prompt to the message parse data.
-
-         Prompt is only added in case the flag 'record_commands_and_prompts' is set.
-         Example of prompts in the message parse data:
-         Message(data={
-             PROMPTS: {
-                 "MultiStepLLMCommandGenerator": [
-                     (
-                         "fill_slots_prompt",
-                         {
-                             "user_prompt": <prompt content>",
-                             "system_prompt": <prompt content>",
-                             "llm_response_metadata": <metadata dict from LLMResponse>
-                         }
-                     ),
-                     (
-                         "handle_flows_prompt",
-                         {
-                             "user_prompt": <prompt content>",
-                             "system_prompt": <prompt content>",
-                             "llm_response_metadata": <metadata dict from LLMResponse>
-                         }
-                     ),
-                 ],
-                 "SingleStepLLMCommandGenerator": [
-                     (
-                         "prompt_template",
-                         {
-                             "user_prompt": <prompt content>",
-                             "system_prompt": <prompt content>",
-                             "llm_response_metadata": <metadata dict from LLMResponse>
-                         }
-                     ),
-                 ]
-             }
-         })
-         """
-         from rasa.dialogue_understanding.utils import record_commands_and_prompts
-
-         # only set prompt if the flag "record_commands_and_prompts" is set to True
-         if not record_commands_and_prompts:
-             return
-
-         prompt_data: Dict[Text, Any] = {
-             KEY_USER_PROMPT: user_prompt,
-             **({KEY_SYSTEM_PROMPT: system_prompt} if system_prompt else {}),
-         }
-
-         if llm_response is not None:
-             prompt_data[KEY_LLM_RESPONSE_METADATA] = llm_response.to_dict()
-         else:
-             prompt_data[KEY_LLM_RESPONSE_METADATA] = None
-
-         prompt_tuple = (prompt_name, prompt_data)
-
-         if message.get(PROMPTS) is not None:
-             prompts = message.get(PROMPTS)
-             if component_name in prompts:
-                 prompts[component_name].append(prompt_tuple)
-             else:
-                 prompts[component_name] = [prompt_tuple]
-         else:
-             prompts = {component_name: [prompt_tuple]}
-
-         message.set(
-             PROMPTS,
-             prompts,
-             add_to_output=True,
-         )
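For readers tracking what 3.11.4 drops here: the two removed helpers recorded predicted commands and prompts on the message's parse data (under the PREDICTED_COMMANDS and PROMPTS keys) whenever record_commands_and_prompts was enabled. Below is a minimal standalone sketch of that bookkeeping, using a plain dict in place of rasa's Message and a literal key string in place of the constant; both are assumptions for illustration, not rasa code.

from typing import Any, Dict, List

def record_predicted_commands(
    parse_data: Dict[str, Any],  # stand-in for Message's data dict, not rasa's API
    component_name: str,
    commands_as_dict: List[Dict[str, Any]],
) -> None:
    # Mirror of the removed helper's merge logic: group command dicts by the
    # component that produced them, appending if the component already has entries.
    predicted = parse_data.setdefault("predicted_commands", {})
    predicted.setdefault(component_name, []).extend(commands_as_dict)

data: Dict[str, Any] = {}
record_predicted_commands(data, "NLUCommandAdapter", [{"command": "start_flow", "name": "test_flow"}])
record_predicted_commands(data, "SingleStepLLMCommandGenerator", [{"command": "set_slot", "name": "slot_name", "value": "slot_value"}])
print(data)  # {'predicted_commands': {'NLUCommandAdapter': [...], 'SingleStepLLMCommandGenerator': [...]}}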
rasa/dialogue_understanding/generator/llm_based_command_generator.py CHANGED
@@ -32,7 +32,6 @@ from rasa.shared.exceptions import ProviderClientAPIException
  from rasa.shared.nlu.constants import FLOWS_IN_PROMPT
  from rasa.shared.nlu.training_data.message import Message
  from rasa.shared.nlu.training_data.training_data import TrainingData
- from rasa.shared.providers.llm.llm_response import LLMResponse
  from rasa.shared.utils.health_check.llm_health_check_mixin import LLMHealthCheckMixin
  from rasa.shared.utils.llm import (
      allowed_values_for_slot,
@@ -305,21 +304,22 @@ class LLMBasedCommandGenerator(
          )
          return filtered_flows

-     async def invoke_llm(self, prompt: Text) -> Optional[LLMResponse]:
+     async def invoke_llm(self, prompt: Text) -> Optional[Text]:
          """Use LLM to generate a response.

          Args:
              prompt: The prompt to send to the LLM.

          Returns:
-             An LLMResponse object.
+             The generated text.

          Raises:
-             ProviderClientAPIException: If an error occurs during the LLM API call.
+             ProviderClientAPIException if an error during API call.
          """
          llm = llm_factory(self.config.get(LLM_CONFIG_KEY), DEFAULT_LLM_CONFIG)
          try:
-             return await llm.acompletion(prompt)
+             llm_response = await llm.acompletion(prompt)
+             return llm_response.choices[0]
          except Exception as e:
              # unfortunately, langchain does not wrap LLM exceptions which means
              # we have to catch all exceptions here
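The hunk above reverts invoke_llm to its earlier contract: instead of an LLMResponse object, callers now get the first completion choice back as plain text (or None when a subclass swallows the API exception). A runnable sketch of that contract with a stubbed client; FakeLLM and Completion are illustrative stand-ins, not rasa APIs.

import asyncio
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Completion:
    choices: List[str] = field(default_factory=list)


class FakeLLM:
    async def acompletion(self, prompt: str) -> Completion:
        # pretend the model answered with a single command listing
        return Completion(choices=["StartFlow(transfer_money)"])


async def invoke_llm(llm: FakeLLM, prompt: str) -> Optional[str]:
    # 3.11.4 contract shown in the diff: unwrap the first choice, return plain text
    try:
        response = await llm.acompletion(prompt)
        return response.choices[0]
    except Exception:
        return None  # callers treat None as "no actions generated"


print(asyncio.run(invoke_llm(FakeLLM(), "What can you do?")))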
rasa/dialogue_understanding/generator/llm_command_generator.py CHANGED
@@ -10,7 +10,6 @@ from rasa.engine.recipes.default_recipe import DefaultV1Recipe
  from rasa.engine.storage.resource import Resource
  from rasa.engine.storage.storage import ModelStorage
  from rasa.shared.exceptions import ProviderClientAPIException
- from rasa.shared.providers.llm.llm_response import LLMResponse
  from rasa.shared.utils.io import raise_deprecation_warning

  structlogger = structlog.get_logger()
@@ -54,7 +53,7 @@ class LLMCommandGenerator(SingleStepLLMCommandGenerator):
              **kwargs,
          )

-     async def invoke_llm(self, prompt: Text) -> Optional[LLMResponse]:
+     async def invoke_llm(self, prompt: Text) -> Optional[Text]:
          try:
              return await super().invoke_llm(prompt)
          except ProviderClientAPIException:
rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py CHANGED
@@ -51,7 +51,6 @@ from rasa.shared.core.trackers import DialogueStateTracker
  from rasa.shared.exceptions import ProviderClientAPIException
  from rasa.shared.nlu.constants import TEXT
  from rasa.shared.nlu.training_data.message import Message
- from rasa.shared.providers.llm.llm_response import LLMResponse
  from rasa.shared.utils.io import deep_container_fingerprint
  from rasa.shared.utils.llm import (
      get_prompt_template,
@@ -145,6 +144,7 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
          **kwargs: Any,
      ) -> "MultiStepLLMCommandGenerator":
          """Loads trained component (see parent class for full docstring)."""
+
          # Perform health check of the LLM client config
          llm_config = resolve_model_client_config(config.get(LLM_CONFIG_KEY, {}))
          cls.perform_llm_health_check(
@@ -200,9 +200,6 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
                  message, flows, tracker
              )
              commands = self._clean_up_commands(commands)
-             self._add_commands_to_message_parse_data(
-                 message, MultiStepLLMCommandGenerator.__name__, commands
-             )
          except ProviderClientAPIException:
              # if any step resulted in API exception, the command prediction cannot
              # be completed, "predict" the ErrorCommand
@@ -536,12 +533,7 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=prompt,
          )

-         response = await self.invoke_llm(prompt)
-         llm_response = LLMResponse.ensure_llm_response(response)
-         actions = None
-         if llm_response and llm_response.choices:
-             actions = llm_response.choices[0]
-
+         actions = await self.invoke_llm(prompt)
          structlogger.debug(
              "multi_step_llm_command_generator"
              ".predict_commands_for_active_flow"
@@ -550,16 +542,6 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
          )

          commands = self.parse_commands(actions, tracker, available_flows)
-
-         if commands:
-             self._add_prompt_to_message_parse_data(
-                 message=message,
-                 component_name=MultiStepLLMCommandGenerator.__name__,
-                 prompt_name="fill_slots_for_active_flow_prompt",
-                 user_prompt=prompt,
-                 llm_response=llm_response,
-             )
-
          return commands

      async def _predict_commands_for_handling_flows(
@@ -591,12 +573,7 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=prompt,
          )

-         response = await self.invoke_llm(prompt)
-         llm_response = LLMResponse.ensure_llm_response(response)
-         actions = None
-         if llm_response and llm_response.choices:
-             actions = llm_response.choices[0]
-
+         actions = await self.invoke_llm(prompt)
          structlogger.debug(
              "multi_step_llm_command_generator"
              ".predict_commands_for_handling_flows"
@@ -608,15 +585,6 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
          # filter out flows that are already started and active
          commands = self._filter_redundant_start_flow_commands(tracker, commands)

-         if commands:
-             self._add_prompt_to_message_parse_data(
-                 message=message,
-                 component_name=MultiStepLLMCommandGenerator.__name__,
-                 prompt_name="handle_flows_prompt",
-                 user_prompt=prompt,
-                 llm_response=llm_response,
-             )
-
          return commands

      @staticmethod
@@ -681,12 +649,7 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=prompt,
          )

-         response = await self.invoke_llm(prompt)
-         llm_response = LLMResponse.ensure_llm_response(response)
-         actions = None
-         if llm_response and llm_response.choices:
-             actions = llm_response.choices[0]
-
+         actions = await self.invoke_llm(prompt)
          structlogger.debug(
              "multi_step_llm_command_generator"
              ".predict_commands_for_newly_started_flow"
@@ -711,15 +674,6 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
              commands=commands,
          )

-         if commands:
-             self._add_prompt_to_message_parse_data(
-                 message=message,
-                 component_name=MultiStepLLMCommandGenerator.__name__,
-                 prompt_name="fill_slots_for_new_flow_prompt",
-                 user_prompt=prompt,
-                 llm_response=llm_response,
-             )
-
          return commands

      def _prepare_inputs(
rasa/dialogue_understanding/generator/nlu_command_adapter.py CHANGED
@@ -205,9 +205,6 @@ class NLUCommandAdapter(GraphComponent, CommandGenerator):
              commands=commands,
          )

-         CommandGenerator._add_commands_to_message_parse_data(
-             message, NLUCommandAdapter.__name__, commands
-         )
          return commands

rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py CHANGED
@@ -46,7 +46,6 @@ from rasa.shared.core.trackers import DialogueStateTracker
  from rasa.shared.exceptions import ProviderClientAPIException
  from rasa.shared.nlu.constants import TEXT, LLM_COMMANDS, LLM_PROMPT
  from rasa.shared.nlu.training_data.message import Message
- from rasa.shared.providers.llm.llm_response import LLMResponse
  from rasa.shared.utils.io import deep_container_fingerprint
  from rasa.shared.utils.llm import (
      get_prompt_template,
@@ -138,6 +137,7 @@ class SingleStepLLMCommandGenerator(LLMBasedCommandGenerator):
          **kwargs: Any,
      ) -> "SingleStepLLMCommandGenerator":
          """Loads trained component (see parent class for full docstring)."""
+
          # Perform health check of the LLM API endpoint
          llm_config = resolve_model_client_config(config.get(LLM_CONFIG_KEY, {}))
          cls.perform_llm_health_check(
@@ -265,16 +265,13 @@ class SingleStepLLMCommandGenerator(LLMBasedCommandGenerator):
              prompt=flow_prompt,
          )

-         response = await self.invoke_llm(flow_prompt)
-         llm_response = LLMResponse.ensure_llm_response(response)
+         action_list = await self.invoke_llm(flow_prompt)
          # The check for 'None' maintains compatibility with older versions
          # of LLMCommandGenerator. In previous implementations, 'invoke_llm'
          # might return 'None' to indicate a failure to generate actions.
-         if llm_response is None or not llm_response.choices:
+         if action_list is None:
              return [ErrorCommand()]

-         action_list = llm_response.choices[0]
-
          log_llm(
              logger=structlogger,
              log_module="SingleStepLLMCommandGenerator",
@@ -285,16 +282,6 @@ class SingleStepLLMCommandGenerator(LLMBasedCommandGenerator):
          commands = self.parse_commands(action_list, tracker, flows)

          self._update_message_parse_data_for_fine_tuning(message, commands, flow_prompt)
-         self._add_commands_to_message_parse_data(
-             message, SingleStepLLMCommandGenerator.__name__, commands
-         )
-         self._add_prompt_to_message_parse_data(
-             message=message,
-             component_name=SingleStepLLMCommandGenerator.__name__,
-             prompt_name="command_generator_prompt",
-             user_prompt=flow_prompt,
-             llm_response=llm_response,
-         )

          return commands

rasa/dialogue_understanding/patterns/continue_interrupted.py CHANGED
@@ -40,3 +40,12 @@ class ContinueInterruptedPatternFlowStackFrame(PatternFlowStackFrame):
              step_id=data["step_id"],
              previous_flow_name=data["previous_flow_name"],
          )
+
+     def __eq__(self, other: Any) -> bool:
+         if not isinstance(other, ContinueInterruptedPatternFlowStackFrame):
+             return False
+         return (
+             self.flow_id == other.flow_id
+             and self.step_id == other.step_id
+             and self.previous_flow_name == other.previous_flow_name
+         )
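The added __eq__ gives the pattern frame value-based equality over flow_id, step_id, and previous_flow_name. A simplified, self-contained illustration of those semantics follows; the Frame class below is a stand-in, not the real PatternFlowStackFrame hierarchy, and the field values are made up.

from dataclasses import dataclass


@dataclass(eq=False)
class Frame:
    flow_id: str
    step_id: str
    previous_flow_name: str

    def __eq__(self, other: object) -> bool:
        # same rule as the diff: equal only if all three identifying fields match
        if not isinstance(other, Frame):
            return False
        return (
            self.flow_id == other.flow_id
            and self.step_id == other.step_id
            and self.previous_flow_name == other.previous_flow_name
        )


# Two frames describing the same interruption now compare equal, which lets
# stack-handling code detect an already-present "continue interrupted" frame.
a = Frame("pattern_continue_interrupted", "start", "transfer_money")
b = Frame("pattern_continue_interrupted", "start", "transfer_money")
print(a == b)  # True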
rasa/dialogue_understanding/stack/utils.py CHANGED
@@ -77,6 +77,7 @@ def top_user_flow_frame(dialogue_stack: DialogueStack) -> Optional[UserFlowStack
          if (
              isinstance(frame, UserFlowStackFrame)
              and frame.frame_type != FlowStackFrameType.CALL
+             and frame.frame_type != FlowStackFrameType.LINK
          ):
              return frame
      return None
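With the added condition, frames whose frame_type is LINK are now skipped alongside CALL frames when walking the stack for the top user flow frame. A simplified sketch of that selection rule; the classes below are stand-ins for rasa's DialogueStack types, not its actual API.

from dataclasses import dataclass
from enum import Enum
from typing import List, Optional


class FrameType(Enum):
    REGULAR = "regular"
    CALL = "call"
    LINK = "link"


@dataclass
class UserFlowFrame:
    flow_id: str
    frame_type: FrameType


def top_user_flow_frame(frames: List[UserFlowFrame]) -> Optional[UserFlowFrame]:
    # walk from the newest frame down, ignoring frames pushed via call or link
    for frame in reversed(frames):
        if frame.frame_type not in (FrameType.CALL, FrameType.LINK):
            return frame
    return None


stack = [
    UserFlowFrame("transfer_money", FrameType.REGULAR),
    UserFlowFrame("check_balance", FrameType.LINK),  # linked flow is now ignored
]
print(top_user_flow_frame(stack).flow_id)  # transfer_money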
rasa/e2e_test/e2e_test_case.py CHANGED
@@ -5,7 +5,6 @@ from typing import Any, Dict, List, Optional, Text, Union

  import structlog

- from rasa.dialogue_understanding_test.du_test_case import DialogueUnderstandingTestCase
  from rasa.e2e_test.assertions import Assertion
  from rasa.e2e_test.constants import (
      KEY_ASSERTIONS,
@@ -552,7 +551,7 @@ class Metadata:
  class TestSuite:
      """Class for representing all top level test suite keys."""

-     test_cases: List[Union[TestCase, DialogueUnderstandingTestCase]]
+     test_cases: List[TestCase]
      fixtures: List[Fixture]
      metadata: List[Metadata]
      stub_custom_actions: Dict[Text, StubCustomAction]
rasa/e2e_test/utils/e2e_yaml_utils.py CHANGED
@@ -45,7 +45,7 @@ class E2ETestYAMLWriter:

          yaml_data = ruamel.yaml.safe_load(tests)

-         test_cases_yaml = [{KEY_TEST_CASES: yaml_data}]
+         test_cases_yaml = {KEY_TEST_CASES: yaml_data}
          with open(output_file, "w") as outfile:
              yaml = ruamel.yaml.YAML()
              yaml.dump(test_cases_yaml, outfile)
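The fix changes what E2ETestYAMLWriter hands to ruamel: a mapping with test_cases at the document root rather than a single-element list wrapping that mapping. A small sketch of the difference in serialized output; the test case content is made up for illustration, and "test_cases" is assumed to be the literal value behind KEY_TEST_CASES.

import io
import ruamel.yaml

yaml = ruamel.yaml.YAML()
cases = [{"test_case": "greet", "steps": [{"user": "hi"}]}]

old_shape = [{"test_cases": cases}]  # pre-fix: YAML document is a one-item list
new_shape = {"test_cases": cases}    # post-fix: "test_cases" is the top-level key

for shape in (old_shape, new_shape):
    buf = io.StringIO()
    yaml.dump(shape, buf)
    print(buf.getvalue())

The old shape serializes with a leading "- ", whereas the new shape yields a top-level "test_cases:" key, which matches how e2e test files are written elsewhere and is presumably why it was corrected.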
rasa/e2e_test/utils/io.py CHANGED
@@ -404,7 +404,7 @@ def read_test_cases(path: str) -> TestSuite:
          stub_data=stub_data,
      )

-     validate_test_case(test_case_name, input_test_cases, fixtures, metadata)
+     validate_test_case(test_case_name, input_test_cases)
      try:
          if stub_custom_actions:
              ensure_beta_feature_is_enabled(
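As the call site above shows, validate_test_case now takes only the requested test case name and the loaded test cases; the fixture and metadata cross-checks are dropped in 3.11.4 (their removal appears in the validation.py hunks below). A standalone sketch of the narrowed check, not the actual rasa implementation:

import sys
from typing import List


def validate_test_case(test_case_name: str, input_test_cases: List[str]) -> None:
    # Narrowed behaviour: only verify that a requested test case matched something;
    # unknown fixture/metadata references are no longer validated at this point.
    if test_case_name and not input_test_cases:
        print(f"Test case does not exist: {test_case_name}.", file=sys.stderr)
        sys.exit(1)


validate_test_case("transfer_money_happy_path", ["transfer_money_happy_path"])  # passes silently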
rasa/e2e_test/utils/validation.py CHANGED
@@ -6,7 +6,6 @@ import structlog

  import rasa.shared.utils.io
  from rasa.e2e_test.constants import SCHEMA_FILE_PATH
- from rasa.e2e_test.e2e_test_case import Fixture, Metadata
  from rasa.shared.utils.yaml import read_schema_file

  if TYPE_CHECKING:
@@ -28,24 +27,8 @@ def validate_path_to_test_cases(path: str) -> None:
          sys.exit(1)


- def validate_test_case(
-     test_case_name: str,
-     input_test_cases: List["TestCase"],
-     fixtures: Dict[str, Fixture],
-     metadata: Dict[str, Metadata],
- ) -> None:
-     """
-     Validate the test case, its fixtures, and metadata.
-
-     Args:
-         test_case_name (str): The name of the test case to validate.
-         input_test_cases (List["TestCase"]): A list of test cases to validate.
-         fixtures (Dict[str, Fixture]): A dictionary of defined fixtures.
-         metadata (Dict[str, Metadata]): A dictionary of defined metadata.
-
-     Raises:
-         SystemExit: If the test case, fixtures, or metadata are not defined.
-     """
+ def validate_test_case(test_case_name: str, input_test_cases: List["TestCase"]) -> None:
+     """Validate that test case exists."""
      if test_case_name and not input_test_cases:
          rasa.shared.utils.io.raise_warning(
              f"Test case does not exist: {test_case_name}. "
@@ -55,87 +38,6 @@ def validate_test_case(
          )
          sys.exit(1)

-     all_good = True
-     for test_case in input_test_cases:
-         all_good_fixtures = validate_test_case_fixtures(test_case, fixtures)
-         all_good_metadata = validate_test_case_metadata(test_case, metadata)
-         all_good = all_good and all_good_fixtures and all_good_metadata
-
-     if not all_good:
-         sys.exit(1)
-
-
- def validate_test_case_fixtures(
-     test_case: "TestCase", fixtures: Dict[str, Fixture]
- ) -> bool:
-     """Validates that the fixtures used in the test case are defined.
-
-     Args:
-         test_case (TestCase): The test case to validate.
-         fixtures (Dict[str, Fixture]): A dictionary of defined fixtures.
-
-     Returns:
-         True if all fixtures used in the test case are defined, False otherwise.
-
-     Raises:
-         Logs an error if a fixture used in the test case is not defined.
-     """
-     all_good = True
-     if not test_case.fixture_names:
-         return all_good
-
-     for fixture_name in test_case.fixture_names:
-         if fixture_name not in fixtures:
-             structlogger.error(
-                 "validation.validate_test_case_fixtures",
-                 event_info=(
-                     f"Fixture '{fixture_name}' referenced in the "
-                     f"test case '{test_case.name}' is not defined."
-                 ),
-             )
-             all_good = False
-     return all_good
-
-
- def validate_test_case_metadata(
-     test_case: "TestCase", metadata: Dict[str, Metadata]
- ) -> bool:
-     """
-     Validates that the metadata used in the test case and its steps are defined.
-
-     Args:
-         test_case (TestCase): The test case to validate.
-         metadata (Dict[str, Metadata]): A dictionary of defined metadata.
-
-     Returns:
-         True if all fixtures used in the test case are defined, False otherwise.
-
-     Raises:
-         Logs an error if metadata used in the test case or its steps is not defined.
-     """
-     all_good = True
-     if test_case.metadata_name and test_case.metadata_name not in metadata:
-         structlogger.error(
-             "validation.validate_test_case_metadata.test_case_metadata",
-             event_info=(
-                 f"Metadata '{test_case.metadata_name}' referenced in "
-                 f"the test case '{test_case.name}' is not defined."
-             ),
-         )
-         all_good = False
-
-     for step in test_case.steps:
-         if step.metadata_name and step.metadata_name not in metadata:
-             structlogger.error(
-                 "validation.validate_test_case_metadata.step_metadata",
-                 event_info=(
-                     f"Metadata '{step.metadata_name}' referenced in the "
-                     f"step of the test case '{test_case.name}' is not defined."
-                 ),
-             )
-             all_good = False
-     return all_good
-

  def validate_model_path(model_path: Optional[str], parameter: str, default: str) -> str:
      """Validate the model path.