rasa-pro 3.13.0.dev20250612__py3-none-any.whl → 3.13.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rasa-pro might be problematic. Click here for more details.

Files changed (252)
  1. rasa/__main__.py +0 -3
  2. rasa/api.py +1 -1
  3. rasa/cli/dialogue_understanding_test.py +1 -1
  4. rasa/cli/e2e_test.py +1 -8
  5. rasa/cli/evaluate.py +1 -1
  6. rasa/cli/export.py +3 -1
  7. rasa/cli/llm_fine_tuning.py +12 -11
  8. rasa/cli/project_templates/defaults.py +133 -0
  9. rasa/cli/project_templates/tutorial/config.yml +1 -1
  10. rasa/cli/project_templates/tutorial/endpoints.yml +1 -1
  11. rasa/cli/run.py +1 -1
  12. rasa/cli/studio/download.py +1 -23
  13. rasa/cli/studio/link.py +52 -0
  14. rasa/cli/studio/pull.py +79 -0
  15. rasa/cli/studio/push.py +78 -0
  16. rasa/cli/studio/studio.py +12 -0
  17. rasa/cli/studio/train.py +0 -1
  18. rasa/cli/studio/upload.py +8 -0
  19. rasa/cli/train.py +1 -1
  20. rasa/cli/utils.py +1 -1
  21. rasa/cli/x.py +1 -1
  22. rasa/constants.py +2 -0
  23. rasa/core/__init__.py +0 -16
  24. rasa/core/actions/action.py +5 -1
  25. rasa/core/actions/action_repeat_bot_messages.py +18 -22
  26. rasa/core/actions/action_run_slot_rejections.py +0 -1
  27. rasa/core/agent.py +16 -1
  28. rasa/core/available_endpoints.py +146 -0
  29. rasa/core/brokers/pika.py +1 -2
  30. rasa/core/channels/__init__.py +2 -0
  31. rasa/core/channels/botframework.py +2 -2
  32. rasa/core/channels/channel.py +2 -2
  33. rasa/core/channels/development_inspector.py +1 -1
  34. rasa/core/channels/facebook.py +1 -4
  35. rasa/core/channels/hangouts.py +8 -5
  36. rasa/core/channels/inspector/README.md +3 -3
  37. rasa/core/channels/inspector/dist/assets/{arc-c4b064fc.js → arc-371401b1.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{blockDiagram-38ab4fdb-215b5026.js → blockDiagram-38ab4fdb-3f126156.js} +1 -1
  39. rasa/core/channels/inspector/dist/assets/{c4Diagram-3d4e48cf-2b54a0a3.js → c4Diagram-3d4e48cf-12f22eb7.js} +1 -1
  40. rasa/core/channels/inspector/dist/assets/channel-f1efda17.js +1 -0
  41. rasa/core/channels/inspector/dist/assets/{classDiagram-70f12bd4-daacea5f.js → classDiagram-70f12bd4-03b1d386.js} +1 -1
  42. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-f2320105-930d4dc2.js → classDiagram-v2-f2320105-84f69d63.js} +1 -1
  43. rasa/core/channels/inspector/dist/assets/clone-fdf164e2.js +1 -0
  44. rasa/core/channels/inspector/dist/assets/{createText-2e5e7dd3-83c206ba.js → createText-2e5e7dd3-ca47fd38.js} +1 -1
  45. rasa/core/channels/inspector/dist/assets/{edges-e0da2a9e-b0eb01d0.js → edges-e0da2a9e-f837ca8a.js} +1 -1
  46. rasa/core/channels/inspector/dist/assets/{erDiagram-9861fffd-17586500.js → erDiagram-9861fffd-8717ac54.js} +1 -1
  47. rasa/core/channels/inspector/dist/assets/{flowDb-956e92f1-be2a1776.js → flowDb-956e92f1-94f38b83.js} +1 -1
  48. rasa/core/channels/inspector/dist/assets/{flowDiagram-66a62f08-c2120ebd.js → flowDiagram-66a62f08-b616f9fb.js} +1 -1
  49. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-7d7a1629.js +1 -0
  50. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-4a651766-a6ab5c48.js → flowchart-elk-definition-4a651766-f5d24bb8.js} +1 -1
  51. rasa/core/channels/inspector/dist/assets/{ganttDiagram-c361ad54-ef613457.js → ganttDiagram-c361ad54-b43ba8d9.js} +1 -1
  52. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-72cf32ee-d59185b3.js → gitGraphDiagram-72cf32ee-c3aafaa5.js} +1 -1
  53. rasa/core/channels/inspector/dist/assets/{graph-0f155405.js → graph-0d0a2c10.js} +1 -1
  54. rasa/core/channels/inspector/dist/assets/{index-3862675e-d5f1d1b7.js → index-3862675e-58ea0305.js} +1 -1
  55. rasa/core/channels/inspector/dist/assets/{index-47737d3a.js → index-cce6f8a1.js} +3 -3
  56. rasa/core/channels/inspector/dist/assets/{infoDiagram-f8f76790-b07d141f.js → infoDiagram-f8f76790-b8f60461.js} +1 -1
  57. rasa/core/channels/inspector/dist/assets/{journeyDiagram-49397b02-1936d429.js → journeyDiagram-49397b02-95be5545.js} +1 -1
  58. rasa/core/channels/inspector/dist/assets/{layout-dde8d0f3.js → layout-da885b9b.js} +1 -1
  59. rasa/core/channels/inspector/dist/assets/{line-0c2c7ee0.js → line-f1c817d3.js} +1 -1
  60. rasa/core/channels/inspector/dist/assets/{linear-35dd89a4.js → linear-d42801e6.js} +1 -1
  61. rasa/core/channels/inspector/dist/assets/{mindmap-definition-fc14e90a-56192851.js → mindmap-definition-fc14e90a-a38923a6.js} +1 -1
  62. rasa/core/channels/inspector/dist/assets/{pieDiagram-8a3498a8-fc21ed78.js → pieDiagram-8a3498a8-ca6e71e9.js} +1 -1
  63. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-120e2f19-25e98518.js → quadrantDiagram-120e2f19-b290dae9.js} +1 -1
  64. rasa/core/channels/inspector/dist/assets/{requirementDiagram-deff3bca-546ff1f5.js → requirementDiagram-deff3bca-03f02ceb.js} +1 -1
  65. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-04a897e0-02d8b82d.js → sankeyDiagram-04a897e0-c49eee40.js} +1 -1
  66. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-704730f1-3ca5a92e.js → sequenceDiagram-704730f1-b2cd6a3d.js} +1 -1
  67. rasa/core/channels/inspector/dist/assets/{stateDiagram-587899a1-128ea07c.js → stateDiagram-587899a1-e53a2028.js} +1 -1
  68. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-d93cdb3a-95f290af.js → stateDiagram-v2-d93cdb3a-e1982a03.js} +1 -1
  69. rasa/core/channels/inspector/dist/assets/{styles-6aaf32cf-4984898a.js → styles-6aaf32cf-d0226ca5.js} +1 -1
  70. rasa/core/channels/inspector/dist/assets/{styles-9a916d00-1bf266ba.js → styles-9a916d00-0e21dc00.js} +1 -1
  71. rasa/core/channels/inspector/dist/assets/{styles-c10674c1-60521c63.js → styles-c10674c1-9588494e.js} +1 -1
  72. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-08f97a94-a25b6e12.js → svgDrawCommon-08f97a94-be478d4f.js} +1 -1
  73. rasa/core/channels/inspector/dist/assets/{timeline-definition-85554ec2-0fc086bf.js → timeline-definition-85554ec2-74631749.js} +1 -1
  74. rasa/core/channels/inspector/dist/assets/{xychartDiagram-e933f94c-44ee592e.js → xychartDiagram-e933f94c-a043552f.js} +1 -1
  75. rasa/core/channels/inspector/dist/index.html +1 -1
  76. rasa/core/channels/inspector/src/components/RecruitmentPanel.tsx +1 -1
  77. rasa/core/channels/mattermost.py +1 -1
  78. rasa/core/channels/rasa_chat.py +2 -4
  79. rasa/core/channels/rest.py +5 -4
  80. rasa/core/channels/socketio.py +56 -41
  81. rasa/core/channels/studio_chat.py +314 -10
  82. rasa/core/channels/vier_cvg.py +1 -2
  83. rasa/core/channels/voice_ready/audiocodes.py +2 -9
  84. rasa/core/channels/voice_stream/asr/azure.py +9 -0
  85. rasa/core/channels/voice_stream/audiocodes.py +8 -5
  86. rasa/core/channels/voice_stream/browser_audio.py +1 -1
  87. rasa/core/channels/voice_stream/genesys.py +2 -2
  88. rasa/core/channels/voice_stream/jambonz.py +166 -0
  89. rasa/core/channels/voice_stream/tts/__init__.py +8 -0
  90. rasa/core/channels/voice_stream/twilio_media_streams.py +17 -5
  91. rasa/core/channels/voice_stream/voice_channel.py +44 -24
  92. rasa/core/exporter.py +36 -0
  93. rasa/core/http_interpreter.py +3 -7
  94. rasa/core/information_retrieval/faiss.py +18 -11
  95. rasa/core/information_retrieval/ingestion/faq_parser.py +158 -0
  96. rasa/core/jobs.py +2 -1
  97. rasa/core/nlg/contextual_response_rephraser.py +48 -12
  98. rasa/core/nlg/generator.py +0 -1
  99. rasa/core/nlg/interpolator.py +2 -3
  100. rasa/core/nlg/summarize.py +39 -5
  101. rasa/core/policies/enterprise_search_policy.py +298 -184
  102. rasa/core/policies/enterprise_search_policy_config.py +241 -0
  103. rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2 +64 -0
  104. rasa/core/policies/flow_policy.py +1 -1
  105. rasa/core/policies/flows/flow_executor.py +96 -17
  106. rasa/core/policies/intentless_policy.py +71 -26
  107. rasa/core/processor.py +104 -51
  108. rasa/core/run.py +33 -11
  109. rasa/core/tracker_stores/tracker_store.py +1 -1
  110. rasa/core/training/interactive.py +1 -1
  111. rasa/core/utils.py +35 -99
  112. rasa/dialogue_understanding/coexistence/intent_based_router.py +2 -1
  113. rasa/dialogue_understanding/coexistence/llm_based_router.py +13 -17
  114. rasa/dialogue_understanding/commands/__init__.py +4 -0
  115. rasa/dialogue_understanding/commands/can_not_handle_command.py +2 -0
  116. rasa/dialogue_understanding/commands/cancel_flow_command.py +6 -2
  117. rasa/dialogue_understanding/commands/chit_chat_answer_command.py +2 -0
  118. rasa/dialogue_understanding/commands/clarify_command.py +7 -3
  119. rasa/dialogue_understanding/commands/command_syntax_manager.py +1 -0
  120. rasa/dialogue_understanding/commands/correct_slots_command.py +5 -6
  121. rasa/dialogue_understanding/commands/error_command.py +1 -1
  122. rasa/dialogue_understanding/commands/human_handoff_command.py +3 -3
  123. rasa/dialogue_understanding/commands/knowledge_answer_command.py +2 -0
  124. rasa/dialogue_understanding/commands/repeat_bot_messages_command.py +2 -0
  125. rasa/dialogue_understanding/commands/set_slot_command.py +15 -5
  126. rasa/dialogue_understanding/commands/skip_question_command.py +3 -3
  127. rasa/dialogue_understanding/commands/start_flow_command.py +7 -3
  128. rasa/dialogue_understanding/commands/utils.py +26 -2
  129. rasa/dialogue_understanding/generator/__init__.py +7 -1
  130. rasa/dialogue_understanding/generator/command_generator.py +15 -3
  131. rasa/dialogue_understanding/generator/command_parser.py +2 -2
  132. rasa/dialogue_understanding/generator/command_parser_validator.py +63 -0
  133. rasa/dialogue_understanding/generator/constants.py +2 -2
  134. rasa/dialogue_understanding/generator/nlu_command_adapter.py +2 -2
  135. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_template.jinja2 +0 -2
  136. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_claude_3_5_sonnet_20240620_template.jinja2 +1 -0
  137. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_gpt_4o_2024_11_20_template.jinja2 +1 -0
  138. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v3_claude_3_5_sonnet_20240620_template.jinja2 +79 -0
  139. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v3_gpt_4o_2024_11_20_template.jinja2 +79 -0
  140. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +28 -463
  141. rasa/dialogue_understanding/generator/single_step/search_ready_llm_command_generator.py +147 -0
  142. rasa/dialogue_understanding/generator/single_step/single_step_based_llm_command_generator.py +461 -0
  143. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +11 -64
  144. rasa/dialogue_understanding/patterns/cancel.py +1 -2
  145. rasa/dialogue_understanding/patterns/clarify.py +1 -1
  146. rasa/dialogue_understanding/patterns/correction.py +2 -2
  147. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +37 -25
  148. rasa/dialogue_understanding/patterns/domain_for_patterns.py +190 -0
  149. rasa/dialogue_understanding/processor/command_processor.py +11 -12
  150. rasa/dialogue_understanding/processor/command_processor_component.py +3 -3
  151. rasa/dialogue_understanding/stack/frames/flow_stack_frame.py +17 -4
  152. rasa/dialogue_understanding/stack/utils.py +3 -1
  153. rasa/dialogue_understanding/utils.py +68 -12
  154. rasa/dialogue_understanding_test/du_test_case.py +1 -1
  155. rasa/dialogue_understanding_test/du_test_runner.py +4 -22
  156. rasa/dialogue_understanding_test/test_case_simulation/test_case_tracker_simulator.py +2 -6
  157. rasa/e2e_test/e2e_test_coverage_report.py +1 -1
  158. rasa/e2e_test/e2e_test_runner.py +1 -1
  159. rasa/engine/constants.py +1 -1
  160. rasa/engine/graph.py +2 -2
  161. rasa/engine/recipes/default_recipe.py +26 -2
  162. rasa/engine/validation.py +3 -2
  163. rasa/hooks.py +0 -28
  164. rasa/llm_fine_tuning/annotation_module.py +39 -9
  165. rasa/llm_fine_tuning/conversations.py +3 -0
  166. rasa/llm_fine_tuning/llm_data_preparation_module.py +66 -49
  167. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +5 -7
  168. rasa/llm_fine_tuning/paraphrasing/rephrase_validator.py +52 -44
  169. rasa/llm_fine_tuning/paraphrasing_module.py +10 -12
  170. rasa/llm_fine_tuning/storage.py +4 -4
  171. rasa/llm_fine_tuning/utils.py +63 -1
  172. rasa/model_manager/model_api.py +88 -0
  173. rasa/model_manager/trainer_service.py +4 -4
  174. rasa/plugin.py +1 -11
  175. rasa/privacy/__init__.py +0 -0
  176. rasa/privacy/constants.py +83 -0
  177. rasa/privacy/event_broker_utils.py +77 -0
  178. rasa/privacy/privacy_config.py +281 -0
  179. rasa/privacy/privacy_config_schema.json +86 -0
  180. rasa/privacy/privacy_filter.py +340 -0
  181. rasa/privacy/privacy_manager.py +576 -0
  182. rasa/server.py +23 -2
  183. rasa/shared/constants.py +18 -0
  184. rasa/shared/core/command_payload_reader.py +1 -5
  185. rasa/shared/core/constants.py +4 -3
  186. rasa/shared/core/domain.py +7 -0
  187. rasa/shared/core/events.py +38 -10
  188. rasa/shared/core/flows/constants.py +2 -0
  189. rasa/shared/core/flows/flow.py +127 -14
  190. rasa/shared/core/flows/flows_list.py +18 -1
  191. rasa/shared/core/flows/flows_yaml_schema.json +3 -0
  192. rasa/shared/core/flows/steps/collect.py +46 -2
  193. rasa/shared/core/flows/steps/link.py +7 -2
  194. rasa/shared/core/flows/validation.py +25 -5
  195. rasa/shared/core/slots.py +28 -0
  196. rasa/shared/core/training_data/story_reader/yaml_story_reader.py +1 -4
  197. rasa/shared/exceptions.py +4 -0
  198. rasa/shared/providers/_configs/azure_openai_client_config.py +6 -2
  199. rasa/shared/providers/_configs/default_litellm_client_config.py +1 -1
  200. rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py +1 -1
  201. rasa/shared/providers/_configs/openai_client_config.py +5 -1
  202. rasa/shared/providers/_configs/rasa_llm_client_config.py +1 -1
  203. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +1 -1
  204. rasa/shared/providers/_configs/utils.py +0 -99
  205. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +3 -0
  206. rasa/shared/providers/llm/_base_litellm_client.py +5 -2
  207. rasa/shared/utils/common.py +1 -1
  208. rasa/shared/utils/configs.py +110 -0
  209. rasa/shared/utils/constants.py +0 -3
  210. rasa/shared/utils/llm.py +195 -9
  211. rasa/shared/utils/pykwalify_extensions.py +0 -9
  212. rasa/shared/utils/yaml.py +32 -0
  213. rasa/studio/constants.py +1 -0
  214. rasa/studio/data_handler.py +11 -4
  215. rasa/studio/download.py +167 -0
  216. rasa/studio/link.py +200 -0
  217. rasa/studio/prompts.py +223 -0
  218. rasa/studio/pull/__init__.py +0 -0
  219. rasa/studio/{download/flows.py → pull/data.py} +23 -160
  220. rasa/studio/{download → pull}/domains.py +1 -1
  221. rasa/studio/pull/pull.py +235 -0
  222. rasa/studio/push.py +136 -0
  223. rasa/studio/train.py +1 -1
  224. rasa/studio/upload.py +117 -67
  225. rasa/telemetry.py +82 -25
  226. rasa/tracing/config.py +3 -4
  227. rasa/tracing/constants.py +19 -1
  228. rasa/tracing/instrumentation/attribute_extractors.py +30 -8
  229. rasa/tracing/instrumentation/instrumentation.py +53 -2
  230. rasa/tracing/instrumentation/metrics.py +98 -15
  231. rasa/tracing/metric_instrument_provider.py +75 -3
  232. rasa/utils/common.py +7 -22
  233. rasa/utils/log_utils.py +1 -45
  234. rasa/validator.py +2 -8
  235. rasa/version.py +1 -1
  236. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0rc1.dist-info}/METADATA +8 -9
  237. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0rc1.dist-info}/RECORD +241 -220
  238. rasa/anonymization/__init__.py +0 -2
  239. rasa/anonymization/anonymisation_rule_yaml_reader.py +0 -91
  240. rasa/anonymization/anonymization_pipeline.py +0 -286
  241. rasa/anonymization/anonymization_rule_executor.py +0 -266
  242. rasa/anonymization/anonymization_rule_orchestrator.py +0 -119
  243. rasa/anonymization/schemas/config.yml +0 -47
  244. rasa/anonymization/utils.py +0 -118
  245. rasa/core/channels/inspector/dist/assets/channel-3730f5fd.js +0 -1
  246. rasa/core/channels/inspector/dist/assets/clone-e847561e.js +0 -1
  247. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-efbbfe00.js +0 -1
  248. rasa/studio/download/download.py +0 -439
  249. /rasa/{studio/download → core/information_retrieval/ingestion}/__init__.py +0 -0
  250. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0rc1.dist-info}/NOTICE +0 -0
  251. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0rc1.dist-info}/WHEEL +0 -0
  252. {rasa_pro-3.13.0.dev20250612.dist-info → rasa_pro-3.13.0rc1.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,147 @@
1
+ from typing import Any, Dict, Literal, Optional, Text
2
+
3
+ import structlog
4
+
5
+ from rasa.dialogue_understanding.commands.command_syntax_manager import (
6
+ CommandSyntaxVersion,
7
+ )
8
+ from rasa.dialogue_understanding.generator.constants import (
9
+ DEFAULT_OPENAI_MAX_GENERATED_TOKENS,
10
+ LLM_CONFIG_KEY,
11
+ MODEL_CONFIG_KEY,
12
+ MODEL_NAME_CLAUDE_3_5_SONNET_20240620,
13
+ MODEL_NAME_GPT_4O_2024_11_20,
14
+ OPENAI_PROVIDER,
15
+ PROVIDER_CONFIG_KEY,
16
+ TIMEOUT_CONFIG_KEY,
17
+ )
18
+ from rasa.dialogue_understanding.generator.single_step.single_step_based_llm_command_generator import ( # noqa: E501 # noqa: E501
19
+ SingleStepBasedLLMCommandGenerator,
20
+ )
21
+ from rasa.engine.recipes.default_recipe import DefaultV1Recipe
22
+ from rasa.engine.storage.resource import Resource
23
+ from rasa.engine.storage.storage import ModelStorage
24
+ from rasa.shared.constants import (
25
+ ANTHROPIC_PROVIDER,
26
+ AWS_BEDROCK_PROVIDER,
27
+ AZURE_OPENAI_PROVIDER,
28
+ MAX_TOKENS_CONFIG_KEY,
29
+ PROMPT_TEMPLATE_CONFIG_KEY,
30
+ TEMPERATURE_CONFIG_KEY,
31
+ )
32
+ from rasa.shared.utils.llm import (
33
+ get_default_prompt_template_based_on_model,
34
+ get_prompt_template,
35
+ )
36
+
37
structlogger = structlog.get_logger()

# Prompt template files shipped with this component (v3 command syntax).
_GPT_4O_V3_PROMPT_FILE = "command_prompt_v3_gpt_4o_2024_11_20_template.jinja2"
_CLAUDE_V3_PROMPT_FILE = (
    "command_prompt_v3_claude_3_5_sonnet_20240620_template.jinja2"
)

# LLM client settings used when the user does not configure a model.
DEFAULT_LLM_CONFIG = {
    PROVIDER_CONFIG_KEY: OPENAI_PROVIDER,
    MODEL_CONFIG_KEY: MODEL_NAME_GPT_4O_2024_11_20,
    TEMPERATURE_CONFIG_KEY: 0.0,
    MAX_TOKENS_CONFIG_KEY: DEFAULT_OPENAI_MAX_GENERATED_TOKENS,
    TIMEOUT_CONFIG_KEY: 7,
}

# Both the default and the fallback template are the GPT-4o variant.
DEFAULT_COMMAND_PROMPT_TEMPLATE_FILE_NAME = _GPT_4O_V3_PROMPT_FILE
FALLBACK_COMMAND_PROMPT_TEMPLATE_FILE_NAME = _GPT_4O_V3_PROMPT_FILE

# Maps "<provider>/<model>" identifiers to the prompt tuned for that model.
MODEL_PROMPT_MAPPER = {
    f"{OPENAI_PROVIDER}/{MODEL_NAME_GPT_4O_2024_11_20}": _GPT_4O_V3_PROMPT_FILE,
    f"{AZURE_OPENAI_PROVIDER}/{MODEL_NAME_GPT_4O_2024_11_20}": (
        _GPT_4O_V3_PROMPT_FILE
    ),
    (
        f"{AWS_BEDROCK_PROVIDER}/anthropic."
        f"{MODEL_NAME_CLAUDE_3_5_SONNET_20240620}-v1:0"
    ): _CLAUDE_V3_PROMPT_FILE,
    f"{ANTHROPIC_PROVIDER}/{MODEL_NAME_CLAUDE_3_5_SONNET_20240620}": (
        _CLAUDE_V3_PROMPT_FILE
    ),
}
68
+
69
+
70
@DefaultV1Recipe.register(
    [
        DefaultV1Recipe.ComponentType.COMMAND_GENERATOR,
    ],
    is_trainable=True,
)
class SearchReadyLLMCommandGenerator(SingleStepBasedLLMCommandGenerator):
    """Single-step LLM command generator using the search-ready (v3) prompts.

    All orchestration (prompt rendering, LLM invocation, command parsing)
    lives in ``SingleStepBasedLLMCommandGenerator``; this subclass only
    supplies the component-specific defaults: the LLM client config, the
    prompt template files, the model-to-prompt mapping, and the command
    syntax version. Construction is inherited unchanged from the parent
    class (a pure pass-through ``__init__`` override was removed).
    """

    @staticmethod
    def get_default_llm_config() -> Dict[str, Any]:
        """Get the default LLM config for the command generator."""
        return DEFAULT_LLM_CONFIG

    @staticmethod
    def get_default_prompt_template_file_name() -> str:
        """Get the default prompt template file name for the command generator."""
        return DEFAULT_COMMAND_PROMPT_TEMPLATE_FILE_NAME

    @staticmethod
    def get_fallback_prompt_template_file_name() -> str:
        """Get the fallback prompt template file name for the command generator."""
        return FALLBACK_COMMAND_PROMPT_TEMPLATE_FILE_NAME

    @staticmethod
    def get_model_prompt_mapper() -> Dict[str, str]:
        """Get the model prompt mapper for the command generator."""
        return MODEL_PROMPT_MAPPER

    @staticmethod
    def get_component_command_syntax_version() -> CommandSyntaxVersion:
        """Return the command syntax version emitted by the v3 prompt templates."""
        return CommandSyntaxVersion.v3

    @classmethod
    def _resolve_component_prompt_template(
        cls,
        config: Dict[str, Any],
        prompt_template: Optional[str] = None,
        log_context: Optional[Literal["init", "fingerprint_addon"]] = None,
        log_source_component: Optional[str] = "SearchReadyLLMCommandGenerator",
    ) -> Optional[str]:
        """Get the prompt template from the config or the default prompt template.

        Args:
            config: The component's config.
            prompt_template: An already-resolved template; returned as-is if set.
            log_context: Which caller context to attribute log events to.
            log_source_component: Component name used in log events.

        Returns:
            The prompt template text, or ``None`` if none could be resolved.
        """
        # An already-resolved template (e.g. loaded from model storage) wins.
        if prompt_template is not None:
            return prompt_template

        # Pick the default template that matches the configured model.
        default_command_prompt_template = get_default_prompt_template_based_on_model(
            llm_config=config.get(LLM_CONFIG_KEY, {}) or {},
            model_prompt_mapping=cls.get_model_prompt_mapper(),
            default_prompt_path=cls.get_default_prompt_template_file_name(),
            fallback_prompt_path=cls.get_fallback_prompt_template_file_name(),
            log_source_component=log_source_component,
            log_source_method=log_context,
        )

        # A user-provided template in the config takes precedence over it.
        return get_prompt_template(
            config.get(PROMPT_TEMPLATE_CONFIG_KEY),
            default_command_prompt_template,
            log_source_component=log_source_component,
            log_source_method=log_context,
        )
@@ -0,0 +1,461 @@
1
+ import copy
2
+ from abc import ABC, abstractmethod
3
+ from typing import Any, Dict, List, Literal, Optional, Text
4
+
5
+ import structlog
6
+
7
+ import rasa.shared.utils.io
8
+ from rasa.dialogue_understanding.commands import (
9
+ CannotHandleCommand,
10
+ Command,
11
+ ErrorCommand,
12
+ SetSlotCommand,
13
+ )
14
+ from rasa.dialogue_understanding.commands.command_syntax_manager import (
15
+ CommandSyntaxManager,
16
+ CommandSyntaxVersion,
17
+ )
18
+ from rasa.dialogue_understanding.generator import LLMBasedCommandGenerator
19
+ from rasa.dialogue_understanding.generator.command_parser import (
20
+ parse_commands as parse_commands_using_command_parsers,
21
+ )
22
+ from rasa.dialogue_understanding.generator.command_parser_validator import (
23
+ CommandParserValidatorSingleton,
24
+ )
25
+ from rasa.dialogue_understanding.generator.constants import (
26
+ COMMAND_PROMPT_FILE_NAME,
27
+ FLOW_RETRIEVAL_KEY,
28
+ LLM_BASED_COMMAND_GENERATOR_CONFIG_FILE,
29
+ LLM_CONFIG_KEY,
30
+ USER_INPUT_CONFIG_KEY,
31
+ )
32
+ from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
33
+ from rasa.dialogue_understanding.stack.utils import top_flow_frame
34
+ from rasa.dialogue_understanding.utils import (
35
+ add_commands_to_message_parse_data,
36
+ add_prompt_to_message_parse_data,
37
+ )
38
+ from rasa.engine.graph import ExecutionContext
39
+ from rasa.engine.recipes.default_recipe import DefaultV1Recipe
40
+ from rasa.engine.storage.resource import Resource
41
+ from rasa.engine.storage.storage import ModelStorage
42
+ from rasa.shared.constants import (
43
+ EMBEDDINGS_CONFIG_KEY,
44
+ PROMPT_TEMPLATE_CONFIG_KEY,
45
+ ROUTE_TO_CALM_SLOT,
46
+ )
47
+ from rasa.shared.core.flows import FlowsList
48
+ from rasa.shared.core.trackers import DialogueStateTracker
49
+ from rasa.shared.exceptions import ProviderClientAPIException
50
+ from rasa.shared.nlu.constants import LLM_COMMANDS, LLM_PROMPT, TEXT
51
+ from rasa.shared.nlu.training_data.message import Message
52
+ from rasa.shared.providers.llm.llm_response import LLMResponse
53
+ from rasa.shared.utils.constants import (
54
+ LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
55
+ LOG_COMPONENT_SOURCE_METHOD_INIT,
56
+ )
57
+ from rasa.shared.utils.io import deep_container_fingerprint
58
+ from rasa.shared.utils.llm import (
59
+ allowed_values_for_slot,
60
+ resolve_model_client_config,
61
+ sanitize_message_for_prompt,
62
+ tracker_as_readable_transcript,
63
+ )
64
+ from rasa.utils.log_utils import log_llm
65
+
66
structlogger = structlog.get_logger()  # module-level structured logger
67
+
68
+
69
@DefaultV1Recipe.register(
    [
        DefaultV1Recipe.ComponentType.COMMAND_GENERATOR,
    ],
    is_trainable=True,
)
class SingleStepBasedLLMCommandGenerator(LLMBasedCommandGenerator, ABC):
    """Abstract base class for single-step LLM-based command generators.

    Renders one prompt per user message, invokes the LLM once, and parses
    the response into commands. Concrete subclasses supply the default LLM
    config, prompt template files, the model-to-prompt mapping, and the
    command syntax version.
    """
77
+
78
def __init__(
    self,
    config: Dict[str, Any],
    model_storage: ModelStorage,
    resource: Resource,
    prompt_template: Optional[Text] = None,
    **kwargs: Any,
) -> None:
    """Create the generator and resolve its runtime prompt template."""
    super().__init__(
        config,
        model_storage,
        resource,
        prompt_template=prompt_template,
        **kwargs,
    )

    # An explicitly passed template wins; otherwise a default is derived
    # from the configured model (see `_resolve_component_prompt_template`).
    self.prompt_template = self._resolve_component_prompt_template(
        self.config,
        prompt_template,
        log_context=LOG_COMPONENT_SOURCE_METHOD_INIT,
        log_source_component=self.__class__.__name__,
    )

    # Each concrete subclass pins the command syntax its prompts emit.
    CommandSyntaxManager.set_syntax_version(
        self.get_component_command_syntax_version()
    )

    # NOTE(review): assumed from the key name that this toggles prompt
    # token tracing — confirm against the tracing integration.
    self.trace_prompt_tokens = self.config.get("trace_prompt_tokens", False)
108
+
109
### Implementations of LLMBasedCommandGenerator parent
@staticmethod
def get_default_config() -> Dict[str, Any]:
    """The component's default config (see parent class for full docstring)."""
    defaults: Dict[str, Any] = {
        PROMPT_TEMPLATE_CONFIG_KEY: None,  # TODO: remove in Rasa 4.0.0
        USER_INPUT_CONFIG_KEY: None,
        LLM_CONFIG_KEY: None,
    }
    # Flow retrieval carries its own nested default configuration.
    defaults[FLOW_RETRIEVAL_KEY] = FlowRetrieval.get_default_config()
    return defaults
119
+
120
def persist(self) -> None:
    """Persist this component to disk for future loading."""
    # Both the prompt template and the resolved config are required to
    # restore the component with identical behaviour in `load`.
    self._persist_prompt_template()
    self._persist_config()
    # Flow retrieval is optional and persists its own artifacts.
    retrieval = self.flow_retrieval
    if retrieval is not None:
        retrieval.persist()
126
+
127
def _persist_prompt_template(self) -> None:
    """Write the resolved prompt template into the model storage."""
    with self._model_storage.write_to(self._resource) as path:
        target = path / COMMAND_PROMPT_FILE_NAME
        rasa.shared.utils.io.write_text_file(self.prompt_template, target)
133
+
134
def _persist_config(self) -> None:
    """Persist config as a source of truth for resolved clients."""
    with self._model_storage.write_to(self._resource) as path:
        target = path / LLM_BASED_COMMAND_GENERATOR_CONFIG_FILE
        rasa.shared.utils.io.dump_obj_as_json_to_file(target, self.config)
140
+
141
@classmethod
def load(
    cls: Any,
    config: Dict[str, Any],
    model_storage: ModelStorage,
    resource: Resource,
    execution_context: ExecutionContext,
    **kwargs: Any,
) -> "SingleStepBasedLLMCommandGenerator":
    """Loads trained component (see parent class for full docstring)."""
    # Fail fast if the configured LLM API endpoint is not healthy.
    resolved_llm_config = resolve_model_client_config(
        config.get(LLM_CONFIG_KEY, {})
    )
    cls.perform_llm_health_check(
        resolved_llm_config,
        cls.get_default_llm_config(),
        "llm_based_command_generator.load",
        cls.__name__,
    )

    # Restore the prompt template persisted at training time.
    prompt_template = cls.load_prompt_template_from_model_storage(
        model_storage, resource, COMMAND_PROMPT_FILE_NAME
    )

    command_generator = cls(config, model_storage, resource, prompt_template)

    # Flow retrieval is optional; only restore it when it was enabled.
    if command_generator.enabled_flow_retrieval:
        command_generator.flow_retrieval = cls.load_flow_retrival(
            command_generator.config, model_storage, resource
        )

    return command_generator
174
+
175
async def predict_commands(
    self,
    message: Message,
    flows: FlowsList,
    tracker: Optional[DialogueStateTracker] = None,
    **kwargs: Any,
) -> List[Command]:
    """Predict commands using the LLM.

    Args:
        message: The message from the user.
        flows: The flows available to the user.
        tracker: The tracker containing the current state of the conversation.
        **kwargs: Keyword arguments for forward compatibility.

    Returns:
        The commands generated by the llm.
    """
    prior_commands = self._get_prior_commands(message)

    # Without a tracker or any flows there is nothing to predict against.
    if tracker is None or flows.is_empty():
        return prior_commands

    if self._should_skip_llm_call(prior_commands, flows, tracker):
        return prior_commands

    try:
        predicted = await self._predict_commands(message, flows, tracker)
    except ProviderClientAPIException:
        # An API failure is surfaced as an ErrorCommand rather than raised.
        predicted = [ErrorCommand()]
        structlogger.warning(
            "llm_command_generator.predict_commands.api_exception",
            event_info=(
                "ProviderClientAPIException occurred while predicting commands."
            ),
            commands=predicted,  # no PII
        )

    if not predicted and not prior_commands:
        # Nothing parseable came back from the LLM; fall back explicitly.
        structlogger.warning(
            "llm_command_generator.predict_commands",
            message="No commands were predicted as the LLM response could "
            "not be parsed or the LLM responded with an invalid command. "
            "Returning a CannotHandleCommand instead.",
        )
        predicted = [CannotHandleCommand()]

    # With the coexistence feature in use, route the conversation to CALM.
    if tracker.has_coexistence_routing_slot:
        predicted.append(SetSlotCommand(ROUTE_TO_CALM_SLOT, True))

    log_llm(
        logger=structlogger,
        log_module=self.__class__.__name__,
        log_event="llm_command_generator.predict_commands.finished",
        commands=predicted,
    )

    predicted = self._check_commands_against_slot_mappings(
        predicted, tracker, kwargs.get("domain")
    )

    return self._check_commands_overlap(prior_commands, predicted)
241
+
242
async def _predict_commands(
    self,
    message: Message,
    flows: FlowsList,
    tracker: Optional[DialogueStateTracker] = None,
) -> List[Command]:
    """Predict commands using the LLM.

    Args:
        message: The message from the user.
        flows: The flows available to the user.
        tracker: The tracker containing the current state of the conversation.

    Returns:
        The commands generated by the llm.

    Raises:
        ProviderClientAPIException: If API calls raised an error.
    """
    # Narrow the candidate flows before rendering the prompt.
    relevant_flows = await self.filter_flows(message, flows, tracker)

    rendered_prompt = self.render_template(message, tracker, relevant_flows, flows)
    log_llm(
        logger=structlogger,
        log_module=self.__class__.__name__,
        log_event="llm_command_generator.predict_commands.prompt_rendered",
        prompt=rendered_prompt,
    )

    raw_response = await self.invoke_llm(rendered_prompt)
    llm_response = LLMResponse.ensure_llm_response(raw_response)
    # 'None' remains possible for compatibility with older
    # LLMCommandGenerator implementations whose 'invoke_llm' signalled a
    # failure to generate actions by returning 'None'.
    if llm_response is None or not llm_response.choices:
        structlogger.warning(
            "llm_command_generator.predict_commands.no_actions_generated",
            event_info=(
                "No actions were generated by the LLM. Returning an ErrorCommand."
            ),
        )
        return [ErrorCommand()]

    llm_actions = llm_response.choices[0]

    log_llm(
        logger=structlogger,
        log_module=self.__class__.__name__,
        log_event="llm_command_generator.predict_commands.actions_generated",
        action_list=llm_actions,
    )

    commands = self.parse_commands(llm_actions, tracker, flows)

    # Optionally verify that commands could be parsed out of the raw
    # LLM output (gated by the validator singleton).
    if CommandParserValidatorSingleton.should_validate_command_parser():
        CommandParserValidatorSingleton.validate_if_commands_are_parsed_from_llm_response(
            commands, llm_actions
        )

    self._update_message_parse_data_for_fine_tuning(
        message, commands, rendered_prompt
    )
    add_commands_to_message_parse_data(message, self.__class__.__name__, commands)
    add_prompt_to_message_parse_data(
        message=message,
        component_name=self.__class__.__name__,
        prompt_name="command_generator_prompt",
        user_prompt=rendered_prompt,
        llm_response=llm_response,
    )

    return commands
313
+
314
+ @staticmethod
315
+ def _update_message_parse_data_for_fine_tuning(
316
+ message: Message, commands: List[Command], prompt: str
317
+ ) -> None:
318
+ from rasa.llm_fine_tuning.annotation_module import preparing_fine_tuning_data
319
+
320
+ if preparing_fine_tuning_data:
321
+ # Add commands and prompt to the message object in order to create
322
+ # prompt -> commands pairs for fine-tuning
323
+ message.set(
324
+ LLM_COMMANDS,
325
+ [command.as_dict() for command in commands],
326
+ add_to_output=True,
327
+ )
328
+ message.set(LLM_PROMPT, prompt, add_to_output=True)
329
+
330
+ @classmethod
331
+ def parse_commands(
332
+ cls, actions: Optional[str], tracker: DialogueStateTracker, flows: FlowsList
333
+ ) -> List[Command]:
334
+ """Parse the actions returned by the llm into intent and entities.
335
+
336
+ Args:
337
+ actions: The actions returned by the llm.
338
+ tracker: The tracker containing the current state of the conversation.
339
+ flows: the list of flows
340
+
341
+ Returns:
342
+ The parsed commands.
343
+ """
344
+ commands = parse_commands_using_command_parsers(actions, flows)
345
+ if not commands:
346
+ structlogger.warning(
347
+ f"{cls.__name__}.parse_commands",
348
+ message="No commands were parsed from the LLM actions.",
349
+ actions=actions,
350
+ )
351
+
352
+ return commands
353
+
354
+ ### Helper methods
355
+ def render_template(
356
+ self,
357
+ message: Message,
358
+ tracker: DialogueStateTracker,
359
+ startable_flows: FlowsList,
360
+ all_flows: FlowsList,
361
+ ) -> str:
362
+ """Render the jinja template to create the prompt for the LLM.
363
+
364
+ Args:
365
+ message: The current message from the user.
366
+ tracker: The tracker containing the current state of the conversation.
367
+ startable_flows: The flows startable at this point in time by the user.
368
+ all_flows: all flows present in the assistant
369
+
370
+ Returns:
371
+ The rendered prompt template.
372
+ """
373
+ # need to make this distinction here because current step of the
374
+ # top_calling_frame would be the call step, but we need the collect step from
375
+ # the called frame. If no call is active calling and called frame are the same.
376
+ top_calling_frame = top_flow_frame(tracker.stack)
377
+ top_called_frame = top_flow_frame(tracker.stack, ignore_call_frames=False)
378
+
379
+ top_flow = top_calling_frame.flow(all_flows) if top_calling_frame else None
380
+ current_step = top_called_frame.step(all_flows) if top_called_frame else None
381
+
382
+ flow_slots = self.prepare_current_flow_slots_for_template(
383
+ top_flow, current_step, tracker
384
+ )
385
+ current_slot, current_slot_description = self.prepare_current_slot_for_template(
386
+ current_step
387
+ )
388
+ current_slot_type = None
389
+ current_slot_allowed_values = None
390
+ if current_slot:
391
+ current_slot_type = (
392
+ slot.type_name
393
+ if (slot := tracker.slots.get(current_slot)) is not None
394
+ else None
395
+ )
396
+ current_slot_allowed_values = allowed_values_for_slot(
397
+ tracker.slots.get(current_slot)
398
+ )
399
+ current_conversation = tracker_as_readable_transcript(tracker)
400
+ latest_user_message = sanitize_message_for_prompt(message.get(TEXT))
401
+ current_conversation += f"\nUSER: {latest_user_message}"
402
+
403
+ inputs = {
404
+ "available_flows": self.prepare_flows_for_template(
405
+ startable_flows, tracker
406
+ ),
407
+ "current_conversation": current_conversation,
408
+ "flow_slots": flow_slots,
409
+ "current_flow": top_flow.id if top_flow is not None else None,
410
+ "current_slot": current_slot,
411
+ "current_slot_description": current_slot_description,
412
+ "current_slot_type": current_slot_type,
413
+ "current_slot_allowed_values": current_slot_allowed_values,
414
+ "user_message": latest_user_message,
415
+ }
416
+
417
+ return self.compile_template(self.prompt_template).render(**inputs)
418
+
419
+ @classmethod
420
+ def fingerprint_addon(cls: Any, config: Dict[str, Any]) -> Optional[str]:
421
+ """Add a fingerprint for the graph."""
422
+ # Get the default prompt template based on the model name
423
+ llm_config = resolve_model_client_config(
424
+ config.get(LLM_CONFIG_KEY), cls.__name__
425
+ )
426
+ embedding_config = resolve_model_client_config(
427
+ config.get(FLOW_RETRIEVAL_KEY, {}).get(EMBEDDINGS_CONFIG_KEY),
428
+ FlowRetrieval.__name__,
429
+ )
430
+
431
+ # Create a copy of the config to avoid modifying the original config
432
+ # and update the llm config with the resolved llm config.
433
+ _config_copy = copy.deepcopy(config)
434
+ _config_copy[LLM_CONFIG_KEY] = llm_config
435
+ prompt_template = cls._resolve_component_prompt_template(
436
+ _config_copy,
437
+ log_context=LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
438
+ log_source_component=cls.__name__,
439
+ )
440
+
441
+ return deep_container_fingerprint(
442
+ [prompt_template, llm_config, embedding_config]
443
+ )
444
+
445
    @staticmethod
    @abstractmethod
    def get_component_command_syntax_version() -> CommandSyntaxVersion:
        """Get the command syntax version for the command generator.

        Subclasses must return the ``CommandSyntaxVersion`` their prompt and
        command parser use, so prompt rendering and parsing stay in sync.
        """
        pass
450
+
451
    @classmethod
    @abstractmethod
    def _resolve_component_prompt_template(
        cls: Any,
        config: Dict[str, Any],
        prompt_template: Optional[str] = None,
        log_context: Optional[Literal["init", "fingerprint_addon"]] = None,
        log_source_component: Optional[str] = "SingleStepBasedLLMCommandGenerator",
    ) -> Optional[str]:
        """Get the prompt template from the config or the default prompt template.

        Args:
            config: The component's config, possibly containing a custom
                prompt template.
            prompt_template: An already-loaded template to prefer, if any.
            log_context: Which caller is resolving the template (used for
                logging only).
            log_source_component: Component name to attribute log events to.

        Returns:
            The resolved prompt template, or ``None`` if none is available.
        """
        pass