rasa-pro 3.12.7.dev2__py3-none-any.whl → 3.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic. See the release details for more information.

Files changed (79)
  1. rasa/cli/inspect.py +8 -4
  2. rasa/core/channels/channel.py +93 -0
  3. rasa/core/channels/inspector/dist/assets/{arc-c7691751.js → arc-9f75cc3b.js} +1 -1
  4. rasa/core/channels/inspector/dist/assets/{blockDiagram-38ab4fdb-ab99dff7.js → blockDiagram-38ab4fdb-7f34db23.js} +1 -1
  5. rasa/core/channels/inspector/dist/assets/{c4Diagram-3d4e48cf-08c35a6b.js → c4Diagram-3d4e48cf-948bab2c.js} +1 -1
  6. rasa/core/channels/inspector/dist/assets/channel-dfa68278.js +1 -0
  7. rasa/core/channels/inspector/dist/assets/{classDiagram-70f12bd4-9e9c71c9.js → classDiagram-70f12bd4-53b0dd0e.js} +1 -1
  8. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-f2320105-15e7e2bf.js → classDiagram-v2-f2320105-fdf789e7.js} +1 -1
  9. rasa/core/channels/inspector/dist/assets/clone-edb7f119.js +1 -0
  10. rasa/core/channels/inspector/dist/assets/{createText-2e5e7dd3-9c105cb1.js → createText-2e5e7dd3-87c4ece5.js} +1 -1
  11. rasa/core/channels/inspector/dist/assets/{edges-e0da2a9e-77e89e48.js → edges-e0da2a9e-5a8b0749.js} +1 -1
  12. rasa/core/channels/inspector/dist/assets/{erDiagram-9861fffd-7a011646.js → erDiagram-9861fffd-66da90e2.js} +1 -1
  13. rasa/core/channels/inspector/dist/assets/{flowDb-956e92f1-b6f105ac.js → flowDb-956e92f1-10044f05.js} +1 -1
  14. rasa/core/channels/inspector/dist/assets/{flowDiagram-66a62f08-ce4f18c2.js → flowDiagram-66a62f08-f338f66a.js} +1 -1
  15. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-65e7c670.js +1 -0
  16. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-4a651766-cb5f6da4.js → flowchart-elk-definition-4a651766-b13140aa.js} +1 -1
  17. rasa/core/channels/inspector/dist/assets/{ganttDiagram-c361ad54-e4d19e28.js → ganttDiagram-c361ad54-f2b4a55a.js} +1 -1
  18. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-72cf32ee-727b1c33.js → gitGraphDiagram-72cf32ee-dedc298d.js} +1 -1
  19. rasa/core/channels/inspector/dist/assets/{graph-6e2ab9a7.js → graph-4ede11ff.js} +1 -1
  20. rasa/core/channels/inspector/dist/assets/{index-3862675e-84ec700f.js → index-3862675e-65549d37.js} +1 -1
  21. rasa/core/channels/inspector/dist/assets/{index-098a1a24.js → index-3a23e736.js} +142 -129
  22. rasa/core/channels/inspector/dist/assets/{infoDiagram-f8f76790-78dda442.js → infoDiagram-f8f76790-65439671.js} +1 -1
  23. rasa/core/channels/inspector/dist/assets/{journeyDiagram-49397b02-f1cc6dd1.js → journeyDiagram-49397b02-56d03d98.js} +1 -1
  24. rasa/core/channels/inspector/dist/assets/{layout-d98dcd0c.js → layout-dd48f7f4.js} +1 -1
  25. rasa/core/channels/inspector/dist/assets/{line-838e3d82.js → line-1569ad2c.js} +1 -1
  26. rasa/core/channels/inspector/dist/assets/{linear-eae72406.js → linear-48bf4935.js} +1 -1
  27. rasa/core/channels/inspector/dist/assets/{mindmap-definition-fc14e90a-c96fd84b.js → mindmap-definition-fc14e90a-688504c1.js} +1 -1
  28. rasa/core/channels/inspector/dist/assets/{pieDiagram-8a3498a8-c936d4e2.js → pieDiagram-8a3498a8-78b6d7e6.js} +1 -1
  29. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-120e2f19-b338eb8f.js → quadrantDiagram-120e2f19-048b84b3.js} +1 -1
  30. rasa/core/channels/inspector/dist/assets/{requirementDiagram-deff3bca-c6b6c0d5.js → requirementDiagram-deff3bca-dd67f107.js} +1 -1
  31. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-04a897e0-b9372e19.js → sankeyDiagram-04a897e0-8128436e.js} +1 -1
  32. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-704730f1-479e0a3f.js → sequenceDiagram-704730f1-1a0d1461.js} +1 -1
  33. rasa/core/channels/inspector/dist/assets/{stateDiagram-587899a1-fd26eebc.js → stateDiagram-587899a1-46d388ed.js} +1 -1
  34. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-d93cdb3a-3233e0ae.js → stateDiagram-v2-d93cdb3a-ea42951a.js} +1 -1
  35. rasa/core/channels/inspector/dist/assets/{styles-6aaf32cf-1fdd392b.js → styles-6aaf32cf-7427ed0c.js} +1 -1
  36. rasa/core/channels/inspector/dist/assets/{styles-9a916d00-6d7bfa1b.js → styles-9a916d00-ff5e5a16.js} +1 -1
  37. rasa/core/channels/inspector/dist/assets/{styles-c10674c1-f86aab11.js → styles-c10674c1-7b3680cf.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-08f97a94-e3e49d7a.js → svgDrawCommon-08f97a94-f860f2ad.js} +1 -1
  39. rasa/core/channels/inspector/dist/assets/{timeline-definition-85554ec2-6fe08b4d.js → timeline-definition-85554ec2-2eebf0c8.js} +1 -1
  40. rasa/core/channels/inspector/dist/assets/{xychartDiagram-e933f94c-c2e06fd6.js → xychartDiagram-e933f94c-5d7f4e96.js} +1 -1
  41. rasa/core/channels/inspector/dist/index.html +1 -1
  42. rasa/core/channels/inspector/src/App.tsx +3 -2
  43. rasa/core/channels/inspector/src/components/Chat.tsx +23 -2
  44. rasa/core/channels/inspector/src/components/DiagramFlow.tsx +2 -5
  45. rasa/core/channels/inspector/src/helpers/conversation.ts +16 -0
  46. rasa/core/channels/inspector/src/types.ts +1 -1
  47. rasa/core/channels/voice_ready/audiocodes.py +7 -4
  48. rasa/core/channels/voice_ready/jambonz.py +25 -5
  49. rasa/core/channels/voice_ready/jambonz_protocol.py +4 -0
  50. rasa/core/channels/voice_ready/twilio_voice.py +48 -1
  51. rasa/core/channels/voice_stream/tts/azure.py +11 -2
  52. rasa/core/channels/voice_stream/twilio_media_streams.py +101 -26
  53. rasa/core/nlg/contextual_response_rephraser.py +3 -0
  54. rasa/core/policies/enterprise_search_policy.py +10 -0
  55. rasa/core/policies/intentless_policy.py +3 -0
  56. rasa/dialogue_understanding/coexistence/llm_based_router.py +8 -0
  57. rasa/dialogue_understanding/generator/flow_retrieval.py +1 -4
  58. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +13 -0
  59. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +22 -10
  60. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +27 -12
  61. rasa/dialogue_understanding_test/du_test_case.py +16 -8
  62. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +3 -0
  63. rasa/shared/constants.py +1 -0
  64. rasa/shared/providers/_utils.py +83 -0
  65. rasa/shared/providers/llm/_base_litellm_client.py +6 -3
  66. rasa/shared/providers/llm/azure_openai_llm_client.py +6 -68
  67. rasa/shared/providers/router/_base_litellm_router_client.py +53 -1
  68. rasa/shared/utils/constants.py +3 -0
  69. rasa/shared/utils/llm.py +69 -23
  70. rasa/validator.py +1 -2
  71. rasa/version.py +1 -1
  72. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.8.dist-info}/METADATA +2 -2
  73. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.8.dist-info}/RECORD +76 -75
  74. rasa/core/channels/inspector/dist/assets/channel-11268142.js +0 -1
  75. rasa/core/channels/inspector/dist/assets/clone-ff7f2ce7.js +0 -1
  76. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-cba7ae20.js +0 -1
  77. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.8.dist-info}/NOTICE +0 -0
  78. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.8.dist-info}/WHEEL +0 -0
  79. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.8.dist-info}/entry_points.txt +0 -0
@@ -52,6 +52,10 @@ from rasa.shared.exceptions import ProviderClientAPIException
52
52
  from rasa.shared.nlu.constants import TEXT
53
53
  from rasa.shared.nlu.training_data.message import Message
54
54
  from rasa.shared.providers.llm.llm_response import LLMResponse
55
+ from rasa.shared.utils.constants import (
56
+ LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
57
+ LOG_COMPONENT_SOURCE_METHOD_INIT,
58
+ )
55
59
  from rasa.shared.utils.io import deep_container_fingerprint, raise_deprecation_warning
56
60
  from rasa.shared.utils.llm import (
57
61
  allowed_values_for_slot,
@@ -330,6 +334,8 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
330
334
  return get_prompt_template(
331
335
  config.get("prompt_templates", {}).get(key, {}).get(FILE_PATH_KEY),
332
336
  default_value,
337
+ log_source_component=MultiStepLLMCommandGenerator.__name__,
338
+ log_source_method=LOG_COMPONENT_SOURCE_METHOD_INIT,
333
339
  )
334
340
 
335
341
  @classmethod
@@ -786,17 +792,24 @@ class MultiStepLLMCommandGenerator(LLMBasedCommandGenerator):
786
792
  @classmethod
787
793
  def fingerprint_addon(cls, config: Dict[str, Any]) -> Optional[str]:
788
794
  """Add a fingerprint for the graph."""
795
+ get_prompt_template_log_params = {
796
+ "log_source_component": MultiStepLLMCommandGenerator.__name__,
797
+ "log_source_method": LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
798
+ }
799
+
789
800
  handle_flows_template = get_prompt_template(
790
801
  config.get("prompt_templates", {})
791
802
  .get(HANDLE_FLOWS_KEY, {})
792
803
  .get(FILE_PATH_KEY),
793
804
  DEFAULT_HANDLE_FLOWS_TEMPLATE,
805
+ **get_prompt_template_log_params,
794
806
  )
795
807
  fill_slots_template = get_prompt_template(
796
808
  config.get("prompt_templates", {})
797
809
  .get(FILL_SLOTS_KEY, {})
798
810
  .get(FILE_PATH_KEY),
799
811
  DEFAULT_FILL_SLOTS_TEMPLATE,
812
+ **get_prompt_template_log_params,
800
813
  )
801
814
 
802
815
  llm_config = resolve_model_client_config(
@@ -1,5 +1,5 @@
1
1
  import copy
2
- from typing import Any, Dict, List, Optional, Text
2
+ from typing import Any, Dict, List, Literal, Optional, Text
3
3
 
4
4
  import structlog
5
5
 
@@ -58,6 +58,10 @@ from rasa.shared.exceptions import ProviderClientAPIException
58
58
  from rasa.shared.nlu.constants import LLM_COMMANDS, LLM_PROMPT, TEXT
59
59
  from rasa.shared.nlu.training_data.message import Message
60
60
  from rasa.shared.providers.llm.llm_response import LLMResponse
61
+ from rasa.shared.utils.constants import (
62
+ LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
63
+ LOG_COMPONENT_SOURCE_METHOD_INIT,
64
+ )
61
65
  from rasa.shared.utils.io import deep_container_fingerprint
62
66
  from rasa.shared.utils.llm import (
63
67
  allowed_values_for_slot,
@@ -187,8 +191,8 @@ class CompactLLMCommandGenerator(LLMBasedCommandGenerator):
187
191
  )
188
192
 
189
193
  # Get the prompt template from the config or the default prompt template.
190
- self.prompt_template = self.resolve_component_prompt_template(
191
- self.config, prompt_template
194
+ self.prompt_template = self._resolve_component_prompt_template(
195
+ self.config, prompt_template, log_context=LOG_COMPONENT_SOURCE_METHOD_INIT
192
196
  )
193
197
 
194
198
  # Set the command syntax version to v2
@@ -539,7 +543,9 @@ class CompactLLMCommandGenerator(LLMBasedCommandGenerator):
539
543
  # and update the llm config with the resolved llm config.
540
544
  _config_copy = copy.deepcopy(config)
541
545
  _config_copy[LLM_CONFIG_KEY] = llm_config
542
- prompt_template = cls.resolve_component_prompt_template(_config_copy)
546
+ prompt_template = cls._resolve_component_prompt_template(
547
+ _config_copy, log_context=LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON
548
+ )
543
549
 
544
550
  return deep_container_fingerprint(
545
551
  [prompt_template, llm_config, embedding_config]
@@ -555,20 +561,26 @@ class CompactLLMCommandGenerator(LLMBasedCommandGenerator):
555
561
  return CommandSyntaxVersion.v2
556
562
 
557
563
  @staticmethod
558
- def resolve_component_prompt_template(
559
- config: Dict[str, Any], prompt_template: Optional[str] = None
564
+ def _resolve_component_prompt_template(
565
+ config: Dict[str, Any],
566
+ prompt_template: Optional[str] = None,
567
+ log_context: Optional[Literal["init", "fingerprint_addon"]] = None,
560
568
  ) -> Optional[str]:
561
569
  """Get the prompt template from the config or the default prompt template."""
562
570
  # Get the default prompt template based on the model name.
563
571
  default_command_prompt_template = get_default_prompt_template_based_on_model(
564
- config.get(LLM_CONFIG_KEY, {}) or {},
565
- MODEL_PROMPT_MAPPER,
566
- DEFAULT_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
567
- FALLBACK_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
572
+ llm_config=config.get(LLM_CONFIG_KEY, {}) or {},
573
+ model_prompt_mapping=MODEL_PROMPT_MAPPER,
574
+ default_prompt_path=DEFAULT_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
575
+ fallback_prompt_path=FALLBACK_COMMAND_PROMPT_TEMPLATE_FILE_NAME,
576
+ log_source_component=CompactLLMCommandGenerator.__name__,
577
+ log_source_method=log_context,
568
578
  )
569
579
 
570
580
  # Return the prompt template either from the config or the default prompt.
571
581
  return prompt_template or get_prompt_template(
572
582
  config.get(PROMPT_TEMPLATE_CONFIG_KEY),
573
583
  default_command_prompt_template,
584
+ log_source_component=CompactLLMCommandGenerator.__name__,
585
+ log_source_method=log_context,
574
586
  )
@@ -1,5 +1,5 @@
1
1
  import importlib.resources
2
- from typing import Any, Dict, Optional, Text
2
+ from typing import Any, Dict, Literal, Optional, Text
3
3
 
4
4
  import structlog
5
5
 
@@ -25,8 +25,12 @@ from rasa.shared.constants import (
25
25
  PROMPT_CONFIG_KEY,
26
26
  PROMPT_TEMPLATE_CONFIG_KEY,
27
27
  )
28
+ from rasa.shared.utils.constants import LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON
28
29
  from rasa.shared.utils.io import deep_container_fingerprint
29
- from rasa.shared.utils.llm import get_prompt_template, resolve_model_client_config
30
+ from rasa.shared.utils.llm import (
31
+ get_prompt_template,
32
+ resolve_model_client_config,
33
+ )
30
34
 
31
35
  DEFAULT_COMMAND_PROMPT_TEMPLATE = importlib.resources.read_text(
32
36
  "rasa.dialogue_understanding.generator.prompt_templates",
@@ -72,9 +76,6 @@ class SingleStepLLMCommandGenerator(CompactLLMCommandGenerator):
72
76
  "Please use the config parameter 'prompt_template' instead. "
73
77
  ),
74
78
  )
75
- self.prompt_template = self.resolve_component_prompt_template(
76
- config, prompt_template
77
- )
78
79
 
79
80
  # Set the command syntax version to v1
80
81
  CommandSyntaxManager.set_syntax_version(
@@ -95,7 +96,9 @@ class SingleStepLLMCommandGenerator(CompactLLMCommandGenerator):
95
96
  @classmethod
96
97
  def fingerprint_addon(cls: Any, config: Dict[str, Any]) -> Optional[str]:
97
98
  """Add a fingerprint for the graph."""
98
- prompt_template = cls.resolve_component_prompt_template(config)
99
+ prompt_template = cls._resolve_component_prompt_template(
100
+ config, log_context=LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON
101
+ )
99
102
  llm_config = resolve_model_client_config(
100
103
  config.get(LLM_CONFIG_KEY), SingleStepLLMCommandGenerator.__name__
101
104
  )
@@ -117,17 +120,29 @@ class SingleStepLLMCommandGenerator(CompactLLMCommandGenerator):
117
120
  return CommandSyntaxVersion.v1
118
121
 
119
122
  @staticmethod
120
- def resolve_component_prompt_template(
121
- config: Dict[str, Any], prompt_template: Optional[str] = None
123
+ def _resolve_component_prompt_template(
124
+ config: Dict[str, Any],
125
+ prompt_template: Optional[str] = None,
126
+ log_context: Optional[Literal["init", "fingerprint_addon"]] = None,
122
127
  ) -> Optional[str]:
123
128
  """Get the prompt template from the config or the default prompt template."""
124
- # Get the default prompt template based on the model name.
125
- config_prompt = (
129
+ # Case when model is being loaded
130
+ if prompt_template is not None:
131
+ return prompt_template
132
+
133
+ # The prompt can be configured in the config via the "prompt" (deprecated) or
134
+ # "prompt_template" properties
135
+ prompt_template_path = (
126
136
  config.get(PROMPT_CONFIG_KEY)
127
137
  or config.get(PROMPT_TEMPLATE_CONFIG_KEY)
128
138
  or None
129
139
  )
130
- return prompt_template or get_prompt_template(
131
- config_prompt,
140
+
141
+ # Try to load the template from the given path or fallback to the default for
142
+ # the component
143
+ return get_prompt_template(
144
+ prompt_template_path,
132
145
  DEFAULT_COMMAND_PROMPT_TEMPLATE,
146
+ log_source_component=SingleStepLLMCommandGenerator.__name__,
147
+ log_source_method=log_context,
133
148
  )
@@ -241,19 +241,27 @@ class DialogueUnderstandingTestStep(BaseModel):
241
241
  # Safely extract commands from the step.
242
242
  commands = []
243
243
  for command in step.get(KEY_COMMANDS, []):
244
+ parsed_commands = None
244
245
  try:
245
- commands.extend(
246
- parse_commands(
247
- command,
248
- flows,
249
- clarify_options_optional=True,
250
- additional_commands=custom_command_classes,
251
- default_commands_to_remove=remove_default_commands,
252
- )
246
+ parsed_commands = parse_commands(
247
+ command,
248
+ flows,
249
+ clarify_options_optional=True,
250
+ additional_commands=custom_command_classes,
251
+ default_commands_to_remove=remove_default_commands,
253
252
  )
254
253
  except (IndexError, ValueError) as e:
255
254
  raise ValueError(f"Failed to parse command '{command}': {e}") from e
256
255
 
256
+ if not parsed_commands:
257
+ raise ValueError(
258
+ f"Failed to parse command '{command}': command parser returned "
259
+ f"None. Please make sure that you are using the correct command "
260
+ f"syntax and the command arguments are valid."
261
+ )
262
+
263
+ commands.extend(parsed_commands)
264
+
257
265
  # Construct the DialogueUnderstandingTestStep
258
266
  return DialogueUnderstandingTestStep(
259
267
  actor=ACTOR_USER if ACTOR_USER in step else ACTOR_BOT,
@@ -19,6 +19,7 @@ from rasa.shared.constants import (
19
19
  )
20
20
  from rasa.shared.exceptions import ProviderClientAPIException
21
21
  from rasa.shared.providers.mappings import OPENAI_PROVIDER
22
+ from rasa.shared.utils.constants import LOG_COMPONENT_SOURCE_METHOD_INIT
22
23
  from rasa.shared.utils.llm import (
23
24
  USER,
24
25
  get_prompt_template,
@@ -54,6 +55,8 @@ class ConversationRephraser:
54
55
  self.prompt_template = get_prompt_template(
55
56
  self.config.get(PROMPT_TEMPLATE_CONFIG_KEY),
56
57
  DEFAULT_REPHRASING_PROMPT_TEMPLATE,
58
+ log_source_component=ConversationRephraser.__name__,
59
+ log_source_method=LOG_COMPONENT_SOURCE_METHOD_INIT,
57
60
  )
58
61
 
59
62
  @staticmethod
rasa/shared/constants.py CHANGED
@@ -238,6 +238,7 @@ EXTRA_PARAMETERS_KEY = "extra_parameters"
238
238
  MODEL_GROUP_ID_KEY = "model_group_id"
239
239
  MODEL_LIST_KEY = "model_list"
240
240
  LITELLM_PARAMS_KEY = "litellm_params"
241
+ _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"
241
242
 
242
243
  LLM_API_HEALTH_CHECK_ENV_VAR = "LLM_API_HEALTH_CHECK"
243
244
  LLM_API_HEALTH_CHECK_DEFAULT_VALUE = "false"
@@ -1,7 +1,11 @@
1
+ from typing import Any, Dict, Optional
2
+
1
3
  import structlog
2
4
  from litellm import validate_environment
3
5
 
4
6
  from rasa.shared.constants import (
7
+ API_BASE_CONFIG_KEY,
8
+ API_VERSION_CONFIG_KEY,
5
9
  AWS_ACCESS_KEY_ID_CONFIG_KEY,
6
10
  AWS_ACCESS_KEY_ID_ENV_VAR,
7
11
  AWS_REGION_NAME_CONFIG_KEY,
@@ -10,6 +14,9 @@ from rasa.shared.constants import (
10
14
  AWS_SECRET_ACCESS_KEY_ENV_VAR,
11
15
  AWS_SESSION_TOKEN_CONFIG_KEY,
12
16
  AWS_SESSION_TOKEN_ENV_VAR,
17
+ AZURE_API_BASE_ENV_VAR,
18
+ AZURE_API_VERSION_ENV_VAR,
19
+ DEPLOYMENT_CONFIG_KEY,
13
20
  )
14
21
  from rasa.shared.exceptions import ProviderClientValidationError
15
22
  from rasa.shared.providers.embedding._base_litellm_embedding_client import (
@@ -77,3 +84,79 @@ def validate_aws_setup_for_litellm_clients(
77
84
  missing_environment_variables=missing_environment_variables,
78
85
  )
79
86
  raise ProviderClientValidationError(event_info)
87
+
88
+
89
+ def validate_azure_client_setup(
90
+ api_base: Optional[str],
91
+ api_version: Optional[str],
92
+ deployment: Optional[str],
93
+ ) -> None:
94
+ """Validates the Azure setup for LiteLLM Router clients to ensure
95
+ that all required configuration parameters are set.
96
+ Raises:
97
+ ProviderClientValidationError: If any required Azure configurations
98
+ is missing.
99
+ """
100
+
101
+ def generate_event_info_for_missing_setting(
102
+ setting: str,
103
+ setting_env_var: Optional[str] = None,
104
+ setting_config_key: Optional[str] = None,
105
+ ) -> str:
106
+ """Generate a part of the message with instructions on what to set
107
+ for the missing client setting.
108
+ """
109
+ info = "Set {setting} with {options}. "
110
+ options = ""
111
+ if setting_env_var is not None:
112
+ options += f"environment variable '{setting_env_var}'"
113
+ if setting_config_key is not None and setting_env_var is not None:
114
+ options += " or "
115
+ if setting_config_key is not None:
116
+ options += f"config key '{setting_config_key}'"
117
+
118
+ return info.format(setting=setting, options=options)
119
+
120
+ # All required settings for Azure OpenAI client
121
+ settings: Dict[str, Dict[str, Any]] = {
122
+ "API Base": {
123
+ "current_value": api_base,
124
+ "env_var": AZURE_API_BASE_ENV_VAR,
125
+ "config_key": API_BASE_CONFIG_KEY,
126
+ },
127
+ "API Version": {
128
+ "current_value": api_version,
129
+ "env_var": AZURE_API_VERSION_ENV_VAR,
130
+ "config_key": API_VERSION_CONFIG_KEY,
131
+ },
132
+ "Deployment Name": {
133
+ "current_value": deployment,
134
+ "env_var": None,
135
+ "config_key": DEPLOYMENT_CONFIG_KEY,
136
+ },
137
+ }
138
+
139
+ missing_settings = [
140
+ setting_name
141
+ for setting_name, setting_info in settings.items()
142
+ if setting_info["current_value"] is None
143
+ ]
144
+
145
+ if missing_settings:
146
+ event_info = f"Client settings not set: {', '.join(missing_settings)}. "
147
+
148
+ for missing_setting in missing_settings:
149
+ if settings[missing_setting]["current_value"] is not None:
150
+ continue
151
+ event_info += generate_event_info_for_missing_setting(
152
+ missing_setting,
153
+ settings[missing_setting]["env_var"],
154
+ settings[missing_setting]["config_key"],
155
+ )
156
+
157
+ structlogger.error(
158
+ "azure_openai_llm_client.not_configured",
159
+ event_info=event_info,
160
+ missing_settings=missing_settings,
161
+ )
162
+ raise ProviderClientValidationError(event_info)
@@ -7,7 +7,12 @@ from typing import Any, Dict, List, Union, cast
7
7
  import structlog
8
8
  from litellm import acompletion, completion, validate_environment
9
9
 
10
- from rasa.shared.constants import API_BASE_CONFIG_KEY, API_KEY, ROLE_USER
10
+ from rasa.shared.constants import (
11
+ _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY,
12
+ API_BASE_CONFIG_KEY,
13
+ API_KEY,
14
+ ROLE_USER,
15
+ )
11
16
  from rasa.shared.exceptions import (
12
17
  ProviderClientAPIException,
13
18
  ProviderClientValidationError,
@@ -21,8 +26,6 @@ from rasa.shared.utils.io import resolve_environment_variables, suppress_logs
21
26
 
22
27
  structlogger = structlog.get_logger()
23
28
 
24
- _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"
25
-
26
29
  # Suppress LiteLLM info and debug logs - Global level.
27
30
  logging.getLogger("LiteLLM").setLevel(logging.WARNING)
28
31
 
@@ -7,15 +7,12 @@ from typing import Any, Dict, Optional
7
7
  import structlog
8
8
 
9
9
  from rasa.shared.constants import (
10
- API_BASE_CONFIG_KEY,
11
10
  API_KEY,
12
- API_VERSION_CONFIG_KEY,
13
11
  AZURE_API_BASE_ENV_VAR,
14
12
  AZURE_API_KEY_ENV_VAR,
15
13
  AZURE_API_TYPE_ENV_VAR,
16
14
  AZURE_API_VERSION_ENV_VAR,
17
15
  AZURE_OPENAI_PROVIDER,
18
- DEPLOYMENT_CONFIG_KEY,
19
16
  OPENAI_API_BASE_ENV_VAR,
20
17
  OPENAI_API_KEY_ENV_VAR,
21
18
  OPENAI_API_TYPE_ENV_VAR,
@@ -26,6 +23,7 @@ from rasa.shared.providers._configs.azure_openai_client_config import (
26
23
  AzureEntraIDOAuthConfig,
27
24
  AzureOpenAIClientConfig,
28
25
  )
26
+ from rasa.shared.providers._utils import validate_azure_client_setup
29
27
  from rasa.shared.providers.constants import (
30
28
  DEFAULT_AZURE_API_KEY_NAME,
31
29
  LITE_LLM_API_BASE_FIELD,
@@ -348,68 +346,8 @@ class AzureOpenAILLMClient(_BaseLiteLLMClient):
348
346
  def validate_client_setup(self) -> None:
349
347
  """Validates that all required configuration parameters are set."""
350
348
 
351
- def generate_event_info_for_missing_setting(
352
- setting: str,
353
- setting_env_var: Optional[str] = None,
354
- setting_config_key: Optional[str] = None,
355
- ) -> str:
356
- """Generate a part of the message with instructions on what to set
357
- for the missing client setting.
358
- """
359
- info = "Set {setting} with {options}. "
360
- options = ""
361
- if setting_env_var is not None:
362
- options += f"environment variable '{setting_env_var}'"
363
- if setting_config_key is not None and setting_env_var is not None:
364
- options += " or "
365
- if setting_config_key is not None:
366
- options += f"config key '{setting_config_key}'"
367
-
368
- return info.format(setting=setting, options=options)
369
-
370
- env_var_field = "env_var"
371
- config_key_field = "config_key"
372
- current_value_field = "current_value"
373
- # All required settings for Azure OpenAI client
374
- settings: Dict[str, Dict[str, Any]] = {
375
- "API Base": {
376
- current_value_field: self.api_base,
377
- env_var_field: AZURE_API_BASE_ENV_VAR,
378
- config_key_field: API_BASE_CONFIG_KEY,
379
- },
380
- "API Version": {
381
- current_value_field: self.api_version,
382
- env_var_field: AZURE_API_VERSION_ENV_VAR,
383
- config_key_field: API_VERSION_CONFIG_KEY,
384
- },
385
- "Deployment Name": {
386
- current_value_field: self.deployment,
387
- env_var_field: None,
388
- config_key_field: DEPLOYMENT_CONFIG_KEY,
389
- },
390
- }
391
-
392
- missing_settings = [
393
- setting_name
394
- for setting_name, setting_info in settings.items()
395
- if setting_info[current_value_field] is None
396
- ]
397
-
398
- if missing_settings:
399
- event_info = f"Client settings not set: " f"{', '.join(missing_settings)}. "
400
-
401
- for missing_setting in missing_settings:
402
- if settings[missing_setting][current_value_field] is not None:
403
- continue
404
- event_info += generate_event_info_for_missing_setting(
405
- missing_setting,
406
- settings[missing_setting][env_var_field],
407
- settings[missing_setting][config_key_field],
408
- )
409
-
410
- structlogger.error(
411
- "azure_openai_llm_client.not_configured",
412
- event_info=event_info,
413
- missing_settings=missing_settings,
414
- )
415
- raise ProviderClientValidationError(event_info)
349
+ return validate_azure_client_setup(
350
+ api_base=self.api_base,
351
+ api_version=self.api_version,
352
+ deployment=self.deployment,
353
+ )
@@ -5,10 +5,16 @@ from copy import deepcopy
5
5
  from typing import Any, Dict, List
6
6
 
7
7
  import structlog
8
- from litellm import Router
8
+ from litellm import Router, validate_environment
9
9
 
10
10
  from rasa.shared.constants import (
11
+ _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY,
12
+ API_BASE_CONFIG_KEY,
11
13
  API_KEY,
14
+ API_VERSION_CONFIG_KEY,
15
+ AZURE_API_BASE_ENV_VAR,
16
+ AZURE_API_VERSION_ENV_VAR,
17
+ AZURE_OPENAI_PROVIDER,
12
18
  LITELLM_PARAMS_KEY,
13
19
  MODEL_CONFIG_KEY,
14
20
  MODEL_GROUP_ID_CONFIG_KEY,
@@ -23,6 +29,7 @@ from rasa.shared.providers._configs.azure_entra_id_config import AzureEntraIDOAu
23
29
  from rasa.shared.providers._configs.litellm_router_client_config import (
24
30
  LiteLLMRouterClientConfig,
25
31
  )
32
+ from rasa.shared.providers._utils import validate_azure_client_setup
26
33
  from rasa.shared.utils.io import resolve_environment_variables
27
34
 
28
35
  structlogger = structlog.get_logger()
@@ -183,6 +190,7 @@ class _BaseLiteLLMRouterClient:
183
190
 
184
191
  def _create_router_client(self) -> Router:
185
192
  resolved_model_configurations = self._resolve_env_vars_in_model_configurations()
193
+ self._validate_model_configurations(resolved_model_configurations)
186
194
  return Router(model_list=resolved_model_configurations, **self.router_settings)
187
195
 
188
196
  def _has_oauth(self) -> bool:
@@ -214,3 +222,47 @@ class _BaseLiteLLMRouterClient:
214
222
  )
215
223
  model_configuration_with_resolved_keys.append(resolved_model_configuration)
216
224
  return model_configuration_with_resolved_keys
225
+
226
+ def _validate_model_configurations(
227
+ self, resolved_model_configurations: List[Dict[str, Any]]
228
+ ) -> None:
229
+ """Validates the model configurations.
230
+ Args:
231
+ resolved_model_configurations: (List[Dict[str, Any]]) The list of model
232
+ configurations with resolved environment variables.
233
+ Raises:
234
+ ProviderClientValidationError: If the model configurations are invalid.
235
+ """
236
+ for model_configuration in resolved_model_configurations:
237
+ litellm_params = model_configuration.get(LITELLM_PARAMS_KEY, {})
238
+
239
+ model = litellm_params.get(MODEL_CONFIG_KEY)
240
+ provider, deployment = model.split("/", 1)
241
+ api_base = litellm_params.get(API_BASE_CONFIG_KEY)
242
+
243
+ if provider.lower() == AZURE_OPENAI_PROVIDER:
244
+ validate_azure_client_setup(
245
+ api_base=api_base or os.getenv(AZURE_API_BASE_ENV_VAR),
246
+ api_version=litellm_params.get(API_VERSION_CONFIG_KEY)
247
+ or os.getenv(AZURE_API_VERSION_ENV_VAR),
248
+ deployment=deployment,
249
+ )
250
+ else:
251
+ validation_info = validate_environment(
252
+ model=model,
253
+ api_key=litellm_params.get(API_KEY),
254
+ api_base=api_base,
255
+ )
256
+ if missing_environment_variables := validation_info.get(
257
+ _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY
258
+ ):
259
+ event_info = (
260
+ f"Environment variables: {missing_environment_variables} "
261
+ f"not set. Required for API calls."
262
+ )
263
+ structlogger.error(
264
+ "base_litellm_router_client.validate_environment_variables",
265
+ event_info=event_info,
266
+ missing_environment_variables=missing_environment_variables,
267
+ )
268
+ raise ProviderClientValidationError(event_info)
@@ -5,3 +5,6 @@ DEFAULT_READ_YAML_FILE_CACHE_MAXSIZE = 256
5
5
  RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME = (
6
6
  "RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS"
7
7
  )
8
+
9
+ LOG_COMPONENT_SOURCE_METHOD_INIT = "init"
10
+ LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON = "fingerprint_addon"