rasa-pro 3.12.7.dev2__py3-none-any.whl → 3.12.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (80)
  1. rasa/cli/inspect.py +8 -4
  2. rasa/core/channels/channel.py +93 -0
  3. rasa/core/channels/inspector/dist/assets/{arc-c7691751.js → arc-9f75cc3b.js} +1 -1
  4. rasa/core/channels/inspector/dist/assets/{blockDiagram-38ab4fdb-ab99dff7.js → blockDiagram-38ab4fdb-7f34db23.js} +1 -1
  5. rasa/core/channels/inspector/dist/assets/{c4Diagram-3d4e48cf-08c35a6b.js → c4Diagram-3d4e48cf-948bab2c.js} +1 -1
  6. rasa/core/channels/inspector/dist/assets/channel-dfa68278.js +1 -0
  7. rasa/core/channels/inspector/dist/assets/{classDiagram-70f12bd4-9e9c71c9.js → classDiagram-70f12bd4-53b0dd0e.js} +1 -1
  8. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-f2320105-15e7e2bf.js → classDiagram-v2-f2320105-fdf789e7.js} +1 -1
  9. rasa/core/channels/inspector/dist/assets/clone-edb7f119.js +1 -0
  10. rasa/core/channels/inspector/dist/assets/{createText-2e5e7dd3-9c105cb1.js → createText-2e5e7dd3-87c4ece5.js} +1 -1
  11. rasa/core/channels/inspector/dist/assets/{edges-e0da2a9e-77e89e48.js → edges-e0da2a9e-5a8b0749.js} +1 -1
  12. rasa/core/channels/inspector/dist/assets/{erDiagram-9861fffd-7a011646.js → erDiagram-9861fffd-66da90e2.js} +1 -1
  13. rasa/core/channels/inspector/dist/assets/{flowDb-956e92f1-b6f105ac.js → flowDb-956e92f1-10044f05.js} +1 -1
  14. rasa/core/channels/inspector/dist/assets/{flowDiagram-66a62f08-ce4f18c2.js → flowDiagram-66a62f08-f338f66a.js} +1 -1
  15. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-65e7c670.js +1 -0
  16. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-4a651766-cb5f6da4.js → flowchart-elk-definition-4a651766-b13140aa.js} +1 -1
  17. rasa/core/channels/inspector/dist/assets/{ganttDiagram-c361ad54-e4d19e28.js → ganttDiagram-c361ad54-f2b4a55a.js} +1 -1
  18. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-72cf32ee-727b1c33.js → gitGraphDiagram-72cf32ee-dedc298d.js} +1 -1
  19. rasa/core/channels/inspector/dist/assets/{graph-6e2ab9a7.js → graph-4ede11ff.js} +1 -1
  20. rasa/core/channels/inspector/dist/assets/{index-3862675e-84ec700f.js → index-3862675e-65549d37.js} +1 -1
  21. rasa/core/channels/inspector/dist/assets/{index-098a1a24.js → index-3a23e736.js} +142 -129
  22. rasa/core/channels/inspector/dist/assets/{infoDiagram-f8f76790-78dda442.js → infoDiagram-f8f76790-65439671.js} +1 -1
  23. rasa/core/channels/inspector/dist/assets/{journeyDiagram-49397b02-f1cc6dd1.js → journeyDiagram-49397b02-56d03d98.js} +1 -1
  24. rasa/core/channels/inspector/dist/assets/{layout-d98dcd0c.js → layout-dd48f7f4.js} +1 -1
  25. rasa/core/channels/inspector/dist/assets/{line-838e3d82.js → line-1569ad2c.js} +1 -1
  26. rasa/core/channels/inspector/dist/assets/{linear-eae72406.js → linear-48bf4935.js} +1 -1
  27. rasa/core/channels/inspector/dist/assets/{mindmap-definition-fc14e90a-c96fd84b.js → mindmap-definition-fc14e90a-688504c1.js} +1 -1
  28. rasa/core/channels/inspector/dist/assets/{pieDiagram-8a3498a8-c936d4e2.js → pieDiagram-8a3498a8-78b6d7e6.js} +1 -1
  29. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-120e2f19-b338eb8f.js → quadrantDiagram-120e2f19-048b84b3.js} +1 -1
  30. rasa/core/channels/inspector/dist/assets/{requirementDiagram-deff3bca-c6b6c0d5.js → requirementDiagram-deff3bca-dd67f107.js} +1 -1
  31. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-04a897e0-b9372e19.js → sankeyDiagram-04a897e0-8128436e.js} +1 -1
  32. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-704730f1-479e0a3f.js → sequenceDiagram-704730f1-1a0d1461.js} +1 -1
  33. rasa/core/channels/inspector/dist/assets/{stateDiagram-587899a1-fd26eebc.js → stateDiagram-587899a1-46d388ed.js} +1 -1
  34. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-d93cdb3a-3233e0ae.js → stateDiagram-v2-d93cdb3a-ea42951a.js} +1 -1
  35. rasa/core/channels/inspector/dist/assets/{styles-6aaf32cf-1fdd392b.js → styles-6aaf32cf-7427ed0c.js} +1 -1
  36. rasa/core/channels/inspector/dist/assets/{styles-9a916d00-6d7bfa1b.js → styles-9a916d00-ff5e5a16.js} +1 -1
  37. rasa/core/channels/inspector/dist/assets/{styles-c10674c1-f86aab11.js → styles-c10674c1-7b3680cf.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-08f97a94-e3e49d7a.js → svgDrawCommon-08f97a94-f860f2ad.js} +1 -1
  39. rasa/core/channels/inspector/dist/assets/{timeline-definition-85554ec2-6fe08b4d.js → timeline-definition-85554ec2-2eebf0c8.js} +1 -1
  40. rasa/core/channels/inspector/dist/assets/{xychartDiagram-e933f94c-c2e06fd6.js → xychartDiagram-e933f94c-5d7f4e96.js} +1 -1
  41. rasa/core/channels/inspector/dist/index.html +1 -1
  42. rasa/core/channels/inspector/src/App.tsx +3 -2
  43. rasa/core/channels/inspector/src/components/Chat.tsx +23 -2
  44. rasa/core/channels/inspector/src/components/DiagramFlow.tsx +2 -5
  45. rasa/core/channels/inspector/src/helpers/conversation.ts +16 -0
  46. rasa/core/channels/inspector/src/types.ts +1 -1
  47. rasa/core/channels/voice_ready/audiocodes.py +7 -4
  48. rasa/core/channels/voice_ready/jambonz.py +25 -5
  49. rasa/core/channels/voice_ready/jambonz_protocol.py +4 -0
  50. rasa/core/channels/voice_ready/twilio_voice.py +48 -1
  51. rasa/core/channels/voice_stream/tts/azure.py +11 -2
  52. rasa/core/channels/voice_stream/twilio_media_streams.py +101 -26
  53. rasa/core/nlg/contextual_response_rephraser.py +3 -0
  54. rasa/core/policies/enterprise_search_policy.py +10 -0
  55. rasa/core/policies/intentless_policy.py +3 -0
  56. rasa/dialogue_understanding/coexistence/llm_based_router.py +8 -0
  57. rasa/dialogue_understanding/generator/flow_retrieval.py +1 -4
  58. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +13 -0
  59. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +22 -10
  60. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +27 -12
  61. rasa/dialogue_understanding_test/du_test_case.py +16 -8
  62. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +3 -0
  63. rasa/shared/constants.py +1 -0
  64. rasa/shared/core/flows/flow.py +121 -125
  65. rasa/shared/providers/_utils.py +83 -0
  66. rasa/shared/providers/llm/_base_litellm_client.py +6 -3
  67. rasa/shared/providers/llm/azure_openai_llm_client.py +6 -68
  68. rasa/shared/providers/router/_base_litellm_router_client.py +53 -1
  69. rasa/shared/utils/constants.py +3 -0
  70. rasa/shared/utils/llm.py +69 -23
  71. rasa/validator.py +1 -2
  72. rasa/version.py +1 -1
  73. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.9.dist-info}/METADATA +3 -3
  74. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.9.dist-info}/RECORD +77 -76
  75. rasa/core/channels/inspector/dist/assets/channel-11268142.js +0 -1
  76. rasa/core/channels/inspector/dist/assets/clone-ff7f2ce7.js +0 -1
  77. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-cba7ae20.js +0 -1
  78. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.9.dist-info}/NOTICE +0 -0
  79. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.9.dist-info}/WHEEL +0 -0
  80. {rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.9.dist-info}/entry_points.txt +0 -0
rasa/shared/providers/llm/_base_litellm_client.py CHANGED
@@ -7,7 +7,12 @@ from typing import Any, Dict, List, Union, cast
 import structlog
 from litellm import acompletion, completion, validate_environment
 
-from rasa.shared.constants import API_BASE_CONFIG_KEY, API_KEY, ROLE_USER
+from rasa.shared.constants import (
+    _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY,
+    API_BASE_CONFIG_KEY,
+    API_KEY,
+    ROLE_USER,
+)
 from rasa.shared.exceptions import (
     ProviderClientAPIException,
     ProviderClientValidationError,
@@ -21,8 +26,6 @@ from rasa.shared.utils.io import resolve_environment_variables, suppress_logs
 
 structlogger = structlog.get_logger()
 
-_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"
-
 # Suppress LiteLLM info and debug logs - Global level.
 logging.getLogger("LiteLLM").setLevel(logging.WARNING)
 
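Context for the constant being re-homed above: "missing_keys" is the field name in the dictionary returned by LiteLLM's validate_environment helper, which both the base client and (further below) the router client consult before making API calls. A minimal sketch of that check, with an illustrative model name:

# Sketch only: shows the shape of litellm.validate_environment()'s result.
# The model string below is illustrative, not taken from this diff.
from litellm import validate_environment

_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"

validation_info = validate_environment(model="azure/my-deployment")
missing = validation_info.get(_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY, [])
if missing:
    # Rasa's clients raise ProviderClientValidationError in this situation.
    print(f"Environment variables not set: {missing}")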
rasa/shared/providers/llm/azure_openai_llm_client.py CHANGED
@@ -7,15 +7,12 @@ from typing import Any, Dict, Optional
 import structlog
 
 from rasa.shared.constants import (
-    API_BASE_CONFIG_KEY,
     API_KEY,
-    API_VERSION_CONFIG_KEY,
     AZURE_API_BASE_ENV_VAR,
     AZURE_API_KEY_ENV_VAR,
     AZURE_API_TYPE_ENV_VAR,
     AZURE_API_VERSION_ENV_VAR,
     AZURE_OPENAI_PROVIDER,
-    DEPLOYMENT_CONFIG_KEY,
     OPENAI_API_BASE_ENV_VAR,
     OPENAI_API_KEY_ENV_VAR,
     OPENAI_API_TYPE_ENV_VAR,
@@ -26,6 +23,7 @@ from rasa.shared.providers._configs.azure_openai_client_config import (
     AzureEntraIDOAuthConfig,
     AzureOpenAIClientConfig,
 )
+from rasa.shared.providers._utils import validate_azure_client_setup
 from rasa.shared.providers.constants import (
     DEFAULT_AZURE_API_KEY_NAME,
     LITE_LLM_API_BASE_FIELD,
@@ -348,68 +346,8 @@ class AzureOpenAILLMClient(_BaseLiteLLMClient):
     def validate_client_setup(self) -> None:
         """Validates that all required configuration parameters are set."""
 
-        def generate_event_info_for_missing_setting(
-            setting: str,
-            setting_env_var: Optional[str] = None,
-            setting_config_key: Optional[str] = None,
-        ) -> str:
-            """Generate a part of the message with instructions on what to set
-            for the missing client setting.
-            """
-            info = "Set {setting} with {options}. "
-            options = ""
-            if setting_env_var is not None:
-                options += f"environment variable '{setting_env_var}'"
-            if setting_config_key is not None and setting_env_var is not None:
-                options += " or "
-            if setting_config_key is not None:
-                options += f"config key '{setting_config_key}'"
-
-            return info.format(setting=setting, options=options)
-
-        env_var_field = "env_var"
-        config_key_field = "config_key"
-        current_value_field = "current_value"
-        # All required settings for Azure OpenAI client
-        settings: Dict[str, Dict[str, Any]] = {
-            "API Base": {
-                current_value_field: self.api_base,
-                env_var_field: AZURE_API_BASE_ENV_VAR,
-                config_key_field: API_BASE_CONFIG_KEY,
-            },
-            "API Version": {
-                current_value_field: self.api_version,
-                env_var_field: AZURE_API_VERSION_ENV_VAR,
-                config_key_field: API_VERSION_CONFIG_KEY,
-            },
-            "Deployment Name": {
-                current_value_field: self.deployment,
-                env_var_field: None,
-                config_key_field: DEPLOYMENT_CONFIG_KEY,
-            },
-        }
-
-        missing_settings = [
-            setting_name
-            for setting_name, setting_info in settings.items()
-            if setting_info[current_value_field] is None
-        ]
-
-        if missing_settings:
-            event_info = f"Client settings not set: " f"{', '.join(missing_settings)}. "
-
-            for missing_setting in missing_settings:
-                if settings[missing_setting][current_value_field] is not None:
-                    continue
-                event_info += generate_event_info_for_missing_setting(
-                    missing_setting,
-                    settings[missing_setting][env_var_field],
-                    settings[missing_setting][config_key_field],
-                )
-
-            structlogger.error(
-                "azure_openai_llm_client.not_configured",
-                event_info=event_info,
-                missing_settings=missing_settings,
-            )
-            raise ProviderClientValidationError(event_info)
+        return validate_azure_client_setup(
+            api_base=self.api_base,
+            api_version=self.api_version,
+            deployment=self.deployment,
+        )
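The shared helper validate_azure_client_setup now carries the validation logic removed above; it lives in the newly added rasa/shared/providers/_utils.py (+83 lines), whose body is not shown in this diff. A minimal sketch of what such a helper could look like, assuming it mirrors the removed inline checks (the error wording is illustrative):

# Hypothetical sketch of rasa.shared.providers._utils.validate_azure_client_setup;
# the actual implementation shipped in 3.12.9 is not shown in this diff.
from typing import Optional

from rasa.shared.exceptions import ProviderClientValidationError


def validate_azure_client_setup(
    api_base: Optional[str],
    api_version: Optional[str],
    deployment: Optional[str],
) -> None:
    """Raise if any setting required by an Azure OpenAI deployment is missing."""
    required = {
        "API Base": api_base,
        "API Version": api_version,
        "Deployment Name": deployment,
    }
    missing = [name for name, value in required.items() if value is None]
    if missing:
        raise ProviderClientValidationError(
            f"Client settings not set: {', '.join(missing)}."
        )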
rasa/shared/providers/router/_base_litellm_router_client.py CHANGED
@@ -5,10 +5,16 @@ from copy import deepcopy
 from typing import Any, Dict, List
 
 import structlog
-from litellm import Router
+from litellm import Router, validate_environment
 
 from rasa.shared.constants import (
+    _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY,
+    API_BASE_CONFIG_KEY,
     API_KEY,
+    API_VERSION_CONFIG_KEY,
+    AZURE_API_BASE_ENV_VAR,
+    AZURE_API_VERSION_ENV_VAR,
+    AZURE_OPENAI_PROVIDER,
     LITELLM_PARAMS_KEY,
     MODEL_CONFIG_KEY,
     MODEL_GROUP_ID_CONFIG_KEY,
@@ -23,6 +29,7 @@ from rasa.shared.providers._configs.azure_entra_id_config import AzureEntraIDOAu
 from rasa.shared.providers._configs.litellm_router_client_config import (
     LiteLLMRouterClientConfig,
 )
+from rasa.shared.providers._utils import validate_azure_client_setup
 from rasa.shared.utils.io import resolve_environment_variables
 
 structlogger = structlog.get_logger()
@@ -183,6 +190,7 @@ class _BaseLiteLLMRouterClient:
 
     def _create_router_client(self) -> Router:
         resolved_model_configurations = self._resolve_env_vars_in_model_configurations()
+        self._validate_model_configurations(resolved_model_configurations)
         return Router(model_list=resolved_model_configurations, **self.router_settings)
 
     def _has_oauth(self) -> bool:
@@ -214,3 +222,47 @@ class _BaseLiteLLMRouterClient:
             )
             model_configuration_with_resolved_keys.append(resolved_model_configuration)
         return model_configuration_with_resolved_keys
+
+    def _validate_model_configurations(
+        self, resolved_model_configurations: List[Dict[str, Any]]
+    ) -> None:
+        """Validates the model configurations.
+        Args:
+            resolved_model_configurations: (List[Dict[str, Any]]) The list of model
+                configurations with resolved environment variables.
+        Raises:
+            ProviderClientValidationError: If the model configurations are invalid.
+        """
+        for model_configuration in resolved_model_configurations:
+            litellm_params = model_configuration.get(LITELLM_PARAMS_KEY, {})
+
+            model = litellm_params.get(MODEL_CONFIG_KEY)
+            provider, deployment = model.split("/", 1)
+            api_base = litellm_params.get(API_BASE_CONFIG_KEY)
+
+            if provider.lower() == AZURE_OPENAI_PROVIDER:
+                validate_azure_client_setup(
+                    api_base=api_base or os.getenv(AZURE_API_BASE_ENV_VAR),
+                    api_version=litellm_params.get(API_VERSION_CONFIG_KEY)
+                    or os.getenv(AZURE_API_VERSION_ENV_VAR),
+                    deployment=deployment,
+                )
+            else:
+                validation_info = validate_environment(
+                    model=model,
+                    api_key=litellm_params.get(API_KEY),
+                    api_base=api_base,
+                )
+                if missing_environment_variables := validation_info.get(
+                    _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY
+                ):
+                    event_info = (
+                        f"Environment variables: {missing_environment_variables} "
+                        f"not set. Required for API calls."
+                    )
+                    structlogger.error(
+                        "base_litellm_router_client.validate_environment_variables",
+                        event_info=event_info,
+                        missing_environment_variables=missing_environment_variables,
+                    )
+                    raise ProviderClientValidationError(event_info)
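For illustration, each entry that _validate_model_configurations inspects is a LiteLLM Router model_list item; the values below are hypothetical and only show which keys the new check reads:

# Hypothetical model_list entry (values are illustrative, not from this diff).
# For an "azure/..." model the new check calls validate_azure_client_setup();
# for any other provider it falls back to litellm.validate_environment().
resolved_model_configurations = [
    {
        "model_name": "my_model_group",
        "litellm_params": {
            "model": "azure/my-gpt4o-deployment",  # "provider/deployment"
            "api_base": "https://my-resource.openai.azure.com",
            "api_version": "2024-02-15-preview",
            "api_key": "<resolved from an environment variable>",
        },
    }
]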
rasa/shared/utils/constants.py CHANGED
@@ -5,3 +5,6 @@ DEFAULT_READ_YAML_FILE_CACHE_MAXSIZE = 256
 RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS_ENV_VAR_NAME = (
     "RASA_PRO_BETA_PREDICATES_IN_RESPONSE_CONDITIONS"
 )
+
+LOG_COMPONENT_SOURCE_METHOD_INIT = "init"
+LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON = "fingerprint_addon"
rasa/shared/utils/llm.py CHANGED
@@ -1,5 +1,6 @@
 import importlib.resources
 import json
+import logging
 from copy import deepcopy
 from functools import wraps
 from typing import (
@@ -7,6 +8,7 @@ from typing import (
     Any,
     Callable,
     Dict,
+    Literal,
     Optional,
     Text,
     Type,
@@ -59,6 +61,7 @@ from rasa.shared.providers.mappings import (
     get_embedding_client_from_provider,
     get_llm_client_from_provider,
 )
+from rasa.shared.utils.constants import LOG_COMPONENT_SOURCE_METHOD_INIT
 
 if TYPE_CHECKING:
     from rasa.shared.core.trackers import DialogueStateTracker
@@ -654,35 +657,59 @@ def embedder_client_factory(
 
 
 def get_prompt_template(
-    jinja_file_path: Optional[Text], default_prompt_template: Text
+    jinja_file_path: Optional[Text],
+    default_prompt_template: Text,
+    *,
+    log_source_component: Optional[Text] = None,
+    log_source_method: Optional[Literal["init", "fingerprint_addon"]] = None,
 ) -> Text:
     """Returns the jinja template.
 
     Args:
-        jinja_file_path: the path to the jinja file
-        default_prompt_template: the default prompt template
+        jinja_file_path: The path to the jinja template file. If not provided, the
+            default template will be used.
+        default_prompt_template: The fallback prompt template to use if no file is
+            found or specified.
+        log_source_component: The name of the component emitting the log, used to
+            identify the source in structured logging.
+        log_source_method: The name of the method or function emitting the log for
+            better traceability.
 
     Returns:
         The prompt template.
     """
+
     try:
         if jinja_file_path is not None:
             prompt_template = rasa.shared.utils.io.read_file(jinja_file_path)
-            structlogger.info(
-                "utils.llm.get_prompt_template.custom_prompt_template_read_successfull",
+
+            log_level = (
+                logging.INFO
+                if log_source_method == LOG_COMPONENT_SOURCE_METHOD_INIT
+                else logging.DEBUG
+            )
+
+            structlogger.log(
+                log_level,
+                "utils.llm.get_prompt_template"
+                ".custom_prompt_template_read_successfully",
                 event_info=(
                     f"Custom prompt template read successfully from "
                     f"`{jinja_file_path}`."
                 ),
                 prompt_file_path=jinja_file_path,
+                log_source_component=log_source_component,
+                log_source_method=log_source_method,
            )
            return prompt_template
    except (FileIOException, FileNotFoundException):
        structlogger.warning(
-            "utils.llm.get_prompt_template.failed_to_read_custom_prompt_template",
+            "utils.llm.get_prompt_template" ".failed_to_read_custom_prompt_template",
            event_info=(
                "Failed to read custom prompt template. Using default template instead."
            ),
+            log_source_component=log_source_component,
+            log_source_method=log_source_method,
        )
    return default_prompt_template
 
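As a usage sketch, a caller can now pass the new keyword-only arguments so the structured log records which component and method triggered the prompt read (the component name and file path below are hypothetical):

# Hypothetical caller; the component name and template path are illustrative.
from rasa.shared.utils.constants import LOG_COMPONENT_SOURCE_METHOD_INIT
from rasa.shared.utils.llm import get_prompt_template

prompt = get_prompt_template(
    "prompts/my_component_prompt.jinja2",
    default_prompt_template="Answer the user politely.",
    log_source_component="MyCustomPolicy",
    log_source_method=LOG_COMPONENT_SOURCE_METHOD_INIT,  # read at init -> INFO, else DEBUG
)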
@@ -692,50 +719,66 @@ def get_default_prompt_template_based_on_model(
     model_prompt_mapping: Dict[str, Any],
     default_prompt_path: str,
     fallback_prompt_path: str,
+    *,
+    log_source_component: Optional[Text] = None,
+    log_source_method: Optional[Literal["init", "fingerprint_addon"]] = None,
 ) -> Text:
     """Returns the default prompt template based on the model name.
 
     Args:
         llm_config: The model config.
-        model_prompt_mapping: The mapping of model name to prompt template.
-        default_prompt_path: The default prompt path of the component.
-        fallback_prompt_path: The fallback prompt path for all other models
-            that do not have a mapping in the model_prompt_mapping.
+        model_prompt_mapping: The model name -> prompt template mapping.
+        default_prompt_path: The path to the default prompt template for the component.
+        fallback_prompt_path: The fallback prompt path for all other models that do not
+            have a mapping in the model_prompt_mapping.
+        log_source_component: The name of the component emitting the log, used to
+            identify the source in structured logging.
+        log_source_method: The name of the method or function emitting the log for
+            better traceability.
 
     Returns:
         The default prompt template.
     """
+    # Extract the provider and model name information from the configuration
     _llm_config = deepcopy(llm_config)
     if MODELS_CONFIG_KEY in _llm_config:
         _llm_config = _llm_config[MODELS_CONFIG_KEY][0]
     provider = _llm_config.get(PROVIDER_CONFIG_KEY)
     model = _llm_config.get(MODEL_CONFIG_KEY)
+
+    # If the model is not defined, we default to the default prompt template.
     if not model:
-        # If the model is not defined, we default to the default prompt template.
-        structlogger.info(
-            "utils.llm.get_default_prompt_template_based_on_model.using_default_prompt_template",
+        structlogger.debug(
+            "utils.llm.get_default_prompt_template_based_on_model"
+            ".using_default_prompt_template",
             event_info=(
                 f"Model not defined in the config. Default prompt template read from"
                 f" - `{default_prompt_path}`."
             ),
             default_prompt_path=default_prompt_path,
+            log_source_component=log_source_component,
+            log_source_method=log_source_method,
         )
         return importlib.resources.read_text(
             DEFAULT_PROMPT_PACKAGE_NAME, default_prompt_path
         )
 
-    model_name = model if provider and provider in model else f"{provider}/{model}"
-    if prompt_file_path := model_prompt_mapping.get(model_name):
-        # If the model is found in the mapping, we use the model-specific prompt
-        # template.
-        structlogger.info(
-            "utils.llm.get_default_prompt_template_based_on_model.using_model_specific_prompt_template",
+    full_model_name = model if provider and provider in model else f"{provider}/{model}"
+
+    # If the model is found in the mapping, we use the model-specific prompt
+    # template.
+    if prompt_file_path := model_prompt_mapping.get(full_model_name):
+        structlogger.debug(
+            "utils.llm.get_default_prompt_template_based_on_model"
+            ".using_model_specific_prompt_template",
             event_info=(
                 f"Using model-specific default prompt template. Default prompt "
                 f"template read from - `{prompt_file_path}`."
             ),
             default_prompt_path=prompt_file_path,
-            model_name=model_name,
+            model_name=full_model_name,
+            log_source_component=log_source_component,
+            log_source_method=log_source_method,
         )
         return importlib.resources.read_text(
             DEFAULT_PROMPT_PACKAGE_NAME, prompt_file_path
@@ -743,14 +786,17 @@
 
     # If the model is not found in the mapping, we default to the fallback prompt
     # template.
-    structlogger.info(
-        "utils.llm.get_default_prompt_template_based_on_model.using_fallback_prompt_template",
+    structlogger.debug(
+        "utils.llm.get_default_prompt_template_based_on_model"
+        ".using_fallback_prompt_template",
         event_info=(
            f"Model not found in the model prompt mapping. Fallback prompt template "
            f"read from - `{fallback_prompt_path}`."
        ),
        fallback_prompt_path=fallback_prompt_path,
-        model_name=model_name,
+        model_name=full_model_name,
+        log_source_component=log_source_component,
+        log_source_method=log_source_method,
    )
    return importlib.resources.read_text(
        DEFAULT_PROMPT_PACKAGE_NAME, fallback_prompt_path
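To illustrate the lookup above: the mapping is keyed by the full model name in "provider/model" form (or by the model string alone when it already contains the provider); the mapping entries below are hypothetical:

# Hypothetical mapping; keys follow the "provider/model" form built above.
model_prompt_mapping = {
    "openai/gpt-4o": "command_prompt_gpt4o.jinja2",
    "anthropic/claude-3-5-sonnet-20240620": "command_prompt_claude.jinja2",
}

provider, model = "openai", "gpt-4o"
full_model_name = model if provider and provider in model else f"{provider}/{model}"
# "openai" is not a substring of "gpt-4o", so the key becomes "openai/gpt-4o"
# and the first mapping entry is selected.
assert full_model_name == "openai/gpt-4o"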
rasa/validator.py CHANGED
@@ -531,7 +531,7 @@ class Validator:
                 condition_active_loop
                 and condition_active_loop not in self.domain.form_names
             ):
-                structlogger.warn(
+                structlogger.error(
                     "validator.verify_slot_mappings.not_in_domain",
                     slot=slot.name,
                     form=condition_active_loop,
@@ -566,7 +566,6 @@ class Validator:
                         f"The slot needs to be added to this key."
                     ),
                 )
-                everything_is_alright = False
 
         return everything_is_alright
 
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
 # this file will automatically be changed,
 # do not add anything but the version number here!
-__version__ = "3.12.7.dev2"
+__version__ = "3.12.9"
{rasa_pro-3.12.7.dev2.dist-info → rasa_pro-3.12.9.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: rasa-pro
-Version: 3.12.7.dev2
+Version: 3.12.9
 Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
 Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
 Author: Rasa Technologies GmbH
@@ -63,12 +63,12 @@ Requires-Dist: keras (==2.14.0)
 Requires-Dist: langchain (>=0.2.17,<0.3.0)
 Requires-Dist: langchain-community (>=0.2.19,<0.3.0)
 Requires-Dist: langcodes (>=3.5.0,<4.0.0)
-Requires-Dist: litellm (>=1.52.6,<1.53.0)
+Requires-Dist: litellm (>=1.68.0,<1.69.0)
 Requires-Dist: matplotlib (>=3.7,<3.8)
 Requires-Dist: mattermostwrapper (>=2.2,<2.3)
 Requires-Dist: networkx (>=3.1,<3.2)
 Requires-Dist: numpy (>=1.26.4,<1.27.0)
-Requires-Dist: openai (>=1.55.3,<1.56.0)
+Requires-Dist: openai (>=1.68.2,<1.69.0)
 Requires-Dist: openpyxl (>=3.1.5,<4.0.0)
 Requires-Dist: opentelemetry-api (>=1.16.0,<1.17.0)
 Requires-Dist: opentelemetry-exporter-jaeger (>=1.16.0,<1.17.0)