rasa-pro 3.9.17__py3-none-any.whl → 3.10.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic. Click here for more details.

Files changed (187)
  1. README.md +5 -37
  2. rasa/__init__.py +1 -2
  3. rasa/__main__.py +5 -0
  4. rasa/anonymization/anonymization_rule_executor.py +2 -2
  5. rasa/api.py +26 -22
  6. rasa/cli/arguments/data.py +27 -2
  7. rasa/cli/arguments/default_arguments.py +25 -3
  8. rasa/cli/arguments/run.py +9 -9
  9. rasa/cli/arguments/train.py +2 -0
  10. rasa/cli/data.py +70 -8
  11. rasa/cli/e2e_test.py +108 -433
  12. rasa/cli/interactive.py +1 -0
  13. rasa/cli/llm_fine_tuning.py +395 -0
  14. rasa/cli/project_templates/calm/endpoints.yml +1 -1
  15. rasa/cli/project_templates/tutorial/endpoints.yml +1 -1
  16. rasa/cli/run.py +14 -13
  17. rasa/cli/scaffold.py +10 -8
  18. rasa/cli/train.py +8 -7
  19. rasa/cli/utils.py +15 -0
  20. rasa/constants.py +7 -1
  21. rasa/core/actions/action.py +98 -49
  22. rasa/core/actions/action_run_slot_rejections.py +4 -1
  23. rasa/core/actions/custom_action_executor.py +9 -6
  24. rasa/core/actions/direct_custom_actions_executor.py +80 -0
  25. rasa/core/actions/e2e_stub_custom_action_executor.py +68 -0
  26. rasa/core/actions/grpc_custom_action_executor.py +2 -2
  27. rasa/core/actions/http_custom_action_executor.py +6 -5
  28. rasa/core/agent.py +21 -17
  29. rasa/core/channels/__init__.py +2 -0
  30. rasa/core/channels/audiocodes.py +1 -16
  31. rasa/core/channels/voice_aware/__init__.py +0 -0
  32. rasa/core/channels/voice_aware/jambonz.py +103 -0
  33. rasa/core/channels/voice_aware/jambonz_protocol.py +344 -0
  34. rasa/core/channels/voice_aware/utils.py +20 -0
  35. rasa/core/channels/voice_native/__init__.py +0 -0
  36. rasa/core/constants.py +6 -1
  37. rasa/core/featurizers/single_state_featurizer.py +1 -22
  38. rasa/core/featurizers/tracker_featurizers.py +18 -115
  39. rasa/core/information_retrieval/faiss.py +7 -4
  40. rasa/core/information_retrieval/information_retrieval.py +8 -0
  41. rasa/core/information_retrieval/milvus.py +9 -2
  42. rasa/core/information_retrieval/qdrant.py +1 -1
  43. rasa/core/nlg/contextual_response_rephraser.py +32 -10
  44. rasa/core/nlg/summarize.py +4 -3
  45. rasa/core/policies/enterprise_search_policy.py +100 -44
  46. rasa/core/policies/flows/flow_executor.py +155 -98
  47. rasa/core/policies/intentless_policy.py +52 -28
  48. rasa/core/policies/ted_policy.py +33 -58
  49. rasa/core/policies/unexpected_intent_policy.py +7 -15
  50. rasa/core/processor.py +15 -46
  51. rasa/core/run.py +5 -4
  52. rasa/core/tracker_store.py +8 -4
  53. rasa/core/utils.py +45 -56
  54. rasa/dialogue_understanding/coexistence/llm_based_router.py +45 -12
  55. rasa/dialogue_understanding/commands/__init__.py +4 -0
  56. rasa/dialogue_understanding/commands/change_flow_command.py +0 -6
  57. rasa/dialogue_understanding/commands/session_start_command.py +59 -0
  58. rasa/dialogue_understanding/commands/set_slot_command.py +1 -5
  59. rasa/dialogue_understanding/commands/utils.py +38 -0
  60. rasa/dialogue_understanding/generator/constants.py +10 -3
  61. rasa/dialogue_understanding/generator/flow_retrieval.py +14 -5
  62. rasa/dialogue_understanding/generator/llm_based_command_generator.py +12 -2
  63. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +106 -87
  64. rasa/dialogue_understanding/generator/nlu_command_adapter.py +28 -6
  65. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +90 -37
  66. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +15 -15
  67. rasa/dialogue_understanding/patterns/session_start.py +37 -0
  68. rasa/dialogue_understanding/processor/command_processor.py +13 -14
  69. rasa/e2e_test/aggregate_test_stats_calculator.py +124 -0
  70. rasa/e2e_test/assertions.py +1181 -0
  71. rasa/e2e_test/assertions_schema.yml +106 -0
  72. rasa/e2e_test/constants.py +20 -0
  73. rasa/e2e_test/e2e_config.py +220 -0
  74. rasa/e2e_test/e2e_config_schema.yml +26 -0
  75. rasa/e2e_test/e2e_test_case.py +131 -8
  76. rasa/e2e_test/e2e_test_converter.py +363 -0
  77. rasa/e2e_test/e2e_test_converter_prompt.jinja2 +70 -0
  78. rasa/e2e_test/e2e_test_coverage_report.py +364 -0
  79. rasa/e2e_test/e2e_test_result.py +26 -6
  80. rasa/e2e_test/e2e_test_runner.py +498 -73
  81. rasa/e2e_test/e2e_test_schema.yml +96 -0
  82. rasa/e2e_test/pykwalify_extensions.py +39 -0
  83. rasa/e2e_test/stub_custom_action.py +70 -0
  84. rasa/e2e_test/utils/__init__.py +0 -0
  85. rasa/e2e_test/utils/e2e_yaml_utils.py +55 -0
  86. rasa/e2e_test/utils/io.py +596 -0
  87. rasa/e2e_test/utils/validation.py +80 -0
  88. rasa/engine/recipes/default_components.py +0 -2
  89. rasa/engine/storage/local_model_storage.py +0 -1
  90. rasa/env.py +9 -0
  91. rasa/llm_fine_tuning/__init__.py +0 -0
  92. rasa/llm_fine_tuning/annotation_module.py +241 -0
  93. rasa/llm_fine_tuning/conversations.py +144 -0
  94. rasa/llm_fine_tuning/llm_data_preparation_module.py +178 -0
  95. rasa/llm_fine_tuning/notebooks/unsloth_finetuning.ipynb +407 -0
  96. rasa/llm_fine_tuning/paraphrasing/__init__.py +0 -0
  97. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +281 -0
  98. rasa/llm_fine_tuning/paraphrasing/default_rephrase_prompt_template.jina2 +44 -0
  99. rasa/llm_fine_tuning/paraphrasing/rephrase_validator.py +121 -0
  100. rasa/llm_fine_tuning/paraphrasing/rephrased_user_message.py +10 -0
  101. rasa/llm_fine_tuning/paraphrasing_module.py +128 -0
  102. rasa/llm_fine_tuning/storage.py +174 -0
  103. rasa/llm_fine_tuning/train_test_split_module.py +441 -0
  104. rasa/model_training.py +48 -16
  105. rasa/nlu/classifiers/diet_classifier.py +25 -38
  106. rasa/nlu/classifiers/logistic_regression_classifier.py +9 -44
  107. rasa/nlu/classifiers/sklearn_intent_classifier.py +16 -37
  108. rasa/nlu/extractors/crf_entity_extractor.py +50 -93
  109. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +45 -78
  110. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +17 -52
  111. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +3 -5
  112. rasa/nlu/persistor.py +129 -32
  113. rasa/server.py +45 -10
  114. rasa/shared/constants.py +63 -15
  115. rasa/shared/core/domain.py +15 -12
  116. rasa/shared/core/events.py +28 -2
  117. rasa/shared/core/flows/flow.py +208 -13
  118. rasa/shared/core/flows/flow_path.py +84 -0
  119. rasa/shared/core/flows/flows_list.py +28 -10
  120. rasa/shared/core/flows/flows_yaml_schema.json +269 -193
  121. rasa/shared/core/flows/validation.py +112 -25
  122. rasa/shared/core/flows/yaml_flows_io.py +149 -10
  123. rasa/shared/core/trackers.py +6 -0
  124. rasa/shared/core/training_data/visualization.html +2 -2
  125. rasa/shared/exceptions.py +4 -0
  126. rasa/shared/importers/importer.py +60 -11
  127. rasa/shared/importers/remote_importer.py +196 -0
  128. rasa/shared/nlu/constants.py +2 -0
  129. rasa/shared/nlu/training_data/features.py +2 -120
  130. rasa/shared/providers/_configs/__init__.py +0 -0
  131. rasa/shared/providers/_configs/azure_openai_client_config.py +181 -0
  132. rasa/shared/providers/_configs/client_config.py +57 -0
  133. rasa/shared/providers/_configs/default_litellm_client_config.py +130 -0
  134. rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py +234 -0
  135. rasa/shared/providers/_configs/openai_client_config.py +175 -0
  136. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +171 -0
  137. rasa/shared/providers/_configs/utils.py +101 -0
  138. rasa/shared/providers/_ssl_verification_utils.py +124 -0
  139. rasa/shared/providers/embedding/__init__.py +0 -0
  140. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +254 -0
  141. rasa/shared/providers/embedding/_langchain_embedding_client_adapter.py +74 -0
  142. rasa/shared/providers/embedding/azure_openai_embedding_client.py +277 -0
  143. rasa/shared/providers/embedding/default_litellm_embedding_client.py +102 -0
  144. rasa/shared/providers/embedding/embedding_client.py +90 -0
  145. rasa/shared/providers/embedding/embedding_response.py +41 -0
  146. rasa/shared/providers/embedding/huggingface_local_embedding_client.py +191 -0
  147. rasa/shared/providers/embedding/openai_embedding_client.py +172 -0
  148. rasa/shared/providers/llm/__init__.py +0 -0
  149. rasa/shared/providers/llm/_base_litellm_client.py +227 -0
  150. rasa/shared/providers/llm/azure_openai_llm_client.py +338 -0
  151. rasa/shared/providers/llm/default_litellm_llm_client.py +84 -0
  152. rasa/shared/providers/llm/llm_client.py +76 -0
  153. rasa/shared/providers/llm/llm_response.py +50 -0
  154. rasa/shared/providers/llm/openai_llm_client.py +155 -0
  155. rasa/shared/providers/llm/self_hosted_llm_client.py +169 -0
  156. rasa/shared/providers/mappings.py +75 -0
  157. rasa/shared/utils/cli.py +30 -0
  158. rasa/shared/utils/io.py +65 -3
  159. rasa/shared/utils/llm.py +223 -200
  160. rasa/shared/utils/yaml.py +122 -7
  161. rasa/studio/download.py +19 -13
  162. rasa/studio/train.py +2 -3
  163. rasa/studio/upload.py +2 -3
  164. rasa/telemetry.py +113 -58
  165. rasa/tracing/config.py +2 -3
  166. rasa/tracing/instrumentation/attribute_extractors.py +29 -17
  167. rasa/tracing/instrumentation/instrumentation.py +4 -47
  168. rasa/utils/common.py +18 -19
  169. rasa/utils/endpoints.py +7 -4
  170. rasa/utils/io.py +66 -0
  171. rasa/utils/json_utils.py +60 -0
  172. rasa/utils/licensing.py +9 -1
  173. rasa/utils/ml_utils.py +4 -2
  174. rasa/utils/tensorflow/model_data.py +193 -2
  175. rasa/validator.py +195 -1
  176. rasa/version.py +1 -1
  177. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/METADATA +25 -51
  178. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/RECORD +183 -119
  179. rasa/nlu/classifiers/llm_intent_classifier.py +0 -519
  180. rasa/shared/providers/openai/clients.py +0 -43
  181. rasa/shared/providers/openai/session_handler.py +0 -110
  182. rasa/utils/tensorflow/feature_array.py +0 -366
  183. /rasa/{shared/providers/openai → cli/project_templates/tutorial/actions}/__init__.py +0 -0
  184. /rasa/cli/project_templates/tutorial/{actions.py → actions/actions.py} +0 -0
  185. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/NOTICE +0 -0
  186. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/WHEEL +0 -0
  187. {rasa_pro-3.9.17.dist-info → rasa_pro-3.10.3.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,172 @@
1
import os
from typing import Any, Dict, Optional

import structlog

from rasa.shared.constants import (
    OPENAI_API_BASE_ENV_VAR,
    OPENAI_API_TYPE_ENV_VAR,
    OPENAI_API_VERSION_ENV_VAR,
    OPENAI_PROVIDER,
)
from rasa.shared.providers._configs.openai_client_config import OpenAIClientConfig
from rasa.shared.providers.embedding._base_litellm_embedding_client import (
    _BaseLiteLLMEmbeddingClient,
)

structlogger = structlog.get_logger()


class OpenAIEmbeddingClient(_BaseLiteLLMEmbeddingClient):
    """A client for interfacing with OpenAI Embeddings.

    Parameters:
        model (str): The OpenAI model name.
        api_base (Optional[str]): Optional, the base URL for the API endpoints.
            If not provided, it will be set via environment variables.
        api_type (Optional[str]): Optional, the type of the API to use.
            If not provided, it will be set via environment variable.
        api_version (Optional[str]): Optional, the version of the API to use.
            If not provided, it will be set via environment variable.
        kwargs (Optional[Dict[str, Any]]): Optional configuration parameters specific
            to the embedding model deployment.

    Raises:
        ProviderClientValidationError: If validation of the client setup fails.
    """

    def __init__(
        self,
        model: str,
        api_base: Optional[str] = None,
        api_type: Optional[str] = None,
        api_version: Optional[str] = None,
        **kwargs: Any,
    ):
        super().__init__()  # type: ignore
        self._model = model
        # Fall back to the standard OpenAI environment variables when the
        # values are not provided explicitly.
        self._api_base = api_base or os.environ.get(OPENAI_API_BASE_ENV_VAR)
        self._api_type = api_type or os.environ.get(OPENAI_API_TYPE_ENV_VAR)
        self._api_version = api_version or os.environ.get(OPENAI_API_VERSION_ENV_VAR)
        self._extra_parameters = kwargs or {}
        self.validate_client_setup()

    @classmethod
    def from_config(cls, config: Dict[str, Any]) -> "OpenAIEmbeddingClient":
        """
        Initializes the client from given configuration.

        Args:
            config (Dict[str, Any]): Configuration.

        Raises:
            ValueError: Raised in cases of invalid configuration:
                - If any of the required configuration keys are missing.
                - If `api_type` has a value different from `azure`.

        Returns:
            OpenAIEmbeddingClient: Initialized client.
        """
        try:
            openai_config = OpenAIClientConfig.from_dict(config)
        except ValueError as e:
            # Log with full context before re-raising so misconfiguration is
            # easy to diagnose from the structured logs.
            message = "Cannot instantiate a client from the passed configuration."
            structlogger.error(
                "openai_embedding_client.from_config.error",
                message=message,
                config=config,
                original_error=e,
            )
            raise

        return cls(
            model=openai_config.model,
            api_base=openai_config.api_base,
            api_type=openai_config.api_type,
            api_version=openai_config.api_version,
            **openai_config.extra_parameters,
        )

    @property
    def config(self) -> Dict:
        """
        Returns the configuration for the openai embedding client.

        Returns:
            Dictionary containing the configuration.
        """
        config = OpenAIClientConfig(
            model=self.model,
            api_base=self.api_base,
            api_type=self.api_type,
            api_version=self.api_version,
            extra_parameters=self._extra_parameters,
        )
        return config.to_dict()

    @property
    def model(self) -> str:
        """
        Returns the model name for the openai embedding client.

        Returns:
            String representing the model name.
        """
        return self._model

    @property
    def api_base(self) -> Optional[str]:
        """
        Returns the base API URL for the openai embedding client.

        Returns:
            String representing the base API URL.
        """
        return self._api_base

    @property
    def api_type(self) -> Optional[str]:
        """
        Returns the API type for the openai embedding client.

        Returns:
            String representing the API type.
        """
        return self._api_type

    @property
    def api_version(self) -> Optional[str]:
        """
        Returns the API version for the openai embedding client.

        Returns:
            String representing the API version.
        """
        return self._api_version

    @property
    def _litellm_extra_parameters(self) -> Dict[str, Any]:
        """
        Returns the model parameters for the openai embedding client.

        Returns:
            Dictionary containing the model parameters.
        """
        return self._extra_parameters

    @property
    def _embedding_fn_args(self) -> Dict[str, Any]:
        # Arguments forwarded to LiteLLM's embedding call.
        return {
            "model": self._litellm_model_name,
            "api_base": self.api_base,
            "api_type": self.api_type,
            "api_version": self.api_version,
            **self._litellm_extra_parameters,
        }

    @property
    def _litellm_model_name(self) -> str:
        """Returns the LiteLLM model name for the openai embedding client."""
        # LiteLLM expects "<provider>/<model>"; add the prefix only if absent.
        if self._model and f"{OPENAI_PROVIDER}/" not in self._model:
            return f"{OPENAI_PROVIDER}/{self._model}"
        return self._model
File without changes
@@ -0,0 +1,227 @@
1
from abc import abstractmethod
from typing import Any, Dict, List, Union

import logging
import structlog
from litellm import completion, acompletion, validate_environment

from rasa.shared.exceptions import (
    ProviderClientAPIException,
    ProviderClientValidationError,
)
from rasa.shared.providers._ssl_verification_utils import (
    ensure_ssl_certificates_for_litellm_non_openai_based_clients,
    ensure_ssl_certificates_for_litellm_openai_based_clients,
)
from rasa.shared.providers.llm.llm_response import LLMResponse, LLMUsage
from rasa.shared.utils.io import suppress_logs

structlogger = structlog.get_logger()

_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"

# Suppress LiteLLM info and debug logs - Global level.
logging.getLogger("LiteLLM").setLevel(logging.WARNING)


class _BaseLiteLLMClient:
    """
    An abstract base class for LiteLLM clients.

    This class defines the interface and common functionality for all clients
    based on LiteLLM.

    The class is made private to prevent it from being part of the
    public-facing interface, as it serves as an internal base class
    for specific implementations of clients that are currently based on
    LiteLLM.

    By keeping it private, we ensure that only the derived, concrete
    implementations are exposed to users, maintaining a cleaner and
    more controlled API surface.
    """

    def __init__(self):  # type: ignore
        # SSL setup must happen before any completion call is issued.
        self._ensure_certificates()

    @classmethod
    @abstractmethod
    def from_config(cls, config: Dict[str, Any]) -> "_BaseLiteLLMClient":
        pass

    @property
    @abstractmethod
    def config(self) -> dict:
        """Returns the configuration of the llm client in dictionary form."""
        pass

    @property
    @abstractmethod
    def _litellm_model_name(self) -> str:
        """Returns the value of LiteLLM's model parameter to be used in
        completion/acompletion in LiteLLM format:

        <provider>/<model or deployment name>
        """
        pass

    @property
    def _litellm_extra_parameters(self) -> Dict[str, Any]:
        """Returns a dictionary of extra parameters which include model
        parameters as well as LiteLLM specific input parameters.

        By default, this returns an empty dictionary (no extra parameters).
        """
        return {}

    @property
    def _completion_fn_args(self) -> dict:
        return {
            **self._litellm_extra_parameters,
            "model": self._litellm_model_name,
            # Since all providers covered by LiteLLM use the OpenAI format, but
            # not all support every OpenAI parameter, raise an exception if
            # provider/model uses unsupported parameter
            "drop_params": False,
        }

    def validate_client_setup(self) -> None:
        """Perform client validation. By default only environment variables
        are validated.

        Raises:
            ProviderClientValidationError if validation fails.
        """
        self._validate_environment_variables()
        self._validate_api_key_not_in_config()

    def _validate_environment_variables(self) -> None:
        """Validate that the required environment variables are set."""
        validation_info = validate_environment(self._litellm_model_name)
        if missing_environment_variables := validation_info.get(
            _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY
        ):
            event_info = (
                f"Environment variables: {missing_environment_variables} "
                f"not set. Required for API calls."
            )
            structlogger.error(
                "base_litellm_client.validate_environment_variables",
                event_info=event_info,
                missing_environment_variables=missing_environment_variables,
            )
            raise ProviderClientValidationError(event_info)

    def _validate_api_key_not_in_config(self) -> None:
        # Disallow API keys in plain-text configuration; they must come from
        # the environment instead.
        if "api_key" in self._litellm_extra_parameters:
            event_info = (
                "API Key is set through `api_key` extra parameter."
                "Set API keys through environment variables."
            )
            structlogger.error(
                "base_litellm_client.validate_api_key_not_in_config",
                event_info=event_info,
            )
            raise ProviderClientValidationError(event_info)

    @suppress_logs(log_level=logging.WARNING)
    def completion(self, messages: Union[List[str], str]) -> LLMResponse:
        """
        Synchronously generate completions for given list of messages.

        Args:
            messages: List of messages or a single message to generate the
                completion for.
        Returns:
            An LLMResponse containing the generated completions.
        Raises:
            ProviderClientAPIException: If the API request fails.
        """
        try:
            formatted_messages = self._format_messages(messages)
            response = completion(
                messages=formatted_messages, **self._completion_fn_args
            )
            return self._format_response(response)
        except Exception as e:
            raise ProviderClientAPIException(e)

    @suppress_logs(log_level=logging.WARNING)
    async def acompletion(self, messages: Union[List[str], str]) -> LLMResponse:
        """
        Asynchronously generate completions for given list of messages.

        Args:
            messages: List of messages or a single message to generate the
                completion for.
        Returns:
            An LLMResponse containing the generated completions.
        Raises:
            ProviderClientAPIException: If the API request fails.
        """
        try:
            formatted_messages = self._format_messages(messages)
            response = await acompletion(
                messages=formatted_messages, **self._completion_fn_args
            )
            return self._format_response(response)
        except Exception as e:
            raise ProviderClientAPIException(e)

    def _format_messages(self, messages: Union[List[str], str]) -> List[Dict[str, str]]:
        """Formats messages (or a single message) to OpenAI format."""
        if isinstance(messages, str):
            messages = [messages]
        return [{"content": message, "role": "user"} for message in messages]

    def _format_response(self, response: Any) -> LLMResponse:
        """Parses the LiteLLM response to Rasa format."""
        formatted_response = LLMResponse(
            id=response.id,
            created=response.created,
            choices=[choice.message.content for choice in response.choices],
            model=response.model,
        )
        if (
            response.model_extra
            and (usage := response.model_extra.get("usage")) is not None
        ):
            # We use `.get()` for accessing litellm.utils.Usage attributes.
            # litellm.utils.Usage does not set the attributes if
            # `prompt_tokens` or `completion_tokens` are absent (None).
            prompt_tokens = (
                num_tokens
                if isinstance(num_tokens := usage.get("prompt_tokens", 0), (int, float))
                else 0
            )
            completion_tokens = (
                num_tokens
                if isinstance(
                    num_tokens := usage.get("completion_tokens", 0), (int, float)
                )
                else 0
            )
            formatted_response.usage = LLMUsage(prompt_tokens, completion_tokens)
        structlogger.debug(
            "base_litellm_client.formatted_response",
            formatted_response=formatted_response.to_dict(),
        )
        return formatted_response

    @staticmethod
    def _ensure_certificates() -> None:
        """
        Configures SSL certificates for LiteLLM. This method is invoked during
        client initialization.

        LiteLLM may utilize `openai` clients or other providers that require
        SSL verification settings through the `SSL_VERIFY` / `SSL_CERTIFICATE`
        environment variables or the `litellm.ssl_verify` /
        `litellm.ssl_certificate` global settings.

        This method ensures proper SSL configuration for both cases.
        """
        ensure_ssl_certificates_for_litellm_non_openai_based_clients()
        ensure_ssl_certificates_for_litellm_openai_based_clients()