rasa-pro 3.11.0__py3-none-any.whl → 3.11.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of rasa-pro has been flagged as a potentially problematic release.

Files changed (217)
  1. README.md +396 -17
  2. rasa/__main__.py +15 -31
  3. rasa/api.py +1 -5
  4. rasa/cli/arguments/default_arguments.py +2 -1
  5. rasa/cli/arguments/shell.py +1 -5
  6. rasa/cli/arguments/train.py +0 -14
  7. rasa/cli/e2e_test.py +1 -1
  8. rasa/cli/evaluate.py +8 -8
  9. rasa/cli/inspect.py +5 -7
  10. rasa/cli/interactive.py +0 -1
  11. rasa/cli/llm_fine_tuning.py +1 -1
  12. rasa/cli/project_templates/calm/config.yml +7 -5
  13. rasa/cli/project_templates/calm/endpoints.yml +2 -15
  14. rasa/cli/project_templates/tutorial/config.yml +5 -8
  15. rasa/cli/project_templates/tutorial/data/flows.yml +1 -1
  16. rasa/cli/project_templates/tutorial/data/patterns.yml +0 -5
  17. rasa/cli/project_templates/tutorial/domain.yml +0 -14
  18. rasa/cli/project_templates/tutorial/endpoints.yml +0 -5
  19. rasa/cli/run.py +1 -1
  20. rasa/cli/scaffold.py +2 -4
  21. rasa/cli/studio/studio.py +8 -18
  22. rasa/cli/studio/upload.py +15 -0
  23. rasa/cli/train.py +0 -3
  24. rasa/cli/utils.py +1 -6
  25. rasa/cli/x.py +8 -8
  26. rasa/constants.py +1 -3
  27. rasa/core/actions/action.py +33 -75
  28. rasa/core/actions/e2e_stub_custom_action_executor.py +1 -5
  29. rasa/core/actions/http_custom_action_executor.py +0 -4
  30. rasa/core/channels/channel.py +0 -20
  31. rasa/core/channels/development_inspector.py +2 -8
  32. rasa/core/channels/inspector/dist/assets/{arc-bc141fb2.js → arc-6852c607.js} +1 -1
  33. rasa/core/channels/inspector/dist/assets/{c4Diagram-d0fbc5ce-be2db283.js → c4Diagram-d0fbc5ce-acc952b2.js} +1 -1
  34. rasa/core/channels/inspector/dist/assets/{classDiagram-936ed81e-55366915.js → classDiagram-936ed81e-848a7597.js} +1 -1
  35. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-c3cb15f1-bb529518.js → classDiagram-v2-c3cb15f1-a73d3e68.js} +1 -1
  36. rasa/core/channels/inspector/dist/assets/{createText-62fc7601-b0ec81d6.js → createText-62fc7601-e5ee049d.js} +1 -1
  37. rasa/core/channels/inspector/dist/assets/{edges-f2ad444c-6166330c.js → edges-f2ad444c-771e517e.js} +1 -1
  38. rasa/core/channels/inspector/dist/assets/{erDiagram-9d236eb7-5ccc6a8e.js → erDiagram-9d236eb7-aa347178.js} +1 -1
  39. rasa/core/channels/inspector/dist/assets/{flowDb-1972c806-fca3bfe4.js → flowDb-1972c806-651fc57d.js} +1 -1
  40. rasa/core/channels/inspector/dist/assets/{flowDiagram-7ea5b25a-4739080f.js → flowDiagram-7ea5b25a-ca67804f.js} +1 -1
  41. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-587d82d8.js +1 -0
  42. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-abe16c3d-7c1b0e0f.js → flowchart-elk-definition-abe16c3d-2dbc568d.js} +1 -1
  43. rasa/core/channels/inspector/dist/assets/{ganttDiagram-9b5ea136-772fd050.js → ganttDiagram-9b5ea136-25a65bd8.js} +1 -1
  44. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-99d0ae7c-8eae1dc9.js → gitGraphDiagram-99d0ae7c-fdc7378d.js} +1 -1
  45. rasa/core/channels/inspector/dist/assets/{index-2c4b9a3b-f55afcdf.js → index-2c4b9a3b-6f1fd606.js} +1 -1
  46. rasa/core/channels/inspector/dist/assets/{index-e7cef9de.js → index-efdd30c1.js} +68 -68
  47. rasa/core/channels/inspector/dist/assets/{infoDiagram-736b4530-124d4a14.js → infoDiagram-736b4530-cb1a041a.js} +1 -1
  48. rasa/core/channels/inspector/dist/assets/{journeyDiagram-df861f2b-7c4fae44.js → journeyDiagram-df861f2b-14609879.js} +1 -1
  49. rasa/core/channels/inspector/dist/assets/{layout-b9885fb6.js → layout-2490f52b.js} +1 -1
  50. rasa/core/channels/inspector/dist/assets/{line-7c59abb6.js → line-40186f1f.js} +1 -1
  51. rasa/core/channels/inspector/dist/assets/{linear-4776f780.js → linear-08814e93.js} +1 -1
  52. rasa/core/channels/inspector/dist/assets/{mindmap-definition-beec6740-2332c46c.js → mindmap-definition-beec6740-1a534584.js} +1 -1
  53. rasa/core/channels/inspector/dist/assets/{pieDiagram-dbbf0591-8fb39303.js → pieDiagram-dbbf0591-72397b61.js} +1 -1
  54. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-4d7f4fd6-3c7180a2.js → quadrantDiagram-4d7f4fd6-3bb0b6a3.js} +1 -1
  55. rasa/core/channels/inspector/dist/assets/{requirementDiagram-6fc4c22a-e910bcb8.js → requirementDiagram-6fc4c22a-57334f61.js} +1 -1
  56. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-8f13d901-ead16c89.js → sankeyDiagram-8f13d901-111e1297.js} +1 -1
  57. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-b655622a-29a02a19.js → sequenceDiagram-b655622a-10bcfe62.js} +1 -1
  58. rasa/core/channels/inspector/dist/assets/{stateDiagram-59f0c015-042b3137.js → stateDiagram-59f0c015-acaf7513.js} +1 -1
  59. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-2b26beab-2178c0f3.js → stateDiagram-v2-2b26beab-3ec2a235.js} +1 -1
  60. rasa/core/channels/inspector/dist/assets/{styles-080da4f6-23ffa4fc.js → styles-080da4f6-62730289.js} +1 -1
  61. rasa/core/channels/inspector/dist/assets/{styles-3dcbcfbf-94f59763.js → styles-3dcbcfbf-5284ee76.js} +1 -1
  62. rasa/core/channels/inspector/dist/assets/{styles-9c745c82-78a6bebc.js → styles-9c745c82-642435e3.js} +1 -1
  63. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-4835440b-eae2a6f6.js → svgDrawCommon-4835440b-b250a350.js} +1 -1
  64. rasa/core/channels/inspector/dist/assets/{timeline-definition-5b62e21b-5c968d92.js → timeline-definition-5b62e21b-c2b147ed.js} +1 -1
  65. rasa/core/channels/inspector/dist/assets/{xychartDiagram-2b33534f-fd3db0d5.js → xychartDiagram-2b33534f-f92cfea9.js} +1 -1
  66. rasa/core/channels/inspector/dist/index.html +1 -1
  67. rasa/core/channels/inspector/src/App.tsx +1 -1
  68. rasa/core/channels/inspector/src/helpers/audiostream.ts +16 -77
  69. rasa/core/channels/socketio.py +2 -7
  70. rasa/core/channels/telegram.py +1 -1
  71. rasa/core/channels/twilio.py +1 -1
  72. rasa/core/channels/voice_ready/audiocodes.py +4 -15
  73. rasa/core/channels/voice_ready/jambonz.py +4 -15
  74. rasa/core/channels/voice_ready/twilio_voice.py +21 -6
  75. rasa/core/channels/voice_ready/utils.py +5 -6
  76. rasa/core/channels/voice_stream/asr/asr_engine.py +1 -19
  77. rasa/core/channels/voice_stream/asr/asr_event.py +0 -5
  78. rasa/core/channels/voice_stream/asr/deepgram.py +15 -28
  79. rasa/core/channels/voice_stream/audio_bytes.py +0 -1
  80. rasa/core/channels/voice_stream/browser_audio.py +9 -32
  81. rasa/core/channels/voice_stream/tts/azure.py +3 -9
  82. rasa/core/channels/voice_stream/tts/cartesia.py +8 -12
  83. rasa/core/channels/voice_stream/tts/tts_engine.py +1 -11
  84. rasa/core/channels/voice_stream/twilio_media_streams.py +19 -28
  85. rasa/core/channels/voice_stream/util.py +4 -4
  86. rasa/core/channels/voice_stream/voice_channel.py +42 -222
  87. rasa/core/featurizers/single_state_featurizer.py +1 -22
  88. rasa/core/featurizers/tracker_featurizers.py +18 -115
  89. rasa/core/information_retrieval/qdrant.py +0 -1
  90. rasa/core/nlg/contextual_response_rephraser.py +25 -44
  91. rasa/core/persistor.py +34 -191
  92. rasa/core/policies/enterprise_search_policy.py +60 -119
  93. rasa/core/policies/flows/flow_executor.py +4 -7
  94. rasa/core/policies/intentless_policy.py +22 -82
  95. rasa/core/policies/ted_policy.py +33 -58
  96. rasa/core/policies/unexpected_intent_policy.py +7 -15
  97. rasa/core/processor.py +5 -32
  98. rasa/core/training/interactive.py +35 -34
  99. rasa/core/utils.py +22 -58
  100. rasa/dialogue_understanding/coexistence/llm_based_router.py +12 -39
  101. rasa/dialogue_understanding/commands/__init__.py +0 -4
  102. rasa/dialogue_understanding/commands/change_flow_command.py +0 -6
  103. rasa/dialogue_understanding/commands/utils.py +0 -5
  104. rasa/dialogue_understanding/generator/constants.py +0 -2
  105. rasa/dialogue_understanding/generator/flow_retrieval.py +4 -49
  106. rasa/dialogue_understanding/generator/llm_based_command_generator.py +23 -37
  107. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +10 -57
  108. rasa/dialogue_understanding/generator/nlu_command_adapter.py +1 -19
  109. rasa/dialogue_understanding/generator/single_step/command_prompt_template.jinja2 +0 -3
  110. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +10 -90
  111. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +0 -53
  112. rasa/dialogue_understanding/processor/command_processor.py +1 -21
  113. rasa/e2e_test/assertions.py +16 -133
  114. rasa/e2e_test/assertions_schema.yml +0 -23
  115. rasa/e2e_test/e2e_test_case.py +6 -85
  116. rasa/e2e_test/e2e_test_runner.py +4 -6
  117. rasa/e2e_test/utils/io.py +1 -3
  118. rasa/engine/loader.py +0 -12
  119. rasa/engine/validation.py +11 -541
  120. rasa/keys +1 -0
  121. rasa/llm_fine_tuning/notebooks/unsloth_finetuning.ipynb +407 -0
  122. rasa/model_training.py +7 -29
  123. rasa/nlu/classifiers/diet_classifier.py +25 -38
  124. rasa/nlu/classifiers/logistic_regression_classifier.py +9 -22
  125. rasa/nlu/classifiers/sklearn_intent_classifier.py +16 -37
  126. rasa/nlu/extractors/crf_entity_extractor.py +50 -93
  127. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +16 -45
  128. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +17 -52
  129. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +3 -5
  130. rasa/nlu/tokenizers/whitespace_tokenizer.py +14 -3
  131. rasa/server.py +1 -3
  132. rasa/shared/constants.py +0 -61
  133. rasa/shared/core/constants.py +0 -9
  134. rasa/shared/core/domain.py +5 -8
  135. rasa/shared/core/flows/flow.py +0 -5
  136. rasa/shared/core/flows/flows_list.py +1 -5
  137. rasa/shared/core/flows/flows_yaml_schema.json +0 -10
  138. rasa/shared/core/flows/validation.py +0 -96
  139. rasa/shared/core/flows/yaml_flows_io.py +4 -13
  140. rasa/shared/core/slots.py +0 -5
  141. rasa/shared/importers/importer.py +2 -19
  142. rasa/shared/importers/rasa.py +1 -5
  143. rasa/shared/nlu/training_data/features.py +2 -120
  144. rasa/shared/nlu/training_data/formats/rasa_yaml.py +3 -18
  145. rasa/shared/providers/_configs/azure_openai_client_config.py +3 -5
  146. rasa/shared/providers/_configs/openai_client_config.py +1 -1
  147. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +0 -1
  148. rasa/shared/providers/_configs/utils.py +0 -16
  149. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +29 -18
  150. rasa/shared/providers/embedding/azure_openai_embedding_client.py +21 -54
  151. rasa/shared/providers/embedding/default_litellm_embedding_client.py +0 -24
  152. rasa/shared/providers/llm/_base_litellm_client.py +31 -63
  153. rasa/shared/providers/llm/azure_openai_llm_client.py +29 -50
  154. rasa/shared/providers/llm/default_litellm_llm_client.py +0 -24
  155. rasa/shared/providers/llm/self_hosted_llm_client.py +29 -17
  156. rasa/shared/providers/mappings.py +0 -19
  157. rasa/shared/utils/common.py +2 -37
  158. rasa/shared/utils/io.py +6 -28
  159. rasa/shared/utils/llm.py +46 -353
  160. rasa/shared/utils/yaml.py +82 -181
  161. rasa/studio/auth.py +5 -3
  162. rasa/studio/config.py +4 -13
  163. rasa/studio/constants.py +0 -1
  164. rasa/studio/data_handler.py +4 -13
  165. rasa/studio/upload.py +80 -175
  166. rasa/telemetry.py +17 -94
  167. rasa/tracing/config.py +1 -3
  168. rasa/tracing/instrumentation/attribute_extractors.py +17 -94
  169. rasa/tracing/instrumentation/instrumentation.py +0 -121
  170. rasa/utils/common.py +0 -5
  171. rasa/utils/endpoints.py +1 -27
  172. rasa/utils/io.py +81 -7
  173. rasa/utils/log_utils.py +2 -9
  174. rasa/utils/tensorflow/model_data.py +193 -2
  175. rasa/validator.py +4 -110
  176. rasa/version.py +1 -1
  177. rasa_pro-3.11.0a2.dist-info/METADATA +576 -0
  178. {rasa_pro-3.11.0.dist-info → rasa_pro-3.11.0a2.dist-info}/RECORD +181 -213
  179. rasa/core/actions/action_repeat_bot_messages.py +0 -89
  180. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-736177bf.js +0 -1
  181. rasa/core/channels/voice_stream/asr/azure.py +0 -129
  182. rasa/core/channels/voice_stream/call_state.py +0 -23
  183. rasa/dialogue_understanding/commands/repeat_bot_messages_command.py +0 -60
  184. rasa/dialogue_understanding/commands/user_silence_command.py +0 -59
  185. rasa/dialogue_understanding/patterns/repeat.py +0 -37
  186. rasa/dialogue_understanding/patterns/user_silence.py +0 -37
  187. rasa/model_manager/__init__.py +0 -0
  188. rasa/model_manager/config.py +0 -40
  189. rasa/model_manager/model_api.py +0 -559
  190. rasa/model_manager/runner_service.py +0 -286
  191. rasa/model_manager/socket_bridge.py +0 -146
  192. rasa/model_manager/studio_jwt_auth.py +0 -86
  193. rasa/model_manager/trainer_service.py +0 -325
  194. rasa/model_manager/utils.py +0 -87
  195. rasa/model_manager/warm_rasa_process.py +0 -187
  196. rasa/model_service.py +0 -112
  197. rasa/shared/core/flows/utils.py +0 -39
  198. rasa/shared/providers/_configs/litellm_router_client_config.py +0 -220
  199. rasa/shared/providers/_configs/model_group_config.py +0 -167
  200. rasa/shared/providers/_configs/rasa_llm_client_config.py +0 -73
  201. rasa/shared/providers/_utils.py +0 -79
  202. rasa/shared/providers/embedding/litellm_router_embedding_client.py +0 -135
  203. rasa/shared/providers/llm/litellm_router_llm_client.py +0 -182
  204. rasa/shared/providers/llm/rasa_llm_client.py +0 -112
  205. rasa/shared/providers/router/__init__.py +0 -0
  206. rasa/shared/providers/router/_base_litellm_router_client.py +0 -183
  207. rasa/shared/providers/router/router_client.py +0 -73
  208. rasa/shared/utils/health_check/__init__.py +0 -0
  209. rasa/shared/utils/health_check/embeddings_health_check_mixin.py +0 -31
  210. rasa/shared/utils/health_check/health_check.py +0 -258
  211. rasa/shared/utils/health_check/llm_health_check_mixin.py +0 -31
  212. rasa/utils/sanic_error_handler.py +0 -32
  213. rasa/utils/tensorflow/feature_array.py +0 -366
  214. rasa_pro-3.11.0.dist-info/METADATA +0 -198
  215. {rasa_pro-3.11.0.dist-info → rasa_pro-3.11.0a2.dist-info}/NOTICE +0 -0
  216. {rasa_pro-3.11.0.dist-info → rasa_pro-3.11.0a2.dist-info}/WHEEL +0 -0
  217. {rasa_pro-3.11.0.dist-info → rasa_pro-3.11.0a2.dist-info}/entry_points.txt +0 -0
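
The file list above is the output of a registry-level comparison of the two wheels. Below is a minimal sketch (not the tool that generated this page) of how a similar file-level comparison could be reproduced locally with only the standard library, assuming both wheel files have already been downloaded; the paths are placeholders.

```python
import zipfile

def wheel_files(path: str) -> dict:
    """Map archive member name -> CRC32 checksum as a cheap content fingerprint."""
    with zipfile.ZipFile(path) as zf:
        return {info.filename: info.CRC for info in zf.infolist()}

def compare_wheels(old_path: str, new_path: str) -> None:
    """Print added, removed, and changed members between two wheel archives."""
    old, new = wheel_files(old_path), wheel_files(new_path)
    for name in sorted(old.keys() | new.keys()):
        if name not in new:
            print(f"removed  {name}")
        elif name not in old:
            print(f"added    {name}")
        elif old[name] != new[name]:
            print(f"changed  {name}")

if __name__ == "__main__":
    # Placeholder paths; substitute the locally downloaded wheels.
    compare_wheels(
        "rasa_pro-3.11.0-py3-none-any.whl",
        "rasa_pro-3.11.0a2-py3-none-any.whl",
    )
```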
rasa/telemetry.py CHANGED
@@ -32,13 +32,7 @@ from rasa.constants import (
     CONFIG_TELEMETRY_ENABLED,
     CONFIG_TELEMETRY_ID,
 )
-from rasa.shared.constants import (
-    PROMPT_CONFIG_KEY,
-    PROMPT_TEMPLATE_CONFIG_KEY,
-    LLM_API_HEALTH_CHECK_ENV_VAR,
-    LLM_API_HEALTH_CHECK_DEFAULT_VALUE,
-    MODEL_GROUP_CONFIG_KEY,
-)
+from rasa.shared.constants import PROMPT_CONFIG_KEY, PROMPT_TEMPLATE_CONFIG_KEY
 from rasa.engine.storage.local_model_storage import LocalModelStorage
 from rasa.shared.constants import DOCS_URL_TELEMETRY, UTTER_ASK_PREFIX
 from rasa.shared.core.flows import Flow
@@ -112,7 +106,6 @@ TELEMETRY_INTERACTIVE_LEARNING_STARTED_EVENT = "Interactive Learning Started"
 TELEMETRY_SERVER_STARTED_EVENT = "Server Started"
 TELEMETRY_PROJECT_CREATED_EVENT = "Project Created"
 TELEMETRY_SHELL_STARTED_EVENT = "Shell Started"
-TELEMETRY_INSPECT_STARTED_EVENT = "Inspect Started"
 TELEMETRY_VISUALIZATION_STARTED_EVENT = "Story Visualization Started"
 TELEMETRY_TEST_CORE_EVENT = "Model Core Tested"
 TELEMETRY_TEST_NLU_EVENT = "Model NLU Tested"
@@ -164,7 +157,6 @@ NUM_LINK_STEPS = "num_link_steps"
 NUM_CALL_STEPS = "num_call_steps"
 NUM_SHARED_SLOTS_BETWEEN_FLOWS = "num_shared_slots_between_flows"
 LLM_COMMAND_GENERATOR_MODEL_NAME = "llm_command_generator_model_name"
-LLM_COMMAND_GENERATOR_MODEL_GROUP_ID = "llm_command_generator_model_group_id"
 LLM_COMMAND_GENERATOR_CUSTOM_PROMPT_USED = "llm_command_generator_custom_prompt_used"
 MULTI_STEP_LLM_COMMAND_GENERATOR_HANDLE_FLOWS_PROMPT_USED = (
     "multi_step_llm_command_generator_custom_handle_flows_prompt_used"
@@ -174,7 +166,6 @@ MULTI_STEP_LLM_COMMAND_GENERATOR_FILL_SLOTS_PROMPT_USED = (
 )
 FLOW_RETRIEVAL_ENABLED = "flow_retrieval_enabled"
 FLOW_RETRIEVAL_EMBEDDING_MODEL_NAME = "flow_retrieval_embedding_model_name"
-FLOW_RETRIEVAL_EMBEDDING_MODEL_GROUP_ID = "flow_retrieval_embedding_model_group_id"
 TRACING_BACKEND = "tracing_backend"
 METRICS_BACKEND = "metrics_backend"
 VERSION = "version"
@@ -969,13 +960,6 @@ def track_model_training(
             "policies": config.get("policies"),
             "train_schema": config.get("train_schema"),
             "predict_schema": config.get("predict_schema"),
-            "model_groups": rasa.core.utils.AvailableEndpoints.get_instance().model_groups,
-            "api_health_check_enabled": (
-                os.getenv(
-                    LLM_API_HEALTH_CHECK_ENV_VAR, LLM_API_HEALTH_CHECK_DEFAULT_VALUE
-                ).lower()
-                == "true"
-            ),
             "num_intent_examples": len(nlu_data.intent_examples),
             "num_entity_examples": len(nlu_data.entity_examples),
             "num_actions": len(domain.action_names_or_texts),
@@ -1130,78 +1114,46 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
                 return component
         return None
 
-    def extract_llm_command_generator_llm_client_settings(component: Dict) -> Dict:
-        """Extracts settings related to LLM command generator."""
+    def extract_settings(component: Dict) -> Dict:
+        """Extracts the settings from the command generator component."""
         llm_config = component.get(LLM_CONFIG_KEY, {})
-        # Config at this stage is not yet resolved, so read from `model_group`
-        llm_model_group_id = llm_config.get(MODEL_GROUP_CONFIG_KEY)
-        llm_model_name = llm_config.get(MODEL_CONFIG_KEY) or llm_config.get(
-            MODEL_NAME_CONFIG_KEY
+        llm_model_name = (
+            llm_config.get(MODEL_CONFIG_KEY)
+            or llm_config.get(MODEL_NAME_CONFIG_KEY)
+            or DEFAULT_LLM_CONFIG[MODEL_CONFIG_KEY]
         )
-        if llm_model_group_id is None and llm_model_name is None:
-            llm_model_name = DEFAULT_LLM_CONFIG[MODEL_CONFIG_KEY]
-
-        custom_prompt_used = (
-            PROMPT_CONFIG_KEY in component or PROMPT_TEMPLATE_CONFIG_KEY in component
-        )
-        return {
-            LLM_COMMAND_GENERATOR_MODEL_NAME: llm_model_name,
-            LLM_COMMAND_GENERATOR_MODEL_GROUP_ID: llm_model_group_id,
-            LLM_COMMAND_GENERATOR_CUSTOM_PROMPT_USED: custom_prompt_used,
-        }
-
-    def extract_multistep_command_generator_prompt_settings(component: Dict) -> Dict:
-        """Extracts settings related to multistep command generator."""
-        prompt_templates = component.get("prompt_templates", {})
-        handle_flows_prompt_used = HANDLE_FLOWS_KEY in prompt_templates
-        fill_slots_prompt_used = FILL_SLOTS_KEY in prompt_templates
-        return {
-            MULTI_STEP_LLM_COMMAND_GENERATOR_HANDLE_FLOWS_PROMPT_USED: handle_flows_prompt_used,  # noqa: E501
-            MULTI_STEP_LLM_COMMAND_GENERATOR_FILL_SLOTS_PROMPT_USED: fill_slots_prompt_used,  # noqa: E501
-        }
-
-    def extract_flow_retrieval_settings(component: Dict) -> Dict:
-        """Extracts settings related to flow retrieval."""
         flow_retrieval_config = component.get(FLOW_RETRIEVAL_KEY, {})
         flow_retrieval_enabled = flow_retrieval_config.get("active", True)
-        embeddings_config = flow_retrieval_config.get(
+        flow_retrieval_embeddings_config = flow_retrieval_config.get(
             EMBEDDINGS_CONFIG_KEY, DEFAULT_EMBEDDINGS_CONFIG
         )
         flow_retrieval_embedding_model_name = (
             (
-                embeddings_config.get(MODEL_NAME_CONFIG_KEY)
-                or embeddings_config.get(MODEL_CONFIG_KEY)
+                flow_retrieval_embeddings_config.get(MODEL_NAME_CONFIG_KEY)
+                or flow_retrieval_embeddings_config.get(MODEL_CONFIG_KEY)
             )
             if flow_retrieval_enabled
             else None
         )
-        # Config at this stage is not yet resolved, so read from `model_group`
-        flow_retrieval_embedding_model_group_id = embeddings_config.get(
-            MODEL_GROUP_CONFIG_KEY
-        )
         return {
+            LLM_COMMAND_GENERATOR_MODEL_NAME: llm_model_name,
+            LLM_COMMAND_GENERATOR_CUSTOM_PROMPT_USED: PROMPT_CONFIG_KEY in component
+            or PROMPT_TEMPLATE_CONFIG_KEY in component,
+            MULTI_STEP_LLM_COMMAND_GENERATOR_HANDLE_FLOWS_PROMPT_USED: HANDLE_FLOWS_KEY
+            in component.get("prompt_templates", {}),
+            MULTI_STEP_LLM_COMMAND_GENERATOR_FILL_SLOTS_PROMPT_USED: FILL_SLOTS_KEY
+            in component.get("prompt_templates", {}),
             FLOW_RETRIEVAL_ENABLED: flow_retrieval_enabled,
             FLOW_RETRIEVAL_EMBEDDING_MODEL_NAME: flow_retrieval_embedding_model_name,
-            FLOW_RETRIEVAL_EMBEDDING_MODEL_GROUP_ID: flow_retrieval_embedding_model_group_id,  # noqa: E501
         }
 
-    def extract_settings(component: Dict) -> Dict:
-        """Extracts the settings from the command generator component."""
-        settings = {}
-        settings.update(extract_llm_command_generator_llm_client_settings(component))
-        settings.update(extract_multistep_command_generator_prompt_settings(component))
-        settings.update(extract_flow_retrieval_settings(component))
-        return settings
-
     command_generator_config = {
         LLM_COMMAND_GENERATOR_MODEL_NAME: None,
-        LLM_COMMAND_GENERATOR_MODEL_GROUP_ID: None,
         LLM_COMMAND_GENERATOR_CUSTOM_PROMPT_USED: None,
         MULTI_STEP_LLM_COMMAND_GENERATOR_HANDLE_FLOWS_PROMPT_USED: None,
        MULTI_STEP_LLM_COMMAND_GENERATOR_FILL_SLOTS_PROMPT_USED: None,
         FLOW_RETRIEVAL_ENABLED: None,
         FLOW_RETRIEVAL_EMBEDDING_MODEL_NAME: None,
-        FLOW_RETRIEVAL_EMBEDDING_MODEL_GROUP_ID: None,
     }
 
     pipeline = config.get("pipeline", [])
@@ -1381,17 +1333,6 @@ def track_shell_started(model_type: Text) -> None:
     _track(TELEMETRY_SHELL_STARTED_EVENT, {"type": model_type})
 
 
-@ensure_telemetry_enabled
-def track_inspect_started(model_type: Text) -> None:
-    """Track when a user starts a bot using rasa inspect.
-
-    Args:
-        channel: Channel name `socketio` (used for chat assistants)
-            or `browser_audio` (used for voice).
-    """
-    _track(TELEMETRY_INSPECT_STARTED_EVENT, {"type": model_type})
-
-
 @ensure_telemetry_enabled
 def track_visualization() -> None:
     """Track when a user runs the visualization."""
@@ -1612,7 +1553,6 @@ def track_response_rephrase(
     custom_prompt_template: Optional[str],
     llm_type: Optional[str],
     llm_model: Optional[str],
-    llm_model_group_id: Optional[str],
 ) -> None:
     """Track when a user rephrases a response."""
     _track(
@@ -1622,7 +1562,6 @@ def track_response_rephrase(
             "custom_prompt_template": custom_prompt_template,
             "llm_type": llm_type,
             "llm_model": llm_model,
-            "llm_model_group_id": llm_model_group_id,
         },
     )
 
@@ -1637,10 +1576,8 @@ def track_intentless_policy_train() -> None:
 def track_intentless_policy_train_completed(
     embeddings_type: Optional[str],
     embeddings_model: Optional[str],
-    embeddings_model_group_id: Optional[str],
     llm_type: Optional[str],
     llm_model: Optional[str],
-    llm_model_group_id: Optional[str],
 ) -> None:
     """Track when a user trains a policy."""
     _track(
@@ -1648,10 +1585,8 @@ def track_intentless_policy_train_completed(
         {
             "embeddings_type": embeddings_type,
             "embeddings_model": embeddings_model,
-            "embeddings_model_group_id": embeddings_model_group_id,
             "llm_type": llm_type,
             "llm_model": llm_model,
-            "llm_model_group_id": llm_model_group_id,
         },
     )
 
@@ -1660,10 +1595,8 @@ def track_intentless_policy_train_completed(
 def track_intentless_policy_predict(
     embeddings_type: Optional[str],
     embeddings_model: Optional[str],
-    embeddings_model_group_id: Optional[str],
     llm_type: Optional[str],
     llm_model: Optional[str],
-    llm_model_group_id: Optional[str],
     score: float,
 ) -> None:
     """Track when a user trains a policy."""
@@ -1672,10 +1605,8 @@ def track_intentless_policy_predict(
         {
             "embeddings_type": embeddings_type,
             "embeddings_model": embeddings_model,
-            "embeddings_model_group_id": embeddings_model_group_id,
             "llm_type": llm_type,
             "llm_model": llm_model,
-            "llm_model_group_id": llm_model_group_id,
             "score": score,
         },
     )
@@ -1765,10 +1696,8 @@ def track_enterprise_search_policy_train_completed(
     vector_store_type: Optional[str],
     embeddings_type: Optional[str],
     embeddings_model: Optional[str],
-    embeddings_model_group_id: Optional[str],
     llm_type: Optional[str],
     llm_model: Optional[str],
-    llm_model_group_id: Optional[str],
     citation_enabled: Optional[bool],
 ) -> None:
     """Track when a user completes training Enterprise Search policy."""
@@ -1778,10 +1707,8 @@ def track_enterprise_search_policy_train_completed(
             "vector_store_type": vector_store_type,
             "embeddings_type": embeddings_type,
             "embeddings_model": embeddings_model,
-            "embeddings_model_group_id": embeddings_model_group_id,
             "llm_type": llm_type,
             "llm_model": llm_model,
-            "llm_model_group_id": llm_model_group_id,
             "citation_enabled": citation_enabled,
         },
     )
@@ -1792,10 +1719,8 @@ def track_enterprise_search_policy_predict(
     vector_store_type: Optional[str],
     embeddings_type: Optional[str],
     embeddings_model: Optional[str],
-    embeddings_model_group_id: Optional[str],
     llm_type: Optional[str],
     llm_model: Optional[str],
-    llm_model_group_id: Optional[str],
     citation_enabled: Optional[bool],
 ) -> None:
     """Track when a user predicts the next action using Enterprise Search policy."""
@@ -1805,10 +1730,8 @@ def track_enterprise_search_policy_predict(
             "vector_store_type": vector_store_type,
             "embeddings_type": embeddings_type,
             "embeddings_model": embeddings_model,
-            "embeddings_model_group_id": embeddings_model_group_id,
             "llm_type": llm_type,
             "llm_model": llm_model,
-            "llm_model_group_id": llm_model_group_id,
             "citation_enabled": citation_enabled,
         },
    )
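
For orientation, the 3.11.0a2 side of the hunks above folds the three 3.11.0 helpers back into a single `extract_settings`. Below is a standalone sketch of that consolidated logic operating on a plain dict; the literal config key strings ("llm", "model", "model_name", "prompt", "prompt_template", "flow_retrieval", "embeddings", "active") and the default values are assumptions standing in for the rasa constants, while the returned telemetry field names are the ones shown in this diff.

```python
from typing import Any, Dict

# Placeholder defaults; the real values come from DEFAULT_LLM_CONFIG and
# DEFAULT_EMBEDDINGS_CONFIG in rasa and are not reproduced here.
ASSUMED_DEFAULT_LLM_MODEL = "default-llm"
ASSUMED_DEFAULT_EMBEDDINGS = {"model": "default-embeddings"}

def extract_settings(component: Dict[str, Any]) -> Dict[str, Any]:
    """Collect command-generator telemetry fields from one pipeline component."""
    llm_config = component.get("llm", {})
    llm_model_name = (
        llm_config.get("model")
        or llm_config.get("model_name")
        or ASSUMED_DEFAULT_LLM_MODEL
    )

    flow_retrieval_config = component.get("flow_retrieval", {})
    flow_retrieval_enabled = flow_retrieval_config.get("active", True)
    embeddings_config = flow_retrieval_config.get("embeddings", ASSUMED_DEFAULT_EMBEDDINGS)
    embedding_model_name = (
        (embeddings_config.get("model_name") or embeddings_config.get("model"))
        if flow_retrieval_enabled
        else None
    )

    return {
        "llm_command_generator_model_name": llm_model_name,
        "llm_command_generator_custom_prompt_used": "prompt" in component
        or "prompt_template" in component,
        "flow_retrieval_enabled": flow_retrieval_enabled,
        "flow_retrieval_embedding_model_name": embedding_model_name,
    }

if __name__ == "__main__":
    print(extract_settings({"llm": {"model": "gpt-4o"}, "prompt": "custom prompt"}))
```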
rasa/tracing/config.py CHANGED
@@ -33,7 +33,6 @@ from rasa.dialogue_understanding.generator import (
     SingleStepLLMCommandGenerator,
     MultiStepLLMCommandGenerator,
 )
-from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
 from rasa.dialogue_understanding.generator.nlu_command_adapter import NLUCommandAdapter
 from rasa.engine.graph import GraphNode
 from rasa.engine.training.graph_trainer import GraphTrainer
@@ -112,7 +111,6 @@ def configure_tracing(tracer_provider: Optional[TracerProvider]) -> None:
         single_step_llm_command_generator_class=SingleStepLLMCommandGenerator,
         multi_step_llm_command_generator_class=MultiStepLLMCommandGenerator,
         custom_action_executor_subclasses=custom_action_executor_subclasses,
-        flow_retrieval_class=FlowRetrieval,
     )
 
 
@@ -131,7 +129,7 @@ def get_tracer_provider(endpoints_file: Text) -> Optional[TracerProvider]:
 
     if not cfg:
         logger.info(
-            f"No endpoint for tracing type available in {endpoints_file}, "
+            f"No endpoint for tracing type available in {endpoints_file},"
            f"tracing will not be configured."
         )
         return None
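
A quick standalone check of the implicit string-literal concatenation used in the logger.info call above: adjacent f-strings are joined with no separator, so the trailing space inside the first literal (present on the 3.11.0 side of the hunk, absent on the 3.11.0a2 side) is what keeps the two clauses apart.

```python
endpoints_file = "endpoints.yml"  # placeholder value for illustration

with_space = (
    f"No endpoint for tracing type available in {endpoints_file}, "
    f"tracing will not be configured."
)
without_space = (
    f"No endpoint for tracing type available in {endpoints_file},"
    f"tracing will not be configured."
)

print(with_space)     # "... endpoints.yml, tracing will not be configured."
print(without_space)  # "... endpoints.yml,tracing will not be configured."
```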
rasa/tracing/instrumentation/attribute_extractors.py CHANGED
@@ -5,7 +5,6 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Text, Tuple, Union
 
 import tiktoken
 from numpy import ndarray
-from rasa.dialogue_understanding.generator.constants import FLOW_RETRIEVAL_KEY
 from rasa_sdk.grpc_py import action_webhook_pb2
 
 from rasa.core.actions.action import DirectCustomActionExecutor
@@ -20,7 +19,6 @@ from rasa.core.processor import MessageProcessor
 from rasa.core.tracker_store import TrackerStore
 from rasa.dialogue_understanding.commands import Command
 from rasa.dialogue_understanding.stack.dialogue_stack import DialogueStack
-from rasa.dialogue_understanding.generator import LLMBasedCommandGenerator
 from rasa.engine.graph import ExecutionContext, GraphModelConfiguration, GraphNode
 from rasa.engine.training.graph_trainer import GraphTrainer
 from rasa.shared.constants import (
@@ -29,8 +27,6 @@ from rasa.shared.constants import (
     PROVIDER_CONFIG_KEY,
     TIMEOUT_CONFIG_KEY,
     DEPLOYMENT_CONFIG_KEY,
-    MODEL_GROUP_ID_CONFIG_KEY,
-    LLM_CONFIG_KEY,
 )
 from rasa.shared.core.constants import REQUESTED_SLOT
 from rasa.shared.core.domain import Domain
@@ -54,7 +50,10 @@ if TYPE_CHECKING:
     from rasa.core.policies.enterprise_search_policy import EnterpriseSearchPolicy
     from rasa.core.policies.intentless_policy import IntentlessPolicy
     from rasa.core.policies.policy import PolicyPrediction
-    from rasa.dialogue_understanding.generator import CommandGenerator
+    from rasa.dialogue_understanding.generator import (
+        CommandGenerator,
+        LLMBasedCommandGenerator,
+    )
 
 # This file contains all attribute extractors for tracing instrumentation.
 # These are functions that are applied to the arguments of the wrapped function to be
@@ -301,49 +300,22 @@ def extract_attrs_for_command(
     }
 
 
-def extract_llm_config(
-    self: Any,
-    default_llm_config: Dict[str, Any],
-    default_embeddings_config: Dict[str, Any],
-) -> Dict[str, Any]:
+def extract_llm_config(self: Any, default_llm_config: Dict[str, Any]) -> Dict[str, Any]:
     if isinstance(self, ContextualResponseRephraser):
-        # ContextualResponseRephraser is not a graph component, so it's
-        # not having a full config.
-        config = {"llm": self.llm_config}
+        config = self.nlg_endpoint.kwargs
     else:
         config = self.config
 
     llm_property = combine_custom_and_default_config(
-        config.get(LLM_CONFIG_KEY), default_llm_config
+        config.get("llm"), default_llm_config
     )
 
-    if isinstance(self, LLMBasedCommandGenerator):
-        flow_retrieval_config = config.get(FLOW_RETRIEVAL_KEY, {}) or {}
-        embeddings_property = combine_custom_and_default_config(
-            flow_retrieval_config.get(EMBEDDINGS_CONFIG_KEY),
-            default_embeddings_config,
-        )
-    else:
-        embeddings_property = combine_custom_and_default_config(
-            config.get(EMBEDDINGS_CONFIG_KEY), default_embeddings_config
-        )
-
     attributes = {
         "class_name": self.__class__.__name__,
-        # llm client attributes
         "llm_model": str(llm_property.get(MODEL_CONFIG_KEY)),
         "llm_type": str(llm_property.get(PROVIDER_CONFIG_KEY)),
-        "llm_model_group_id": str(llm_property.get(MODEL_GROUP_ID_CONFIG_KEY)),
+        "embeddings": json.dumps(config.get(EMBEDDINGS_CONFIG_KEY, {})),
         "llm_temperature": str(llm_property.get("temperature")),
-        "llm_request_timeout": str(llm_property.get(TIMEOUT_CONFIG_KEY)),
-        # embedding client attributes
-        "embeddings_model": str(embeddings_property.get(MODEL_CONFIG_KEY)),
-        "embeddings_type": str(embeddings_property.get(PROVIDER_CONFIG_KEY)),
-        "embeddings_model_group_id": str(
-            embeddings_property.get(MODEL_GROUP_ID_CONFIG_KEY)
-        ),
-        # TODO: Keeping this to avoid potential breaking changes
-        "embeddings": json.dumps(embeddings_property, sort_keys=True),
         "request_timeout": str(llm_property.get(TIMEOUT_CONFIG_KEY)),
     }
 
@@ -357,16 +329,11 @@ def extract_attrs_for_llm_based_command_generator(
     self: "LLMBasedCommandGenerator",
     prompt: str,
 ) -> Dict[str, Any]:
-    from rasa.dialogue_understanding.generator.constants import DEFAULT_LLM_CONFIG
-    from rasa.dialogue_understanding.generator.flow_retrieval import (
-        DEFAULT_EMBEDDINGS_CONFIG,
+    from rasa.dialogue_understanding.generator.constants import (
+        DEFAULT_LLM_CONFIG,
     )
 
-    attributes = extract_llm_config(
-        self,
-        default_llm_config=DEFAULT_LLM_CONFIG,
-        default_embeddings_config=DEFAULT_EMBEDDINGS_CONFIG,
-    )
+    attributes = extract_llm_config(self, default_llm_config=DEFAULT_LLM_CONFIG)
 
     return extend_attributes_with_prompt_tokens_length(self, attributes, prompt)
 
@@ -377,12 +344,7 @@ def extract_attrs_for_contextual_response_rephraser(
 ) -> Dict[str, Any]:
     from rasa.core.nlg.contextual_response_rephraser import DEFAULT_LLM_CONFIG
 
-    attributes = extract_llm_config(
-        self,
-        default_llm_config=DEFAULT_LLM_CONFIG,
-        # rephraser is not using embeddings
-        default_embeddings_config={},
-    )
+    attributes = extract_llm_config(self, default_llm_config=DEFAULT_LLM_CONFIG)
 
     return extend_attributes_with_prompt_tokens_length(self, attributes, prompt)
 
@@ -393,12 +355,7 @@ def extract_attrs_for_create_history(
 ) -> Dict[str, Any]:
     from rasa.core.nlg.contextual_response_rephraser import DEFAULT_LLM_CONFIG
 
-    return extract_llm_config(
-        self,
-        default_llm_config=DEFAULT_LLM_CONFIG,
-        # rephraser is not using embeddings
-        default_embeddings_config={},
-    )
+    return extract_llm_config(self, default_llm_config=DEFAULT_LLM_CONFIG)
 
 
 def extract_attrs_for_generate(
@@ -414,26 +371,6 @@ def extract_attrs_for_generate(
     }
 
 
-def extract_attrs_for_performing_health_check(
-    custom_config: Optional[Dict[str, Any]],
-    default_config: Dict[str, Any],
-    log_source_method: str,
-    log_source_component: str,
-) -> Dict[str, Any]:
-    from rasa.shared.utils.health_check.health_check import is_api_health_check_enabled
-
-    attrs = {
-        "api_health_check_enabled": is_api_health_check_enabled(),
-        "health_check_trigger_component": log_source_component,
-        "health_check_trigger_method": log_source_method,
-    }
-    if is_api_health_check_enabled():
-        attrs["config"] = json.dumps(
-            combine_custom_and_default_config(custom_config, default_config)
-        )
-    return attrs
-
-
 def extract_attrs_for_execute_commands(
     tracker: DialogueStateTracker,
     all_flows: FlowsList,
@@ -643,16 +580,9 @@ def extract_attrs_for_intentless_policy_find_closest_response(
 def extract_attrs_for_intentless_policy_generate_llm_answer(
     self: "IntentlessPolicy", llm: "BaseLLM", prompt: str
 ) -> Dict[str, Any]:
-    from rasa.core.policies.intentless_policy import (
-        DEFAULT_LLM_CONFIG,
-        DEFAULT_EMBEDDINGS_CONFIG,
-    )
+    from rasa.core.policies.intentless_policy import DEFAULT_LLM_CONFIG
 
-    attributes = extract_llm_config(
-        self,
-        default_llm_config=DEFAULT_LLM_CONFIG,
-        default_embeddings_config=DEFAULT_EMBEDDINGS_CONFIG,
-    )
+    attributes = extract_llm_config(self, default_llm_config=DEFAULT_LLM_CONFIG)
 
     return extend_attributes_with_prompt_tokens_length(self, attributes, prompt)
 
@@ -660,16 +590,9 @@ def extract_attrs_for_intentless_policy_generate_llm_answer(
 def extract_attrs_for_enterprise_search_generate_llm_answer(
     self: "EnterpriseSearchPolicy", llm: "BaseLLM", prompt: str
 ) -> Dict[str, Any]:
-    from rasa.core.policies.enterprise_search_policy import (
-        DEFAULT_LLM_CONFIG,
-        DEFAULT_EMBEDDINGS_CONFIG,
-    )
+    from rasa.core.policies.enterprise_search_policy import DEFAULT_LLM_CONFIG
 
-    attributes = extract_llm_config(
-        self,
-        default_llm_config=DEFAULT_LLM_CONFIG,
-        default_embeddings_config=DEFAULT_EMBEDDINGS_CONFIG,
-    )
+    attributes = extract_llm_config(self, default_llm_config=DEFAULT_LLM_CONFIG)
 
     return extend_attributes_with_prompt_tokens_length(self, attributes, prompt)
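
A self-contained sketch of the attribute-building pattern in `extract_llm_config` above, assuming `combine_custom_and_default_config` is a shallow "custom overrides default" merge (the real merge semantics live in rasa and are not shown in this diff). The span attribute names mirror the ones visible in the hunk, while the literal config key strings stand in for MODEL_CONFIG_KEY, PROVIDER_CONFIG_KEY, and TIMEOUT_CONFIG_KEY.

```python
import json
from typing import Any, Dict, Optional

def combine_custom_and_default_config(
    custom: Optional[Dict[str, Any]], default: Dict[str, Any]
) -> Dict[str, Any]:
    # Assumed behaviour: start from the defaults, overlay any user-provided keys.
    return {**default, **(custom or {})}

def extract_llm_attributes(
    config: Dict[str, Any], default_llm_config: Dict[str, Any]
) -> Dict[str, str]:
    """Build flat, string-valued span attributes from an LLM component config."""
    llm_property = combine_custom_and_default_config(config.get("llm"), default_llm_config)
    return {
        "llm_model": str(llm_property.get("model")),
        "llm_type": str(llm_property.get("provider")),
        "embeddings": json.dumps(config.get("embeddings", {})),
        "llm_temperature": str(llm_property.get("temperature")),
        "request_timeout": str(llm_property.get("timeout")),
    }

if __name__ == "__main__":
    defaults = {"model": "gpt-4", "provider": "openai", "temperature": 0.0, "timeout": 7}
    print(extract_llm_attributes({"llm": {"temperature": 0.3}}, defaults))
```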
rasa/tracing/instrumentation/instrumentation.py CHANGED
@@ -45,7 +45,6 @@ from rasa.dialogue_understanding.generator import (
     MultiStepLLMCommandGenerator,
     SingleStepLLMCommandGenerator,
 )
-from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
 from rasa.dialogue_understanding.generator.nlu_command_adapter import NLUCommandAdapter
 from rasa.engine.graph import GraphNode
 from rasa.engine.training.graph_trainer import GraphTrainer
@@ -284,7 +283,6 @@ SingleStepLLMCommandGeneratorType = TypeVar(
 MultiStepLLMCommandGeneratorType = TypeVar(
     "MultiStepLLMCommandGeneratorType", bound=MultiStepLLMCommandGenerator
 )
-FlowRetrievalType = TypeVar("FlowRetrievalType", bound=FlowRetrieval)
 CommandType = TypeVar("CommandType", bound=Command)
 PolicyType = TypeVar("PolicyType", bound=Policy)
 InformationRetrievalType = TypeVar(
@@ -319,7 +317,6 @@ def instrument(
     custom_action_executor_subclasses: Optional[
         List[Type[CustomActionExecutor]]
     ] = None,
-    flow_retrieval_class: Optional[Type[FlowRetrievalType]] = None,
 ) -> None:
     """Substitute methods to be traced by their traced counterparts.
 
@@ -448,12 +445,6 @@ def instrument(
             "_check_commands_against_startable_flows",
             attribute_extractors.extract_attrs_for_check_commands_against_startable_flows,
         )
-        _instrument_perform_health_check_method_for_component(
-            tracer_provider.get_tracer(llm_command_generator_class.__module__),
-            llm_command_generator_class,
-            "perform_llm_health_check",
-            attribute_extractors.extract_attrs_for_performing_health_check,
-        )
         mark_class_as_instrumented(llm_command_generator_class)
 
     if (
@@ -477,14 +468,6 @@ def instrument(
             "_check_commands_against_startable_flows",
             attribute_extractors.extract_attrs_for_check_commands_against_startable_flows,
         )
-        _instrument_perform_health_check_method_for_component(
-            tracer_provider.get_tracer(
-                single_step_llm_command_generator_class.__module__
-            ),
-            single_step_llm_command_generator_class,
-            "perform_llm_health_check",
-            attribute_extractors.extract_attrs_for_performing_health_check,
-        )
         mark_class_as_instrumented(single_step_llm_command_generator_class)
 
     if multi_step_llm_command_generator_class is not None and not class_is_instrumented(
@@ -505,36 +488,8 @@ def instrument(
             ),
             multi_step_llm_command_generator_class,
         )
-        _instrument_perform_health_check_method_for_component(
-            tracer_provider.get_tracer(
-                multi_step_llm_command_generator_class.__module__
-            ),
-            multi_step_llm_command_generator_class,
-            "perform_llm_health_check",
-            attribute_extractors.extract_attrs_for_performing_health_check,
-        )
         mark_class_as_instrumented(multi_step_llm_command_generator_class)
 
-    if (
-        any(
-            llm_based_command_generator_class is not None
-            for llm_based_command_generator_class in (
-                llm_command_generator_class,
-                single_step_llm_command_generator_class,
-                multi_step_llm_command_generator_class,
-            )
-        )
-        and flow_retrieval_class is not None
-        and not class_is_instrumented(flow_retrieval_class)
-    ):
-        _instrument_perform_health_check_method_for_component(
-            tracer_provider.get_tracer(flow_retrieval_class.__module__),
-            flow_retrieval_class,
-            "perform_embeddings_health_check",
-            attribute_extractors.extract_attrs_for_performing_health_check,
-        )
-        mark_class_as_instrumented(flow_retrieval_class)
-
     if command_subclasses:
         for command_subclass in command_subclasses:
             if command_subclass is not None and not class_is_instrumented(
@@ -569,12 +524,6 @@ def instrument(
             "generate",
             attribute_extractors.extract_attrs_for_generate,
         )
-        _instrument_perform_health_check_method_for_component(
-            tracer_provider.get_tracer(contextual_response_rephraser_class.__module__),
-            contextual_response_rephraser_class,
-            "perform_llm_health_check",
-            attribute_extractors.extract_attrs_for_performing_health_check,
-        )
         mark_class_as_instrumented(contextual_response_rephraser_class)
 
     if not module_is_instrumented(COMMAND_PROCESSOR_MODULE_NAME):
@@ -806,18 +755,6 @@ def _instrument_enterprise_search_policy(
         "_generate_llm_answer",
         attribute_extractors.extract_attrs_for_enterprise_search_generate_llm_answer,
     )
-    _instrument_perform_health_check_method_for_component(
-        tracer_provider.get_tracer(policy_class.__module__),
-        policy_class,
-        "perform_embeddings_health_check",
-        attribute_extractors.extract_attrs_for_performing_health_check,
-    )
-    _instrument_perform_health_check_method_for_component(
-        tracer_provider.get_tracer(policy_class.__module__),
-        policy_class,
-        "perform_llm_health_check",
-        attribute_extractors.extract_attrs_for_performing_health_check,
-    )
 
 
 def _instrument_intentless_policy(
@@ -850,18 +787,6 @@ def _instrument_intentless_policy(
         "_generate_llm_answer",
         attribute_extractors.extract_attrs_for_intentless_policy_generate_llm_answer,
     )
-    _instrument_perform_health_check_method_for_component(
-        tracer_provider.get_tracer(policy_class.__module__),
-        policy_class,
-        "perform_embeddings_health_check",
-        attribute_extractors.extract_attrs_for_performing_health_check,
-    )
-    _instrument_perform_health_check_method_for_component(
-        tracer_provider.get_tracer(policy_class.__module__),
-        policy_class,
-        "perform_llm_health_check",
-        attribute_extractors.extract_attrs_for_performing_health_check,
-    )
 
 
 def _instrument_processor(
@@ -1214,52 +1139,6 @@ def _instrument_grpc_custom_action_executor(
     logger.debug(f"Instrumented '{grpc_custom_action_executor_class.__name__}.run.")
 
 
-def _instrument_perform_health_check_method_for_component(
-    tracer: Tracer,
-    instrumented_class: Type,
-    method_name: Text,
-    attr_extractor: Optional[Callable] = None,
-    return_value_attr_extractor: Optional[Callable] = None,
-) -> None:
-    def tracing_perform_health_check_for_component(
-        fn: Callable[..., S],
-    ) -> Callable[..., S]:
-        @functools.wraps(fn)
-        def wrapper(*args: Any, **kwargs: Any) -> S:
-            # Check the first argument to adjust for self/cls depending on how
-            # the static method from LLMHealthCheckMixin / EmbeddingsLLMHealthCheckMixin
-            # is called.
-            if args and isinstance(
-                args[0], (instrumented_class, type(instrumented_class))
-            ):
-                # The first argument is self/cls; align args to match the signature
-                args = args[1:]
-
-            span_name = f"{instrumented_class.__name__}.{fn.__name__}"
-            extracted_attrs = attr_extractor(*args, **kwargs) if attr_extractor else {}
-
-            with tracer.start_as_current_span(span_name) as span:
-                result = fn(*args, **kwargs)
-
-                # Extract attributes from the return value, if an extractor is provided
-                return_value_attributes = (
-                    return_value_attr_extractor(result, *args, **kwargs)
-                    if return_value_attr_extractor
-                    else {}
-                )
-
-                span.set_attributes({**extracted_attrs, **return_value_attributes})
-                return result
-
-        return wrapper
-
-    method_to_trace = getattr(instrumented_class, method_name)
-    traced_method = tracing_perform_health_check_for_component(method_to_trace)
-    setattr(instrumented_class, method_name, traced_method)
-
-    logger.debug(f"Instrumented '{instrumented_class.__name__}.{method_name}'.")
-
-
 def _mangled_instrumented_boolean_attribute_name(instrumented_class: Type) -> Text:
     # see https://peps.python.org/pep-0008/#method-names-and-instance-variables
     # and https://stackoverflow.com/a/50401073
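
The last hunk removes `_instrument_perform_health_check_method_for_component`, which follows the wrap-and-patch pattern used throughout this module: fetch a method from a class, wrap it so attributes are recorded around the call, and set the wrapper back on the class. Below is a minimal self-contained sketch of that pattern, with a plain callback standing in for the OpenTelemetry tracer and span used in the real code; all names here are illustrative, not rasa APIs.

```python
import functools
from typing import Any, Callable, Dict, Optional, Type

def instrument_method(
    instrumented_class: Type,
    method_name: str,
    attr_extractor: Optional[Callable[..., Dict[str, Any]]] = None,
    record: Optional[Callable[[str, Dict[str, Any]], None]] = None,
) -> None:
    """Replace `method_name` on the class with a wrapper that reports attributes."""
    original = getattr(instrumented_class, method_name)

    @functools.wraps(original)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        span_name = f"{instrumented_class.__name__}.{method_name}"
        attrs = attr_extractor(*args, **kwargs) if attr_extractor else {}
        result = original(*args, **kwargs)
        if record is not None:
            record(span_name, attrs)  # stand-in for span.set_attributes(...)
        return result

    setattr(instrumented_class, method_name, wrapper)

class Example:
    def ping(self, value: int) -> int:
        return value + 1

if __name__ == "__main__":
    instrument_method(
        Example,
        "ping",
        attr_extractor=lambda self, value: {"value": value},
        record=lambda name, attrs: print(name, attrs),
    )
    print(Example().ping(41))  # prints "Example.ping {'value': 41}" then 42
```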