rasa-pro 3.9.18__py3-none-any.whl → 3.10.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rasa-pro has been flagged by the registry as potentially problematic.

Files changed (183)
  1. README.md +0 -374
  2. rasa/__init__.py +1 -2
  3. rasa/__main__.py +5 -0
  4. rasa/anonymization/anonymization_rule_executor.py +2 -2
  5. rasa/api.py +27 -23
  6. rasa/cli/arguments/data.py +27 -2
  7. rasa/cli/arguments/default_arguments.py +25 -3
  8. rasa/cli/arguments/run.py +9 -9
  9. rasa/cli/arguments/train.py +11 -3
  10. rasa/cli/data.py +70 -8
  11. rasa/cli/e2e_test.py +104 -431
  12. rasa/cli/evaluate.py +1 -1
  13. rasa/cli/interactive.py +1 -0
  14. rasa/cli/llm_fine_tuning.py +398 -0
  15. rasa/cli/project_templates/calm/endpoints.yml +1 -1
  16. rasa/cli/project_templates/tutorial/endpoints.yml +1 -1
  17. rasa/cli/run.py +15 -14
  18. rasa/cli/scaffold.py +10 -8
  19. rasa/cli/studio/studio.py +35 -5
  20. rasa/cli/train.py +56 -8
  21. rasa/cli/utils.py +22 -5
  22. rasa/cli/x.py +1 -1
  23. rasa/constants.py +7 -1
  24. rasa/core/actions/action.py +98 -49
  25. rasa/core/actions/action_run_slot_rejections.py +4 -1
  26. rasa/core/actions/custom_action_executor.py +9 -6
  27. rasa/core/actions/direct_custom_actions_executor.py +80 -0
  28. rasa/core/actions/e2e_stub_custom_action_executor.py +68 -0
  29. rasa/core/actions/grpc_custom_action_executor.py +2 -2
  30. rasa/core/actions/http_custom_action_executor.py +6 -5
  31. rasa/core/agent.py +21 -17
  32. rasa/core/channels/__init__.py +2 -0
  33. rasa/core/channels/audiocodes.py +1 -16
  34. rasa/core/channels/voice_aware/__init__.py +0 -0
  35. rasa/core/channels/voice_aware/jambonz.py +103 -0
  36. rasa/core/channels/voice_aware/jambonz_protocol.py +344 -0
  37. rasa/core/channels/voice_aware/utils.py +20 -0
  38. rasa/core/channels/voice_native/__init__.py +0 -0
  39. rasa/core/constants.py +6 -1
  40. rasa/core/information_retrieval/faiss.py +7 -4
  41. rasa/core/information_retrieval/information_retrieval.py +8 -0
  42. rasa/core/information_retrieval/milvus.py +9 -2
  43. rasa/core/information_retrieval/qdrant.py +1 -1
  44. rasa/core/nlg/contextual_response_rephraser.py +32 -10
  45. rasa/core/nlg/summarize.py +4 -3
  46. rasa/core/policies/enterprise_search_policy.py +113 -45
  47. rasa/core/policies/flows/flow_executor.py +122 -76
  48. rasa/core/policies/intentless_policy.py +83 -29
  49. rasa/core/processor.py +72 -54
  50. rasa/core/run.py +5 -4
  51. rasa/core/tracker_store.py +8 -4
  52. rasa/core/training/interactive.py +1 -1
  53. rasa/core/utils.py +56 -57
  54. rasa/dialogue_understanding/coexistence/llm_based_router.py +53 -13
  55. rasa/dialogue_understanding/commands/__init__.py +6 -0
  56. rasa/dialogue_understanding/commands/restart_command.py +58 -0
  57. rasa/dialogue_understanding/commands/session_start_command.py +59 -0
  58. rasa/dialogue_understanding/commands/utils.py +40 -0
  59. rasa/dialogue_understanding/generator/constants.py +10 -3
  60. rasa/dialogue_understanding/generator/flow_retrieval.py +21 -5
  61. rasa/dialogue_understanding/generator/llm_based_command_generator.py +13 -3
  62. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +134 -90
  63. rasa/dialogue_understanding/generator/nlu_command_adapter.py +47 -7
  64. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +127 -41
  65. rasa/dialogue_understanding/patterns/restart.py +37 -0
  66. rasa/dialogue_understanding/patterns/session_start.py +37 -0
  67. rasa/dialogue_understanding/processor/command_processor.py +16 -3
  68. rasa/dialogue_understanding/processor/command_processor_component.py +6 -2
  69. rasa/e2e_test/aggregate_test_stats_calculator.py +134 -0
  70. rasa/e2e_test/assertions.py +1223 -0
  71. rasa/e2e_test/assertions_schema.yml +106 -0
  72. rasa/e2e_test/constants.py +20 -0
  73. rasa/e2e_test/e2e_config.py +220 -0
  74. rasa/e2e_test/e2e_config_schema.yml +26 -0
  75. rasa/e2e_test/e2e_test_case.py +131 -8
  76. rasa/e2e_test/e2e_test_converter.py +363 -0
  77. rasa/e2e_test/e2e_test_converter_prompt.jinja2 +70 -0
  78. rasa/e2e_test/e2e_test_coverage_report.py +364 -0
  79. rasa/e2e_test/e2e_test_result.py +26 -6
  80. rasa/e2e_test/e2e_test_runner.py +493 -71
  81. rasa/e2e_test/e2e_test_schema.yml +96 -0
  82. rasa/e2e_test/pykwalify_extensions.py +39 -0
  83. rasa/e2e_test/stub_custom_action.py +70 -0
  84. rasa/e2e_test/utils/__init__.py +0 -0
  85. rasa/e2e_test/utils/e2e_yaml_utils.py +55 -0
  86. rasa/e2e_test/utils/io.py +598 -0
  87. rasa/e2e_test/utils/validation.py +80 -0
  88. rasa/engine/graph.py +9 -3
  89. rasa/engine/recipes/default_components.py +0 -2
  90. rasa/engine/recipes/default_recipe.py +10 -2
  91. rasa/engine/storage/local_model_storage.py +40 -12
  92. rasa/engine/validation.py +78 -1
  93. rasa/env.py +9 -0
  94. rasa/graph_components/providers/story_graph_provider.py +59 -6
  95. rasa/llm_fine_tuning/__init__.py +0 -0
  96. rasa/llm_fine_tuning/annotation_module.py +241 -0
  97. rasa/llm_fine_tuning/conversations.py +144 -0
  98. rasa/llm_fine_tuning/llm_data_preparation_module.py +178 -0
  99. rasa/llm_fine_tuning/notebooks/unsloth_finetuning.ipynb +407 -0
  100. rasa/llm_fine_tuning/paraphrasing/__init__.py +0 -0
  101. rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +281 -0
  102. rasa/llm_fine_tuning/paraphrasing/default_rephrase_prompt_template.jina2 +44 -0
  103. rasa/llm_fine_tuning/paraphrasing/rephrase_validator.py +121 -0
  104. rasa/llm_fine_tuning/paraphrasing/rephrased_user_message.py +10 -0
  105. rasa/llm_fine_tuning/paraphrasing_module.py +128 -0
  106. rasa/llm_fine_tuning/storage.py +174 -0
  107. rasa/llm_fine_tuning/train_test_split_module.py +441 -0
  108. rasa/model_training.py +56 -16
  109. rasa/nlu/persistor.py +157 -36
  110. rasa/server.py +45 -10
  111. rasa/shared/constants.py +76 -16
  112. rasa/shared/core/domain.py +27 -19
  113. rasa/shared/core/events.py +28 -2
  114. rasa/shared/core/flows/flow.py +208 -13
  115. rasa/shared/core/flows/flow_path.py +84 -0
  116. rasa/shared/core/flows/flows_list.py +33 -11
  117. rasa/shared/core/flows/flows_yaml_schema.json +269 -193
  118. rasa/shared/core/flows/validation.py +112 -25
  119. rasa/shared/core/flows/yaml_flows_io.py +149 -10
  120. rasa/shared/core/trackers.py +6 -0
  121. rasa/shared/core/training_data/structures.py +20 -0
  122. rasa/shared/core/training_data/visualization.html +2 -2
  123. rasa/shared/exceptions.py +4 -0
  124. rasa/shared/importers/importer.py +64 -16
  125. rasa/shared/nlu/constants.py +2 -0
  126. rasa/shared/providers/_configs/__init__.py +0 -0
  127. rasa/shared/providers/_configs/azure_openai_client_config.py +183 -0
  128. rasa/shared/providers/_configs/client_config.py +57 -0
  129. rasa/shared/providers/_configs/default_litellm_client_config.py +130 -0
  130. rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py +234 -0
  131. rasa/shared/providers/_configs/openai_client_config.py +175 -0
  132. rasa/shared/providers/_configs/self_hosted_llm_client_config.py +176 -0
  133. rasa/shared/providers/_configs/utils.py +101 -0
  134. rasa/shared/providers/_ssl_verification_utils.py +124 -0
  135. rasa/shared/providers/embedding/__init__.py +0 -0
  136. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +259 -0
  137. rasa/shared/providers/embedding/_langchain_embedding_client_adapter.py +74 -0
  138. rasa/shared/providers/embedding/azure_openai_embedding_client.py +277 -0
  139. rasa/shared/providers/embedding/default_litellm_embedding_client.py +102 -0
  140. rasa/shared/providers/embedding/embedding_client.py +90 -0
  141. rasa/shared/providers/embedding/embedding_response.py +41 -0
  142. rasa/shared/providers/embedding/huggingface_local_embedding_client.py +191 -0
  143. rasa/shared/providers/embedding/openai_embedding_client.py +172 -0
  144. rasa/shared/providers/llm/__init__.py +0 -0
  145. rasa/shared/providers/llm/_base_litellm_client.py +251 -0
  146. rasa/shared/providers/llm/azure_openai_llm_client.py +338 -0
  147. rasa/shared/providers/llm/default_litellm_llm_client.py +84 -0
  148. rasa/shared/providers/llm/llm_client.py +76 -0
  149. rasa/shared/providers/llm/llm_response.py +50 -0
  150. rasa/shared/providers/llm/openai_llm_client.py +155 -0
  151. rasa/shared/providers/llm/self_hosted_llm_client.py +293 -0
  152. rasa/shared/providers/mappings.py +75 -0
  153. rasa/shared/utils/cli.py +30 -0
  154. rasa/shared/utils/io.py +65 -2
  155. rasa/shared/utils/llm.py +246 -200
  156. rasa/shared/utils/yaml.py +121 -15
  157. rasa/studio/auth.py +6 -4
  158. rasa/studio/config.py +13 -4
  159. rasa/studio/constants.py +1 -0
  160. rasa/studio/data_handler.py +10 -3
  161. rasa/studio/download.py +19 -13
  162. rasa/studio/train.py +2 -3
  163. rasa/studio/upload.py +19 -11
  164. rasa/telemetry.py +113 -58
  165. rasa/tracing/instrumentation/attribute_extractors.py +32 -17
  166. rasa/utils/common.py +18 -19
  167. rasa/utils/endpoints.py +7 -4
  168. rasa/utils/json_utils.py +60 -0
  169. rasa/utils/licensing.py +9 -1
  170. rasa/utils/ml_utils.py +4 -2
  171. rasa/validator.py +213 -3
  172. rasa/version.py +1 -1
  173. rasa_pro-3.10.16.dist-info/METADATA +196 -0
  174. {rasa_pro-3.9.18.dist-info → rasa_pro-3.10.16.dist-info}/RECORD +179 -113
  175. rasa/nlu/classifiers/llm_intent_classifier.py +0 -519
  176. rasa/shared/providers/openai/clients.py +0 -43
  177. rasa/shared/providers/openai/session_handler.py +0 -110
  178. rasa_pro-3.9.18.dist-info/METADATA +0 -563
  179. /rasa/{shared/providers/openai → cli/project_templates/tutorial/actions}/__init__.py +0 -0
  180. /rasa/cli/project_templates/tutorial/{actions.py → actions/actions.py} +0 -0
  181. {rasa_pro-3.9.18.dist-info → rasa_pro-3.10.16.dist-info}/NOTICE +0 -0
  182. {rasa_pro-3.9.18.dist-info → rasa_pro-3.10.16.dist-info}/WHEEL +0 -0
  183. {rasa_pro-3.9.18.dist-info → rasa_pro-3.10.16.dist-info}/entry_points.txt +0 -0
rasa/telemetry.py CHANGED
@@ -122,8 +122,6 @@ TELEMETRY_INTENTLESS_POLICY_TRAINING_COMPLETED_EVENT = (
     "Intentless Policy Training Completed"
 )
 TELEMETRY_INTENTLESS_POLICY_PREDICT_EVENT = "Intentless Policy Predicted"
-TELEMETRY_LLM_INTENT_PREDICT_EVENT = "LLM Intent Predicted"
-TELEMETRY_LLM_INTENT_TRAIN_COMPLETED_EVENT = "LLM Intent Training Completed"
 TELEMETRY_E2E_TEST_RUN_STARTED_EVENT = "E2E Test Run Started"
 TELEMETRY_ENTERPRISE_SEARCH_POLICY_TRAINING_STARTED_EVENT = (
     "Enterprise Search Policy Training Started"
@@ -172,6 +170,11 @@ TRACING_BACKEND = "tracing_backend"
 METRICS_BACKEND = "metrics_backend"
 VERSION = "version"
 
+# E2E test conversion
+TELEMETRY_E2E_TEST_CONVERSION_EVENT = "E2E Test Conversion Completed"
+E2E_TEST_CONVERSION_FILE_TYPE = "file_type"
+E2E_TEST_CONVERSION_TEST_CASE_COUNT = "test_case_count"
+
 
 def print_telemetry_reporting_info() -> None:
     """Print telemetry information to std out."""
@@ -1078,6 +1081,7 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
     retrieval is enabled, and flow retrieval embedding model.
     """
     from rasa.shared.constants import (
+        EMBEDDINGS_CONFIG_KEY,
         MODEL_CONFIG_KEY,
         MODEL_NAME_CONFIG_KEY,
     )
@@ -1116,13 +1120,17 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
     llm_model_name = (
         llm_config.get(MODEL_CONFIG_KEY)
         or llm_config.get(MODEL_NAME_CONFIG_KEY)
-        or DEFAULT_LLM_CONFIG[MODEL_NAME_CONFIG_KEY]
+        or DEFAULT_LLM_CONFIG[MODEL_CONFIG_KEY]
     )
     flow_retrieval_config = component.get(FLOW_RETRIEVAL_KEY, {})
     flow_retrieval_enabled = flow_retrieval_config.get("active", True)
+    flow_retrieval_embeddings_config = flow_retrieval_config.get(
+        EMBEDDINGS_CONFIG_KEY, DEFAULT_EMBEDDINGS_CONFIG
+    )
     flow_retrieval_embedding_model_name = (
-        flow_retrieval_config.get("embeddings", DEFAULT_EMBEDDINGS_CONFIG).get(
-            "model"
+        (
+            flow_retrieval_embeddings_config.get(MODEL_NAME_CONFIG_KEY)
+            or flow_retrieval_embeddings_config.get(MODEL_CONFIG_KEY)
         )
         if flow_retrieval_enabled
         else None
@@ -1439,6 +1447,76 @@ track_markers_parsed_count(
     )
 
 
+def extract_assertion_type_counts(
+    input_test_cases: List["TestCase"],
+) -> typing.Tuple[bool, Dict[str, Any]]:
+    """Extracts the total count of different assertion types from the test cases."""
+    from rasa.e2e_test.assertions import AssertionType
+
+    uses_assertions = False
+
+    flow_started_count = 0
+    flow_completed_count = 0
+    flow_cancelled_count = 0
+    pattern_clarification_contains_count = 0
+    action_executed_count = 0
+    slot_was_set_count = 0
+    slot_was_not_set_count = 0
+    bot_uttered_count = 0
+    generative_response_is_relevant_count = 0
+    generative_response_is_grounded_count = 0
+
+    for test_case in input_test_cases:
+        for step in test_case.steps:
+            assertions = step.assertions if step.assertions else []
+            for assertion in assertions:
+                if assertion.type == AssertionType.ACTION_EXECUTED.value:
+                    action_executed_count += 1
+                elif assertion.type == AssertionType.SLOT_WAS_SET.value:
+                    slot_was_set_count += 1
+                elif assertion.type == AssertionType.SLOT_WAS_NOT_SET.value:
+                    slot_was_not_set_count += 1
+                elif assertion.type == AssertionType.BOT_UTTERED.value:
+                    bot_uttered_count += 1
+                elif (
+                    assertion.type
+                    == AssertionType.GENERATIVE_RESPONSE_IS_RELEVANT.value
+                ):
+                    generative_response_is_relevant_count += 1
+                elif (
+                    assertion.type
+                    == AssertionType.GENERATIVE_RESPONSE_IS_GROUNDED.value
+                ):
+                    generative_response_is_grounded_count += 1
+                elif assertion.type == AssertionType.FLOW_STARTED.value:
+                    flow_started_count += 1
+                elif assertion.type == AssertionType.FLOW_COMPLETED.value:
+                    flow_completed_count += 1
+                elif assertion.type == AssertionType.FLOW_CANCELLED.value:
+                    flow_cancelled_count += 1
+                elif (
+                    assertion.type == AssertionType.PATTERN_CLARIFICATION_CONTAINS.value
+                ):
+                    pattern_clarification_contains_count += 1
+
+                uses_assertions = True
+
+    result = {
+        "flow_started_count": flow_started_count,
+        "flow_completed_count": flow_completed_count,
+        "flow_cancelled_count": flow_cancelled_count,
+        "pattern_clarification_contains_count": pattern_clarification_contains_count,
+        "action_executed_count": action_executed_count,
+        "slot_was_set_count": slot_was_set_count,
+        "slot_was_not_set_count": slot_was_not_set_count,
+        "bot_uttered_count": bot_uttered_count,
+        "generative_response_is_relevant_count": generative_response_is_relevant_count,
+        "generative_response_is_grounded_count": generative_response_is_grounded_count,
+    }
+
+    return uses_assertions, result
+
+
 @ensure_telemetry_enabled
 def track_e2e_test_run(
     input_test_cases: List["TestCase"],
@@ -1446,15 +1524,26 @@ def track_e2e_test_run(
     input_metadata: List["Metadata"],
 ) -> None:
     """Track an end-to-end test run."""
+    properties = {
+        "number_of_test_cases": len(input_test_cases),
+        "number_of_fixtures": len(input_fixtures),
+        "uses_fixtures": len(input_fixtures) > 0,
+        "uses_metadata": len(input_metadata) > 0,
+        "number_of_metadata": len(input_metadata),
+    }
+
+    uses_assertions, assertion_type_counts = extract_assertion_type_counts(
+        input_test_cases
+    )
+
+    properties.update({"uses_assertions": uses_assertions})
+
+    if uses_assertions:
+        properties.update(assertion_type_counts)
+
     _track(
         TELEMETRY_E2E_TEST_RUN_STARTED_EVENT,
-        {
-            "number_of_test_cases": len(input_test_cases),
-            "number_of_fixtures": len(input_fixtures),
-            "uses_fixtures": len(input_fixtures) > 0,
-            "uses_metadata": len(input_metadata) > 0,
-            "number_of_metadata": len(input_metadata),
-        },
+        properties,
     )
 
 
@@ -1523,52 +1612,6 @@ def track_intentless_policy_predict(
     )
 
 
-@ensure_telemetry_enabled
-def track_llm_intent_predict(
-    embeddings_type: Optional[str],
-    embeddings_model: Optional[str],
-    llm_type: Optional[str],
-    llm_model: Optional[str],
-) -> None:
-    """Track when a user predicts an intent using the llm intent classifier."""
-    _track(
-        TELEMETRY_LLM_INTENT_PREDICT_EVENT,
-        {
-            "embeddings_type": embeddings_type,
-            "embeddings_model": embeddings_model,
-            "llm_type": llm_type,
-            "llm_model": llm_model,
-        },
-    )
-
-
-@ensure_telemetry_enabled
-def track_llm_intent_train_completed(
-    embeddings_type: Optional[str],
-    embeddings_model: Optional[str],
-    llm_type: Optional[str],
-    llm_model: Optional[str],
-    fallback_intent: Optional[str],
-    custom_prompt_template: Optional[str],
-    number_of_examples: int,
-    number_of_available_intents: int,
-) -> None:
-    """Track when a user trains the llm intent classifier."""
-    _track(
-        TELEMETRY_LLM_INTENT_TRAIN_COMPLETED_EVENT,
-        {
-            "embeddings_type": embeddings_type,
-            "embeddings_model": embeddings_model,
-            "llm_type": llm_type,
-            "llm_model": llm_model,
-            "fallback_intent": fallback_intent,
-            "custom_prompt_template": custom_prompt_template,
-            "number_of_examples": number_of_examples,
-            "number_of_available_intents": number_of_available_intents,
-        },
-    )
-
-
 @ensure_telemetry_enabled
 def identify_endpoint_config_traits(
     endpoints_file: Optional[Text],
@@ -1735,3 +1778,15 @@ def track_conversation_count(conversation_count: int, tracked_month: datetime) -
             "month": tracked_month.month,
         },
     )
+
+
+@ensure_telemetry_enabled
+def track_e2e_test_conversion_completed(file_type: str, test_case_count: int) -> None:
+    """Track the used input file type for E2E test conversion."""
+    _track(
+        TELEMETRY_E2E_TEST_CONVERSION_EVENT,
+        {
+            E2E_TEST_CONVERSION_FILE_TYPE: file_type,
+            E2E_TEST_CONVERSION_TEST_CASE_COUNT: test_case_count,
+        },
+    )
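
The new conversion event is reported through the helper added above, presumably from the new E2E test conversion code introduced in this release. A minimal, hypothetical caller (the sample values are illustrative, not taken from the diff):

    # Hypothetical caller, for illustration only.
    import rasa.telemetry as telemetry

    converted = ["case_1", "case_2"]  # placeholder for converted test cases
    telemetry.track_e2e_test_conversion_completed(
        file_type=".csv",  # extension of the converted input file
        test_case_count=len(converted),
    )
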
rasa/tracing/instrumentation/attribute_extractors.py CHANGED
@@ -7,6 +7,7 @@ import tiktoken
 from numpy import ndarray
 from rasa_sdk.grpc_py import action_webhook_pb2
 
+from rasa.core.actions.action import DirectCustomActionExecutor
 from rasa.core.actions.grpc_custom_action_executor import GRPCCustomActionExecutor
 from rasa.core.actions.http_custom_action_executor import HTTPCustomActionExecutor
 from rasa.core.agent import Agent
@@ -20,6 +21,13 @@ from rasa.dialogue_understanding.commands import Command
 from rasa.dialogue_understanding.stack.dialogue_stack import DialogueStack
 from rasa.engine.graph import ExecutionContext, GraphModelConfiguration, GraphNode
 from rasa.engine.training.graph_trainer import GraphTrainer
+from rasa.shared.constants import (
+    EMBEDDINGS_CONFIG_KEY,
+    MODEL_CONFIG_KEY,
+    PROVIDER_CONFIG_KEY,
+    TIMEOUT_CONFIG_KEY,
+    DEPLOYMENT_CONFIG_KEY,
+)
 from rasa.shared.core.constants import REQUESTED_SLOT
 from rasa.shared.core.domain import Domain
 from rasa.shared.core.events import DialogueStackUpdated, Event
@@ -27,11 +35,14 @@ from rasa.shared.core.flows import Flow, FlowsList, FlowStep
 from rasa.shared.core.trackers import DialogueStateTracker
 from rasa.shared.importers.importer import TrainingDataImporter
 from rasa.shared.nlu.constants import INTENT_NAME_KEY, SET_SLOT_COMMAND
-from rasa.shared.utils.llm import combine_custom_and_default_config
+from rasa.shared.utils.llm import (
+    combine_custom_and_default_config,
+)
 from rasa.tracing.constants import (
     PROMPT_TOKEN_LENGTH_ATTRIBUTE_NAME,
     REQUEST_BODY_SIZE_IN_BYTES_ATTRIBUTE_NAME,
 )
+from rasa.shared.core.training_data.structures import StoryGraph
 
 if TYPE_CHECKING:
     from langchain.llms.base import BaseLLM
@@ -249,10 +260,7 @@ def extract_attrs_for_graph_trainer(
     }
 
 
-def extract_headers(
-    message: UserMessage,
-    **kwargs: Any,
-) -> Any:
+def extract_headers(message: UserMessage, **kwargs: Any) -> Any:
     """Extract the headers from the `UserMessage`."""
     if message.headers:
         return message.headers
@@ -304,18 +312,15 @@ def extract_llm_config(self: Any, default_llm_config: Dict[str, Any]) -> Dict[st
 
     attributes = {
         "class_name": self.__class__.__name__,
-        "llm_model": str(config.get("model", llm_property.get("model_name"))),
-        "llm_type": str(llm_property.get("_type")),
-        "embeddings": json.dumps(config.get("embeddings", {})),
+        "llm_model": str(llm_property.get(MODEL_CONFIG_KEY)),
+        "llm_type": str(llm_property.get(PROVIDER_CONFIG_KEY)),
+        "embeddings": json.dumps(config.get(EMBEDDINGS_CONFIG_KEY, {})),
         "llm_temperature": str(llm_property.get("temperature")),
-        "request_timeout": str(llm_property.get("request_timeout")),
+        "request_timeout": str(llm_property.get(TIMEOUT_CONFIG_KEY)),
     }
 
-    if "model" in llm_property:
-        attributes["llm_model"] = str(llm_property.get("model"))
-
-    if "engine" in llm_property:
-        attributes["llm_engine"] = str(llm_property.get("engine"))
+    if DEPLOYMENT_CONFIG_KEY in llm_property:
+        attributes["llm_engine"] = str(llm_property.get(DEPLOYMENT_CONFIG_KEY))
 
     return attributes
 
@@ -370,6 +375,7 @@ def extract_attrs_for_execute_commands(
     tracker: DialogueStateTracker,
     all_flows: FlowsList,
     execution_context: ExecutionContext,
+    story_graph: Optional[StoryGraph] = None,
 ) -> Dict[str, Any]:
     return {
         "number_of_events": len(tracker.events),
412
418
  tracker: DialogueStateTracker,
413
419
  all_flows: FlowsList,
414
420
  execution_context: ExecutionContext,
421
+ story_graph: Optional[StoryGraph] = None,
415
422
  ) -> Dict[str, Any]:
416
423
  commands_list = []
417
424
 
@@ -643,16 +650,24 @@ def extend_attributes_with_prompt_tokens_length(
 
 
 def extract_attrs_for_custom_action_executor_run(
-    self: Union[HTTPCustomActionExecutor, GRPCCustomActionExecutor],
+    self: Union[
+        HTTPCustomActionExecutor, GRPCCustomActionExecutor, DirectCustomActionExecutor
+    ],
     tracker: DialogueStateTracker,
     domain: Domain,
     include_domain: bool = False,
 ) -> Dict[str, Any]:
+    actions_module, url = None, None
+    if hasattr(self, "action_endpoint"):
+        url = self.action_endpoint.url
+        actions_module = self.action_endpoint.actions_module
+
     attrs: Dict[str, Any] = {
         "class_name": self.__class__.__name__,
-        "action_name": self.action_name if hasattr(self, "action_name") else "None",
+        "action_name": self.action_name,
         "sender_id": tracker.sender_id,
-        "url": self.action_endpoint.url if hasattr(self, "action_endpoint") else "None",
+        "url": str(url),
+        "actions_module": str(actions_module),
     }
     return attrs
 
rasa/utils/common.py CHANGED
@@ -32,6 +32,7 @@ from rasa.constants import (
     DEFAULT_LOG_LEVEL_LIBRARIES,
     ENV_LOG_LEVEL_LIBRARIES,
     ENV_LOG_LEVEL_MATPLOTLIB,
+    ENV_LOG_LEVEL_MLFLOW,
     ENV_LOG_LEVEL_RABBITMQ,
     ENV_LOG_LEVEL_KAFKA,
 )
@@ -44,12 +45,6 @@ logger = logging.getLogger(__name__)
 
 T = TypeVar("T")
 
-EXPECTED_PILLOW_DEPRECATION_WARNINGS: List[Tuple[Type[Warning], str]] = [
-    # Keras uses deprecated Pillow features
-    # cf. https://github.com/keras-team/keras/issues/16639
-    (DeprecationWarning, f"{method} is deprecated and will be removed in Pillow 10 .*")
-    for method in ["BICUBIC", "NEAREST", "BILINEAR", "HAMMING", "BOX", "LANCZOS"]
-]
 
 EXPECTED_WARNINGS: List[Tuple[Type[Warning], str]] = [
     # TODO (issue #9932)
@@ -57,22 +52,17 @@ EXPECTED_WARNINGS: List[Tuple[Type[Warning], str]] = [
         np.VisibleDeprecationWarning,
         "Creating an ndarray from ragged nested sequences.*",
     ),
-    # raised by langchain -> faiss
-    (
-        DeprecationWarning,
-        "distutils Version classes are deprecated. Use packaging.version instead",
-    ),
     # raised by pycountry (rasa-plus anonymization), magic_filter, google rpc
     # and probably other dependencies that use pkg_resources instead of importlib
     (DeprecationWarning, ".*pkg_resources.*"),
-    # This warning is triggered by sanic-cors 2.0.0.
+    # This warning is triggered by sanic-cors 2.0.0 and by langchain -> faiss.
     # The warning can be removed after the packages are updated:
     # sanic-cors: ^2.1.0
     # packaging`: 23.2 (introduces breaking changes)
     # pep440-version-utils (also requires update on packaging)
     (
         DeprecationWarning,
-        "distutils Version classes are deprecated. Use packaging.version instead.",
+        "distutils Version classes are deprecated. Use packaging.version instead",
     ),
     # cf. https://github.com/tensorflow/tensorflow/issues/38168
     (
@@ -81,11 +71,6 @@ EXPECTED_WARNINGS: List[Tuple[Type[Warning], str]] = [
         "shape. This may consume a large amount of memory.",
     ),
     (UserWarning, "Slot auto-fill has been removed in 3.0 .*"),
-    # This warning is caused by the flatbuffers package
-    # The import was fixed on Github, but the latest version
-    # is not available on PyPi, so we cannot pin the newer version.
-    # cf. https://github.com/google/flatbuffers/issues/6957
-    (DeprecationWarning, "the imp module is deprecated in favour of importlib.*"),
     # Cannot fix this deprecation warning since we need to support two
     # numpy versions as long as we keep python 37 around
     (DeprecationWarning, "the `interpolation=` argument to quantile was renamed"),
@@ -102,10 +87,11 @@ EXPECTED_WARNINGS: List[Tuple[Type[Warning], str]] = [
         DeprecationWarning,
         "non-integer arguments to randrange\\(\\) have been deprecated since",
     ),
+    # Ignore Keras DeprecationWarning since it requires that we
+    # upgrade tensorflow-macos to 2.13.0 version.
     (DeprecationWarning, "invalid escape sequence*"),
 ]
 
-EXPECTED_WARNINGS.extend(EXPECTED_PILLOW_DEPRECATION_WARNINGS)
 PYTHON_LOGGING_SCHEMA_DOCS = (
     "https://docs.python.org/3/library/logging.config.html#dictionary-schema-details"
 )
@@ -402,6 +388,19 @@ def update_faker_log_level(library_log_level: Text) -> None:
     logging.getLogger("faker").propagate = False
 
 
+def update_mlflow_log_level() -> None:
+    """Set the log level of mlflow.
+
+    Uses the library specific log level or the general libraries log level.
+    """
+    library_log_level = os.environ.get(
+        ENV_LOG_LEVEL_LIBRARIES, DEFAULT_LOG_LEVEL_LIBRARIES
+    )
+    log_level = os.environ.get(ENV_LOG_LEVEL_MLFLOW, library_log_level)
+    logging.getLogger("mlflow").setLevel(log_level)
+    logging.getLogger("mlflow").propagate = False
+
+
 def sort_list_of_dicts_by_first_key(dicts: List[Dict]) -> List[Dict]:
     """Sorts a list of dictionaries by their first key."""
     return sorted(dicts, key=lambda d: next(iter(d.keys())))
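
A small usage sketch for the new mlflow helper above; the level value is only an example, and the helper falls back to the general libraries log level when the mlflow-specific variable is unset:

    import os

    from rasa.constants import ENV_LOG_LEVEL_MLFLOW
    from rasa.utils.common import update_mlflow_log_level

    # Silence mlflow chatter below WARNING before the helper reads the environment.
    os.environ[ENV_LOG_LEVEL_MLFLOW] = "WARNING"
    update_mlflow_log_level()
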
rasa/utils/endpoints.py CHANGED
@@ -1,14 +1,15 @@
+import os
 import ssl
+from types import ModuleType
+from typing import Any, Optional, Text, Dict, Union
 
 import aiohttp
-import os
+import structlog
 from aiohttp.client_exceptions import ContentTypeError
 from sanic.request import Request
-from typing import Any, Optional, Text, Dict
 
-from rasa.shared.exceptions import FileNotFoundException
-import structlog
 from rasa.core.constants import DEFAULT_REQUEST_TIMEOUT
+from rasa.shared.exceptions import FileNotFoundException
 from rasa.shared.utils.yaml import read_config_file
 
 structlogger = structlog.get_logger()
@@ -87,6 +88,7 @@ class EndpointConfig:
         token: Optional[Text] = None,
         token_name: Text = "token",
         cafile: Optional[Text] = None,
+        actions_module: Optional[Union[Text, ModuleType]] = None,
         **kwargs: Any,
     ) -> None:
         """Creates an `EndpointConfig` instance."""
@@ -98,6 +100,7 @@ class EndpointConfig:
         self.token_name = token_name
         self.type = kwargs.pop("store_type", kwargs.pop("type", None))
         self.cafile = cafile
+        self.actions_module = actions_module
         self.kwargs = kwargs
 
     def session(self) -> aiohttp.ClientSession:
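
The new actions_module parameter stores either a dotted module path or an already imported module on the endpoint config. A minimal sketch of the constructor change above (the module name is made up):

    from rasa.utils.endpoints import EndpointConfig

    # An action endpoint that points at a local actions package rather than a URL.
    action_endpoint = EndpointConfig(actions_module="my_project.actions")
    assert action_endpoint.actions_module == "my_project.actions"
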
rasa/utils/json_utils.py ADDED
@@ -0,0 +1,60 @@
+import json
+from decimal import Decimal
+from typing import Any, Text
+
+
+class DecimalEncoder(json.JSONEncoder):
+    """`json.JSONEncoder` that dumps `Decimal`s as `float`s."""
+
+    def default(self, obj: Any) -> Any:
+        """Get serializable object for `obj`.
+
+        Args:
+            obj: Object to serialize.
+
+        Returns:
+            `obj` converted to `float` if `obj` is a `Decimal`, else the base class
+            `default()` method.
+        """
+        if isinstance(obj, Decimal):
+            return float(obj)
+        return super().default(obj)
+
+
+class SetEncoder(json.JSONEncoder):
+    """`json.JSONEncoder` that dumps `set`s as `list`s."""
+
+    def default(self, obj: Any) -> Any:
+        if isinstance(obj, set):
+            return list(obj)
+        return super().default(obj)
+
+
+def replace_floats_with_decimals(obj: Any, round_digits: int = 9) -> Any:
+    """Convert all instances in `obj` of `float` to `Decimal`.
+
+    Args:
+        obj: Input object.
+        round_digits: Rounding precision of `Decimal` values.
+
+    Returns:
+        Input `obj` with all `float` types replaced by `Decimal`s rounded to
+        `round_digits` decimal places.
+    """
+
+    def _float_to_rounded_decimal(s: Text) -> Decimal:
+        return Decimal(s).quantize(Decimal(10) ** -round_digits)
+
+    return json.loads(json.dumps(obj), parse_float=_float_to_rounded_decimal)
+
+
+def replace_decimals_with_floats(obj: Any) -> Any:
+    """Convert all instances in `obj` of `Decimal` to `float`.
+
+    Args:
+        obj: A `List` or `Dict` object.
+
+    Returns:
+        Input `obj` with all `Decimal` types replaced by `float`s.
+    """
+    return json.loads(json.dumps(obj, cls=DecimalEncoder))
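
A round-trip usage sketch for the new helpers (illustrative values):

    from decimal import Decimal

    from rasa.utils.json_utils import (
        replace_decimals_with_floats,
        replace_floats_with_decimals,
    )

    # Floats become Decimals rounded to 9 places (the default), and back again.
    as_decimals = replace_floats_with_decimals({"confidence": 0.123456789123})
    assert isinstance(as_decimals["confidence"], Decimal)

    as_floats = replace_decimals_with_floats(as_decimals)
    assert isinstance(as_floats["confidence"], float)
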
rasa/utils/licensing.py CHANGED
@@ -293,7 +293,9 @@ def validate_license_from_env(product_area: Text = PRODUCT_AREA) -> None:
                 "Your license is about to expire. "
                 "Please contact Rasa for a renewal."
             ),
-            expiration_date=datetime.utcfromtimestamp(license.exp).isoformat(),
+            expiration_date=datetime.fromtimestamp(
+                license.exp, timezone.utc
+            ).isoformat(),
         )
     except LicenseNotFoundException:
         structlogger.error("license.not_found.error")
@@ -311,6 +313,12 @@ def validate_license_from_env(product_area: Text = PRODUCT_AREA) -> None:
         )
 
 
+def get_license_expiration_date() -> Text:
+    """Return the expiration date of the license."""
+    license_exp = property_of_active_license(lambda active_license: active_license.exp)
+    return datetime.fromtimestamp(license_exp, timezone.utc).isoformat()
+
+
 def is_valid_license_scope(product_area: Text, license_scope: Text) -> bool:
     """Verifies that the license scope matches the rasa-plus product area."""
     required_scopes = derive_scope_hierarchy(product_area)
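
Both changes above replace the naive datetime.utcfromtimestamp() (deprecated since Python 3.12) with a timezone-aware equivalent; in plain stdlib terms the difference is:

    from datetime import datetime, timezone

    ts = 1735689600  # example epoch timestamp

    naive = datetime.utcfromtimestamp(ts)             # deprecated, tzinfo is None
    aware = datetime.fromtimestamp(ts, timezone.utc)  # timezone-aware replacement

    assert naive.tzinfo is None
    assert aware.isoformat().endswith("+00:00")
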
rasa/utils/ml_utils.py CHANGED
@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional, Text
 import numpy as np
 import structlog
 from langchain.schema.embeddings import Embeddings
-from langchain.vectorstores import FAISS
+from langchain_community.vectorstores import FAISS
 from rasa.shared.constants import REQUIRED_SLOTS_KEY
 from rasa.shared.core.domain import KEY_RESPONSES_TEXT, Domain
 from rasa.shared.utils.llm import AI
@@ -34,7 +34,9 @@ def load_faiss_vector_store(path: Path, embedder: Embeddings) -> Optional[FAISS]
         The loaded vector store or None if the path does not exist.
     """
     if path.exists():
-        return FAISS.load_local(str(path), embedder)
+        return FAISS.load_local(
+            str(path), embedder, allow_dangerous_deserialization=True
+        )
     else:
         return None
 