rasa-pro 3.8.18__py3-none-any.whl → 3.9.14__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to the supported registries. It is provided for informational purposes only.

Potentially problematic release: this version of rasa-pro might be problematic.

Files changed (278)
  1. README.md +6 -42
  2. rasa/__main__.py +14 -9
  3. rasa/anonymization/anonymization_pipeline.py +0 -1
  4. rasa/anonymization/anonymization_rule_executor.py +3 -3
  5. rasa/anonymization/utils.py +4 -3
  6. rasa/api.py +2 -2
  7. rasa/cli/arguments/default_arguments.py +1 -1
  8. rasa/cli/arguments/run.py +2 -2
  9. rasa/cli/arguments/test.py +1 -1
  10. rasa/cli/arguments/train.py +10 -10
  11. rasa/cli/e2e_test.py +27 -7
  12. rasa/cli/export.py +0 -1
  13. rasa/cli/license.py +3 -3
  14. rasa/cli/project_templates/calm/actions/action_template.py +1 -1
  15. rasa/cli/project_templates/calm/config.yml +1 -1
  16. rasa/cli/project_templates/calm/credentials.yml +1 -1
  17. rasa/cli/project_templates/calm/data/flows/add_contact.yml +1 -1
  18. rasa/cli/project_templates/calm/data/flows/remove_contact.yml +1 -1
  19. rasa/cli/project_templates/calm/domain/add_contact.yml +8 -2
  20. rasa/cli/project_templates/calm/domain/list_contacts.yml +3 -0
  21. rasa/cli/project_templates/calm/domain/remove_contact.yml +9 -2
  22. rasa/cli/project_templates/calm/domain/shared.yml +5 -0
  23. rasa/cli/project_templates/calm/endpoints.yml +4 -4
  24. rasa/cli/project_templates/default/actions/actions.py +1 -1
  25. rasa/cli/project_templates/default/config.yml +5 -5
  26. rasa/cli/project_templates/default/credentials.yml +1 -1
  27. rasa/cli/project_templates/default/endpoints.yml +4 -4
  28. rasa/cli/project_templates/default/tests/test_stories.yml +1 -1
  29. rasa/cli/project_templates/tutorial/config.yml +1 -1
  30. rasa/cli/project_templates/tutorial/credentials.yml +1 -1
  31. rasa/cli/project_templates/tutorial/data/patterns.yml +6 -0
  32. rasa/cli/project_templates/tutorial/domain.yml +4 -0
  33. rasa/cli/project_templates/tutorial/endpoints.yml +6 -6
  34. rasa/cli/run.py +0 -1
  35. rasa/cli/scaffold.py +3 -2
  36. rasa/cli/studio/download.py +11 -0
  37. rasa/cli/studio/studio.py +180 -24
  38. rasa/cli/studio/upload.py +0 -8
  39. rasa/cli/telemetry.py +18 -6
  40. rasa/cli/utils.py +21 -10
  41. rasa/cli/x.py +3 -2
  42. rasa/constants.py +1 -1
  43. rasa/core/actions/action.py +90 -315
  44. rasa/core/actions/action_exceptions.py +24 -0
  45. rasa/core/actions/constants.py +3 -0
  46. rasa/core/actions/custom_action_executor.py +188 -0
  47. rasa/core/actions/forms.py +11 -7
  48. rasa/core/actions/grpc_custom_action_executor.py +251 -0
  49. rasa/core/actions/http_custom_action_executor.py +140 -0
  50. rasa/core/actions/loops.py +3 -0
  51. rasa/core/actions/two_stage_fallback.py +1 -1
  52. rasa/core/agent.py +2 -4
  53. rasa/core/brokers/pika.py +1 -2
  54. rasa/core/channels/audiocodes.py +1 -1
  55. rasa/core/channels/botframework.py +0 -1
  56. rasa/core/channels/callback.py +0 -1
  57. rasa/core/channels/console.py +6 -8
  58. rasa/core/channels/development_inspector.py +1 -1
  59. rasa/core/channels/facebook.py +0 -3
  60. rasa/core/channels/hangouts.py +0 -6
  61. rasa/core/channels/inspector/dist/assets/{arc-5623b6dc.js → arc-b6e548fe.js} +1 -1
  62. rasa/core/channels/inspector/dist/assets/{c4Diagram-d0fbc5ce-685c106a.js → c4Diagram-d0fbc5ce-fa03ac9e.js} +1 -1
  63. rasa/core/channels/inspector/dist/assets/{classDiagram-936ed81e-8cbed007.js → classDiagram-936ed81e-ee67392a.js} +1 -1
  64. rasa/core/channels/inspector/dist/assets/{classDiagram-v2-c3cb15f1-5889cf12.js → classDiagram-v2-c3cb15f1-9b283fae.js} +1 -1
  65. rasa/core/channels/inspector/dist/assets/{createText-62fc7601-24c249d7.js → createText-62fc7601-8b6fcc2a.js} +1 -1
  66. rasa/core/channels/inspector/dist/assets/{edges-f2ad444c-7dd06a75.js → edges-f2ad444c-22e77f4f.js} +1 -1
  67. rasa/core/channels/inspector/dist/assets/{erDiagram-9d236eb7-62c1e54c.js → erDiagram-9d236eb7-60ffc87f.js} +1 -1
  68. rasa/core/channels/inspector/dist/assets/{flowDb-1972c806-ce49b86f.js → flowDb-1972c806-9dd802e4.js} +1 -1
  69. rasa/core/channels/inspector/dist/assets/{flowDiagram-7ea5b25a-4067e48f.js → flowDiagram-7ea5b25a-5fa1912f.js} +1 -1
  70. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-1844e5a5.js +1 -0
  71. rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-abe16c3d-59fe4051.js → flowchart-elk-definition-abe16c3d-622a1fd2.js} +1 -1
  72. rasa/core/channels/inspector/dist/assets/{ganttDiagram-9b5ea136-47e3a43b.js → ganttDiagram-9b5ea136-e285a63a.js} +1 -1
  73. rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-99d0ae7c-5a2ac0d9.js → gitGraphDiagram-99d0ae7c-f237bdca.js} +1 -1
  74. rasa/core/channels/inspector/dist/assets/{index-2c4b9a3b-dfb8efc4.js → index-2c4b9a3b-4b03d70e.js} +1 -1
  75. rasa/core/channels/inspector/dist/assets/{index-268a75c0.js → index-a5d3e69d.js} +4 -4
  76. rasa/core/channels/inspector/dist/assets/{infoDiagram-736b4530-b0c470f2.js → infoDiagram-736b4530-72a0fa5f.js} +1 -1
  77. rasa/core/channels/inspector/dist/assets/{journeyDiagram-df861f2b-2edb829a.js → journeyDiagram-df861f2b-82218c41.js} +1 -1
  78. rasa/core/channels/inspector/dist/assets/{layout-b6873d69.js → layout-78cff630.js} +1 -1
  79. rasa/core/channels/inspector/dist/assets/{line-1efc5781.js → line-5038b469.js} +1 -1
  80. rasa/core/channels/inspector/dist/assets/{linear-661e9b94.js → linear-c4fc4098.js} +1 -1
  81. rasa/core/channels/inspector/dist/assets/{mindmap-definition-beec6740-2d2e727f.js → mindmap-definition-beec6740-c33c8ea6.js} +1 -1
  82. rasa/core/channels/inspector/dist/assets/{pieDiagram-dbbf0591-9d3ea93d.js → pieDiagram-dbbf0591-a8d03059.js} +1 -1
  83. rasa/core/channels/inspector/dist/assets/{quadrantDiagram-4d7f4fd6-06a178a2.js → quadrantDiagram-4d7f4fd6-6a0e56b2.js} +1 -1
  84. rasa/core/channels/inspector/dist/assets/{requirementDiagram-6fc4c22a-0bfedffc.js → requirementDiagram-6fc4c22a-2dc7c7bd.js} +1 -1
  85. rasa/core/channels/inspector/dist/assets/{sankeyDiagram-8f13d901-d76d0a04.js → sankeyDiagram-8f13d901-2360fe39.js} +1 -1
  86. rasa/core/channels/inspector/dist/assets/{sequenceDiagram-b655622a-37bb4341.js → sequenceDiagram-b655622a-41b9f9ad.js} +1 -1
  87. rasa/core/channels/inspector/dist/assets/{stateDiagram-59f0c015-f52f7f57.js → stateDiagram-59f0c015-0aad326f.js} +1 -1
  88. rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-2b26beab-4a986a20.js → stateDiagram-v2-2b26beab-9847d984.js} +1 -1
  89. rasa/core/channels/inspector/dist/assets/{styles-080da4f6-7dd9ae12.js → styles-080da4f6-564d890e.js} +1 -1
  90. rasa/core/channels/inspector/dist/assets/{styles-3dcbcfbf-46e1ca14.js → styles-3dcbcfbf-38957613.js} +1 -1
  91. rasa/core/channels/inspector/dist/assets/{styles-9c745c82-4a97439a.js → styles-9c745c82-f0fc6921.js} +1 -1
  92. rasa/core/channels/inspector/dist/assets/{svgDrawCommon-4835440b-823917a3.js → svgDrawCommon-4835440b-ef3c5a77.js} +1 -1
  93. rasa/core/channels/inspector/dist/assets/{timeline-definition-5b62e21b-9ea72896.js → timeline-definition-5b62e21b-bf3e91c1.js} +1 -1
  94. rasa/core/channels/inspector/dist/assets/{xychartDiagram-2b33534f-b631a8b6.js → xychartDiagram-2b33534f-4d4026c0.js} +1 -1
  95. rasa/core/channels/inspector/dist/index.html +1 -1
  96. rasa/core/channels/inspector/src/components/DiagramFlow.tsx +10 -0
  97. rasa/core/channels/inspector/src/helpers/formatters.test.ts +4 -7
  98. rasa/core/channels/inspector/src/helpers/formatters.ts +3 -2
  99. rasa/core/channels/rest.py +36 -21
  100. rasa/core/channels/rocketchat.py +0 -1
  101. rasa/core/channels/socketio.py +1 -1
  102. rasa/core/channels/telegram.py +3 -3
  103. rasa/core/channels/webexteams.py +0 -1
  104. rasa/core/concurrent_lock_store.py +1 -1
  105. rasa/core/evaluation/marker_base.py +1 -3
  106. rasa/core/evaluation/marker_stats.py +1 -2
  107. rasa/core/featurizers/single_state_featurizer.py +3 -26
  108. rasa/core/featurizers/tracker_featurizers.py +18 -122
  109. rasa/core/information_retrieval/__init__.py +7 -0
  110. rasa/core/information_retrieval/faiss.py +9 -4
  111. rasa/core/information_retrieval/information_retrieval.py +64 -7
  112. rasa/core/information_retrieval/milvus.py +7 -14
  113. rasa/core/information_retrieval/qdrant.py +8 -15
  114. rasa/core/lock_store.py +0 -1
  115. rasa/core/migrate.py +1 -2
  116. rasa/core/nlg/callback.py +3 -4
  117. rasa/core/policies/enterprise_search_policy.py +86 -22
  118. rasa/core/policies/enterprise_search_prompt_template.jinja2 +4 -41
  119. rasa/core/policies/enterprise_search_prompt_with_citation_template.jinja2 +60 -0
  120. rasa/core/policies/flows/flow_executor.py +104 -2
  121. rasa/core/policies/intentless_policy.py +7 -9
  122. rasa/core/policies/memoization.py +3 -3
  123. rasa/core/policies/policy.py +18 -9
  124. rasa/core/policies/rule_policy.py +8 -11
  125. rasa/core/policies/ted_policy.py +61 -88
  126. rasa/core/policies/unexpected_intent_policy.py +8 -17
  127. rasa/core/processor.py +136 -47
  128. rasa/core/run.py +41 -25
  129. rasa/core/secrets_manager/endpoints.py +2 -2
  130. rasa/core/secrets_manager/vault.py +6 -8
  131. rasa/core/test.py +3 -5
  132. rasa/core/tracker_store.py +49 -14
  133. rasa/core/train.py +1 -3
  134. rasa/core/training/interactive.py +9 -6
  135. rasa/core/utils.py +5 -10
  136. rasa/dialogue_understanding/coexistence/intent_based_router.py +11 -4
  137. rasa/dialogue_understanding/coexistence/llm_based_router.py +2 -3
  138. rasa/dialogue_understanding/commands/__init__.py +4 -0
  139. rasa/dialogue_understanding/commands/can_not_handle_command.py +9 -0
  140. rasa/dialogue_understanding/commands/cancel_flow_command.py +9 -0
  141. rasa/dialogue_understanding/commands/change_flow_command.py +38 -0
  142. rasa/dialogue_understanding/commands/chit_chat_answer_command.py +9 -0
  143. rasa/dialogue_understanding/commands/clarify_command.py +9 -0
  144. rasa/dialogue_understanding/commands/correct_slots_command.py +9 -0
  145. rasa/dialogue_understanding/commands/error_command.py +12 -0
  146. rasa/dialogue_understanding/commands/handle_code_change_command.py +9 -0
  147. rasa/dialogue_understanding/commands/human_handoff_command.py +9 -0
  148. rasa/dialogue_understanding/commands/knowledge_answer_command.py +9 -0
  149. rasa/dialogue_understanding/commands/noop_command.py +9 -0
  150. rasa/dialogue_understanding/commands/set_slot_command.py +34 -3
  151. rasa/dialogue_understanding/commands/skip_question_command.py +9 -0
  152. rasa/dialogue_understanding/commands/start_flow_command.py +9 -0
  153. rasa/dialogue_understanding/generator/__init__.py +16 -1
  154. rasa/dialogue_understanding/generator/command_generator.py +92 -6
  155. rasa/dialogue_understanding/generator/constants.py +18 -0
  156. rasa/dialogue_understanding/generator/flow_retrieval.py +7 -5
  157. rasa/dialogue_understanding/generator/llm_based_command_generator.py +467 -0
  158. rasa/dialogue_understanding/generator/llm_command_generator.py +39 -609
  159. rasa/dialogue_understanding/generator/multi_step/__init__.py +0 -0
  160. rasa/dialogue_understanding/generator/multi_step/fill_slots_prompt.jinja2 +62 -0
  161. rasa/dialogue_understanding/generator/multi_step/handle_flows_prompt.jinja2 +38 -0
  162. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +827 -0
  163. rasa/dialogue_understanding/generator/nlu_command_adapter.py +69 -8
  164. rasa/dialogue_understanding/generator/single_step/__init__.py +0 -0
  165. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +345 -0
  166. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +44 -39
  167. rasa/dialogue_understanding/processor/command_processor.py +111 -3
  168. rasa/e2e_test/constants.py +1 -0
  169. rasa/e2e_test/e2e_test_case.py +44 -0
  170. rasa/e2e_test/e2e_test_runner.py +114 -11
  171. rasa/e2e_test/e2e_test_schema.yml +18 -0
  172. rasa/engine/caching.py +0 -1
  173. rasa/engine/graph.py +18 -6
  174. rasa/engine/recipes/config_files/default_config.yml +3 -3
  175. rasa/engine/recipes/default_components.py +1 -1
  176. rasa/engine/recipes/default_recipe.py +4 -5
  177. rasa/engine/recipes/recipe.py +1 -1
  178. rasa/engine/runner/dask.py +3 -9
  179. rasa/engine/storage/local_model_storage.py +0 -2
  180. rasa/engine/validation.py +179 -145
  181. rasa/exceptions.py +2 -2
  182. rasa/graph_components/validators/default_recipe_validator.py +3 -5
  183. rasa/hooks.py +0 -1
  184. rasa/model.py +1 -1
  185. rasa/model_training.py +1 -0
  186. rasa/nlu/classifiers/diet_classifier.py +33 -52
  187. rasa/nlu/classifiers/logistic_regression_classifier.py +9 -22
  188. rasa/nlu/classifiers/sklearn_intent_classifier.py +16 -37
  189. rasa/nlu/extractors/crf_entity_extractor.py +54 -97
  190. rasa/nlu/extractors/duckling_entity_extractor.py +1 -1
  191. rasa/nlu/featurizers/dense_featurizer/convert_featurizer.py +1 -5
  192. rasa/nlu/featurizers/dense_featurizer/lm_featurizer.py +0 -4
  193. rasa/nlu/featurizers/featurizer.py +1 -1
  194. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +18 -49
  195. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +26 -64
  196. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +3 -5
  197. rasa/nlu/persistor.py +68 -26
  198. rasa/nlu/selectors/response_selector.py +7 -10
  199. rasa/nlu/test.py +0 -3
  200. rasa/nlu/utils/hugging_face/registry.py +1 -1
  201. rasa/nlu/utils/spacy_utils.py +1 -3
  202. rasa/server.py +22 -7
  203. rasa/shared/constants.py +12 -1
  204. rasa/shared/core/command_payload_reader.py +109 -0
  205. rasa/shared/core/constants.py +4 -5
  206. rasa/shared/core/domain.py +57 -56
  207. rasa/shared/core/events.py +4 -7
  208. rasa/shared/core/flows/flow.py +9 -0
  209. rasa/shared/core/flows/flows_list.py +12 -0
  210. rasa/shared/core/flows/steps/action.py +7 -2
  211. rasa/shared/core/generator.py +12 -11
  212. rasa/shared/core/slot_mappings.py +315 -24
  213. rasa/shared/core/slots.py +4 -2
  214. rasa/shared/core/trackers.py +32 -14
  215. rasa/shared/core/training_data/loading.py +0 -1
  216. rasa/shared/core/training_data/story_reader/story_reader.py +3 -3
  217. rasa/shared/core/training_data/story_reader/yaml_story_reader.py +11 -11
  218. rasa/shared/core/training_data/story_writer/yaml_story_writer.py +5 -3
  219. rasa/shared/core/training_data/structures.py +1 -1
  220. rasa/shared/core/training_data/visualization.py +1 -1
  221. rasa/shared/data.py +58 -1
  222. rasa/shared/exceptions.py +36 -2
  223. rasa/shared/importers/importer.py +1 -2
  224. rasa/shared/importers/rasa.py +0 -1
  225. rasa/shared/nlu/constants.py +2 -0
  226. rasa/shared/nlu/training_data/entities_parser.py +1 -2
  227. rasa/shared/nlu/training_data/features.py +2 -120
  228. rasa/shared/nlu/training_data/formats/dialogflow.py +3 -2
  229. rasa/shared/nlu/training_data/formats/rasa_yaml.py +3 -5
  230. rasa/shared/nlu/training_data/formats/readerwriter.py +0 -1
  231. rasa/shared/nlu/training_data/message.py +13 -0
  232. rasa/shared/nlu/training_data/training_data.py +0 -2
  233. rasa/shared/providers/openai/session_handler.py +2 -2
  234. rasa/shared/utils/constants.py +3 -0
  235. rasa/shared/utils/io.py +11 -1
  236. rasa/shared/utils/llm.py +1 -2
  237. rasa/shared/utils/pykwalify_extensions.py +1 -0
  238. rasa/shared/utils/schemas/domain.yml +3 -0
  239. rasa/shared/utils/yaml.py +44 -35
  240. rasa/studio/auth.py +26 -10
  241. rasa/studio/constants.py +2 -0
  242. rasa/studio/data_handler.py +114 -107
  243. rasa/studio/download.py +160 -27
  244. rasa/studio/results_logger.py +137 -0
  245. rasa/studio/train.py +6 -7
  246. rasa/studio/upload.py +159 -134
  247. rasa/telemetry.py +188 -34
  248. rasa/tracing/config.py +18 -3
  249. rasa/tracing/constants.py +26 -2
  250. rasa/tracing/instrumentation/attribute_extractors.py +50 -41
  251. rasa/tracing/instrumentation/instrumentation.py +290 -44
  252. rasa/tracing/instrumentation/intentless_policy_instrumentation.py +7 -5
  253. rasa/tracing/instrumentation/metrics.py +109 -21
  254. rasa/tracing/metric_instrument_provider.py +83 -3
  255. rasa/utils/cli.py +2 -1
  256. rasa/utils/common.py +1 -1
  257. rasa/utils/endpoints.py +1 -2
  258. rasa/utils/io.py +72 -6
  259. rasa/utils/licensing.py +246 -31
  260. rasa/utils/ml_utils.py +1 -1
  261. rasa/utils/tensorflow/data_generator.py +1 -1
  262. rasa/utils/tensorflow/environment.py +1 -1
  263. rasa/utils/tensorflow/model_data.py +201 -12
  264. rasa/utils/tensorflow/model_data_utils.py +499 -500
  265. rasa/utils/tensorflow/models.py +5 -6
  266. rasa/utils/tensorflow/rasa_layers.py +15 -15
  267. rasa/utils/train_utils.py +1 -1
  268. rasa/utils/url_tools.py +53 -0
  269. rasa/validator.py +305 -3
  270. rasa/version.py +1 -1
  271. {rasa_pro-3.8.18.dist-info → rasa_pro-3.9.14.dist-info}/METADATA +25 -61
  272. {rasa_pro-3.8.18.dist-info → rasa_pro-3.9.14.dist-info}/RECORD +276 -259
  273. rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-85583a23.js +0 -1
  274. rasa/utils/tensorflow/feature_array.py +0 -370
  275. /rasa/dialogue_understanding/generator/{command_prompt_template.jinja2 → single_step/command_prompt_template.jinja2} +0 -0
  276. {rasa_pro-3.8.18.dist-info → rasa_pro-3.9.14.dist-info}/NOTICE +0 -0
  277. {rasa_pro-3.8.18.dist-info → rasa_pro-3.9.14.dist-info}/WHEEL +0 -0
  278. {rasa_pro-3.8.18.dist-info → rasa_pro-3.9.14.dist-info}/entry_points.txt +0 -0
rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py CHANGED
@@ -1,32 +1,30 @@
 from __future__ import annotations
-
 import logging
 import re
-from typing import Any, Dict, List, Optional, Text, Tuple, Set, Type, Union
-
-import numpy as np
 import scipy.sparse
-from sklearn.exceptions import NotFittedError
-from sklearn.feature_extraction.text import CountVectorizer
+from typing import Any, Dict, List, Optional, Text, Tuple, Set, Type
+from rasa.nlu.tokenizers.tokenizer import Tokenizer
 
 import rasa.shared.utils.io
 from rasa.engine.graph import GraphComponent, ExecutionContext
 from rasa.engine.recipes.default_recipe import DefaultV1Recipe
 from rasa.engine.storage.resource import Resource
 from rasa.engine.storage.storage import ModelStorage
+from rasa.nlu.featurizers.sparse_featurizer.sparse_featurizer import SparseFeaturizer
+from rasa.nlu.utils.spacy_utils import SpacyModel
+from rasa.shared.constants import DOCS_URL_COMPONENTS
+import rasa.utils.io as io_utils
+from sklearn.exceptions import NotFittedError
+from sklearn.feature_extraction.text import CountVectorizer
+from rasa.shared.nlu.training_data.training_data import TrainingData
+from rasa.shared.nlu.training_data.message import Message
+from rasa.shared.exceptions import RasaException, FileIOException
 from rasa.nlu.constants import (
     TOKENS_NAMES,
     MESSAGE_ATTRIBUTES,
     DENSE_FEATURIZABLE_ATTRIBUTES,
 )
-from rasa.nlu.featurizers.sparse_featurizer.sparse_featurizer import SparseFeaturizer
-from rasa.nlu.tokenizers.tokenizer import Tokenizer
-from rasa.nlu.utils.spacy_utils import SpacyModel
-from rasa.shared.constants import DOCS_URL_COMPONENTS
-from rasa.shared.exceptions import RasaException, FileIOException
 from rasa.shared.nlu.constants import TEXT, INTENT, INTENT_RESPONSE_KEY, ACTION_NAME
-from rasa.shared.nlu.training_data.message import Message
-from rasa.shared.nlu.training_data.training_data import TrainingData
 
 BUFFER_SLOTS_PREFIX = "buf_"
 
@@ -101,7 +99,6 @@ class CountVectorsFeaturizer(SparseFeaturizer, GraphComponent):
         return ["sklearn"]
 
     def _load_count_vect_params(self) -> None:
-
         # Use shared vocabulary between text and all other attributes of Message
         self.use_shared_vocab = self._config["use_shared_vocab"]
 
@@ -342,7 +339,7 @@ class CountVectorsFeaturizer(SparseFeaturizer, GraphComponent):
 
     @staticmethod
     def _convert_attribute_tokens_to_texts(
-        attribute_tokens: Dict[Text, List[List[Text]]]
+        attribute_tokens: Dict[Text, List[List[Text]]],
     ) -> Dict[Text, List[Text]]:
         attribute_texts = {}
 
@@ -661,7 +658,6 @@ class CountVectorsFeaturizer(SparseFeaturizer, GraphComponent):
 
         for message in messages:
             for attribute in self._attributes:
-
                 message_tokens = self._get_processed_message_tokens_by_attribute(
                     message, attribute
                 )
@@ -687,36 +683,11 @@ class CountVectorsFeaturizer(SparseFeaturizer, GraphComponent):
 
     @staticmethod
     def _is_any_model_trained(
-        attribute_vocabularies: Dict[Text, Optional[Dict[Text, int]]]
+        attribute_vocabularies: Dict[Text, Optional[Dict[Text, int]]],
    ) -> bool:
         """Check if any model got trained."""
         return any(value is not None for value in attribute_vocabularies.values())
 
-    @staticmethod
-    def convert_vocab(
-        vocab: Dict[str, Union[int, Optional[Dict[str, int]]]], to_int: bool
-    ) -> Dict[str, Union[None, int, np.int64, Dict[str, Union[int, np.int64]]]]:
-        """Converts numpy integers in the vocabulary to Python integers."""
-
-        def convert_value(value: int) -> Union[int, np.int64]:
-            """Helper function to convert a single value based on to_int flag."""
-            return int(value) if to_int else np.int64(value)
-
-        result_dict: Dict[
-            str, Union[None, int, np.int64, Dict[str, Union[int, np.int64]]]
-        ] = {}
-        for key, sub_dict in vocab.items():
-            if isinstance(sub_dict, int):
-                result_dict[key] = convert_value(sub_dict)
-            elif not sub_dict:
-                result_dict[key] = None
-            else:
-                result_dict[key] = {
-                    sub_key: convert_value(value) for sub_key, value in sub_dict.items()
-                }
-
-        return result_dict
-
     def persist(self) -> None:
         """Persist this model into the passed directory.
 
@@ -730,18 +701,17 @@ class CountVectorsFeaturizer(SparseFeaturizer, GraphComponent):
         attribute_vocabularies = self._collect_vectorizer_vocabularies()
         if self._is_any_model_trained(attribute_vocabularies):
             # Definitely need to persist some vocabularies
-            featurizer_file = model_dir / "vocabularies.json"
+            featurizer_file = model_dir / "vocabularies.pkl"
 
             # Only persist vocabulary from one attribute if `use_shared_vocab`.
             # Can be loaded and distributed to all attributes.
-            loaded_vocab = (
+            vocab = (
                 attribute_vocabularies[TEXT]
                 if self.use_shared_vocab
                 else attribute_vocabularies
             )
-            vocab = self.convert_vocab(loaded_vocab, to_int=True)
 
-            rasa.shared.utils.io.dump_obj_as_json_to_file(featurizer_file, vocab)
+            io_utils.json_pickle(featurizer_file, vocab)
 
             # Dump OOV words separately as they might have been modified during
             # training
@@ -816,9 +786,8 @@ class CountVectorsFeaturizer(SparseFeaturizer, GraphComponent):
         """Loads trained component (see parent class for full docstring)."""
         try:
             with model_storage.read_from(resource) as model_dir:
-                featurizer_file = model_dir / "vocabularies.json"
-                vocabulary = rasa.shared.utils.io.read_json_file(featurizer_file)
-                vocabulary = cls.convert_vocab(vocabulary, to_int=False)
+                featurizer_file = model_dir / "vocabularies.pkl"
+                vocabulary = io_utils.json_unpickle(featurizer_file)
 
                 share_vocabulary = config["use_shared_vocab"]
 
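The persistence change above swaps the JSON dump (which needed `convert_vocab` to turn numpy integers into plain ints) for `rasa.utils.io.json_pickle`. A minimal standalone sketch of the same idea, using the `jsonpickle` library directly rather than Rasa's wrapper; the file name and vocabulary contents here are made up:

from pathlib import Path

import jsonpickle
import jsonpickle.ext.numpy as jsonpickle_numpy
import numpy as np

# Register numpy handlers so np.int64 values can be encoded without manual int() casts.
jsonpickle_numpy.register_handlers()

vocab = {"hello": np.int64(0), "world": np.int64(1)}  # CountVectorizer-style vocabulary

path = Path("vocabularies.pkl")
path.write_text(jsonpickle.encode(vocab))       # persist
restored = jsonpickle.decode(path.read_text())  # load

assert restored == vocab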
rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py CHANGED
@@ -1,7 +1,9 @@
 from __future__ import annotations
-
 import logging
 from collections import OrderedDict
+
+import scipy.sparse
+import numpy as np
 from typing import (
     Any,
     Dict,
@@ -15,34 +17,30 @@ from typing import (
     Union,
 )
 
-import numpy as np
-import scipy.sparse
-
-import rasa.shared.utils.io
-import rasa.utils.io
 from rasa.engine.graph import ExecutionContext, GraphComponent
 from rasa.engine.recipes.default_recipe import DefaultV1Recipe
 from rasa.engine.storage.resource import Resource
 from rasa.engine.storage.storage import ModelStorage
-from rasa.nlu.constants import TOKENS_NAMES
-from rasa.nlu.featurizers.sparse_featurizer.sparse_featurizer import SparseFeaturizer
 from rasa.nlu.tokenizers.spacy_tokenizer import POS_TAG_KEY, SpacyTokenizer
 from rasa.nlu.tokenizers.tokenizer import Token, Tokenizer
+from rasa.nlu.featurizers.sparse_featurizer.sparse_featurizer import SparseFeaturizer
+from rasa.nlu.constants import TOKENS_NAMES
 from rasa.shared.constants import DOCS_URL_COMPONENTS
-from rasa.shared.exceptions import InvalidConfigException
-from rasa.shared.nlu.constants import TEXT
-from rasa.shared.nlu.training_data.message import Message
 from rasa.shared.nlu.training_data.training_data import TrainingData
+from rasa.shared.nlu.training_data.message import Message
+from rasa.shared.nlu.constants import TEXT
+from rasa.shared.exceptions import InvalidConfigException
+import rasa.shared.utils.io
+import rasa.utils.io
 
 logger = logging.getLogger(__name__)
 
+
 END_OF_SENTENCE = "EOS"
 BEGIN_OF_SENTENCE = "BOS"
 
 FEATURES = "features"
 
-SEPERATOR = "###"
-
 
 @DefaultV1Recipe.register(
     DefaultV1Recipe.ComponentType.MESSAGE_FEATURIZER, is_trainable=True
@@ -74,13 +72,11 @@ class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
     of the token at position `t+1`.
     """
 
-    FILENAME_FEATURE_TO_IDX_DICT = "feature_to_idx_dict.json"
+    FILENAME_FEATURE_TO_IDX_DICT = "feature_to_idx_dict.pkl"
 
     # NOTE: "suffix5" of the token "is" will be "is". Hence, when combining multiple
     # prefixes, short words will be represented/encoded repeatedly.
-    _FUNCTION_DICT: Dict[
-        Text, Callable[[Token], Union[Text, bool, None]]
-    ] = {  # noqa: RUF012
+    _FUNCTION_DICT: Dict[Text, Callable[[Token], Union[Text, bool, None]]] = {  # noqa: RUF012
         "low": lambda token: token.text.islower(),
         "title": lambda token: token.text.istitle(),
         "prefix5": lambda token: token.text[:5],
@@ -331,7 +327,6 @@ class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
         assert len(window_range) == window_size
 
         for anchor in range(len(tokens)):
-
             token_features: Dict[Tuple[int, Text], Text] = {}
 
             for window_position, relative_position in enumerate(window_range):
@@ -343,13 +338,13 @@ class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
 
                 token = tokens[absolute_position]
                 for feature_name in self._feature_config[window_position]:
-                    token_features[
-                        (window_position, feature_name)
-                    ] = self._extract_raw_features_from_token(
-                        token=token,
-                        feature_name=feature_name,
-                        token_position=absolute_position,
-                        num_tokens=len(tokens),
+                    token_features[(window_position, feature_name)] = (
+                        self._extract_raw_features_from_token(
+                            token=token,
+                            feature_name=feature_name,
+                            token_position=absolute_position,
+                            num_tokens=len(tokens),
+                        )
                     )
 
             sentence_features.append(token_features)
@@ -358,7 +353,7 @@ class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
 
     @staticmethod
     def _build_feature_to_index_map(
-        feature_vocabulary: Dict[Tuple[int, Text], Set[Text]]
+        feature_vocabulary: Dict[Tuple[int, Text], Set[Text]],
     ) -> Dict[Tuple[int, Text], Dict[Text, int]]:
         """Creates a nested dictionary for mapping raw features to indices.
 
@@ -493,32 +488,6 @@ class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
         """Creates a new untrained component (see parent class for full docstring)."""
         return cls(config, model_storage, resource, execution_context)
 
-    @staticmethod
-    def _restructure_feature_to_idx_dict(
-        loaded_data: Dict[str, Dict[str, int]],
-    ) -> Dict[Tuple[int, str], Dict[str, int]]:
-        """Reconstructs the feature to idx dict.
-
-        When storing the feature_to_idx_dict to disk, we need to convert the tuple (key)
-        into a string to be able to store it via json. When loading the data
-        we need to reconstruct the tuple from the stored string.
-
-        Args:
-            loaded_data: The loaded feature to idx dict from file.
-
-        Returns:
-            The reconstructed feature_to_idx_dict
-        """
-        feature_to_idx_dict = {}
-        for tuple_string, feature_value in loaded_data.items():
-            # Example of tuple_string: "1###low"
-            index, feature_name = tuple_string.split(SEPERATOR)
-
-            feature_key = (int(index), feature_name)
-            feature_to_idx_dict[feature_key] = feature_value
-
-        return feature_to_idx_dict
-
     @classmethod
     def load(
         cls,
@@ -531,13 +500,10 @@ class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
         """Loads trained component (see parent class for full docstring)."""
         try:
             with model_storage.read_from(resource) as model_path:
-                loaded_data = rasa.shared.utils.io.read_json_file(
+                feature_to_idx_dict = rasa.utils.io.json_unpickle(
                     model_path / cls.FILENAME_FEATURE_TO_IDX_DICT,
+                    encode_non_string_keys=True,
                 )
-
-                # convert the key back into tuple
-                feature_to_idx_dict = cls._restructure_feature_to_idx_dict(loaded_data)
-
                 return cls(
                     config=config,
                     model_storage=model_storage,
@@ -562,13 +528,9 @@ class LexicalSyntacticFeaturizer(SparseFeaturizer, GraphComponent):
         if not self._feature_to_idx_dict:
             return None
 
-        # as we cannot dump tuples, convert the tuple into a string
-        restructured_feature_dict = {
-            f"{k[0]}{SEPERATOR}{k[1]}": v for k, v in self._feature_to_idx_dict.items()
-        }
-
         with self._model_storage.write_to(self._resource) as model_path:
-            rasa.shared.utils.io.dump_obj_as_json_to_file(
+            rasa.utils.io.json_pickle(
                 model_path / self.FILENAME_FEATURE_TO_IDX_DICT,
-                restructured_feature_dict,
+                self._feature_to_idx_dict,
+                encode_non_string_keys=True,
             )
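The `feature_to_idx_dict` persisted above is keyed by `(window_position, feature_name)` tuples; the removed helper flattened those tuples into strings like `"1###low"` so they could survive a JSON dump, while the new code passes `encode_non_string_keys=True` to `json_pickle`/`json_unpickle` instead. A rough standalone equivalent with `jsonpickle`, where the `keys=True` flag plays that role (the data values are illustrative):

import jsonpickle

feature_to_idx_dict = {
    (0, "low"): {"True": 0, "False": 1},
    (1, "prefix5"): {"hello": 0, "world": 1},
}

# keys=True makes jsonpickle encode non-string dict keys instead of stringifying them.
serialized = jsonpickle.encode(feature_to_idx_dict, keys=True)
restored = jsonpickle.decode(serialized, keys=True)

assert restored == feature_to_idx_dict
assert isinstance(next(iter(restored)), tuple)  # tuple keys survive the round trip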
rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py CHANGED
@@ -1,13 +1,11 @@
 from __future__ import annotations
-
 import logging
 import re
 from typing import Any, Dict, List, Optional, Text, Tuple, Type
-
 import numpy as np
 import scipy.sparse
-
 from rasa.nlu.tokenizers.tokenizer import Tokenizer
+
 import rasa.shared.utils.io
 import rasa.utils.io
 import rasa.nlu.utils.pattern_utils as pattern_utils
@@ -242,7 +240,7 @@ class RegexFeaturizer(SparseFeaturizer, GraphComponent):
 
         try:
             with model_storage.read_from(resource) as model_dir:
-                patterns_file_name = model_dir / "patterns.json"
+                patterns_file_name = model_dir / "patterns.pkl"
                 known_patterns = rasa.shared.utils.io.read_json_file(patterns_file_name)
         except (ValueError, FileNotFoundError):
             logger.warning(
@@ -260,7 +258,7 @@ class RegexFeaturizer(SparseFeaturizer, GraphComponent):
 
     def _persist(self) -> None:
         with self._model_storage.write_to(self._resource) as model_dir:
-            regex_file = model_dir / "patterns.json"
+            regex_file = model_dir / "patterns.pkl"
             rasa.shared.utils.io.dump_obj_as_json_to_file(
                 regex_file, self.known_patterns
             )
rasa/nlu/persistor.py CHANGED
@@ -1,16 +1,18 @@
 import abc
-import logging
+import structlog
 import os
 import shutil
 from typing import Optional, Text, Tuple, TYPE_CHECKING
 
+from rasa.shared.exceptions import RasaException
+
 import rasa.shared.utils.common
 import rasa.utils.common
 
 if TYPE_CHECKING:
     from azure.storage.blob import ContainerClient
 
-logger = logging.getLogger(__name__)
+structlogger = structlog.get_logger()
 
 
 def get_persistor(name: Text) -> Optional["Persistor"]:
@@ -95,7 +97,6 @@ class Persistor(abc.ABC):
 
     @staticmethod
     def _tar_name(model_name: Text, include_extension: bool = True) -> Text:
-
         ext = ".tar.gz" if include_extension else ""
         return f"{model_name}{ext}"
 
@@ -129,20 +130,36 @@ class AWSPersistor(Persistor):
     def _ensure_bucket_exists(
         self, bucket_name: Text, region_name: Optional[Text] = None
     ) -> None:
-        import boto3
         import botocore
 
-        if not region_name:
-            region_name = boto3.DEFAULT_SESSION.region_name
-
-        bucket_config = {"LocationConstraint": region_name}
         # noinspection PyUnresolvedReferences
         try:
-            self.s3.create_bucket(
-                Bucket=bucket_name, CreateBucketConfiguration=bucket_config
-            )
-        except botocore.exceptions.ClientError:
-            pass  # bucket already exists
+            self.s3.meta.client.head_bucket(Bucket=bucket_name)
+        except botocore.exceptions.ClientError as e:
+            error_code = int(e.response["Error"]["Code"])
+            if error_code == 403:
+                log = (
+                    f"Access to the specified bucket '{bucket_name}' is forbidden. "
+                    "Please make sure you have the necessary "
+                    "permission to access the bucket."
+                )
+                structlogger.error(
+                    "aws_persistor.ensure_bucket_exists.bucket_access_forbidden",
+                    bucket_name=bucket_name,
+                    event_info=log,
+                )
+                raise RasaException(log)
+            elif error_code == 404:
+                log = (
+                    f"The specified bucket '{bucket_name}' does not exist. "
+                    "Please make sure to create the bucket first."
+                )
+                structlogger.error(
+                    "aws_persistor.ensure_bucket_exists.bucket_not_found",
+                    bucket_name=bucket_name,
+                    event_info=log,
+                )
+                raise RasaException(log)
 
     def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
         """Uploads a model persisted in the `target_dir` to s3."""
@@ -180,10 +197,30 @@ class GCSPersistor(Persistor):
         from google.cloud import exceptions
 
         try:
-            self.storage_client.create_bucket(bucket_name)
-        except exceptions.Conflict:
-            # bucket exists
-            pass
+            self.storage_client.get_bucket(bucket_name)
+        except exceptions.NotFound:
+            log = (
+                f"The specified bucket '{bucket_name}' does not exist. "
+                "Please make sure to create the bucket first."
+            )
+            structlogger.error(
+                "gcp_persistor.ensure_bucket_exists.bucket_not_found",
+                bucket_name=bucket_name,
+                event_info=log,
+            )
+            raise RasaException(log)
+        except exceptions.Forbidden:
+            log = (
+                f"Access to the specified bucket '{bucket_name}' is forbidden. "
+                "Please make sure you have the necessary "
+                "permission to access the bucket. "
+            )
+            structlogger.error(
+                "gcp_persistor.ensure_bucket_exists.bucket_access_forbidden",
+                bucket_name=bucket_name,
+                event_info=log,
+            )
+            raise RasaException(log)
 
     def _persist_tar(self, file_key: Text, tar_path: Text) -> None:
         """Uploads a model persisted in the `target_dir` to GCS."""
@@ -210,18 +247,23 @@ class AzurePersistor(Persistor):
             account_url=f"https://{azure_account_name}.blob.core.windows.net/",
             credential=azure_account_key,
         )
-
-        self._ensure_container_exists(azure_container)
         self.container_name = azure_container
+        self._ensure_container_exists()
 
-    def _ensure_container_exists(self, container_name: Text) -> None:
-        from azure.core.exceptions import ResourceExistsError
-
-        try:
-            self.blob_service.create_container(container_name)
-        except ResourceExistsError:
-            # no need to create the container, it already exists
+    def _ensure_container_exists(self) -> None:
+        if self._container_client().exists():
             pass
+        else:
+            log = (
+                f"The specified container '{self.container_name}' does not exist."
+                "Please make sure to create the container first."
+            )
+            structlogger.error(
+                "azure_persistor.ensure_container_exists.container_not_found",
+                container_name=self.container_name,
+                event_info=log,
+            )
+            raise RasaException(log)
 
     def _container_client(self) -> "ContainerClient":
         return self.blob_service.get_container_client(self.container_name)
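With the persistor changes above, the cloud storage backends no longer silently create a missing bucket or container: they verify that it exists and that the credentials can reach it, and raise a `RasaException` otherwise. A minimal sketch of the same check using plain `boto3`/`botocore`; the bucket name and the exception type raised here are illustrative, not Rasa's:

import boto3
import botocore


def ensure_bucket_exists(bucket_name: str) -> None:
    s3 = boto3.resource("s3")
    try:
        # HEAD request: a cheap way to check both existence and access permission.
        s3.meta.client.head_bucket(Bucket=bucket_name)
    except botocore.exceptions.ClientError as e:
        error_code = int(e.response["Error"]["Code"])
        if error_code == 403:
            raise RuntimeError(f"Access to bucket '{bucket_name}' is forbidden.") from e
        if error_code == 404:
            raise RuntimeError(f"Bucket '{bucket_name}' does not exist.") from e
        raise


ensure_bucket_exists("my-rasa-models")  # fails loudly instead of creating the bucket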
rasa/nlu/selectors/response_selector.py CHANGED
@@ -430,9 +430,9 @@ class ResponseSelector(DIETClassifier):
         self, message: Message, prediction_dict: Dict[Text, Any], selector_key: Text
     ) -> None:
         message_selector_properties = message.get(RESPONSE_SELECTOR_PROPERTY_NAME, {})
-        message_selector_properties[
-            RESPONSE_SELECTOR_RETRIEVAL_INTENTS
-        ] = self.all_retrieval_intents
+        message_selector_properties[RESPONSE_SELECTOR_RETRIEVAL_INTENTS] = (
+            self.all_retrieval_intents
+        )
         message_selector_properties[selector_key] = prediction_dict
         message.set(
             RESPONSE_SELECTOR_PROPERTY_NAME,
@@ -505,7 +505,6 @@ class ResponseSelector(DIETClassifier):
         been caught earlier and a warning should have been raised.
         """
         for key, responses in self.responses.items():
-
             # First check if the predicted label was the key itself
             search_key = util.template_key_to_intent_response_key(key)
             if search_key == label.get("name"):
@@ -626,7 +625,6 @@ class ResponseSelector(DIETClassifier):
         config: Dict[Text, Any],
         finetune_mode: bool = False,
     ) -> "RasaModel":
-
         predict_data_example = RasaModelData(
             label_key=model_data_example.label_key,
             data={
@@ -723,7 +721,6 @@ class DIET2BOW(DIET):
             logger.debug(f" {metric} ({name})")
 
     def _update_label_metrics(self, loss: tf.Tensor, acc: tf.Tensor) -> None:
-
         self.response_loss.update_state(loss)
         self.response_acc.update_state(acc)
 
@@ -796,10 +793,10 @@ class DIET2DIET(DIET):
             (self.text_name, self.config),
             (self.label_name, label_config),
         ]:
-            self._tf_layers[
-                f"sequence_layer.{attribute}"
-            ] = rasa_layers.RasaSequenceLayer(
-                attribute, self.data_signature[attribute], config
+            self._tf_layers[f"sequence_layer.{attribute}"] = (
+                rasa_layers.RasaSequenceLayer(
+                    attribute, self.data_signature[attribute], config
+                )
             )
 
         if self.config[MASKED_LM]:
rasa/nlu/test.py CHANGED
@@ -886,7 +886,6 @@ def evaluate_entities(
         exclude_label=NO_ENTITY,
     )
     if output_directory:
-
         _dump_report(output_directory, f"{extractor}_report.json", report)
 
     if successes:
@@ -1550,7 +1549,6 @@ async def combine_result(
 
 
 def _contains_entity_labels(entity_results: List[EntityEvaluationResult]) -> bool:
-
     for result in entity_results:
         if result.entity_targets or result.entity_predictions:
             return True
@@ -1791,7 +1789,6 @@ async def compare_nlu(
     training_examples_per_run = []
 
     for run in range(runs):
-
         logger.info("Beginning comparison run {}/{}".format(run + 1, runs))
 
         run_path = os.path.join(output, "run_{}".format(run + 1))
rasa/nlu/utils/hugging_face/registry.py CHANGED
@@ -25,7 +25,7 @@ from transformers import (  # noqa: E402
     RobertaTokenizer,
     CamembertTokenizer,
 )
-from rasa.nlu.utils.hugging_face.transformers_pre_post_processors import (  # noqa: E402, E501
+from rasa.nlu.utils.hugging_face.transformers_pre_post_processors import (  # noqa: E402
     bert_tokens_pre_processor,
     gpt_tokens_pre_processor,
     xlnet_tokens_pre_processor,
rasa/nlu/utils/spacy_utils.py CHANGED
@@ -195,7 +195,7 @@ class SpacyNLP(GraphComponent):
 
     @staticmethod
     def _filter_training_samples_by_content(
-        indexed_training_samples: List[Tuple[int, Text]]
+        indexed_training_samples: List[Tuple[int, Text]],
     ) -> Tuple[List[Tuple[int, Text]], List[Tuple[int, Text]]]:
         """Separates empty training samples from content bearing ones."""
         docs_to_pipe = list(
@@ -251,7 +251,6 @@ class SpacyNLP(GraphComponent):
     ) -> Dict[Text, List[Any]]:
         attribute_docs = {}
         for attribute in DENSE_FEATURIZABLE_ATTRIBUTES:
-
             texts = [
                 self._get_text(e, attribute) for e in training_data.training_examples
             ]
@@ -288,7 +287,6 @@ class SpacyNLP(GraphComponent):
         attribute_docs = self._docs_for_training_data(model.model, training_data)
 
         for attribute in DENSE_FEATURIZABLE_ATTRIBUTES:
-
             for idx, example in enumerate(training_data.training_examples):
                 example_attribute_doc = attribute_docs[attribute][idx]
                 if len(example_attribute_doc):
rasa/server.py CHANGED
@@ -4,6 +4,7 @@ import logging
 import multiprocessing
 import os
 import traceback
+import warnings
 from collections import defaultdict
 from functools import reduce, wraps
 from http import HTTPStatus
@@ -25,6 +26,12 @@ from typing import (
 
 import aiohttp
 import jsonschema
+from sanic import Sanic, response
+from sanic.request import Request
+from sanic.response import HTTPResponse
+from sanic_cors import CORS
+from sanic_jwt import Initialize, exceptions
+
 import rasa
 import rasa.core.utils
 import rasa.nlu.test
@@ -70,11 +77,6 @@ from rasa.shared.utils.schemas.events import EVENTS_SCHEMA
 from rasa.shared.utils.yaml import validate_training_data
 from rasa.utils.common import TempDirectoryPath, get_temp_dir_name
 from rasa.utils.endpoints import EndpointConfig
-from sanic import Sanic, response
-from sanic.request import Request
-from sanic.response import HTTPResponse
-from sanic_cors import CORS
-from sanic_jwt import Initialize, exceptions
 
 if TYPE_CHECKING:
     from ssl import SSLContext
@@ -233,7 +235,6 @@ def requires_auth(
     async def decorated(
         request: Request, *args: Any, **kwargs: Any
     ) -> response.HTTPResponse:
-
         provided = request.args.get("token", None)
 
         # noinspection PyProtectedMember
@@ -518,7 +519,18 @@ def add_root_route(app: Sanic) -> None:
     @app.get("/")
     async def hello(request: Request) -> HTTPResponse:
         """Check if the server is running and responds with the version."""
-        return response.text("Hello from Rasa: " + rasa.__version__)
+        html_content = f"""
+        <html>
+            <body>
+                <p>Hello from Rasa: {rasa.__version__}</p>
+                <a href="./webhooks/inspector/inspect.html">Go to the inspector</a>
+                <script>
+                    window.location.replace("./webhooks/inspector/inspect.html");
+                </script>
+            </body>
+        </html>
+        """
+        return response.html(html_content)
 
 
 def async_if_callback_url(f: Callable[..., Coroutine]) -> Callable:
@@ -647,6 +659,9 @@ def create_app(
     app.config.RESPONSE_TIMEOUT = response_timeout
     configure_cors(app, cors_origins)
 
+    # Reset Sanic warnings filter that allows the triggering of Sanic warnings
+    warnings.filterwarnings("ignore", category=DeprecationWarning, module=r"sanic.*")
+
     # Set up the Sanic-JWT extension
     if jwt_secret and jwt_method:
         # `sanic-jwt` depends on having an available event loop when making the call to
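With the root-route change above, `GET /` now returns a small HTML page that links and redirects to the development inspector instead of a plain-text greeting. A quick way to observe the new behaviour against a locally running server; the default host and port are assumed, adjust as needed:

import requests

# Assumes `rasa run` is serving on the default http://localhost:5005.
resp = requests.get("http://localhost:5005/")
print(resp.headers.get("Content-Type"))                  # text/html instead of plain text
print("webhooks/inspector/inspect.html" in resp.text)    # body points at the inspector page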