rasa-pro 3.10.7.dev4__py3-none-any.whl → 3.10.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (76)
  1. README.md +37 -1
  2. rasa/api.py +2 -8
  3. rasa/cli/arguments/default_arguments.py +2 -23
  4. rasa/cli/arguments/run.py +0 -2
  5. rasa/cli/e2e_test.py +8 -10
  6. rasa/cli/inspect.py +2 -5
  7. rasa/cli/run.py +0 -7
  8. rasa/cli/studio/studio.py +21 -1
  9. rasa/cli/train.py +4 -9
  10. rasa/cli/utils.py +3 -3
  11. rasa/core/agent.py +2 -2
  12. rasa/core/brokers/kafka.py +1 -3
  13. rasa/core/brokers/pika.py +1 -3
  14. rasa/core/channels/socketio.py +1 -5
  15. rasa/core/channels/voice_aware/utils.py +5 -6
  16. rasa/core/nlg/contextual_response_rephraser.py +2 -11
  17. rasa/core/policies/enterprise_search_policy.py +2 -11
  18. rasa/core/policies/intentless_policy.py +2 -9
  19. rasa/core/run.py +1 -2
  20. rasa/core/secrets_manager/constants.py +0 -4
  21. rasa/core/secrets_manager/factory.py +0 -8
  22. rasa/core/secrets_manager/vault.py +1 -11
  23. rasa/core/utils.py +19 -30
  24. rasa/dialogue_understanding/coexistence/llm_based_router.py +2 -9
  25. rasa/dialogue_understanding/commands/__init__.py +2 -0
  26. rasa/dialogue_understanding/commands/restart_command.py +58 -0
  27. rasa/dialogue_understanding/commands/set_slot_command.py +5 -1
  28. rasa/dialogue_understanding/commands/utils.py +3 -1
  29. rasa/dialogue_understanding/generator/llm_based_command_generator.py +2 -11
  30. rasa/dialogue_understanding/generator/llm_command_generator.py +1 -1
  31. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +15 -15
  32. rasa/dialogue_understanding/patterns/restart.py +37 -0
  33. rasa/e2e_test/e2e_test_runner.py +1 -1
  34. rasa/engine/graph.py +1 -0
  35. rasa/engine/recipes/config_files/default_config.yml +3 -0
  36. rasa/engine/recipes/default_recipe.py +1 -0
  37. rasa/engine/recipes/graph_recipe.py +1 -0
  38. rasa/engine/storage/local_model_storage.py +1 -0
  39. rasa/engine/storage/storage.py +5 -1
  40. rasa/model_training.py +6 -11
  41. rasa/{core → nlu}/persistor.py +1 -1
  42. rasa/server.py +1 -1
  43. rasa/shared/constants.py +3 -2
  44. rasa/shared/core/domain.py +47 -101
  45. rasa/shared/core/flows/flows_list.py +6 -19
  46. rasa/shared/core/flows/validation.py +0 -25
  47. rasa/shared/core/flows/yaml_flows_io.py +24 -3
  48. rasa/shared/importers/importer.py +32 -32
  49. rasa/shared/importers/multi_project.py +11 -23
  50. rasa/shared/importers/rasa.py +2 -7
  51. rasa/shared/importers/remote_importer.py +2 -2
  52. rasa/shared/importers/utils.py +1 -3
  53. rasa/shared/nlu/training_data/training_data.py +19 -18
  54. rasa/shared/providers/_configs/azure_openai_client_config.py +5 -3
  55. rasa/shared/providers/llm/_base_litellm_client.py +26 -10
  56. rasa/shared/providers/llm/self_hosted_llm_client.py +15 -3
  57. rasa/shared/utils/common.py +22 -3
  58. rasa/shared/utils/llm.py +5 -29
  59. rasa/shared/utils/schemas/model_config.yml +10 -0
  60. rasa/studio/auth.py +4 -0
  61. rasa/tracing/instrumentation/attribute_extractors.py +1 -1
  62. rasa/validator.py +5 -2
  63. rasa/version.py +1 -1
  64. {rasa_pro-3.10.7.dev4.dist-info → rasa_pro-3.10.8.dist-info}/METADATA +43 -7
  65. {rasa_pro-3.10.7.dev4.dist-info → rasa_pro-3.10.8.dist-info}/RECORD +68 -74
  66. rasa/model_manager/__init__.py +0 -0
  67. rasa/model_manager/config.py +0 -12
  68. rasa/model_manager/model_api.py +0 -464
  69. rasa/model_manager/runner_service.py +0 -185
  70. rasa/model_manager/socket_bridge.py +0 -44
  71. rasa/model_manager/trainer_service.py +0 -240
  72. rasa/model_manager/utils.py +0 -27
  73. rasa/model_service.py +0 -66
  74. {rasa_pro-3.10.7.dev4.dist-info → rasa_pro-3.10.8.dist-info}/NOTICE +0 -0
  75. {rasa_pro-3.10.7.dev4.dist-info → rasa_pro-3.10.8.dist-info}/WHEEL +0 -0
  76. {rasa_pro-3.10.7.dev4.dist-info → rasa_pro-3.10.8.dist-info}/entry_points.txt +0 -0
rasa/shared/providers/llm/self_hosted_llm_client.py CHANGED
@@ -4,9 +4,13 @@ from litellm import (
      atext_completion,
  )
  import logging
+ import os
  import structlog

- from rasa.shared.constants import OPENAI_PROVIDER
+ from rasa.shared.constants import (
+     SELF_HOSTED_VLLM_PREFIX,
+     SELF_HOSTED_VLLM_API_KEY_ENV_VAR,
+ )
  from rasa.shared.providers._configs.self_hosted_llm_client_config import (
      SelfHostedLLMClientConfig,
  )
@@ -57,6 +61,7 @@ class SelfHostedLLMClient(_BaseLiteLLMClient):
          self._api_version = api_version
          self._use_chat_completions_endpoint = use_chat_completions_endpoint
          self._extra_parameters = kwargs or {}
+         self._apply_dummy_api_key_if_missing()

      @classmethod
      def from_config(cls, config: Dict[str, Any]) -> "SelfHostedLLMClient":
@@ -157,8 +162,8 @@ class SelfHostedLLMClient(_BaseLiteLLMClient):

          <openai>/<model or deployment name>
          """
-         if self.model and f"{OPENAI_PROVIDER}/" not in self.model:
-             return f"{OPENAI_PROVIDER}/{self.model}"
+         if self.model and f"{SELF_HOSTED_VLLM_PREFIX}/" not in self.model:
+             return f"{SELF_HOSTED_VLLM_PREFIX}/{self.model}"
          return self.model

      @property
@@ -279,3 +284,10 @@ class SelfHostedLLMClient(_BaseLiteLLMClient):
              formatted_response=formatted_response.to_dict(),
          )
          return formatted_response
+
+     @staticmethod
+     def _apply_dummy_api_key_if_missing() -> None:
+         if not os.getenv(SELF_HOSTED_VLLM_API_KEY_ENV_VAR):
+             os.environ[SELF_HOSTED_VLLM_API_KEY_ENV_VAR] = (
+                 "dummy_self_hosted_llm_api_key"
+             )
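The new `_apply_dummy_api_key_if_missing` hook covers OpenAI-compatible self-hosted deployments (for example vLLM) that expect an API key header even when no real key exists: if the expected environment variable is unset, a placeholder is exported before any request is made. A minimal sketch of that pattern; `HOSTED_VLLM_API_KEY` is an illustrative stand-in, since the real value of `SELF_HOSTED_VLLM_API_KEY_ENV_VAR` lives in `rasa/shared/constants.py` and is not shown in this diff:

```python
import os

# Illustrative stand-in; the real constant is SELF_HOSTED_VLLM_API_KEY_ENV_VAR
# in rasa.shared.constants and may name a different environment variable.
API_KEY_ENV_VAR = "HOSTED_VLLM_API_KEY"


def apply_dummy_api_key_if_missing() -> None:
    """Export a placeholder key so the OpenAI-compatible client does not fail fast."""
    if not os.getenv(API_KEY_ENV_VAR):
        os.environ[API_KEY_ENV_VAR] = "dummy_self_hosted_llm_api_key"


apply_dummy_api_key_if_missing()
print(os.environ[API_KEY_ENV_VAR])  # placeholder unless a real key was already set
```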
rasa/shared/utils/common.py CHANGED
@@ -86,11 +86,31 @@ def sort_list_of_dicts_by_first_key(dicts: List[Dict]) -> List[Dict]:
      return sorted(dicts, key=lambda d: next(iter(d.keys())))


+ def lazy_property(function: Callable) -> Any:
+     """Allows to avoid recomputing a property over and over.
+
+     The result gets stored in a local var. Computation of the property
+     will happen once, on the first call of the property. All
+     succeeding calls will use the value stored in the private property.
+     """
+     attr_name = "_lazy_" + function.__name__
+
+     def _lazyprop(self: Any) -> Any:
+         if not hasattr(self, attr_name):
+             setattr(self, attr_name, function(self))
+         return getattr(self, attr_name)
+
+     return property(_lazyprop)
+
+
  def cached_method(f: Callable[..., Any]) -> Callable[..., Any]:
      """Caches method calls based on the call's `args` and `kwargs`.
+
      Works for `async` and `sync` methods. Don't apply this to functions.
+
      Args:
          f: The decorated method whose return value should be cached.
+
      Returns:
          The return value which the method gives for the first call with the given
          arguments.
@@ -156,9 +176,8 @@ def transform_collection_to_sentence(collection: Collection[Text]) -> Text:
  def minimal_kwargs(
      kwargs: Dict[Text, Any], func: Callable, excluded_keys: Optional[List] = None
  ) -> Dict[Text, Any]:
-     """Returns only the kwargs which are required by a function.
-
-     Keys, contained in the exception list, are not included.
+     """Returns only the kwargs which are required by a function. Keys, contained in
+     the exception list, are not included.

      Args:
          kwargs: All available kwargs.
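The `lazy_property` decorator added above computes an instance property once and caches the result on a private `_lazy_<name>` attribute; subsequent reads return the cached value. A minimal usage sketch (the decorator body is copied from the hunk above; `ExpensiveReport` is an invented example class):

```python
from typing import Any, Callable


def lazy_property(function: Callable) -> Any:
    """Compute a property once per instance and cache it on a private attribute."""
    attr_name = "_lazy_" + function.__name__

    def _lazyprop(self: Any) -> Any:
        if not hasattr(self, attr_name):
            setattr(self, attr_name, function(self))
        return getattr(self, attr_name)

    return property(_lazyprop)


class ExpensiveReport:
    @lazy_property
    def totals(self) -> int:
        print("computing once")
        return sum(range(1_000_000))


report = ExpensiveReport()
report.totals  # prints "computing once" and stores the result on _lazy_totals
report.totals  # served from the cached attribute, no recomputation
```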
rasa/shared/utils/llm.py CHANGED
@@ -12,6 +12,7 @@ from typing import (
      cast,
  )
  import json
+
  import structlog

  import rasa.shared.utils.io
@@ -410,10 +411,10 @@ def try_instantiate_llm_client(
      default_llm_config: Optional[Dict],
      log_source_function: str,
      log_source_component: str,
- ) -> LLMClient:
+ ) -> None:
      """Validate llm configuration."""
      try:
-         return llm_factory(custom_llm_config, default_llm_config)
+         llm_factory(custom_llm_config, default_llm_config)
      except (ProviderClientValidationError, ValueError) as e:
          structlogger.error(
              f"{log_source_function}.llm_instantiation_failed",
@@ -422,32 +423,7 @@
          )
          print_error_and_exit(
              f"Unable to create the LLM client for component - {log_source_component}. "
-             f"Please make sure you specified the required environment variables. "
-             f"Error: {e}"
-         )
-
-
- def llm_api_health_check(
-     llm_client: LLMClient, log_source_function: str, log_source_component: str
- ) -> None:
-     """Perform a health check on the LLM API."""
-     structlogger.info(
-         f"{log_source_function}.llm_api_call",
-         event_info=(
-             f"Performing a health check on the LLM API for the component - "
-             f"{log_source_component}."
-         ),
-         config=llm_client.config,
-     )
-     try:
-         llm_client.completion("hello")
-     except Exception as e:
-         structlogger.error(
-             f"{log_source_function}.llm_api_call_failed",
-             event_info="call to the LLM API failed.",
-             error=e,
-         )
-         print_error_and_exit(
-             f"Call to the LLM API failed for component - {log_source_component}. "
+             f"Please make sure you specified the required environment variables "
+             f"and configuration keys. "
              f"Error: {e}"
          )
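With this change `try_instantiate_llm_client` becomes a pure validation step: it no longer returns the client, and the separate `llm_api_health_check` startup call is removed entirely. A rough sketch of the validate-and-exit pattern, using a hypothetical `build_client` factory in place of Rasa's `llm_factory`:

```python
import sys
from typing import Any, Dict, Optional


def build_client(custom_config: Optional[Dict], default_config: Optional[Dict]) -> Any:
    """Hypothetical factory; raises ValueError when the config is unusable."""
    config = {**(default_config or {}), **(custom_config or {})}
    if "model" not in config:
        raise ValueError("LLM config must define a 'model'.")
    return object()  # a real factory would return a provider client


def validate_llm_config(
    custom_config: Optional[Dict],
    default_config: Optional[Dict],
    component: str,
) -> None:
    """Fail fast with a readable message instead of surfacing a stack trace."""
    try:
        build_client(custom_config, default_config)
    except ValueError as e:
        sys.exit(
            f"Unable to create the LLM client for component - {component}. "
            f"Please make sure you specified the required environment variables "
            f"and configuration keys. Error: {e}"
        )


# Passes silently when the config is constructible; exits with a message otherwise.
validate_llm_config({"model": "gpt-4o-mini"}, None, component="EnterpriseSearchPolicy")
```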
rasa/shared/utils/schemas/model_config.yml CHANGED
@@ -34,3 +34,13 @@ mapping:
      name:
        type: str
        required: True
+   spaces:
+     type: "seq"
+     required: False
+     sequence:
+       - type: "map"
+         allowempty: True
+         mapping:
+           name:
+             type: str
+             required: True
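The schema now permits an optional `spaces` sequence whose entries are mappings that must at least carry a `name` string (extra keys are tolerated via `allowempty: True`). A hedged illustration of data that would satisfy the extended schema; the surrounding keys and values are invented, only the `spaces` shape comes from the hunk above:

```python
# Parsed form of a config fragment the extended schema would accept.
# The schema prescribes only the structure; names and values here are invented.
model_config = {
    "name": "example-assistant",
    "spaces": [
        {"name": "billing"},
        # allowempty: True permits extra keys alongside the required "name"
        {"name": "travel", "description": "any additional keys are allowed"},
    ],
}

for space in model_config.get("spaces", []):
    assert isinstance(space["name"], str)
```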
rasa/studio/auth.py CHANGED
@@ -21,15 +21,19 @@ from rasa.studio.results_logger import with_studio_error_handler, StudioResult


  class StudioAuth:
+     """Handles the authentication with the Rasa Studio authentication server."""
+
      def __init__(
          self,
          studio_config: StudioConfig,
+         verify: bool = True,
      ) -> None:
          self.config = studio_config
          self.keycloak_openid = KeycloakOpenID(
              server_url=studio_config.authentication_server_url,
              client_id=studio_config.client_id,
              realm_name=studio_config.realm_name,
+             verify=verify,
          )

      def health_check(self) -> bool:
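`StudioAuth` now exposes a `verify` flag and forwards it to `KeycloakOpenID`, which in `python-keycloak` toggles TLS certificate verification. A hedged usage sketch; the `StudioConfig` import path and keyword arguments mirror the attributes read in `__init__` above but are assumptions, not confirmed by this diff:

```python
from rasa.studio.auth import StudioAuth
from rasa.studio.config import StudioConfig  # assumed module path

# Assumed constructor arguments, mirroring the attributes accessed in __init__.
config = StudioConfig(
    authentication_server_url="https://keycloak.example.com/auth/",
    studio_url="https://studio.example.com/api/graphql/",
    client_id="rasa-studio",
    realm_name="rasa-studio",
)

# verify=True (the default) keeps TLS certificate checks enabled; pass False
# only for local or test setups that use self-signed certificates.
auth = StudioAuth(config, verify=True)
print(auth.health_check())
```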
rasa/tracing/instrumentation/attribute_extractors.py CHANGED
@@ -664,7 +664,7 @@ def extract_attrs_for_custom_action_executor_run(

      attrs: Dict[str, Any] = {
          "class_name": self.__class__.__name__,
-         "action_name": self.action_name if hasattr(self, "action_name") else "None",
+         "action_name": self.action_name,
          "sender_id": tracker.sender_id,
          "url": str(url),
          "actions_module": str(actions_module),
rasa/validator.py CHANGED
@@ -1,10 +1,10 @@
  import logging
- import structlog
  import re
  import string
  from collections import defaultdict
  from typing import Set, Text, Optional, Dict, Any, List, Tuple

+ import structlog
  from jinja2 import Template
  from pypred import Predicate
  from pypred.ast import Literal, CompareOperator, NegateOperator
@@ -596,6 +596,7 @@ class Validator:
          collect: CollectInformationFlowStep,
          all_good: bool,
          domain_slots: Dict[Text, Slot],
+         flow_id: str,
      ) -> bool:
          """Validates that a collect step can have either an action or an utterance.
          Also logs an error if neither an action nor an utterance is defined.
@@ -625,6 +626,7 @@
              collect=collect.collect,
              has_utterance_defined=has_utterance_defined,
              has_action_defined=has_action_defined,
+             flow=flow_id,
              event_info=(
                  f"The collect step '{collect.collect}' has an utterance "
                  f"'{collect.utter}' as well as an action "
@@ -648,6 +650,7 @@
              collect=collect.collect,
              has_utterance_defined=has_utterance_defined,
              has_action_defined=has_action_defined,
+             flow=flow_id,
              event_info=(
                  f"The collect step '{collect.collect}' has neither an utterance "
                  f"nor an action defined, or an initial value defined in the domain."
@@ -716,7 +719,7 @@
              if isinstance(step, CollectInformationFlowStep):
                  all_good = (
                      self._log_error_if_either_action_or_utterance_are_not_defined(
-                         step, all_good, domain_slots
+                         step, all_good, domain_slots, flow.id
                      )
                  )

rasa/version.py CHANGED
@@ -1,3 +1,3 @@
  # this file will automatically be changed,
  # do not add anything but the version number here!
- __version__ = "3.10.7.dev4"
+ __version__ = "3.10.8"
{rasa_pro-3.10.7.dev4.dist-info → rasa_pro-3.10.8.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: rasa-pro
- Version: 3.10.7.dev4
+ Version: 3.10.8
  Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
  Home-page: https://rasa.com
  Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
@@ -32,7 +32,7 @@ Requires-Dist: aiohttp (>=3.9.4,<3.10)
  Requires-Dist: apscheduler (>=3.10,<3.11)
  Requires-Dist: attrs (>=23.1,<23.2)
  Requires-Dist: azure-storage-blob (>=12.16.0,<12.17.0)
- Requires-Dist: boto3 (>=1.35.15,<1.36.0)
+ Requires-Dist: boto3 (>=1.35.5,<1.36.0)
  Requires-Dist: certifi (>=2024.07.04)
  Requires-Dist: cloudpickle (>=2.2.1,<3.1)
  Requires-Dist: colorama (>=0.4.6,<0.5.0) ; sys_platform == "win32"
@@ -53,8 +53,8 @@ Requires-Dist: gitpython (>=3.1.41,<3.2.0) ; extra == "full"
  Requires-Dist: google-auth (>=2.23.4,<3)
  Requires-Dist: google-cloud-storage (>=2.14.0,<3.0.0)
  Requires-Dist: hvac (>=1.2.1,<2.0.0)
- Requires-Dist: importlib-metadata (>=6.8.0,<7.0.0)
- Requires-Dist: importlib-resources (>=6.1.1,<7.0.0)
+ Requires-Dist: importlib-metadata (>=8.5.0,<8.6.0)
+ Requires-Dist: importlib-resources (==6.1.3)
  Requires-Dist: jieba (>=0.42.1,<0.43) ; extra == "jieba" or extra == "full"
  Requires-Dist: jinja2 (>=3.1.4,<4.0.0)
  Requires-Dist: joblib (>=1.2.0,<1.3.0)
@@ -64,13 +64,13 @@ Requires-Dist: jsonschema (>=4.22)
  Requires-Dist: keras (==2.14.0)
  Requires-Dist: langchain (>=0.2.0,<0.3.0)
  Requires-Dist: langchain-community (>=0.2.0,<0.3.0)
- Requires-Dist: litellm (>=1.45.0,<1.46.0)
+ Requires-Dist: litellm (>=1.50.0,<1.51.0)
  Requires-Dist: matplotlib (>=3.7,<3.8)
  Requires-Dist: mattermostwrapper (>=2.2,<2.3)
  Requires-Dist: mlflow (>=2.15.1,<3.0.0) ; extra == "mlflow"
  Requires-Dist: networkx (>=3.1,<3.2)
  Requires-Dist: numpy (>=1.23.5,<1.25.0) ; python_version >= "3.9" and python_version < "3.11"
- Requires-Dist: openai (>=1.47.0,<1.48.0)
+ Requires-Dist: openai (>=1.52.0,<1.53.0)
  Requires-Dist: openpyxl (>=3.1.5,<4.0.0)
  Requires-Dist: opentelemetry-api (>=1.16.0,<1.17.0)
  Requires-Dist: opentelemetry-exporter-jaeger (>=1.16.0,<1.17.0)
@@ -390,6 +390,39 @@ To check the types execute
  make types
  ```

+ ### Backporting
+
+ In order to port changes to `main` and across release branches, we use the `backport` workflow located at
+ the `.github/workflows/backport.yml` path.
+ This workflow is triggered by the `backport-to-<release-branch>` label applied to a PR, for example `backport-to-3.8.x`.
+ Current available target branches are `main` and maintained release branches.
+
+ When a PR gets labelled `backport-to-<release-branch>`, a PR is opened by the `backport-github-action` as soon as the
+ source PR gets closed (by merging). If you want to close the PR without merging changes, make sure to remove the `backport-to-<release-branch>` label.
+
+ The PR author which the action assigns to the backporting PR has to resolve any conflicts before approving and merging.
+ Release PRs should also be labelled with `backport-to-main` to backport the `CHANGELOG.md` updates to `main`.
+ Backporting version updates should be accepted to the `main` branch from the latest release branch only.
+
+ Here are some guidelines to follow when backporting changes and resolving conflicts:
+
+ a) for conflicts in `version.py`: accept only the version from the latest release branch. Do not merge version changes
+ from earlier release branches into `main` because this could cause issues when trying to make the next minor release.
+
+ b) for conflicts in `pyproject.toml`: if related to the `rasa-pro` version, accept only the latest release branch;
+ if related to other dependencies, accept `main` or whichever is the higher upgrade (main usually has the updated
+ dependencies because we only do housekeeping on `main`, apart from vulnerability updates). Be mindful of dependencies that
+ are removed from `main` but still exist in former release branches (for example `langchain`).
+
+ c) for conflicts in `poetry.lock`: accept changes which were already present on the target branch, then run
+ `poetry lock --no-update` so that the lock file contains your changes from `pyproject.toml` too.
+
+ d) for conflicts in `CHANGELOG.md`: Manually place the changelog in their allocated section (e.g. 3.8.10 will go under the
+ 3.8 section with the other releases, rather than go at the top of the file)
+
+ If the backporting workflow fails, you are encouraged to cherry-pick the commits manually and create a PR to
+ the target branch. Alternatively, you can install the backporting CLI tool as described [here](https://github.com/sorenlouv/backport?tab=readme-ov-file#install).
+
  ## Releases
  Rasa has implemented robust policies governing version naming, as well as release pace for major, minor, and patch releases.

@@ -472,9 +505,12 @@ Releasing a new version is quite simple, as the packages are build and distribut
  9. If however an error occurs in the build, then we should see a failure message automatically posted in the company's Slack (`dev-tribe` channel) like this [one](https://rasa-hq.slack.com/archives/C01M5TAHDHA/p1701444735622919)
  (In this case do the following checks):
  - Check the workflows in [Github Actions](https://github.com/RasaHQ/rasa-private/actions) and make sure that the merged PR of the current release is completed successfully. To easily find your PR you can use the filters `event: push` and `branch: <version number>` (example on release 2.4 you can see [here](https://github.com/RasaHQ/rasa/actions/runs/643344876))
- - If the workflow is not completed, then try to re run the workflow in case that solves the problem
+ - If the workflow is not completed, then try to re-run the workflow in case that solves the problem
  - If the problem persists, check also the log files and try to find the root cause of the issue
  - If you still cannot resolve the error, contact the infrastructure team by providing any helpful information from your investigation
+ 10. If the release is successful, add the newly created release branch to the backporting configuration in the `.backportrc.json` file to
+ the `targetBranchesChoices` list. This is necessary for the backporting workflow to work correctly with new release branches.
+

  ### Cutting a Patch release