rasa-pro 3.11.0rc1__py3-none-any.whl → 3.11.0rc3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of rasa-pro might be problematic.
- rasa/cli/inspect.py +2 -0
- rasa/cli/studio/studio.py +18 -8
- rasa/core/actions/action_repeat_bot_messages.py +17 -0
- rasa/core/channels/channel.py +17 -0
- rasa/core/channels/development_inspector.py +4 -1
- rasa/core/channels/voice_ready/audiocodes.py +15 -4
- rasa/core/channels/voice_ready/jambonz.py +13 -2
- rasa/core/channels/voice_ready/twilio_voice.py +6 -21
- rasa/core/channels/voice_stream/asr/asr_event.py +1 -1
- rasa/core/channels/voice_stream/asr/azure.py +5 -7
- rasa/core/channels/voice_stream/asr/deepgram.py +13 -11
- rasa/core/channels/voice_stream/voice_channel.py +61 -19
- rasa/core/nlg/contextual_response_rephraser.py +20 -12
- rasa/core/policies/enterprise_search_policy.py +32 -72
- rasa/core/policies/intentless_policy.py +34 -72
- rasa/dialogue_understanding/coexistence/llm_based_router.py +18 -33
- rasa/dialogue_understanding/generator/constants.py +0 -2
- rasa/dialogue_understanding/generator/flow_retrieval.py +33 -50
- rasa/dialogue_understanding/generator/llm_based_command_generator.py +12 -40
- rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +18 -20
- rasa/dialogue_understanding/generator/nlu_command_adapter.py +19 -1
- rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +26 -22
- rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +9 -0
- rasa/dialogue_understanding/processor/command_processor.py +21 -1
- rasa/e2e_test/e2e_test_case.py +85 -6
- rasa/engine/validation.py +88 -60
- rasa/model_service.py +3 -0
- rasa/nlu/tokenizers/whitespace_tokenizer.py +3 -14
- rasa/server.py +3 -1
- rasa/shared/constants.py +5 -5
- rasa/shared/core/constants.py +1 -1
- rasa/shared/core/domain.py +0 -26
- rasa/shared/core/flows/flows_list.py +5 -1
- rasa/shared/providers/_configs/litellm_router_client_config.py +29 -9
- rasa/shared/providers/embedding/_base_litellm_embedding_client.py +6 -14
- rasa/shared/providers/embedding/litellm_router_embedding_client.py +1 -1
- rasa/shared/providers/llm/_base_litellm_client.py +32 -1
- rasa/shared/providers/llm/litellm_router_llm_client.py +56 -1
- rasa/shared/providers/llm/self_hosted_llm_client.py +4 -28
- rasa/shared/providers/router/_base_litellm_router_client.py +35 -1
- rasa/shared/utils/common.py +1 -1
- rasa/shared/utils/health_check/__init__.py +0 -0
- rasa/shared/utils/health_check/embeddings_health_check_mixin.py +31 -0
- rasa/shared/utils/health_check/health_check.py +256 -0
- rasa/shared/utils/health_check/llm_health_check_mixin.py +31 -0
- rasa/shared/utils/llm.py +5 -2
- rasa/shared/utils/yaml.py +102 -62
- rasa/studio/auth.py +3 -5
- rasa/studio/config.py +13 -4
- rasa/studio/constants.py +1 -0
- rasa/studio/data_handler.py +10 -3
- rasa/studio/upload.py +21 -10
- rasa/telemetry.py +15 -1
- rasa/tracing/config.py +3 -1
- rasa/tracing/instrumentation/attribute_extractors.py +20 -0
- rasa/tracing/instrumentation/instrumentation.py +121 -0
- rasa/utils/common.py +5 -0
- rasa/utils/io.py +8 -16
- rasa/utils/sanic_error_handler.py +32 -0
- rasa/version.py +1 -1
- {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc3.dist-info}/METADATA +3 -2
- {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc3.dist-info}/RECORD +65 -61
- rasa/shared/utils/health_check.py +0 -533
- {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc3.dist-info}/NOTICE +0 -0
- {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc3.dist-info}/WHEEL +0 -0
- {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc3.dist-info}/entry_points.txt +0 -0
rasa/shared/utils/llm.py
CHANGED
@@ -690,14 +690,16 @@ def resolve_model_client_config(
 ) -> Optional[Dict[str, Any]]:
     """Resolve the model group in the model config.
 
-    If the config is pointing to a model group, the corresponding model group
+    1. If the config is pointing to a model group, the corresponding model group
     of the endpoints.yml is returned.
-    If the config is using the old syntax, e.g. defining the llm
+    2. If the config is using the old syntax, e.g. defining the llm
     directly in config.yml, the config is returned as is.
+    3. If the config is already resolved, return it as is.
 
     Args:
         model_config: The model config to be resolved.
         component_name: The name of the component.
+        component_name: The method of the component.
 
     Returns:
         The resolved llm config.
@@ -718,6 +720,7 @@ def resolve_model_client_config(
     if model_config is None:
         return None
 
+    # Config is already resolved or defines a client without model groups
     if MODEL_GROUP_CONFIG_KEY not in model_config:
         return model_config
 
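As a usage sketch (not part of the diff), the three cases in the updated docstring of resolve_model_client_config behave roughly as follows; the literal "model_group" key and the component name are assumptions for illustration:

# Illustrative sketch only; based on the docstring above.
from rasa.shared.utils.llm import resolve_model_client_config

direct_config = {"provider": "openai", "model": "gpt-4"}
group_config = {"model_group": "primary_llm"}  # assumed key spelling

resolve_model_client_config(direct_config, "SingleStepLLMCommandGenerator")
# -> returned as is (old syntax or already resolved, cases 2 and 3)

resolve_model_client_config(group_config, "SingleStepLLMCommandGenerator")
# -> replaced by the matching model group defined in endpoints.yml (case 1)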
rasa/shared/utils/yaml.py
CHANGED
@@ -1,4 +1,5 @@
 import datetime
+import io
 import logging
 import os
 import re
@@ -8,19 +9,13 @@ from dataclasses import field
 from functools import lru_cache
 from io import StringIO
 from pathlib import Path
-from typing import
+from typing import Any, List, Optional, Tuple, Dict, Callable, Union
 
 import jsonschema
 from importlib_resources import files
 from packaging import version
 from pykwalify.core import Core
 from pykwalify.errors import SchemaError
-from ruamel import yaml as yaml
-from ruamel.yaml import RoundTripRepresenter, YAMLError
-from ruamel.yaml.comments import CommentedSeq, CommentedMap
-from ruamel.yaml.constructor import DuplicateKeyError, BaseConstructor, ScalarNode
-from ruamel.yaml.loader import SafeLoader
-
 from rasa.shared.constants import (
     ASSERTIONS_SCHEMA_EXTENSIONS_FILE,
     ASSERTIONS_SCHEMA_FILE,
@@ -51,6 +46,11 @@ from rasa.shared.utils.io import (
     raise_warning,
     read_json_file,
 )
+from ruamel import yaml as yaml
+from ruamel.yaml import YAML, RoundTripRepresenter, YAMLError
+from ruamel.yaml.comments import CommentedSeq, CommentedMap
+from ruamel.yaml.constructor import DuplicateKeyError, BaseConstructor, ScalarNode
+from ruamel.yaml.loader import SafeLoader
 
 logger = logging.getLogger(__name__)
 
@@ -64,8 +64,17 @@ SENSITIVE_DATA = [API_KEY]
 
 @dataclass
 class PathWithError:
+    """Represents a validation error at a specific location in the YAML content.
+
+    Attributes:
+        message (str): A description of the validation error.
+        path (List[str]): Path to the node where the error occurred.
+        key (Optional[str]): The specific key associated with the error, if any.
+    """
+
     message: str
     path: List[str] = field(default_factory=list)
+    key: Optional[str] = None
 
 
 def fix_yaml_loader() -> None:
@@ -146,21 +155,72 @@ class YamlValidationException(YamlException, ValueError):
         if self.validation_errors:
             unique_errors = {}
             for error in self.validation_errors:
-                line_number = self._line_number_for_path(
+                line_number = self._line_number_for_path(
+                    self.content, error.path, error.key
+                )
 
                 if line_number and self.filename:
-
+                    error_location = f" in {self.filename}:{line_number}:\n"
                 elif line_number:
-
+                    error_location = f" in Line {line_number}:\n"
                 else:
-
+                    error_location = ""
 
-
-
+                code_snippet = self._get_code_snippet(line_number)
+                error_message = f"{error_location}\n{code_snippet}{error.message}\n"
+                unique_errors[error.message] = error_message
             error_msg = "\n".join(unique_errors.values())
             msg += f":\n{error_msg}"
         return msg
 
+    def _get_code_snippet(
+        self,
+        error_line: Optional[int],
+        context_lines: int = 2,
+    ) -> str:
+        """Extract code snippet from the YAML lines around the error.
+
+        Args:
+            error_line: Line number where the error occurred (1-based).
+            context_lines: Number of context lines before and after the error line.
+                Default is 2, balancing context and readability. Adjust as needed.
+
+        Returns:
+            A string containing the code snippet with the error highlighted.
+        """
+        yaml_lines = self._get_serialized_yaml_lines()
+        if not yaml_lines or error_line is None:
+            return ""
+
+        start = max(error_line - context_lines - 1, 0)
+        end = min(error_line + context_lines, len(yaml_lines))
+        snippet_lines = yaml_lines[start:end]
+        snippet = ""
+        for idx, line_content in enumerate(snippet_lines, start=start + 1):
+            prefix = ">>> " if idx == error_line else "    "
+            line_number_str = str(idx)
+            snippet += f"{prefix}{line_number_str} | {line_content}\n"
+        return snippet
+
+    def _get_serialized_yaml_lines(self) -> List[str]:
+        """Serialize the content back to YAML and return the lines."""
+        yaml_lines = []
+        try:
+            yaml = YAML()
+            yaml.default_flow_style = False
+            # Set width to 1000, so we don't break the lines of the original YAML file
+            yaml.width = 1000  # type: ignore[assignment]
+            yaml.indent(mapping=2, sequence=4, offset=2)
+            stream = io.StringIO()
+            yaml.dump(self.content, stream)
+            serialized_yaml = stream.getvalue()
+            yaml_lines = serialized_yaml.splitlines()
+            return yaml_lines
+        except Exception as exc:
+            logger.debug(f"Error serializing YAML content: {exc}")
+
+        return yaml_lines
+
     def _calculate_number_of_lines(
         self,
         current: Union[CommentedSeq, CommentedMap],
@@ -228,7 +288,9 @@ class YamlValidationException(YamlException, ValueError):
         # Return the calculated child offset and True indicating a line number was found
         return child_offset, True
 
-    def _line_number_for_path(
+    def _line_number_for_path(
+        self, current: Any, path: List[str], key: Optional[str] = None
+    ) -> Optional[int]:
         """Get line number for a yaml path in the current content.
 
         Implemented using recursion: algorithm goes down the path navigating to the
@@ -237,6 +299,7 @@ class YamlValidationException(YamlException, ValueError):
         Args:
             current: current content
             path: path to traverse within the content
+            key: the key associated with the error, if any
 
         Returns:
             the line number of the path in the content.
@@ -247,6 +310,10 @@ class YamlValidationException(YamlException, ValueError):
         this_line = current.lc.line + 1 if hasattr(current, "lc") else None
 
         if not path:
+            if key and hasattr(current, "lc"):
+                if hasattr(current.lc, "data") and key in current.lc.data:
+                    key_line_no = current.lc.data[key][0] + 1
+                    return key_line_no
             return this_line
 
         head, tail = path[0], path[1:]
@@ -256,7 +323,7 @@ class YamlValidationException(YamlException, ValueError):
 
         if head:
             if isinstance(current, dict) and head in current:
-                line = self._line_number_for_path(current[head], tail)
+                line = self._line_number_for_path(current[head], tail, key)
                 if line is None:
                     line_offset, found_lc = self._calculate_number_of_lines(
                         current, head
@@ -266,10 +333,13 @@ class YamlValidationException(YamlException, ValueError):
                         return this_line + line_offset
                 return line
             elif isinstance(current, list) and head.isdigit():
-                return
+                return (
+                    self._line_number_for_path(current[int(head)], tail, key)
+                    or this_line
+                )
             else:
                 return this_line
-        return self._line_number_for_path(current, tail) or this_line
+        return self._line_number_for_path(current, tail, key) or this_line
 
 
 def read_schema_file(
@@ -331,13 +401,26 @@ def validate_yaml_content_using_schema(
     try:
         core.validate(raise_exception=True)
     except SchemaError:
+        # PyKwalify propagates each validation error up the data hierarchy, resulting
+        # in multiple redundant errors for a single issue. To present a clear message
+        # about the root cause, we use only the first error.
+        error = core.errors[0]
+
+        # Increment numeric indices by 1 to convert from 0-based to 1-based indexing
+        error_message = re.sub(
+            r"(/)(\d+)", lambda m: f"/{int(m.group(2)) + 1}", str(error)
+        )
+
         raise YamlValidationException(
             "Please make sure the file is correct and all "
             "mandatory parameters are specified. Here are the errors "
             "found during validation",
             [
-                PathWithError(
-
+                PathWithError(
+                    message=error_message,
+                    path=error.path.removeprefix("/").split("/"),
+                    key=getattr(error, "key", None),
+                )
             ],
             content=yaml_content,
         )
@@ -424,46 +507,6 @@ def validate_raw_yaml_using_schema_file_with_responses(
     )
 
 
-def process_content(content: str) -> str:
-    """Process the content to handle both Windows paths and emojis.
-    Windows paths are processed by escaping backslashes but emojis are left untouched.
-
-    Args:
-        content: yaml content to be processed
-    """
-    # Detect common Windows path patterns: e.g., C:\ or \\
-    UNESCAPED_WINDOWS_PATH_PATTERN = re.compile(
-        r"(?<!\w)[a-zA-Z]:(\\[a-zA-Z0-9_ -]+)*(\\)?(?!\\n)"
-    )
-    ESCAPED_WINDOWS_PATH_PATTERN = re.compile(
-        r"(?<!\w)[a-zA-Z]:(\\\\[a-zA-Z0-9_ -]+)+\\\\?(?!\\n)"
-    )
-
-    # Function to escape backslashes in Windows paths but leave other content as is
-    def escape_windows_paths(match: re.Match) -> str:
-        path = str(match.group(0))
-        return path.replace("\\", "\\\\")  # Escape backslashes only in Windows paths
-
-    def unescape_windows_paths(match: re.Match) -> str:
-        path = str(match.group(0))
-        return path.replace("\\\\", "\\")
-
-    # First, process Windows paths by escaping backslashes
-    content = re.sub(UNESCAPED_WINDOWS_PATH_PATTERN, escape_windows_paths, content)
-
-    # Ensure proper handling of emojis by decoding Unicode sequences
-    content = (
-        content.encode("utf-8")
-        .decode("raw_unicode_escape")
-        .encode("utf-16", "surrogatepass")
-        .decode("utf-16")
-    )
-
-    content = re.sub(ESCAPED_WINDOWS_PATH_PATTERN, unescape_windows_paths, content)
-
-    return content
-
-
 def read_yaml(
     content: str,
     reader_type: Union[str, List[str]] = "safe",
@@ -479,9 +522,6 @@ def read_yaml(
     Raises:
         ruamel.yaml.parser.ParserError: If there was an error when parsing the YAML.
     """
-    if _is_ascii(content):
-        content = process_content(content)
-
     custom_constructor = kwargs.get("custom_constructor", None)
 
     # Create YAML parser with custom constructor
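To show what the new snippet rendering produces, here is a small standalone sketch that mirrors the _get_code_snippet logic added above (a re-implementation for illustration, not a call into Rasa; the exact padding of context lines is an assumption):

from typing import List, Optional


def format_snippet(
    yaml_lines: List[str], error_line: Optional[int], context_lines: int = 2
) -> str:
    """Mirror of the new _get_code_snippet logic: mark the failing line with '>>> '."""
    if not yaml_lines or error_line is None:
        return ""
    start = max(error_line - context_lines - 1, 0)
    end = min(error_line + context_lines, len(yaml_lines))
    snippet = ""
    for idx, line in enumerate(yaml_lines[start:end], start=start + 1):
        prefix = ">>> " if idx == error_line else "    "
        snippet += f"{prefix}{idx} | {line}\n"
    return snippet


print(format_snippet(["flows:", "  greet:", "    steps: hello"], error_line=3))
#     1 | flows:
#     2 |   greet:
# >>> 3 |     steps: hello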
rasa/studio/auth.py
CHANGED
@@ -23,12 +23,10 @@ from rasa.studio.results_logger import with_studio_error_handler, StudioResult
 class StudioAuth:
     """Handles the authentication with the Rasa Studio authentication server."""
 
-    def __init__(
-        self,
-        studio_config: StudioConfig,
-        verify: bool = True,
-    ) -> None:
+    def __init__(self, studio_config: StudioConfig) -> None:
         self.config = studio_config
+        verify = not studio_config.disable_verify
+
         self.keycloak_openid = KeycloakOpenID(
             server_url=studio_config.authentication_server_url,
             client_id=studio_config.client_id,
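A hedged sketch of the new call shape (field names are taken from the config.py diff below; all URLs and IDs are placeholders):

from rasa.studio.config import StudioConfig
from rasa.studio.auth import StudioAuth

config = StudioConfig(
    authentication_server_url="https://keycloak.example.com/auth/",  # placeholder
    studio_url="https://studio.example.com/api/graphql",             # placeholder
    client_id="rasa-cli",                                            # placeholder
    realm_name="rasa-studio",                                        # placeholder
    disable_verify=True,  # SSL verification is now derived from the config
)
auth = StudioAuth(config)  # the separate `verify` argument is gone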
rasa/studio/config.py
CHANGED
@@ -2,13 +2,14 @@ from __future__ import annotations
 
 import os
 from dataclasses import dataclass
-from typing import Dict, Optional, Text
+from typing import Any, Dict, Optional, Text
 
 from rasa.utils.common import read_global_config_value, write_global_config_value
 
 from rasa.studio.constants import (
     RASA_STUDIO_AUTH_SERVER_URL_ENV,
     RASA_STUDIO_CLI_CLIENT_ID_KEY_ENV,
+    RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV,
     RASA_STUDIO_CLI_REALM_NAME_KEY_ENV,
     RASA_STUDIO_CLI_STUDIO_URL_ENV,
     STUDIO_CONFIG_KEY,
@@ -19,6 +20,7 @@ STUDIO_URL_KEY = "studio_url"
 CLIENT_ID_KEY = "client_id"
 REALM_NAME_KEY = "realm_name"
 CLIENT_SECRET_KEY = "client_secret"
+DISABLE_VERIFY = "disable_verify"
 
 
 @dataclass
@@ -27,13 +29,15 @@ class StudioConfig:
     studio_url: Optional[Text]
     client_id: Optional[Text]
     realm_name: Optional[Text]
+    disable_verify: bool = False
 
-    def to_dict(self) -> Dict[Text, Optional[
+    def to_dict(self) -> Dict[Text, Optional[Any]]:
         return {
             AUTH_SERVER_URL_KEY: self.authentication_server_url,
             STUDIO_URL_KEY: self.studio_url,
             CLIENT_ID_KEY: self.client_id,
             REALM_NAME_KEY: self.realm_name,
+            DISABLE_VERIFY: self.disable_verify,
         }
 
     @classmethod
@@ -43,6 +47,7 @@ class StudioConfig:
             studio_url=data[STUDIO_URL_KEY],
             client_id=data[CLIENT_ID_KEY],
             realm_name=data[REALM_NAME_KEY],
+            disable_verify=data.get(DISABLE_VERIFY, False),
         )
 
     def write_config(self) -> None:
@@ -73,7 +78,7 @@ class StudioConfig:
         config = read_global_config_value(STUDIO_CONFIG_KEY, unavailable_ok=True)
 
         if config is None:
-            return StudioConfig(None, None, None, None)
+            return StudioConfig(None, None, None, None, False)
 
         if not isinstance(config, dict):
             raise ValueError(
@@ -83,7 +88,7 @@ class StudioConfig:
             )
 
         for key in config:
-            if not isinstance(config[key], str):
+            if not isinstance(config[key], str) and key != DISABLE_VERIFY:
                 raise ValueError(
                     "Invalid config file format. "
                     f"Key '{key}' is not a text value."
@@ -102,6 +107,9 @@ class StudioConfig:
             studio_url=StudioConfig._read_env_value(RASA_STUDIO_CLI_STUDIO_URL_ENV),
            client_id=StudioConfig._read_env_value(RASA_STUDIO_CLI_CLIENT_ID_KEY_ENV),
            realm_name=StudioConfig._read_env_value(RASA_STUDIO_CLI_REALM_NAME_KEY_ENV),
+            disable_verify=bool(
+                os.getenv(RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV, False)
+            ),
         )
 
     @staticmethod
@@ -124,4 +132,5 @@ class StudioConfig:
             studio_url=self.studio_url or other.studio_url,
             client_id=self.client_id or other.client_id,
             realm_name=self.realm_name or other.realm_name,
+            disable_verify=self.disable_verify or other.disable_verify,
         )
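One behavioural detail worth illustrating: the environment path computes disable_verify with bool(os.getenv(...)), so any non-empty value enables it, including "false". A minimal sketch (the variable name comes from the constants.py diff below):

import os

# bool() on a non-empty string is always True, so even "false" enables the flag.
os.environ["RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"] = "false"
disable_verify = bool(os.getenv("RASA_STUDIO_CLI_DISABLE_VERIFY_KEY", False))
print(disable_verify)  # True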
rasa/studio/constants.py
CHANGED
@@ -10,6 +10,7 @@ RASA_STUDIO_AUTH_SERVER_URL_ENV = "RASA_STUDIO_AUTH_SERVER_URL"
 RASA_STUDIO_CLI_STUDIO_URL_ENV = "RASA_STUDIO_CLI_STUDIO_URL"
 RASA_STUDIO_CLI_REALM_NAME_KEY_ENV = "RASA_STUDIO_CLI_REALM_NAME_KEY"
 RASA_STUDIO_CLI_CLIENT_ID_KEY_ENV = "RASA_STUDIO_CLI_CLIENT_ID_KEY"
+RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV = "RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"
 
 STUDIO_NLU_FILENAME = "studio_nlu.yml"
 STUDIO_DOMAIN_FILENAME = "studio_domain.yml"
rasa/studio/data_handler.py
CHANGED
@@ -76,7 +76,9 @@ class StudioDataHandler:
 
         return request
 
-    def _make_request(
+    def _make_request(
+        self, GQL_req: Dict[Any, Any], verify: bool = True
+    ) -> Dict[Any, Any]:
         token = KeycloakTokenReader().get_token()
         if token.is_expired():
             token = self.refresh_token(token)
@@ -93,6 +95,7 @@ class StudioDataHandler:
                 "Authorization": f"{token.token_type} {token.access_token}",
                 "Content-Type": "application/json",
             },
+            verify=verify,
         )
 
         if res.status_code != 200:
@@ -128,7 +131,9 @@ class StudioDataHandler:
             The data from Rasa Studio.
         """
         GQL_req = self._build_request()
-
+        verify = not self.studio_config.disable_verify
+
+        response = self._make_request(GQL_req, verify=verify)
         self._extract_data(response)
 
     def request_data(
@@ -145,7 +150,9 @@ class StudioDataHandler:
             The data from Rasa Studio.
         """
         GQL_req = self._build_request(intent_names, entity_names)
-
+        verify = not self.studio_config.disable_verify
+
+        response = self._make_request(GQL_req, verify=verify)
         self._extract_data(response)
 
     def get_config(self) -> Optional[str]:
rasa/studio/upload.py
CHANGED
@@ -56,14 +56,17 @@ def _get_selected_entities_and_intents(
 
 def handle_upload(args: argparse.Namespace) -> None:
     """Uploads primitives to rasa studio."""
-
+    studio_config = StudioConfig.read_config()
+    endpoint = studio_config.studio_url
+    verify = not studio_config.disable_verify
+
     if not endpoint:
         rasa.shared.utils.cli.print_error_and_exit(
             "No GraphQL endpoint found in config. Please run `rasa studio config`."
         )
         return
 
-    if not is_auth_working(endpoint):
+    if not is_auth_working(endpoint, verify):
         rasa.shared.utils.cli.print_error_and_exit(
             "Authentication is invalid or expired. Please run `rasa studio login`."
         )
@@ -81,9 +84,9 @@ def handle_upload(args: argparse.Namespace) -> None:
 
     # check safely if args.calm is set and not fail if not
     if hasattr(args, "calm") and args.calm:
-        upload_calm_assistant(args, endpoint)
+        upload_calm_assistant(args, endpoint, verify=verify)
     else:
-        upload_nlu_assistant(args, endpoint)
+        upload_nlu_assistant(args, endpoint, verify=verify)
 
 
     config_keys = [
@@ -135,7 +138,9 @@ def _get_assistant_name(config: Dict[Text, Any]) -> str:
 
 
 @with_studio_error_handler
-def upload_calm_assistant(
+def upload_calm_assistant(
+    args: argparse.Namespace, endpoint: str, verify: bool = True
+) -> StudioResult:
     """Uploads the CALM assistant data to Rasa Studio.
 
     Args:
@@ -227,11 +232,13 @@ def upload_calm_assistant(args: argparse.Namespace, endpoint: str) -> StudioResu
     structlogger.info(
         "rasa.studio.upload.calm", event_info="Uploading to Rasa Studio..."
     )
-    return make_request(endpoint, graphql_req)
+    return make_request(endpoint, graphql_req, verify)
 
 
 @with_studio_error_handler
-def upload_nlu_assistant(
+def upload_nlu_assistant(
+    args: argparse.Namespace, endpoint: str, verify: bool = True
+) -> StudioResult:
     """Uploads the classic (dm1) assistant data to Rasa Studio.
 
     Args:
@@ -241,6 +248,7 @@ def upload_nlu_assistant(args: argparse.Namespace, endpoint: str) -> StudioResul
         - intents: The intents to upload
         - entities: The entities to upload
         endpoint: The studio endpoint
+        verify: Whether to verify SSL
     Returns:
         None
     """
@@ -286,10 +294,10 @@ def upload_nlu_assistant(args: argparse.Namespace, endpoint: str) -> StudioResul
     structlogger.info(
         "rasa.studio.upload.nlu", event_info="Uploading to Rasa Studio..."
     )
-    return make_request(endpoint, graphql_req)
+    return make_request(endpoint, graphql_req, verify)
 
 
-def is_auth_working(endpoint: str) -> bool:
+def is_auth_working(endpoint: str, verify: bool = True) -> bool:
     """Send a test request to Studio to check if auth is working."""
     result = make_request(
         endpoint,
@@ -306,16 +314,18 @@ def is_auth_working(endpoint: str) -> bool:
             ),
             "variables": {},
         },
+        verify,
     )
     return result.was_successful
 
 
-def make_request(endpoint: str, graphql_req: Dict) -> StudioResult:
+def make_request(endpoint: str, graphql_req: Dict, verify: bool = True) -> StudioResult:
     """Makes a request to the studio endpoint to upload data.
 
     Args:
         endpoint: The studio endpoint
        graphql_req: The graphql request
+        verify: Whether to verify SSL
     """
     token = KeycloakTokenReader().get_token()
     res = requests.post(
@@ -325,6 +335,7 @@ def make_request(endpoint: str, graphql_req: Dict) -> StudioResult:
             "Authorization": f"{token.token_type} {token.access_token}",
             "Content-Type": "application/json",
         },
+        verify=verify,
     )
 
     if results_logger.response_has_errors(res.json()):
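A hedged sketch of how the new verify flag threads through the upload helpers (signatures are taken from this diff; the GraphQL query is a placeholder and a valid Studio login is assumed):

from rasa.studio.config import StudioConfig
from rasa.studio.upload import is_auth_working, make_request

studio_config = StudioConfig.read_config()
endpoint = studio_config.studio_url
verify = not studio_config.disable_verify  # same derivation as handle_upload

if endpoint and is_auth_working(endpoint, verify):
    result = make_request(
        endpoint,
        {"query": "query { __typename }", "variables": {}},  # placeholder query
        verify,
    )
    print(result.was_successful)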
rasa/telemetry.py
CHANGED
@@ -35,9 +35,9 @@ from rasa.constants import (
 from rasa.shared.constants import (
     PROMPT_CONFIG_KEY,
     PROMPT_TEMPLATE_CONFIG_KEY,
-    MODEL_GROUP_CONFIG_KEY,
     LLM_API_HEALTH_CHECK_ENV_VAR,
     LLM_API_HEALTH_CHECK_DEFAULT_VALUE,
+    MODEL_GROUP_CONFIG_KEY,
 )
 from rasa.engine.storage.local_model_storage import LocalModelStorage
 from rasa.shared.constants import DOCS_URL_TELEMETRY, UTTER_ASK_PREFIX
@@ -112,6 +112,7 @@ TELEMETRY_INTERACTIVE_LEARNING_STARTED_EVENT = "Interactive Learning Started"
 TELEMETRY_SERVER_STARTED_EVENT = "Server Started"
 TELEMETRY_PROJECT_CREATED_EVENT = "Project Created"
 TELEMETRY_SHELL_STARTED_EVENT = "Shell Started"
+TELEMETRY_INSPECT_STARTED_EVENT = "Inspect Started"
 TELEMETRY_VISUALIZATION_STARTED_EVENT = "Story Visualization Started"
 TELEMETRY_TEST_CORE_EVENT = "Model Core Tested"
 TELEMETRY_TEST_NLU_EVENT = "Model NLU Tested"
@@ -1132,6 +1133,7 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
 def extract_llm_command_generator_llm_client_settings(component: Dict) -> Dict:
     """Extracts settings related to LLM command generator."""
     llm_config = component.get(LLM_CONFIG_KEY, {})
+    # Config at this stage is not yet resolved, so read from `model_group`
     llm_model_group_id = llm_config.get(MODEL_GROUP_CONFIG_KEY)
     llm_model_name = llm_config.get(MODEL_CONFIG_KEY) or llm_config.get(
         MODEL_NAME_CONFIG_KEY
@@ -1173,6 +1175,7 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
         if flow_retrieval_enabled
         else None
     )
+    # Config at this stage is not yet resolved, so read from `model_group`
     flow_retrieval_embedding_model_group_id = embeddings_config.get(
         MODEL_GROUP_CONFIG_KEY
     )
@@ -1378,6 +1381,17 @@ def track_shell_started(model_type: Text) -> None:
     _track(TELEMETRY_SHELL_STARTED_EVENT, {"type": model_type})
 
 
+@ensure_telemetry_enabled
+def track_inspect_started(model_type: Text) -> None:
+    """Track when a user starts a bot using rasa inspect.
+
+    Args:
+        channel: Channel name `socketio` (used for chat assistants)
+            or `browser_audio` (used for voice).
+    """
+    _track(TELEMETRY_INSPECT_STARTED_EVENT, {"type": model_type})
+
+
 @ensure_telemetry_enabled
 def track_visualization() -> None:
     """Track when a user runs the visualization."""
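Sketch of the new event in use (the two channel values come from the docstring above; the event only fires when telemetry is enabled):

from rasa.telemetry import track_inspect_started

track_inspect_started("socketio")       # rasa inspect for chat assistants
track_inspect_started("browser_audio")  # rasa inspect for voice assistants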
rasa/tracing/config.py
CHANGED
@@ -33,6 +33,7 @@ from rasa.dialogue_understanding.generator import (
     SingleStepLLMCommandGenerator,
     MultiStepLLMCommandGenerator,
 )
+from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
 from rasa.dialogue_understanding.generator.nlu_command_adapter import NLUCommandAdapter
 from rasa.engine.graph import GraphNode
 from rasa.engine.training.graph_trainer import GraphTrainer
@@ -111,6 +112,7 @@ def configure_tracing(tracer_provider: Optional[TracerProvider]) -> None:
         single_step_llm_command_generator_class=SingleStepLLMCommandGenerator,
         multi_step_llm_command_generator_class=MultiStepLLMCommandGenerator,
         custom_action_executor_subclasses=custom_action_executor_subclasses,
+        flow_retrieval_class=FlowRetrieval,
     )
 
 
@@ -129,7 +131,7 @@ def get_tracer_provider(endpoints_file: Text) -> Optional[TracerProvider]:
 
     if not cfg:
         logger.info(
-            f"No endpoint for tracing type available in {endpoints_file},"
+            f"No endpoint for tracing type available in {endpoints_file}, "
             f"tracing will not be configured."
         )
         return None
rasa/tracing/instrumentation/attribute_extractors.py
CHANGED
@@ -414,6 +414,26 @@ def extract_attrs_for_generate(
     }
 
 
+def extract_attrs_for_performing_health_check(
+    custom_config: Optional[Dict[str, Any]],
+    default_config: Dict[str, Any],
+    log_source_method: str,
+    log_source_component: str,
+) -> Dict[str, Any]:
+    from rasa.shared.utils.health_check.health_check import is_api_health_check_enabled
+
+    attrs = {
+        "api_health_check_enabled": is_api_health_check_enabled(),
+        "health_check_trigger_component": log_source_component,
+        "health_check_trigger_method": log_source_method,
+    }
+    if is_api_health_check_enabled():
+        attrs["config"] = json.dumps(
+            combine_custom_and_default_config(custom_config, default_config)
+        )
+    return attrs
+
+
 def extract_attrs_for_execute_commands(
     tracker: DialogueStateTracker,
     all_flows: FlowsList,