rasa-pro 3.13.0.dev20250613__py3-none-any.whl → 3.13.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of rasa-pro might be problematic.
- rasa/cli/e2e_test.py +0 -7
- rasa/cli/export.py +2 -0
- rasa/cli/project_templates/tutorial/config.yml +1 -1
- rasa/cli/project_templates/tutorial/endpoints.yml +1 -1
- rasa/cli/studio/download.py +1 -23
- rasa/cli/studio/link.py +1 -2
- rasa/cli/studio/pull.py +3 -2
- rasa/cli/studio/push.py +1 -1
- rasa/cli/studio/train.py +0 -1
- rasa/core/channels/__init__.py +2 -0
- rasa/core/channels/development_inspector.py +1 -1
- rasa/core/channels/facebook.py +1 -4
- rasa/core/channels/inspector/README.md +3 -3
- rasa/core/channels/inspector/dist/assets/{arc-c4b064fc.js → arc-371401b1.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{blockDiagram-38ab4fdb-215b5026.js → blockDiagram-38ab4fdb-3f126156.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{c4Diagram-3d4e48cf-2b54a0a3.js → c4Diagram-3d4e48cf-12f22eb7.js} +1 -1
- rasa/core/channels/inspector/dist/assets/channel-f1efda17.js +1 -0
- rasa/core/channels/inspector/dist/assets/{classDiagram-70f12bd4-daacea5f.js → classDiagram-70f12bd4-03b1d386.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{classDiagram-v2-f2320105-930d4dc2.js → classDiagram-v2-f2320105-84f69d63.js} +1 -1
- rasa/core/channels/inspector/dist/assets/clone-fdf164e2.js +1 -0
- rasa/core/channels/inspector/dist/assets/{createText-2e5e7dd3-83c206ba.js → createText-2e5e7dd3-ca47fd38.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{edges-e0da2a9e-b0eb01d0.js → edges-e0da2a9e-f837ca8a.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{erDiagram-9861fffd-17586500.js → erDiagram-9861fffd-8717ac54.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{flowDb-956e92f1-be2a1776.js → flowDb-956e92f1-94f38b83.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{flowDiagram-66a62f08-c2120ebd.js → flowDiagram-66a62f08-b616f9fb.js} +1 -1
- rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-7d7a1629.js +1 -0
- rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-4a651766-a6ab5c48.js → flowchart-elk-definition-4a651766-f5d24bb8.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{ganttDiagram-c361ad54-ef613457.js → ganttDiagram-c361ad54-b43ba8d9.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-72cf32ee-d59185b3.js → gitGraphDiagram-72cf32ee-c3aafaa5.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{graph-0f155405.js → graph-0d0a2c10.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{index-3862675e-d5f1d1b7.js → index-3862675e-58ea0305.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{index-47737d3a.js → index-cce6f8a1.js} +3 -3
- rasa/core/channels/inspector/dist/assets/{infoDiagram-f8f76790-b07d141f.js → infoDiagram-f8f76790-b8f60461.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{journeyDiagram-49397b02-1936d429.js → journeyDiagram-49397b02-95be5545.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{layout-dde8d0f3.js → layout-da885b9b.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{line-0c2c7ee0.js → line-f1c817d3.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{linear-35dd89a4.js → linear-d42801e6.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{mindmap-definition-fc14e90a-56192851.js → mindmap-definition-fc14e90a-a38923a6.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{pieDiagram-8a3498a8-fc21ed78.js → pieDiagram-8a3498a8-ca6e71e9.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{quadrantDiagram-120e2f19-25e98518.js → quadrantDiagram-120e2f19-b290dae9.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{requirementDiagram-deff3bca-546ff1f5.js → requirementDiagram-deff3bca-03f02ceb.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{sankeyDiagram-04a897e0-02d8b82d.js → sankeyDiagram-04a897e0-c49eee40.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{sequenceDiagram-704730f1-3ca5a92e.js → sequenceDiagram-704730f1-b2cd6a3d.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{stateDiagram-587899a1-128ea07c.js → stateDiagram-587899a1-e53a2028.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-d93cdb3a-95f290af.js → stateDiagram-v2-d93cdb3a-e1982a03.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-6aaf32cf-4984898a.js → styles-6aaf32cf-d0226ca5.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-9a916d00-1bf266ba.js → styles-9a916d00-0e21dc00.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-c10674c1-60521c63.js → styles-c10674c1-9588494e.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{svgDrawCommon-08f97a94-a25b6e12.js → svgDrawCommon-08f97a94-be478d4f.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{timeline-definition-85554ec2-0fc086bf.js → timeline-definition-85554ec2-74631749.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{xychartDiagram-e933f94c-44ee592e.js → xychartDiagram-e933f94c-a043552f.js} +1 -1
- rasa/core/channels/inspector/dist/index.html +1 -1
- rasa/core/channels/inspector/src/components/RecruitmentPanel.tsx +1 -1
- rasa/core/channels/socketio.py +56 -41
- rasa/core/channels/studio_chat.py +311 -8
- rasa/core/channels/voice_ready/audiocodes.py +1 -1
- rasa/core/channels/voice_stream/asr/azure.py +9 -0
- rasa/core/channels/voice_stream/audiocodes.py +1 -1
- rasa/core/channels/voice_stream/browser_audio.py +1 -1
- rasa/core/channels/voice_stream/jambonz.py +166 -0
- rasa/core/channels/voice_stream/tts/__init__.py +8 -0
- rasa/core/channels/voice_stream/twilio_media_streams.py +7 -0
- rasa/core/channels/voice_stream/voice_channel.py +14 -5
- rasa/core/exporter.py +36 -0
- rasa/core/information_retrieval/faiss.py +18 -11
- rasa/core/information_retrieval/ingestion/faq_parser.py +158 -0
- rasa/core/nlg/contextual_response_rephraser.py +10 -1
- rasa/core/policies/enterprise_search_policy.py +152 -262
- rasa/core/policies/enterprise_search_policy_config.py +241 -0
- rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2 +6 -5
- rasa/core/policies/intentless_policy.py +47 -10
- rasa/core/utils.py +11 -2
- rasa/dialogue_understanding/coexistence/llm_based_router.py +9 -18
- rasa/dialogue_understanding/commands/__init__.py +4 -0
- rasa/dialogue_understanding/commands/cancel_flow_command.py +4 -2
- rasa/dialogue_understanding/commands/clarify_command.py +2 -2
- rasa/dialogue_understanding/commands/correct_slots_command.py +5 -6
- rasa/dialogue_understanding/commands/error_command.py +1 -1
- rasa/dialogue_understanding/commands/human_handoff_command.py +1 -3
- rasa/dialogue_understanding/commands/set_slot_command.py +4 -4
- rasa/dialogue_understanding/commands/skip_question_command.py +1 -3
- rasa/dialogue_understanding/commands/start_flow_command.py +3 -3
- rasa/dialogue_understanding/generator/command_generator.py +11 -1
- rasa/dialogue_understanding/generator/nlu_command_adapter.py +2 -2
- rasa/dialogue_understanding/generator/prompt_templates/command_prompt_template.jinja2 +0 -2
- rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_claude_3_5_sonnet_20240620_template.jinja2 +1 -0
- rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_gpt_4o_2024_11_20_template.jinja2 +1 -0
- rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v3_claude_3_5_sonnet_20240620_template.jinja2 +79 -0
- rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v3_gpt_4o_2024_11_20_template.jinja2 +1 -0
- rasa/dialogue_understanding/generator/single_step/search_ready_llm_command_generator.py +2 -2
- rasa/dialogue_understanding/generator/single_step/single_step_based_llm_command_generator.py +2 -18
- rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +8 -11
- rasa/dialogue_understanding/patterns/cancel.py +1 -2
- rasa/dialogue_understanding/patterns/clarify.py +1 -1
- rasa/dialogue_understanding/patterns/correction.py +2 -2
- rasa/dialogue_understanding/processor/command_processor.py +8 -9
- rasa/dialogue_understanding/stack/utils.py +3 -1
- rasa/e2e_test/e2e_test_coverage_report.py +1 -1
- rasa/engine/graph.py +2 -2
- rasa/llm_fine_tuning/paraphrasing/conversation_rephraser.py +1 -5
- rasa/shared/constants.py +12 -0
- rasa/shared/core/command_payload_reader.py +1 -5
- rasa/shared/core/events.py +1 -3
- rasa/shared/core/flows/constants.py +2 -0
- rasa/shared/core/flows/flow.py +126 -12
- rasa/shared/core/flows/flows_list.py +18 -1
- rasa/shared/core/flows/steps/link.py +7 -2
- rasa/shared/core/flows/validation.py +25 -5
- rasa/shared/core/training_data/story_reader/yaml_story_reader.py +1 -4
- rasa/shared/providers/_configs/azure_openai_client_config.py +2 -2
- rasa/shared/providers/_configs/default_litellm_client_config.py +1 -1
- rasa/shared/providers/_configs/huggingface_local_embedding_client_config.py +1 -1
- rasa/shared/providers/_configs/openai_client_config.py +1 -1
- rasa/shared/providers/_configs/rasa_llm_client_config.py +1 -1
- rasa/shared/providers/_configs/self_hosted_llm_client_config.py +1 -1
- rasa/shared/providers/_configs/utils.py +0 -99
- rasa/shared/utils/common.py +1 -1
- rasa/shared/utils/configs.py +110 -0
- rasa/shared/utils/constants.py +0 -3
- rasa/shared/utils/llm.py +37 -6
- rasa/shared/utils/pykwalify_extensions.py +0 -9
- rasa/studio/constants.py +1 -0
- rasa/studio/data_handler.py +8 -1
- rasa/studio/download.py +167 -0
- rasa/studio/link.py +1 -1
- rasa/studio/prompts.py +223 -0
- rasa/studio/pull/__init__.py +0 -0
- rasa/studio/{download/flows.py → pull/data.py} +2 -131
- rasa/studio/{download → pull}/domains.py +1 -1
- rasa/studio/pull/pull.py +235 -0
- rasa/studio/push.py +5 -0
- rasa/studio/train.py +1 -1
- rasa/tracing/instrumentation/attribute_extractors.py +20 -6
- rasa/utils/common.py +11 -0
- rasa/version.py +1 -1
- {rasa_pro-3.13.0.dev20250613.dist-info → rasa_pro-3.13.0rc1.dist-info}/METADATA +4 -4
- {rasa_pro-3.13.0.dev20250613.dist-info → rasa_pro-3.13.0rc1.dist-info}/RECORD +141 -134
- rasa/core/channels/inspector/dist/assets/channel-3730f5fd.js +0 -1
- rasa/core/channels/inspector/dist/assets/clone-e847561e.js +0 -1
- rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-efbbfe00.js +0 -1
- rasa/studio/download/download.py +0 -416
- rasa/studio/pull.py +0 -94
- /rasa/{studio/download → core/information_retrieval/ingestion}/__init__.py +0 -0
- {rasa_pro-3.13.0.dev20250613.dist-info → rasa_pro-3.13.0rc1.dist-info}/NOTICE +0 -0
- {rasa_pro-3.13.0.dev20250613.dist-info → rasa_pro-3.13.0rc1.dist-info}/WHEEL +0 -0
- {rasa_pro-3.13.0.dev20250613.dist-info → rasa_pro-3.13.0rc1.dist-info}/entry_points.txt +0 -0
rasa/core/policies/enterprise_search_policy_config.py
ADDED
@@ -0,0 +1,241 @@
+from __future__ import annotations
+
+from dataclasses import asdict, dataclass
+from typing import Any, Dict, List, Optional
+
+import structlog
+
+from rasa.core.constants import (
+    POLICY_MAX_HISTORY,
+    POLICY_PRIORITY,
+    SEARCH_POLICY_PRIORITY,
+)
+from rasa.shared.constants import (
+    EMBEDDINGS_CONFIG_KEY,
+    LLM_CONFIG_KEY,
+    MAX_COMPLETION_TOKENS_CONFIG_KEY,
+    MAX_RETRIES_CONFIG_KEY,
+    MODEL_CONFIG_KEY,
+    OPENAI_PROVIDER,
+    PROMPT_CONFIG_KEY,
+    PROMPT_TEMPLATE_CONFIG_KEY,
+    PROVIDER_CONFIG_KEY,
+    TEMPERATURE_CONFIG_KEY,
+    TIMEOUT_CONFIG_KEY,
+)
+from rasa.shared.utils.configs import (
+    raise_deprecation_warnings,
+    resolve_aliases,
+    validate_forbidden_keys,
+    validate_required_keys,
+)
+from rasa.shared.utils.llm import (
+    DEFAULT_ENTERPRISE_SEARCH_POLICY_MODEL_NAME,
+    DEFAULT_OPENAI_EMBEDDING_MODEL_NAME,
+    resolve_model_client_config,
+)
+
+structlogger = structlog.get_logger()
+
+
+SOURCE_PROPERTY = "source"
+VECTOR_STORE_TYPE_PROPERTY = "type"
+VECTOR_STORE_PROPERTY = "vector_store"
+VECTOR_STORE_THRESHOLD_PROPERTY = "threshold"
+TRACE_TOKENS_PROPERTY = "trace_prompt_tokens"
+CITATION_ENABLED_PROPERTY = "citation_enabled"
+USE_LLM_PROPERTY = "use_generative_llm"
+CHECK_RELEVANCY_PROPERTY = "check_relevancy"
+MAX_MESSAGES_IN_QUERY_KEY = "max_messages_in_query"
+
+DEFAULT_VECTOR_STORE_TYPE = "faiss"
+DEFAULT_VECTOR_STORE_THRESHOLD = 0.0
+DEFAULT_VECTOR_STORE = {
+    VECTOR_STORE_TYPE_PROPERTY: DEFAULT_VECTOR_STORE_TYPE,
+    SOURCE_PROPERTY: "./docs",
+    VECTOR_STORE_THRESHOLD_PROPERTY: DEFAULT_VECTOR_STORE_THRESHOLD,
+}
+
+DEFAULT_CHECK_RELEVANCY_PROPERTY = False
+DEFAULT_USE_LLM_PROPERTY = True
+DEFAULT_CITATION_ENABLED_PROPERTY = False
+DEFAULT_TRACE_PROMPT_TOKEN_PROPERTY = False
+
+DEFAULT_MAX_MESSAGES_IN_QUERY = 2
+
+DEFAULT_LLM_CONFIG = {
+    PROVIDER_CONFIG_KEY: OPENAI_PROVIDER,
+    MODEL_CONFIG_KEY: DEFAULT_ENTERPRISE_SEARCH_POLICY_MODEL_NAME,
+    TIMEOUT_CONFIG_KEY: 10,
+    TEMPERATURE_CONFIG_KEY: 0.0,
+    MAX_COMPLETION_TOKENS_CONFIG_KEY: 256,
+    MAX_RETRIES_CONFIG_KEY: 1,
+}
+
+DEFAULT_EMBEDDINGS_CONFIG = {
+    PROVIDER_CONFIG_KEY: OPENAI_PROVIDER,
+    MODEL_CONFIG_KEY: DEFAULT_OPENAI_EMBEDDING_MODEL_NAME,
+}
+
+DEFAULT_ENTERPRISE_SEARCH_CONFIG = {
+    POLICY_PRIORITY: SEARCH_POLICY_PRIORITY,
+    VECTOR_STORE_PROPERTY: DEFAULT_VECTOR_STORE,
+}
+
+REQUIRED_KEYS: List[str] = []
+
+FORBIDDEN_KEYS: List[str] = []
+
+DEPRECATED_ALIASES_TO_STANDARD_KEY_MAPPING = {
+    PROMPT_CONFIG_KEY: PROMPT_TEMPLATE_CONFIG_KEY
+}
+
+
+@dataclass
+class EnterpriseSearchPolicyConfig:
+    """Parses configuration for Enterprise Search Policy."""
+
+    # TODO: llm_config, embeddings_config, and vector_store_config should also be parsed
+    # as "Config" objects. Likely part of a broader Rasa 4.0 rewrite where all
+    # components rely on configuration parser. So, for example, llm_config and
+    # embeddings_config should be parsed as ClientConfig objects, and
+    # vector_store_config parsed as VectorStoreConfig object.
+    llm_config: dict
+    embeddings_config: dict
+    vector_store_config: dict
+
+    prompt_template: str
+
+    use_generative_llm: bool = DEFAULT_USE_LLM_PROPERTY
+    enable_citation: bool = DEFAULT_CITATION_ENABLED_PROPERTY
+    check_relevancy: bool = DEFAULT_CHECK_RELEVANCY_PROPERTY
+
+    max_history: Optional[int] = None
+    max_messages_in_query: int = DEFAULT_MAX_MESSAGES_IN_QUERY
+    trace_prompt_tokens: bool = DEFAULT_TRACE_PROMPT_TOKEN_PROPERTY
+
+    @property
+    def vector_store_type(self) -> str:
+        # TODO: In the future this should ideally be part of the Vector config
+        # and not the property of the EnterpriseSearch config
+        return (
+            self.vector_store_config.get(VECTOR_STORE_TYPE_PROPERTY)
+            or DEFAULT_VECTOR_STORE_TYPE
+        )
+
+    @property
+    def vector_store_threshold(self) -> float:
+        # TODO: In the future this should ideally be part of the Vector config
+        # and not the property of the EnterpriseSearch config
+        return (
+            self.vector_store_config.get(VECTOR_STORE_THRESHOLD_PROPERTY)
+            or DEFAULT_VECTOR_STORE_THRESHOLD
+        )
+
+    @property
+    def vector_store_source(self) -> Optional[str]:
+        # TODO: In the future this should ideally be part of the Vector config
+        # and not the property of the EnterpriseSearch config
+        return self.vector_store_config.get(SOURCE_PROPERTY)
+
+    def __post_init__(self) -> None:
+        if self.check_relevancy and not self.use_generative_llm:
+            structlogger.warning(
+                "enterprise_search_policy"
+                ".relevancy_check_enabled_with_disabled_generative_search",
+                event_info=(
+                    f"The config parameter '{CHECK_RELEVANCY_PROPERTY}' is set to"
+                    f"'True', but the generative search is disabled (config"
+                    f"parameter '{USE_LLM_PROPERTY}' is set to 'False'). As a result, "
+                    "the relevancy check for the generative search will be disabled. "
+                    f"To use this check, set the config parameter '{USE_LLM_PROPERTY}' "
+                    f"to `True`."
+                ),
+            )
+        if self.enable_citation and not self.use_generative_llm:
+            structlogger.warning(
+                "enterprise_search_policy"
+                ".citation_enabled_with_disabled_generative_search",
+                event_info=(
+                    f"The config parameter '{CITATION_ENABLED_PROPERTY}' is set to"
+                    f"'True', but the generative search is disabled (config"
+                    f"parameter '{USE_LLM_PROPERTY}' is set to 'False'). As a result, "
+                    "the citation for the generative search will be disabled. "
+                    f"To use this check, set the config parameter '{USE_LLM_PROPERTY}' "
+                    f"to `True`."
+                ),
+            )
+
+    @classmethod
+    def from_dict(cls, config: dict) -> EnterpriseSearchPolicyConfig:
+        """Initializes a dataclass from the passed config.
+
+        Args:
+            config: (dict) The config from which to initialize.
+
+        Raises:
+            ValueError: Config is missing required keys.
+
+        Returns:
+            AzureOpenAIClientConfig
+        """
+        # Resolve LLM config
+        llm_config = (
+            resolve_model_client_config(
+                config.get(LLM_CONFIG_KEY), EnterpriseSearchPolicyConfig.__name__
+            )
+            or DEFAULT_LLM_CONFIG
+        )
+
+        # Resolve embeddings config
+        embeddings_config = (
+            resolve_model_client_config(
+                config.get(EMBEDDINGS_CONFIG_KEY), EnterpriseSearchPolicyConfig.__name__
+            )
+            or DEFAULT_EMBEDDINGS_CONFIG
+        )
+
+        # Vector store config
+        vector_store_config = config.get(VECTOR_STORE_PROPERTY, DEFAULT_VECTOR_STORE)
+
+        # Check for deprecated keys
+        raise_deprecation_warnings(
+            config, DEPRECATED_ALIASES_TO_STANDARD_KEY_MAPPING, "EnterpriseSearchPolicy"
+        )
+        # Resolve any potential aliases (e.g. 'prompt_template' vs 'prompt')
+        config = cls.resolve_config_aliases(config)
+
+        # Validate that the required keys are present
+        validate_required_keys(config, REQUIRED_KEYS)
+        # Validate that the forbidden keys are not present
+        validate_forbidden_keys(config, FORBIDDEN_KEYS)
+
+        this = EnterpriseSearchPolicyConfig(
+            llm_config=llm_config,
+            embeddings_config=embeddings_config,
+            vector_store_config=vector_store_config,
+            prompt_template=config.get(PROMPT_TEMPLATE_CONFIG_KEY),
+            use_generative_llm=config.get(USE_LLM_PROPERTY, DEFAULT_USE_LLM_PROPERTY),
+            enable_citation=config.get(
+                CITATION_ENABLED_PROPERTY, DEFAULT_CITATION_ENABLED_PROPERTY
+            ),
+            check_relevancy=config.get(
+                CHECK_RELEVANCY_PROPERTY, DEFAULT_CHECK_RELEVANCY_PROPERTY
+            ),
+            max_history=config.get(POLICY_MAX_HISTORY),
+            max_messages_in_query=config.get(
+                MAX_MESSAGES_IN_QUERY_KEY, DEFAULT_MAX_MESSAGES_IN_QUERY
+            ),
+            trace_prompt_tokens=config.get(
+                TRACE_TOKENS_PROPERTY, DEFAULT_TRACE_PROMPT_TOKEN_PROPERTY
+            ),
+        )
+        return this
+
+    def to_dict(self) -> dict:
+        """Converts the config instance into a dictionary."""
+        return asdict(self)
+
+    @staticmethod
+    def resolve_config_aliases(config: Dict[str, Any]) -> Dict[str, Any]:
+        return resolve_aliases(config, DEPRECATED_ALIASES_TO_STANDARD_KEY_MAPPING)
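
The new EnterpriseSearchPolicyConfig centralizes the policy's configuration parsing. As a rough sketch (not part of the diff, assuming rasa-pro 3.13.0rc1 is installed), this is how the parser maps a policy config dict onto the dataclass and its derived vector-store properties:

from rasa.core.policies.enterprise_search_policy_config import (
    EnterpriseSearchPolicyConfig,
)

# Hypothetical policy config dict; keys match the constants defined in the module above.
raw_config = {
    "vector_store": {"type": "faiss", "source": "./docs", "threshold": 0.0},
    "use_generative_llm": True,
    "citation_enabled": False,
}

es_config = EnterpriseSearchPolicyConfig.from_dict(raw_config)
print(es_config.vector_store_type)      # "faiss"
print(es_config.vector_store_source)    # "./docs"
print(es_config.max_messages_in_query)  # 2 (default)
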
rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2
CHANGED
@@ -1,5 +1,9 @@
-
-
+Based on the provided documents and the recent conversation context, answer the following question.
+Before responding, ensure the answer is directly supported by the documents or context.
+Do not make assumptions or infer beyond the given information.
+Only answer if you are more than 80% confident that the response is fully supported.
+If the answer cannot be determined, respond with: [NO_RAG_ANSWER]
+
 ### Relevant Documents
 Use the following documents to answer the question:
 {% for doc in docs %}
@@ -57,7 +61,4 @@ Avoid speculating or making assumptions beyond the given information and keep yo
 If you are unable to find an answer in the given relevant documents, do not cite sources from elsewhere in the conversation context.
 {% endif %}
 
-{% if check_relevancy %}
-If answer is not relevant output: "[NO_RELEVANT_ANSWER_FOUND]"
-{% endif %}
 Your answer:
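
With this rewrite the prompt itself instructs the model to answer with the literal sentinel [NO_RAG_ANSWER] when the documents do not support an answer, replacing the old check_relevancy-gated [NO_RELEVANT_ANSWER_FOUND] block. A minimal sketch of branching on that sentinel (illustrative only; the real handling sits in the reworked enterprise_search_policy.py):

from typing import Optional

NO_RAG_ANSWER_SENTINEL = "[NO_RAG_ANSWER]"  # literal string from the template above

def answer_or_fallback(llm_answer: str) -> Optional[str]:
    """Return the generated answer, or None when the model signalled no supported answer."""
    if NO_RAG_ANSWER_SENTINEL in llm_answer:
        return None  # caller falls back to its no-answer handling
    return llm_answer.strip()
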
rasa/core/policies/intentless_policy.py
CHANGED
@@ -38,6 +38,7 @@ from rasa.shared.constants import (
     MODEL_NAME_CONFIG_KEY,
     OPENAI_PROVIDER,
     PROMPT_CONFIG_KEY,
+    PROMPT_TEMPLATE_CONFIG_KEY,
     PROVIDER_CONFIG_KEY,
     TEMPERATURE_CONFIG_KEY,
     TIMEOUT_CONFIG_KEY,
@@ -56,7 +57,10 @@ from rasa.shared.providers.embedding._langchain_embedding_client_adapter import
     _LangchainEmbeddingClientAdapter,
 )
 from rasa.shared.providers.llm.llm_client import LLMClient
-from rasa.shared.utils.constants import
+from rasa.shared.utils.constants import (
+    LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
+    LOG_COMPONENT_SOURCE_METHOD_INIT,
+)
 from rasa.shared.utils.health_check.embeddings_health_check_mixin import (
     EmbeddingsHealthCheckMixin,
 )
@@ -68,6 +72,7 @@ from rasa.shared.utils.llm import (
     DEFAULT_OPENAI_EMBEDDING_MODEL_NAME,
     DEFAULT_OPENAI_MAX_GENERATED_TOKENS,
     USER,
+    check_prompt_config_keys_and_warn_if_deprecated,
     combine_custom_and_default_config,
     embedder_factory,
     get_prompt_template,
@@ -119,9 +124,12 @@ DEFAULT_EMBEDDINGS_CONFIG = {
     MODEL_CONFIG_KEY: DEFAULT_OPENAI_EMBEDDING_MODEL_NAME,
 }
 
-
+DEFAULT_INTENTLESS_PROMPT_TEMPLATE_FILE_NAME = importlib.resources.open_text(
     "rasa.core.policies", "intentless_prompt_template.jinja2"
 ).name
+DEFAULT_INTENTLESS_PROMPT_TEMPLATE = importlib.resources.read_text(
+    "rasa.core.policies", "intentless_prompt_template.jinja2"
+)
 
 INTENTLESS_PROMPT_TEMPLATE_FILE_NAME = "intentless_policy_prompt.jinja2"
 INTENTLESS_CONFIG_FILE_NAME = "config.json"
@@ -345,7 +353,7 @@ class IntentlessPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Policy):
         # ensures that the policy will not override a deterministic policy
         # which utilizes the nlu predictions confidence (e.g. Memoization).
         NLU_ABSTENTION_THRESHOLD: 0.9,
-
+        PROMPT_TEMPLATE_CONFIG_KEY: None,  # TODO: remove in Rasa 4.0.0
     }
 
     @staticmethod
@@ -402,11 +410,43 @@ class IntentlessPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Policy):
         self.response_index = responses_docsearch
         self.conversation_samples_index = samples_docsearch
         self.embedder = self._create_plain_embedder(config)
-
-
+
+        # Warn if the prompt config key is used to set the prompt template
+        check_prompt_config_keys_and_warn_if_deprecated(config, "intentless_policy")
+
+        self.prompt_template = prompt_template or self._resolve_prompt_template(
+            config, LOG_COMPONENT_SOURCE_METHOD_INIT
         )
         self.trace_prompt_tokens = self.config.get("trace_prompt_tokens", False)
 
+    @classmethod
+    def _resolve_prompt_template(
+        cls: Any,
+        config: dict,
+        log_source_method: str,
+    ) -> str:
+        """Resolves the prompt template from the config.
+
+        Args:
+            config: The config to resolve the prompt template from.
+            log_source_method: The method from which the prompt template is resolved.
+
+        Returns:
+            The resolved prompt template.
+        """
+        # Prefer prompt template over prompt config key.
+        prompt_template_file = (
+            config.get(PROMPT_TEMPLATE_CONFIG_KEY)
+            or config.get(PROMPT_CONFIG_KEY)
+            or DEFAULT_INTENTLESS_PROMPT_TEMPLATE_FILE_NAME
+        )
+        return get_prompt_template(
+            prompt_template_file,
+            DEFAULT_INTENTLESS_PROMPT_TEMPLATE,
+            log_source_component=IntentlessPolicy.__name__,
+            log_source_method=log_source_method,
+        )
+
     @classmethod
     def _create_plain_embedder(cls, config: Dict[Text, Any]) -> Embeddings:
         """Creates an embedder that uses the OpenAI API.
@@ -945,11 +985,8 @@ class IntentlessPolicy(LLMHealthCheckMixin, EmbeddingsHealthCheckMixin, Policy):
     @classmethod
     def fingerprint_addon(cls, config: Dict[str, Any]) -> Optional[str]:
         """Add a fingerprint of intentless policy for the graph."""
-        prompt_template =
-            config
-            DEFAULT_INTENTLESS_PROMPT_TEMPLATE,
-            log_source_component=IntentlessPolicy.__name__,
-            log_source_method=LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
+        prompt_template = cls._resolve_prompt_template(
+            config, LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON
         )
 
         llm_config = resolve_model_client_config(
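
The new _resolve_prompt_template helper makes the template lookup order explicit: prompt_template first, then the deprecated prompt key, then the bundled default. A small self-contained sketch of that precedence (the key names come from the hunk above; the stand-in constants are illustrative):

from typing import Any, Dict

PROMPT_TEMPLATE_CONFIG_KEY = "prompt_template"  # stand-in for rasa.shared.constants
PROMPT_CONFIG_KEY = "prompt"                    # deprecated alias

def pick_prompt_template_file(config: Dict[str, Any], default_file: str) -> str:
    """Mirrors the precedence used by IntentlessPolicy._resolve_prompt_template."""
    return (
        config.get(PROMPT_TEMPLATE_CONFIG_KEY)
        or config.get(PROMPT_CONFIG_KEY)
        or default_file
    )

assert pick_prompt_template_file({}, "default.jinja2") == "default.jinja2"
assert pick_prompt_template_file({"prompt": "old.jinja2"}, "default.jinja2") == "old.jinja2"
assert pick_prompt_template_file(
    {"prompt": "old.jinja2", "prompt_template": "new.jinja2"}, "default.jinja2"
) == "new.jinja2"
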
rasa/core/utils.py
CHANGED
@@ -318,16 +318,25 @@ def should_force_slot_filling(
     and the name of the slot if applicable.
     """
     from rasa.dialogue_understanding.processor.command_processor import (
+        find_updated_flows,
         get_current_collect_step,
     )
 
     if tracker is None:
-        structlogger.
-            "slot.force_slot_filling.
+        structlogger.debug(
+            "slot.force_slot_filling.no_found_tracker",
             event_info="Tracker is None. Cannot force slot filling.",
         )
         return False, None
 
+    updated_flows = find_updated_flows(tracker, flows)
+    if updated_flows:
+        structlogger.debug(
+            "slot.force_slot_filling.running_flows_were_updated",
+            updated_flow_ids=updated_flows,
+        )
+        return False, None
+
     stack = tracker.stack
     step = get_current_collect_step(stack, flows)
     if step is None or not step.force_slot_filling:
rasa/dialogue_understanding/coexistence/llm_based_router.py
CHANGED
@@ -15,19 +15,17 @@ from rasa.dialogue_understanding.coexistence.constants import (
 )
 from rasa.dialogue_understanding.commands import Command, SetSlotCommand
 from rasa.dialogue_understanding.commands.noop_command import NoopCommand
-from rasa.dialogue_understanding.generator.constants import
-    LLM_CONFIG_KEY,
-)
+from rasa.dialogue_understanding.generator.constants import LLM_CONFIG_KEY
 from rasa.engine.graph import ExecutionContext, GraphComponent
 from rasa.engine.recipes.default_recipe import DefaultV1Recipe
 from rasa.engine.storage.resource import Resource
 from rasa.engine.storage.storage import ModelStorage
 from rasa.shared.constants import (
-    LOGIT_BIAS_CONFIG_KEY,
     MAX_COMPLETION_TOKENS_CONFIG_KEY,
     MODEL_CONFIG_KEY,
     OPENAI_PROVIDER,
     PROMPT_CONFIG_KEY,
+    PROMPT_TEMPLATE_CONFIG_KEY,
     PROVIDER_CONFIG_KEY,
     ROUTE_TO_CALM_SLOT,
     TEMPERATURE_CONFIG_KEY,
@@ -46,6 +44,7 @@ from rasa.shared.utils.health_check.llm_health_check_mixin import LLMHealthCheck
 from rasa.shared.utils.io import deep_container_fingerprint
 from rasa.shared.utils.llm import (
     DEFAULT_OPENAI_CHAT_MODEL_NAME,
+    check_prompt_config_keys_and_warn_if_deprecated,
     get_prompt_template,
     llm_factory,
     resolve_model_client_config,
@@ -58,22 +57,12 @@ DEFAULT_COMMAND_PROMPT_TEMPLATE = importlib.resources.read_text(
 )
 LLM_BASED_ROUTER_CONFIG_FILE_NAME = "config.json"
 
-# Token ids for gpt 3.5 and gpt 4 corresponding to space + capitalized Letter
-A_TO_C_TOKEN_IDS_CHATGPT = [
-    362,  # " A"
-    426,  # " B"
-    356,  # " C"
-]
-
 DEFAULT_LLM_CONFIG = {
     PROVIDER_CONFIG_KEY: OPENAI_PROVIDER,
     MODEL_CONFIG_KEY: DEFAULT_OPENAI_CHAT_MODEL_NAME,
     TIMEOUT_CONFIG_KEY: 7,
     TEMPERATURE_CONFIG_KEY: 0.0,
     MAX_COMPLETION_TOKENS_CONFIG_KEY: 1,
-    LOGIT_BIAS_CONFIG_KEY: {
-        str(token_id): 100 for token_id in A_TO_C_TOKEN_IDS_CHATGPT
-    },
 }
 
 structlogger = structlog.get_logger()
@@ -90,7 +79,7 @@ class LLMBasedRouter(LLMHealthCheckMixin, GraphComponent):
     def get_default_config() -> Dict[str, Any]:
         """The component's default config (see parent class for full docstring)."""
         return {
-
+            PROMPT_TEMPLATE_CONFIG_KEY: None,  # TODO: remove in Rasa 4.0.0
             CALM_ENTRY: {STICKY: None},
             NLU_ENTRY: {
                 NON_STICKY: "handles chitchat",
@@ -111,10 +100,13 @@ class LLMBasedRouter(LLMHealthCheckMixin, GraphComponent):
             self.config.get(LLM_CONFIG_KEY), LLMBasedRouter.__name__
         )
 
+        # Warn if the prompt config key is used to set the prompt template
+        check_prompt_config_keys_and_warn_if_deprecated(config, "llm_based_router")
+
         self.prompt_template = (
             prompt_template
             or get_prompt_template(
-                config.get(PROMPT_CONFIG_KEY),
+                config.get(PROMPT_TEMPLATE_CONFIG_KEY) or config.get(PROMPT_CONFIG_KEY),
                 DEFAULT_COMMAND_PROMPT_TEMPLATE,
                 log_source_component=LLMBasedRouter.__name__,
                 log_source_method=LOG_COMPONENT_SOURCE_METHOD_INIT,
@@ -171,7 +163,6 @@ class LLMBasedRouter(LLMHealthCheckMixin, GraphComponent):
         **kwargs: Any,
     ) -> "LLMBasedRouter":
         """Loads trained component (see parent class for full docstring)."""
-
         # Perform health check on the resolved LLM client config
         llm_config = resolve_model_client_config(config.get(LLM_CONFIG_KEY, {}))
         cls.perform_llm_health_check(
@@ -327,7 +318,7 @@ class LLMBasedRouter(LLMHealthCheckMixin, GraphComponent):
     def fingerprint_addon(cls, config: Dict[str, Any]) -> Optional[str]:
         """Add a fingerprint of llm based router for the graph."""
         prompt_template = get_prompt_template(
-            config.get(PROMPT_CONFIG_KEY),
+            config.get(PROMPT_TEMPLATE_CONFIG_KEY) or config.get(PROMPT_CONFIG_KEY),
             DEFAULT_COMMAND_PROMPT_TEMPLATE,
             log_source_component=LLMBasedRouter.__name__,
             log_source_method=LOG_COMPONENT_SOURCE_METHOD_FINGERPRINT_ADDON,
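
As with IntentlessPolicy above, LLMBasedRouter now reads prompt_template before falling back to the deprecated prompt key, and check_prompt_config_keys_and_warn_if_deprecated emits a warning when only the old key is present. In component config terms (a sketch; the template path is hypothetical):

# Deprecated spelling: still accepted in 3.13, but now logs a deprecation warning.
router_config_old = {"prompt": "prompts/router_prompt.jinja2"}

# Preferred spelling going forward.
router_config_new = {"prompt_template": "prompts/router_prompt.jinja2"}

# Lookup order used by both components (see the hunks above):
# config["prompt_template"] -> config["prompt"] -> bundled default template.
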
rasa/dialogue_understanding/commands/__init__.py
CHANGED
@@ -16,6 +16,9 @@ from rasa.dialogue_understanding.commands.error_command import ErrorCommand
 from rasa.dialogue_understanding.commands.free_form_answer_command import (
     FreeFormAnswerCommand,
 )
+from rasa.dialogue_understanding.commands.handle_code_change_command import (
+    HandleCodeChangeCommand,
+)
 from rasa.dialogue_understanding.commands.human_handoff_command import (
     HumanHandoffCommand,
 )
@@ -49,6 +52,7 @@ __all__ = [
     "SetSlotCommand",
     "StartFlowCommand",
     "HumanHandoffCommand",
+    "HandleCodeChangeCommand",
     "CorrectSlotsCommand",
     "CorrectedSlot",
     "ErrorCommand",
rasa/dialogue_understanding/commands/cancel_flow_command.py
CHANGED
@@ -95,12 +95,14 @@ class CancelFlowCommand(Command):
         original_stack = original_tracker.stack
 
         applied_events: List[Event] = []
-        user_frame = top_user_flow_frame(
+        user_frame = top_user_flow_frame(
+            original_stack, ignore_call_and_link_frames=False
+        )
         current_flow = user_frame.flow(all_flows) if user_frame else None
 
         if not current_flow:
             structlogger.debug(
-                "
+                "cancel_command.skip_cancel_flow.no_active_flow", command=self
             )
             return []
 
rasa/dialogue_understanding/commands/clarify_command.py
CHANGED
@@ -65,14 +65,14 @@ class ClarifyCommand(Command):
         clean_options = [flow.id for flow in flows if flow is not None]
         if len(clean_options) != len(self.options):
             structlogger.debug(
-                "
+                "clarify_command.altered_command.dropped_clarification_options",
                 command=self,
                 original_options=self.options,
                 cleaned_options=clean_options,
             )
         if len(clean_options) == 0:
             structlogger.debug(
-                "
+                "clarify_command.skip_command.empty_clarification", command=self
             )
             return []
 
rasa/dialogue_understanding/commands/correct_slots_command.py
CHANGED
@@ -144,7 +144,8 @@ class CorrectSlotsCommand(Command):
             }
         else:
             structlogger.debug(
-                "
+                "correct_slots_command.skip_correction.slot_already_set",
+                command=self,
             )
         return proposed_slots
 
@@ -236,7 +237,7 @@ class CorrectSlotsCommand(Command):
             # previously set, and we also don't want to reset the slots, do
             # not correct the slots.
             structlogger.debug(
-                "
+                "correct_slots_command.skip_correction",
                 is_reset_only=is_reset_only,
             )
             return None
@@ -274,12 +275,10 @@ class CorrectSlotsCommand(Command):
             # we shouldn't end up here as a correction shouldn't be triggered
             # if we are not in any flow. but just in case we do, we
             # just skip the command.
-            structlogger.warning(
-                "command_executor.correct_slots.no_active_flow", command=self
-            )
+            structlogger.warning("correct_slots_command.no_active_flow")
             return []
 
-        structlogger.debug("
+        structlogger.debug("correct_slots_command", command=self)
         proposed_slots = self.corrected_slots_dict(tracker)
 
         correction_frame = self.create_correction_frame(
rasa/dialogue_understanding/commands/error_command.py
CHANGED
@@ -58,7 +58,7 @@ class ErrorCommand(Command):
             The events to apply to the tracker.
         """
         stack = tracker.stack
-        structlogger.debug("
+        structlogger.debug("error_command.error", command=self)
         stack.push(
             InternalErrorPatternFlowStackFrame(
                 error_type=self.error_type, info=self.info
rasa/dialogue_understanding/commands/human_handoff_command.py
CHANGED
@@ -57,9 +57,7 @@ class HumanHandoffCommand(Command):
         """
         stack = tracker.stack
         stack.push(HumanHandoffPatternFlowStackFrame())
-        structlogger.debug(
-            "command_executor.human_handoff.pushed_to_stack", command=self
-        )
+        structlogger.debug("human_handoff_command.pushed_to_stack", command=self)
         return tracker.create_stack_updated_events(stack)
 
     def __hash__(self) -> int:
rasa/dialogue_understanding/commands/set_slot_command.py
CHANGED
@@ -100,14 +100,14 @@ class SetSlotCommand(Command):
         slot = tracker.slots.get(self.name)
         if slot is None:
             structlogger.debug(
-                "
+                "set_slot_command.skip_command.slot_not_in_domain", command=self
             )
             return []
 
         if slot.has_same_coerced_value(self.value):
             # value hasn't changed, skip this one
             structlogger.debug(
-                "
+                "set_slot_command.skip_command.slot_already_set", command=self
             )
             return []
 
@@ -150,11 +150,11 @@ class SetSlotCommand(Command):
             )
         if not use_slot_fill:
             structlogger.debug(
-                "
+                "set_slot_command.skip_command.slot_not_asked_for", command=self
             )
             return []
 
-        structlogger.debug("
+        structlogger.debug("set_slot_command.set_slot", command=self)
         return [
             SlotSet(self.name, slot.coerce_value(self.value), filled_by=self.extractor)
         ]
rasa/dialogue_understanding/commands/skip_question_command.py
CHANGED
@@ -63,9 +63,7 @@ class SkipQuestionCommand(Command):
         current_flow = user_frame.flow(all_flows) if user_frame else None
 
         if not current_flow:
-            structlogger.debug(
-                "command_executor.skip_question.no_active_flow", command=self
-            )
+            structlogger.debug("skip_question_command.no_active_flow", command=self)
             return []
 
         stack.push(SkipQuestionPatternFlowStackFrame())
rasa/dialogue_understanding/commands/start_flow_command.py
CHANGED
@@ -73,12 +73,12 @@ class StartFlowCommand(Command):
 
         if self.flow in user_flows_on_the_stack(stack):
             structlogger.debug(
-                "
+                "start_flow_command.skip_command.already_started_flow", command=self
             )
             return []
         elif self.flow not in all_flows.flow_ids:
             structlogger.debug(
-                "
+                "start_flow_command.skip_command.start_invalid_flow_id", command=self
             )
             return []
 
@@ -99,7 +99,7 @@ class StartFlowCommand(Command):
             )
         )
 
-        structlogger.debug("
+        structlogger.debug("start_flow_command.start_flow", command=self)
         stack.push(UserFlowStackFrame(flow_id=self.flow, frame_type=frame_type))
         return applied_events + tracker.create_stack_updated_events(stack)
 
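
The command classes above rename their log events from the shared command_executor.* namespace to per-command prefixes (for example command_executor.human_handoff.pushed_to_stack becomes human_handoff_command.pushed_to_stack). For log pipelines keyed on the old names, a hedged sketch of a structlog processor that tags the renamed events with their legacy name (event names taken from the hunks; the processor itself is illustrative):

import structlog

# New 3.13.0rc1 event names -> pre-release names, taken from the hunks above.
RENAMED_EVENTS = {
    "human_handoff_command.pushed_to_stack": "command_executor.human_handoff.pushed_to_stack",
    "skip_question_command.no_active_flow": "command_executor.skip_question.no_active_flow",
    "correct_slots_command.no_active_flow": "command_executor.correct_slots.no_active_flow",
}

def tag_legacy_event_name(logger, method_name, event_dict):
    """Attach the pre-rename event name so existing log filters keep matching."""
    legacy = RENAMED_EVENTS.get(event_dict.get("event"))
    if legacy is not None:
        event_dict["legacy_event"] = legacy
    return event_dict

structlog.configure(
    processors=[tag_legacy_event_name, structlog.processors.JSONRenderer()]
)
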