rasa-pro 3.13.0.dev7__py3-none-any.whl → 3.13.0.dev8__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.



Files changed (150)
  1. rasa/__main__.py +0 -3
  2. rasa/api.py +1 -1
  3. rasa/cli/dialogue_understanding_test.py +1 -1
  4. rasa/cli/e2e_test.py +1 -1
  5. rasa/cli/evaluate.py +1 -1
  6. rasa/cli/export.py +1 -1
  7. rasa/cli/llm_fine_tuning.py +12 -11
  8. rasa/cli/project_templates/defaults.py +133 -0
  9. rasa/cli/run.py +1 -1
  10. rasa/cli/studio/link.py +53 -0
  11. rasa/cli/studio/pull.py +78 -0
  12. rasa/cli/studio/push.py +78 -0
  13. rasa/cli/studio/studio.py +12 -0
  14. rasa/cli/studio/upload.py +8 -0
  15. rasa/cli/train.py +1 -1
  16. rasa/cli/utils.py +1 -1
  17. rasa/cli/x.py +1 -1
  18. rasa/constants.py +2 -0
  19. rasa/core/__init__.py +0 -16
  20. rasa/core/actions/action.py +5 -1
  21. rasa/core/actions/action_repeat_bot_messages.py +18 -22
  22. rasa/core/actions/action_run_slot_rejections.py +0 -1
  23. rasa/core/agent.py +16 -1
  24. rasa/core/available_endpoints.py +146 -0
  25. rasa/core/brokers/pika.py +1 -2
  26. rasa/core/channels/botframework.py +2 -2
  27. rasa/core/channels/channel.py +2 -2
  28. rasa/core/channels/hangouts.py +8 -5
  29. rasa/core/channels/mattermost.py +1 -1
  30. rasa/core/channels/rasa_chat.py +2 -4
  31. rasa/core/channels/rest.py +5 -4
  32. rasa/core/channels/studio_chat.py +3 -2
  33. rasa/core/channels/vier_cvg.py +1 -2
  34. rasa/core/channels/voice_ready/audiocodes.py +1 -8
  35. rasa/core/channels/voice_stream/audiocodes.py +7 -4
  36. rasa/core/channels/voice_stream/genesys.py +2 -2
  37. rasa/core/channels/voice_stream/twilio_media_streams.py +10 -5
  38. rasa/core/channels/voice_stream/voice_channel.py +33 -22
  39. rasa/core/http_interpreter.py +3 -7
  40. rasa/core/jobs.py +2 -1
  41. rasa/core/nlg/contextual_response_rephraser.py +34 -9
  42. rasa/core/nlg/generator.py +0 -1
  43. rasa/core/nlg/interpolator.py +2 -3
  44. rasa/core/nlg/summarize.py +39 -5
  45. rasa/core/policies/enterprise_search_policy.py +283 -62
  46. rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_template.jinja2 +63 -0
  47. rasa/core/policies/flow_policy.py +1 -1
  48. rasa/core/policies/flows/flow_executor.py +96 -17
  49. rasa/core/policies/intentless_policy.py +9 -7
  50. rasa/core/processor.py +104 -51
  51. rasa/core/run.py +33 -11
  52. rasa/core/tracker_stores/tracker_store.py +1 -1
  53. rasa/core/training/interactive.py +1 -1
  54. rasa/core/utils.py +24 -97
  55. rasa/dialogue_understanding/coexistence/intent_based_router.py +2 -1
  56. rasa/dialogue_understanding/commands/can_not_handle_command.py +2 -0
  57. rasa/dialogue_understanding/commands/cancel_flow_command.py +2 -0
  58. rasa/dialogue_understanding/commands/chit_chat_answer_command.py +2 -0
  59. rasa/dialogue_understanding/commands/clarify_command.py +5 -1
  60. rasa/dialogue_understanding/commands/command_syntax_manager.py +1 -0
  61. rasa/dialogue_understanding/commands/human_handoff_command.py +2 -0
  62. rasa/dialogue_understanding/commands/knowledge_answer_command.py +4 -2
  63. rasa/dialogue_understanding/commands/repeat_bot_messages_command.py +2 -0
  64. rasa/dialogue_understanding/commands/set_slot_command.py +11 -1
  65. rasa/dialogue_understanding/commands/skip_question_command.py +2 -0
  66. rasa/dialogue_understanding/commands/start_flow_command.py +4 -0
  67. rasa/dialogue_understanding/commands/utils.py +26 -2
  68. rasa/dialogue_understanding/generator/__init__.py +7 -1
  69. rasa/dialogue_understanding/generator/command_generator.py +4 -2
  70. rasa/dialogue_understanding/generator/command_parser.py +2 -2
  71. rasa/dialogue_understanding/generator/command_parser_validator.py +63 -0
  72. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v2_gpt_4o_2024_11_20_template.jinja2 +12 -33
  73. rasa/dialogue_understanding/generator/prompt_templates/command_prompt_v3_gpt_4o_2024_11_20_template.jinja2 +78 -0
  74. rasa/dialogue_understanding/generator/single_step/compact_llm_command_generator.py +26 -461
  75. rasa/dialogue_understanding/generator/single_step/search_ready_llm_command_generator.py +147 -0
  76. rasa/dialogue_understanding/generator/single_step/single_step_based_llm_command_generator.py +477 -0
  77. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +8 -58
  78. rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +37 -25
  79. rasa/dialogue_understanding/patterns/domain_for_patterns.py +190 -0
  80. rasa/dialogue_understanding/processor/command_processor.py +3 -3
  81. rasa/dialogue_understanding/processor/command_processor_component.py +3 -3
  82. rasa/dialogue_understanding/stack/frames/flow_stack_frame.py +17 -4
  83. rasa/dialogue_understanding/utils.py +68 -12
  84. rasa/dialogue_understanding_test/du_test_case.py +1 -1
  85. rasa/dialogue_understanding_test/du_test_runner.py +4 -22
  86. rasa/dialogue_understanding_test/test_case_simulation/test_case_tracker_simulator.py +2 -6
  87. rasa/e2e_test/e2e_test_runner.py +1 -1
  88. rasa/engine/constants.py +1 -1
  89. rasa/engine/recipes/default_recipe.py +26 -2
  90. rasa/engine/validation.py +3 -2
  91. rasa/hooks.py +0 -28
  92. rasa/llm_fine_tuning/annotation_module.py +39 -9
  93. rasa/llm_fine_tuning/conversations.py +3 -0
  94. rasa/llm_fine_tuning/llm_data_preparation_module.py +66 -49
  95. rasa/llm_fine_tuning/paraphrasing/rephrase_validator.py +52 -44
  96. rasa/llm_fine_tuning/paraphrasing_module.py +10 -12
  97. rasa/llm_fine_tuning/storage.py +4 -4
  98. rasa/llm_fine_tuning/utils.py +63 -1
  99. rasa/model_manager/model_api.py +88 -0
  100. rasa/model_manager/trainer_service.py +4 -4
  101. rasa/plugin.py +1 -11
  102. rasa/privacy/__init__.py +0 -0
  103. rasa/privacy/constants.py +83 -0
  104. rasa/privacy/event_broker_utils.py +77 -0
  105. rasa/privacy/privacy_config.py +281 -0
  106. rasa/privacy/privacy_config_schema.json +86 -0
  107. rasa/privacy/privacy_filter.py +340 -0
  108. rasa/privacy/privacy_manager.py +576 -0
  109. rasa/server.py +23 -2
  110. rasa/shared/constants.py +3 -0
  111. rasa/shared/core/constants.py +4 -3
  112. rasa/shared/core/domain.py +7 -0
  113. rasa/shared/core/events.py +37 -7
  114. rasa/shared/core/flows/flow.py +1 -2
  115. rasa/shared/core/flows/flows_yaml_schema.json +3 -0
  116. rasa/shared/core/flows/steps/collect.py +46 -2
  117. rasa/shared/core/slots.py +28 -0
  118. rasa/shared/exceptions.py +4 -0
  119. rasa/shared/utils/llm.py +161 -6
  120. rasa/shared/utils/yaml.py +32 -0
  121. rasa/studio/data_handler.py +3 -3
  122. rasa/studio/download/download.py +37 -60
  123. rasa/studio/download/flows.py +23 -31
  124. rasa/studio/link.py +200 -0
  125. rasa/studio/pull.py +94 -0
  126. rasa/studio/push.py +131 -0
  127. rasa/studio/upload.py +117 -67
  128. rasa/telemetry.py +82 -25
  129. rasa/tracing/config.py +3 -4
  130. rasa/tracing/constants.py +19 -1
  131. rasa/tracing/instrumentation/attribute_extractors.py +10 -2
  132. rasa/tracing/instrumentation/instrumentation.py +53 -2
  133. rasa/tracing/instrumentation/metrics.py +98 -15
  134. rasa/tracing/metric_instrument_provider.py +75 -3
  135. rasa/utils/common.py +1 -27
  136. rasa/utils/log_utils.py +1 -45
  137. rasa/validator.py +2 -8
  138. rasa/version.py +1 -1
  139. {rasa_pro-3.13.0.dev7.dist-info → rasa_pro-3.13.0.dev8.dist-info}/METADATA +5 -6
  140. {rasa_pro-3.13.0.dev7.dist-info → rasa_pro-3.13.0.dev8.dist-info}/RECORD +143 -129
  141. rasa/anonymization/__init__.py +0 -2
  142. rasa/anonymization/anonymisation_rule_yaml_reader.py +0 -91
  143. rasa/anonymization/anonymization_pipeline.py +0 -286
  144. rasa/anonymization/anonymization_rule_executor.py +0 -266
  145. rasa/anonymization/anonymization_rule_orchestrator.py +0 -119
  146. rasa/anonymization/schemas/config.yml +0 -47
  147. rasa/anonymization/utils.py +0 -118
  148. {rasa_pro-3.13.0.dev7.dist-info → rasa_pro-3.13.0.dev8.dist-info}/NOTICE +0 -0
  149. {rasa_pro-3.13.0.dev7.dist-info → rasa_pro-3.13.0.dev8.dist-info}/WHEEL +0 -0
  150. {rasa_pro-3.13.0.dev7.dist-info → rasa_pro-3.13.0.dev8.dist-info}/entry_points.txt +0 -0
rasa/studio/push.py ADDED
@@ -0,0 +1,131 @@
+from __future__ import annotations
+
+import argparse
+from pathlib import Path
+from typing import Dict, Text
+
+import structlog
+
+import rasa.shared.utils.cli
+from rasa.shared.core.flows.yaml_flows_io import YamlFlowsWriter
+from rasa.shared.importers.importer import TrainingDataImporter
+from rasa.shared.nlu.training_data.formats.rasa_yaml import RasaYAMLWriter
+from rasa.shared.utils.io import read_file
+from rasa.shared.utils.yaml import dump_obj_as_yaml_to_string
+from rasa.studio.config import StudioConfig
+from rasa.studio.link import get_studio_config, read_assistant_name
+from rasa.studio.upload import (
+    build_import_request,
+    make_request,
+    run_validation,
+)
+
+structlogger = structlog.get_logger(__name__)
+
+
+def _send_to_studio(
+    assistant_name: Text,
+    payload_parts: Dict[Text, Text],
+    studio_cfg: StudioConfig,
+) -> None:
+    """Build the GraphQL request and send it.
+
+    Args:
+        assistant_name: The name of the assistant.
+        payload_parts: The parts of the payload to send.
+        studio_cfg: The StudioConfig object.
+    """
+    graphql_req = build_import_request(
+        assistant_name=assistant_name,
+        flows_yaml=payload_parts.get("flows"),
+        domain_yaml=payload_parts.get("domain"),
+        config_yaml=payload_parts.get("config"),
+        endpoints=payload_parts.get("endpoints"),
+        nlu_yaml=payload_parts.get("nlu"),
+    )
+    verify = not studio_cfg.disable_verify
+    result = make_request(studio_cfg.studio_url, graphql_req, verify)
+    if not result.was_successful:
+        rasa.shared.utils.cli.print_error_and_exit(result.message)
+
+    rasa.shared.utils.cli.print_success(f"Pushed data to assistant '{assistant_name}'.")
+
+
+def handle_push(args: argparse.Namespace) -> None:
+    """Push the entire assistant.
+
+    Args:
+        args: The command line arguments.
+    """
+    studio_cfg = get_studio_config()
+
+    run_validation(args)
+
+    importer = TrainingDataImporter.load_from_dict(
+        domain_path=args.domain,
+        config_path=args.config,
+        expand_env_vars=False,
+    )
+
+    domain_yaml = dump_obj_as_yaml_to_string(importer.get_user_domain().as_dict())
+    config_yaml = read_file(Path(args.config))
+    endpoints_yaml = read_file(Path(args.endpoints))
+
+    flow_importer = TrainingDataImporter.load_from_dict(
+        training_data_paths=args.data, expand_env_vars=False
+    )
+    flows_yaml = YamlFlowsWriter().dumps(flow_importer.get_user_flows())
+
+    nlu_importer = TrainingDataImporter.load_from_dict(
+        training_data_paths=args.data, expand_env_vars=False
+    )
+    nlu_yaml = RasaYAMLWriter().dumps(nlu_importer.get_nlu_data())
+
+    assistant_name = read_assistant_name()
+    _send_to_studio(
+        assistant_name,
+        {
+            "flows": flows_yaml,
+            "domain": domain_yaml,
+            "config": config_yaml,
+            "endpoints": endpoints_yaml,
+            "nlu": nlu_yaml,
+        },
+        studio_cfg,
+    )
+
+
+def handle_push_config(args: argparse.Namespace) -> None:
+    """Push only the assistant configuration (config.yml).
+
+    Args:
+        args: The command line arguments.
+    """
+    studio_cfg = get_studio_config()
+    assistant_name = read_assistant_name()
+
+    config_yaml = read_file(Path(args.config))
+    if not config_yaml:
+        rasa.shared.utils.cli.print_error_and_exit(
+            "No configuration data was found in the assistant."
+        )
+
+    _send_to_studio(assistant_name, {"config": config_yaml}, studio_cfg)
+
+
+def handle_push_endpoints(args: argparse.Namespace) -> None:
+    """Push only the endpoints configuration (endpoints.yml).
+
+    Args:
+        args: The command line arguments.
+    """
+    studio_cfg = get_studio_config()
+    assistant_name = read_assistant_name()
+
+    endpoints_yaml = read_file(Path(args.endpoints))
+    if not endpoints_yaml:
+        rasa.shared.utils.cli.print_error_and_exit(
+            "No endpoints data was found in the assistant."
+        )
+
+    _send_to_studio(assistant_name, {"endpoints": endpoints_yaml}, studio_cfg)
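
Note (illustrative, not part of the package diff): the new push handlers delegate to the existing upload helpers, so a partial push only sends the parts that are present. A minimal usage sketch, assuming rasa-pro 3.13.0.dev8 is installed and the project has already been linked so that get_studio_config() and read_assistant_name() resolve; the file path below is an assumption:

import argparse

from rasa.studio.push import handle_push_config

# Push only config.yml to the linked Studio assistant: the handler reads the
# file, wraps it in the GraphQL import request, and exits with an error
# message if the request fails.
handle_push_config(argparse.Namespace(config="config.yml"))
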
rasa/studio/upload.py CHANGED
@@ -2,11 +2,12 @@ import argparse
 import base64
 import re
 import sys
-from typing import Any, Dict, Iterable, List, Set, Text, Tuple, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Text, Tuple, Union
 
 import questionary
 import requests
 import structlog
+from pydantic import BaseModel, Field
 
 import rasa.cli.telemetry
 import rasa.cli.utils
@@ -32,6 +33,7 @@ from rasa.shared.nlu.training_data.formats.rasa_yaml import (
 )
 from rasa.shared.utils.yaml import (
     dump_obj_as_yaml_to_string,
+    read_yaml,
     read_yaml_file,
 )
 from rasa.studio import results_logger
@@ -64,6 +66,16 @@ DOMAIN_KEYS = [
 ]
 
 
+class CALMImportParts(BaseModel):
+    """All pieces that will be uploaded to Rasa Studio."""
+
+    flows: Dict[str, Any]
+    domain: Dict[str, Any]
+    config: Dict[str, Any]
+    endpoints: Dict[str, Any]
+    nlu: Dict[str, Any] = Field(default_factory=dict)
+
+
 def _get_selected_entities_and_intents(
     args: argparse.Namespace,
     intents_from_files: Set[Text],
@@ -208,79 +220,93 @@ def _get_assistant_name(config: Dict[Text, Any]) -> str:
     return assistant_name
 
 
-@with_studio_error_handler
-def upload_calm_assistant(
-    args: argparse.Namespace, endpoint: str, verify: bool = True
-) -> StudioResult:
-    """Validates and uploads the CALM assistant data to Rasa Studio.
+def build_calm_import_parts(
+    data_path: Union[Text, List[Text]],
+    domain_path: Text,
+    config_path: Text,
+    endpoints_path: Optional[Text] = None,
+    assistant_name: Optional[Text] = None,
+) -> Tuple[str, CALMImportParts]:
+    """Builds the parts of the assistant to be uploaded to Studio.
 
     Args:
-        args: The command line arguments
-            - data: The path to the training data
-            - domain: The path to the domain
-            - flows: The path to the flows
-            - endpoints: The path to the endpoints
-            - config: The path to the config
-        endpoint: The studio endpoint
-        verify: Whether to verify SSL
+        data_path: The path to the training data
+        domain_path: The path to the domain
+        config_path: The path to the config
+        endpoints_path: The path to the endpoints
+        assistant_name: The name of the assistant
+
     Returns:
-        None
+        The assistant name and the parts to be uploaded
     """
-    run_validation(args)
-
-    structlogger.info(
-        "rasa.studio.upload.loading_data", event_info="Parsing CALM assistant data..."
-    )
-
     importer = TrainingDataImporter.load_from_dict(
-        domain_path=args.domain,
-        config_path=args.config,
+        domain_path=domain_path,
+        config_path=config_path,
         expand_env_vars=False,
     )
 
-    # Prepare config and domain
-    config = importer.get_config()
-    assistant_name = _get_assistant_name(config)
+    config = read_yaml_file(config_path, expand_env_vars=False)
+    endpoints = read_yaml_file(endpoints_path, expand_env_vars=False)
+    assistant_name = assistant_name or _get_assistant_name(config)
 
-    config_from_files = read_yaml_file(args.config, expand_env_vars=False)
     domain_from_files = importer.get_user_domain().as_dict()
-
-    # Extract domain and config values
     domain = extract_values(domain_from_files, DOMAIN_KEYS)
 
-    # Prepare flows
     flow_importer = FlowSyncImporter.load_from_dict(
-        training_data_paths=args.data, expand_env_vars=False
+        training_data_paths=data_path, expand_env_vars=False
     )
+
     flows = list(flow_importer.get_user_flows())
+    flows_yaml = YamlFlowsWriter().dumps(flows)
+    flows = read_yaml(flows_yaml, expand_env_vars=False)
 
-    # We instantiate the TrainingDataImporter again on purpose to avoid
-    # adding patterns to domain's actions. More info https://t.ly/W8uuc
     nlu_importer = TrainingDataImporter.load_from_dict(
-        training_data_paths=args.data, expand_env_vars=False
+        training_data_paths=data_path, expand_env_vars=False
    )
     nlu_data = nlu_importer.get_nlu_data()
     nlu_examples = nlu_data.filter_training_examples(
         lambda ex: ex.get("intent") in nlu_data.intents
     )
     nlu_examples_yaml = RasaYAMLWriter().dumps(nlu_examples)
+    nlu = read_yaml(nlu_examples_yaml, expand_env_vars=False)
+
+    parts = CALMImportParts(
+        flows=flows,
+        domain=domain,
+        config=config,
+        endpoints=endpoints,
+        nlu=nlu,
+    )
+
+    return assistant_name, parts
+
+
+@with_studio_error_handler
+def upload_calm_assistant(
+    args: argparse.Namespace, endpoint: str, verify: bool = True
+) -> StudioResult:
+    def yaml_or_empty(part: Dict[Text, Any]) -> Optional[str]:
+        return dump_obj_as_yaml_to_string(part) if part else None
 
-    # Prepare endpoints
-    endpoints_from_files = read_yaml_file(args.endpoints, expand_env_vars=False)
-    endpoints_str = dump_obj_as_yaml_to_string(
-        endpoints_from_files, transform=remove_quotes
+    run_validation(args)
+    structlogger.info(
+        "rasa.studio.upload.loading_data", event_info="Parsing CALM assistant data..."
+    )
+    assistant_name, parts = build_calm_import_parts(
+        data_path=args.data,
+        domain_path=args.domain,
+        config_path=args.config,
+        endpoints_path=args.endpoints,
     )
 
-    # Build GraphQL request
     graphql_req = build_import_request(
         assistant_name,
-        flows_yaml=YamlFlowsWriter().dumps(flows),
-        domain_yaml=dump_obj_as_yaml_to_string(domain),
-        config_yaml=dump_obj_as_yaml_to_string(config_from_files),
-        endpoints=endpoints_str,
-        nlu_yaml=nlu_examples_yaml,
+        flows_yaml=yaml_or_empty(parts.flows),
+        domain_yaml=yaml_or_empty(parts.domain),
+        config_yaml=yaml_or_empty(parts.config),
+        endpoints=yaml_or_empty(parts.endpoints),
+        nlu_yaml=yaml_or_empty(parts.nlu),
     )
-
     structlogger.info(
         "rasa.studio.upload.calm", event_info="Uploading to Rasa Studio..."
     )
@@ -393,7 +419,6 @@ def make_request(endpoint: str, graphql_req: Dict, verify: bool = True) -> StudioResult:
         },
         verify=verify,
     )
-
    if results_logger.response_has_errors(res.json()):
         track_upload_to_studio_failed(res.json())
         return StudioResult.error(res.json())
@@ -421,39 +446,64 @@ def _add_missing_entities(
 
 def build_import_request(
     assistant_name: str,
-    flows_yaml: str,
-    domain_yaml: str,
-    config_yaml: str,
-    endpoints: str,
-    nlu_yaml: str = "",
+    flows_yaml: Optional[str] = None,
+    domain_yaml: Optional[str] = None,
+    config_yaml: Optional[str] = None,
+    endpoints: Optional[str] = None,
+    nlu_yaml: Optional[str] = None,
 ) -> Dict:
-    # b64encode expects bytes and returns bytes so we need to decode to string
-    base64_domain = base64.b64encode(domain_yaml.encode("utf-8")).decode("utf-8")
-    base64_flows = base64.b64encode(flows_yaml.encode("utf-8")).decode("utf-8")
-    base64_config = base64.b64encode(config_yaml.encode("utf-8")).decode("utf-8")
-    base64_nlu = base64.b64encode(nlu_yaml.encode("utf-8")).decode("utf-8")
-    base64_endpoints = base64.b64encode(endpoints.encode("utf-8")).decode("utf-8")
+    """Builds the GraphQL request for uploading a modern assistant.
+
+    Args:
+        assistant_name: The name of the assistant
+        flows_yaml: The YAML representation of the flows
+        domain_yaml: The YAML representation of the domain
+        config_yaml: The YAML representation of the config
+        endpoints: The YAML representation of the endpoints
+        nlu_yaml: The YAML representation of the NLU data
+
+    Returns:
+        A dictionary representing the GraphQL request for uploading the assistant.
+    """
+    inputs_map = {
+        "domain": domain_yaml,
+        "flows": flows_yaml,
+        "config": config_yaml,
+        "endpoints": endpoints,
+        "nlu": nlu_yaml,
+    }
+
+    payload = {
+        field: convert_string_to_base64(value)
+        for field, value in inputs_map.items()
+        if value is not None
+    }
+
+    variables_input = {"assistantName": assistant_name, **payload}
 
     graphql_req = {
         "query": (
             "mutation UploadModernAssistant($input: UploadModernAssistantInput!)"
             "{\n uploadModernAssistant(input: $input)\n}"
         ),
-        "variables": {
-            "input": {
-                "assistantName": assistant_name,
-                "domain": base64_domain,
-                "flows": base64_flows,
-                "nlu": base64_nlu,
-                "config": base64_config,
-                "endpoints": base64_endpoints,
-            }
-        },
+        "variables": {"input": variables_input},
     }
 
     return graphql_req
 
 
+def convert_string_to_base64(string: str) -> str:
+    """Converts a string to base64.
+
+    Args:
+        string: The string to convert
+
+    Returns:
+        The base64 encoded string
+    """
+    return base64.b64encode(string.encode("utf-8")).decode("utf-8")
+
+
 def build_request(
     assistant_name: str, nlu_examples_yaml: str, domain_yaml: str
 ) -> Dict:
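
Note (illustrative, not part of the package diff): build_import_request now omits any part that is None instead of base64-encoding empty strings, which is what makes the config-only and endpoints-only pushes in push.py possible. A standalone sketch of that logic, not the rasa module itself:

import base64
from typing import Dict, Optional


def to_base64(value: str) -> str:
    # Mirrors convert_string_to_base64: b64encode works on bytes, so encode then decode.
    return base64.b64encode(value.encode("utf-8")).decode("utf-8")


def build_variables(assistant_name: str, **parts: Optional[str]) -> Dict[str, str]:
    # Only the parts that were actually provided end up in the GraphQL variables.
    payload = {
        name: to_base64(value) for name, value in parts.items() if value is not None
    }
    return {"assistantName": assistant_name, **payload}


# A config-only push: "flows", "domain", etc. are simply absent from the payload.
print(build_variables("my_assistant", config="recipe: default.v1\n", flows=None))
# -> {'assistantName': 'my_assistant', 'config': '<base64 of the config YAML>'}
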
rasa/telemetry.py CHANGED
@@ -23,7 +23,6 @@ import structlog
 from terminaltables import SingleTable
 
 import rasa
-import rasa.anonymization.utils
 import rasa.shared.utils.io
 import rasa.utils.io
 from rasa import model
@@ -34,6 +33,7 @@ from rasa.constants import (
     CONFIG_TELEMETRY_ID,
 )
 from rasa.engine.storage.local_model_storage import LocalModelStorage
+from rasa.privacy.privacy_config import AnonymizationType, PrivacyConfig
 from rasa.shared.constants import (
     ASSISTANT_ID_KEY,
     CONFIG_LANGUAGE_KEY,
@@ -62,10 +62,10 @@ from rasa.utils import common as rasa_utils
 
 if typing.TYPE_CHECKING:
     from rasa.core.agent import Agent
+    from rasa.core.available_endpoints import AvailableEndpoints
     from rasa.core.brokers.broker import EventBroker
     from rasa.core.channels.channel import InputChannel
     from rasa.core.tracker_stores.tracker_store import TrackerStore
-    from rasa.core.utils import AvailableEndpoints
     from rasa.e2e_test.e2e_test_case import Fixture, Metadata, TestCase
     from rasa.shared.importers.importer import TrainingDataImporter
     from rasa.shared.nlu.training_data.training_data import TrainingData
@@ -149,6 +149,7 @@ TELEMETRY_ENTERPRISE_SEARCH_POLICY_TRAINING_COMPLETED_EVENT = (
 TELEMETRY_ENTERPRISE_SEARCH_POLICY_PREDICT_EVENT = "Enterprise Search Policy Predicted"
 TELEMETRY_VALIDATION_ERROR_LOG_EVENT = "Validation Error Logged"
 TELEMETRY_UPLOAD_TO_STUDIO_FAILED_EVENT = "Upload to Studio Failed"
+TELEMETRY_PRIVACY_ENABLED_EVENT = "PII Management in CALM Enabled"
 
 # licensing events
 TELEMETRY_CONVERSATION_COUNT = "Conversation Count"
@@ -1129,6 +1130,7 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
         LLMBasedCommandGenerator,
         LLMCommandGenerator,
         MultiStepLLMCommandGenerator,
+        SearchReadyLLMCommandGenerator,
         SingleStepLLMCommandGenerator,
     )
     from rasa.dialogue_understanding.generator.constants import (
@@ -1156,6 +1158,7 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
         SingleStepLLMCommandGenerator.__name__,
         MultiStepLLMCommandGenerator.__name__,
         CompactLLMCommandGenerator.__name__,
+        SearchReadyLLMCommandGenerator.__name__,
     ]:
         return component
     return None
@@ -1167,6 +1170,7 @@ def _get_llm_command_generator_config(config: Dict[str, Any]) -> Optional[Dict]:
         SingleStepLLMCommandGenerator.__name__: SingleStepLLMCommandGenerator,
         MultiStepLLMCommandGenerator.__name__: MultiStepLLMCommandGenerator,
         CompactLLMCommandGenerator.__name__: CompactLLMCommandGenerator,
+        SearchReadyLLMCommandGenerator.__name__: SearchReadyLLMCommandGenerator,
     }
     llm_config = component.get(LLM_CONFIG_KEY, {})
     # Config at this stage is not yet resolved, so read from `model_group`
@@ -1353,7 +1357,7 @@ def track_server_start(
         number_of_workers: number of used Sanic workers
         is_api_enabled: whether the rasa API server is enabled
     """
-    from rasa.core.utils import AvailableEndpoints
+    from rasa.core.available_endpoints import AvailableEndpoints
 
     def project_fingerprint_and_assistant_id_from_model(
         _model_directory: Optional[Text],
@@ -1745,7 +1749,6 @@ def identify_endpoint_config_traits(
 
     traits = append_tracing_trait(traits, endpoints_file)
     traits = append_metrics_trait(traits, endpoints_file)
-    traits = append_anonymization_trait(traits, endpoints_file)
 
     _identify(traits, context)
 
@@ -1784,27 +1787,6 @@ def append_metrics_trait(
     return traits
 
 
-def append_anonymization_trait(
-    traits: Dict[str, Any], endpoints_file: Optional[str]
-) -> Dict[str, Any]:
-    """Append the anonymization trait to the traits dictionary."""
-    from rasa.anonymization.anonymisation_rule_yaml_reader import (
-        KEY_ANONYMIZATION_RULES,
-    )
-
-    anonymization_config = rasa.anonymization.utils.read_endpoint_config(
-        endpoints_file, KEY_ANONYMIZATION_RULES
-    )
-
-    traits[KEY_ANONYMIZATION_RULES] = (
-        rasa.anonymization.utils.extract_anonymization_traits(
-            anonymization_config, KEY_ANONYMIZATION_RULES
-        )
-    )
-
-    return traits
-
-
 @ensure_telemetry_enabled
 def track_enterprise_search_policy_train_started() -> None:
     """Track when a user starts training Enterprise Search policy."""
@@ -1821,6 +1803,7 @@ def track_enterprise_search_policy_train_completed(
     llm_model: Optional[str],
     llm_model_group_id: Optional[str],
     citation_enabled: Optional[bool],
+    relevancy_check_enabled: Optional[bool],
 ) -> None:
     """Track when a user completes training Enterprise Search policy."""
     _track(
@@ -1834,6 +1817,7 @@ def track_enterprise_search_policy_train_completed(
             "llm_model": llm_model,
             "llm_model_group_id": llm_model_group_id,
             "citation_enabled": citation_enabled,
+            "relevancy_check_enabled": relevancy_check_enabled,
         },
     )
 
@@ -1848,6 +1832,7 @@ def track_enterprise_search_policy_predict(
     llm_model: Optional[str],
     llm_model_group_id: Optional[str],
     citation_enabled: Optional[bool],
+    relevancy_check_enabled: Optional[bool],
 ) -> None:
     """Track when a user predicts the next action using Enterprise Search policy."""
     _track(
@@ -1861,6 +1846,7 @@ def track_enterprise_search_policy_predict(
             "llm_model": llm_model,
             "llm_model_group_id": llm_model_group_id,
             "citation_enabled": citation_enabled,
+            "relevancy_check_enabled": relevancy_check_enabled,
         },
     )
 
@@ -1946,3 +1932,74 @@ def track_upload_to_studio_failed(response_json: Dict[str, Any]) -> None:
         TELEMETRY_UPLOAD_TO_STUDIO_FAILED_EVENT,
         {"studio_response_json": response_json},
     )
+
+
+def _extract_privacy_enabled_event_properties(
+    privacy_config: "PrivacyConfig",
+    stream_pii: bool,
+) -> Dict[str, Any]:
+    """Extract properties when PII management is enabled."""
+    number_of_total_rules = len(privacy_config.anonymization_rules)
+    count_of_redact = sum(
+        1
+        for rule in privacy_config.anonymization_rules.values()
+        if rule.method_type == AnonymizationType.REDACT
+    )
+    count_of_mask = sum(
+        1
+        for rule in privacy_config.anonymization_rules.values()
+        if rule.method_type == AnonymizationType.MASK
+    )
+
+    tracker_store_anonymization_enabled = (
+        privacy_config.tracker_store_settings is not None
+        and privacy_config.tracker_store_settings.anonymization_policy is not None
+    )
+    anonymization_cron_trigger = (
+        privacy_config.tracker_store_settings.anonymization_policy.cron  # type: ignore[union-attr]
+        if tracker_store_anonymization_enabled
+        else None
+    )
+
+    tracker_store_deletion_enabled = (
+        privacy_config.tracker_store_settings is not None
+        and privacy_config.tracker_store_settings.deletion_policy is not None
+    )
+
+    deletion_cron_trigger = (
+        privacy_config.tracker_store_settings.deletion_policy.cron  # type: ignore[union-attr]
+        if tracker_store_deletion_enabled
+        else None
+    )
+
+    return {
+        "num_total_rules": number_of_total_rules,
+        "redact_count": count_of_redact,
+        "mask_count": count_of_mask,
+        "stream_pii": stream_pii,
+        "tracker_store_anonymization_enabled": tracker_store_anonymization_enabled,
+        "tracker_store_deletion_enabled": tracker_store_deletion_enabled,
+        "anonymization_cron_trigger": str(anonymization_cron_trigger),
+        "deletion_cron_trigger": str(deletion_cron_trigger),
+    }
+
+
+def _extract_stream_pii(event_broker: Optional["EventBroker"]) -> bool:
+    """Extract whether un-anonymized PII streaming is enabled for the event broker."""
+    return (
+        event_broker.stream_pii
+        if event_broker is not None and hasattr(event_broker, "stream_pii")
+        else False
+    )
+
+
+@ensure_telemetry_enabled
+def track_privacy_enabled(
+    privacy_config: "PrivacyConfig", event_broker: Optional["EventBroker"]
+) -> None:
+    """Track when PII management capability is enabled"""
+    stream_pii = _extract_stream_pii(event_broker)
+    privacy_properties = _extract_privacy_enabled_event_properties(
+        privacy_config, stream_pii
+    )
+    _track(TELEMETRY_PRIVACY_ENABLED_EVENT, privacy_properties)
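
Note (illustrative, not part of the package diff): the new "PII Management in CALM Enabled" telemetry event replaces the removed anonymization trait. Based on _extract_privacy_enabled_event_properties above, its payload has roughly this shape; the values below are made up for illustration:

{
    "num_total_rules": 3,
    "redact_count": 2,
    "mask_count": 1,
    "stream_pii": False,
    "tracker_store_anonymization_enabled": True,
    "tracker_store_deletion_enabled": False,
    "anonymization_cron_trigger": "0 3 * * *",  # str(...) of the cron when a policy is set
    "deletion_cron_trigger": "None",            # str(None) when no deletion policy is configured
}
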
rasa/tracing/config.py CHANGED
@@ -24,14 +24,12 @@ from rasa.core.actions.grpc_custom_action_executor import GRPCCustomActionExecutor
 from rasa.core.agent import Agent
 from rasa.core.processor import MessageProcessor
 from rasa.core.tracker_stores.tracker_store import TrackerStore
-from rasa.dialogue_understanding.commands import (
-    Command,
-    FreeFormAnswerCommand,
-)
+from rasa.dialogue_understanding.commands import Command, FreeFormAnswerCommand
 from rasa.dialogue_understanding.generator import (
     CompactLLMCommandGenerator,
     LLMCommandGenerator,
     MultiStepLLMCommandGenerator,
+    SearchReadyLLMCommandGenerator,
     SingleStepLLMCommandGenerator,
 )
 from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
@@ -112,6 +110,7 @@ def configure_tracing(tracer_provider: Optional[TracerProvider]) -> None:
         grpc_custom_action_executor_class=GRPCCustomActionExecutor,
         single_step_llm_command_generator_class=SingleStepLLMCommandGenerator,
         compact_llm_command_generator_class=CompactLLMCommandGenerator,
+        search_ready_llm_command_generator_class=SearchReadyLLMCommandGenerator,
         multi_step_llm_command_generator_class=MultiStepLLMCommandGenerator,
         custom_action_executor_subclasses=custom_action_executor_subclasses,
         flow_retrieval_class=FlowRetrieval,
rasa/tracing/constants.py CHANGED
@@ -44,6 +44,18 @@ COMPACT_LLM_COMMAND_GENERATOR_PROMPT_TOKEN_USAGE_METRIC_NAME = (
 COMPACT_LLM_COMMAND_GENERATOR_LLM_RESPONSE_DURATION_METRIC_NAME = (
     "compact_llm_command_generator_llm_response_duration"
 )
+SEARCH_READY_LLM_COMMAND_GENERATOR_CPU_USAGE_METRIC_NAME = (
+    "search_ready_llm_command_generator_cpu_usage"
+)
+SEARCH_READY_LLM_COMMAND_GENERATOR_MEMORY_USAGE_METRIC_NAME = (
+    "search_ready_llm_command_generator_memory_usage"
+)
+SEARCH_READY_LLM_COMMAND_GENERATOR_PROMPT_TOKEN_USAGE_METRIC_NAME = (
+    "search_ready_llm_command_generator_prompt_token_usage"
+)
+SEARCH_READY_LLM_COMMAND_GENERATOR_LLM_RESPONSE_DURATION_METRIC_NAME = (
+    "search_ready_llm_command_generator_llm_response_duration"
+)
 MULTI_STEP_LLM_COMMAND_GENERATOR_CPU_USAGE_METRIC_NAME = (
     "multi_step_llm_command_generator_cpu_usage"
 )
@@ -56,7 +68,13 @@ MULTI_STEP_LLM_COMMAND_GENERATOR_PROMPT_TOKEN_USAGE_METRIC_NAME = (
 MULTI_STEP_LLM_COMMAND_GENERATOR_LLM_RESPONSE_DURATION_METRIC_NAME = (
     "multi_step_llm_command_generator_llm_response_duration"
 )
-
+ENTERPRISE_SEARCH_POLICY_CPU_USAGE_METRIC_NAME = "enterprise_search_policy_cpu_usage"
+ENTERPRISE_SEARCH_POLICY_MEMORY_USAGE_METRIC_NAME = (
+    "enterprise_search_policy_memory_usage"
+)
+ENTERPRISE_SEARCH_POLICY_PROMPT_TOKEN_USAGE_METRIC_NAME = (
+    "enterprise_search_policy_prompt_token_usage"
+)
 ENTERPRISE_SEARCH_POLICY_LLM_RESPONSE_DURATION_METRIC_NAME = (
     "enterprise_search_policy_llm_response_duration"
 )
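
Note (illustrative, not part of the package diff): the constants above are only metric names; the instruments themselves are registered elsewhere (rasa/tracing/metric_instrument_provider.py, also changed in this release). As a rough, hedged sketch of how such a name is typically wired to the OpenTelemetry metrics API, not rasa's actual provider code:

from opentelemetry import metrics

ENTERPRISE_SEARCH_POLICY_PROMPT_TOKEN_USAGE_METRIC_NAME = (
    "enterprise_search_policy_prompt_token_usage"
)

meter = metrics.get_meter(__name__)
# A histogram is a common instrument choice for per-request token counts.
prompt_token_usage = meter.create_histogram(
    name=ENTERPRISE_SEARCH_POLICY_PROMPT_TOKEN_USAGE_METRIC_NAME,
    unit="1",
    description="Prompt token usage of Enterprise Search policy LLM calls.",
)
prompt_token_usage.record(512)
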