rasa-pro 3.11.0rc1__py3-none-any.whl → 3.11.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rasa-pro might be problematic.

Files changed (52)
  1. rasa/cli/inspect.py +2 -0
  2. rasa/cli/studio/studio.py +18 -8
  3. rasa/core/actions/action_repeat_bot_messages.py +17 -0
  4. rasa/core/channels/channel.py +17 -0
  5. rasa/core/channels/voice_ready/audiocodes.py +12 -0
  6. rasa/core/channels/voice_ready/jambonz.py +13 -2
  7. rasa/core/channels/voice_ready/twilio_voice.py +6 -21
  8. rasa/core/channels/voice_stream/voice_channel.py +13 -1
  9. rasa/core/nlg/contextual_response_rephraser.py +18 -10
  10. rasa/core/policies/enterprise_search_policy.py +27 -67
  11. rasa/core/policies/intentless_policy.py +25 -67
  12. rasa/dialogue_understanding/coexistence/llm_based_router.py +18 -33
  13. rasa/dialogue_understanding/generator/constants.py +0 -2
  14. rasa/dialogue_understanding/generator/flow_retrieval.py +33 -50
  15. rasa/dialogue_understanding/generator/llm_based_command_generator.py +12 -40
  16. rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +18 -20
  17. rasa/dialogue_understanding/generator/nlu_command_adapter.py +19 -1
  18. rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +24 -21
  19. rasa/dialogue_understanding/processor/command_processor.py +21 -1
  20. rasa/e2e_test/e2e_test_case.py +85 -6
  21. rasa/engine/validation.py +57 -41
  22. rasa/model_service.py +3 -0
  23. rasa/nlu/tokenizers/whitespace_tokenizer.py +3 -14
  24. rasa/server.py +3 -1
  25. rasa/shared/core/flows/flows_list.py +5 -1
  26. rasa/shared/providers/embedding/_base_litellm_embedding_client.py +6 -14
  27. rasa/shared/providers/llm/_base_litellm_client.py +6 -1
  28. rasa/shared/utils/health_check/__init__.py +0 -0
  29. rasa/shared/utils/health_check/embeddings_health_check_mixin.py +31 -0
  30. rasa/shared/utils/health_check/health_check.py +256 -0
  31. rasa/shared/utils/health_check/llm_health_check_mixin.py +31 -0
  32. rasa/shared/utils/llm.py +5 -2
  33. rasa/shared/utils/yaml.py +102 -62
  34. rasa/studio/auth.py +3 -5
  35. rasa/studio/config.py +13 -4
  36. rasa/studio/constants.py +1 -0
  37. rasa/studio/data_handler.py +10 -3
  38. rasa/studio/upload.py +21 -10
  39. rasa/telemetry.py +12 -0
  40. rasa/tracing/config.py +2 -0
  41. rasa/tracing/instrumentation/attribute_extractors.py +20 -0
  42. rasa/tracing/instrumentation/instrumentation.py +121 -0
  43. rasa/utils/common.py +5 -0
  44. rasa/utils/io.py +8 -16
  45. rasa/utils/sanic_error_handler.py +32 -0
  46. rasa/version.py +1 -1
  47. {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc2.dist-info}/METADATA +3 -2
  48. {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc2.dist-info}/RECORD +51 -47
  49. rasa/shared/utils/health_check.py +0 -533
  50. {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc2.dist-info}/NOTICE +0 -0
  51. {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc2.dist-info}/WHEEL +0 -0
  52. {rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc2.dist-info}/entry_points.txt +0 -0
rasa/studio/constants.py CHANGED
@@ -10,6 +10,7 @@ RASA_STUDIO_AUTH_SERVER_URL_ENV = "RASA_STUDIO_AUTH_SERVER_URL"
 RASA_STUDIO_CLI_STUDIO_URL_ENV = "RASA_STUDIO_CLI_STUDIO_URL"
 RASA_STUDIO_CLI_REALM_NAME_KEY_ENV = "RASA_STUDIO_CLI_REALM_NAME_KEY"
 RASA_STUDIO_CLI_CLIENT_ID_KEY_ENV = "RASA_STUDIO_CLI_CLIENT_ID_KEY"
+RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV = "RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"

 STUDIO_NLU_FILENAME = "studio_nlu.yml"
 STUDIO_DOMAIN_FILENAME = "studio_domain.yml"
rasa/studio/data_handler.py CHANGED
@@ -76,7 +76,9 @@ class StudioDataHandler:

         return request

-    def _make_request(self, GQL_req: Dict[Any, Any]) -> Dict[Any, Any]:
+    def _make_request(
+        self, GQL_req: Dict[Any, Any], verify: bool = True
+    ) -> Dict[Any, Any]:
         token = KeycloakTokenReader().get_token()
         if token.is_expired():
             token = self.refresh_token(token)
@@ -93,6 +95,7 @@ class StudioDataHandler:
                 "Authorization": f"{token.token_type} {token.access_token}",
                 "Content-Type": "application/json",
             },
+            verify=verify,
         )

         if res.status_code != 200:
@@ -128,7 +131,9 @@ class StudioDataHandler:
             The data from Rasa Studio.
         """
         GQL_req = self._build_request()
-        response = self._make_request(GQL_req)
+        verify = not self.studio_config.disable_verify
+
+        response = self._make_request(GQL_req, verify=verify)
         self._extract_data(response)

     def request_data(
@@ -145,7 +150,9 @@ class StudioDataHandler:
             The data from Rasa Studio.
         """
         GQL_req = self._build_request(intent_names, entity_names)
-        response = self._make_request(GQL_req)
+        verify = not self.studio_config.disable_verify
+
+        response = self._make_request(GQL_req, verify=verify)
         self._extract_data(response)

     def get_config(self) -> Optional[str]:
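Note: the new `verify` keyword threads TLS-verification control from `StudioConfig.disable_verify` down to `requests.post`, which accepts `verify` as a boolean (False disables certificate checks). A minimal sketch of the same pattern, using a hypothetical `post_graphql` helper that is not part of the package:

    import requests

    def post_graphql(endpoint: str, payload: dict, token: str, disable_verify: bool = False) -> dict:
        # The config flag is inverted into `verify`; requests skips TLS certificate
        # validation when verify=False (useful for self-signed Studio deployments).
        response = requests.post(
            endpoint,
            json=payload,
            headers={"Authorization": f"Bearer {token}"},
            verify=not disable_verify,
        )
        response.raise_for_status()
        return response.json()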
rasa/studio/upload.py CHANGED
@@ -56,14 +56,17 @@ def _get_selected_entities_and_intents(

 def handle_upload(args: argparse.Namespace) -> None:
     """Uploads primitives to rasa studio."""
-    endpoint = StudioConfig.read_config().studio_url
+    studio_config = StudioConfig.read_config()
+    endpoint = studio_config.studio_url
+    verify = not studio_config.disable_verify
+
     if not endpoint:
         rasa.shared.utils.cli.print_error_and_exit(
             "No GraphQL endpoint found in config. Please run `rasa studio config`."
         )
         return

-    if not is_auth_working(endpoint):
+    if not is_auth_working(endpoint, verify):
         rasa.shared.utils.cli.print_error_and_exit(
             "Authentication is invalid or expired. Please run `rasa studio login`."
         )
@@ -81,9 +84,9 @@ def handle_upload(args: argparse.Namespace) -> None:

     # check safely if args.calm is set and not fail if not
     if hasattr(args, "calm") and args.calm:
-        upload_calm_assistant(args, endpoint)
+        upload_calm_assistant(args, endpoint, verify=verify)
     else:
-        upload_nlu_assistant(args, endpoint)
+        upload_nlu_assistant(args, endpoint, verify=verify)


 config_keys = [
@@ -135,7 +138,9 @@ def _get_assistant_name(config: Dict[Text, Any]) -> str:


 @with_studio_error_handler
-def upload_calm_assistant(args: argparse.Namespace, endpoint: str) -> StudioResult:
+def upload_calm_assistant(
+    args: argparse.Namespace, endpoint: str, verify: bool = True
+) -> StudioResult:
     """Uploads the CALM assistant data to Rasa Studio.

     Args:
@@ -227,11 +232,13 @@ def upload_calm_assistant(args: argparse.Namespace, endpoint: str) -> StudioResu
     structlogger.info(
         "rasa.studio.upload.calm", event_info="Uploading to Rasa Studio..."
     )
-    return make_request(endpoint, graphql_req)
+    return make_request(endpoint, graphql_req, verify)


 @with_studio_error_handler
-def upload_nlu_assistant(args: argparse.Namespace, endpoint: str) -> StudioResult:
+def upload_nlu_assistant(
+    args: argparse.Namespace, endpoint: str, verify: bool = True
+) -> StudioResult:
     """Uploads the classic (dm1) assistant data to Rasa Studio.

     Args:
@@ -241,6 +248,7 @@ def upload_nlu_assistant(args: argparse.Namespace, endpoint: str) -> StudioResul
             - intents: The intents to upload
             - entities: The entities to upload
         endpoint: The studio endpoint
+        verify: Whether to verify SSL
     Returns:
         None
     """
@@ -286,10 +294,10 @@ def upload_nlu_assistant(args: argparse.Namespace, endpoint: str) -> StudioResul
     structlogger.info(
         "rasa.studio.upload.nlu", event_info="Uploading to Rasa Studio..."
     )
-    return make_request(endpoint, graphql_req)
+    return make_request(endpoint, graphql_req, verify)


-def is_auth_working(endpoint: str) -> bool:
+def is_auth_working(endpoint: str, verify: bool = True) -> bool:
     """Send a test request to Studio to check if auth is working."""
     result = make_request(
         endpoint,
@@ -306,16 +314,18 @@ def is_auth_working(endpoint: str) -> bool:
             ),
             "variables": {},
         },
+        verify,
     )
     return result.was_successful


-def make_request(endpoint: str, graphql_req: Dict) -> StudioResult:
+def make_request(endpoint: str, graphql_req: Dict, verify: bool = True) -> StudioResult:
     """Makes a request to the studio endpoint to upload data.

     Args:
         endpoint: The studio endpoint
         graphql_req: The graphql request
+        verify: Whether to verify SSL
     """
     token = KeycloakTokenReader().get_token()
     res = requests.post(
@@ -325,6 +335,7 @@ def make_request(endpoint: str, graphql_req: Dict) -> StudioResult:
             "Authorization": f"{token.token_type} {token.access_token}",
             "Content-Type": "application/json",
         },
+        verify=verify,
     )

     if results_logger.response_has_errors(res.json()):
rasa/telemetry.py CHANGED
@@ -112,6 +112,7 @@ TELEMETRY_INTERACTIVE_LEARNING_STARTED_EVENT = "Interactive Learning Started"
 TELEMETRY_SERVER_STARTED_EVENT = "Server Started"
 TELEMETRY_PROJECT_CREATED_EVENT = "Project Created"
 TELEMETRY_SHELL_STARTED_EVENT = "Shell Started"
+TELEMETRY_INSPECT_STARTED_EVENT = "Inspect Started"
 TELEMETRY_VISUALIZATION_STARTED_EVENT = "Story Visualization Started"
 TELEMETRY_TEST_CORE_EVENT = "Model Core Tested"
 TELEMETRY_TEST_NLU_EVENT = "Model NLU Tested"
@@ -1378,6 +1379,17 @@ def track_shell_started(model_type: Text) -> None:
     _track(TELEMETRY_SHELL_STARTED_EVENT, {"type": model_type})


+@ensure_telemetry_enabled
+def track_inspect_started(model_type: Text) -> None:
+    """Track when a user starts a bot using rasa inspect.
+
+    Args:
+        channel: Channel name `socketio` (used for chat assistants)
+            or `browser_audio` (used for voice).
+    """
+    _track(TELEMETRY_INSPECT_STARTED_EVENT, {"type": model_type})
+
+
 @ensure_telemetry_enabled
 def track_visualization() -> None:
     """Track when a user runs the visualization."""
rasa/tracing/config.py CHANGED
@@ -33,6 +33,7 @@ from rasa.dialogue_understanding.generator import (
     SingleStepLLMCommandGenerator,
     MultiStepLLMCommandGenerator,
 )
+from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
 from rasa.dialogue_understanding.generator.nlu_command_adapter import NLUCommandAdapter
 from rasa.engine.graph import GraphNode
 from rasa.engine.training.graph_trainer import GraphTrainer
@@ -111,6 +112,7 @@ def configure_tracing(tracer_provider: Optional[TracerProvider]) -> None:
         single_step_llm_command_generator_class=SingleStepLLMCommandGenerator,
         multi_step_llm_command_generator_class=MultiStepLLMCommandGenerator,
         custom_action_executor_subclasses=custom_action_executor_subclasses,
+        flow_retrieval_class=FlowRetrieval,
     )

rasa/tracing/instrumentation/attribute_extractors.py CHANGED
@@ -414,6 +414,26 @@ def extract_attrs_for_generate(
     }


+def extract_attrs_for_performing_health_check(
+    custom_config: Optional[Dict[str, Any]],
+    default_config: Dict[str, Any],
+    log_source_method: str,
+    log_source_component: str,
+) -> Dict[str, Any]:
+    from rasa.shared.utils.health_check.health_check import is_api_health_check_enabled
+
+    attrs = {
+        "api_health_check_enabled": is_api_health_check_enabled(),
+        "health_check_trigger_component": log_source_component,
+        "health_check_trigger_method": log_source_method,
+    }
+    if is_api_health_check_enabled():
+        attrs["config"] = json.dumps(
+            combine_custom_and_default_config(custom_config, default_config)
+        )
+    return attrs
+
+
 def extract_attrs_for_execute_commands(
     tracker: DialogueStateTracker,
     all_flows: FlowsList,
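Note: the extractor above serializes the combined component config into the span only when API health checks are enabled; otherwise it records just the trigger metadata. Assuming the health-check flag is off, the resulting span attributes would look roughly like this (values are illustrative):

    attrs = extract_attrs_for_performing_health_check(
        custom_config={"model": "gpt-4"},
        default_config={"model": "gpt-3.5-turbo", "timeout": 7},
        log_source_method="perform_llm_health_check",
        log_source_component="SingleStepLLMCommandGenerator",
    )
    # attrs == {
    #     "api_health_check_enabled": False,
    #     "health_check_trigger_component": "SingleStepLLMCommandGenerator",
    #     "health_check_trigger_method": "perform_llm_health_check",
    # }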
rasa/tracing/instrumentation/instrumentation.py CHANGED
@@ -45,6 +45,7 @@ from rasa.dialogue_understanding.generator import (
     MultiStepLLMCommandGenerator,
     SingleStepLLMCommandGenerator,
 )
+from rasa.dialogue_understanding.generator.flow_retrieval import FlowRetrieval
 from rasa.dialogue_understanding.generator.nlu_command_adapter import NLUCommandAdapter
 from rasa.engine.graph import GraphNode
 from rasa.engine.training.graph_trainer import GraphTrainer
@@ -283,6 +284,7 @@ SingleStepLLMCommandGeneratorType = TypeVar(
 MultiStepLLMCommandGeneratorType = TypeVar(
     "MultiStepLLMCommandGeneratorType", bound=MultiStepLLMCommandGenerator
 )
+FlowRetrievalType = TypeVar("FlowRetrievalType", bound=FlowRetrieval)
 CommandType = TypeVar("CommandType", bound=Command)
 PolicyType = TypeVar("PolicyType", bound=Policy)
 InformationRetrievalType = TypeVar(
@@ -317,6 +319,7 @@ def instrument(
     custom_action_executor_subclasses: Optional[
         List[Type[CustomActionExecutor]]
     ] = None,
+    flow_retrieval_class: Optional[Type[FlowRetrievalType]] = None,
 ) -> None:
     """Substitute methods to be traced by their traced counterparts.

@@ -445,6 +448,12 @@ def instrument(
             "_check_commands_against_startable_flows",
             attribute_extractors.extract_attrs_for_check_commands_against_startable_flows,
         )
+        _instrument_perform_health_check_method_for_component(
+            tracer_provider.get_tracer(llm_command_generator_class.__module__),
+            llm_command_generator_class,
+            "perform_llm_health_check",
+            attribute_extractors.extract_attrs_for_performing_health_check,
+        )
         mark_class_as_instrumented(llm_command_generator_class)

     if (
@@ -468,6 +477,14 @@ def instrument(
             "_check_commands_against_startable_flows",
             attribute_extractors.extract_attrs_for_check_commands_against_startable_flows,
         )
+        _instrument_perform_health_check_method_for_component(
+            tracer_provider.get_tracer(
+                single_step_llm_command_generator_class.__module__
+            ),
+            single_step_llm_command_generator_class,
+            "perform_llm_health_check",
+            attribute_extractors.extract_attrs_for_performing_health_check,
+        )
         mark_class_as_instrumented(single_step_llm_command_generator_class)

     if multi_step_llm_command_generator_class is not None and not class_is_instrumented(
@@ -488,8 +505,36 @@ def instrument(
             ),
             multi_step_llm_command_generator_class,
         )
+        _instrument_perform_health_check_method_for_component(
+            tracer_provider.get_tracer(
+                multi_step_llm_command_generator_class.__module__
+            ),
+            multi_step_llm_command_generator_class,
+            "perform_llm_health_check",
+            attribute_extractors.extract_attrs_for_performing_health_check,
+        )
         mark_class_as_instrumented(multi_step_llm_command_generator_class)

+    if (
+        any(
+            llm_based_command_generator_class is not None
+            for llm_based_command_generator_class in (
+                llm_command_generator_class,
+                single_step_llm_command_generator_class,
+                multi_step_llm_command_generator_class,
+            )
+        )
+        and flow_retrieval_class is not None
+        and not class_is_instrumented(flow_retrieval_class)
+    ):
+        _instrument_perform_health_check_method_for_component(
+            tracer_provider.get_tracer(flow_retrieval_class.__module__),
+            flow_retrieval_class,
+            "perform_embeddings_health_check",
+            attribute_extractors.extract_attrs_for_performing_health_check,
+        )
+        mark_class_as_instrumented(flow_retrieval_class)
+
     if command_subclasses:
         for command_subclass in command_subclasses:
             if command_subclass is not None and not class_is_instrumented(
@@ -524,6 +569,12 @@ def instrument(
             "generate",
             attribute_extractors.extract_attrs_for_generate,
         )
+        _instrument_perform_health_check_method_for_component(
+            tracer_provider.get_tracer(contextual_response_rephraser_class.__module__),
+            contextual_response_rephraser_class,
+            "perform_llm_health_check",
+            attribute_extractors.extract_attrs_for_performing_health_check,
+        )
         mark_class_as_instrumented(contextual_response_rephraser_class)

     if not module_is_instrumented(COMMAND_PROCESSOR_MODULE_NAME):
@@ -755,6 +806,18 @@ def _instrument_enterprise_search_policy(
         "_generate_llm_answer",
         attribute_extractors.extract_attrs_for_enterprise_search_generate_llm_answer,
     )
+    _instrument_perform_health_check_method_for_component(
+        tracer_provider.get_tracer(policy_class.__module__),
+        policy_class,
+        "perform_embeddings_health_check",
+        attribute_extractors.extract_attrs_for_performing_health_check,
+    )
+    _instrument_perform_health_check_method_for_component(
+        tracer_provider.get_tracer(policy_class.__module__),
+        policy_class,
+        "perform_llm_health_check",
+        attribute_extractors.extract_attrs_for_performing_health_check,
+    )


 def _instrument_intentless_policy(
@@ -787,6 +850,18 @@ def _instrument_intentless_policy(
         "_generate_llm_answer",
         attribute_extractors.extract_attrs_for_intentless_policy_generate_llm_answer,
     )
+    _instrument_perform_health_check_method_for_component(
+        tracer_provider.get_tracer(policy_class.__module__),
+        policy_class,
+        "perform_embeddings_health_check",
+        attribute_extractors.extract_attrs_for_performing_health_check,
+    )
+    _instrument_perform_health_check_method_for_component(
+        tracer_provider.get_tracer(policy_class.__module__),
+        policy_class,
+        "perform_llm_health_check",
+        attribute_extractors.extract_attrs_for_performing_health_check,
+    )


 def _instrument_processor(
@@ -1139,6 +1214,52 @@ def _instrument_grpc_custom_action_executor(
     logger.debug(f"Instrumented '{grpc_custom_action_executor_class.__name__}.run.")


+def _instrument_perform_health_check_method_for_component(
+    tracer: Tracer,
+    instrumented_class: Type,
+    method_name: Text,
+    attr_extractor: Optional[Callable] = None,
+    return_value_attr_extractor: Optional[Callable] = None,
+) -> None:
+    def tracing_perform_health_check_for_component(
+        fn: Callable[..., S],
+    ) -> Callable[..., S]:
+        @functools.wraps(fn)
+        def wrapper(*args: Any, **kwargs: Any) -> S:
+            # Check the first argument to adjust for self/cls depending on how
+            # the static method from LLMHealthCheckMixin / EmbeddingsLLMHealthCheckMixin
+            # is called.
+            if args and isinstance(
+                args[0], (instrumented_class, type(instrumented_class))
+            ):
+                # The first argument is self/cls; align args to match the signature
+                args = args[1:]
+
+            span_name = f"{instrumented_class.__name__}.{fn.__name__}"
+            extracted_attrs = attr_extractor(*args, **kwargs) if attr_extractor else {}
+
+            with tracer.start_as_current_span(span_name) as span:
+                result = fn(*args, **kwargs)
+
+                # Extract attributes from the return value, if an extractor is provided
+                return_value_attributes = (
+                    return_value_attr_extractor(result, *args, **kwargs)
+                    if return_value_attr_extractor
+                    else {}
+                )
+
+                span.set_attributes({**extracted_attrs, **return_value_attributes})
+                return result
+
+        return wrapper
+
+    method_to_trace = getattr(instrumented_class, method_name)
+    traced_method = tracing_perform_health_check_for_component(method_to_trace)
+    setattr(instrumented_class, method_name, traced_method)
+
+    logger.debug(f"Instrumented '{instrumented_class.__name__}.{method_name}'.")
+
+
 def _mangled_instrumented_boolean_attribute_name(instrumented_class: Type) -> Text:
     # see https://peps.python.org/pep-0008/#method-names-and-instance-variables
     # and https://stackoverflow.com/a/50401073
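Note: `_instrument_perform_health_check_method_for_component` monkey-patches a (usually static) health-check method with a span-producing wrapper, stripping a leading self/cls argument before calling the attribute extractor. A self-contained sketch of the same wrapping idea, with hypothetical `DummyGenerator` and `extract_attrs` stand-ins that are not part of rasa-pro:

    import functools
    from opentelemetry.sdk.trace import TracerProvider

    class DummyGenerator:
        @staticmethod
        def perform_llm_health_check(config: dict) -> bool:
            # stand-in for a real provider ping
            return bool(config)

    def extract_attrs(config: dict) -> dict:
        return {"config_keys": ",".join(config)}

    tracer = TracerProvider().get_tracer(__name__)
    original = DummyGenerator.perform_llm_health_check

    @functools.wraps(original)
    def traced(*args, **kwargs):
        # open a span around the original call and attach the extracted attributes
        with tracer.start_as_current_span("DummyGenerator.perform_llm_health_check") as span:
            result = original(*args, **kwargs)
            span.set_attributes(extract_attrs(*args, **kwargs))
            return result

    # patched in place, just as instrument() does for the real components
    DummyGenerator.perform_llm_health_check = traced

    DummyGenerator.perform_llm_health_check({"model": "gpt-4"})  # emits one span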
rasa/utils/common.py CHANGED
@@ -90,6 +90,11 @@ EXPECTED_WARNINGS: List[Tuple[Type[Warning], str]] = [
     # Ignore Keras DeprecationWarning since it requires that we
     # upgrade tensorflow-macos to 2.13.0 version.
     (DeprecationWarning, "invalid escape sequence*"),
+    # Ignore importlib open_text and read_text warnings for now
+    (
+        DeprecationWarning,
+        "https://importlib-resources.readthedocs.io/en/latest/using.html#migrating-from-legacy",
+    ),
 ]

 PYTHON_LOGGING_SCHEMA_DOCS = (
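Note: each `(category, message)` pair in EXPECTED_WARNINGS is a warning class plus a message pattern; entries like the new importlib one are presumably registered as "ignore" filters. A generic sketch of that mechanism (the `configure_warning_filters` helper is hypothetical, not rasa-pro's actual code path):

    import warnings

    EXPECTED_WARNINGS = [
        (DeprecationWarning, "invalid escape sequence*"),
        (
            DeprecationWarning,
            "https://importlib-resources.readthedocs.io/en/latest/using.html#migrating-from-legacy",
        ),
    ]

    def configure_warning_filters() -> None:
        # register one "ignore" filter per expected warning; `message` is treated
        # as a regex matched against the start of the warning text
        for category, message in EXPECTED_WARNINGS:
            warnings.filterwarnings("ignore", message=message, category=category)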
rasa/utils/io.py CHANGED
@@ -18,7 +18,6 @@ from typing import (
     Type,
     Callable,
     TYPE_CHECKING,
-    Pattern,
 )

 from ruamel import yaml
@@ -167,21 +166,14 @@ def create_validator(
     return FunctionValidator


-def get_emoji_regex() -> Pattern:
-    """Returns regex to identify emojis."""
-    return re.compile(
-        "["
-        "\U0001f600-\U0001f64f"  # emoticons
-        "\U0001f300-\U0001f5ff"  # symbols & pictographs
-        "\U0001f680-\U0001f6ff"  # transport & map symbols
-        "\U0001f1e0-\U0001f1ff"  # flags (iOS)
-        "\U00002702-\U000027b0"
-        "\U000024c2-\U0001f251"
-        "\u200d"  # zero width joiner
-        "\u200c"  # zero width non-joiner
-        "]+",
-        flags=re.UNICODE,
-    )
+def remove_emojis(s: str) -> str:
+    import demoji
+
+    replaced = demoji.replace(s)
+    if replaced == s:
+        return s
+    # remove duplicate or trailing whitespaces if emojis were removed
+    return re.sub(r" +", " ", replaced).strip()


 def are_directories_equal(dir1: Path, dir2: Path) -> bool:
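Note: `demoji.replace(text)` returns the string with all emoji removed (the default replacement is the empty string), so `remove_emojis` only has to collapse the whitespace left behind. A quick usage sketch; the exact output depends on demoji's emoji database:

    from rasa.utils.io import remove_emojis

    print(remove_emojis("hello 👋 world 🌍"))  # expected: "hello world"
    print(remove_emojis("no emojis here"))     # returned unchanged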
rasa/utils/sanic_error_handler.py ADDED
@@ -0,0 +1,32 @@
+from sanic import Sanic
+from sanic.handlers import ErrorHandler
+from sanic.request import Request
+from sanic.exceptions import ServerError
+
+
+# TODO: remove custom handler when upgrading to sanic >= 24
+# the underlying issue https://github.com/sanic-org/sanic/issues/2572
+# has been fixed in sanic 24
+class IgnoreWSServerErrorHandler(ErrorHandler):
+    @staticmethod
+    def log(request: Request, exception: Exception) -> None:
+        try:
+            if (
+                request.url.startswith("ws")
+                and isinstance(exception, ServerError)
+                and exception.args
+                and (
+                    exception.args[0]
+                    == "Invalid response type None (need HTTPResponse)"
+                )
+            ):
+                # in case we are in a websocket connection, we don't want to log the
+                # the error, as this is a bug in sanic
+                return
+        except Exception:
+            pass
+        ErrorHandler.log(request, exception)  # type: ignore
+
+
+def register_custom_sanic_error_handler(app: Sanic) -> None:
+    app.error_handler = IgnoreWSServerErrorHandler()
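Note: the new handler only suppresses logging of the spurious "Invalid response type None (need HTTPResponse)" ServerError raised on websocket routes (a known sanic < 24 bug); every other exception still goes through Sanic's default ErrorHandler.log. A usage sketch with an illustrative app name (in rasa-pro the handler is presumably registered on the server's Sanic app):

    from sanic import Sanic

    from rasa.utils.sanic_error_handler import register_custom_sanic_error_handler

    app = Sanic("my_assistant")
    register_custom_sanic_error_handler(app)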
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
 # this file will automatically be changed,
 # do not add anything but the version number here!
-__version__ = "3.11.0rc1"
+__version__ = "3.11.0rc2"
{rasa_pro-3.11.0rc1.dist-info → rasa_pro-3.11.0rc2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: rasa-pro
-Version: 3.11.0rc1
+Version: 3.11.0rc2
 Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
 Home-page: https://rasa.com
 Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
@@ -43,6 +43,7 @@ Requires-Dist: confluent-kafka (>=2.3.0,<3.0.0)
 Requires-Dist: cryptography (>=42.0.5)
 Requires-Dist: cvg-python-sdk (>=0.5.1,<0.6.0)
 Requires-Dist: dask (>=2024.7.0,<2024.8.0)
+Requires-Dist: demoji (>=1.1.0,<2.0.0)
 Requires-Dist: diskcache (>=5.6.3,<5.7.0)
 Requires-Dist: dnspython (==2.6.1)
 Requires-Dist: faiss-cpu (>=1.7.4,<2.0.0)
@@ -69,7 +70,7 @@ Requires-Dist: mattermostwrapper (>=2.2,<2.3)
 Requires-Dist: mlflow (>=2.15.1,<3.0.0) ; extra == "mlflow"
 Requires-Dist: networkx (>=3.1,<3.2)
 Requires-Dist: numpy (>=1.26.4,<1.27.0)
-Requires-Dist: openai (>=1.54.0,<1.55.0)
+Requires-Dist: openai (>=1.55.3,<1.56.0)
 Requires-Dist: openpyxl (>=3.1.5,<4.0.0)
 Requires-Dist: opentelemetry-api (>=1.16.0,<1.17.0)
 Requires-Dist: opentelemetry-exporter-jaeger (>=1.16.0,<1.17.0)