rasa-pro 3.10.11__py3-none-any.whl → 3.10.13__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Potentially problematic release: this version of rasa-pro might be problematic.

Files changed (33)
  1. README.md +17 -396
  2. rasa/cli/studio/studio.py +18 -8
  3. rasa/constants.py +1 -1
  4. rasa/core/featurizers/single_state_featurizer.py +22 -1
  5. rasa/core/featurizers/tracker_featurizers.py +115 -18
  6. rasa/core/policies/ted_policy.py +58 -33
  7. rasa/core/policies/unexpected_intent_policy.py +15 -7
  8. rasa/dialogue_understanding/commands/change_flow_command.py +6 -0
  9. rasa/nlu/classifiers/diet_classifier.py +38 -25
  10. rasa/nlu/classifiers/logistic_regression_classifier.py +22 -9
  11. rasa/nlu/classifiers/sklearn_intent_classifier.py +37 -16
  12. rasa/nlu/extractors/crf_entity_extractor.py +93 -50
  13. rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +45 -16
  14. rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +52 -17
  15. rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +5 -3
  16. rasa/shared/nlu/training_data/features.py +120 -2
  17. rasa/shared/utils/io.py +1 -0
  18. rasa/shared/utils/yaml.py +0 -44
  19. rasa/studio/auth.py +3 -5
  20. rasa/studio/config.py +13 -4
  21. rasa/studio/constants.py +1 -0
  22. rasa/studio/data_handler.py +10 -3
  23. rasa/studio/upload.py +17 -8
  24. rasa/utils/io.py +0 -66
  25. rasa/utils/tensorflow/feature_array.py +366 -0
  26. rasa/utils/tensorflow/model_data.py +2 -193
  27. rasa/version.py +1 -1
  28. rasa_pro-3.10.13.dist-info/METADATA +196 -0
  29. {rasa_pro-3.10.11.dist-info → rasa_pro-3.10.13.dist-info}/RECORD +32 -31
  30. rasa_pro-3.10.11.dist-info/METADATA +0 -575
  31. {rasa_pro-3.10.11.dist-info → rasa_pro-3.10.13.dist-info}/NOTICE +0 -0
  32. {rasa_pro-3.10.11.dist-info → rasa_pro-3.10.13.dist-info}/WHEEL +0 -0
  33. {rasa_pro-3.10.11.dist-info → rasa_pro-3.10.13.dist-info}/entry_points.txt +0 -0
rasa/shared/nlu/training_data/features.py CHANGED
@@ -1,15 +1,133 @@
  from __future__ import annotations
- from typing import Iterable, Union, Text, Optional, List, Any, Tuple, Dict, Set
+
  import itertools
+ from dataclasses import dataclass
+ from typing import Iterable, Union, Text, Optional, List, Any, Tuple, Dict, Set

  import numpy as np
  import scipy.sparse
+ from safetensors.numpy import save_file, load_file

- import rasa.shared.utils.io
  import rasa.shared.nlu.training_data.util
+ import rasa.shared.utils.io
  from rasa.shared.nlu.constants import FEATURE_TYPE_SEQUENCE, FEATURE_TYPE_SENTENCE


+ @dataclass
+ class FeatureMetadata:
+     data_type: str
+     attribute: str
+     origin: Union[str, List[str]]
+     is_sparse: bool
+     shape: tuple
+     safetensors_key: str
+
+
+ def save_features(
+     features_dict: Dict[Text, List[Features]], file_name: str
+ ) -> Dict[str, Any]:
+     """Save a dictionary of Features lists to disk using safetensors.
+
+     Args:
+         features_dict: Dictionary mapping strings to lists of Features objects
+         file_name: File to save the features to
+
+     Returns:
+         The metadata to reconstruct the features.
+     """
+     # All tensors are stored in a single safetensors file
+     tensors_to_save = {}
+     # Metadata will be stored separately
+     metadata = {}
+
+     for key, features_list in features_dict.items():
+         feature_metadata_list = []
+
+         for idx, feature in enumerate(features_list):
+             # Create a unique key for this tensor in the safetensors file
+             safetensors_key = f"{key}_{idx}"
+
+             # Convert sparse matrices to dense if needed
+             if feature.is_sparse():
+                 # For sparse matrices, use the COO format
+                 coo = feature.features.tocoo()  # type:ignore[union-attr]
+                 # Save data, row indices and col indices separately
+                 tensors_to_save[f"{safetensors_key}_data"] = coo.data
+                 tensors_to_save[f"{safetensors_key}_row"] = coo.row
+                 tensors_to_save[f"{safetensors_key}_col"] = coo.col
+             else:
+                 tensors_to_save[safetensors_key] = feature.features
+
+             # Store metadata
+             metadata_item = FeatureMetadata(
+                 data_type=feature.type,
+                 attribute=feature.attribute,
+                 origin=feature.origin,
+                 is_sparse=feature.is_sparse(),
+                 shape=feature.features.shape,
+                 safetensors_key=safetensors_key,
+             )
+             feature_metadata_list.append(vars(metadata_item))
+
+         metadata[key] = feature_metadata_list
+
+     # Save tensors
+     save_file(tensors_to_save, file_name)
+
+     return metadata
+
+
+ def load_features(
+     filename: str, metadata: Dict[str, Any]
+ ) -> Dict[Text, List[Features]]:
+     """Load Features dictionary from disk.
+
+     Args:
+         filename: File name of the safetensors file.
+         metadata: Metadata to reconstruct the features.
+
+     Returns:
+         Dictionary mapping strings to lists of Features objects
+     """
+     # Load tensors
+     tensors = load_file(filename)
+
+     # Reconstruct the features dictionary
+     features_dict: Dict[Text, List[Features]] = {}
+
+     for key, feature_metadata_list in metadata.items():
+         features_list = []
+
+         for meta in feature_metadata_list:
+             safetensors_key = meta["safetensors_key"]
+
+             if meta["is_sparse"]:
+                 # Reconstruct sparse matrix from COO format
+                 data = tensors[f"{safetensors_key}_data"]
+                 row = tensors[f"{safetensors_key}_row"]
+                 col = tensors[f"{safetensors_key}_col"]
+
+                 features_matrix = scipy.sparse.coo_matrix(
+                     (data, (row, col)), shape=tuple(meta["shape"])
+                 ).tocsr()  # Convert back to CSR format
+             else:
+                 features_matrix = tensors[safetensors_key]
+
+             # Reconstruct Features object
+             features = Features(
+                 features=features_matrix,
+                 feature_type=meta["data_type"],
+                 attribute=meta["attribute"],
+                 origin=meta["origin"],
+             )
+
+             features_list.append(features)
+
+         features_dict[key] = features_list
+
+     return features_dict
+
+
  class Features:
      """Stores the features produced by any featurizer."""

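For orientation, here is a minimal round-trip sketch of the new safetensors-backed helpers above. It assumes Features can be constructed positionally as Features(features, feature_type, attribute, origin), matching the keyword form load_features uses, and that save_features/load_features are importable from rasa.shared.nlu.training_data.features; the file name, origins, and array values are purely illustrative.

    # Round-trip sketch (illustrative values; helper names taken from the diff above).
    import numpy as np
    import scipy.sparse

    from rasa.shared.nlu.constants import FEATURE_TYPE_SENTENCE, FEATURE_TYPE_SEQUENCE
    from rasa.shared.nlu.training_data.features import (
        Features,
        save_features,
        load_features,
    )

    features_dict = {
        "text": [
            # one dense sentence-level feature ...
            Features(np.ones((1, 4), dtype=np.float32), FEATURE_TYPE_SENTENCE, "text", "featurizer_a"),
            # ... and one sparse sequence-level feature (stored as COO triplets)
            Features(scipy.sparse.csr_matrix(np.eye(3, dtype=np.float32)), FEATURE_TYPE_SEQUENCE, "text", "featurizer_b"),
        ]
    }

    # All arrays land in a single safetensors file; the returned metadata dict is
    # what callers persist (e.g. as JSON) to reconstruct the Features later.
    metadata = save_features(features_dict, "features.safetensors")
    restored = load_features("features.safetensors", metadata)

    assert restored["text"][0].features.shape == (1, 4)
    assert restored["text"][1].is_sparse()

Sparse features are saved as three COO arrays (data/row/col) plus shape metadata and come back as CSR matrices, so is_sparse() still holds after the round trip.
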
rasa/shared/utils/io.py CHANGED
@@ -13,6 +13,7 @@ from typing import Any, cast, Callable, Dict, List, Optional, Text, Type, TypeVa
  import warnings
  import random
  import string
+
  import portalocker

  from rasa.shared.constants import (
rasa/shared/utils/yaml.py CHANGED
@@ -416,47 +416,6 @@ def validate_raw_yaml_using_schema_file_with_responses(
      )


- def process_content(content: str) -> str:
-     """
-     Process the content to handle both Windows paths and emojis.
-     Windows paths are processed by escaping backslashes but emojis are left untouched.
-
-     Args:
-         content: yaml content to be processed
-     """
-     # Detect common Windows path patterns: e.g., C:\ or \\
-     UNESCAPED_WINDOWS_PATH_PATTERN = re.compile(
-         r"(?<!\w)[a-zA-Z]:(\\[a-zA-Z0-9_ -]+)*(\\)?(?!\\n)"
-     )
-     ESCAPED_WINDOWS_PATH_PATTERN = re.compile(
-         r"(?<!\w)[a-zA-Z]:(\\\\[a-zA-Z0-9_ -]+)+\\\\?(?!\\n)"
-     )
-
-     # Function to escape backslashes in Windows paths but leave other content as is
-     def escape_windows_paths(match: re.Match) -> str:
-         path = str(match.group(0))
-         return path.replace("\\", "\\\\")  # Escape backslashes only in Windows paths
-
-     def unescape_windows_paths(match: re.Match) -> str:
-         path = str(match.group(0))
-         return path.replace("\\\\", "\\")
-
-     # First, process Windows paths by escaping backslashes
-     content = re.sub(UNESCAPED_WINDOWS_PATH_PATTERN, escape_windows_paths, content)
-
-     # Ensure proper handling of emojis by decoding Unicode sequences
-     content = (
-         content.encode("utf-8")
-         .decode("raw_unicode_escape")
-         .encode("utf-16", "surrogatepass")
-         .decode("utf-16")
-     )
-
-     content = re.sub(ESCAPED_WINDOWS_PATH_PATTERN, unescape_windows_paths, content)
-
-     return content
-
-
  def read_yaml(
      content: str,
      reader_type: Union[str, List[str]] = "safe",
@@ -472,9 +431,6 @@
      Raises:
          ruamel.yaml.parser.ParserError: If there was an error when parsing the YAML.
      """
-     if _is_ascii(content):
-         content = process_content(content)
-
      custom_constructor = kwargs.get("custom_constructor", None)

      # Create YAML parser with custom constructor
rasa/studio/auth.py CHANGED
@@ -23,12 +23,10 @@ from rasa.studio.results_logger import with_studio_error_handler, StudioResult
  class StudioAuth:
      """Handles the authentication with the Rasa Studio authentication server."""

-     def __init__(
-         self,
-         studio_config: StudioConfig,
-         verify: bool = True,
-     ) -> None:
+     def __init__(self, studio_config: StudioConfig) -> None:
          self.config = studio_config
+         verify = not studio_config.disable_verify
+
          self.keycloak_openid = KeycloakOpenID(
              server_url=studio_config.authentication_server_url,
              client_id=studio_config.client_id,
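
A small usage sketch of this API change: verify is no longer a constructor argument and is instead derived from the config's new disable_verify field. Field names follow the StudioConfig dataclass in the next file; URLs and realm values are placeholders, and the hand-off of verify to the Keycloak client is implied by the hunk but cut off above.

    # Usage sketch (placeholder values). In 3.10.13 StudioAuth no longer accepts
    # a verify= argument; it derives it from the config's disable_verify field.
    from rasa.studio.auth import StudioAuth
    from rasa.studio.config import StudioConfig

    config = StudioConfig(
        authentication_server_url="https://keycloak.example.com/auth/",  # placeholder
        studio_url="https://studio.example.com/api/graphql",             # placeholder
        client_id="rasa-cli",                                            # placeholder
        realm_name="rasa-studio",                                        # placeholder
        disable_verify=True,  # verify = not disable_verify -> False inside StudioAuth
    )

    auth = StudioAuth(config)  # previously: StudioAuth(config, verify=False)
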
rasa/studio/config.py CHANGED
@@ -2,13 +2,14 @@ from __future__ import annotations

  import os
  from dataclasses import dataclass
- from typing import Dict, Optional, Text
+ from typing import Any, Dict, Optional, Text

  from rasa.utils.common import read_global_config_value, write_global_config_value

  from rasa.studio.constants import (
      RASA_STUDIO_AUTH_SERVER_URL_ENV,
      RASA_STUDIO_CLI_CLIENT_ID_KEY_ENV,
+     RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV,
      RASA_STUDIO_CLI_REALM_NAME_KEY_ENV,
      RASA_STUDIO_CLI_STUDIO_URL_ENV,
      STUDIO_CONFIG_KEY,
@@ -19,6 +20,7 @@ STUDIO_URL_KEY = "studio_url"
  CLIENT_ID_KEY = "client_id"
  REALM_NAME_KEY = "realm_name"
  CLIENT_SECRET_KEY = "client_secret"
+ DISABLE_VERIFY = "disable_verify"


  @dataclass
@@ -27,13 +29,15 @@ class StudioConfig:
      studio_url: Optional[Text]
      client_id: Optional[Text]
      realm_name: Optional[Text]
+     disable_verify: bool = False

-     def to_dict(self) -> Dict[Text, Optional[Text]]:
+     def to_dict(self) -> Dict[Text, Optional[Any]]:
          return {
              AUTH_SERVER_URL_KEY: self.authentication_server_url,
              STUDIO_URL_KEY: self.studio_url,
              CLIENT_ID_KEY: self.client_id,
              REALM_NAME_KEY: self.realm_name,
+             DISABLE_VERIFY: self.disable_verify,
          }

      @classmethod
@@ -43,6 +47,7 @@ class StudioConfig:
              studio_url=data[STUDIO_URL_KEY],
              client_id=data[CLIENT_ID_KEY],
              realm_name=data[REALM_NAME_KEY],
+             disable_verify=data.get(DISABLE_VERIFY, False),
          )

      def write_config(self) -> None:
@@ -73,7 +78,7 @@ class StudioConfig:
          config = read_global_config_value(STUDIO_CONFIG_KEY, unavailable_ok=True)

          if config is None:
-             return StudioConfig(None, None, None, None)
+             return StudioConfig(None, None, None, None, False)

          if not isinstance(config, dict):
              raise ValueError(
@@ -83,7 +88,7 @@ class StudioConfig:
              )

          for key in config:
-             if not isinstance(config[key], str):
+             if not isinstance(config[key], str) and key != DISABLE_VERIFY:
                  raise ValueError(
                      "Invalid config file format. "
                      f"Key '{key}' is not a text value."
@@ -102,6 +107,9 @@ class StudioConfig:
              studio_url=StudioConfig._read_env_value(RASA_STUDIO_CLI_STUDIO_URL_ENV),
              client_id=StudioConfig._read_env_value(RASA_STUDIO_CLI_CLIENT_ID_KEY_ENV),
              realm_name=StudioConfig._read_env_value(RASA_STUDIO_CLI_REALM_NAME_KEY_ENV),
+             disable_verify=bool(
+                 os.getenv(RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV, False)
+             ),
          )

      @staticmethod
@@ -124,4 +132,5 @@ class StudioConfig:
              studio_url=self.studio_url or other.studio_url,
              client_id=self.client_id or other.client_id,
              realm_name=self.realm_name or other.realm_name,
+             disable_verify=self.disable_verify or other.disable_verify,
          )
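
One behavioral note on the env-based path added above: bool(os.getenv(...)) tests truthiness rather than parsing the value, so any non-empty string, including "false" or "0", turns disable_verify on; only an unset or empty variable keeps verification enabled. A quick illustration:

    import os

    # Any non-empty value is truthy, so this *disables* SSL verification:
    os.environ["RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"] = "false"
    assert bool(os.getenv("RASA_STUDIO_CLI_DISABLE_VERIFY_KEY", False)) is True

    # Unset (or empty) keeps the default of verified requests:
    del os.environ["RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"]
    assert bool(os.getenv("RASA_STUDIO_CLI_DISABLE_VERIFY_KEY", False)) is False
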
rasa/studio/constants.py CHANGED
@@ -10,6 +10,7 @@ RASA_STUDIO_AUTH_SERVER_URL_ENV = "RASA_STUDIO_AUTH_SERVER_URL"
  RASA_STUDIO_CLI_STUDIO_URL_ENV = "RASA_STUDIO_CLI_STUDIO_URL"
  RASA_STUDIO_CLI_REALM_NAME_KEY_ENV = "RASA_STUDIO_CLI_REALM_NAME_KEY"
  RASA_STUDIO_CLI_CLIENT_ID_KEY_ENV = "RASA_STUDIO_CLI_CLIENT_ID_KEY"
+ RASA_STUDIO_CLI_DISABLE_VERIFY_KEY_ENV = "RASA_STUDIO_CLI_DISABLE_VERIFY_KEY"

  STUDIO_NLU_FILENAME = "studio_nlu.yml"
  STUDIO_DOMAIN_FILENAME = "studio_domain.yml"
rasa/studio/data_handler.py CHANGED
@@ -76,7 +76,9 @@ class StudioDataHandler:

          return request

-     def _make_request(self, GQL_req: Dict[Any, Any]) -> Dict[Any, Any]:
+     def _make_request(
+         self, GQL_req: Dict[Any, Any], verify: bool = True
+     ) -> Dict[Any, Any]:
          token = KeycloakTokenReader().get_token()
          if token.is_expired():
              token = self.refresh_token(token)
@@ -93,6 +95,7 @@ class StudioDataHandler:
                  "Authorization": f"{token.token_type} {token.access_token}",
                  "Content-Type": "application/json",
              },
+             verify=verify,
          )

          if res.status_code != 200:
@@ -128,7 +131,9 @@ class StudioDataHandler:
              The data from Rasa Studio.
          """
          GQL_req = self._build_request()
-         response = self._make_request(GQL_req)
+         verify = not self.studio_config.disable_verify
+
+         response = self._make_request(GQL_req, verify=verify)
          self._extract_data(response)

      def request_data(
@@ -145,7 +150,9 @@ class StudioDataHandler:
              The data from Rasa Studio.
          """
          GQL_req = self._build_request(intent_names, entity_names)
-         response = self._make_request(GQL_req)
+         verify = not self.studio_config.disable_verify
+
+         response = self._make_request(GQL_req, verify=verify)
          self._extract_data(response)

      def get_config(self) -> Optional[str]:
rasa/studio/upload.py CHANGED
@@ -56,7 +56,10 @@ def _get_selected_entities_and_intents(

  def handle_upload(args: argparse.Namespace) -> None:
      """Uploads primitives to rasa studio."""
-     endpoint = StudioConfig.read_config().studio_url
+     studio_config = StudioConfig.read_config()
+     endpoint = studio_config.studio_url
+     verify = not studio_config.disable_verify
+
      if not endpoint:
          rasa.shared.utils.cli.print_error_and_exit(
              "No GraphQL endpoint found in config. Please run `rasa studio config`."
@@ -76,9 +79,9 @@ def handle_upload(args: argparse.Namespace) -> None:

      # check safely if args.calm is set and not fail if not
      if hasattr(args, "calm") and args.calm:
-         upload_calm_assistant(args, endpoint)
+         upload_calm_assistant(args, endpoint, verify=verify)
      else:
-         upload_nlu_assistant(args, endpoint)
+         upload_nlu_assistant(args, endpoint, verify=verify)


  config_keys = [
@@ -126,7 +129,9 @@ def _get_assistant_name(config: Dict[Text, Any]) -> str:


  @with_studio_error_handler
- def upload_calm_assistant(args: argparse.Namespace, endpoint: str) -> StudioResult:
+ def upload_calm_assistant(
+     args: argparse.Namespace, endpoint: str, verify: bool = True
+ ) -> StudioResult:
      """Uploads the CALM assistant data to Rasa Studio.

      Args:
@@ -216,11 +221,13 @@ def upload_calm_assistant(args: argparse.Namespace, endpoint: str) -> StudioResu
      )

      structlogger.info("Uploading to Rasa Studio...")
-     return make_request(endpoint, graphql_req)
+     return make_request(endpoint, graphql_req, verify)


  @with_studio_error_handler
- def upload_nlu_assistant(args: argparse.Namespace, endpoint: str) -> StudioResult:
+ def upload_nlu_assistant(
+     args: argparse.Namespace, endpoint: str, verify: bool = True
+ ) -> StudioResult:
      """Uploads the classic (dm1) assistant data to Rasa Studio.

      Args:
@@ -268,15 +275,16 @@ def upload_nlu_assistant(args: argparse.Namespace, endpoint: str) -> StudioResul
      graphql_req = build_request(assistant_name, nlu_examples_yaml, domain_yaml)

      structlogger.info("Uploading to Rasa Studio...")
-     return make_request(endpoint, graphql_req)
+     return make_request(endpoint, graphql_req, verify)


- def make_request(endpoint: str, graphql_req: Dict) -> StudioResult:
+ def make_request(endpoint: str, graphql_req: Dict, verify: bool = True) -> StudioResult:
      """Makes a request to the studio endpoint to upload data.

      Args:
          endpoint: The studio endpoint
          graphql_req: The graphql request
+         verify: Whether to verify SSL
      """
      token = KeycloakTokenReader().get_token()
      res = requests.post(
@@ -286,6 +294,7 @@ def make_request(endpoint: str, graphql_req: Dict) -> StudioResult:
              "Authorization": f"{token.token_type} {token.access_token}",
              "Content-Type": "application/json",
          },
+         verify=verify,
      )

      if results_logger.response_has_errors(res.json()):
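
As a hypothetical direct call of the updated helper (placeholder endpoint and payload, and assuming a Studio login token is already cached for KeycloakTokenReader), the new flag is simply forwarded to requests.post:

    from rasa.studio.upload import make_request

    graphql_req = {"query": "{ __typename }"}  # placeholder GraphQL payload
    result = make_request(
        "https://studio.example.com/api/graphql",  # placeholder endpoint
        graphql_req,
        verify=False,  # ends up as requests.post(..., verify=False)
    )
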
rasa/utils/io.py CHANGED
@@ -2,7 +2,6 @@ import asyncio
  import filecmp
  import logging
  import os
- import pickle
  import tempfile
  import warnings
  import re
@@ -98,29 +97,6 @@ def enable_async_loop_debugging(
      return event_loop


- def pickle_dump(filename: Union[Text, Path], obj: Any) -> None:
-     """Saves object to file.
-
-     Args:
-         filename: the filename to save the object to
-         obj: the object to store
-     """
-     with open(filename, "wb") as f:
-         pickle.dump(obj, f)
-
-
- def pickle_load(filename: Union[Text, Path]) -> Any:
-     """Loads an object from a file.
-
-     Args:
-         filename: the filename to load the object from
-
-     Returns: the loaded object
-     """
-     with open(filename, "rb") as f:
-         return pickle.load(f)
-
-
  def create_temporary_file(data: Any, suffix: Text = "", mode: Text = "w+") -> Text:
      """Creates a tempfile.NamedTemporaryFile object for data."""
      encoding = None if "b" in mode else rasa.shared.utils.io.DEFAULT_ENCODING
@@ -191,48 +167,6 @@ def create_validator(
      return FunctionValidator


- def json_unpickle(
-     file_name: Union[Text, Path], encode_non_string_keys: bool = False
- ) -> Any:
-     """Unpickle an object from file using json.
-
-     Args:
-         file_name: the file to load the object from
-         encode_non_string_keys: If set to `True` then jsonpickle will encode non-string
-             dictionary keys instead of coercing them into strings via `repr()`.
-
-     Returns: the object
-     """
-     import jsonpickle.ext.numpy as jsonpickle_numpy
-     import jsonpickle
-
-     jsonpickle_numpy.register_handlers()
-
-     file_content = rasa.shared.utils.io.read_file(file_name)
-     return jsonpickle.loads(file_content, keys=encode_non_string_keys)
-
-
- def json_pickle(
-     file_name: Union[Text, Path], obj: Any, encode_non_string_keys: bool = False
- ) -> None:
-     """Pickle an object to a file using json.
-
-     Args:
-         file_name: the file to store the object to
-         obj: the object to store
-         encode_non_string_keys: If set to `True` then jsonpickle will encode non-string
-             dictionary keys instead of coercing them into strings via `repr()`.
-     """
-     import jsonpickle.ext.numpy as jsonpickle_numpy
-     import jsonpickle
-
-     jsonpickle_numpy.register_handlers()
-
-     rasa.shared.utils.io.write_text_file(
-         jsonpickle.dumps(obj, keys=encode_non_string_keys), file_name
-     )
-
-
  def get_emoji_regex() -> Pattern:
      """Returns regex to identify emojis."""
      return re.compile(
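
The removed pickle and jsonpickle helpers line up with this release's shift toward safetensors-based persistence (see the new rasa/utils/tensorflow/feature_array.py and the features.py helpers above). For code that used json_pickle/json_unpickle purely to round-trip numpy arrays, the equivalent safetensors pattern looks roughly like this; the file name and keys are illustrative:

    import numpy as np
    from safetensors.numpy import save_file, load_file

    # A dict of named numpy arrays instead of an arbitrary pickled object graph.
    arrays = {
        "weights": np.zeros((2, 3), dtype=np.float32),
        "ids": np.arange(5),
    }
    save_file(arrays, "arrays.safetensors")

    restored = load_file("arrays.safetensors")
    assert restored["weights"].shape == (2, 3)
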