sonatoki 0.11.0__py3-none-any.whl → 0.11.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
sonatoki/Cleaners.py CHANGED
```diff
@@ -1,6 +1,7 @@
 # STL
 import re
 from abc import ABC, abstractmethod
+from sys import intern
 
 # PDM
 from typing_extensions import override
@@ -21,7 +22,7 @@ class RegexCleaner(Cleaner):
     @classmethod
     @override
     def clean(cls, token: str) -> str:
-        return re.sub(cls.pattern, cls.replace, token)
+        return intern(re.sub(cls.pattern, cls.replace, token))
 
 
 class ConsecutiveDuplicates(Cleaner):
@@ -44,29 +45,30 @@ class ConsecutiveDuplicates(Cleaner):
             return token
 
         output = token[0]
-
         last_output = output.lower()  # ignore case in comparison
         for i in range(1, len(token)):
-            cur_char = token[i].lower()
-            if cur_char == last_output:
+            cur_char = intern(token[i])
+            lower_cur_char = intern(cur_char.lower())
+            if lower_cur_char == last_output:
                 continue
-            output += token[i]  # preserve case of string
-            last_output = cur_char
+            output += cur_char  # preserve case of string
+            last_output = lower_cur_char
+        output = intern(output)
         return output
 
 
 class ConsecutiveDuplicatesRe(RegexCleaner):
     """Reference implementation for `ConsecutiveDuplicates`."""
 
-    pattern = re.compile(r"(.)\1+", flags=re.IGNORECASE)
-    replace = r"\1"
+    pattern: "re.Pattern[str]" = re.compile(r"(.)\1+", flags=re.IGNORECASE)
+    replace: str = r"\1"
 
 
 class Lowercase(Cleaner):
     @classmethod
     @override
     def clean(cls, token: str) -> str:
-        return token.lower()
+        return intern(token.lower())
 
 
 __all__ = [
```
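The theme of this release is string interning: every cleaner now passes its result through `sys.intern` before returning it. A minimal standalone sketch (not code from the package) of what interning buys a pipeline that cleans the same tokens over and over:

```python
# Interned strings with equal contents are the same object, so repeated
# cleaning of a common token yields one shared string instead of many
# equal copies, and later comparisons can short-circuit on identity.
from sys import intern

a = intern("".join(["t", "o", "k", "i"]))  # built at runtime, then interned
b = intern("toki")

assert a == b  # equal by value, as before interning
assert a is b  # and now the very same object in memory
```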
sonatoki/Tokenizers.py CHANGED
```diff
@@ -1,6 +1,7 @@
 # STL
 import re
 from abc import ABC, abstractmethod
+from sys import intern
 from typing import Set, List
 
 # PDM
@@ -40,7 +41,11 @@ class RegexTokenizer(Tokenizer):
     @classmethod
     @override
     def tokenize(cls, s: str) -> List[str]:
-        return [clean for word in re.split(cls.pattern, s) if (clean := word.strip())]
+        return [
+            intern(clean)
+            for word in re.split(cls.pattern, s)
+            if (clean := word.strip())
+        ]
 
 
 class Regex1Tokenizer(Tokenizer):
@@ -50,7 +55,9 @@ class Regex1Tokenizer(Tokenizer):
     @override
     def tokenize(cls, s: str) -> List[str]:
         return [
-            clean for word in regex.split(cls.pattern, s) if (clean := word.strip())
+            intern(clean)
+            for word in regex.split(cls.pattern, s)
+            if (clean := word.strip())
         ]
 
 
@@ -65,7 +72,8 @@ class WordTokenizer(SetTokenizer):
     @classmethod
     def add_token(cls, s: str, tokens: List[str], last_match: int, i: int):
         if i > last_match:
-            tokens.append(s[last_match:i])
+            token = intern(s[last_match:i])
+            tokens.append(token)
 
     @classmethod
     def to_tokens(cls, s: str) -> List[str]:
@@ -91,7 +99,7 @@ class WordTokenizer(SetTokenizer):
 
             if NimiUCSUR.filter(s[i]):
                 cls.add_token(s, tokens, last_match, i)
-                tokens.append(s[i])
+                cls.add_token(s, tokens, i, i + 1)
                 i += 1
                 last_match = i
                 continue
```
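The tokenizers intern their output the same way, and `WordTokenizer` now emits single UCSUR characters through `add_token` rather than a bare `tokens.append(s[i])`, so one-character tokens take the same interning path as everything else. A small sketch of that helper's contract, written as a hypothetical free function mirroring the classmethod above:

```python
from sys import intern
from typing import List

def add_token(s: str, tokens: List[str], last_match: int, i: int) -> None:
    # Append the interned slice s[last_match:i]; an empty span is a no-op.
    if i > last_match:
        tokens.append(intern(s[last_match:i]))

s = "toki"
tokens: List[str] = []
add_token(s, tokens, 0, 2)  # multi-character token "to"
add_token(s, tokens, 2, 3)  # single character "k", same interning path
add_token(s, tokens, 3, 3)  # empty span: nothing appended
print(tokens)               # ['to', 'k']
```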
sonatoki/constants.py CHANGED
```diff
@@ -498,7 +498,10 @@ ALL_PUNCT_RANGES_STR = "".join(find_unicode_ranges(ALL_PUNCT))
 
 
 UNICODE_WHITESPACE_RANGES = [
-    "\\U00000020",
+    "\\U00000009",  # tab
+    "\\U0000000A",  # line feed
+    "\\U0000000D",  # carriage return
+    "\\U00000020",  # space
     "\\U000000a0",
     "\\U00001680",
     "\\U00002000-\\U0000200a",
```
sonatoki/utils.py CHANGED
```diff
@@ -1,6 +1,6 @@
 # STL
 import itertools
-from typing import Set, List, TypeVar, Iterable
+from typing import Set, List, Tuple, TypeVar, Iterable
 
 # LOCAL
 from sonatoki.Cleaners import Lowercase, ConsecutiveDuplicates
@@ -72,12 +72,7 @@ def find_unicode_chars(ranges: List[str]) -> str:
     return "".join(result)
 
 
-def overlapping_pairs(iterable: Iterable[T]) -> Iterable[T]:
-    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
-    return overlapping_ntuples(iterable, n=2)
-
-
-def overlapping_ntuples(iterable: Iterable[T], n: int) -> Iterable[T]:
+def overlapping_ntuples(iterable: Iterable[T], n: int) -> Iterable[Tuple[T, ...]]:
     teed = itertools.tee(iterable, n)
     for i in range(1, n):
         for j in range(i):
```
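`overlapping_pairs` is removed (it was only a thin wrapper, `overlapping_ntuples(iterable, n=2)`), and the survivor's return type is corrected to `Iterable[Tuple[T, ...]]`. The hunk above truncates the body, so this is an equivalent reconstruction of the `itertools.tee` sliding window it implements, not a verbatim copy:

```python
import itertools
from typing import Iterable, Tuple, TypeVar

T = TypeVar("T")

def overlapping_ntuples(iterable: Iterable[T], n: int) -> Iterable[Tuple[T, ...]]:
    # Make n copies of the iterator and advance the i-th copy i steps,
    # so zipping them yields every overlapping window of length n.
    teed = itertools.tee(iterable, n)
    for i in range(1, n):
        for _ in range(i):
            next(teed[i], None)
    return zip(*teed)

print(list(overlapping_ntuples("abcd", 2)))
# [('a', 'b'), ('b', 'c'), ('c', 'd')]
```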
{sonatoki-0.11.0.dist-info → sonatoki-0.11.2.dist-info}/METADATA RENAMED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: sonatoki
-Version: 0.11.0
+Version: 0.11.2
 Summary: ilo li moku e toki li pana e sona ni: ni li toki ala toki pona?
 Author-Email: "jan Kekan San (@gregdan3)" <gregory.danielson3@gmail.com>
 License: AGPL-3.0-or-later
```
{sonatoki-0.11.0.dist-info → sonatoki-0.11.2.dist-info}/RECORD RENAMED
```diff
@@ -1,22 +1,22 @@
-sonatoki-0.11.0.dist-info/METADATA,sha256=LXxje9dMJHy-2LGPS0nSRre5jWq4qvGU2bLEpJsUbpo,7303
-sonatoki-0.11.0.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
-sonatoki-0.11.0.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
-sonatoki-0.11.0.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-sonatoki/Cleaners.py,sha256=x2dT3MpDUfbrHA0EP2D3n1sTiKFFi5jw9ha-1dX973o,1958
+sonatoki-0.11.2.dist-info/METADATA,sha256=DVseX_yIbP6VzT8tmmjU0Tual5Fu1Dg-Qr2y-fMptIg,7303
+sonatoki-0.11.2.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+sonatoki-0.11.2.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+sonatoki-0.11.2.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+sonatoki/Cleaners.py,sha256=3qLpX2I2aqNxSmqtSXOvIBDaryEw9ooArvFHl2PflRE,2118
 sonatoki/Configs.py,sha256=cQizs-wqgtM9T9F4kkUsDHpIXN91p4FAgSMefDl333s,5114
 sonatoki/Filters.py,sha256=8HAtR6_Rk6GPboaS_MHwSjZBJxYnAA8kYbRPI0eR6sM,14823
 sonatoki/Preprocessors.py,sha256=RmzkvPVo6Kdx1rZ5HeR9cTtx6oxpp2iLKrOMCUEqIrM,7107
 sonatoki/Scorers.py,sha256=zkdWc0hbtCX1HPdhI2tu2mL4Z5_S5sv7T83MefE4Yik,7756
-sonatoki/Tokenizers.py,sha256=yAHqVF7G-bH5i7nsvYH-dMV2qjeKvLW2W2F-fgyUnR4,6783
+sonatoki/Tokenizers.py,sha256=BUs2jhnthP1WqknFiEYYjazvy-hcHQ-kyHkzQwWe7d0,6941
 sonatoki/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sonatoki/__main__.py,sha256=394ldEB4tFpw1UJLV4S4jJ55NfyLgH8rE7o3VWJoGik,6650
 sonatoki/alphabetic.txt,sha256=duyqAKilD2vLIr75RShCIAnktNJcGeEoQIk18V6czmg,11702
-sonatoki/constants.py,sha256=KhE385XkF-64bGBxkQNisu7vODsjpOfmrL8bnyQP_1k,20572
+sonatoki/constants.py,sha256=ifh9VIYb3cDjbcRFNSRc2x3P_Jxb5LVnWfiHlzsBAV0,20677
 sonatoki/ilo.py,sha256=MWoONZaYh8h92ZrMlG-MkNktFyqHX8Jb5zOD57800KI,6755
 sonatoki/linku.json,sha256=U5KVxFJSageQydXXDsQCT8X_QoNAK2OaZhJmbu0eoZo,299939
 sonatoki/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sonatoki/sandbox.json,sha256=QAviQZ7_nwstUr1ejKegxiIoYmBL2YJIoiZovDYNFL8,147485
 sonatoki/syllabic.txt,sha256=HnqY4TrZ3tPcHah3TsvG9F9gjMrnAGdJ8hHJNHyyUPc,1712
 sonatoki/types.py,sha256=VjYSGAzsbR_d3mg8n-VHg__7LyXpmGdEIMDsbPHyxFw,1265
-sonatoki/utils.py,sha256=sT5xLMEj0aLpy8GP92HKblJU1Wt1m8NUlMgCFWB32xQ,2265
-sonatoki-0.11.0.dist-info/RECORD,,
+sonatoki/utils.py,sha256=jFW2LcqLfeR_dlEEcPiBsXOp8i2Fxdh_t6LCwu-mxN0,2132
+sonatoki-0.11.2.dist-info/RECORD,,
```
{sonatoki-0.11.0.dist-info → sonatoki-0.11.2.dist-info}/WHEEL RENAMED
```diff
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: pdm-backend (2.4.3)
+Generator: pdm-backend (2.4.5)
 Root-Is-Purelib: true
 Tag: py3-none-any
```