@keymanapp/kmc-model 17.0.85-alpha

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (180)
  1. package/.nyc_output/coverage-10524-1681239236645-0.json +1 -0
  2. package/Makefile +38 -0
  3. package/build/cjs-src/lexical-model-compiler.cjs +152688 -0
  4. package/build/src/build-trie.d.ts +40 -0
  5. package/build/src/build-trie.d.ts.map +1 -0
  6. package/build/src/build-trie.js +362 -0
  7. package/build/src/build-trie.js.map +1 -0
  8. package/build/src/join-word-breaker-decorator.d.ts +10 -0
  9. package/build/src/join-word-breaker-decorator.d.ts.map +1 -0
  10. package/build/src/join-word-breaker-decorator.js +121 -0
  11. package/build/src/join-word-breaker-decorator.js.map +1 -0
  12. package/build/src/lexical-model-compiler.d.ts +19 -0
  13. package/build/src/lexical-model-compiler.d.ts.map +1 -0
  14. package/build/src/lexical-model-compiler.js +155 -0
  15. package/build/src/lexical-model-compiler.js.map +1 -0
  16. package/build/src/lexical-model.d.ts +135 -0
  17. package/build/src/lexical-model.d.ts.map +1 -0
  18. package/build/src/lexical-model.js +6 -0
  19. package/build/src/lexical-model.js.map +1 -0
  20. package/build/src/main.d.ts +15 -0
  21. package/build/src/main.d.ts.map +1 -0
  22. package/build/src/main.js +46 -0
  23. package/build/src/main.js.map +1 -0
  24. package/build/src/model-compiler-errors.d.ts +77 -0
  25. package/build/src/model-compiler-errors.d.ts.map +1 -0
  26. package/build/src/model-compiler-errors.js +156 -0
  27. package/build/src/model-compiler-errors.js.map +1 -0
  28. package/build/src/model-defaults.d.ts +56 -0
  29. package/build/src/model-defaults.d.ts.map +1 -0
  30. package/build/src/model-defaults.js +106 -0
  31. package/build/src/model-defaults.js.map +1 -0
  32. package/build/src/model-definitions.d.ts +71 -0
  33. package/build/src/model-definitions.d.ts.map +1 -0
  34. package/build/src/model-definitions.js +189 -0
  35. package/build/src/model-definitions.js.map +1 -0
  36. package/build/src/script-overrides-decorator.d.ts +4 -0
  37. package/build/src/script-overrides-decorator.d.ts.map +1 -0
  38. package/build/src/script-overrides-decorator.js +63 -0
  39. package/build/src/script-overrides-decorator.js.map +1 -0
  40. package/build/test/helpers/index.d.ts +69 -0
  41. package/build/test/helpers/index.d.ts.map +1 -0
  42. package/build/test/helpers/index.js +160 -0
  43. package/build/test/helpers/index.js.map +1 -0
  44. package/build/test/test-compile-model-with-pseudoclosure.d.ts +2 -0
  45. package/build/test/test-compile-model-with-pseudoclosure.d.ts.map +1 -0
  46. package/build/test/test-compile-model-with-pseudoclosure.js +200 -0
  47. package/build/test/test-compile-model-with-pseudoclosure.js.map +1 -0
  48. package/build/test/test-compile-model.d.ts +2 -0
  49. package/build/test/test-compile-model.d.ts.map +1 -0
  50. package/build/test/test-compile-model.js +30 -0
  51. package/build/test/test-compile-model.js.map +1 -0
  52. package/build/test/test-compile-trie.d.ts +2 -0
  53. package/build/test/test-compile-trie.d.ts.map +1 -0
  54. package/build/test/test-compile-trie.js +125 -0
  55. package/build/test/test-compile-trie.js.map +1 -0
  56. package/build/test/test-default-apply-case.d.ts +2 -0
  57. package/build/test/test-default-apply-case.d.ts.map +1 -0
  58. package/build/test/test-default-apply-case.js +105 -0
  59. package/build/test/test-default-apply-case.js.map +1 -0
  60. package/build/test/test-default-search-term-to-key.d.ts +2 -0
  61. package/build/test/test-default-search-term-to-key.d.ts.map +1 -0
  62. package/build/test/test-default-search-term-to-key.js +148 -0
  63. package/build/test/test-default-search-term-to-key.js.map +1 -0
  64. package/build/test/test-error-logger.d.ts +2 -0
  65. package/build/test/test-error-logger.d.ts.map +1 -0
  66. package/build/test/test-error-logger.js +26 -0
  67. package/build/test/test-error-logger.js.map +1 -0
  68. package/build/test/test-join-word-breaker.d.ts +2 -0
  69. package/build/test/test-join-word-breaker.d.ts.map +1 -0
  70. package/build/test/test-join-word-breaker.js +84 -0
  71. package/build/test/test-join-word-breaker.js.map +1 -0
  72. package/build/test/test-model-definitions.d.ts +2 -0
  73. package/build/test/test-model-definitions.d.ts.map +1 -0
  74. package/build/test/test-model-definitions.js +165 -0
  75. package/build/test/test-model-definitions.js.map +1 -0
  76. package/build/test/test-override-script-defaults.d.ts +2 -0
  77. package/build/test/test-override-script-defaults.d.ts.map +1 -0
  78. package/build/test/test-override-script-defaults.js +28 -0
  79. package/build/test/test-override-script-defaults.js.map +1 -0
  80. package/build/test/test-parse-wordlist.d.ts +2 -0
  81. package/build/test/test-parse-wordlist.d.ts.map +1 -0
  82. package/build/test/test-parse-wordlist.js +110 -0
  83. package/build/test/test-parse-wordlist.js.map +1 -0
  84. package/build/test/test-punctuation.d.ts +2 -0
  85. package/build/test/test-punctuation.d.ts.map +1 -0
  86. package/build/test/test-punctuation.js +31 -0
  87. package/build/test/test-punctuation.js.map +1 -0
  88. package/build/test/tsconfig.tsbuildinfo +1 -0
  89. package/build/test/wordbreakers/data.d.ts +35 -0
  90. package/build/test/wordbreakers/data.d.ts.map +1 -0
  91. package/build/test/wordbreakers/data.js +1778 -0
  92. package/build/test/wordbreakers/data.js.map +1 -0
  93. package/build/test/wordbreakers/default-wordbreaker-esm.d.ts +10 -0
  94. package/build/test/wordbreakers/default-wordbreaker-esm.d.ts.map +1 -0
  95. package/build/test/wordbreakers/default-wordbreaker-esm.js +354 -0
  96. package/build/test/wordbreakers/default-wordbreaker-esm.js.map +1 -0
  97. package/build/tsconfig.tsbuildinfo +1 -0
  98. package/build.sh +73 -0
  99. package/coverage/lcov-report/base.css +224 -0
  100. package/coverage/lcov-report/block-navigation.js +87 -0
  101. package/coverage/lcov-report/favicon.png +0 -0
  102. package/coverage/lcov-report/index.html +161 -0
  103. package/coverage/lcov-report/prettify.css +1 -0
  104. package/coverage/lcov-report/prettify.js +2 -0
  105. package/coverage/lcov-report/sort-arrow-sprite.png +0 -0
  106. package/coverage/lcov-report/sorter.js +196 -0
  107. package/coverage/lcov-report/src/build-trie.ts.html +1618 -0
  108. package/coverage/lcov-report/src/index.html +221 -0
  109. package/coverage/lcov-report/src/join-word-breaker-decorator.ts.html +487 -0
  110. package/coverage/lcov-report/src/lexical-model-compiler.ts.html +622 -0
  111. package/coverage/lcov-report/src/main.ts.html +271 -0
  112. package/coverage/lcov-report/src/model-compiler-errors.ts.html +691 -0
  113. package/coverage/lcov-report/src/model-defaults.ts.html +415 -0
  114. package/coverage/lcov-report/src/model-definitions.ts.html +748 -0
  115. package/coverage/lcov-report/src/script-overrides-decorator.ts.html +310 -0
  116. package/coverage/lcov-report/test/helpers/index.html +116 -0
  117. package/coverage/lcov-report/test/helpers/index.ts.html +646 -0
  118. package/coverage/lcov-report/test/index.html +266 -0
  119. package/coverage/lcov-report/test/test-compile-model-with-pseudoclosure.ts.html +802 -0
  120. package/coverage/lcov-report/test/test-compile-model.ts.html +187 -0
  121. package/coverage/lcov-report/test/test-compile-trie.ts.html +541 -0
  122. package/coverage/lcov-report/test/test-default-apply-case.ts.html +466 -0
  123. package/coverage/lcov-report/test/test-default-search-term-to-key.ts.html +628 -0
  124. package/coverage/lcov-report/test/test-error-logger.ts.html +196 -0
  125. package/coverage/lcov-report/test/test-join-word-breaker.ts.html +376 -0
  126. package/coverage/lcov-report/test/test-model-definitions.ts.html +676 -0
  127. package/coverage/lcov-report/test/test-override-script-defaults.ts.html +184 -0
  128. package/coverage/lcov-report/test/test-parse-wordlist.ts.html +466 -0
  129. package/coverage/lcov-report/test/test-punctuation.ts.html +190 -0
  130. package/coverage/lcov-report/test/wordbreakers/data.ts.html +5413 -0
  131. package/coverage/lcov-report/test/wordbreakers/default-wordbreaker-esm.ts.html +1234 -0
  132. package/coverage/lcov-report/test/wordbreakers/index.html +131 -0
  133. package/coverage/lcov.info +5969 -0
  134. package/package.json +61 -0
  135. package/src/build-trie.ts +511 -0
  136. package/src/join-word-breaker-decorator.ts +134 -0
  137. package/src/lexical-model-compiler.ts +179 -0
  138. package/src/lexical-model.ts +150 -0
  139. package/src/main.ts +62 -0
  140. package/src/model-compiler-errors.ts +203 -0
  141. package/src/model-defaults.ts +111 -0
  142. package/src/model-definitions.ts +222 -0
  143. package/src/script-overrides-decorator.ts +75 -0
  144. package/test/README.md +15 -0
  145. package/test/fixtures/example.qaa.joinwordbreaker/example.qaa.joinwordbreaker.model.ts +10 -0
  146. package/test/fixtures/example.qaa.joinwordbreaker/wordlist.tsv +3 -0
  147. package/test/fixtures/example.qaa.scriptusesspaces/example.qaa.scriptusesspaces.model.ts +10 -0
  148. package/test/fixtures/example.qaa.scriptusesspaces/wordlist.tsv +8 -0
  149. package/test/fixtures/example.qaa.sencoten/example.qaa.sencoten.model.kmp.json +45 -0
  150. package/test/fixtures/example.qaa.sencoten/example.qaa.sencoten.model.kps +35 -0
  151. package/test/fixtures/example.qaa.sencoten/example.qaa.sencoten.model.ts +6 -0
  152. package/test/fixtures/example.qaa.sencoten/wordlist.tsv +10 -0
  153. package/test/fixtures/example.qaa.smp/example.qaa.smp.model.ts +6 -0
  154. package/test/fixtures/example.qaa.smp/wordlist.tsv +5 -0
  155. package/test/fixtures/example.qaa.trivial/example.qaa.trivial.model.ts +5 -0
  156. package/test/fixtures/example.qaa.trivial/wordlist.tsv +3 -0
  157. package/test/fixtures/example.qaa.utf16be/example.qaa.utf16be.model.ts +5 -0
  158. package/test/fixtures/example.qaa.utf16be/wordlist.txt +0 -0
  159. package/test/fixtures/example.qaa.utf16le/example.qaa.utf16le.model.ts +5 -0
  160. package/test/fixtures/example.qaa.utf16le/wordlist.txt +0 -0
  161. package/test/fixtures/example.qaa.wordbreaker/example.qaa.wordbreaker.model.ts +9 -0
  162. package/test/fixtures/example.qaa.wordbreaker/wordlist.tsv +3 -0
  163. package/test/helpers/index.ts +187 -0
  164. package/test/test-compile-model-with-pseudoclosure.ts +239 -0
  165. package/test/test-compile-model.ts +34 -0
  166. package/test/test-compile-trie.ts +152 -0
  167. package/test/test-default-apply-case.ts +128 -0
  168. package/test/test-default-search-term-to-key.ts +181 -0
  169. package/test/test-error-logger.ts +38 -0
  170. package/test/test-join-word-breaker.ts +97 -0
  171. package/test/test-model-definitions.ts +198 -0
  172. package/test/test-override-script-defaults.ts +33 -0
  173. package/test/test-parse-wordlist.ts +127 -0
  174. package/test/test-punctuation.ts +35 -0
  175. package/test/tsconfig.json +22 -0
  176. package/test/wordbreakers/README.md +3 -0
  177. package/test/wordbreakers/data.ts +1776 -0
  178. package/test/wordbreakers/default-wordbreaker-esm.ts +383 -0
  179. package/tools/create-override-script-regexp.ts +145 -0
  180. package/tsconfig.json +17 -0
package/package.json ADDED
@@ -0,0 +1,61 @@
1
+ {
2
+ "name": "@keymanapp/kmc-model",
3
+ "description": "Keyman Developer lexical model compiler",
4
+ "keywords": [
5
+ "keyboard",
6
+ "keyman",
7
+ "unicode",
8
+ "lexical-model",
9
+ "predictive-text"
10
+ ],
11
+ "type": "module",
12
+ "exports": {
13
+ ".": "./build/src/main.js"
14
+ },
15
+ "scripts": {
16
+ "build": "tsc -b && npm run build-cjs",
17
+ "build-cjs": "esbuild build/src/lexical-model-compiler.js --bundle --platform=node --external:../../node_modules/* > build/cjs-src/lexical-model-compiler.cjs",
18
+ "test": "cd test && tsc -b && cd .. && c8 --reporter=lcov --reporter=text mocha",
19
+ "prepublishOnly": "npm run build"
20
+ },
21
+ "author": "Marc Durdin <marc@keyman.com> (https://github.com/mcdurdin)",
22
+ "contributors": [
23
+ "Eddie Antonio Santos <Eddie.Santos@nrc-cnrc.gc.ca>",
24
+ "Joshua Horton"
25
+ ],
26
+ "license": "MIT",
27
+ "bugs": {
28
+ "url": "https://github.com/keymanapp/keyman/issues"
29
+ },
30
+ "dependencies": {
31
+ "@keymanapp/keyman-version": "*",
32
+ "@keymanapp/models-types": "*",
33
+ "typescript": "^4.9.5",
34
+ "xml2js": "^0.4.19"
35
+ },
36
+ "devDependencies": {
37
+ "@keymanapp/models-templates": "*",
38
+ "@keymanapp/models-wordbreakers": "*",
39
+ "@types/chai": "^4.1.7",
40
+ "@types/mocha": "^5.2.7",
41
+ "@types/node": "^10.14.6",
42
+ "@types/xml2js": "^0.4.5",
43
+ "c8": "^7.12.0",
44
+ "chai": "^4.3.4",
45
+ "chalk": "^2.4.2",
46
+ "esbuild": "^0.15.7",
47
+ "mocha": "^10.0.0",
48
+ "ts-node": "^10.9.1"
49
+ },
50
+ "mocha": {
51
+ "spec": "build/test/**/test-*.js",
52
+ "require": [
53
+ "source-map-support/register"
54
+ ]
55
+ },
56
+ "repository": {
57
+ "type": "git",
58
+ "url": "git+https://github.com/keymanapp/keyman.git"
59
+ },
60
+ "version": "17.0.85-alpha"
61
+ }
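
The manifest above declares "type": "module" with a single "." export pointing at ./build/src/main.js, so consumers load the compiler as an ES module. A minimal, hypothetical sketch follows; the actual named exports live in build/src/main.js, which is not reproduced in this diff, so none are assumed here.

// Minimal sketch (assumption: run as an ES module, so top-level await is available).
// "@keymanapp/kmc-model" resolves through the "." entry in "exports" to ./build/src/main.js.
const kmcModel = await import("@keymanapp/kmc-model");
console.log(Object.keys(kmcModel)); // inspect the compiler's public surface
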
package/src/build-trie.ts ADDED
@@ -0,0 +1,511 @@
1
+ import { readFileSync } from "fs";
2
+ import { log, KeymanCompilerError } from "./model-compiler-errors.js";
3
+
4
+ // Supports LF or CRLF line terminators.
5
+ const NEWLINE_SEPARATOR = /\u000d?\u000a/;
6
+
7
+ /**
8
+ * A word list is (conceptually) an array of pairs: the concrete word form itself + a
9
+ * non-negative count.
10
+ *
11
+ * Since each word should only appear once within the list, we represent it with
12
+ * an associative array pattern keyed by the wordform.
13
+ */
14
+ export type WordList = {[wordform: string]: number};
15
+
16
+ /**
17
+ * Returns a data structure that can be loaded by the TrieModel.
18
+ *
19
+ * It implements a **weighted** trie, whose indices (paths down the trie) are
20
+ * generated by a search key, and not concrete wordforms themselves.
21
+ *
22
+ * @param filenames an array of source files that will be read to generate the trie.
23
+ */
24
+ export function createTrieDataStructure(filenames: string[], searchTermToKey?: (wf: string) => string): string {
25
+ if (typeof searchTermToKey !== "function") {
26
+ throw new TypeError("searchTermToKey must be explicitly specified")
27
+ }
28
+ // Make one big word list out of all of the filenames provided.
29
+ let wordlist: WordList = {};
30
+ filenames.forEach(filename => parseWordListFromFilename(wordlist, filename));
31
+
32
+ let trie = Trie.buildTrie(wordlist, searchTermToKey as Trie.SearchTermToKey);
33
+ return JSON.stringify(trie);
34
+ }
35
+
36
+ /**
37
+ * Parses a word list from a file, merging duplicate entries.
38
+ *
39
+ * The word list may be encoded in:
40
+ *
41
+ * - UTF-8, with or without BOM [exported by most software]
42
+ * - UTF-16, little endian, with BOM [exported by Microsoft Excel]
43
+ *
44
+ * @param wordlist word list to merge entries into (may have existing entries)
45
+ * @param filename filename of the word list
46
+ */
47
+ export function parseWordListFromFilename(wordlist: WordList, filename: string): void {
48
+ _parseWordList(wordlist, new WordListFromFilename(filename));
49
+ }
50
+
51
+ /**
52
+ * Parses a word list from a string. The string should have multiple lines
53
+ * with LF or CRLF line terminators.
54
+ *
55
+ * @param wordlist word list to merge entries into (may have existing entries)
56
+ * @param contents string contents of the word list
57
+ */
58
+ export function parseWordListFromContents(wordlist: WordList, contents: string): void {
59
+ _parseWordList(wordlist, new WordListFromMemory(contents));
60
+ }
61
+
62
+ /**
63
+ * Reads a tab-separated values file into a word list. This function converts all
64
+ * entries into NFC and merges duplicate entries across wordlists. Duplication is
65
+ * on the basis of character-for-character equality after normalisation to NFC.
66
+ *
67
+ * Format specification:
68
+ *
69
+ * - the file is a UTF-8 encoded text file.
70
+ * - new lines are either LF or CRLF.
71
+ * - the file MAY start with the UTF-8 byte-order mark (BOM); that is, if the
72
+ * first three bytes of the file are EF BB BF, these will be interpreted as
73
+ * the BOM and will be ignored.
74
+ * - each non-blank line is either a comment or an entry.
75
+ * - comment lines MUST start with the '#' character on the very first column.
76
+ * - entries are one to three columns, separated by the (horizontal) tab
77
+ * character.
78
+ * - column 1 (REQUIRED): the wordform: can have any character except tab, CR,
79
+ * LF. Surrounding whitespace characters are trimmed.
80
+ * - column 2 (optional): the count: a non-negative integer specifying how many
81
+ * times this entry has appeared in the corpus. Blank means 'indeterminate';
82
+ * commas are permissible in the digits.
83
+ * - column 3 (optional): comment: an informative comment, ignored by the tool.
84
+ *
85
+ * @param wordlist word list to merge entries into (may have existing entries)
86
+ * @param contents contents of the file to import
87
+ */
88
+ function _parseWordList(wordlist: WordList, source: WordListSource): void {
89
+ const TAB = "\t";
90
+
91
+ let wordsSeenInThisFile = new Set<string>();
92
+
93
+ for (let [lineno, line] of source.lines()) {
94
+ // Remove the byte-order mark (BOM) from the beginning of the string.
95
+ // Because `contents` can be the concatenation of several files, we have to remove
96
+ // the BOM from every possible start of file -- i.e., beginning of every line.
97
+ line = line.replace(/^\uFEFF/, '').trim();
98
+
99
+ if (line.startsWith('#') || line === "") {
100
+ continue; // skip comments and empty lines
101
+ }
102
+
103
+ // The third column is the comment. Always ignored!
104
+ let [wordform, countText] = line.split(TAB);
105
+
106
+ // Clean the word form.
107
+ let original = wordform;
108
+
109
+ wordform = wordform.normalize('NFC');
110
+ if (original !== wordform) {
111
+ // Mixed normalization forms are yucky! Warn about it.
112
+ log(
113
+ KeymanCompilerError.CWARN_MixedNormalizationForms,
114
+ `“${wordform}” is not in Unicode NFC. Automatically converting to NFC.`,
115
+ {filename: source.name, lineno}
116
+ )
117
+ }
118
+
119
+ wordform = wordform.trim()
120
+
121
+ countText = (countText || '').trim().replace(/,/g, '');
122
+ let count = parseInt(countText, 10);
123
+
124
+ // When parsing a decimal integer fails (e.g., blank or something else):
125
+ if (!isFinite(count) || count < 0) {
126
+ // TODO: is this the right thing to do?
127
+ // Treat it like a hapax legomenon -- it exists, but only once.
128
+ count = 1;
129
+ }
130
+
131
+ if (wordsSeenInThisFile.has(wordform)) {
132
+ // The same word seen across multiple files is fine,
133
+ // but a word seen multiple times in one file is a problem!
134
+ log(
135
+ KeymanCompilerError.CWARN_DuplicateWordInSameFile,
136
+ `duplicate word “${wordform}” found in same file; summing counts`,
137
+ {filename: source.name, lineno}
138
+ )
139
+ }
140
+ wordsSeenInThisFile.add(wordform);
141
+
142
+ wordlist[wordform] = (isNaN(wordlist[wordform]) ? 0 : wordlist[wordform] || 0) + count;
143
+ }
144
+ }
145
+
146
+ type LineNoAndText = [number, string];
147
+
148
+ interface WordListSource {
149
+ readonly name: string;
150
+ lines(): Iterable<LineNoAndText>;
151
+ }
152
+
153
+ class WordListFromMemory implements WordListSource {
154
+ readonly name = '<memory>';
155
+ private readonly _contents: string;
156
+
157
+ constructor(contents: string) {
158
+ this._contents = contents;
159
+ }
160
+
161
+ *lines() {
162
+ yield *enumerateLines(this._contents.split(NEWLINE_SEPARATOR));
163
+ }
164
+ }
165
+
166
+ class WordListFromFilename {
167
+ readonly name: string;
168
+ constructor(filename: string) {
169
+ this.name = filename;
170
+ }
171
+
172
+ *lines() {
173
+ let contents = readFileSync(this.name, detectEncoding(this.name));
174
+ yield *enumerateLines(contents.split(NEWLINE_SEPARATOR));
175
+ }
176
+ }
177
+
178
+ /**
179
+ * Yields pairs of [lineno, line], given an Array of lines.
180
+ */
181
+ function* enumerateLines(lines: string[]): Generator<LineNoAndText> {
182
+ let i = 1;
183
+ for (let line of lines) {
184
+ yield [i, line];
185
+ i++;
186
+ }
187
+ }
188
+
189
+ namespace Trie {
190
+ /**
191
+ * An **opaque** type for a string that is exclusively used as a search key in
192
+ * the trie. There should be a function that takes arbitrary strings
193
+ * (queries) and converts them into a standard search key for a given language
194
+ * model.
195
+ *
196
+ * Fun fact: This opaque type has ALREADY saved my bacon and found a bug!
197
+ */
198
+ type SearchKey = string & { _: 'SearchKey'};
199
+
200
+ /**
201
+ * A function that converts a string (word form or query) into a search key
202
+ * (secretly, this is also a string).
203
+ */
204
+ export interface SearchTermToKey {
205
+ (wordform: string): SearchKey;
206
+ }
207
+
208
+ // The following trie implementation has been (heavily) derived from trie-ing
209
+ // by Conrad Irwin.
210
+ //
211
+ // trie-ing is distributed under the terms of the MIT license, reproduced here:
212
+ //
213
+ // The MIT License
214
+ // Copyright (c) 2015-2017 Conrad Irwin <conrad.irwin@gmail.com>
215
+ // Copyright (c) 2011 Marc Campbell <marc.e.campbell@gmail.com>
216
+ //
217
+ // Permission is hereby granted, free of charge, to any person obtaining a copy
218
+ // of this software and associated documentation files (the "Software"), to deal
219
+ // in the Software without restriction, including without limitation the rights
220
+ // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
221
+ // copies of the Software, and to permit persons to whom the Software is
222
+ // furnished to do so, subject to the following conditions:
223
+ //
224
+ // The above copyright notice and this permission notice shall be included in
225
+ // all copies or substantial portions of the Software.
226
+ //
227
+ // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
228
+ // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
229
+ // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
230
+ // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
231
+ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
232
+ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
233
+ // THE SOFTWARE.
234
+ //
235
+ // See: https://github.com/ConradIrwin/trie-ing/blob/df55d7af7068d357829db9e0a7faa8a38add1d1d/LICENSE
236
+
237
+ /**
238
+ * An entry in the prefix trie. The matched word is "content".
239
+ */
240
+ interface Entry {
241
+ content: string;
242
+ key: SearchKey;
243
+ weight: number;
244
+ }
245
+
246
+ /**
247
+ * The trie is made up of nodes. A node can be EITHER an internal node (whose
248
+ * only children are other nodes) OR a leaf, which actually contains the word
249
+ * form entries.
250
+ */
251
+ type Node = InternalNode | Leaf;
252
+
253
+ /**
254
+ * An internal node.
255
+ */
256
+ interface InternalNode {
257
+ type: 'internal';
258
+ weight: number;
259
+ // TODO: As an optimization, "values" can be a single string!
260
+ values: string[];
261
+ children: { [codeunit: string]: Node };
262
+ unsorted?: true;
263
+ }
264
+
265
+ /**
266
+ * A leaf node.
267
+ */
268
+ interface Leaf {
269
+ type: 'leaf';
270
+ weight: number;
271
+ entries: Entry[];
272
+ unsorted?: true;
273
+ }
274
+
275
+ /**
276
+ * A sentinel value for when an internal node has contents and requires an
277
+ * "internal" leaf. That is, this internal node has content. Instead of placing
278
+ * entries as children in an internal node, a "fake" leaf is created, and its
279
+ * key is this special internal value.
280
+ *
281
+ * The value is a valid Unicode BMP code point, but it is a "non-character".
282
+ * Unicode will never assign semantics to these characters, as they are
283
+ * intended to be used internally as sentinel values.
284
+ */
285
+ const INTERNAL_VALUE = '\uFDD0';
286
+
287
+ /**
288
+ * Builds a trie from a word list.
289
+ *
290
+ * @param wordlist The wordlist with non-negative weights.
291
+ * @param keyFunction Function that converts word forms into indexed search keys
292
+ * @returns A JSON-serializable object that can be given to the TrieModel constructor.
293
+ */
294
+ export function buildTrie(wordlist: WordList, keyFunction: SearchTermToKey): object {
295
+ let root = new Trie(keyFunction).buildFromWordList(wordlist).root;
296
+ return {
297
+ totalWeight: sumWeights(root),
298
+ root: root
299
+ }
300
+ }
301
+
302
+ /**
303
+ * Wrapper class around the trie's root node and the wordform-to-search-key function.
304
+ */
305
+ class Trie {
306
+ readonly root = createRootNode();
307
+ toKey: SearchTermToKey;
308
+ constructor(wordform2key: SearchTermToKey) {
309
+ this.toKey = wordform2key;
310
+ }
311
+
312
+ /**
313
+ * Populates the trie with the contents of an entire wordlist.
314
+ * @param words a list of word and count pairs.
315
+ */
316
+ buildFromWordList(words: WordList): Trie {
317
+ for (let [wordform, weight] of Object.entries(words)) {
318
+ let key = this.toKey(wordform);
319
+ addUnsorted(this.root, { key, weight, content: wordform }, 0);
320
+ }
321
+ sortTrie(this.root);
322
+ return this;
323
+ }
324
+ }
325
+
326
+ // "Constructors"
327
+ function createRootNode(): Node {
328
+ return {
329
+ type: 'leaf',
330
+ weight: 0,
331
+ entries: []
332
+ };
333
+ }
334
+
335
+ // Implement Trie creation.
336
+
337
+ /**
338
+ * Adds an entry to the trie.
339
+ *
340
+ * Note that the trie will likely be unsorted after the add occurs. Before
341
+ * performing a lookup on the trie, call sortTrie() on the root node!
342
+ *
343
+ * @param node Which node should the entry be added to?
344
+ * @param entry the wordform/weight/key to add to the trie
345
+ * @param index the index in the key and also the trie depth. Should be set to
346
+ * zero when adding onto the root node of the trie.
347
+ */
348
+ function addUnsorted(node: Node, entry: Entry, index: number = 0) {
349
+ // Each node stores the MAXIMUM weight out of all of its descendants, to
350
+ // enable a greedy search through the trie.
351
+ node.weight = Math.max(node.weight, entry.weight);
352
+
353
+ // When should a leaf become an interior node?
354
+ // When it already has a value, but the key of the current value is longer
355
+ // than the prefix.
356
+ if (node.type === 'leaf' && index < entry.key.length && node.entries.length >= 1) {
357
+ convertLeafToInternalNode(node, index);
358
+ }
359
+
360
+ if (node.type === 'leaf') {
361
+ // The key matches this leaf node, so add yet another entry.
362
+ addItemToLeaf(node, entry);
363
+ } else {
364
+ // Push the node down to a lower node.
365
+ addItemToInternalNode(node, entry, index);
366
+ }
367
+
368
+ node.unsorted = true;
369
+ }
370
+
371
+ /**
372
+ * Adds an item to the internal node at a given depth.
373
+ * @param item the entry to add
374
+ * @param index the depth within the trie (index into the entry's key)
375
+ */
376
+ function addItemToInternalNode(node: InternalNode, item: Entry, index: number) {
377
+ let char = item.key[index];
378
+ if (!node.children[char]) {
379
+ node.children[char] = createRootNode();
380
+ node.values.push(char);
381
+ }
382
+ addUnsorted(node.children[char], item, index + 1);
383
+ }
384
+
385
+ function addItemToLeaf(leaf: Leaf, item: Entry) {
386
+ leaf.entries.push(item);
387
+ }
388
+
389
+ /**
390
+ * Mutates the given Leaf to turn it into an InternalNode.
391
+ *
392
+ * NOTE: the node passed in will be DESTRUCTIVELY CHANGED into a different
393
+ * type when passed into this function!
394
+ *
395
+ * @param depth depth of the trie at this level.
396
+ */
397
+ function convertLeafToInternalNode(leaf: Leaf, depth: number): void {
398
+ let entries = leaf.entries;
399
+
400
+ // Alias the current node, as the desired type.
401
+ let internal = (<unknown> leaf) as InternalNode;
402
+ internal.type = 'internal';
403
+
404
+ delete leaf.entries;
405
+ internal.values = [];
406
+ internal.children = {};
407
+
408
+ // Convert the old values array into the format for interior nodes.
409
+ for (let item of entries) {
410
+ let char: string;
411
+ if (depth < item.key.length) {
412
+ char = item.key[depth];
413
+ } else {
414
+ char = INTERNAL_VALUE;
415
+ }
416
+
417
+ if (!internal.children[char]) {
418
+ internal.children[char] = createRootNode();
419
+ internal.values.push(char);
420
+ }
421
+ addUnsorted(internal.children[char], item, depth + 1);
422
+ }
423
+
424
+ internal.unsorted = true;
425
+ }
426
+
427
+ /**
428
+ * Recursively sort the trie, in descending order of weight.
429
+ * @param node any node in the trie
430
+ */
431
+ function sortTrie(node: Node) {
432
+ if (node.type === 'leaf') {
433
+ if (!node.unsorted) {
434
+ return;
435
+ }
436
+
437
+ node.entries.sort(function (a, b) { return b.weight - a.weight; });
438
+ } else {
439
+ // We MUST recurse and sort children before returning.
440
+ for (let char of node.values) {
441
+ sortTrie(node.children[char]);
442
+ }
443
+
444
+ if (!node.unsorted) {
445
+ return;
446
+ }
447
+
448
+ node.values.sort((a, b) => {
449
+ return node.children[b].weight - node.children[a].weight;
450
+ });
451
+ }
452
+
453
+ delete node.unsorted;
454
+ }
455
+
456
+ /**
457
+ * O(n) recursive traversal to sum the total weight of all leaves in the
458
+ * trie, starting at the provided node.
459
+ *
460
+ * @param node The node to start summing weights.
461
+ */
462
+ function sumWeights(node: Node): number {
463
+ let val: number;
464
+ if (node.type === 'leaf') {
465
+ val = node.entries
466
+ .map(entry => entry.weight)
467
+ //.map(entry => isNaN(entry.weight) ? 1 : entry.weight)
468
+ .reduce((acc, count) => acc + count, 0);
469
+ } else {
470
+ val = Object.keys(node.children)
471
+ .map((key) => sumWeights(node.children[key]))
472
+ .reduce((acc, count) => acc + count, 0);
473
+ }
474
+
475
+ if(isNaN(val)) {
476
+ console.error("Unexpected NaN has appeared!");
477
+ }
478
+ return val;
479
+ }
480
+ }
481
+
482
+ /**
483
+ * Detects the encoding of a text file.
484
+ *
485
+ * Supported encodings are:
486
+ *
487
+ * - UTF-8, with or without BOM
488
+ * - UTF-16, little endian, with BOM
489
+ *
490
+ * UTF-16 in big endian is explicitly NOT supported! The reason is two-fold:
491
+ * 1) Node does not support it without resorting to an external library (or
492
+ * swapping every byte in the file!); and 2) I'm not sure anything actually
493
+ * outputs in this format anyway!
494
+ *
495
+ * @param filename filename of the file to detect encoding
496
+ */
497
+ function detectEncoding(filename: string): 'utf8' | 'utf16le' {
498
+ let buffer = readFileSync(filename);
499
+ // Note: BOM is U+FEFF
500
+ // In little endian, this is 0xFF 0xFE
501
+ if (buffer[0] == 0xFF && buffer[1] == 0xFE) {
502
+ return 'utf16le';
503
+ } else if (buffer[0] == 0xFE && buffer[1] == 0xFF) {
504
+ // Big endian is NOT supported because Node does not support it (???)
505
+ // See: https://stackoverflow.com/a/14551669/6626414
506
+ throw new Error('UTF-16BE is unsupported')
507
+ } else {
508
+ // Assume it's in UTF-8, with or without a BOM.
509
+ return 'utf8';
510
+ }
511
+ }
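
For orientation, here is a minimal, hypothetical usage sketch of the two entry points exported above, parseWordListFromContents and createTrieDataStructure. The import path, the inline TSV sample, the on-disk wordlist path, and the key function are illustrative assumptions, not part of the package.

// Hypothetical usage sketch of the build-trie.ts exports shown above.
import { createTrieDataStructure, parseWordListFromContents } from "./build-trie.js"; // assumed relative path
import type { WordList } from "./build-trie.js";

// Merge a TSV wordlist held in memory into a WordList map.
const wordlist: WordList = {};
parseWordListFromContents(wordlist, "hello\t5\nworld\t3\n");
// wordlist is now { hello: 5, world: 3 }

// createTrieDataStructure reads wordlists from files on disk and requires an
// explicit searchTermToKey function; it returns the serialized trie as a JSON string.
const trieJSON = createTrieDataStructure(
  ["./wordlist.tsv"],                             // assumed path
  (term) => term.normalize("NFC").toLowerCase()   // simple illustrative key function
);
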
package/src/join-word-breaker-decorator.ts ADDED
@@ -0,0 +1,134 @@
1
+ /// <reference types="@keymanapp/models-types" />
2
+
3
+ /**
4
+ * Returns a word breaker that joins spans of an existing word breaker.
5
+ * Spans are joined if they are connected by a delimiter.
6
+ *
7
+ * @param breaker The word breaker whose results will be decorated.
8
+ * @param joiners What delimiters should be used to join spans.
9
+ */
10
+ export function decorateWithJoin(breaker: WordBreakingFunction, joiners: string[]): WordBreakingFunction {
11
+ // Make a copy so that if the original array is accidentally mutated, it
12
+ // won't affect the joiner.
13
+ const delimiters = joiners.concat();
14
+
15
+ return function (input: string): Span[] {
16
+ let originalSpans = breaker(input);
17
+
18
+ // Implements a finite-state transducer (FST) where:
19
+ // - Transductions are pushed onto a stack
20
+ // - There are three states:
21
+ // - empty stack (initial state)
22
+ // - unjoined
23
+ // - joined
24
+ // - all three states are accepting states
25
+ // - there is NO backtracking on the input
26
+ // (hence the for-loop over the input tape)
27
+ // - each state is a JavaScript callback (function)
28
+ let state = emptyStack;
29
+ let stack: Span[] = [];
30
+ for (let span of originalSpans) {
31
+ state = state(span);
32
+ }
33
+
34
+ return stack;
35
+
36
+ /******************* States *******************/
37
+ function emptyStack(span: Span) {
38
+ stack.push(span);
39
+
40
+ if (isJoiner(span)) {
41
+ return joined;
42
+ } else {
43
+ return unjoined
44
+ }
45
+ }
46
+
47
+ function unjoined(span: Span) {
48
+ // NB: stack has at least one span in it
49
+ if (isJoiner(span)) {
50
+ if (spansAreBackToBack(lastFrom(stack), span)) {
51
+ concatLastSpanInStackWith(span);
52
+ } else {
53
+ // Spans are non-contiguous, so don't join them!
54
+ stack.push(span);
55
+ }
56
+ return joined;
57
+
58
+ } else {
59
+ // Span cannot be joined
60
+ stack.push(span);
61
+ return unjoined;
62
+ }
63
+ }
64
+
65
+ function joined(span: Span) {
66
+ // NB: stack has at least one span in it
67
+ if (!spansAreBackToBack(lastFrom(stack), span)) {
68
+ // Spans are non-contiguous and cannot be joined:
69
+ stack.push(span);
70
+ return unjoined;
71
+ }
72
+
73
+ // Spans are contiguous
74
+ concatLastSpanInStackWith(span);
75
+ if (isJoiner(span)) {
76
+ return joined;
77
+ } else {
78
+ return unjoined;
79
+ }
80
+ }
81
+
82
+ /****************** Helpers ******************/
83
+ function concatLastSpanInStackWith(span: Span) {
84
+ let lastIndex = stack.length - 1;
85
+
86
+ let top = stack[lastIndex];
87
+ let joinedSpan = concatenateSpans(top, span);
88
+ stack[lastIndex] = joinedSpan;
89
+ }
90
+ }
91
+
92
+ function isJoiner(span: Span) {
93
+ return includes(delimiters, span.text);
94
+ }
95
+
96
+ /**
97
+ * Returns true when the spans are contiguous.
98
+ * Order matters when calling this function!
99
+ */
100
+ function spansAreBackToBack(former: Span, latter: Span): boolean {
101
+ return former.end === latter.start;
102
+ }
103
+
104
+ function concatenateSpans(former: Span, latter: Span) {
105
+ if (latter.start !== former.end) {
106
+ throw new Error(`Cannot concatenate non-contiguous spans: ${JSON.stringify(former)}/${JSON.stringify(latter)}`);
107
+ }
108
+
109
+ return {
110
+ start: former.start,
111
+ end: latter.end,
112
+ length: former.length + latter.length,
113
+ text: former.text + latter.text
114
+ };
115
+ }
116
+
117
+ /**
118
+ * When Array.prototype.include() doesn't exist:
119
+ */
120
+ function includes<T>(haystack: T[], needle: T): boolean {
121
+ for (let item of haystack) {
122
+ if (item === needle)
123
+ return true;
124
+ }
125
+ return false;
126
+ }
127
+
128
+ /**
129
+ * Get the last element from the array.
130
+ */
131
+ function lastFrom<T>(array: T[]): T | undefined {
132
+ return array[array.length - 1];
133
+ }
134
+ }
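
To show how the joining decorator above behaves end to end, here is a small hypothetical sketch. The naive breaker is an illustrative stand-in (it is not part of the package), and the Span and WordBreakingFunction types are the ambient declarations referenced from @keymanapp/models-types.

// Hypothetical usage sketch of decorateWithJoin.
import { decorateWithJoin } from "./join-word-breaker-decorator.js"; // assumed relative path

// A toy breaker: alternating runs of letters/digits, plus single non-space characters.
const naiveBreak: WordBreakingFunction = (input: string) => {
  const spans: Span[] = [];
  const pattern = /[\p{L}\p{N}]+|[^\s]/gu;
  let match: RegExpExecArray | null;
  while ((match = pattern.exec(input)) !== null) {
    spans.push({
      start: match.index,
      end: match.index + match[0].length,
      length: match[0].length,
      text: match[0],
    });
  }
  return spans;
};

// Declare "-" as a joiner: contiguous spans around it are merged back together.
const joiningBreak = decorateWithJoin(naiveBreak, ["-"]);
joiningBreak("e-mail me");
// => spans for "e-mail" and "me", instead of "e", "-", "mail", "me"
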